3

*NOTE: I've fixed the code — look for the "EDIT" note in the function below.*

For iOS 5.0+, running on the iPad, I've created a function that allows the user to mask an input image, generating two new images: a foreground image and a background image. When I add these to a UIImageView and display them on the device or the simulator, I get what I expect.

However, when I save these by encoding the data as session data, the resulting images are backwards (i.e. the image matte has been reversed). Two of us have gone over the code; there aren't any places where these are reversed, and there are no copy/paste errors. I thought it could be related to kCGImageAlphaPremultipliedFirst vs. kCGImageAlphaPremultipliedLast: when I encode the matted images, they start out with kCGImageAlphaPremultipliedFirst, but when they are loaded, they are kCGImageAlphaPremultipliedLast.

Before Saving

After Saving: Any help or ideas would be greatly appreciated.

Amy@InsatiableGenius

The functions below are called with :

[self createMask]; 
[self addImageAndBackground:foregroundImg backgroundImg:backgroundImg];


// Masks `image` with `maskImage` and returns a new image whose alpha channel
// reflects the mask. The result is re-rendered into a bitmap context so the
// masked alpha is baked into real pixel data; without that pass, encoding the
// image for saving re-interprets the mask and the matte appears reversed.
- (UIImage*)maskImage:(UIImage *)image withMask:(UIImage *)maskImage {
    CGImageRef maskRef = maskImage.CGImage;
    // Build a true CGImage *mask* (not a regular image used as a mask) —
    // CGImageCreateWithMask treats the two differently.
    CGImageRef mask = CGImageMaskCreate(CGImageGetWidth(maskRef),
                                        CGImageGetHeight(maskRef),
                                        CGImageGetBitsPerComponent(maskRef),
                                        CGImageGetBitsPerPixel(maskRef),
                                        CGImageGetBytesPerRow(maskRef),
                                        CGImageGetDataProvider(maskRef), NULL, false);

    // CGImageCreateWithMask needs the source to carry an alpha channel;
    // add one when the original image lacks it.
    CGImageRef sourceImage = [image CGImage];
    CGImageRef imageWithAlpha = sourceImage;
    CGImageAlphaInfo alphaInfo = CGImageGetAlphaInfo(sourceImage);
    if (alphaInfo == kCGImageAlphaNone
        || alphaInfo == kCGImageAlphaNoneSkipFirst
        || alphaInfo == kCGImageAlphaNoneSkipLast) {
        imageWithAlpha = CopyImageAndAddAlphaChannel(sourceImage);
    }

    CGImageRef masked = CGImageCreateWithMask(imageWithAlpha, mask);
    CGImageRelease(mask);

    // Only release the alpha copy if we actually created one above.
    if (sourceImage != imageWithAlpha) {
        CGImageRelease(imageWithAlpha);
    }

    // Release `masked` exactly once — the original edit left a duplicate
    // `retImage` declaration and a second CGImageRelease(masked), which is a
    // redefinition error plus an over-release.
    UIImage *retImage = [UIImage imageWithCGImage:masked];
    CGImageRelease(masked);

    // EDIT: extra render step forces the correct alpha values (not the mask)
    // into concrete pixels so they survive saving/serialization.
    UIGraphicsBeginImageContext(retImage.size);
    [retImage drawAtPoint:CGPointZero];
    UIImage *newImg = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();

    return newImg;
}


// Builds a grayscale mask from the user's painting, crops it to the displayed
// original image, and produces self.foregroundImg / self.backgroundImg by
// masking the original with the mask and its inverse. Also updates the
// on-screen preview image views.
// NOTE(review): relies on paintView/original/inputImageView ivars and
// project-local UIImage category helpers (resetImageAlpha:, normalize,
// resizedImageWithContentMode:...) defined elsewhere.
-(void)createMask{

    //take whole screen uiimage from paintview
    //user painted black for mask, set rest of window to white
    [paintView setWhiteBackground:YES];
    //get user painted mask
    UIImage *maskFromPaint  = [paintView allocNormalResImageWithBlur:NO/*blur?*/];
     [self dumpTestImg:maskFromPaint name:@"maskFromPaint"];
    // Flatten the alpha so the mask's luminance, not its transparency, drives masking.
    UIImage *maskNoAlpha = [maskFromPaint resetImageAlpha:1.0];
    [self dumpTestImg:maskNoAlpha name:@"maskFromPaintNoAlpha"];

    //mask has to be gray
    UIImage *maskFromPaintGray = [self convertImageToGrayScale:maskNoAlpha];
     [self dumpTestImg:maskFromPaintGray name:@"maskFromPaintGray"];

    //Had to call this normalize function because some pngs are not compatiable (8 bit)
    UIImage *disp_original = [[UIImage alloc] initWithCGImage:[[original normalize] CGImage] ];
    //Resize original to screen size (alternatively we could upscale the paint... not sure which for now)
    disp_original = [disp_original resizedImageWithContentMode:UIViewContentModeScaleAspectFit bounds:inputImageView.frame.size interpolationQuality:kCGInterpolationHigh] ;

   CGSize imageInViewSize = disp_original.size;

    //use size of displayed original to crop the paintview
    // Center the crop rect: the displayed image is aspect-fit inside inputImageView.
    CGRect overlayRect = CGRectMake((int)(inputImageView.frame.size.width - imageInViewSize.width) / 2,
                                    (int)(inputImageView.frame.size.height - imageInViewSize.height) / 2, 
                                    (int)imageInViewSize.width, 
                                    (int)imageInViewSize.height);

    //here is the actual crop
    //get rectangle from paint that is the same size as the displayed original
    CGImageRef maskFromPaintimageRef = CGImageCreateWithImageInRect([maskFromPaintGray CGImage], overlayRect);

    UIImage *invertedMaskFromPaint = [UIImage imageWithCGImage:maskFromPaintimageRef];

    // The painted mask and its inverse select complementary regions:
    // one yields the background, the other the foreground.
     self.maskImg  = [self invertImage:invertedMaskFromPaint];

     [self dumpTestImg:self.maskImg name:@"maskFromPaintCropped"];


    self.backgroundImg = [self    maskImage:disp_original withMask:self.maskImg];
    self.foregroundImg = [self    maskImage:disp_original withMask:invertedMaskFromPaint];

    foregroundImgView.image = foregroundImg;
    backgroundImgView.image = backgroundImg;

    foregroundImgView.hidden =NO;
    backgroundImgView.hidden =NO;
    [container bringSubviewToFront:foregroundImgView];
    [container bringSubviewToFront:backgroundImgView];

    [self dumpTestImg:foregroundImg name:@"foregroundImg"];
    [self dumpTestImg:backgroundImg name:@"backgroundImg"];
    //cleanup
    CGImageRelease(maskFromPaintimageRef);
    maskFromPaint = nil;
    maskFromPaintGray = nil;
    maskNoAlpha = nil;
    disp_original = nil;

    //put things back
    [paintView setWhiteBackground:NO];

}


// Draws sourceImage into a fresh RGBA bitmap and returns the resulting image,
// guaranteeing the copy carries an alpha channel (premultiplied-last).
// Follows the Create rule: the caller owns the returned CGImageRef and must
// release it. Returns NULL if the bitmap context cannot be created.
CGImageRef CopyImageAndAddAlphaChannel(CGImageRef sourceImage) {
    size_t pixelsWide = CGImageGetWidth(sourceImage);
    size_t pixelsHigh = CGImageGetHeight(sourceImage);

    CGColorSpaceRef rgbSpace = CGColorSpaceCreateDeviceRGB();
    // bytesPerRow of 0 lets Core Graphics choose an optimal row stride.
    CGContextRef bitmapContext = CGBitmapContextCreate(NULL, pixelsWide, pixelsHigh,
                                                       8, 0, rgbSpace,
                                                       kCGImageAlphaPremultipliedLast);

    CGImageRef copyWithAlpha = NULL;
    if (bitmapContext != NULL) {
        CGContextDrawImage(bitmapContext,
                           CGRectMake(0, 0, pixelsWide, pixelsHigh),
                           sourceImage);
        copyWithAlpha = CGBitmapContextCreateImage(bitmapContext);
        CGContextRelease(bitmapContext);
    }

    CGColorSpaceRelease(rgbSpace);
    return copyWithAlpha;
}


// Returns a color-inverted copy of `sourceImage` using Core Image's
// CIColorInvert filter (used to flip the painted mask's selection).
- (UIImage*)invertImage:(UIImage *)sourceImage {
    CIContext *context = [CIContext contextWithOptions:nil];
    CIFilter *filter = [CIFilter filterWithName:@"CIColorInvert"];
    CIImage *inputImage = [[CIImage alloc] initWithImage:sourceImage];
    [filter setValue:inputImage forKey:@"inputImage"];
    // createCGImage:fromRect: follows the Create rule — we own the returned
    // CGImageRef and must release it; UIImage retains its own reference.
    // The original version leaked this CGImageRef on every call.
    CGImageRef invertedRef = [context createCGImage:filter.outputImage
                                           fromRect:filter.outputImage.extent];
    UIImage *invertedImage = [UIImage imageWithCGImage:invertedRef];
    CGImageRelease(invertedRef);
    return invertedImage;
}


// Wraps the foreground/background images in interactive UIImageViews and
// dumps debug copies of each input image.
// NOTE(review): the created image views are never added to a superview in
// this method as shown — confirm whether they should be attached to the view
// hierarchy or are consumed elsewhere.
-(void)addImageAndBackground:(UIImage *)foregroundImgIn backgroundImg:(UIImage *)backgroundImgIn{
    // Removed unused local `tmpIV` from the original.
    UIImageView *imgVF = [[UIImageView alloc] initWithImage:foregroundImgIn];
    imgVF.userInteractionEnabled = YES;
    [self dumpTestImg:foregroundImgIn name:@"foregroundIn"];

    UIImageView *imgVB = [[UIImageView alloc] initWithImage:backgroundImgIn];
    imgVB.userInteractionEnabled = YES;
    [self dumpTestImg:backgroundImgIn name:@"backgroundIn"];

}
user1435707
  • 111
  • 1
  • 6
  • CGImageCreateWithMask(imageWithAlpha, mask) behaves differently if "mask" is an image or a mask. Maybe that is the cause of the confusion? – Sten Feb 11 '13 at 20:51
  • @Sten: I think you are right. I'm trying to figure it out. I finally found a similar post that mentions the same thing. I will post solution when solved. Thanks! http://stackoverflow.com/questions/1708225/writing-a-masked-image-to-disk-as-a-png-file – user1435707 Feb 15 '13 at 18:53
  • The simpliest solution to this problem is to change the maskImage function and force it to return alpha values, I've edited the function above – user1435707 Mar 01 '13 at 19:50

0 Answers0