Image created with CGImageCreateWithMask is encoded with an inverted alpha channel

NOTE: I have since fixed the code; look for the edit note in maskImage below.

For an iOS 5.0+ app running on the iPad, I created a function that lets the user mask an input image by generating two new images, a foreground image and a background image. When I add them to UIImageViews and display them on a device or in the simulator, I get exactly what I expect.

However, when I save them (encoding the data as part of the session data), the resulting images come back reversed (i.e. the matte is inverted). We have been through the code and found no stray inversions and no copy/paste errors. I suspect it has something to do with kCGImageAlphaPremultipliedFirst vs. kCGImageAlphaPremultipliedLast: when I encode the matted images they are kCGImageAlphaPremultipliedFirst, but when they are loaded back they are kCGImageAlphaPremultipliedLast.
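
For reference, I compared the pixel formats with a quick check along these lines (a minimal sketch; the helper is illustrative, not part of the app):

static void LogAlphaInfo(NSString *label, UIImage *img) {
    // Logs the CGImageAlphaInfo so before/after formats can be compared
    CGImageAlphaInfo info = CGImageGetAlphaInfo(img.CGImage);
    NSLog(@"%@: alphaInfo = %u", label, (unsigned)info);
}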

Before saving: (screenshot)

After saving: (screenshot)

Any help or ideas would be greatly appreciated.

Amy @InsatiableGenius

The functions below are called with:

[self createMask]; 
[self addImageAndBackground:foregroundImg backgroundImg:backgroundImg];


- (UIImage*)maskImage:(UIImage *)image withMask:(UIImage *)maskImage {
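    // Note: CGImageMaskCreate interprets mask samples as inverse alpha
    // (black areas show the masked image, white areas hide it), so the
    // mask must be a grayscale image with no alpha channel.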
    CGImageRef maskRef = maskImage.CGImage;
    CGImageRef mask = CGImageMaskCreate(CGImageGetWidth(maskRef),
                                        CGImageGetHeight(maskRef),
                                        CGImageGetBitsPerComponent(maskRef),
                                        CGImageGetBitsPerPixel(maskRef),
                                        CGImageGetBytesPerRow(maskRef),
                                        CGImageGetDataProvider(maskRef), NULL, false);

    CGImageRef sourceImage = [image CGImage];
    CGImageRef imageWithAlpha = sourceImage;
      if ((CGImageGetAlphaInfo(sourceImage) == kCGImageAlphaNone)
         || (CGImageGetAlphaInfo(sourceImage) == kCGImageAlphaNoneSkipFirst)
        || (CGImageGetAlphaInfo(sourceImage) == kCGImageAlphaNoneSkipLast)) {
         imageWithAlpha = CopyImageAndAddAlphaChannel(sourceImage);
     }

    CGImageRef masked = CGImageCreateWithMask(imageWithAlpha, mask);
    CGImageRelease(mask);

    if (sourceImage != imageWithAlpha) {
        CGImageRelease(imageWithAlpha);
    }

    UIImage* retImage = [UIImage imageWithCGImage:masked];
    CGImageRelease(masked);


    // EDIT STARTS HERE: previously the function simply did "return retImage;".
    // Added an extra render step to force the saved image to carry correct
    // alpha values rather than the mask.
    UIGraphicsBeginImageContext(retImage.size);
    [retImage drawAtPoint:CGPointZero];
    UIImage *newImg = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();

    return newImg;
}
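
For context, the save/load round trip is roughly equivalent to this (a simplified sketch; the real code encodes the images as part of the session data, and the path here is only illustrative):

NSData *pngData = UIImagePNGRepresentation(self.foregroundImg);
[pngData writeToFile:@"/tmp/foreground.png" atomically:YES]; // illustrative path
// ... later, on load ...
UIImage *reloaded = [UIImage imageWithContentsOfFile:@"/tmp/foreground.png"];
// reloaded reports kCGImageAlphaPremultipliedLast, and the matte comes back reversed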


-(void)createMask{

    //take whole screen uiimage from paintview
    //user painted black for mask, set rest of window to white
    [paintView setWhiteBackground:YES];
    //get user painted mask
    UIImage *maskFromPaint = [paintView allocNormalResImageWithBlur:NO /* blur? */];
    [self dumpTestImg:maskFromPaint name:@"maskFromPaint"];
    UIImage *maskNoAlpha = [maskFromPaint resetImageAlpha:1.0];
    [self dumpTestImg:maskNoAlpha name:@"maskFromPaintNoAlpha"];

    //mask has to be gray
    UIImage *maskFromPaintGray = [self convertImageToGrayScale:maskNoAlpha];
    [self dumpTestImg:maskFromPaintGray name:@"maskFromPaintGray"];

    //Had to call this normalize function because some PNGs (8-bit) are not compatible
    UIImage *disp_original = [[UIImage alloc] initWithCGImage:[[original normalize] CGImage]];
    //Resize original to screen size (alternatively we could upscale the paint... not sure which for now)
    disp_original = [disp_original resizedImageWithContentMode:UIViewContentModeScaleAspectFit
                                                        bounds:inputImageView.frame.size
                                          interpolationQuality:kCGInterpolationHigh];

    CGSize imageInViewSize = disp_original.size;

    //use size of displayed original to crop the paintview
    CGRect overlayRect = CGRectMake((int)(inputImageView.frame.size.width - imageInViewSize.width) / 2,
                                    (int)(inputImageView.frame.size.height - imageInViewSize.height) / 2,
                                    (int)imageInViewSize.width,
                                    (int)imageInViewSize.height);

    //here is the actual crop:
    //take the rectangle from the paint view that matches the displayed original
    CGImageRef maskFromPaintimageRef = CGImageCreateWithImageInRect([maskFromPaintGray CGImage], overlayRect);

    UIImage *invertedMaskFromPaint = [UIImage imageWithCGImage:maskFromPaintimageRef];

    //maskImg is the inverse of the cropped paint mask
    self.maskImg = [self invertImage:invertedMaskFromPaint];

    [self dumpTestImg:self.maskImg name:@"maskFromPaintCropped"];

    self.backgroundImg = [self maskImage:disp_original withMask:self.maskImg];
    self.foregroundImg = [self maskImage:disp_original withMask:invertedMaskFromPaint];

    foregroundImgView.image = foregroundImg;
    backgroundImgView.image = backgroundImg;

    foregroundImgView.hidden = NO;
    backgroundImgView.hidden = NO;
    [container bringSubviewToFront:foregroundImgView];
    [container bringSubviewToFront:backgroundImgView];

    [self dumpTestImg:foregroundImg name:@"foregroundImg"];
    [self dumpTestImg:backgroundImg name:@"backgroundImg"];
    //cleanup
    CGImageRelease(maskFromPaintimageRef);
    maskFromPaint = nil;
    maskFromPaintGray = nil;
    maskNoAlpha = nil;
    disp_original = nil;

    //put things back
    [paintView setWhiteBackground:NO];

}


CGImageRef CopyImageAndAddAlphaChannel(CGImageRef sourceImage) {
    CGImageRef retVal = NULL;
    size_t width = CGImageGetWidth(sourceImage);
    size_t height = CGImageGetHeight(sourceImage);

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    // bytesPerRow = 0 lets Core Graphics pick the row stride
    CGContextRef offscreenContext = CGBitmapContextCreate(NULL, width, height,
                                                          8, 0, colorSpace,
                                                          kCGImageAlphaPremultipliedLast);

    if (offscreenContext != NULL) {
        // redraw the source into an RGBA context so the copy gains an alpha channel
        CGContextDrawImage(offscreenContext, CGRectMake(0, 0, width, height), sourceImage);
        retVal = CGBitmapContextCreateImage(offscreenContext);
        CGContextRelease(offscreenContext);
    }

    CGColorSpaceRelease(colorSpace);
    return retVal;
}
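
Caller side, the Copy in the name means the caller owns the result; I run a sanity check along these lines on it (someImage is illustrative):

CGImageRef withAlpha = CopyImageAndAddAlphaChannel(someImage.CGImage);
if (CGImageGetAlphaInfo(withAlpha) != kCGImageAlphaPremultipliedLast) {
    NSLog(@"unexpected alpha info: %u", (unsigned)CGImageGetAlphaInfo(withAlpha));
}
CGImageRelease(withAlpha);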


- (UIImage*)invertImage:(UIImage *)sourceImage {
    CIContext *context = [CIContext contextWithOptions:nil];
    CIFilter *filter = [CIFilter filterWithName:@"CIColorInvert"];
    CIImage *inputImage = [[CIImage alloc] initWithImage:sourceImage];
    [filter setValue:inputImage forKey:@"inputImage"];
    // createCGImage:fromRect: returns a +1 CGImageRef; release it after wrapping
    CGImageRef cgInverted = [context createCGImage:filter.outputImage fromRect:filter.outputImage.extent];
    UIImage *result = [UIImage imageWithCGImage:cgInverted];
    CGImageRelease(cgInverted);
    return result;
}


-(void)addImageAndBackground:(UIImage *)foregroundImgIn backgroundImg:(UIImage *)backgroundImgIn{
    UIImageView *imgVF = [[UIImageView alloc] initWithImage:foregroundImgIn];
    imgVF.userInteractionEnabled = YES;
    [self dumpTestImg:foregroundImgIn name:@"foregroundIn"];

    UIImageView *imgVB = [[UIImageView alloc] initWithImage:backgroundImgIn];
    imgVB.userInteractionEnabled = YES;
    [self dumpTestImg:backgroundImgIn name:@"backgroundIn"];
}

      
