
I have this code, where self is a UIImage object:

CGFloat scale = [sideSize floatValue] / MIN(self.size.width, self.size.height);
UIGraphicsBeginImageContextWithOptions(CGSizeMake(self.size.width*scale, self.size.height*scale), NO, 0.0);
[self drawInRect:CGRectMake(0, 0, self.size.width*scale, self.size.height*scale)];
UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
SBLog(@"%f, %f", newImage.size.width, newImage.size.height);
return newImage;

But when I use UIImagePNGRepresentation to get the byte data to transfer over the internet, the bytes describe an image with a different size than the one I scaled to.

Moreover, after this code, when I use [newImage CGImage] I get the same wrong size.

So I think UIImagePNGRepresentation uses the CGImage to get the data bytes from the image.

So, how do I make the UIImage and its CGImage the same size?
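
For reference, here is a minimal sketch of what I mean (the ScaledImage function name is just for illustration). The scaled UIImage reports its size in points, while its backing CGImage is measured in pixels; with a scale of 0.0 the context picks up the screen scale, so on a Retina device the PNG ends up twice as large in each dimension:

#import <UIKit/UIKit.h>

// Sketch: compare the point size and the pixel size of a scaled image.
UIImage *ScaledImage(UIImage *original, CGSize targetSize)
{
    // Passing 0.0 makes the context use the main screen's scale
    // (2.0 on Retina), so the backing CGImage has targetSize * 2 pixels.
    UIGraphicsBeginImageContextWithOptions(targetSize, NO, 0.0);
    [original drawInRect:CGRectMake(0, 0, targetSize.width, targetSize.height)];
    UIImage *scaled = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();

    // scaled.size is in points; CGImageGetWidth/Height are in pixels.
    NSLog(@"points: %f x %f, pixels: %zu x %zu",
          scaled.size.width, scaled.size.height,
          CGImageGetWidth(scaled.CGImage), CGImageGetHeight(scaled.CGImage));
    return scaled;
}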

Logioniz

1 Answer

//This method will resize the original image to desired width 
//maintaining the Aspect Ratio
-(UIImage*)getResizedToWidth:(CGFloat)width
{
    UIImage *resultImage = nil;

    CGFloat ar = self.size.width/self.size.height;
    CGFloat ht = width/ar;

    CGSize newSize = CGSizeMake(width, ht);

    UIGraphicsBeginImageContext(newSize);
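    // Note: UIGraphicsBeginImageContext always uses a scale of 1.0,
    // so the resulting image's point size and pixel size are identical.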
    CGContextRef ctx = UIGraphicsGetCurrentContext();

    CGContextScaleCTM(ctx, 1, -1);
    CGContextTranslateCTM(ctx, 0, -newSize.height);

    CGRect imageRect = CGRectMake(0, 0, newSize.width, newSize.height);

    CGContextDrawImage(ctx, imageRect, self.CGImage);

    resultImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();

    return resultImage;
}

//This method will resize the original image to desired height 
//maintaining the Aspect Ratio
-(UIImage*)getResizedToHeight:(CGFloat)height
{
    UIImage *resultImage = nil;

    CGFloat ar = self.size.width/self.size.height;
    CGFloat wd = height*ar;

    CGSize newSize = CGSizeMake(wd, height);

    UIGraphicsBeginImageContext(newSize);
    CGContextRef ctx = UIGraphicsGetCurrentContext();

    CGContextScaleCTM(ctx, 1, -1);
    CGContextTranslateCTM(ctx, 0, -newSize.height);

    CGRect imageRect = CGRectMake(0, 0, newSize.width, newSize.height);

    CGContextDrawImage(ctx, imageRect, self.CGImage);

    resultImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();

    return resultImage;
}

//This method will take in the maxSizeLength and automatically
// detect the maximum side in image and reduce it to given
// maxSideLength maintaining the Aspect Ratio
-(UIImage*)getResizedMaxSideToLength:(CGFloat)maxSideLength
{
    UIImage *src = [UIImage imageWithCGImage:self.CGImage];

    // Resize along whichever side is larger, so that after the call
    // neither side exceeds maxSideLength.
    if (src.size.width >= src.size.height)
    {
        if (src.size.width > maxSideLength)
            src = [src getResizedToWidth:maxSideLength];
    }
    else if (src.size.height > maxSideLength)
    {
        src = [src getResizedToHeight:maxSideLength];
    }

    return src;
}

When you need a CGImage, simply access the CGImage property of any UIImage, like below:

CGImageRef myCGImage = myUIImage.CGImage;
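
If you want to verify the encoded size, here is a short usage sketch, assuming the methods above live in a UIImage category and that photo.jpg is just an example resource:

UIImage *photo = [UIImage imageNamed:@"photo.jpg"];         // example input
UIImage *small = [photo getResizedMaxSideToLength:640.0f];  // longest side -> 640

// The contexts above use a scale of 1.0, so the PNG pixel size
// should match small.size exactly.
NSData *pngData = UIImagePNGRepresentation(small);
NSLog(@"resized to %f x %f, %lu bytes",
      small.size.width, small.size.height, (unsigned long)pngData.length);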
CodenameLambda1
  • The result image has a different orientation – Logioniz Aug 14 '13 at 12:43
  • In that case you'll have to fix the orientation of the image before performing the resize operations with my code (a minimal sketch of that fix follows below). Here's a link on how to fix the orientation of an image: http://stackoverflow.com/questions/5427656/ios-uiimagepickercontroller-result-image-orientation-after-upload/5427890#5427890 – CodenameLambda1 Aug 19 '13 at 04:54
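
A minimal sketch of that orientation fix (the method name getNormalizedImage is hypothetical, following the linked answer's approach of redrawing through UIKit, which honours imageOrientation):

-(UIImage*)getNormalizedImage
{
    // Already upright: nothing to do.
    if (self.imageOrientation == UIImageOrientationUp)
        return self;

    // Drawing via UIKit applies the orientation, so the returned image
    // is UIImageOrientationUp and can safely be resized with CGContextDrawImage.
    UIGraphicsBeginImageContextWithOptions(self.size, NO, self.scale);
    [self drawInRect:CGRectMake(0, 0, self.size.width, self.size.height)];
    UIImage *normalized = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return normalized;
}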