26

I want to get the image enclosed by a closed UIBezierPath (see the image). I draw the image on the UIView in the drawRect: method, and I also draw the lines in drawRect:. How can I extract just the image inside that closed path? Please help me. Thanks in advance.

This code used for draw the bezierpath.

// Build a closed polygon path from the serialized points in pointArray.
// Each element is an NSString produced by NSStringFromCGPoint.
UIBezierPath *aPath = [UIBezierPath bezierPath];
for (NSString *pointString in pointArray) {
    CGPoint point = CGPointFromString(pointString);
    // Start the subpath on the first point only. Checking aPath.isEmpty is
    // O(1) per iteration, unlike [pointArray indexOfObject:pointString],
    // which scans the array each time and also returns 0 again if the first
    // point string happens to repeat later (causing a spurious moveToPoint:).
    if (aPath.isEmpty)
        [aPath moveToPoint:point];
    else
        [aPath addLineToPoint:point];
}
[aPath closePath];

enter image description here

iDev
  • 23,310
  • 7
  • 60
  • 85
Mani
  • 1,310
  • 1
  • 20
  • 40

8 Answers

15

You can use a shapeLayer for that. Something like,

// Build the closed path, then install it as a CAShapeLayer mask so only the
// region inside the path stays visible.
UIBezierPath *aPath = [UIBezierPath bezierPath];
for (NSString *pointString in pointArray) {
    CGPoint point = CGPointFromString(pointString);
    // aPath.isEmpty is true only before the first point is added; this is
    // O(1) per iteration and safe even if a point string repeats, unlike
    // [pointArray indexOfObject:pointString] == 0.
    if (aPath.isEmpty)
        [aPath moveToPoint:point];
    else
        [aPath addLineToPoint:point];
}
[aPath closePath];

CAShapeLayer *shapeLayer = [CAShapeLayer layer];
shapeLayer.path = aPath.CGPath;
[view.layer setMask:shapeLayer];//or make it as [imageview.layer setMask:shapeLayer];
//and add imageView as subview of whichever view you want. draw the original image
//on the imageview in that case

To get it as an image,

// Snapshot the masked view into a UIImage. The ...WithOptions variant with
// opaque == NO preserves transparency outside the mask, and scale == 0.0
// renders at the device's screen scale (Retina-sharp), unlike the plain
// UIGraphicsBeginImageContext which is opaque and scale 1.
UIGraphicsBeginImageContextWithOptions(view.bounds.size, NO, 0.0);
[view.layer renderInContext:UIGraphicsGetCurrentContext()];
UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();

The image variable should now contain the corresponding snapshot.

Nishant Tyagi
  • 9,893
  • 3
  • 40
  • 61
iDev
  • 23,310
  • 7
  • 60
  • 85
  • Thank you ACB...How to get the image from the UIBezierpath. – Mani Nov 02 '12 at 05:10
  • Thank you...I tried this, Its get the image and background is white color. I want image of the UIBezierPath portion(see imag cat head) only. Are you understand me? – Mani Nov 02 '12 at 05:45
  • Why dont you try to set the original image in UIImageView and use the above code on that. Check if that is giving the proper image. Instead of 'view' in code use 'imageview'. – iDev Nov 02 '12 at 05:56
  • My problem is after crop the image then chage to black & white image at the time. Image remaining background is black color. So I am searching particular path of the image. – Mani Nov 02 '12 at 06:05
  • @ACB can you send me sample of your code, because we are not getting proper image – Rajneesh071 Aug 03 '13 at 07:55
  • Super-old but still helpful. Rendering it directly in an imageview does indeed remove the background. – Erik Aug 25 '16 at 06:47
4

For Swift try this : ZImageCropper

ZImageCropper is using following things as core part:

  • UIBezierPath
  • UITouch Events
  • CAShapeLayer
  • CoreGraphics
Mohammad Zaid Pathan
  • 16,304
  • 7
  • 99
  • 130
3

Well, there are a couple of things that you have to take in notice.

  1. Are you passing your UIImageView instead of your UIView?
  2. If you are, did you enable your image view for touch handling?
  3. Subclass your UIImageView and use drawrect() for handling that stuff for you.

If you want only a portion of the image (like the cat), then you need to submask your image according to UIBezierPath.

Updated

The following is a complete working example, change it to your requirements.

ViewController.h:

// View controller that lets the user drag a freehand path over an image
// view and then crop/mask the image to that path (see the Crop: action).
@interface ViewController : UIViewController
{
  // Path accumulated from touch events; recreated on every touchesBegan:.
  UIBezierPath *aPath;
}
// NOTE(review): pathArray and dic are declared but never used in the
// visible implementation — confirm before removing.
@property (nonatomic,retain) NSMutableArray *pathArray;
@property (nonatomic,retain) NSMutableDictionary *dic;
// Image view whose layer receives the clipping mask.
@property (nonatomic,retain) IBOutlet UIImageView *imgView;

@end

ViewController.m:

@interface ViewController ()
 - (IBAction)Crop:(id)sender;
@end

@implementation ViewController
@synthesize pathArray,dic,imgView;
// No additional setup beyond the superclass implementation.
- (void)viewDidLoad
 {
   [super viewDidLoad];

 }
 - (void) setClippingPath:(UIBezierPath *)clippingPath : (UIImageView *)imgView;
{

    NSLog(@"Mask Paths %@",clippingPath);

    CAShapeLayer *maskLayer = [CAShapeLayer layer];
    maskLayer.frame = self.imgView.frame;
    maskLayer.path = [clippingPath CGPath];
    maskLayer.fillColor = [[UIColor whiteColor] CGColor];
    maskLayer.backgroundColor = [[UIColor clearColor] CGColor];


    self.imgView.layer.mask = maskLayer;



}
// Start a fresh bezier path anchored at the first touch location,
// expressed in the image view's coordinate space.
-(void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
 {
   UITouch *firstTouch = [[touches allObjects] objectAtIndex:0];
   self->aPath = [[UIBezierPath alloc] init];
   CGPoint startPoint = [firstTouch locationInView:imgView];
   [self->aPath moveToPoint:startPoint];
 }
// Extend the in-progress path to follow the moving finger.
-(void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event
 {
   UITouch *movingTouch = [[touches allObjects] objectAtIndex:0];
   CGPoint location = [movingTouch locationInView:imgView];
   [aPath addLineToPoint:location];
 }
// Intentionally empty: the accumulated path is consumed later by the
// Crop: action rather than at touch-end time.
-(void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event
 {

 }
// IBAction (wire to a button): applies the user-drawn path as a clipping
// mask on the image view.
- (IBAction)Crop:(id)sender
 {
  [self setClippingPath:aPath :imgView];
 }
Spotlight
  • 472
  • 1
  • 11
  • 22
umer sufyan
  • 1,005
  • 1
  • 12
  • 22
  • Hi, now how do we save this layer as a uiimage with clear background? I tried converting CALayer self.imgView.layer.mask to a UIImage as mentioned in http://stackoverflow.com/questions/3454356/uiimage-from-calayer-iphone-sdk . But it is not the desired result. – ScarletWitch May 23 '13 at 08:07
  • Hi umer sufyan. How do I start to understand uibezierpath? I tried to read the documentation but as I'm just a newbie, I am finding a hard time to get a grip. Please guide me to some good tutorials of uibezierpath. Thanks. – UsamaMan Nov 24 '15 at 10:21
  • its bit late, but you should render your layer some where in and get desired UIImage.. – umer sufyan Jun 12 '17 at 21:07
2

use this pod ios-helpers

// Renders the receiver into a standalone UIImage, stroking and filling it
// with the given colors. Reads self.bounds and self.lineWidth, so this is
// presumably a category method on UIBezierPath (from the ios-helpers pod)
// — TODO confirm. Returns an image sized to the path bounds plus lineWidth
// padding on each side.
- (UIImage *)imageWithStrokeColor: (UIColor *)stroke_color andFillColor: (UIColor *)fill_color {

    // adjust bounds to account for extra space needed for lineWidth
    CGFloat width = self.bounds.size.width + self.lineWidth * 2;
    CGFloat height = self.bounds.size.height + self.lineWidth * 2;
    CGRect bounds = CGRectMake(self.bounds.origin.x, self.bounds.origin.y, width, height);

    // create a view to draw the path in
    UIView *view = [[UIView alloc] initWithFrame:bounds];

    // begin graphics context for drawing
    UIGraphicsBeginImageContextWithOptions(view.frame.size, NO, [[UIScreen mainScreen] scale]);

    // configure the view to render in the graphics context
    // NOTE(review): the view was created just above and is empty, so this
    // render contributes no pixels — confirm whether it can be removed.
    [view.layer renderInContext:UIGraphicsGetCurrentContext()];

    // get reference to the graphics context
    CGContextRef context = UIGraphicsGetCurrentContext();

    // reverse the y-axis to match the opengl coordinate space
    // NOTE(review): UIKit image contexts are already top-left origin; this
    // flip suggests the image is destined for an OpenGL texture — verify.
    CGContextScaleCTM(context, 1, -1);
    CGContextTranslateCTM(context, 0, -view.bounds.size.height);

    // translate matrix so that path will be centered in bounds
    CGContextTranslateCTM(context, -(bounds.origin.x - self.lineWidth), -(bounds.origin.y - self.lineWidth));

    // stroke color
    [stroke_color setStroke];
    [self stroke];

    //fill color
    [fill_color setFill];
    [self fill];

    // get an image of the graphics context
    UIImage *viewImage = UIGraphicsGetImageFromCurrentImageContext();

    // end the context
    UIGraphicsEndImageContext();

    return viewImage;
}
Yijun
  • 433
  • 1
  • 3
  • 9
  • 1
    Could you add more detail to your answer? It's good that you included a link to the docs and example code, but could you describe what exactly it does? – bjb568 Jun 09 '14 at 03:34
1

If you're drawing everything in code just use -addClip, i.e.:

// Build the closed path once, then clip the current drawing context to it
// so the subsequent image draw only paints inside the outline.
UIBezierPath *aPath = [UIBezierPath bezierPath];
for (NSString *pointString in pointArray) {
    CGPoint point = CGPointFromString(pointString);
    // isEmpty is an O(1) "no point added yet" test, unlike
    // [pointArray indexOfObject:pointString], which rescans the array every
    // iteration and returns 0 again if the first point string repeats.
    if (aPath.isEmpty)
        [aPath moveToPoint:point];
    else
        [aPath addLineToPoint:point];
}
[aPath closePath];

// Save/restore the graphics state so the clip does not leak into later
// drawing in the same context.
CGContextSaveGState(context);
{
    [aPath addClip];
    // draw image
    [aPath stroke];
}
CGContextRestoreGState(context);
hypercrypt
  • 15,389
  • 6
  • 48
  • 59
1

iDev's answer is excellent, but if you need a transparent background, then instead of UIGraphicsBeginImageContext(view.bounds.size); you should use

UIGraphicsBeginImageContextWithOptions(view.bounds.size, NO, 1.0);

By default image context is created with opaque == YES, so you have to change it to NO.

0

this works for me...

UIBezierPath *path = ....//your path
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef    context    = CGBitmapContextCreate(nil, frame.size.width, frame.size.height, 8, 4*frame.size.width, colorSpace, (CGBitmapInfo)kCGImageAlphaPremultipliedFirst);
// The context retains the color space; release our reference so it is not
// leaked (Core Foundation objects are not managed by ARC).
CGColorSpaceRelease(colorSpace);

// Clip the bitmap context to the path, then draw the image into it.
CGContextAddPath(context, path.CGPath);
CGContextClip(context);
CGContextDrawImage(context, frame, image.CGImage);

// Capture the result and release the intermediate CF objects — the
// original leaked both the CGImage and the bitmap context, and discarded
// the UIImage it created.
CGImageRef croppedCGImage = CGBitmapContextCreateImage(context);
UIImage *croppedImage = [UIImage imageWithCGImage:croppedCGImage];
CGImageRelease(croppedCGImage);
CGContextRelease(context);
Camo
  • 1,778
  • 14
  • 12
0
// This Work 100%

// Begin a drag: remember the start point and attach a small marker view
// (megView) that follows the finger (repositioned in touchesMoved:).
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event {

mouseSwiped = NO;
UITouch *touch = [touches anyObject];
lastPoint = [touch locationInView:self.view];
megView = [[UIImageView alloc]initWithFrame:CGRectMake(lastPoint.x , lastPoint.y , k_POINT_WIDTH , k_POINT_WIDTH )];
//[megView setImage:[UIImage imageNamed:@"b_image22.png"]];
[megView setContentMode:UIViewContentModeScaleToFill];
// NOTE(review): UIView.alpha is clamped to [0, 1], so 2 behaves as 1.0 —
// confirm intent.
megView.alpha = 2;
//megView.layer.backgroundColor = [UIColor whiteColor].CGColor;
//megView.layer.cornerRadius = megView.frame.size.width / 2;
megView.clipsToBounds = YES;
[self.main_uiview addSubview:megView];


// Start the bezier path that croppedImage later uses as the clip region.
self.bezierPath = [UIBezierPath bezierPath];
[self.bezierPath moveToPoint:lastPoint];

}

  // Touchmove method


 // Extend the stroke as the finger moves: redraw the background image into
 // an image context, stroke the new segment on top, and write the composite
 // back to the background image view.
 - (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event {

mouseSwiped = YES;
UITouch *touch = [touches anyObject];
CGPoint currentPoint = [touch locationInView:self.view];
UIGraphicsBeginImageContext(self.main_uiview.frame.size);
[self.bg_imageview.image drawInRect:CGRectMake(0, 0, self.main_uiview.frame.size.width, self.main_uiview.frame.size.height)];
// Keep the marker view centred under the finger.
megView.frame = CGRectMake( currentPoint.x - k_POINT_WIDTH/2 , currentPoint.y - k_POINT_WIDTH/2 , k_POINT_WIDTH , k_POINT_WIDTH );

// Configure stroke attributes BEFORE CGContextStrokePath: in the original
// the colour and blend mode were set after the stroke, so they never
// affected the drawn segment.
CGContextSetLineCap(UIGraphicsGetCurrentContext(), kCGLineCapRound);
CGContextSetRGBStrokeColor(UIGraphicsGetCurrentContext(), red,green ,blue, 0.20);
CGContextSetBlendMode(UIGraphicsGetCurrentContext(), kCGBlendModeCopy);
CGContextMoveToPoint(UIGraphicsGetCurrentContext(), lastPoint.x, lastPoint.y);
CGContextAddLineToPoint(UIGraphicsGetCurrentContext(), currentPoint.x, currentPoint.y);
CGContextStrokePath(UIGraphicsGetCurrentContext());

  self.bg_imageview.image = UIGraphicsGetImageFromCurrentImageContext();
  [self.bg_imageview setAlpha:opacity];

 UIGraphicsEndImageContext();

 lastPoint = currentPoint;

// Mirror the drawn segment into the bezier path used later for cropping.
[self.bezierPath addLineToPoint:lastPoint];

}

 // TouchEnd Method


   // End of the drag: discard the finger-marker overlay; the accumulated
   // bezierPath is retained for the croppedImage step.
   - (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event

{

[megView removeFromSuperview];

}

      // Image Crop Method


    // Crops the captured background image to the user-drawn path and
    // returns the result with transparency outside the path.
    //
    // Removed from the original: a call pair
    // UIGraphicsGetImageFromCurrentImageContext / UIGraphicsEndImageContext
    // executed with no image context open (the result was unused and the
    // End call unbalanced), and a CGContextSetRGBStrokeColor on the then-nil
    // current context.
    -(UIImage *)croppedImage
   {
// Close the freehand outline before using it as a clip region.
[self.bezierPath closePath];

_b_image = self.bg_imageview.image;
CGSize imageSize = _b_image.size;
CGRect imageRect = CGRectMake(0, 0, imageSize.width, imageSize.height);
// opaque == NO keeps the area outside the clip transparent.
UIGraphicsBeginImageContextWithOptions(imageSize, NO, [[UIScreen mainScreen] scale]);
[self.bezierPath addClip];
[_b_image drawInRect:imageRect];

UIImage *croppedImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return croppedImage;

}

Ramani Hitesh
  • 214
  • 3
  • 15