
I receive an image from the image picker like this:

-(void)imagePickerController:(UIImagePickerController *)picker
didFinishPickingMediaWithInfo:(NSDictionary *)info
{
    NSString *mediaType = info[UIImagePickerControllerMediaType];

    [self dismissViewControllerAnimated:YES completion:nil];

    if ([mediaType isEqualToString:(NSString *)kUTTypeImage]) {
        UIImage *image = info[UIImagePickerControllerOriginalImage];

        //imgvprofileImage.image = image;
        //[self detectForFacesInUIImage:[UIImage imageNamed:@"image00.jpg"]];

        [self detectForFacesInUIImage:image];
    }
    else if ([mediaType isEqualToString:(NSString *)kUTTypeMovie])
    {
        // Code here to support video if enabled
    }
}

When I pass an image loaded from the bundle like this:

[self detectForFacesInUIImage:[UIImage imageNamed:@"image00.jpg"]]; 

the detection works well and finds a face, but when I use the image returned from the camera it doesn't work:

[self detectForFacesInUIImage:image];

This is the function I use to detect the faces:

-(void)detectForFacesInUIImage:(UIImage *)facePicture
{
    CIImage* image = [CIImage imageWithCGImage:facePicture.CGImage];

    CIDetector *detector = [CIDetector detectorOfType:CIDetectorTypeFace
                                               context:nil
                                               options:@{ CIDetectorAccuracy : CIDetectorAccuracyLow }];

    NSArray* features = [detector featuresInImage:image];

    if (features.count == 0) {
        NSLog(@"There are no faces in the captured image");
    }

    for(CIFaceFeature* faceObject in features)
    {
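        // Core Image uses a bottom-left origin while UIKit uses a top-left one,
        // so flip the y coordinate before using the rect as a view frame.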
        CGRect modifiedFaceBounds = faceObject.bounds;
        modifiedFaceBounds.origin.y = facePicture.size.height-faceObject.bounds.size.height-faceObject.bounds.origin.y;

        [self addSubViewWithFrame:facePicture toRect:modifiedFaceBounds] ;
    }
}

2 Answers


The problem is the image orientation.

I can't remember where I got this from, but it works:

- (void) detectForFaces:(CGImageRef)facePicture orientation:(UIImageOrientation)orientation {


    CIImage* image = [CIImage imageWithCGImage:facePicture];

    CIContext *context = [CIContext contextWithOptions:nil];
    NSDictionary *opts = @{ CIDetectorAccuracy : CIDetectorAccuracyLow };
    CIDetector *detector = [CIDetector detectorOfType:CIDetectorTypeFace
                                              context:context
                                              options:opts];

    // Map UIImageOrientation to the EXIF orientation values that CIDetector expects.
    int exifOrientation = 1; // default to "Up" if the orientation is unrecognized
    switch (orientation) {
        case UIImageOrientationUp:
            exifOrientation = 1;
            break;
        case UIImageOrientationDown:
            exifOrientation = 3;
            break;
        case UIImageOrientationLeft:
            exifOrientation = 8;
            break;
        case UIImageOrientationRight:
            exifOrientation = 6;
            break;
        case UIImageOrientationUpMirrored:
            exifOrientation = 2;
            break;
        case UIImageOrientationDownMirrored:
            exifOrientation = 4;
            break;
        case UIImageOrientationLeftMirrored:
            exifOrientation = 5;
            break;
        case UIImageOrientationRightMirrored:
            exifOrientation = 7;
            break;
        default:
            break;
    }

    opts = @{ CIDetectorImageOrientation : @(exifOrientation) };

    NSArray *features = [detector featuresInImage:image options:opts];

    if ([features count] > 0) {
        CIFaceFeature *face = [features lastObject];
        NSLog(@"%@", NSStringFromCGRect(face.bounds));
    }
}


How to use:

UIImage *image = // some image here;
[self detectForFaces:image.CGImage orientation:image.imageOrientation];
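
Called from the picker delegate in the question, the wiring would look something like this (just a sketch combining the pieces above):

- (void)imagePickerController:(UIImagePickerController *)picker
didFinishPickingMediaWithInfo:(NSDictionary *)info
{
    [self dismissViewControllerAnimated:YES completion:nil];

    UIImage *pickedImage = info[UIImagePickerControllerOriginalImage];
    // Pass the image's own orientation so the detector knows how the
    // pixel data is rotated relative to how the photo was shot.
    [self detectForFaces:pickedImage.CGImage orientation:pickedImage.imageOrientation];
}
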
  • How do I get the UIImageOrientation parameter? – mohamed Apr 11 '14 at 15:25
  • @mohamed UIImage has the property `imageOrientation` – Volodymyr B. Apr 11 '14 at 15:29
  • UIImage *image = info[UIImagePickerControllerOriginalImage]; [self detectForFaces:image.CGImage orientation:[image get]] – is this how I call it? Is this right? And I didn't find the property? – mohamed Apr 11 '14 at 15:32
  • And the detection is not as accurate as before? – mohamed Apr 11 '14 at 15:56
  • When I cropped the image before, it was exactly the face, but now it isn't accurate at all? – mohamed Apr 11 '14 at 15:57
  • You can try changing the accuracy in the options, but it should work the same. – Volodymyr B. Apr 11 '14 at 15:57
  • Not the same: the previous version got the whole face, but now it gets from the nose to the neck. – mohamed Apr 11 '14 at 15:59
  • Accuracy depends on the quality and size of the image! The orientation should not affect it. If you want, you can easily rotate the image with a graphics context and try it with the correct image orientation, to be sure the problem isn't this method (a sketch of that follows after these comments). – Volodymyr B. Apr 11 '14 at 16:01
  • Same problem: it works fine with an image I get from the internet, but with the camera it returns the image and finds a face, but in the wrong position? – mohamed Apr 11 '14 at 16:12
  • @mohamed Are you sure your displayed image is not deformed in the image view? With the default options, content can be scaled to fit. If that's how you're checking, check it just in case. – Volodymyr B. Apr 11 '14 at 16:15
  • No, I set the image on a button like this: [buttonProfilePic setImage:cropped forState:UIControlStateNormal]; – mohamed Apr 11 '14 at 16:17
  • First I crop the image. – mohamed Apr 11 '14 at 16:17
  • @mohamed OK, I have no idea what's happening around this method, so I can't help. Just keep trying to find the issue. – Volodymyr B. Apr 11 '14 at 16:21
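
As suggested in the comment about rotating the image with a graphics context, one way to take orientation out of the equation entirely is to redraw the camera image before detection: drawing applies the UIImage's orientation flag, so the copy's pixels are physically rotated and it reports UIImageOrientationUp. A minimal sketch (the helper name normalizedImageFromImage: is illustrative, not from the answer):

- (UIImage *)normalizedImageFromImage:(UIImage *)image
{
    // Nothing to do if the pixel data is already upright.
    if (image.imageOrientation == UIImageOrientationUp) {
        return image;
    }

    // Drawing the image applies its orientation flag, so the copy's
    // pixels are rotated and its orientation becomes Up.
    UIGraphicsBeginImageContextWithOptions(image.size, NO, image.scale);
    [image drawInRect:CGRectMake(0, 0, image.size.width, image.size.height)];
    UIImage *normalized = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return normalized;
}

After normalizing, the simple y-flip in the question's detectForFacesInUIImage: lines up again, because the CGImage and the image on screen share the same orientation.
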

A small correction here:

- (void)detectForFaces:(UIImage *)facePicture orientation:(UIImageOrientation)orientation
{
    CIImage *image = [CIImage imageWithCGImage:facePicture.CGImage];

    CIContext *context = [CIContext contextWithOptions:nil];
    NSDictionary *opts = @{ CIDetectorAccuracy : CIDetectorAccuracyLow };
    CIDetector *detector = [CIDetector detectorOfType:CIDetectorTypeFace
                                              context:context
                                              options:opts];

    // Map UIImageOrientation to the EXIF orientation values that CIDetector expects.
    int exifOrientation = 1; // default to "Up" if the orientation is unrecognized
    switch (orientation) {
        case UIImageOrientationUp:
            exifOrientation = 1;
            break;
        case UIImageOrientationDown:
            exifOrientation = 3;
            break;
        case UIImageOrientationLeft:
            exifOrientation = 8;
            break;
        case UIImageOrientationRight:
            exifOrientation = 6;
            break;
        case UIImageOrientationUpMirrored:
            exifOrientation = 2;
            break;
        case UIImageOrientationDownMirrored:
            exifOrientation = 4;
            break;
        case UIImageOrientationLeftMirrored:
            exifOrientation = 5;
            break;
        case UIImageOrientationRightMirrored:
            exifOrientation = 7;
            break;
        default:
            break;
    }

    opts = @{ CIDetectorImageOrientation : @(exifOrientation) };

    NSArray *features = [detector featuresInImage:image options:opts];

    if ([features count] > 0) {
        CIFaceFeature *face = [features lastObject];
        NSLog(@"%@", NSStringFromCGRect(face.bounds));
    }
}

UIImage *image = // some image here;
[self detectForFaces:image orientation:image.imageOrientation];

Send the UIImage itself, not image.CGImage. This worked for me.
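
One note beyond the answers themselves: since the original goal was to place a view over the face rather than just log it, remember that Core Image reports bounds in the CGImage's pixel space with a bottom-left origin. Assuming the image has been normalized to the Up orientation (see the sketch after the comments above), a conversion to a UIKit rect could look like this; uikitRectForFaceBounds:inImage: is an illustrative helper, not something from either answer:

- (CGRect)uikitRectForFaceBounds:(CGRect)faceBounds inImage:(UIImage *)image
{
    // Flip the y coordinate: Core Image's origin is bottom-left,
    // UIKit's is top-left. Work in pixel units to match the CGImage.
    CGFloat pixelHeight = (CGFloat)CGImageGetHeight(image.CGImage);
    CGRect rect = faceBounds;
    rect.origin.y = pixelHeight - rect.origin.y - rect.size.height;
    return rect;
}

If the image is shown scaled in an image view or button, the rect still has to be scaled from pixel coordinates to the view's coordinates before being used as a frame.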