For a professional project, I need to recognize a person from a picture of their face. I can get a CIFaceFeature for each detected face, and from that I would like to derive a unique identifier per face, so that the same person always maps to the same value.
I looked at the trackingID property, but it is always equal to 0. I also tried some calculations based on the positions of the eyes and mouth, but the result depends on the distance between the face and the iPhone.
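To make those geometry calculations comparable across distances, one idea is to normalize the eye and mouth distances by the face width so the values become dimensionless. A minimal sketch of that idea (normalizedSignatureForFace: is a hypothetical helper of mine, not an Apple API, and the choice of ratios is an assumption):

// Hypothetical helper (mine, not an Apple API): a scale-normalized
// signature for one face. Dividing by the face width removes the
// dependence on how far the face is from the camera.
- (CGPoint)normalizedSignatureForFace:(CIFaceFeature *)face
{
    CGFloat faceWidth = face.bounds.size.width;
    CGFloat eyeDistance = hypot(face.rightEyePosition.x - face.leftEyePosition.x,
                                face.rightEyePosition.y - face.leftEyePosition.y);
    CGPoint eyeCenter = CGPointMake((face.leftEyePosition.x + face.rightEyePosition.x) / 2.0,
                                    (face.leftEyePosition.y + face.rightEyePosition.y) / 2.0);
    CGFloat eyeToMouth = hypot(face.mouthPosition.x - eyeCenter.x,
                               face.mouthPosition.y - eyeCenter.y);
    // Both ratios are dimensionless, so the same face photographed at
    // different distances should give roughly the same pair of values.
    return CGPointMake(eyeDistance / faceWidth, eyeToMouth / faceWidth);
}

Here is my detection code: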
- (void)detectForFacesInUIImage:(UIImage *)facePicture
{
    CIImage *image = [CIImage imageWithCGImage:facePicture.CGImage];
    CIDetector *detector = [CIDetector detectorOfType:CIDetectorTypeFace
                                              context:nil
                                              options:[NSDictionary dictionaryWithObject:CIDetectorAccuracyLow
                                                                                  forKey:CIDetectorAccuracy]];
    NSArray *features = [detector featuresInImage:image];

    for (CIFaceFeature *faceObject in features)
    {
        // Core Image uses a bottom-left origin, UIKit a top-left one,
        // so flip the Y coordinate of the face bounds.
        CGRect modifiedFaceBounds = faceObject.bounds;
        modifiedFaceBounds.origin.y = facePicture.size.height - faceObject.bounds.size.height - faceObject.bounds.origin.y;

        CGPoint c = [faceObject leftEyePosition];
        NSLog(@"left eye position %f %f = %f", c.x, c.y, c.y - c.x);
        c = [faceObject rightEyePosition];
        NSLog(@"right eye position %f %f = %f", c.x, c.y, c.y - c.x);
        c = [faceObject mouthPosition];
        NSLog(@"mouth position %f %f = %f", c.x, c.y, c.y - c.x);
        CGRect d = [faceObject bounds];
        NSLog(@"bounds size %f %f", d.size.width, d.size.height);
        NSLog(@"trackingID = %d", faceObject.trackingID);
        NSLog(@"\n\n");

        [self addSubViewWithFrame:modifiedFaceBounds];

        // Mark each detected landmark with a small 10x10 marker view,
        // flipping Y into UIKit coordinates as above.
        if (faceObject.hasLeftEyePosition)
        {
            CGRect leftEye = CGRectMake(faceObject.leftEyePosition.x, facePicture.size.height - faceObject.leftEyePosition.y, 10, 10);
            [self addSubViewWithFrame:leftEye];
        }
        if (faceObject.hasRightEyePosition)
        {
            CGRect rightEye = CGRectMake(faceObject.rightEyePosition.x, facePicture.size.height - faceObject.rightEyePosition.y, 10, 10);
            [self addSubViewWithFrame:rightEye];
        }
        if (faceObject.hasMouthPosition)
        {
            CGRect mouth = CGRectMake(faceObject.mouthPosition.x, facePicture.size.height - faceObject.mouthPosition.y, 10, 10);
            [self addSubViewWithFrame:mouth];
        }
    }
}
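For reference, my understanding from the CIDetector documentation is that trackingID is only populated when the detector is created with the CIDetectorTracking option enabled and fed successive video frames; on a single still image it stays 0. A sketch of that configuration (only the options dictionary changes):

// Sketch: enabling face tracking (CIDetectorTracking, iOS 6+). The
// trackingID is meant to stay stable for the same face across consecutive
// video frames; it does not identify a person across separate pictures.
NSDictionary *options = @{ CIDetectorAccuracy : CIDetectorAccuracyLow,
                           CIDetectorTracking : @YES };
CIDetector *trackingDetector = [CIDetector detectorOfType:CIDetectorTypeFace
                                                  context:nil
                                                  options:options];

So I still need a way to get a stable identifier for the same face across separate still pictures.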