I am capturing an image with AVFoundation, then segueing to another view controller that displays it in a UIImageView. The image is 1936 x 2592, and it takes 5.5 seconds for the screen to update with the image after viewWillAppear is called.
Is this normal, and is it simply due to the size of the image? This is on an iPhone 4 running iOS 6. The delay is not acceptable, so I have to find a solution, probably by scaling the image down first, but I thought I would ask before jumping through hoops.
Update
Reducing the image to 484 x 648 takes even longer: 5.6 seconds, which suggests the delay is not simply a function of pixel count. There was a request in the comments for the capture code I am using; it is at the bottom of this post. Also, I am only capturing the one image and then displaying it.
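For reference, the down-scaling was done along these lines (a minimal sketch; the helper name and the opaque/scale arguments are my own choices, and it assumes the source UIImage can be drawn into a bitmap context):

- (UIImage *)scaledImage:(UIImage *)image toSize:(CGSize)size
{
    // Hypothetical helper: draw the image into a smaller bitmap context
    // and return the result. Opaque = YES, scale 0.0 = device scale.
    UIGraphicsBeginImageContextWithOptions(size, YES, 0.0);
    [image drawInRect:CGRectMake(0.0, 0.0, size.width, size.height)];
    UIImage *scaled = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return scaled;
}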
// Console output:
have image
2013-03-03 11:37:16.741 RPFaceCamera[4867:907] viewWillAppear - time taken: 0.000215
2013-03-03 11:37:22.249 RPFaceCamera[4867:907] viewDidAppear - time taken: 5.507458
//Code
- (void)viewWillAppear:(BOOL)animated
{
    [super viewWillAppear:animated];
    if (self.faceImage != nil) {
        start = [NSDate date]; // ivar used to time how long the display takes
        printf("have image \n");
        self.faceImageView.image = self.faceImage;
        NSLog(@"viewWillAppear - time taken: %f", -[start timeIntervalSinceNow]);
    }
}

- (void)viewDidAppear:(BOOL)animated
{
    [super viewDidAppear:animated];
    NSLog(@"viewDidAppear - time taken: %f", -[start timeIntervalSinceNow]);
}
//capture code - (I think I cribbed this from Apple's AVDemo). Objective-C's formatting leaves a lot to be desired for readability.
- (void)captureImage
{
    AVCaptureConnection *connection =
        [stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
    [stillImageOutput setOutputSettings:[NSDictionary
        dictionaryWithObject:[NSNumber numberWithInt:kCMPixelFormat_32BGRA]
                      forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
    [stillImageOutput captureStillImageAsynchronouslyFromConnection:connection
        completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
            if (error) {
                NSLog(@"captureImage failed");
            } else {
                // Wrap the captured pixel buffer in a CIImage, propagating the
                // sample buffer's attachments (EXIF orientation etc.).
                CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(imageDataSampleBuffer);
                CFDictionaryRef attachments = CMCopyDictionaryOfAttachments(kCFAllocatorDefault,
                                                                            imageDataSampleBuffer,
                                                                            kCMAttachmentMode_ShouldPropagate);
                CIImage *ciImage = [[CIImage alloc] initWithCVPixelBuffer:pixelBuffer
                                                                  options:(__bridge NSDictionary *)attachments];
                if (attachments) CFRelease(attachments); // CMCopy... follows the Create rule, so release it

                NSNumber *orientation = (__bridge NSNumber *)(CMGetAttachment(imageDataSampleBuffer,
                                                                              kCGImagePropertyOrientation,
                                                                              NULL));
                printf("original orientation %d\n", [orientation intValue]);

                // Note: this UIImage is backed by the CIImage, not a CGImage.
                self.faceImage = [UIImage imageWithCIImage:ciImage
                                                     scale:1.0f
                                               orientation:(UIImageOrientation)[orientation intValue]];
                printf("self.faceImage %d\n", (int)self.faceImage.imageOrientation);
                printf("self.faceImage width: %f height: %f\n",
                       self.faceImage.size.width, self.faceImage.size.height);

                __weak __typeof__(self) weakSelf = self;
                [weakSelf performSelectorOnMainThread:@selector(showFaceView)
                                           withObject:nil
                                        waitUntilDone:NO];
            }
        }];
}
// method called on main thread when capture complete
- (void)showFaceView
{
    NSLog(@"showfaceView");
    [self destroyAVCapture];
    [self.delegate dismissCameraWithImage:self.faceImage];
}