2

I want to know how to access the iPhone's camera and work with it in real time: for example, just draw on the camera view.

Another related Question:

Can I display 4 camera-views at once like in "Photo Booth" on the Mac.

Cœur
  • 37,241
  • 25
  • 195
  • 267
JJgendarme
  • 1,411
  • 2
  • 16
  • 30
  • I'm assuming you want to apply effects to each one as well, like on the Mac. Not sure this would even work on anything but the iPhone 4, given the GPU power needed to pull this off. – Mark Adams Mar 03 '11 at 07:44

2 Answers

4

You can do it by using AVFoundation:

// Configures an AVCaptureSession that delivers 32BGRA video frames to
// captureOutput:didOutputSampleBuffer:fromConnection:, and builds four
// CALayers arranged in a 2x2 grid that all display the same camera feed
// ("Photo Booth"-style multi-preview).
//
// NOTE: this file is pre-ARC (manual retain/release), so every alloc/init
// assigned to a retained property is balanced with an explicit release.
- (void)initCapture {

    // Wrap the default video device in a capture input. This can fail
    // (no camera, restricted capture, simulator), so capture the error
    // and bail out instead of passing error:nil and using a nil input.
    NSError *inputError = nil;
    AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput
                                          deviceInputWithDevice:[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]
                                          error:&inputError];
    if (!captureInput) {
        NSLog(@"initCapture: unable to create capture input: %@", inputError);
        return;
    }

    AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];

    // Drop frames we are too slow to process rather than queuing them up.
    captureOutput.alwaysDiscardsLateVideoFrames = YES;

    // Deliver sample buffers on a private serial queue so the main thread
    // stays free for UI work. dispatch_release is required pre-iOS 6 / MRC.
    dispatch_queue_t queue = dispatch_queue_create("cameraQueue", NULL);
    [captureOutput setSampleBufferDelegate:self queue:queue];
    dispatch_release(queue);

    // Request 32-bit BGRA frames; this matches the CGBitmapContext
    // configuration used in the delegate callback.
    NSString *key = (NSString *)kCVPixelBufferPixelFormatTypeKey;
    NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
    [captureOutput setVideoSettings:videoSettings];

    // Release the temporary after assignment so the retained property holds
    // the only ownership reference (the original leaked one retain here).
    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    self.captureSession = session;
    [session release];

    // A low preset is plenty for four small previews and keeps GPU load down.
    [self.captureSession setSessionPreset:AVCaptureSessionPresetLow];

    [self.captureSession addInput:captureInput];
    [self.captureSession addOutput:captureOutput];
    [captureOutput release];  // the session now retains the output

    // 2x2 grid of preview layers. Each layer is rotated 90 degrees because
    // the camera delivers landscape-oriented frames.
    self.customLayer  = [self xyz_previewLayerWithFrame:CGRectMake(5 - 25, 25, 200, 150)];
    self.customLayer1 = [self xyz_previewLayerWithFrame:CGRectMake(165 - 25, 25, 200, 150)];
    self.customLayer2 = [self xyz_previewLayerWithFrame:CGRectMake(5 - 25, 210 + 25, 200, 150)];
    self.customLayer3 = [self xyz_previewLayerWithFrame:CGRectMake(165 - 25, 210 + 25, 200, 150)];

    // Start the flow of frames only once the session is fully configured.
    [self.captureSession startRunning];
}

// Creates one camera-preview layer with the given frame, rotated to
// portrait, and attaches it to the view hierarchy.
- (CALayer *)xyz_previewLayerWithFrame:(CGRect)frame {
    CALayer *layer = [CALayer layer];
    layer.frame = frame;
    layer.transform = CATransform3DRotate(CATransform3DIdentity, M_PI / 2.0f, 0, 0, 1);
    [self.view.layer addSublayer:layer];
    return layer;
}



#pragma mark -
#pragma mark AVCaptureSession delegate
// AVCaptureVideoDataOutputSampleBufferDelegate callback, invoked on the
// serial "cameraQueue" for every captured video frame. Converts the BGRA
// pixel buffer into a CGImage and pushes it to all four preview layers.
- (void)captureOutput:(AVCaptureOutput *)captureOutput 
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer 
       fromConnection:(AVCaptureConnection *)connection 
{ 


    // Pre-ARC (MRC) code: an explicit autorelease pool is needed because
    // this runs on a GCD queue, not under the main run loop's pool.
    NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];

    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); 
    /* Lock the pixel buffer so its base address stays valid while we read it. */
    CVPixelBufferLockBaseAddress(imageBuffer,0); 
    /* Raw pixel data and geometry of this frame. */
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer); 
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer); 
    size_t width = CVPixelBufferGetWidth(imageBuffer); 
    size_t height = CVPixelBufferGetHeight(imageBuffer);  


    /* Create a CGImageRef from the CVImageBufferRef. */
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); 


    // kCGBitmapByteOrder32Little | premultiplied-first matches the
    // kCVPixelFormatType_32BGRA format requested in initCapture.
    CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage2 = CGBitmapContextCreateImage(newContext); 
    /* The context and color space are no longer needed once the image exists. */
    CGContextRelease(newContext); 
    CGColorSpaceRelease(colorSpace);

    // Layer contents must be set on the main thread. waitUntilDone:YES keeps
    // newImage2 alive until each layer has taken it (performSelector retains
    // its object argument — presumably valid for a CGImageRef via CF
    // bridging; the original author relies on it).
    // NOTE(review): four synchronous hops to the main thread per frame will
    // stall the capture queue; a single dispatch to update all four layers
    // would be cheaper — confirm before changing.
    [self.customLayer performSelectorOnMainThread:@selector(setContents:) withObject: (id) newImage2 waitUntilDone:YES];
    [self.customLayer1 performSelectorOnMainThread:@selector(setContents:) withObject: (id) newImage2 waitUntilDone:YES];
    [self.customLayer2 performSelectorOnMainThread:@selector(setContents:) withObject: (id) newImage2 waitUntilDone:YES];
    [self.customLayer3 performSelectorOnMainThread:@selector(setContents:) withObject: (id) newImage2 waitUntilDone:YES];


    //  UIImage *image= [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationRight];


    /* Balance CGBitmapContextCreateImage; the layers hold their own reference. */
    CGImageRelease(newImage2);

    //  [self.imageView performSelectorOnMainThread:@selector(setImage:) withObject:image waitUntilDone:YES];

    /* Unlock the pixel buffer now that we are done reading it. */
    CVPixelBufferUnlockBaseAddress(imageBuffer,0);

    [pool drain];

} 

It works fine.

http://crayoncoding.blogspot.com/2011/04/iphone-4-camera-views-at-once.html

see the above link for detail code

DShah
  • 9,768
  • 11
  • 71
  • 127
AmiT
  • 56
  • 2
0

You can try having 4 UIImagePickerControllers. I'm not sure if it'll work, but it's worth a shot.

Access the camera with iPhone SDK

Community
  • 1
  • 1
Sohan
  • 1,287
  • 1
  • 15
  • 29