5

I want to capture images at specific instances, for example when a button is pushed; but I don't want to show any video preview screen. I guess captureStillImageAsynchronouslyFromConnection is what I need to use for this scenario. Currently, I can capture image if I show a video preview. However, if I remove the code to show the preview, the app crashes with the following output:

2012-04-07 11:25:54.898 imCapWOPreview[748:707] *** Terminating app due to uncaught exception 'NSInvalidArgumentException', reason: '*** -[AVCaptureStillImageOutput captureStillImageAsynchronouslyFromConnection:completionHandler:] - inactive/invalid connection passed.' *** First throw call stack: (0x336ee8bf 0x301e21e5 0x3697c35d 0x34187 0x33648435 0x310949eb 0x310949a7 0x31094985 0x310946f5 0x3109502d 0x3109350f 0x31092f01 0x310794ed 0x31078d2d 0x37db7df3 0x336c2553 0x336c24f5 0x336c1343 0x336444dd 0x336443a5 0x37db6fcd 0x310a7743 0x33887 0x3382c) terminate called throwing an exception(lldb)

So here is my implementation:

BIDViewController.h:

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>

// View controller that captures still images on demand without showing a
// live camera preview.
@interface BIDViewController : UIViewController
{
    // Ivar so the output (and its connections) survives between
    // -setupAVCapture and -doCap:.
    AVCaptureStillImageOutput *stillImageOutput;
}
// Container view for the (optional) video preview layer.
@property (strong, nonatomic) IBOutlet UIView *videoPreview;
// Declared here because the .m file has "@synthesize capturedIm;" —
// without this property declaration that @synthesize fails to compile.
@property (strong, nonatomic) UIImage *capturedIm;
// Captures a single still image from the current video connection.
- (IBAction)doCap:(id)sender;

@end

Relevant stuff inside BIDViewController.m:

#import "BIDViewController.h"

@interface BIDViewController ()

@end

@implementation BIDViewController
@synthesize capturedIm;
@synthesize videoPreview;

- (void)viewDidLoad
{
[super viewDidLoad];
// -setupAVCapture returns NO when the camera input could not be created
// (e.g. in the simulator); surface that instead of silently ignoring the
// BOOL and continuing with a dead session.
if (![self setupAVCapture]) {
    NSLog(@"ERROR: AV capture setup failed");
}
}

// Builds the capture pipeline (camera input -> JPEG still-image output).
// Returns NO when the camera input cannot be created.
- (BOOL)setupAVCapture
{
NSError *error = nil;

// NOTE(review): the session was previously an ordinary local, so ARC
// released it as soon as this method returned. With no preview layer
// retaining it, the still-image output's connections became
// inactive/invalid — the cause of the crash in -doCap:. Keeping a strong
// reference (a static here; a strong @property would be cleaner) keeps
// the session and its connections alive.
static AVCaptureSession *session = nil;
session = [AVCaptureSession new];
[session setSessionPreset:AVCaptureSessionPresetHigh];

/*
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
captureVideoPreviewLayer.frame = self.videoPreview.bounds;
[self.videoPreview.layer addSublayer:captureVideoPreviewLayer];    
 */

// Select a video device, make an input
AVCaptureDevice *backCamera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:backCamera error:&error];
// Check the returned object, not the error pointer: Cocoa only guarantees
// that *error is meaningful when the call actually fails.
if (!input)
    return NO;
if ([session canAddInput:input])
    [session addInput:input];

// Make a still image output that produces JPEG data.
stillImageOutput = [AVCaptureStillImageOutput new];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
[stillImageOutput setOutputSettings:outputSettings];    
if ([session canAddOutput:stillImageOutput])
    [session addOutput:stillImageOutput];

[session startRunning];

return YES;
}

// Captures one still image from the output's video connection.
- (IBAction)doCap:(id)sender {
// Find the connection that carries video into the still-image output.
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in stillImageOutput.connections)
{
    for (AVCaptureInputPort *port in [connection inputPorts])
    {
        if ([[port mediaType] isEqual:AVMediaTypeVideo] )
        {
            videoConnection = connection;
            break;
        }
    }
    if (videoConnection) { break; }
}

// Guard against a missing or inactive connection instead of letting
// AVFoundation throw NSInvalidArgumentException ("inactive/invalid
// connection passed") — the crash reported above.
if (!videoConnection || !videoConnection.active) {
    NSLog(@"ERROR: no active video connection; is the capture session retained and running?");
    return;
}

[stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection 
  completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *__strong error) {
      // Do something with the captured image
  }];

}

With the above code, if doCap is called, then the crash occurs. On the other hand, if I uncomment the following code in the setupAVCapture function (i.e., remove the comment markers around it)

/*
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
captureVideoPreviewLayer.frame = self.videoPreview.bounds;
[self.videoPreview.layer addSublayer:captureVideoPreviewLayer];    
 */

then it works without any problem.

In summary, my question is: how can I capture images at controlled instances without showing a preview?

Jojodmo
  • 23,357
  • 13
  • 65
  • 107
ali gurbuz
  • 190
  • 1
  • 3
  • 14
  • most simple way would be `self.videoPreview.hidden = YES;` – Felix Apr 07 '12 at 11:26
  • the code works on my iphone 4S – Felix Apr 07 '12 at 11:39
  • @phix23 hiding the videoPreview worked for me too... Then the next question is; is there a performance penalty with this approach ? i.e. is redundant processing spent for sending video preview data to an hidden layer ? – ali gurbuz Apr 07 '12 at 14:11

3 Answers3

8

I use the following code for capturing from front facing camera (if available) or using the back camera. Works well on my iPhone 4S.

// Sets up a medium-preset capture session with the front camera (if any)
// and a JPEG still-image output, then starts it running.
-(void)viewDidLoad{

    // Always call super in UIViewController lifecycle overrides.
    [super viewDidLoad];

    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    session.sessionPreset = AVCaptureSessionPresetMedium;

    AVCaptureDevice *device = [self frontFacingCameraIfAvailable];

    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (!input) {
        // Bail out: passing a nil input to -addInput: raises
        // NSInvalidArgumentException.
        NSLog(@"ERROR: trying to open camera: %@", error);
        return;
    }
    if ([session canAddInput:input]) {
        [session addInput:input];
    }

//stillImageOutput is a global variable in .h file: "AVCaptureStillImageOutput *stillImageOutput;"
    stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
    [stillImageOutput setOutputSettings:outputSettings];

    if ([session canAddOutput:stillImageOutput]) {
        [session addOutput:stillImageOutput];
    }

    [session startRunning];
}

// Returns the front-facing camera when one exists, otherwise the system's
// default video capture device.
-(AVCaptureDevice *)frontFacingCameraIfAvailable{

    // Early-return on the first device positioned on the front of the phone.
    for (AVCaptureDevice *candidate in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if (candidate.position == AVCaptureDevicePositionFront) {
            return candidate;
        }
    }

    // Couldn't find one on the front, so just get the default video device.
    return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}

// Grabs one still frame from the running session and shows it in self.vImage.
-(IBAction)captureNow{

    // Locate the connection that feeds video into the still-image output.
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in stillImageOutput.connections){
        for (AVCaptureInputPort *port in [connection inputPorts]){

            if ([[port mediaType] isEqual:AVMediaTypeVideo]){

                videoConnection = connection;
                break;
            }
        }
        if (videoConnection) { 
           break; 
        }
    }

    // A nil connection crashes captureStillImageAsynchronouslyFromConnection:.
    if (!videoConnection) {
        NSLog(@"ERROR: no video connection available on %@", stillImageOutput);
        return;
    }

    NSLog(@"about to request a capture from: %@", stillImageOutput);
    [stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error){

        // Capture can fail (session interrupted, device error); never touch
        // a NULL sample buffer.
        if (!imageSampleBuffer) {
            NSLog(@"ERROR: still image capture failed: %@", error);
            return;
        }

         CFDictionaryRef exifAttachments = CMGetAttachment( imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
         if (exifAttachments){

            // Do something with the attachments if you want to. 
            NSLog(@"attachements: %@", exifAttachments);
        }
        else
            NSLog(@"no attachments");

        NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
        UIImage *image = [[UIImage alloc] initWithData:imageData];

        // The completion handler is not guaranteed to be invoked on the main
        // queue; UIKit must only be touched from the main thread.
        dispatch_async(dispatch_get_main_queue(), ^{
            self.vImage.image = image;
        });
    }];
}
Chintan Patel
  • 3,175
  • 3
  • 30
  • 36
  • 1
    i get error at stillImageOutput.connections is empty or contain 0 objects. what is wrong? – Muhammad Umar Aug 14 '13 at 10:44
  • This is real good solution. But the screen goes blank for a second after snap happens to get the image. In SnapChat app its not like that. How can I get the behavior as in SnapChat app? – Satyam May 02 '14 at 05:42
  • 1
    @MuhammadUmar facing same `[stillImageOutput.connections count]` is `0` you find the solution why ? – Buntylm May 02 '14 at 10:28
  • @whack...Have you solved the issue.I am also getting the same. – Imran Oct 24 '14 at 13:22
1

Well, I was facing a similar issue whereby captureStillImageAsynchronouslyFromConnection:stillImageConnection was raising an exception that the passed connection is invalid. Later on, I figured out that when I made the session and stillImageOutput into (retaining/strong) properties, the issue got resolved.

Jamal Zafar
  • 2,179
  • 2
  • 27
  • 32
1

I am a javascript developer. Wanted to create an iOS native framework for my cross-platform Javascript project

When I started to do the same, I faced many issues with methods deprecated and other runtime errors.

After fixing all the issues, below is the answer which is compliant with iOS 13.5

The code helps you to take a picture on a button click without a preview.

Your .h file

// View controller that takes a photo on a button tap with no live preview,
// using the modern AVCapturePhotoOutput API. The AVCapturePhotoCaptureDelegate
// callback that receives the photo is not shown in this snippet.
@interface NoPreviewCameraViewController : UIViewController <AVCapturePhotoCaptureDelegate> {
    AVCaptureSession *captureSession;      // strong ivar keeps the session (and its connections) alive
    AVCapturePhotoOutput *photoOutput;     // successor to the deprecated AVCaptureStillImageOutput
    AVCapturePhotoSettings *photoSetting;  // recreated for every capture in -setNewPhotoSetting
    AVCaptureConnection *captureConnection; // video connection found in -takePicture:
    UIImageView *imageView;                // where the captured photo is displayed
}
@end

Your .m file


// Lays out a full-width image view above a "Take Picture" button, then
// builds and starts the capture session.
- (void)viewDidLoad {
    [super viewDidLoad];

    CGRect bounds = self.view.frame;

    // Image view fills everything above the 140pt-tall button strip.
    imageView = [[UIImageView alloc] initWithFrame:CGRectMake(0, 0, bounds.size.width, bounds.size.height - 140)];
    [self.view addSubview:imageView];

    // Custom button along the bottom, wired to -takePicture:.
    UIButton *takePicture = [UIButton buttonWithType:UIButtonTypeCustom];
    [takePicture addTarget:self action:@selector(takePicture:) forControlEvents:UIControlEventTouchUpInside];
    [takePicture setTitle:@"Take Picture" forState:UIControlStateNormal];
    takePicture.frame = CGRectMake(40.0, bounds.size.height - 140, bounds.size.width - 40, 40);
    [self.view addSubview:takePicture];

    [self initCaptureSession];
}

// Configures a photo-preset session with the default camera and an
// AVCapturePhotoOutput, then starts it running.
- (void) initCaptureSession {
    captureSession = [[AVCaptureSession alloc] init];
    if([captureSession canSetSessionPreset: AVCaptureSessionPresetPhoto] ) {
        [captureSession setSessionPreset:AVCaptureSessionPresetPhoto];
    }

    // Use the factory method with an error out-parameter instead of
    // -initWithDevice:error:nil — the original discarded the error, and a
    // nil device/input (simulator, camera permission denied) would crash
    // on -addInput:.
    AVCaptureDevice *camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error = nil;
    AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:camera error:&error];
    if (!deviceInput) {
        NSLog(@"ERROR: could not create camera input: %@", error);
        return;
    }
    if ([captureSession canAddInput:deviceInput]) {
        [captureSession addInput:deviceInput];
    }

    photoOutput = [[AVCapturePhotoOutput alloc] init];
    if ([captureSession canAddOutput:photoOutput]) {
        [captureSession addOutput:photoOutput];
    }
    [captureSession startRunning];
}

// Creates fresh JPEG capture settings. A new AVCapturePhotoSettings object
// is required for every capture; reusing one across captures raises an
// exception.
-(void) setNewPhotoSetting {
    photoSetting = [AVCapturePhotoSettings photoSettingsWithFormat:@{AVVideoCodecKey : AVVideoCodecTypeJPEG}];
    // NOTE(review): -setPhotoSettingsForSceneMonitoring: configures scene
    // monitoring only; it is not required for the capture itself — confirm
    // whether this call is intentional.
    [photoOutput setPhotoSettingsForSceneMonitoring:photoSetting];
}

// Button action: refreshes the photo settings, records the video connection,
// and asks the photo output for a capture (delivered to self as delegate).
- (IBAction)takePicture:(id)sender {
    captureConnection = nil;
    [self setNewPhotoSetting];

    // Remember the first connection that carries a video input port.
    for (AVCaptureConnection *connection in photoOutput.connections) {
        NSUInteger videoPortIndex =
            [[connection inputPorts] indexOfObjectPassingTest:^BOOL(AVCaptureInputPort *port, NSUInteger idx, BOOL *stop) {
                return [[port mediaType] isEqual: AVMediaTypeVideo];
            }];
        if (videoPortIndex != NSNotFound) {
            captureConnection = connection;

            NSLog(@"Value of connection = %@", connection);
            NSLog(@"Value of captureConnection = %@", captureConnection);

            break;
        }
    }

    [photoOutput capturePhotoWithSettings:photoSetting delegate:self];
}

Kuldeep Bhimte
  • 961
  • 1
  • 10
  • 25