I would like to link OpenCV with Swift/Objective-C++ so I can develop applications for iOS. I found that CocoaPods works reasonably well with the OpenCV pod, so I used it as a starting point and successfully ran some image-stitching examples. However, when I try to capture images from the camera, I cannot see the output on the display. The code runs and loops through the captureOutput
function, but the camera image is never displayed. It seems that the capture pipeline runs only in the background.
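For reference, this is roughly the Podfile I used to pull in OpenCV (a sketch; the pod name, target name, and platform version here are assumptions about my setup):

platform :ios, '11.0'
use_frameworks!

target 'MyApp' do
  # Prebuilt OpenCV framework distributed through CocoaPods
  pod 'OpenCV'
end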
Objective-C++ code:
@interface VideoSource () <AVCaptureVideoDataOutputSampleBufferDelegate>
@property (strong, nonatomic) AVCaptureVideoPreviewLayer *previewLayer;
@property (strong, nonatomic) AVCaptureSession *captureSession;
@end
@implementation VideoSource
- (void)setTargetView:(UIView *)targetView {
    if (self.previewLayer == nil) {
        return;
    }
    self.previewLayer.contentsGravity = kCAGravityResizeAspectFill;
    self.previewLayer.frame = targetView.bounds;
    self.previewLayer.affineTransform = CGAffineTransformMakeRotation(M_PI / 2);
    [targetView.layer addSublayer:self.previewLayer];
    std::cout << "VideoSource setTargetView ... done" << std::endl;
}
- (instancetype)init
{
    self = [super init];
    if (self) {
        _captureSession = [[AVCaptureSession alloc] init];
        _captureSession.sessionPreset = AVCaptureSessionPreset640x480;

        AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
        NSError *error = nil;
        AVCaptureDeviceInput *input = [[AVCaptureDeviceInput alloc] initWithDevice:device error:&error];
        if (input == nil) {
            NSLog(@"Failed to create camera input: %@", error);
            return nil;
        }
        [_captureSession addInput:input];

        AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
        output.videoSettings = @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
        output.alwaysDiscardsLateVideoFrames = YES;
        [_captureSession addOutput:output];

        // Deliver sample buffers on a dedicated serial queue.
        dispatch_queue_t queue = dispatch_queue_create("VideoQueue", DISPATCH_QUEUE_SERIAL);
        [output setSampleBufferDelegate:self queue:queue];

        _previewLayer = [AVCaptureVideoPreviewLayer layer];
        std::cout << "VideoSource init ... done" << std::endl;
    }
    return self;
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    uint8_t *base = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    int width = (int)CVPixelBufferGetWidth(imageBuffer);
    int height = (int)CVPixelBufferGetHeight(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);

    // Wrap the pixel buffer in a cv::Mat without copying; pass bytesPerRow
    // as the step, since rows may be padded beyond width * 4.
    Mat mat = Mat(height, width, CV_8UC4, base, bytesPerRow);

    //Processing here
    [self.delegate processFrame:mat];

    CGImageRef imageRef = [self CGImageFromCVMat:mat];
    dispatch_sync(dispatch_get_main_queue(), ^{
        self.previewLayer.contents = (__bridge id)imageRef;
    });
    CGImageRelease(imageRef);

    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    std::cout << "VideoSource captureOutput ... done" << std::endl;
}
- (void)start {
    [self.captureSession startRunning];
    std::cout << "VideoSource start ... done" << std::endl;
}
- (CGImageRef)CGImageFromCVMat:(Mat)cvMat {
    if (cvMat.elemSize() == 4) {
        cv::cvtColor(cvMat, cvMat, COLOR_BGRA2RGBA);
    }
    NSData *data = [NSData dataWithBytes:cvMat.data length:cvMat.elemSize() * cvMat.total()];

    CGColorSpaceRef colorSpace;
    if (cvMat.elemSize() == 1) {
        colorSpace = CGColorSpaceCreateDeviceGray();
    } else {
        colorSpace = CGColorSpaceCreateDeviceRGB();
    }
    CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);

    // Creating CGImage from cv::Mat
    CGImageRef imageRef = CGImageCreate(cvMat.cols,                                     // width
                                        cvMat.rows,                                     // height
                                        8,                                              // bits per component
                                        8 * cvMat.elemSize(),                           // bits per pixel
                                        cvMat.step[0],                                  // bytes per row
                                        colorSpace,                                     // color space
                                        kCGImageAlphaNone | kCGBitmapByteOrderDefault,  // bitmap info
                                        provider,                                       // data provider
                                        NULL,                                           // decode
                                        false,                                          // should interpolate
                                        kCGRenderingIntentDefault);                     // rendering intent
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpace);
    //std::cout << "VideoSource CGImageFromCVMat ... done" << std::endl;
    return imageRef;
}
@end
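For completeness, the Objective-C++ class is exposed to Swift through a bridging header. The wrapper's public interface looks roughly like this (a sketch reconstructed from the Swift calls below; everything beyond setTargetView: and start is my assumption about the wiring):

// Wrapper.h -- sketch; mirrors the calls made from Swift below
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>

@interface Wrapper : NSObject
- (void)setTargetView:(UIView *)targetView; // forwards to VideoSource's setTargetView:
- (void)start;                              // forwards to VideoSource's start
@end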
The Swift side:
@IBOutlet var spinner:UIActivityIndicatorView!
@IBOutlet weak var previewView: UIView!
let wrapper = Wrapper()
and then, in the calling function:
override func viewDidAppear(_ animated: Bool) {
    super.viewDidAppear(animated)
    self.view.backgroundColor = UIColor.darkGray
    self.wrapper.setTargetView(self.previewView)
    self.wrapper.start()
}
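To narrow this down, the layer update in captureOutput could be instrumented like this (a debugging sketch only; the checks and log strings are my additions, not part of the original code):

// Debugging sketch inside captureOutput, replacing the layer update:
CGImageRef imageRef = [self CGImageFromCVMat:mat];
if (imageRef == NULL) {
    // CGImageCreate can fail and return NULL; contents would then be nil.
    std::cout << "CGImageFromCVMat returned NULL" << std::endl;
}
dispatch_sync(dispatch_get_main_queue(), ^{
    // If the layer was never attached to a view, superlayer is nil
    // and nothing can appear on screen.
    std::cout << "previewLayer attached: " << (self.previewLayer.superlayer != nil) << std::endl;
    self.previewLayer.contents = (__bridge id)imageRef;
});

What would cause the frames to be processed in the loop but never reach the display?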