I am trying to filter live video on the iPhone. Here's my project structure and source code:
AppDelegate.h
AppDelegate.m
ViewController.h
ViewController.m
The AppDelegate files are the same as the default template. Here's my ViewController:
//ViewController.h
#import <UIKit/UIKit.h>
#import <GLKit/GLKit.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>
#import <CoreVideo/CoreVideo.h>
#import <QuartzCore/QuartzCore.h>
#import <CoreImage/CoreImage.h>
#import <ImageIO/ImageIO.h>
@interface ViewController : GLKViewController <AVCaptureVideoDataOutputSampleBufferDelegate>{
    AVCaptureSession *avCaptureSession;
    CIContext *coreImageContext;
    CIImage *maskImage;
    CGSize screenSize;
    CGContextRef cgContext;
    GLuint _renderBuffer;
    float scale;
}
@property (strong, nonatomic) EAGLContext *context;
-(void)setupCGContext;
@end
// ViewController.m
#import "ViewController.h"
@implementation ViewController
@synthesize context;
- (void)viewDidLoad
{
    [super viewDidLoad];

    self.context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
    if (!self.context) {
        NSLog(@"Failed to create ES context");
    }

    GLKView *view = (GLKView *)self.view;
    view.context = self.context;
    view.drawableDepthFormat = GLKViewDrawableDepthFormat24;

    // The GL context must be current before any GL calls are made.
    [EAGLContext setCurrentContext:self.context];

    coreImageContext = [CIContext contextWithEAGLContext:self.context];

    glGenRenderbuffers(1, &_renderBuffer);
    glBindRenderbuffer(GL_RENDERBUFFER, _renderBuffer);

    // Camera input.
    NSError *error;
    AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];

    // BGRA frames, delivered to the delegate on the main queue.
    AVCaptureVideoDataOutput *dataOutput = [[AVCaptureVideoDataOutput alloc] init];
    [dataOutput setAlwaysDiscardsLateVideoFrames:YES];
    [dataOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
                                                             forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
    [dataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];

    avCaptureSession = [[AVCaptureSession alloc] init];
    [avCaptureSession beginConfiguration];
    [avCaptureSession setSessionPreset:AVCaptureSessionPreset1280x720];
    [avCaptureSession addInput:input];
    [avCaptureSession addOutput:dataOutput];
    [avCaptureSession commitConfiguration];
    [avCaptureSession startRunning];

    // setupCGContext reads screenSize, so it has to be set first.
    screenSize = self.view.bounds.size;

    [self setupCGContext];
    CGImageRef cgImg = CGBitmapContextCreateImage(cgContext);
    maskImage = [CIImage imageWithCGImage:cgImg];
    CGImageRelease(cgImg);
}
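The capture callback applies the sepia filter to each frame and draws the result: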
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    CVPixelBufferRef pixelBuffer = (CVPixelBufferRef)CMSampleBufferGetImageBuffer(sampleBuffer);
    CIImage *image = [CIImage imageWithCVPixelBuffer:pixelBuffer];

    // Build a sepia filter for this frame and grab its output.
    image = [CIFilter filterWithName:@"CISepiaTone"
                       keysAndValues:kCIInputImageKey, image,
                                     @"inputIntensity", [NSNumber numberWithFloat:0.8],
                       nil].outputImage;

    [coreImageContext drawImage:image atPoint:CGPointZero fromRect:[image extent]];
    [self.context presentRenderbuffer:GL_RENDERBUFFER];
}
- (void)setupCGContext
{
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    NSUInteger bytesPerPixel = 4;
    NSUInteger bytesPerRow = bytesPerPixel * screenSize.width;
    NSUInteger bitsPerComponent = 8;
    cgContext = CGBitmapContextCreate(NULL, screenSize.width, screenSize.height, bitsPerComponent,
                                      bytesPerRow, colorSpace, kCGImageAlphaPremultipliedLast);
    CGColorSpaceRelease(colorSpace);
}
The sepia filter works, but the video gets noticeably slower (the frame rate drops). When I don't apply the filter, the video runs normally. I suspect that creating a new CIFilter for every frame and doing all the filtering and drawing on the main queue is part of the cost, but I'm not sure. Any ideas on how I can improve this and make the video faster?
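For reference, here is the kind of change I've been considering. This is just a sketch, not something I've confirmed helps: create the CIFilter once in viewDidLoad and only swap its input image in the callback, instead of building a new filter object per frame (sepiaFilter would be a new ivar).

// Hypothetical ivar, declared alongside the others in ViewController.h:
CIFilter *sepiaFilter;

// In viewDidLoad, set up the filter once:
sepiaFilter = [CIFilter filterWithName:@"CISepiaTone"];
[sepiaFilter setValue:[NSNumber numberWithFloat:0.8] forKey:@"inputIntensity"];

// In the capture callback, reuse it for every frame:
[sepiaFilter setValue:image forKey:kCIInputImageKey];
image = sepiaFilter.outputImage;

I've also wondered whether moving the sample buffer delegate onto a serial background queue (via dispatch_queue_create) would help, but I'm not sure how that interacts with presenting the renderbuffer from the GL context.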
Thanks.