Okay, so I have a project I am working on. The task is to measure heart rate using the iPhone/iPad camera. I am trying to capture video using AVFoundation, grab each frame, sum the red component of every pixel in the frame, and divide by the number of pixels to get the average.
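In other words, for every frame I want a single scalar, meanRed = (sum of the red channel over all pixels) / (width * height), and that value should rise and fall with each pulse.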
I first set up the video:
-(void)setupAVCapture {
    _session = [[AVCaptureSession alloc] init];
    _session.sessionPreset = AVCaptureSessionPresetMedium;

    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (!error) {
        // Turn the torch on, if the device has one
        if ([device lockForConfiguration:&error]) {
            if ([device hasTorch] && [device isTorchModeSupported:AVCaptureTorchModeOn]) {
                [device setTorchMode:AVCaptureTorchModeOn];
            }
            [device unlockForConfiguration];
        }
        if ([_session canAddInput:input])
            [_session addInput:input];

        // Deliver frames as 32-bit BGRA to the sample buffer delegate
        AVCaptureVideoDataOutput *videoDataOutput = [AVCaptureVideoDataOutput new];
        [videoDataOutput setVideoSettings:@{ (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) }];
        [videoDataOutput setAlwaysDiscardsLateVideoFrames:YES];

        dispatch_queue_t videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL);
        [videoDataOutput setSampleBufferDelegate:self queue:videoDataOutputQueue];
        if ([_session canAddOutput:videoDataOutput])
            [_session addOutput:videoDataOutput];

        [_session startRunning];
    }
    else {
        UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:[NSString stringWithFormat:@"Failed with error %d", (int)[error code]]
                                                            message:[error localizedDescription]
                                                           delegate:nil
                                                  cancelButtonTitle:@"Dismiss"
                                                  otherButtonTitles:nil];
        [alertView show];
    }
}
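One side note: since startRunning blocks until the session actually starts, I understand it is better not to call it on the main thread, so I kick the whole setup off with something like this sketch:

// Sketch: start the capture session off the main thread, since startRunning blocks
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
    [self setupAVCapture];
});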
And then I use the delegate method as follows:
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    // Got a frame: compute its mean red value and store it
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // redScores is an array property that stores the red values of all the frames
    [self.redScores addObject:[NSNumber numberWithFloat:[self processPixelBuffer:pixelBuffer]]];
}
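As I understand it, kCVPixelFormatType_32BGRA means each pixel occupies 4 bytes in the order B, G, R, A, so offset 2 within a pixel should be the red byte. To rule out a format mismatch I could also check, inside the delegate, what the buffer actually reports; a sketch:

// Sketch: confirm the delivered buffer really is BGRA before trusting byte offsets
OSType format = CVPixelBufferGetPixelFormatType(pixelBuffer);
if (format != kCVPixelFormatType_32BGRA) {
    NSLog(@"Unexpected pixel format: %u", (unsigned int)format);
}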
-(float)processPixelBuffer:(CVPixelBufferRef)pixelBuffer {
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    size_t bufferWidth = CVPixelBufferGetWidth(pixelBuffer);
    size_t bufferHeight = CVPixelBufferGetHeight(pixelBuffer);
    unsigned char *pixels = (unsigned char *)CVPixelBufferGetBaseAddress(pixelBuffer);
    int meanRedPixelWeight = 0.0;
    for (int i = 0; i < (bufferWidth * bufferHeight); i++) {
        meanRedPixelWeight += pixels[2];
    }
    meanRedPixelWeight = meanRedPixelWeight / (bufferWidth * bufferHeight);
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    NSLog(@"%d", meanRedPixelWeight);
    return meanRedPixelWeight;
}
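For reference, the layout I am assuming is that rows can be padded, so the red byte of pixel (row, col) should live at row * bytesPerRow + col * 4 + 2, with bytesPerRow taken from CVPixelBufferGetBytesPerRow. Using the same pixels, bufferWidth, and bufferHeight as above, the computation I intend is roughly this sketch:

// Sketch of the intended computation, assuming BGRA pixels and possible row padding
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
float sum = 0.0f;
for (size_t row = 0; row < bufferHeight; row++) {
    const unsigned char *rowStart = pixels + row * bytesPerRow;
    for (size_t col = 0; col < bufferWidth; col++) {
        sum += rowStart[col * 4 + 2]; // red byte of this BGRA pixel
    }
}
float meanRed = sum / (float)(bufferWidth * bufferHeight);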
But this doesn't seem to give me the correct red values. For one, the values are constantly decreasing, when they should be going up and down. Secondly, I recorded the video and processed it in MATLAB by doing something like this:
v = VideoReader('filepath');
noOfFrames = v.NumberOfFrames;
x = zeros(1, noOfFrames);
for i = 1:noOfFrames
    frame = read(v, i);
    redPlane = frame(:, :, 1);
    x(i) = sum(sum(redPlane)) / (size(frame, 1) * size(frame, 2));
end
I get very different average values. The MATLAB ones are close to 255, and I can tell those are correct because the frames are almost fully red.
Any ideas on what is wrong with the Objective-C code?