I'm trying to implement a heartbeat detector algorithm for iPhone. It's for educational purposes, considering that there are already several apps doing that...
I've got this function to get the variation of colors in my frames:
- (void)processNewCameraFrame:(CVImageBufferRef)cameraFrame {
CVPixelBufferLockBaseAddress(cameraFrame, 0);
int bufferHeight = CVPixelBufferGetHeight(cameraFrame);
int bufferWidth = CVPixelBufferGetWidth(cameraFrame);
// Create a new texture from the camera frame data, display that using the shaders
glGenTextures(1, &videoFrameTexture);
glBindTexture(GL_TEXTURE_2D, videoFrameTexture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
// This is necessary for non-power-of-two textures
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
/*Create a CGImageRef from the CVImageBufferRef*/
uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(cameraFrame);
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(cameraFrame);
size_t width = CVPixelBufferGetWidth(cameraFrame);
size_t height = CVPixelBufferGetHeight(cameraFrame);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
CGImageRef newImage = CGBitmapContextCreateImage(newContext);
UIImage *image = [UIImage imageWithCGImage:newImage];
[self processData:image];
CGImageRelease(newImage);
CGContextRelease(newContext);
CGColorSpaceRelease(colorSpace);
// Using BGRA extension to pull in video frame data directly
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bufferWidth, bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(cameraFrame));
[self drawFrame];
glDeleteTextures(1, &videoFrameTexture);
CVPixelBufferUnlockBaseAddress(cameraFrame, 0);
}
and the following function to get the dominant (average) color of an image. struct pixel is just four bytes per pixel, matching the RGBA layout of the bitmap context below:
struct pixel {
    unsigned char r, g, b, a;
};

- (UIColor *)getDominantColor:(UIImage*)image {
NSUInteger red = 0;
NSUInteger green = 0;
NSUInteger blue = 0;
// Allocate a buffer big enough to hold all the pixels
struct pixel* pixels = (struct pixel*) calloc(1, image.size.width * image.size.height * sizeof(struct pixel));
if (pixels != NULL) {
CGContextRef context = CGBitmapContextCreate(
(void*) pixels,
image.size.width,
image.size.height,
8,
image.size.width * 4,
CGImageGetColorSpace(image.CGImage),
kCGImageAlphaPremultipliedLast
);
if (context != NULL) {
// Draw the image in the bitmap
CGContextDrawImage(context, CGRectMake(0.0f, 0.0f, image.size.width, image.size.height), image.CGImage);
// Now that we have the image drawn into our own buffer, the pixels can be
// read directly. Here we simply average the red, green, and blue components
// over all pixels; there are probably more efficient ways to do this.
NSUInteger numberOfPixels = image.size.width * image.size.height;
for (NSUInteger i = 0; i < numberOfPixels; i++) {
red += pixels[i].r;
green += pixels[i].g;
blue += pixels[i].b;
}
red /= numberOfPixels;
green /= numberOfPixels;
blue /= numberOfPixels;
CGContextRelease(context);
}
free(pixels);
}
return [UIColor colorWithRed:red/255.0f green:green/255.0f blue:blue/255.0f alpha:1.0f];
}
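
For context, processData: basically just pulls the red component out of the dominant color and appends it to the series I'm plotting. A simplified sketch (redSeries is a hypothetical NSMutableArray ivar here; a real version would also record frame timestamps so the sampling rate is known):

- (void)processData:(UIImage *)image {
    // Average color of the frame, via the function above.
    UIColor *dominant = [self getDominantColor:image];
    CGFloat red, green, blue, alpha;
    [dominant getRed:&red green:&green blue:&blue alpha:&alpha];
    // Append the red level (0.0 - 1.0) to the time series for plotting.
    [redSeries addObject:@(red)];
}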
but plotting the sequence of red components from each frame isn't giving me a reliable plot of heartbeats...
Is this the correct way to proceed? I'm playing with the FFT, but I think it's useless to process the signal's spectrum just to count peaks... maybe I can use the FFT to apply a low-pass filter by convolution and then take an inverse FFT, and look for peaks in that filtered data...
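
For example, the same idea in the time domain, without the FFT round trip, would be a moving-average low-pass filter followed by naive peak counting. A rough sketch (estimateBPM is hypothetical; fps is the capture frame rate, and the window size and mean threshold are just guesses to tune):

#include <stdlib.h>

// Smooth the raw red series with a moving average (a crude low-pass filter),
// then count local maxima above the mean and convert the peak rate to BPM.
static float estimateBPM(const float *samples, int count, float fps) {
    const int window = 5; // filter length; tune against the frame rate
    float *smooth = (float *)calloc(count, sizeof(float));
    if (smooth == NULL || count < 3 || fps <= 0.0f) { free(smooth); return 0.0f; }

    float mean = 0.0f;
    for (int i = 0; i < count; i++) {
        int lo = i - window / 2 < 0 ? 0 : i - window / 2;
        int hi = i + window / 2 > count - 1 ? count - 1 : i + window / 2;
        float sum = 0.0f;
        for (int j = lo; j <= hi; j++) sum += samples[j];
        smooth[i] = sum / (float)(hi - lo + 1);
        mean += smooth[i] / (float)count;
    }

    // A peak is a sample above the mean that is larger than both neighbors.
    int peaks = 0;
    for (int i = 1; i < count - 1; i++) {
        if (smooth[i] > mean && smooth[i] > smooth[i - 1] && smooth[i] >= smooth[i + 1]) {
            peaks++;
        }
    }
    free(smooth);
    return (peaks * 60.0f) / (count / fps); // beats per minute
}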