I was unable to do full-size real-time video encoding. However, as an alternative, consider this.
Instead of recording frames, record actions (with their timestamps) as they occur. Then, when you want to play back, just replay the actions. You already have the code, because you execute it in "real life."
All you do is replay those same actions, relative to one another in time.
EDIT
If you want to try the recording, here's what I did (note: I abandoned it... it was an experiment in progress, so just take it as an example of how I approached it... nothing is production-ready). I was able to record live audio/video at 640x360, but that resolution was too low for me. It looked fine on the iPad, but terrible when I moved the video to my Mac and watched it there.
I had problems with higher resolutions. I adapted the bulk of this code from the RosyWriter example project. Here are the main routines for setting up the asset writer, starting the recording, and adding a UIImage to the video stream.
Good luck.
// Output dimensions of the recorded movie. Frames are scaled to this size
// before being appended; also used for the writer's width/height settings.
CGSize const VIDEO_SIZE = { 640, 360 };
/// Transitions from idle to recording: creates the AVAssetWriter for the
/// MPEG-4 output file and resumes the capture session. All writer state is
/// touched only on movieWritingQueue.
- (void) startRecording
{
    dispatch_async(movieWritingQueue, ^{
        NSLog(@"startRecording called in state 0x%04x", state);
        // Ignore re-entrant calls; only a transition from idle is valid.
        if (state != STATE_IDLE) return;
        state = STATE_STARTING_RECORDING;
        NSLog(@"startRecording changed state to 0x%04x", state);

        // Remove any stale file BEFORE creating the writer: AVAssetWriter
        // fails to start writing if the output URL already exists.
        [self removeFile:movieURL];

        NSError *error = nil;
        assetWriter = [[AVAssetWriter alloc] initWithURL:movieURL
                                                fileType:AVFileTypeMPEG4
                                                   error:&error];
        // Check the returned object, not the error pointer (Cocoa convention:
        // *error is only meaningful when the call fails).
        if (!assetWriter) {
            [self showError:error];
            state = STATE_IDLE;   // roll back so a later attempt can retry
            return;
        }

        [self resumeCaptureSession];
        [self.delegate recordingWillStart];
    });
}
// TODO: this is where we write an image into the movie stream...
// Appends a UIImage as one video frame, throttled to ~10 fps. On the first
// call after startRecording it lazily creates the video input from the image.
- (void) writeImage:(UIImage*)inImage
{
    // Throttle on the caller's thread so we don't flood the writing queue.
    // Not really accurate, but we just want to limit the rate we try to
    // write frames...
    static CFTimeInterval const minInterval = 1.0 / 10.0;
    static CFAbsoluteTime lastFrameWrittenWallClockTime;
    CFAbsoluteTime thisFrameWallClockTime = CFAbsoluteTimeGetCurrent();
    if (thisFrameWallClockTime - lastFrameWrittenWallClockTime < minInterval) return;
    lastFrameWrittenWallClockTime = thisFrameWallClockTime;

    dispatch_async(movieWritingQueue, ^{
        if (!assetWriter) return;

        // Lazily create the writer input once recording has been requested.
        if ((state & STATE_STARTING_RECORDING) && !(state & STATE_MASK_VIDEO_READY)) {
            if ([self setupAssetWriterImageInput:inImage]) {
                [self videoIsReady];
            }
        }
        if (state != STATE_RECORDING) return;
        if (assetWriter.status != AVAssetWriterStatusWriting) return;
        // Check readiness BEFORE allocating anything, so nothing leaks when
        // the input is backed up. (The original copied the CGImage first and
        // leaked it on this path.)
        if (!assetWriterVideoIn.readyForMoreMediaData) {
            NSLog(@"Not ready for video data");
            return;
        }

        CGImageRef cgImage = CGImageCreateCopy([inImage CGImage]);

        // Scale the frame to the output dimensions if it doesn't match.
        if (!CGSizeEqualToSize(inImage.size, VIDEO_SIZE)) {
            CGRect newRect = CGRectIntegral(CGRectMake(0, 0, VIDEO_SIZE.width, VIDEO_SIZE.height));
            CGContextRef bitmap = CGBitmapContextCreate(NULL,
                                                        newRect.size.width,
                                                        newRect.size.height,
                                                        CGImageGetBitsPerComponent(cgImage),
                                                        0,
                                                        CGImageGetColorSpace(cgImage),
                                                        CGImageGetBitmapInfo(cgImage));
            // High-quality interpolation for the rescale.
            CGContextSetInterpolationQuality(bitmap, kCGInterpolationHigh);
            // Drawing into the smaller context performs the scaling.
            CGContextDrawImage(bitmap, newRect, cgImage);
            CGImageRef newImageRef = CGBitmapContextCreateImage(bitmap);
            CGContextRelease(bitmap);
            CGImageRelease(cgImage);
            cgImage = newImageRef;
        }

        // NOTE(review): this copies the CGImage's raw backing store straight
        // into the pixel buffer, assuming the pixel format and bytes-per-row
        // match the adaptor's 32ARGB buffers — confirm for any source images
        // outside this exact pipeline.
        CFDataRef image = CGDataProviderCopyData(CGImageGetDataProvider(cgImage));

        CVPixelBufferRef pixelBuffer = NULL;
        CVReturn status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault,
                                                             self.assetWriterPixelBufferAdaptor.pixelBufferPool,
                                                             &pixelBuffer);
        if (status != kCVReturnSuccess || pixelBuffer == NULL) {
            // Could not get a buffer from the pool. Bail out instead of
            // locking/writing a NULL buffer (the original crashed here),
            // and release everything allocated so far.
            NSLog(@"Error creating pixel buffer: status=%d", (int)status);
            CFRelease(image);
            CGImageRelease(cgImage);
            return;
        }

        // Copy the image bytes into the pixel buffer.
        CVPixelBufferLockBaseAddress(pixelBuffer, 0);
        uint8_t *destPixels = CVPixelBufferGetBaseAddress(pixelBuffer);
        CFDataGetBytes(image, CFRangeMake(0, CFDataGetLength(image)), destPixels);

        // Presentation time = wall-clock time elapsed since the first frame,
        // offset from the first buffer's timestamp.
        CFTimeInterval elapsedTime = thisFrameWallClockTime - firstFrameWallClockTime;
        CMTime presentationTime = CMTimeAdd(firstBufferTimeStamp,
                                            CMTimeMake(elapsedTime * TIME_SCALE, TIME_SCALE));
        BOOL success = [self.assetWriterPixelBufferAdaptor appendPixelBuffer:pixelBuffer
                                                        withPresentationTime:presentationTime];
        if (!success)
            NSLog(@"Warning: Unable to write buffer to video");

        // Clean up.
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
        CVPixelBufferRelease(pixelBuffer);
        CFRelease(image);
        CGImageRelease(cgImage);
    });
}
/// Creates the H.264 video input (sized to VIDEO_SIZE) and its pixel buffer
/// adaptor, then attaches the input to the asset writer.
/// @param image The first frame; currently unused for sizing (VIDEO_SIZE wins).
/// @return YES if the input was added to the writer, NO otherwise.
-(BOOL) setupAssetWriterImageInput:(UIImage*)image
{
    NSDictionary *videoCompressionProps = @{
        AVVideoAverageBitRateKey : @(1024.0 * 1024.0),
    };
    // Output is sized to VIDEO_SIZE, not to the incoming image.
    NSDictionary *videoSettings = @{
        AVVideoCodecKey                 : AVVideoCodecH264,
        AVVideoWidthKey                 : @((int)VIDEO_SIZE.width),
        AVVideoHeightKey                : @((int)VIDEO_SIZE.height),
        AVVideoCompressionPropertiesKey : videoCompressionProps,
    };
    NSLog(@"videoSettings: %@", videoSettings);

    assetWriterVideoIn = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                            outputSettings:videoSettings];
    NSParameterAssert(assetWriterVideoIn);
    // Frames come from a live source; don't let the writer wait for data.
    assetWriterVideoIn.expectsMediaDataInRealTime = YES;

    // The adaptor vends a pool of 32ARGB pixel buffers that writeImage:
    // fills with raw image bytes.
    NSDictionary *bufferAttributes = @{
        (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32ARGB),
    };
    self.assetWriterPixelBufferAdaptor =
        [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:assetWriterVideoIn
                                                                         sourcePixelBufferAttributes:bufferAttributes];

    if ([assetWriter canAddInput:assetWriterVideoIn]) {
        [assetWriter addInput:assetWriterVideoIn];
        return YES;
    }
    NSLog(@"Couldn't add asset writer video input.");
    return NO;
}