I have a UIImage array of 10 frames. I want to export this as a movie that is 15 seconds long in total, so it would have to loop those 10 frames for the full 15 seconds. Between each frame I need an interval of 0.2 seconds.
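(If my math is right, 15 seconds at 0.2 seconds per frame works out to 15 / 0.2 = 75 frames in the output, i.e. my 10 images repeated 7.5 times.)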
I have managed to create a movie from my array, but I don't understand how CMTime works. I've tried messing with the numbers, but I can't get the result I want: either my movie is too short, it plays too fast, etc.
I have read this: Trying to understand CMTime and CMTimeMake, but it still makes no sense to me...
I need to understand how these three CMTime variables are related to each other:
CMTime frameTime = CMTimeMake(1, 5);
CMTime lastTime = CMTimeMake(i, 5);
CMTime presentTime = CMTimeAdd(lastTime, frameTime);
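From what I've gathered, CMTimeMake(value, timescale) is supposed to represent value / timescale seconds, so for my 0.2-second interval I'd expect a timescale of 5 to be what I want, roughly like this (the frame index 3 is just an example, and I may be reading the docs wrong):

int i = 3;                                            // example: the 4th frame
CMTime frameTime   = CMTimeMake(1, 5);                // 1/5 s = 0.2 s per frame?
CMTime lastTime    = CMTimeMake(i, 5);                // i * 0.2 s, the time of frame i?
CMTime presentTime = CMTimeAdd(lastTime, frameTime);  // (i + 1) * 0.2 s, when the next frame should appear?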
This is the code I'm using:
- (void)writeImageAsMovie:(NSArray *)array toPath:(NSURL *)path size:(CGSize)size {
    NSError *error = nil;
    self.videoWriter = [[AVAssetWriter alloc] initWithURL:path
                                                  fileType:AVFileTypeMPEG4
                                                     error:&error];
    NSParameterAssert(self.videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                   nil];

    AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                          outputSettings:videoSettings];

    NSMutableDictionary *attributes = [[NSMutableDictionary alloc] init];
    [attributes setObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB] forKey:(NSString *)kCVPixelBufferPixelFormatTypeKey];
    [attributes setObject:[NSNumber numberWithUnsignedInt:size.width] forKey:(NSString *)kCVPixelBufferWidthKey];
    [attributes setObject:[NSNumber numberWithUnsignedInt:size.height] forKey:(NSString *)kCVPixelBufferHeightKey];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                                                                                       sourcePixelBufferAttributes:attributes];

    NSParameterAssert(writerInput);
    NSParameterAssert([self.videoWriter canAddInput:writerInput]);
    [self.videoWriter addInput:writerInput];
    writerInput.expectsMediaDataInRealTime = YES;

    // Start a session:
    [self.videoWriter startWriting];
    [self.videoWriter startSessionAtSourceTime:kCMTimeZero];

    CVPixelBufferRef buffer = NULL;
    buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:0] CGImage] size:CGSizeMake(size.width, size.height)];
    CVPixelBufferPoolCreatePixelBuffer(NULL, adaptor.pixelBufferPool, &buffer);
    [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];

    int i = 1;
    while (writerInput.readyForMoreMediaData) {
        CMTime frameTime = CMTimeMake(1, 15);
        CMTime lastTime = CMTimeMake(i, 15);
        CMTime presentTime = CMTimeAdd(lastTime, frameTime);

        if (i >= [array count]) {
            buffer = NULL;
        } else {
            buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:i] CGImage] size:size];
        }

        if (buffer) {
            [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
            i++;
        } else {
            [writerInput markAsFinished];
            [self.videoWriter finishWritingWithCompletionHandler:^{
                NSLog(@"Done");
            }];
            CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
            break;
        }
    }
}
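In case it matters, this is roughly how I call it (the file name, output size and the frameImages property are just placeholders for my real values):

NSArray *frames = self.frameImages; // my 10 UIImages
NSURL *outputURL = [NSURL fileURLWithPath:
    [NSTemporaryDirectory() stringByAppendingPathComponent:@"output.mp4"]];
// AVAssetWriter won't write to a file that already exists, so remove any old copy first
[[NSFileManager defaultManager] removeItemAtURL:outputURL error:nil];
[self writeImageAsMovie:frames toPath:outputURL size:CGSizeMake(640, 480)];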