I’m trying to create a video from images using AVFoundation. There are already multiple threads about this approach, but I believe many of them run into the same issue I’m facing here.
The video plays fine on the iPhone, but it doesn’t play in VLC, for example, and it doesn’t play correctly on Facebook or Vimeo either (sometimes some frames are out of sync). VLC reports the frame rate of the video as 0.58 fps, but it should be more than 24, right?
Does anyone know what is causing this kind of behavior?
Here is the code used to create a video:
    NSError *error = nil;
    self.videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:videoOutputPath]
                                                 fileType:AVFileTypeMPEG4
                                                    error:&error];

    // Codec compression settings
    NSDictionary *videoSettings = @{
        AVVideoCodecKey : AVVideoCodecH264,
        AVVideoWidthKey : @(self.videoSize.width),
        AVVideoHeightKey : @(self.videoSize.height),
        AVVideoCompressionPropertiesKey : @{
            AVVideoAverageBitRateKey : @(20000 * 1000), // 20 Mbit/s
            AVVideoProfileLevelKey : AVVideoProfileLevelH264High40,
            AVVideoMaxKeyFrameIntervalKey : @(1)
        }
    };

    AVAssetWriterInput *videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                              outputSettings:videoSettings];
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
        assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                   sourcePixelBufferAttributes:nil];

    videoWriterInput.expectsMediaDataInRealTime = NO;
    [self.videoWriter addInput:videoWriterInput];

    [self.videoWriter startWriting];
    [self.videoWriter startSessionAtSourceTime:kCMTimeZero];

    [adaptor.assetWriterInput requestMediaDataWhenReadyOnQueue:self.photoToVideoQueue usingBlock:^{
        CMTime time = CMTimeMakeWithSeconds(0, 1000);

        for (Segment *segment in segments) {
            @autoreleasepool {
                UIImage *image = segment.segmentImage;
                CVPixelBufferRef buffer = [self pixelBufferFromImage:image withImageSize:self.videoSize];

                [ImageToVideoManager appendToAdapter:adaptor pixelBuffer:buffer atTime:time];
                CVPixelBufferRelease(buffer);

                // Each segment's duration is stored in milliseconds, so a timescale of 1000 is used.
                CMTime millisecondsDuration = CMTimeMake(segment.durationMS.integerValue, 1000);
                time = CMTimeAdd(time, millisecondsDuration);
            }
        }

        [videoWriterInput markAsFinished];
        [self.videoWriter endSessionAtSourceTime:time];
        [self.videoWriter finishWritingWithCompletionHandler:^{
            NSLog(@"Video writer has finished creating video");
        }];
    }];
- (CVPixelBufferRef)pixelBufferFromImage:(UIImage *)image withImageSize:(CGSize)size {
    CGImageRef cgImage = image.CGImage;

    NSDictionary *options = @{
        (__bridge NSString *)kCVPixelBufferCGImageCompatibilityKey : @YES,
        (__bridge NSString *)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES
    };

    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
                                          size.width,
                                          size.height,
                                          kCVPixelFormatType_32ARGB,
                                          (__bridge CFDictionaryRef)options,
                                          &pxbuffer);
    if (status != kCVReturnSuccess) {
        DebugLog(@"Failed to create pixel buffer");
        return NULL;
    }

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata,
                                                 size.width,
                                                 size.height,
                                                 8,
                                                 4 * size.width,
                                                 rgbColorSpace,
                                                 (CGBitmapInfo)kCGImageAlphaPremultipliedFirst);
    CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(cgImage), CGImageGetHeight(cgImage)), cgImage);

    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}
+ (BOOL)appendToAdapter:(AVAssetWriterInputPixelBufferAdaptor *)adaptor
            pixelBuffer:(CVPixelBufferRef)buffer
                 atTime:(CMTime)time {
    // Wait until the writer input is ready to accept another buffer before appending.
    while (!adaptor.assetWriterInput.readyForMoreMediaData) {
        [[NSRunLoop currentRunLoop] runUntilDate:[NSDate dateWithTimeIntervalSinceNow:0.1]];
    }
    return [adaptor appendPixelBuffer:buffer withPresentationTime:time];
}
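
For reference, `Segment` is just a simple model object. Only the two properties used above (`segmentImage` and `durationMS`) matter here; this is a minimal sketch of what the code assumes, not the actual class:

    // Minimal sketch of the Segment model assumed by the code above.
    // Only segmentImage and durationMS are actually referenced; anything else is hypothetical.
    @interface Segment : NSObject
    @property (nonatomic, strong) UIImage *segmentImage; // still image shown for this segment
    @property (nonatomic, strong) NSNumber *durationMS;  // how long the image stays on screen, in milliseconds
    @end

    @implementation Segment
    @end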