I'm trying to convert an array of UIImages to video but I have a lot of black frames in resulting file (like, 4 black frames at the beginning, and 3 good frames after them, and after that 3 black frames and 2 good frames and this pattern is repeated till the end of the video).
My code is based on this solution but I believe that the main source of problem should be in this part of code:
func build(progress: (NSProgress -> Void), success: (NSURL -> Void), failure: (NSError -> Void)) {
//videosizes and path to temp output file
let inputSize = CGSize(width: 568, height: 320)
let outputSize = CGSize(width: 568, height: 320)
var error: NSError?
let documentsPath = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0] as! NSString
let videoOutputURL = NSURL(fileURLWithPath: documentsPath.stringByAppendingPathComponent("TempVideo.mov"))!
NSFileManager.defaultManager().removeItemAtURL(videoOutputURL, error: nil)
videoWriter = AVAssetWriter(URL: videoOutputURL, fileType: AVFileTypeMPEG4, error: &error)
if let videoWriter = videoWriter {
let videoSettings: [NSObject : AnyObject] = [
AVVideoCodecKey : AVVideoCodecH264,
AVVideoWidthKey : outputSize.width,
AVVideoHeightKey : outputSize.height,
]
let videoWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(
assetWriterInput: videoWriterInput,
sourcePixelBufferAttributes: [
kCVPixelBufferPixelFormatTypeKey : kCVPixelFormatType_32ARGB,
kCVPixelBufferWidthKey : inputSize.width,
kCVPixelBufferHeightKey : inputSize.height,
]
)
assert(videoWriter.canAddInput(videoWriterInput))
videoWriter.addInput(videoWriterInput)
if videoWriter.startWriting() {
videoWriter.startSessionAtSourceTime(kCMTimeZero)
assert(pixelBufferAdaptor.pixelBufferPool != nil)
let media_queue = dispatch_queue_create("mediaInputQueue", nil)
videoWriterInput.requestMediaDataWhenReadyOnQueue(media_queue, usingBlock: { () -> Void in
let fps: Int32 = 30
let frameDuration = CMTimeMake(1, fps)
let currentProgress = NSProgress(totalUnitCount: Int64(self.photoURLs.count))
var frameCount: Int64 = 0
for var i = 0; i < self.photoURLs.count - 1; i++ {
var currentFrame = self.photoURLs[i]
var lastFrameTime = CMTimeMake(Int64(i), fps)
var presentationTime = CMTimeAdd(lastFrameTime, frameDuration)
//this one is needed because sometimes videoWriter is not ready, and we have to wait for a while
while videoWriterInput.readyForMoreMediaData == false {
var maxDate = NSDate(timeIntervalSinceNow: 0.5)
var currentRunLoop = NSRunLoop()
currentRunLoop.runUntilDate(maxDate)
}
self.appendPixelBufferForImageAtURL(currentFrame, pixelBufferAdaptor: pixelBufferAdaptor, presentationTime: presentationTime)
frameCount++
currentProgress.completedUnitCount = frameCount
progress(currentProgress)
}
videoWriterInput.markAsFinished()
videoWriter.finishWritingWithCompletionHandler { () -> Void in
if error == nil {
success(videoOutputURL)
}
}
})
} else {
error = NSError(
domain: kErrorDomain,
code: kFailedToStartAssetWriterError,
userInfo: ["description": "AVAssetWriter failed to start writing"]
)
}
}
if let error = error {
failure(error)
}
}
Obviously I'm doing something wrong but what? I think it should be here because some of the images don't have any problems with conversion, but there are two more functions for pixelbuffer:
func appendPixelBufferForImageAtURL(image: UIImage, pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor, presentationTime: CMTime) -> Bool {
var appendSucceeded = true
autoreleasepool {
var pixelBuffer: Unmanaged<CVPixelBuffer>?
let status: CVReturn = CVPixelBufferPoolCreatePixelBuffer(
kCFAllocatorDefault,
pixelBufferAdaptor.pixelBufferPool,
&pixelBuffer
)
if let pixelBuffer = pixelBuffer where status == 0 {
let managedPixelBuffer = pixelBuffer.takeRetainedValue()
fillPixelBufferFromImage(image, pixelBuffer: managedPixelBuffer)
appendSucceeded = pixelBufferAdaptor.appendPixelBuffer(
managedPixelBuffer,
withPresentationTime: presentationTime
)
} else {
NSLog("error: Failed to allocate pixel buffer from pool")
}
}
return appendSucceeded
}
func fillPixelBufferFromImage(image: UIImage, pixelBuffer: CVPixelBufferRef) {
let imageData = CGDataProviderCopyData(CGImageGetDataProvider(image.CGImage))
let lockStatus:UInt8 = UInt8(CVPixelBufferLockBaseAddress(pixelBuffer, 0))
let pixelData = CVPixelBufferGetBaseAddress(pixelBuffer)
let bitmapInfo = CGBitmapInfo(rawValue: CGImageAlphaInfo.PremultipliedFirst.rawValue)
let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
let context = CGBitmapContextCreate(
pixelData,
Int(568),
Int(320),
8,
Int(8 * 320),
rgbColorSpace,
bitmapInfo
)
var imageDataProvider = CGDataProviderCreateWithCFData(imageData)
var imageRef = CGImageCreateWithJPEGDataProvider(imageDataProvider, nil, true, kCGRenderingIntentDefault)
CGContextDrawImage(context, CGRectMake(0, 0, 568, 320), imageRef)
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0)
}