17

I have successfully created video from images using the following code

-(void)writeImageAsMovie:(NSArray *)array toPath:(NSString*)path size:(CGSize)size duration:(int)duration 
{
    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
                                  [NSURL fileURLWithPath:path] fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                   nil];
    AVAssetWriterInput* writerInput = [[AVAssetWriterInput
                                        assetWriterInputWithMediaType:AVMediaTypeVideo
                                        outputSettings:videoSettings] retain];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                     sourcePixelBufferAttributes:nil];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);
    [videoWriter addInput:writerInput];


    //Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    CVPixelBufferRef buffer = NULL;
    buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:0] CGImage]];
    [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];

    //Write samples:
    for (int i = 0;i<[array count]; i++)
    {
        if([writerInput isReadyForMoreMediaData])
        {
            NSLog(@"inside for loop %d",i);
            CMTime frameTime = CMTimeMake(1, 20);

            CMTime lastTime=CMTimeMake(i, 20); //i is from 0 to 24 of the loop above

            CMTime presentTime=CMTimeAdd(lastTime, frameTime);

            buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:i] CGImage]];

            [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];

        }
        else
        {
            NSLog(@"error");
            i--;
        }
    }
    NSLog(@"outside for loop");

    //Finish the session:
    [writerInput markAsFinished];
    [videoWriter finishWriting];
}

Here I have used CVPixelBufferRef. Instead of this, I want to use the CVPixelBufferPoolRef in conjunction with AVAssetWriterInputPixelBufferAdaptor.

Can anybody provide an example which I can debug and use?

WrightsCS
  • 50,551
  • 22
  • 134
  • 186
Atulkumar V. Jain
  • 5,102
  • 9
  • 44
  • 61

4 Answers4

15

You are passing nil 'sourcePixelBufferAttributes', because of which the pixel buffer pool will not get created:

AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:nil];

Instead pass some attributes, for example:

NSDictionary *bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];

Then you can use the pool to create the pixel buffers, like:

CVPixelBufferPoolCreatePixelBuffer (NULL, adaptor.pixelBufferPool, &pixelBuffer);
Atulkumar V. Jain
  • 5,102
  • 9
  • 44
  • 61
radical
  • 4,364
  • 2
  • 25
  • 27
6

@Atulkumar V. Jain : great! good luck ^^ @Brian : you are right thanks, I correct it and I am getting it work now here is the working code (if someone else need it :-) )

CVPixelBufferRef buffer = NULL;
buffer = [self pixelBufferFromCGImage:[[imagesArray objectAtIndex:0] CGImage]];
CVPixelBufferPoolCreatePixelBuffer (NULL, adaptor_.pixelBufferPool, &buffer);

[adaptor_ appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];

__block UInt64 convertedByteCount = 0;
dispatch_queue_t mediaInputQueue =  dispatch_queue_create("mediaInputQueue", NULL);
static int i = 1;
int frameNumber = [imagesArray count];

[writerInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^{
    while (1){
        if (i == frameNumber) {
            break;
        }
        if ([writerInput isReadyForMoreMediaData]) {

            CVPixelBufferRef sampleBuffer = [self pixelBufferFromCGImage:[[imagesArray objectAtIndex:i] CGImage]];
            NSLog(@"inside for loop %d",i);
            CMTime frameTime = CMTimeMake(1, 20);

           CMTime lastTime=CMTimeMake(i, 20); //i is from 0 to 19 of the loop above

           CMTime presentTime=CMTimeAdd(lastTime, frameTime);       

        if (sampleBuffer) {
                [adaptor_ appendPixelBuffer:sampleBuffer withPresentationTime:presentTime];
                i++;
                CFRelease(sampleBuffer);
            } else {
                break;
            }
        }
    }
    NSLog (@"done");
    [writerInput markAsFinished];
    [videoWriter finishWriting];     

    CVPixelBufferPoolRelease(adaptor_.pixelBufferPool);
    [videoWriter release];
    [writerInput release];      
    [imagesArray removeAllObjects];


}];
pink
  • 101
  • 1
  • 1
4

Instead of using "for" use this code :

dispatch_queue_t mediaInputQueue =  dispatch_queue_create("mediaInputQueue", NULL);
[writerInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^{

    CVPixelBufferRef buffer = NULL;
    buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:0] CGImage]];
    CVPixelBufferPoolCreatePixelBuffer (NULL, adaptor.pixelBufferPool, &buffer);

    [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
    int i = 1;
     while (writerInput.readyForMoreMediaData) {
         NSLog(@"inside for loop %d",i);
         CMTime frameTime = CMTimeMake(1, 20);

         CMTime lastTime=CMTimeMake(i, 20); //i is from 0 to 19 of the loop above

         CMTime presentTime=CMTimeAdd(lastTime, frameTime);

         if (i >= [array count]) {
             buffer = NULL;
         }else {
              buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:i] CGImage]];
         }          
         //CVBufferRetain(buffer);

         if (buffer) {
             // append buffer
             [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
             i++;
         } else {
             // done!

             //Finish the session:
             [writerInput markAsFinished];
             [videoWriter finishWriting];                

             CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
             [videoWriter release];
             [writerInput release];
             NSLog (@"Done");
             [imageArray removeAllObjects];              
             break;
         }
     }
}];
pink
  • 101
  • 1
  • 1
  • thanx for the help but i have already solved the problem and the also completed the project... – Atulkumar V. Jain Mar 11 '11 at 13:12
  • 1
    There's also a bug in this routine in that the writer may become 'not ready' before you iterate over your array. If you move your counter out of the block, and clean up the loop entry to key of any index, you should be fine. – Brian King Mar 14 '11 at 03:52
4

I got it all working!

Here is the sample code link: git@github.com:RudyAramayo/AVAssetWriterInputPixelBufferAdaptorSample.git

Here is the code you need:

- (void) testCompressionSession
{
    CGSize size = CGSizeMake(480, 320);

    NSString *betaCompressionDirectory = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/Movie.m4v"];

    NSError *error = nil;

    unlink([betaCompressionDirectory UTF8String]);

    //----initialize compression engine
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:betaCompressionDirectory]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);
    if(error)
        NSLog(@"error = %@", [error localizedDescription]);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey,
                                       [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                       [NSNumber numberWithInt:size.height], AVVideoHeightKey, nil];
    AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];

    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
                                                               [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                                                                                         sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);

    if ([videoWriter canAddInput:writerInput])
        NSLog(@"I can add this input");
    else
        NSLog(@"i can't add this input");

    [videoWriter addInput:writerInput];
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    //---
    // insert demo debugging code to write the same image repeated as a movie

    CGImageRef theImage = [[UIImage imageNamed:@"Lotus.png"] CGImage];

    dispatch_queue_t dispatchQueue = dispatch_queue_create("mediaInputQueue", NULL);
    int __block frame = 0;

    [writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
        while ([writerInput isReadyForMoreMediaData])
        {
            if(++frame >= 120)
            {
                [writerInput markAsFinished];
                [videoWriter finishWriting];
                [videoWriter release];
                break;
            }

            CVPixelBufferRef buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:theImage size:size];
            if (buffer)
            {
                if(![adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(frame, 20)])
                    NSLog(@"FAIL");
                else
                    NSLog(@"Success:%d", frame);
                CFRelease(buffer);
            }
        }
    }];

    NSLog(@"outside for loop");

}

- (CVPixelBufferRef )pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size
{
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey, 
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32ARGB, (CFDictionaryRef) options, &pxbuffer);
    // CVReturn status = CVPixelBufferPoolCreatePixelBuffer(NULL, adaptor.pixelBufferPool, &pxbuffer);

    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL); 

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8, 4*size.width, rgbColorSpace, kCGImageAlphaPremultipliedFirst);
    NSParameterAssert(context);

    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);

    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}
Community
  • 1
  • 1
Orbitus007
  • 685
  • 6
  • 12
  • Did you mean to uncomment the `CVPixelBufferPoolCreatePixelBuffer` line inside `pixelBufferFromCGImage`? Or did you decide not to use `CVPixelBufferPoolCreatePixelBuffer`? – taber Sep 28 '12 at 01:45
  • I was so lost at the time I wrote this, it was a compilation of many different sets of code that I tried to re-engineer... please don't mind some of the quarks like the commented stuff, I was still learning/testing... Ill have to improve the sample and submit more AVFoundation code soon. – Orbitus007 Oct 18 '12 at 13:59
  • 1
    Don't use this code - it's wrong. To use a pool you need to call only CVPixelBufferPoolCreatePixelBuffer. – AlexeyVMP Mar 12 '14 at 22:21