7

I am trying to create a movie from some pictures. It works just fine with hd pictures ({720, 1280}) or lower resolutions . But when i try to create the movie with full hd pictures {1080, 1920} , the video is scrambled. Here is a link to see how it looks http://www.youtube.com/watch?v=BfYldb8e_18 . Do you have any ideas what i may be doing wrong?

- (void) createMovieWithOptions:(NSDictionary *) options
{
@autoreleasepool {
    NSString *path = [options valueForKey:@"path"];
    CGSize size =  [(NSValue *)[options valueForKey:@"size"] CGSizeValue];
    NSArray *imageArray = [options valueForKey:@"pictures"];
    NSInteger recordingFPS = [[options valueForKey:@"fps"] integerValue];
    BOOL success=YES;
    NSError *error = nil;

    AVAssetWriter *assetWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(assetWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithFloat:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithFloat:size.height], AVVideoHeightKey,
                                   nil];

    AVAssetWriterInput *videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                              outputSettings:videoSettings];

    // Configure settings for the pixel buffer adaptor.
    NSDictionary* bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
                                      [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                                                                                     sourcePixelBufferAttributes:bufferAttributes];

    NSParameterAssert(videoWriterInput);
    NSParameterAssert([assetWriter canAddInput:videoWriterInput]);

    videoWriterInput.expectsMediaDataInRealTime = NO;
    [assetWriter addInput:videoWriterInput];

    //Start a session:
    [assetWriter startWriting];
    [assetWriter startSessionAtSourceTime:kCMTimeZero];

    CVPixelBufferRef buffer = NULL;

    //convert uiimage to CGImage.

    int frameCount = 0;
    float progress = 0;
    float progressFromFrames = _progressView.progress; //only for create iflipbook movie

    for(UIImage * img in imageArray)
    {
        if([[NSThread currentThread] isCancelled])
        {
            [NSThread exit];
        }

        [condCreateMovie lock];
        if(isCreateMoviePaused)
        {
            [condCreateMovie wait];
        }

        uint64_t totalFreeSpace=[Utils getFreeDiskspace];
        if(((totalFreeSpace/1024ll)/1024ll)<50)
        {
            success=NO;
            break;
        }

        //        @autoreleasepool {
        NSLog(@"size:%@",NSStringFromCGSize(img.size));

        buffer = [[MovieWritter sharedMovieWritter] pixelBufferFromCGImage:[img CGImage] andSize:size];

        BOOL append_ok = NO;
        int j = 0;
        while (!append_ok && j < 60)
        {
            if(adaptor.assetWriterInput.readyForMoreMediaData)
            {
                CMTime frameTime = CMTimeMake(frameCount, recordingFPS);
                append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];

                CVPixelBufferRelease(buffer);

                [NSThread sleepForTimeInterval:0.1];


                if(isCreatingiFlipBookFromImported)
                    progress = (float)frameCount/(float)[imageArray count]/2.0 + progressFromFrames;
                else
                    progress = (float)frameCount/(float)[imageArray count];

                [[NSNotificationCenter defaultCenter] postNotificationName:@"movieCreationProgress" object:[NSNumber numberWithFloat:progress]];
            }
            else
            {
                [NSThread sleepForTimeInterval:0.5];
            }
            j++;
        }
        if (!append_ok)
        {
            NSLog(@"error appending image %d times %d\n", frameCount, j);
        }
        frameCount++;

        [condCreateMovie unlock];
    }

    //Finish the session:
    [videoWriterInput markAsFinished];
    [assetWriter finishWriting];

    NSDictionary *dict = [NSDictionary dictionaryWithObjectsAndKeys:
                          [NSNumber numberWithBool:success], @"success",
                          path, @"path", nil];

    [[NSNotificationCenter defaultCenter] postNotificationName:@"movieCreationFinished" object:dict];
}
}

*Edit . Here is the code for [[MovieWritter sharedMovieWritter] pixelBufferFromCGImage:]

- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image andSize:(CGSize) size
{
@autoreleasepool {
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;

    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width,
                                          size.height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef) options,
                                          &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
                                                 size.height, 8, 4*size.width, rgbColorSpace,
                                                 kCGImageAlphaNoneSkipFirst);
    NSParameterAssert(context);
    CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                           CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

     return pxbuffer;
}
}
Andrei Neacsu
  • 1,453
  • 3
  • 20
  • 33
flaviusilaghi
  • 677
  • 3
  • 10
  • 27

4 Answers4

3

I had the same problem and this answer resolved it: the size of the video must be a multiple of 16.

Community
  • 1
  • 1
Remy Cilia
  • 2,573
  • 1
  • 20
  • 31
  • Thank you so much! Been struggling with various bits of this for days. A ridiculous requirement, but Apple's full of those. – Linuxios May 19 '15 at 17:40
2

Pretty sure that this is either a HW limitation or a bug. Please file a Radar.

Cocoanetics
  • 8,171
  • 2
  • 30
  • 57
2

how about something like this to get pixel buffer

    //you could use a cgiimageref here instead
    CFDataRef imageData= CGDataProviderCopyData(CGImageGetDataProvider(imageView.image.CGImage));
    NSLog (@"copied image data");
    cvErr = CVPixelBufferCreateWithBytes(kCFAllocatorDefault,
                                         FRAME_WIDTH,
                                         FRAME_HEIGHT,
                                         kCVPixelFormatType_32BGRA,
                                         (void*)CFDataGetBytePtr(imageData),
                                         CGImageGetBytesPerRow(imageView.image.CGImage),
                                         NULL,
                                         NULL,
                                         NULL,
                                         &pixelBuffer);
    NSLog (@"CVPixelBufferCreateWithBytes returned %d", cvErr);

    CFAbsoluteTime thisFrameWallClockTime = CFAbsoluteTimeGetCurrent();  
    CFTimeInterval elapsedTime = thisFrameWallClockTime - firstFrameWallClockTime;  
    NSLog (@"elapsedTime: %f", elapsedTime);
    CMTime presentationTime =  CMTimeMake(elapsedTime * TIME_SCALE, TIME_SCALE);

    // write the sample
    BOOL appended = [assetWriterPixelBufferAdaptor  appendPixelBuffer:pixelBuffer withPresentationTime:presentationTime];
    CVPixelBufferRelease(pixelBuffer);
    CFRelease(imageData);
    if (appended) {
        NSLog (@"appended sample at time %lf", CMTimeGetSeconds(presentationTime));
    } else {
        NSLog (@"failed to append");
        [self stopRecording];
        self.startStopButton.selected = NO;
    }
Cesar
  • 4,418
  • 2
  • 31
  • 37
Michelle Cannon
  • 1,341
  • 8
  • 8
-1

You may also want to set the capture settings preset , although high usually is suitable and that is default */ Constants to define capture setting presets using the sessionPreset property.

NSString *const AVCaptureSessionPresetPhoto;

NSString *const AVCaptureSessionPresetHigh;

NSString *const AVCaptureSessionPresetMedium;

NSString *const AVCaptureSessionPresetLow;

NSString *const AVCaptureSessionPreset352x288;

NSString *const AVCaptureSessionPreset640x480;

NSString *const AVCaptureSessionPreset1280x720;

NSString *const AVCaptureSessionPreset1920x1080;

NSString *const AVCaptureSessionPresetiFrame960x540;

NSString *const AVCaptureSessionPresetiFrame1280x720; */

//set it like this

self.captureSession.sessionPreset = AVCaptureSessionPreset1920x1080;

//or like this when you define avcapturesession

[self.captureSession setSessionPreset:AVCaptureSessionPreset1920x1080];

Michelle Cannon
  • 1,341
  • 8
  • 8
  • 1
    this is most irrelevant. My colleague and I need to fix this issue when we are feeding FULL HD images to a AVAssetWriterInputPixelBufferAdaptor. Images can be taken from the Camera Roll...we are resizing them to suite the FULL HD resolution. We are also capturing frames from the camera buffers but they still don't works using the AVAssetWriterInputPixelBufferAdaptor. Maybe this is a bug or limitation with AVAssetWriterInputPixelBufferAdaptor. – Andrei Neacsu Dec 17 '12 at 10:55
  • 1
    I've have some issues before not setting the preset so I mentioned it, having a bad attitude will not get you quicker help. could the pixel format be wrong try changing kCVPixelFormatType_32ARGB to kCVPixelFormatType_32BGRA like in the sample I show – Michelle Cannon Dec 17 '12 at 16:36
  • pixel format has nothing to do here. appendPixelBuffer ... method also returns a YES response. We have checked the size, settings, everything, but it is still not working. – Andrei Neacsu Dec 17 '12 at 17:14
  • Well from your youtube it looks like your getting a very distorted image, and since I don't see anything wrong with your code I'd start looking at your input, and format issues are a logical first choice. You say it works fine at lower resolutions which is puzzling though. and I don't think it could be something simple like using an int were a float is expected like for height or width. – Michelle Cannon Dec 17 '12 at 17:58
  • how about this, this you consider aspect ratio at all. where you lower formats test using the same aspect ratio. http://stackoverflow.com/questions/7327847/output-from-avassetwriter-uiimages-written-to-video-distorted – Michelle Cannon Dec 17 '12 at 18:02
  • aspect ration is the same. – Andrei Neacsu Dec 18 '12 at 14:19