
I found this tutorial http://codethink.no-ip.org/wordpress/archives/673#comment-118063 via the SO question Screen capture video in iOS programmatically. It was a bit outdated for iOS, so I updated it, and I'm very close to having it work, but putting the UIImages together into a video just isn't working right now.

Here is how I call the methods in viewDidLoad:

[captureView performSelector:@selector(startRecording) withObject:nil afterDelay:1.0];
[captureView performSelector:@selector(stopRecording) withObject:nil afterDelay:5.0];

Here, captureView is an IBOutlet connected to my view.
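For context, a minimal sketch of the view-controller side, assuming the outlet is named captureView and the controller adopts ScreenCaptureViewDelegate (the wiring details are my assumption, not from the tutorial):

#import "ScreenCapture.h"

@interface ViewController () <ScreenCaptureViewDelegate>
@property (nonatomic, strong) IBOutlet ScreenCaptureView *captureView;
@end

@implementation ViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    self.captureView.delegate = self;
    [self.captureView performSelector:@selector(startRecording) withObject:nil afterDelay:1.0];
    [self.captureView performSelector:@selector(stopRecording) withObject:nil afterDelay:5.0];
}

// Called on the main thread by ScreenCaptureView when writing finishes.
- (void)recordingFinished:(NSString *)outputPathOrNil {
    NSLog(@"Recording finished: %@", outputPathOrNil);
}

@end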

Then I have the ScreenCaptureView class, in ScreenCapture.h and ScreenCapture.m.

Here is the .h:

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>

@protocol ScreenCaptureViewDelegate <NSObject>
- (void) recordingFinished:(NSString*)outputPathOrNil;
@end

@interface ScreenCaptureView : UIView {
    //video writing
    AVAssetWriter *videoWriter;
    AVAssetWriterInput *videoWriterInput;
    AVAssetWriterInputPixelBufferAdaptor *avAdaptor;

    //recording state
    BOOL _recording;
    NSDate* startedAt;
    void* bitmapData;
}

//for recording video
- (bool) startRecording;
- (void) stopRecording;

//for accessing the current screen and adjusting the capture rate, etc.
@property(retain) UIImage* currentScreen;
@property(assign) float frameRate;
@property(nonatomic, assign) id<ScreenCaptureViewDelegate> delegate;

@end

And here is my .m:

#import "ScreenCapture.h"

@interface ScreenCaptureView (Private)
- (void) writeVideoFrameAtTime:(CMTime)time;
@end

@implementation ScreenCaptureView

@synthesize currentScreen, frameRate, delegate;

- (void) initialize {
    // Initialization code
    self.clearsContextBeforeDrawing = YES;
    self.currentScreen = nil;
    self.frameRate = 10.0f;     //10 frames per seconds
    _recording = false;
    videoWriter = nil;
    videoWriterInput = nil;
    avAdaptor = nil;
    startedAt = nil;
    bitmapData = NULL;
}

- (id) initWithCoder:(NSCoder *)aDecoder {
    self = [super initWithCoder:aDecoder];
    if (self) {
        [self initialize];
    }
    return self;
}

- (id) init {
    self = [super init];
    if (self) {
        [self initialize];
    }
    return self;
}

- (id)initWithFrame:(CGRect)frame {
    self = [super initWithFrame:frame];
    if (self) {
        [self initialize];
    }
    return self;
}

- (CGContextRef) createBitmapContextOfSize:(CGSize) size {
    CGContextRef    context = NULL;
    CGColorSpaceRef colorSpace;
    int             bitmapByteCount;
    int             bitmapBytesPerRow;

    bitmapBytesPerRow   = (size.width * 4);
    bitmapByteCount     = (bitmapBytesPerRow * size.height);
    colorSpace = CGColorSpaceCreateDeviceRGB();
    if (bitmapData != NULL) {
        free(bitmapData);
    }
    bitmapData = malloc( bitmapByteCount );
    if (bitmapData == NULL) {
        fprintf (stderr, "Memory not allocated!");
        return NULL;
    }

    context = CGBitmapContextCreate (bitmapData,
                                     size.width,
                                     size.height,
                                     8,      // bits per component
                                     bitmapBytesPerRow,
                                     colorSpace,
                                     (CGBitmapInfo) kCGImageAlphaNoneSkipFirst);
    if (context == NULL) {
        free(bitmapData);
        bitmapData = NULL;
        fprintf(stderr, "Context not created!");
        CGColorSpaceRelease(colorSpace);
        return NULL;
    }
    //only configure the context once we know it was actually created
    CGContextSetAllowsAntialiasing(context, NO);
    CGColorSpaceRelease( colorSpace );

    return context;
}

static int frameCount = 0;            //debugging
- (void) drawRect:(CGRect)rect {
    NSDate* start = [NSDate date];
    CGContextRef context = [self createBitmapContextOfSize:self.frame.size];

    //not sure why this is necessary...image renders upside-down and mirrored
    CGAffineTransform flipVertical = CGAffineTransformMake(1, 0, 0, -1, 0, self.frame.size.height);
    CGContextConcatCTM(context, flipVertical);

    [self.layer renderInContext:context];

    CGImageRef cgImage = CGBitmapContextCreateImage(context);
    UIImage* background = [UIImage imageWithCGImage: cgImage];
    CGImageRelease(cgImage);

    self.currentScreen = background;

    //debugging
    if (frameCount < 40) {
          NSString* filename = [NSString stringWithFormat:@"Documents/frame_%d.png", frameCount];
          NSString* pngPath = [NSHomeDirectory() stringByAppendingPathComponent:filename];
          [UIImagePNGRepresentation(self.currentScreen) writeToFile: pngPath atomically: YES];
          frameCount++;
    }

    //NOTE:  to record a scrollview while it is scrolling you need to implement your UIScrollViewDelegate such that it calls
    //       'setNeedsDisplay' on the ScreenCaptureView.
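    //       A minimal sketch (my assumption, not part of the original tutorial),
    //       where the scroll view's delegate forwards scrolling into this view:
    //
    //       - (void)scrollViewDidScroll:(UIScrollView *)scrollView {
    //           [captureView setNeedsDisplay];   // captureView: the ScreenCaptureView outlet
    //       }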
    if (_recording) {
        float millisElapsed = [[NSDate date] timeIntervalSinceDate:startedAt] * 1000.0;
        [self writeVideoFrameAtTime:CMTimeMake((int)millisElapsed, 1000)];
    }

    float processingSeconds = [[NSDate date] timeIntervalSinceDate:start];
    float delayRemaining = (1.0 / self.frameRate) - processingSeconds;

    CGContextRelease(context);

    //redraw at the specified framerate
    [self performSelector:@selector(setNeedsDisplay) withObject:nil afterDelay:delayRemaining > 0.0 ? delayRemaining : 0.01];
}

- (void) cleanupWriter {
    avAdaptor = nil;

    videoWriterInput = nil;

    videoWriter = nil;

    startedAt = nil;

    if (bitmapData != NULL) {
        free(bitmapData);
        bitmapData = NULL;
    }
}

- (void)dealloc {
    [self cleanupWriter];
}

- (NSURL*) tempFileURL {
    NSString* outputPath = [[NSString alloc] initWithFormat:@"%@/%@", [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0], @"output.mp4"];
    NSURL* outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
    NSFileManager* fileManager = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:outputPath]) {
        NSError* error;
        if ([fileManager removeItemAtPath:outputPath error:&error] == NO) {
            NSLog(@"Could not delete old recording file at path:  %@", outputPath);
        }
    }

    return outputURL;
}

-(BOOL) setUpWriter {
    NSError* error = nil;
    videoWriter = [[AVAssetWriter alloc] initWithURL:[self tempFileURL] fileType:AVFileTypeQuickTimeMovie error:&error];
    NSParameterAssert(videoWriter);

    //Configure video
    NSDictionary* videoCompressionProps = [NSDictionary dictionaryWithObjectsAndKeys:
                                           [NSNumber numberWithDouble:1024.0*1024.0], AVVideoAverageBitRateKey,
                                           nil ];

    NSDictionary* videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:self.frame.size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:self.frame.size.height], AVVideoHeightKey,
                                   videoCompressionProps, AVVideoCompressionPropertiesKey,
                                   nil];

    videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];

    NSParameterAssert(videoWriterInput);
    videoWriterInput.expectsMediaDataInRealTime = YES;
    NSDictionary* bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
                                      [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];

    avAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput sourcePixelBufferAttributes:bufferAttributes];

    //add input
    [videoWriter addInput:videoWriterInput];
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:CMTimeMake(0, 1000)];

    return YES;
}

- (void) completeRecordingSession {

    [videoWriterInput markAsFinished];

    // Wait for the video
    int status = videoWriter.status;
    while (status == AVAssetWriterStatusUnknown) {
        NSLog(@"Waiting...");
        [NSThread sleepForTimeInterval:0.5f];
        status = videoWriter.status;
    }

    @synchronized(self) {
        [videoWriter finishWritingWithCompletionHandler:^{
            [self cleanupWriter];
            BOOL success = YES;
            id delegateObj = self.delegate;
            NSString *outputPath = [[NSString alloc] initWithFormat:@"%@/%@", [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0], @"output.mp4"];
            NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];

            NSLog(@"Completed recording, file is stored at:  %@", outputURL);
            if ([delegateObj respondsToSelector:@selector(recordingFinished:)]) {
                //the protocol declares an NSString* parameter, so pass the path, not the URL
                [delegateObj performSelectorOnMainThread:@selector(recordingFinished:) withObject:(success ? outputPath : nil) waitUntilDone:YES];
            }
        }];
    }

}

- (bool) startRecording {
    bool result = NO;
    @synchronized(self) {
        if (! _recording) {
            result = [self setUpWriter];
            startedAt = [NSDate date];
            _recording = true;
        }
    }

    return result;
}

- (void) stopRecording {
    @synchronized(self) {
        if (_recording) {
            _recording = false;
            [self completeRecordingSession];
        }
    }
}

-(void) writeVideoFrameAtTime:(CMTime)time {
    if (![videoWriterInput isReadyForMoreMediaData]) {
        NSLog(@"Not ready for video data");
    }
    else {
        @synchronized (self) {
            UIImage *newFrame = self.currentScreen;
            CVPixelBufferRef pixelBuffer = NULL;
            CGImageRef cgImage = CGImageCreateCopy([newFrame CGImage]);
            CFDataRef image = CGDataProviderCopyData(CGImageGetDataProvider(cgImage));

            int status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, avAdaptor.pixelBufferPool, &pixelBuffer);
            if(status != 0){
                //could not get a buffer from the pool
                NSLog(@"Error creating pixel buffer:  status=%d", status);
            }
            // set image data into pixel buffer
            CVPixelBufferLockBaseAddress( pixelBuffer, 0 );
            uint8_t *destPixels = CVPixelBufferGetBaseAddress(pixelBuffer);
            CFDataGetBytes(image, CFRangeMake(0, CFDataGetLength(image)), destPixels);  //XXX:  will work if the pixel buffer is contiguous and has the same bytesPerRow as the input data

            if(status == 0){
                BOOL success = [avAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:time];
                if (!success)
                    NSLog(@"Warning:  Unable to write buffer to video");
            }

            //clean up
            CVPixelBufferUnlockBaseAddress( pixelBuffer, 0 );
            CVPixelBufferRelease( pixelBuffer );
            CFRelease(image);
            CGImageRelease(cgImage);
        }

    }

}

As you can see in the drawRect: method I save the frames as images, and they look great. But when I try to make the video, the output is just a single still frame, slanted and all weird, while the saved frames look normal.

[screenshot: the video output is a diagonally skewed still image, while the saved PNG frames look correct]

My question is: what is going wrong when the video is being made?

Thanks for the help and your time, I know this is a long question.

– iqueqiorio

4 Answers


I found this post after having the same issue: certain resolutions caused the exact same video effect when I wanted to create a CVPixelBufferRef from a CGImageRef (coming from a UIImage).

The very short answer in my case was that I had hard-wired the bytes per row to be 4 times the width, which used to work all the time. Now I query the CVPixelBuffer itself for this value, and poof, problem solved!

Code that created the problem was this:

CGContextRef context = CGBitmapContextCreate(pxdata, w, h, 8, 4*w, rgbColorSpace, bitMapInfo);

Code that fixed the problem was this:

CGContextRef context = CGBitmapContextCreate(pxdata, w, h,
                                             8, CVPixelBufferGetBytesPerRow(pxbuffer),
                                             rgbColorSpace, bitMapInfo);

And in both cases, the bitMapInfo was set:

CGBitmapInfo bitMapInfo = (CGBitmapInfo)kCGImageAlphaPremultipliedFirst; // According to Apple's doc, this is safe:  June 26, 2014
– VTPete

Pixel buffer adaptors only work with certain pixel sizes of images, so you're probably going to need to resize them. Imagine that in your video the writer is trying to write, let's say, 361x241 images into a 360x240 space: each row then starts with the last pixel of the previous row, so the picture ends up diagonally skewed, like you see. Check the Apple docs for supported dimensions; I believe I used 480x320 and it's supported. You can use this method to resize your images:

+ (UIImage *)scaleImage:(UIImage *)image toSize:(CGSize)newSize {

    CGRect scaledImageRect = CGRectZero;

    //aspect-fit: use the smaller scale factor so the whole image fits in newSize
    CGFloat aspectWidth  = newSize.width / image.size.width;
    CGFloat aspectHeight = newSize.height / image.size.height;
    CGFloat aspectRatio  = MIN(aspectWidth, aspectHeight);

    scaledImageRect.size.width  = image.size.width * aspectRatio;
    scaledImageRect.size.height = image.size.height * aspectRatio;
    scaledImageRect.origin.x = (newSize.width - scaledImageRect.size.width) / 2.0f;
    scaledImageRect.origin.y = (newSize.height - scaledImageRect.size.height) / 2.0f;

    UIGraphicsBeginImageContextWithOptions(newSize, NO, 0);
    [image drawInRect:scaledImageRect];
    UIImage *scaledImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();

    return scaledImage;
}
– AlexKoren
  • Great, thanks for the method. When I log `pixelBuffer` I get this: […] – iqueqiorio Apr 10 '15 at 02:11
  • Change this: `UIImage *newFrame = self.currentScreen;` to this: `UIImage *newFrame = [self scaleImage:self.currentScreen toSize:CGSizeMake(480, 320)];` – AlexKoren Apr 10 '15 at 04:07
  • Yeah, that's what I did; you can try it for yourself, it throws an error that's kind of hard to explain. Here is the project: https://github.com/spennyf/cropVid. Just uncomment lines 81 and 82 in ViewController.m to have it try to record. Thanks – iqueqiorio Apr 10 '15 at 04:49
  • Hey Alex, were you able to take a look and give it a try to see what I mean? – iqueqiorio Apr 10 '15 at 19:05
  • Unfortunately I can't, because I'm busy over the next few days. I also don't have the images, so it's very hard for me to test. Can you at least tell me what the error is? – AlexKoren Apr 10 '15 at 20:48
  • Hey, whenever you can get around to it is cool, after the weekend or whenever. To test, just run it once and take a short vid (already programmed) and you will see what I am talking about; it would take like 5 mins. It's a weird error: it doesn't throw an error, it just fails, and then I see the green line across this line, `[self.layer renderInContext:context];`, in ScreenCapture.m. I don't see anything in the log. – iqueqiorio Apr 10 '15 at 21:25
  • Is it an EXC_BAD_ACCESS? If so, the context probably got dealloced. – AlexKoren Apr 10 '15 at 21:42
  • Yes, it is an EXC_BAD_ACCESS, and it is actually happening on this line, `CFDataGetBytes(image, CFRangeMake(0, CFDataGetLength(image)), destPixels);`, in ScreenCapture.m, which has to do with the pixel buffer. So you're on the right track with changing the size. – iqueqiorio Apr 10 '15 at 22:06
  • I think the problem is that `destPixels` in this line is empty when I log it, so I think this line is causing the problem: `uint8_t *destPixels = CVPixelBufferGetBaseAddress(pixelBuffer);`. But I'm not quite sure what it does. – iqueqiorio Apr 10 '15 at 22:21
  • Let us [continue this discussion in chat](http://chat.stackoverflow.com/rooms/74975/discussion-between-alexkoren-and-iqueqiorio). – AlexKoren Apr 10 '15 at 22:22
  • @iqueqiorio could you fix the EXC_BAD_ACCESS error? I am having the exact same problem, and at first I thought it was about the image/video size/aspect ratio, but even if I change it, the error remains :/ – aytunch Apr 19 '15 at 23:59

I think this is because the pixel buffer's bytes per row does not match the UIImage's bytes per row. In my case (iPhone 6, iOS 8.3) the UIImage is 568 x 320 and CFDataGetLength is 727040, so the bytes per row is 2272. But the pixel buffer's bytes per row is 2304. I think these extra 32 bytes are padding so that the pixel buffer's bytes per row is divisible by 64. How you force the pixel buffer to match the input data, or vice versa, across all devices, I'm not sure yet (a copy sketch follows the comment below).

– Matto.G.G
  • Setting bitmapBytesPerRow equal to the pixel buffer's bytes per row in `(CGContextRef)createBitmapContextOfSize:(CGSize)size` works for all devices. – Matto.G.G May 11 '15 at 05:41
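Building on that answer and comment, here is a hedged sketch of how the question's writeVideoFrameAtTime: could copy the frame row by row instead of with one CFDataGetBytes call, so the CVPixelBuffer's padded bytes per row no longer skews the output. It reuses the question's local variables (pixelBuffer, cgImage, image) and is a sketch, not a tested drop-in:

//replaces the lock/copy/unlock portion of writeVideoFrameAtTime:
CVPixelBufferLockBaseAddress(pixelBuffer, 0);
uint8_t *destPixels    = CVPixelBufferGetBaseAddress(pixelBuffer);
size_t destBytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);  //may be padded (e.g. to a multiple of 64)
size_t srcBytesPerRow  = CGImageGetBytesPerRow(cgImage);
size_t height          = CGImageGetHeight(cgImage);
const uint8_t *srcPixels = CFDataGetBytePtr(image);

//copy each row separately, honoring both strides
size_t bytesToCopy = MIN(srcBytesPerRow, destBytesPerRow);
for (size_t row = 0; row < height; row++) {
    memcpy(destPixels + row * destBytesPerRow,
           srcPixels  + row * srcBytesPerRow,
           bytesToCopy);
}
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);

This sidesteps the assumption flagged by the question's own XXX comment, that the pixel buffer is contiguous and has the same bytes per row as the input data.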

I've suffered a lot with this one. I tried many ways to create a video from an image array, but the result was almost the same as yours.

The problem was in the CVPixelBuffer: the buffer I created from the image was not correct.

But finally I got it working.

Main function to create a video at a URL from an array:

You just have to input the array of images and the fps; the size can be equal to the size of the images (if you want). fps = number of images in the array / desired duration.

For example: fps = 90 / 3 = 30.

- (void)getVideoFrom:(NSArray *)array
              toPath:(NSString*)path
                size:(CGSize)size
                 fps:(int)fps
   withCallbackBlock:(void (^) (BOOL))callbackBlock
{
    NSLog(@"%@", path);
    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
                                                           fileType:AVFileTypeMPEG4
                                                              error:&error];
    if (error) {
        if (callbackBlock) {
            callbackBlock(NO);
        }
        return;
    }
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = @{AVVideoCodecKey: AVVideoCodecTypeH264,
                                    AVVideoWidthKey: [NSNumber numberWithInt:size.width],
                                    AVVideoHeightKey: [NSNumber numberWithInt:size.height]};

    AVAssetWriterInput* writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                         outputSettings:videoSettings];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                                                                                     sourcePixelBufferAttributes:nil];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);
    [videoWriter addInput:writerInput];

    //Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    CVPixelBufferRef buffer = NULL;   //filled per-frame below

    CMTime presentTime = CMTimeMake(0, fps);

    int i = 0;
    while (1)
    {

        if(writerInput.readyForMoreMediaData){

            presentTime = CMTimeMake(i, fps);

            if (i >= [array count]) {
                buffer = NULL;
            } else {
                buffer = [self pixelBufferFromCGImage:[array[i] CGImage] size:CGSizeMake(480, 320)];
            }

            if (buffer) {
                //append buffer

                BOOL appendSuccess = [self appendToAdapter:adaptor
                                               pixelBuffer:buffer
                                                    atTime:presentTime
                                                 withInput:writerInput];
                NSAssert(appendSuccess, @"Failed to append");

                //the adaptor retains the buffer while it needs it, so release our reference
                CVPixelBufferRelease(buffer);

                i++;
            } else {

                //Finish the session:
                [writerInput markAsFinished];

                [videoWriter finishWritingWithCompletionHandler:^{
                    NSLog(@"Successfully closed video writer");
                    if (videoWriter.status == AVAssetWriterStatusCompleted) {
                        if (callbackBlock) {
                            callbackBlock(YES);
                        }
                    } else {
                        if (callbackBlock) {
                            callbackBlock(NO);
                        }
                    }
                }];

                NSLog (@"Done");
                break;
            }
        }
    }
}

Function to get a CVPixelBuffer from a CGImage:

-(CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image size:(CGSize)imageSize
{

    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];

    CVPixelBufferRef pxbuffer = NULL;
    CVPixelBufferCreate(kCFAllocatorDefault, CGImageGetWidth(image),
                        CGImageGetHeight(image), kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef) options,
                        &pxbuffer);
    CVPixelBufferLockBaseAddress(pxbuffer, 0);

    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, CGImageGetWidth(image),
                                                 CGImageGetHeight(image), 8, CVPixelBufferGetBytesPerRow(pxbuffer), rgbColorSpace,
                                                 (CGBitmapInfo)kCGImageAlphaNoneSkipFirst);


    CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));

    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}

Function to append to the adaptor:

-(BOOL)appendToAdapter:(AVAssetWriterInputPixelBufferAdaptor*)adaptor
           pixelBuffer:(CVPixelBufferRef)buffer
                atTime:(CMTime)presentTime
             withInput:(AVAssetWriterInput*)writerInput
{
    while (!writerInput.readyForMoreMediaData) {
        usleep(1);
    }

    return [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
}
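For illustration, a hypothetical call site; the frames array, path, size, and fps here are my assumptions, chosen to match the question's 480x320 discussion:

//assumes `frames` holds the captured UIImages, e.g. 40 frames over 4 seconds
NSString *docsDir = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject];
NSString *outPath = [docsDir stringByAppendingPathComponent:@"output.mp4"];

[self getVideoFrom:frames
            toPath:outPath
              size:CGSizeMake(480, 320)   //should match the frame images
               fps:10                     //40 frames / 4 seconds
 withCallbackBlock:^(BOOL success) {
     NSLog(@"Video written: %@", success ? @"YES" : @"NO");
 }];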
– Talha Ahmad Khan