8

I have used this method and get multiple images. I am able to successfully create a movie but my problem is that when I play the movie, it seems to be playing too fast i.e. the movie doesn't have all the frames. Here is my code.

-(UIImage *)uiImageScreen
{
  CGImageRef screen = UIGetScreenImage();
  UIImage* image = [UIImage imageWithCGImage:screen];
  CGImageRelease(screen);
  UIImageWriteToSavedPhotosAlbum(image, self,nil, nil);
  return image;
}

-(void) writeSample: (NSTimer*) _timer 
 {
if (assetWriterInput.readyForMoreMediaData) {
    // CMSampleBufferRef sample = nil;

    CVReturn cvErr = kCVReturnSuccess;

    // get screenshot image!
    CGImageRef image = (CGImageRef) [[self uiImageScreen] CGImage];
    NSLog (@"made screenshot");

    // prepare the pixel buffer
    CVPixelBufferRef pixelBuffer = NULL;
    CFDataRef imageData= CGDataProviderCopyData(CGImageGetDataProvider(image));
    NSLog (@"copied image data");
    cvErr = CVPixelBufferCreateWithBytes(kCFAllocatorDefault,
                                         FRAME_WIDTH,
                                         FRAME_HEIGHT,
                                         kCVPixelFormatType_32BGRA,
                                         (void*)CFDataGetBytePtr(imageData),
                                         CGImageGetBytesPerRow(image),
                                         NULL,
                                         NULL,
                                         NULL,
                                         &pixelBuffer);
    NSLog (@"CVPixelBufferCreateWithBytes returned %d", cvErr);

    // calculate the time
    CFAbsoluteTime thisFrameWallClockTime = CFAbsoluteTimeGetCurrent();
    CFTimeInterval elapsedTime = thisFrameWallClockTime - firstFrameWallClockTime;
    NSLog (@"elapsedTime: %f", elapsedTime);
    CMTime presentationTime =  CMTimeMake (elapsedTime * 600, 600);

    // write the sample
    BOOL appended = [assetWriterPixelBufferAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:presentationTime];

    if (appended) 
    {
        NSLog (@"appended sample at time %lf", CMTimeGetSeconds(presentationTime));
    } 
    else 
    {
        NSLog (@"failed to append");

    }
}
}

Then I call this method to create movie.

-(void)StartRecording
{
 NSString *moviePath = [[self pathToDocumentsDirectory] stringByAppendingPathComponent:OUTPUT_FILE_NAME];
if ([[NSFileManager defaultManager] fileExistsAtPath:moviePath]) {
    [[NSFileManager defaultManager] removeItemAtPath:moviePath error:nil];
}

NSURL *movieURL = [NSURL fileURLWithPath:moviePath];
NSLog(@"path=%@",movieURL);
NSError *movieError = nil;
[assetWriter release];
assetWriter = [[AVAssetWriter alloc] initWithURL:movieURL 
                                        fileType: AVFileTypeQuickTimeMovie 
                                           error: &movieError];
NSDictionary *assetWriterInputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                          AVVideoCodecH264, AVVideoCodecKey,
                                          [NSNumber numberWithInt:320], AVVideoWidthKey,
                                          [NSNumber numberWithInt:480], AVVideoHeightKey,
                                          nil];
assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType: AVMediaTypeVideo
                                                      outputSettings:assetWriterInputSettings];
assetWriterInput.expectsMediaDataInRealTime = YES;
[assetWriter addInput:assetWriterInput];

[assetWriterPixelBufferAdaptor release];
assetWriterPixelBufferAdaptor = [[AVAssetWriterInputPixelBufferAdaptor  alloc]
                                 initWithAssetWriterInput:assetWriterInput
                                 sourcePixelBufferAttributes:nil];
[assetWriter startWriting];

firstFrameWallClockTime = CFAbsoluteTimeGetCurrent();
[assetWriter startSessionAtSourceTime: CMTimeMake(0, 1000)];

// start writing samples to it
[assetWriterTimer release];
assetWriterTimer = [NSTimer scheduledTimerWithTimeInterval:0.1
                                                    target:self
                                                  selector:@selector (writeSample:)
                                                  userInfo:nil
                                                   repeats:YES] ;
}
ROMANIA_engineer
  • 54,432
  • 29
  • 203
  • 199
Ajay Chaudhary
  • 1,993
  • 15
  • 23
  • i am still waiting for answer... – Ajay Chaudhary Jul 13 '12 at 04:51
  • 1
    Please help me..I am new in ios. Is there any way to shortout this problem. – Ajay Chaudhary Jul 14 '12 at 05:00
  • Hey m also stuck in the same thing !! tell me did you sorted out your problm!! – Deepjyoti Roy Oct 15 '12 at 09:15
  • You are right, you are not getting all frames, probably because your grabbing method is slow. First thing I would do is to remove these NSLog lines. They slow down the code immensely. Another thing I would do is to create an array to work like a buffer for the screen shots an asynchronous method to read from that array in a second thread and write that to the stream. – Duck Jan 27 '13 at 21:49
  • 1
    try these links 1. http://www.binpress.com/app/ios-screen-capture-view/1038 2.https://github.com/gabriel/CaptureRecord – Raj Apr 03 '13 at 04:38
  • Please help me about this. I am still unable to solve out this problem – Ajay Chaudhary May 22 '13 at 07:34
  • Which API's have you been using until now ? – kiltek May 29 '13 at 07:13

1 Answers1

1

try this method....

if (![videoWriterInput isReadyForMoreMediaData]) {
    NSLog(@"Not ready for video data");
}
else {
    @synchronized (self) {
        UIImage* newFrame = [self.currentScreen retain];
        CVPixelBufferRef pixelBuffer = NULL;
        CGImageRef cgImage = CGImageCreateCopy([newFrame CGImage]);
        CFDataRef image = CGDataProviderCopyData(CGImageGetDataProvider(cgImage));

        int status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, avAdaptor.pixelBufferPool, &pixelBuffer);
        if(status != 0){
            //could not get a buffer from the pool
            NSLog(@"Error creating pixel buffer:  status=%d", status);
        }
        // set image data into pixel buffer
        CVPixelBufferLockBaseAddress( pixelBuffer, 0 );
        uint8_t* destPixels = CVPixelBufferGetBaseAddress(pixelBuffer);
        CFDataGetBytes(image, CFRangeMake(0, CFDataGetLength(image)), destPixels);  //XXX:  will work if the pixel buffer is contiguous and has the same bytesPerRow as the input data

        if(status == 0){
            BOOL success = [avAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:time];
            if (!success)
                NSLog(@"Warning:  Unable to write buffer to video");
        }

        //clean up
        [newFrame release];
        CVPixelBufferUnlockBaseAddress( pixelBuffer, 0 );
        CVPixelBufferRelease( pixelBuffer );
        CFRelease(image);
        CGImageRelease(cgImage);
    }

}
Sam
  • 21
  • 3