8

I'm currently extracting every frame from a video with AVAssetImageGenerator, but sometimes it returns me successively 2 times almost the same image (they do not have the same "frame time"). The funny thing is it always happen (in my test video) each 5 frames.

Here and here are the two images (open each in new tab then switch the tabs to see the differences).

Here's my code :

//setting up generator & compositor
self.generator = [AVAssetImageGenerator assetImageGeneratorWithAsset:asset];
generator.appliesPreferredTrackTransform = YES;
self.composition = [AVVideoComposition videoCompositionWithPropertiesOfAsset:asset];

NSTimeInterval duration = CMTimeGetSeconds(asset.duration);
NSTimeInterval frameDuration = CMTimeGetSeconds(composition.frameDuration);
CGFloat totalFrames = round(duration/frameDuration);

NSMutableArray * times = [NSMutableArray array];
for (int i=0; i<totalFrames; i++) {
    NSValue * time = [NSValue valueWithCMTime:CMTimeMakeWithSeconds(i*frameDuration, composition.frameDuration.timescale)];
    [times addObject:time];
}

AVAssetImageGeneratorCompletionHandler handler = ^(CMTime requestedTime, CGImageRef im, CMTime actualTime, AVAssetImageGeneratorResult result, NSError *error){
    // If actualTime is not equal to requestedTime image is ignored
    if(CMTimeCompare(actualTime, requestedTime) == 0) {
        if (result == AVAssetImageGeneratorSucceeded) {
            NSLog(@"%.02f     %.02f", CMTimeGetSeconds(requestedTime), CMTimeGetSeconds(actualTime));
            // Each log have differents actualTimes.
            // frame extraction is here...
        }
    }
};

generator.requestedTimeToleranceBefore = kCMTimeZero;
generator.requestedTimeToleranceAfter = kCMTimeZero;
[generator generateCGImagesAsynchronouslyForTimes:times completionHandler:handler];

Any idea where it could come from?

Martin
  • 11,881
  • 6
  • 64
  • 110
  • Hello dear Martin, now it's 2014 and I'm having the same issue of your... did you manage to find a solution ? Thanks in advance :) – Cesar Jun 12 '14 at 02:27

3 Answers3

17

Please see the following properties of AVAssetImageGenerator. You should set kCMTimeZero for both properties to get the exact frames.

/* The actual time of the generated images will be within the range [requestedTime-toleranceBefore, requestedTime+toleranceAfter] and may differ from the requested time for efficiency.
   Pass kCMTimeZero for both toleranceBefore and toleranceAfter to request frame-accurate image generation; this may incur additional decoding delay.
   Default is kCMTimePositiveInfinity. */
@property (nonatomic) CMTime requestedTimeToleranceBefore NS_AVAILABLE(10_7, 5_0);
@property (nonatomic) CMTime requestedTimeToleranceAfter NS_AVAILABLE(10_7, 5_0);

Before I set kCMTimeZero for both properties, I got some same images for different request time as you experienced. Just try the following code.

self.imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:myAsset];
self.imageGenerator.requestedTimeToleranceBefore = kCMTimeZero;
self.imageGenerator.requestedTimeToleranceAfter = kCMTimeZero;
alones
  • 2,848
  • 2
  • 27
  • 30
  • 1
    Please read more carefully my question before posting. Your proposition is already part of my code. – Martin Jun 24 '13 at 09:27
2

I was using a slightly different way for calculating the CMTime request, and it seemed to work. Here is the code (assuming iOS) :

-(void)extractImagesFromMovie {

// set the asset
    NSString* path = [[NSBundle mainBundle] pathForResource:@"myMovie" ofType:@"MOV"];
    NSURL* movURL = [NSURL fileURLWithPath:path];

NSMutableDictionary* myDict = [NSMutableDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithBool:YES] , 
                                 AVURLAssetPreferPreciseDurationAndTimingKey , 
                                [NSNumber numberWithInt:0],
                                AVURLAssetReferenceRestrictionsKey, nil];

AVURLAsset* movie = [[AVURLAsset alloc] initWithURL:movURL options:myDict];


// set the generator
AVAssetImageGenerator* generator = [[AVAssetImageGenerator assetImageGeneratorWithAsset:movie] retain];
generator.requestedTimeToleranceBefore = kCMTimeZero;
generator.requestedTimeToleranceAfter = kCMTimeZero;

// look for the video track
AVAssetTrack* videoTrack;
bool foundTrack = NO;

for (AVAssetTrack* track in movie.tracks) {

    if ([track.mediaType isEqualToString:@"vide"]) {
        if (foundTrack) {NSLog (@"Error - - - more than one video tracks"); return(-1);}
        else {
            videoTrack = track;
            foundTrack = YES;
        }
    }
}
if (foundTrack == NO) {NSLog (@"Error - - No Video Tracks at all"); return(-1);}

// set the number of frames in the movie
int frameRate = videoTrack.nominalFrameRate;
float value = movie.duration.value;
float timeScale = movie.duration.timescale;
float totalSeconds = value / timeScale;
int totalFrames = totalSeconds * frameRate;

NSLog (@"total frames %d", totalFrames);

int timeValuePerFrame = movie.duration.timescale / frameRate;

NSMutableArray* allFrames = [[NSMutableArray new] retain];

// get each frame
for (int k=0; k< totalFrames; k++) {

    int timeValue = timeValuePerFrame * k;
    CMTime frameTime;
    frameTime.value = timeValue;
    frameTime.timescale = movie.duration.timescale;
    frameTime.flags = movie.duration.flags;
    frameTime.epoch = movie.duration.epoch;

    CMTime gotTime;

    CGImageRef myRef = [generator copyCGImageAtTime:frameTime actualTime:&gotTime error:nil];
    [allFrames addObject:[UIImage imageWithCGImage:myRef]];

    if (gotTime.value != frameTime.value) NSLog (@"requested %lld got %lld for k %d", frameTime.value, gotTime.value, k)

}

NSLog (@"got %d images in the array", [allFrames count]);
// do something with images here...
}
Manish
  • 608
  • 1
  • 11
  • 23
  • Thanks for your answer, and welcome on SO. I'll try your answer the next time I on the project. – Martin Mar 07 '13 at 17:01
  • No, it does not work this way (same result). Please take in mind that you have some leaks in your code (`allFrames`, `myRef`). Moreover, the way you get the CMTime values and TimeScale is not the recommanded way (you should use `CMTimeGetSeconds`) – Martin Mar 12 '13 at 15:18
  • It happens **exaclty** 1 frame out of 5 in my test movie, a movie taken with the iphone itself (.mov). On other movies, it happens sometimes but not so often. Didn't tried on other video format because the app should only work on iphone movies. – Martin Mar 13 '13 at 08:27
2

I was having the same issue of you, but much more evident, the duplication was happening when the interval between the two frames was under 1.0 second and I realised it was depending on the timescale I was using for generating CMTime values.

Before

CMTime requestTime = CMTimeMakeWithSeconds(imageTime, 1);

After

CMTime requestTime = CMTimeMakeWithSeconds(imageTime, playerItem.asset.duration.timescale);

... and Boom, no more duplication :)

So maybe you can try to increase, double perhaps, the timescale, using your code:

NSValue * time = [NSValue valueWithCMTime:CMTimeMakeWithSeconds(i*frameDuration, composition.frameDuration.timescale*2)]; // *2 at the end

For future references here is my code:

    playerItem = [AVPlayerItem playerItemWithURL:item.movieUrl];
    imageGenerator = [[AVAssetImageGenerator alloc] initWithAsset:playerItem.asset];
    imageGenerator.requestedTimeToleranceAfter = kCMTimeZero;
    imageGenerator.requestedTimeToleranceBefore = kCMTimeZero;

    float duration = item.duration;
    float interval = item.interval;

    NSLog(@"\nItem info:\n%f \n%f", duration,interval);

    NSString *srcPath = nil;
    NSString *zipPath = nil;

    srcPath = [item.path stringByAppendingPathComponent:@"info.json"];
    zipPath = [NSString stringWithFormat:@"/%@/info.json",galleryID];

    [zip addFileToZip:srcPath newname:zipPath level:0];

    NSTimeInterval frameNum = item.duration / item.interval;
    for (int i=0; i<=frameNum; i++)
    {
        NSArray* cachePathArray = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
        NSString* cachePath = [cachePathArray lastObject];

        srcPath = [cachePath stringByAppendingPathComponent:@"export-tmp.jpg"];
        zipPath = [NSString stringWithFormat:@"/%@/%d.jpg",galleryID,i];

        float imageTime = ( i * interval );

        NSError *error = nil;
        CMTime requestTime = CMTimeMakeWithSeconds(imageTime, playerItem.asset.duration.timescale);
        CMTime actualTime;

        CGImageRef imageRef = [imageGenerator copyCGImageAtTime:requestTime actualTime:&actualTime error:&error];

        if (error == nil) {
            float req = ((float)requestTime.value/requestTime.timescale);
            float real = ((float)actualTime.value/actualTime.timescale);
            float diff = fabsf(req-real);

            NSLog(@"copyCGImageAtTime: %.2f, %.2f, %f",req,real,diff);
        }
        else
        {
            NSLog(@"copyCGImageAtTime: error: %@",error.localizedDescription);
        }



        // consider using CGImageDestination -> http://stackoverflow.com/questions/1320988/saving-cgimageref-to-a-png-file
        UIImage *img = [UIImage imageWithCGImage:imageRef];
        CGImageRelease(imageRef);  // CGImageRef won't be released by ARC



        [UIImageJPEGRepresentation(img, 100) writeToFile:srcPath atomically:YES];

        if (srcPath != nil && zipPath!= nil)
        {
            [zip addFileToZip:srcPath newname:zipPath level:0]; // 0 = no compression. everything is a jpg image
            unlink([srcPath UTF8String]);
        }
Cesar
  • 4,418
  • 2
  • 31
  • 37