
I've created a method that trims and exports videos based on a given time range. It also rotates the video to landscape.

For some reason, though, AVAssetExportSession fails when processing a video that was previously trimmed using UIVideoEditorController.

Has anyone encountered this issue before?

I get this error:

AVAssetExportSessionStatusFailed: Error Domain=AVFoundationErrorDomain Code=-11841 "The operation couldn’t be completed. (AVFoundationErrorDomain error -11841.)"

For this method:

- (void)trimVideoWithRange:(CMTimeRange)range fromInputURL:(NSURL *)inputURL withCompletionHandler:(void (^)(BOOL success, NSURL *outputURL))handler
{
    AVAsset *asset = [AVURLAsset assetWithURL:inputURL];

    AVAssetTrack *videoTrack = [asset tracksWithMediaType:AVMediaTypeVideo][0];
    AVAssetTrack *audioTrack = [asset tracksWithMediaType:AVMediaTypeAudio][0];

    NSLog(@"%@, %@, %@", asset, videoTrack, audioTrack);

    NSError *error;

    //  Create a composition and add the source video and audio tracks
    AVMutableComposition *composition = [AVMutableComposition composition];

    AVMutableCompositionTrack *videoCompositionTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];

    error = nil;
    [videoCompositionTrack insertTimeRange:videoTrack.timeRange ofTrack:videoTrack atTime:CMTimeMakeWithSeconds(0, NSEC_PER_SEC) error:&error];

    NSLog(@"videoCompositionTrack timeRange: %lld, %lld", videoCompositionTrack.timeRange.start.value, videoCompositionTrack.timeRange.duration.value);

    if (error)
        NSLog(@"videoCompositionTrack error: %@", error);

    AVMutableCompositionTrack *audioCompositionTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

    error = nil;
    [audioCompositionTrack insertTimeRange:audioTrack.timeRange ofTrack:audioTrack atTime:CMTimeMakeWithSeconds(0, NSEC_PER_SEC) error:&error];

    NSLog(@"audioCompositionTrack timeRange: %lld, %lld", audioCompositionTrack.timeRange.start.value, audioCompositionTrack.timeRange.duration.value);

    if (error)
        NSLog(@"audioCompositionTrack error: %@", error);

    //  Rotate video if needed
    CGAffineTransform rotationTransform = videoTrack.preferredTransform;

    //  Create video composition
    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.renderScale = 1.0;
    videoComposition.renderSize = videoTrack.naturalSize;
    videoComposition.frameDuration = CMTimeMake(1, 30);

    //  Apply the transform which may have been changed
    AVMutableVideoCompositionLayerInstruction *instruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
    [instruction setTransform:rotationTransform atTime:kCMTimeZero];

    //  Set the time range and layer instructions for the video composition
    AVMutableVideoCompositionInstruction *videoTrackInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    videoTrackInstruction.layerInstructions = [NSArray arrayWithObject:instruction];
    videoTrackInstruction.timeRange = range;

    videoComposition.instructions = @[videoTrackInstruction];

    //  Check that we can proceed with our desired output preset
    NSArray *compatiblePresets = [AVAssetExportSession exportPresetsCompatibleWithAsset:composition];

    if (![compatiblePresets containsObject:AVAssetExportPreset960x540])
    {
        //  Nope.
        if (handler)
            handler(NO, nil);

        return;
    }

    //  Create export session with composition
    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPreset960x540];

    //  Configure export session
    exportSession.outputURL = [NSURL fileURLWithPath:pathToTemporaryOutput];
    exportSession.outputFileType = AVFileTypeQuickTimeMovie;
    exportSession.videoComposition = videoComposition;
    exportSession.shouldOptimizeForNetworkUse = YES;

    //  Export asynchronously
    [exportSession exportAsynchronouslyWithCompletionHandler:^{

        switch ([exportSession status])
        {
            case AVAssetExportSessionStatusCompleted:
            {
                dispatch_async(dispatch_get_main_queue(), ^{

                    //  Everything OK. Execute completion block with URL to rendered video
                    if (handler)
                        handler(exportSession.status == AVAssetExportSessionStatusCompleted, [NSURL fileURLWithPath:pathToTemporaryOutput]);
                });
            }
                break;

            case AVAssetExportSessionStatusFailed:
            {
                NSError *exportError = exportSession.error;
                NSLog(@"AVAssetExportSessionStatusFailed: %@", exportError.description);

                dispatch_async(dispatch_get_main_queue(), ^{

                    //  No go. Execute handler with fail.
                    if (handler)
                        handler(NO, nil);
                });
            }
                break;

            default:
                //  Other statuses (cancelled, exporting, etc.) are not handled here
                break;
        }
    }];
}
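
For reference, code -11841 in AVFoundationErrorDomain is AVErrorInvalidVideoComposition, so the export session is rejecting the video composition itself rather than the input file. A minimal check along those lines, assuming it sits inside the AVAssetExportSessionStatusFailed branch of the completion handler above, could look like this:

// Sketch (assumption): runs inside the AVAssetExportSessionStatusFailed case above.
// AVErrorInvalidVideoComposition is the AVFoundation constant behind code -11841.
NSError *exportError = exportSession.error;

if ([exportError.domain isEqualToString:AVFoundationErrorDomain] &&
    exportError.code == AVErrorInvalidVideoComposition)
{
    NSLog(@"Export failed because the video composition was rejected: %@", exportError);
}
else
{
    NSLog(@"Export failed for another reason: %@", exportError);
}

That at least narrows the failure to the composition setup (for example, instructions that do not cover the composition's full duration) rather than to file access or preset compatibility.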
– David

1 Answer


This works for me. Here, exportSession is an AVAssetExportSession:

NSURL *videoFileUrl = [NSURL fileURLWithPath:self.originalVideoPath];

AVAsset *anAsset = [[AVURLAsset alloc] initWithURL:videoFileUrl options:nil];
NSArray *compatiblePresets = [AVAssetExportSession exportPresetsCompatibleWithAsset:anAsset];
if ([compatiblePresets containsObject:AVAssetExportPresetMediumQuality]) {

    // Note: the export session itself is created with the passthrough preset,
    // so the selected range is copied without re-encoding.
    self.exportSession = [[AVAssetExportSession alloc]
                          initWithAsset:anAsset presetName:AVAssetExportPresetPassthrough];
    // Implementation continues.

    NSURL *furl = [NSURL fileURLWithPath:self.tmpVideoPath];

    self.exportSession.outputURL = furl;
    // Provide the outputFileType according to the video format extension
    self.exportSession.outputFileType = AVFileTypeQuickTimeMovie;

    CMTime start = CMTimeMakeWithSeconds(self.startTime, anAsset.duration.timescale);
    CMTime duration = CMTimeMakeWithSeconds(self.stopTime-self.startTime, anAsset.duration.timescale);
    CMTimeRange range = CMTimeRangeMake(start, duration);
    self.exportSession.timeRange = range;

    self.btnTrim.hidden = YES;
    self.myActivityIndicator.hidden = NO;
    [self.myActivityIndicator startAnimating];
    [self.exportSession exportAsynchronouslyWithCompletionHandler:^{

        switch ([self.exportSession status]) {
            case AVAssetExportSessionStatusFailed:
                NSLog(@"Export failed: %@", [[self.exportSession error] localizedDescription]);
                break;
            case AVAssetExportSessionStatusCancelled:
                NSLog(@"Export canceled");
                break;
            default:
                NSLog(@"Triming Completed");
                dispatch_async(dispatch_get_main_queue(), ^{
                    [self.myActivityIndicator stopAnimating];
                    self.myActivityIndicator.hidden = YES;
                });

                break;
        }
    }];

}
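
If the trim needs to come back through a completion block like the method in the question, the passthrough export can be wrapped in a small helper. This is only a sketch, not part of the original answer: the method name, the outputPath parameter, and the minimal error handling are illustrative.

// Illustrative sketch: a block-based wrapper around a passthrough trim.
// The method name and the outputPath parameter are hypothetical.
- (void)trimAssetAtURL:(NSURL *)inputURL
                 range:(CMTimeRange)range
          toOutputPath:(NSString *)outputPath
     completionHandler:(void (^)(BOOL success, NSURL *outputURL))handler
{
    AVAsset *asset = [AVURLAsset assetWithURL:inputURL];

    // Passthrough copies the selected time range without re-encoding,
    // so no video composition is involved at all.
    AVAssetExportSession *session = [[AVAssetExportSession alloc] initWithAsset:asset
                                                                     presetName:AVAssetExportPresetPassthrough];
    session.outputURL = [NSURL fileURLWithPath:outputPath];
    session.outputFileType = AVFileTypeQuickTimeMovie;
    session.timeRange = range;

    [session exportAsynchronouslyWithCompletionHandler:^{
        BOOL success = (session.status == AVAssetExportSessionStatusCompleted);

        dispatch_async(dispatch_get_main_queue(), ^{
            if (handler)
                handler(success, success ? session.outputURL : nil);
        });
    }];
}

Keep in mind that passthrough skips re-rendering, so the rotation from the question would still need a second export pass with a video composition applied.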
– Paresh Navadiya
    I am facing the same problem asked above, but I am assigning the videoComposition property of the export session, and because of that my export session is crashing. Do you know the probable reason why the export session might crash? Thanks in advance! – Parvez Belim Dec 06 '13 at 06:23