My application merges two videos.

I am using the following code to merge the videos with AVVideoComposition:

- (void)buildSequenceComposition:(AVMutableComposition *)mixComposition andVideoComposition:(AVMutableVideoComposition *)videoComposition withAudioMix:(AVMutableAudioMix *)audioMix
{
    CMTime nextClipStartTime = kCMTimeZero;
    NSInteger i;

    // No transitions: append each clip on its own video and audio track in the composition.
    AVMutableVideoCompositionInstruction *MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    NSMutableArray *arrLayerInstruction = [NSMutableArray array];
    NSMutableArray *arrAudioMixParameters = [NSMutableArray array]; // one entry per clip, applied after the loop

    for (i = 0; i < [_clips count]; i++ )
    {
        AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        AVURLAsset *asset = [[_clips objectAtIndex:i] objectForKey:@"videoURL"];

        // Use each clip's full duration.
        CMTimeRange timeRangeInAsset = CMTimeRangeMake(kCMTimeZero, [asset duration]);

        AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
        NSError *err = nil;
        [compositionVideoTrack insertTimeRange:timeRangeInAsset ofTrack:clipVideoTrack atTime:nextClipStartTime error:&err];
        if (err)
        {
            NSLog(@"Could not insert video track: %@", err);
        }

        if ([[asset tracksWithMediaType:AVMediaTypeAudio] count] != 0)
        {
            AVAssetTrack *clipAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
            NSError *audioErr = nil;
            [compositionAudioTrack insertTimeRange:timeRangeInAsset ofTrack:clipAudioTrack atTime:nextClipStartTime error:&audioErr];
            if (audioErr)
            {
                NSLog(@"Could not insert audio track: %@", audioErr);
            }

            // Build the per-clip volume parameters against the composition's audio track.
            AVMutableAudioMixInputParameters *exportAudioMixInputParameters = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:compositionAudioTrack];
            [exportAudioMixInputParameters setVolume:[[[_clips objectAtIndex:i] objectForKey:@"videoSoundLevel"] floatValue] atTime:nextClipStartTime];
            // Accumulate instead of assigning audioMix.inputParameters here,
            // which would overwrite the parameters of every earlier clip.
            [arrAudioMixParameters addObject:exportAudioMixInputParameters];

        }
        // Fix orientation using the source track's preferred transform.
        AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];

        UIImageOrientation FirstAssetOrientation_ = UIImageOrientationUp;
        BOOL isFirstAssetPortrait_ = NO;
        CGAffineTransform firstTransform = clipVideoTrack.preferredTransform;

        // Infer the recorded orientation from the track's preferred transform.
        if (firstTransform.a == 0 && firstTransform.b == 1.0 && firstTransform.c == -1.0 && firstTransform.d == 0)
        {
            FirstAssetOrientation_ = UIImageOrientationRight;   // portrait
            isFirstAssetPortrait_ = YES;
        }
        else if (firstTransform.a == 0 && firstTransform.b == -1.0 && firstTransform.c == 1.0 && firstTransform.d == 0)
        {
            FirstAssetOrientation_ = UIImageOrientationLeft;    // portrait, upside down
            isFirstAssetPortrait_ = YES;
        }
        else if (firstTransform.a == 1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == 1.0)
        {
            FirstAssetOrientation_ = UIImageOrientationUp;      // landscape
        }
        else if (firstTransform.a == -1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == -1.0)
        {
            FirstAssetOrientation_ = UIImageOrientationDown;    // landscape, upside down
        }

        CGFloat tHeight = [clipVideoTrack naturalSize].height;
        CGFloat tWidth = [clipVideoTrack naturalSize].width;

        if (isFirstAssetPortrait_)
        {
            // naturalSize reports the encoded (landscape) dimensions, so swap them for portrait clips.
            CGFloat temp = tHeight;
            tHeight = tWidth;
            tWidth = temp;
        }

        // Aspect-fit: scale by the smaller of the two ratios so the whole frame stays visible.
        CGFloat FirstAssetScaleToFitRatioWidth = [mixComposition naturalSize].width / tWidth;
        CGFloat FirstAssetScaleToFitRatioHeight = [mixComposition naturalSize].height / tHeight;
        CGFloat FirstAssetScaleToFitRatio = MIN(FirstAssetScaleToFitRatioWidth, FirstAssetScaleToFitRatioHeight);
        CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio, FirstAssetScaleToFitRatio);
        CGSize naturalSize = CGSizeApplyAffineTransform(CGSizeMake(tWidth, tHeight), FirstAssetScaleFactor);

        CGAffineTransform transform = CGAffineTransformIdentity;
        CGSize translateSize = CGSizeMake(0, 0);

        if (FirstAssetScaleToFitRatioWidth < FirstAssetScaleToFitRatioHeight)
        {
            // Letterbox: center the scaled clip vertically.
            transform = CGAffineTransformMakeTranslation(0, ([mixComposition naturalSize].height - naturalSize.height) / 2);
            translateSize.height = ([mixComposition naturalSize].height - naturalSize.height) / 2;
        }
        else if (FirstAssetScaleToFitRatioWidth > FirstAssetScaleToFitRatioHeight)
        {
            // Pillarbox: center the scaled clip horizontally.
            transform = CGAffineTransformMakeTranslation(([mixComposition naturalSize].width - naturalSize.width) / 2, 0);
            translateSize.width = ([mixComposition naturalSize].width - naturalSize.width) / 2;
        }
        // When the ratios are equal the clip already fills the render frame; no translation is needed.

        // Apply the orientation fix, then the aspect-fit scale, then the centering translation.
        [FirstlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(clipVideoTrack.preferredTransform, FirstAssetScaleFactor), transform) atTime:kCMTimeZero];

        // Show this clip only during its own time range.
        [FirstlayerInstruction setOpacity:1.0 atTime:nextClipStartTime];
        [FirstlayerInstruction setOpacity:0.0 atTime:CMTimeAdd(nextClipStartTime, timeRangeInAsset.duration)];

        [arrLayerInstruction addObject:FirstlayerInstruction];
        nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRangeInAsset.duration);
    }
    MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, nextClipStartTime);
    MainInstruction.layerInstructions = arrLayerInstruction;
    videoComposition.instructions = [NSArray arrayWithObject:MainInstruction];
    audioMix.inputParameters = arrAudioMixParameters;
}
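
For context, here is a minimal sketch of how this method might be driven and exported. The render size, output URL, and preset below are assumptions, not part of the original code. Note that an AVMutableVideoComposition must have renderSize and frameDuration set before export, and the builder above scales against the composition's naturalSize, so all of these need sensible values:

AVMutableComposition *mixComposition = [AVMutableComposition composition];
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];

mixComposition.naturalSize = CGSizeMake(640.0, 480.0);      // assumed output canvas; the builder scales to this
videoComposition.renderSize = mixComposition.naturalSize;
videoComposition.frameDuration = CMTimeMake(1, 30);         // 30 fps

[self buildSequenceComposition:mixComposition andVideoComposition:videoComposition withAudioMix:audioMix];

AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
exporter.videoComposition = videoComposition;
exporter.audioMix = audioMix;
exporter.outputURL = outputURL;                             // hypothetical NSURL to a writable file path
exporter.outputFileType = AVFileTypeQuickTimeMovie;
[exporter exportAsynchronouslyWithCompletionHandler:^{
    if (exporter.status == AVAssetExportSessionStatusFailed)
    {
        NSLog(@"Export failed: %@", exporter.error);        // this is where Code=-11821 surfaces
    }
}];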

Although this works fine on iOS 7, exporting the video with AVVideoComposition on iOS 8 gives me the following error:

Error Domain=AVFoundationErrorDomain Code=-11821 "Cannot Decode" {NSLocalizedFailureReason=The media data could not be decoded. It may be damaged.}

It works fine on iOS 7 and earlier versions, but not on iOS 8.

I have also tried Apple's AVSampleEditor sample code, and it gives me the same error when exporting video on iOS 8.

Kindly help me solve this problem. Thanks.


1 Answer


Check this demo code. It is working for me.
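
(The linked demo is not reproduced here. For reference, a minimal sketch of one common alternative, assuming all clips share the same orientation: insert every clip into a single video/audio track pair instead of creating a new track pair per clip, which also removes the need for a video composition. The assets array below is hypothetical.)

AVMutableComposition *mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

CMTime cursor = kCMTimeZero;
for (AVURLAsset *asset in assets)   // hypothetical array of AVURLAssets
{
    CMTimeRange range = CMTimeRangeMake(kCMTimeZero, asset.duration);
    AVAssetTrack *v = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    [videoTrack insertTimeRange:range ofTrack:v atTime:cursor error:nil];

    AVAssetTrack *a = [[asset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    if (a)
    {
        [audioTrack insertTimeRange:range ofTrack:a atTime:cursor error:nil];
    }
    cursor = CMTimeAdd(cursor, asset.duration);
}
// Carry over the source orientation, then export mixComposition as usual.
videoTrack.preferredTransform = [[[assets firstObject] tracksWithMediaType:AVMediaTypeVideo] firstObject].preferredTransform;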

Pradip Vanparia
  • 1,742
  • 10
  • 22