
I am trying to combine several video clips into one using AVFoundation. I can merge the clips into a single video with an AVMutableComposition using the code below:

AVMutableComposition *composition = [AVMutableComposition composition];

AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];

AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

CMTime startTime = kCMTimeZero;

/* videoClipPaths is an array of paths to the recorded video clips */

//for loop to combine clips into a single video
for (NSInteger i=0; i < [videoClipPaths count]; i++) {

    NSString *path = (NSString*)[videoClipPaths objectAtIndex:i];

    NSURL *url = [[NSURL alloc] initFileURLWithPath:path];

    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:nil];
    [url release];

    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    AVAssetTrack *audioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];

    //set the orientation
    if(i == 0)
    {
        [compositionVideoTrack setPreferredTransform:videoTrack.preferredTransform];
    }

    BOOL ok = [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [asset duration]) ofTrack:videoTrack atTime:startTime error:nil];
    ok = [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [asset duration]) ofTrack:audioTrack atTime:startTime error:nil];

    startTime = CMTimeAdd(startTime, [asset duration]);
}

//export the combined video
NSString *combinedPath = /* path of the combined video*/;

NSURL *url = [[NSURL alloc] initFileURLWithPath: combinedPath];

AVAssetExportSession *exporter = [[[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPreset640x480] autorelease];

exporter.outputURL = url;
[url release];

exporter.outputFileType = [[exporter supportedFileTypes] objectAtIndex:0];

[exporter exportAsynchronouslyWithCompletionHandler:^(void){[self combineVideoFinished:exporter.outputURL status:exporter.status error:exporter.error];}];

The code above works fine if all the video clips were recorded in the same orientation (portrait or landscape). However, if I have a mixture of orientations in the clips, the final video will have part of it rotated 90 degrees to the right (or left).
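
For reference, I can see each clip's rotation by reading its track's preferredTransform. Here is a small debugging helper I use (the helper itself is my own and not part of the code above):

// Debug helper: log the rotation encoded in a track's preferredTransform.
// For a rotation matrix, a = cos(angle) and b = sin(angle), so atan2(b, a)
// recovers the rotation angle.
static void LogTrackRotation(AVAssetTrack *track)
{
    CGAffineTransform t = track.preferredTransform;
    CGFloat degrees = atan2(t.b, t.a) * 180.0 / M_PI;
    NSLog(@"track rotation: %.0f degrees", degrees);
}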

I was wondering if there is a way to transform all clips to the same orientation (e.g. the orientation of the first clip) while composing them. From what I read in the Xcode documentation, AVMutableVideoCompositionLayerInstruction seems like it can be used to transform an AVAsset, but I am not sure how to create and apply several different layer instructions to the corresponding clips and then use them in the composition (AVMutableComposition*).

Any help would be appreciated!

Song

2 Answers


This is what I do. I then use an AVAssetExportSession to create the actual file (a rough sketch of that export step follows the code below). But I warn you: the CGAffineTransforms are sometimes applied late, so you'll see a frame or two of the original orientation before the video transforms. I have no clue why this happens; a different combination of videos will yield the expected result, but sometimes it's off.

AVMutableComposition *composition = [AVMutableComposition composition];    
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition]; 
videoComposition.frameDuration = CMTimeMake(1,30); 
videoComposition.renderScale = 1.0;

AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];

// Get only the paths the user selected
NSMutableArray *array = [NSMutableArray array];
for (NSString *string in videoPathArray) {
    if (![string isEqualToString:@""]) {
        [array addObject:string];
    }
}

self.videoPathArray = array;

float time = 0;

for (int i = 0; i<self.videoPathArray.count; i++) {

    AVURLAsset *sourceAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:[videoPathArray objectAtIndex:i]] options:[NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey]];

    NSError *error = nil;

    BOOL ok = NO;
    AVAssetTrack *sourceVideoTrack = [[sourceAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

    CGSize temp = CGSizeApplyAffineTransform(sourceVideoTrack.naturalSize, sourceVideoTrack.preferredTransform);
    CGSize size = CGSizeMake(fabsf(temp.width), fabsf(temp.height));
    CGAffineTransform transform = sourceVideoTrack.preferredTransform;

    videoComposition.renderSize = sourceVideoTrack.naturalSize;
    if (size.width > size.height) {
        // Landscape clip: apply its preferred transform as-is.
        [layerInstruction setTransform:transform atTime:CMTimeMakeWithSeconds(time, 30)];
    } else {
        // Portrait clip: scale it down (s < 1) to fit the landscape render
        // size, then shift it horizontally by x to center it.
        float s = size.width/size.height;

        CGAffineTransform scaled = CGAffineTransformConcat(transform, CGAffineTransformMakeScale(s, s));

        float x = (size.height - size.width*s)/2;

        CGAffineTransform centered = CGAffineTransformConcat(scaled, CGAffineTransformMakeTranslation(x, 0));

        [layerInstruction setTransform:centered atTime:CMTimeMakeWithSeconds(time, 30)];
    }

    ok = [compositionVideoTrack insertTimeRange:sourceVideoTrack.timeRange ofTrack:sourceVideoTrack atTime:[composition duration] error:&error];

    if (!ok) {
        // Deal with the error.
        NSLog(@"something went wrong");
    }

    NSLog(@"\n source asset duration is %f \n source vid track timerange is %f %f \n composition duration is %f \n composition vid track time range is %f %f",CMTimeGetSeconds([sourceAsset duration]), CMTimeGetSeconds(sourceVideoTrack.timeRange.start),CMTimeGetSeconds(sourceVideoTrack.timeRange.duration),CMTimeGetSeconds([composition duration]), CMTimeGetSeconds(compositionVideoTrack.timeRange.start),CMTimeGetSeconds(compositionVideoTrack.timeRange.duration));

    time += CMTimeGetSeconds(sourceVideoTrack.timeRange.duration);
}

instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
instruction.timeRange = compositionVideoTrack.timeRange; 
videoComposition.instructions = [NSArray arrayWithObject:instruction];
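
The export step I mentioned at the top looks roughly like this (a sketch; the preset, file type, and outputPath here are placeholders, not necessarily what I used). The key point is that the video composition is attached to the export session through its videoComposition property:

// Sketch of the export step; `outputPath` is a placeholder for your destination.
AVAssetExportSession *exporter = [[[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetMediumQuality] autorelease];
exporter.videoComposition = videoComposition; // this is what applies the layer instructions
exporter.outputURL = [NSURL fileURLWithPath:outputPath];
exporter.outputFileType = AVFileTypeQuickTimeMovie;
[exporter exportAsynchronouslyWithCompletionHandler:^{
    if (exporter.status == AVAssetExportSessionStatusCompleted) {
        NSLog(@"export finished: %@", exporter.outputURL);
    } else {
        NSLog(@"export failed: %@", exporter.error);
    }
}];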


bogardon
  • It seems to work; however, I didn't encounter the problem of the transforms being applied late – Song Jul 19 '11 at 04:48
  • Weird. I'm not trying to merge two files, I'm just trying to get an AVAssetExportSession to maintain the video orientation. You are supposed to just be able to call `[compositionVideoTrack setPreferredTransform:transform]` but it doesn't work. Using your method didn't work for me either. But using *both* did work. Sounds like a framework bug. I am also using the export session setting `AVAssetExportPresetPassthrough`. – Dex Mar 07 '12 at 08:11
  • I'm also having very strange issues with the timing. More often than not, the last second or so of the video gets cut off. – Dex Mar 07 '12 at 08:20
  • @Dex, same for me in terms of setting both transforms (not the timing issue). – kevlar Mar 30 '12 at 05:20
  • @bogardon When I merge one portrait video and one landscape video, I don't see any transformation in the merged video; the clips keep their original orientations. Could you please tell me how to transform the portrait video to landscape? – Vinod ram Feb 25 '13 at 09:44
  • Does it mix the two videos or concatenate them? – Allan Sep 23 '15 at 13:59
  • @bogardon: Can you explain how the composition and the video composition go together? Do you somehow attach the video composition to the composition itself? – Julian F. Weinert Oct 23 '15 at 07:58
  • Thanks for the answer, +1 – Salman Khakwani Jun 17 '16 at 10:48
  • The videos lose their audio with this solution; should we add an audio AVMutableCompositionTrack? Could you show how it can be done, please? – B K Feb 14 '17 at 12:15

Here is @bogardon's answer in Swift 4+:

import ARKit

class ARKitSampleViewController: UIViewController {
    var label: UILabel?
    var planeFound = false

    func plane(from anchor: ARPlaneAnchor?) -> SCNNode? {
        let plane = SCNPlane(width: CGFloat(anchor?.extent.x ?? 0.0), height: CGFloat(anchor?.extent.z ?? 0.0))

        plane.firstMaterial?.diffuse.contents = UIColor.clear
        let planeNode = SCNNode(geometry: plane)
        planeNode.position = SCNVector3Make(anchor?.center.x ?? 0.0, 0, anchor?.center.z ?? 0.0)
        // SCNPlanes are vertically oriented in their local coordinate space.
        // Rotate it to match the horizontal orientation of the ARPlaneAnchor.
        planeNode.transform = SCNMatrix4MakeRotation(-.pi * 0.5, 1, 0, 0)

        return planeNode
    }

// MARK: -  ARSCNViewDelegate
    func renderer(_ renderer: SCNSceneRenderer, didAdd node: SCNNode, for anchor: ARAnchor) {
        if planeFound == false {
            if (anchor is ARPlaneAnchor) {
                DispatchQueue.main.async(execute: {
                    self.planeFound = true
                    self.label?.text = "DANCEFLOOR FOUND. LET'S BOOGIE"

                    let overlay = UIView(frame: self.view.frame)
                    overlay.backgroundColor = UIColor.black
                    overlay.alpha = 0
                    if let label = self.label {
                        self.view.insertSubview(overlay, belowSubview: label)
                    }

                    UIView.animate(withDuration: 1.5, delay: 2, options: .curveEaseIn, animations: {
                        self.label?.alpha = 0
                        overlay.alpha = 0.5
                    }) { finished in
                        let planeAnchor = anchor as? ARPlaneAnchor
                        // Show the disco ball here
                    }
                })
            }
        }
    }
}
  • I think you've pasted the wrong code. The code you pasted is… nothing to do with @bogardon's answer – p10ben Aug 04 '19 at 07:46