
I'm attempting to combine an image and a video. I have them combining and exporting, however the result is rotated sideways.

Sorry for the bulk code paste. I've seen answers about applying a transform to compositionVideoTrack.preferredTransform, but that does nothing. Adding it to an AVMutableVideoCompositionInstruction does nothing either.

I feel like this is the area where things start to go wrong, right here:

// I feel like this loading here is the problem
        let videoTrack = videoAsset.tracksWithMediaType(AVMediaTypeVideo)[0]

        // because it makes our parentLayer and videoLayer sizes wrong
        let videoSize       = videoTrack.naturalSize

        // this is returning 1920x1080, so the video is being treated as landscape
        print("\(videoSize.width) , \(videoSize.height)")

So by this point our frame sizes are wrong for the rest of the method. Now when we go to create the overlay image layer, its frame is not correct:

    let aLayer = CALayer()
    aLayer.contents = UIImage(named: "OverlayTestImageOverlay")?.CGImage
    aLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height)
    aLayer.opacity = 1

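As an aside, one way to check the size the track should actually be displayed at is to run its naturalSize through its preferredTransform. This is only a small sketch, reusing the videoAsset/videoTrack names from the snippet above; CGSizeApplyAffineTransform ignores the translation part of the transform, so the result just needs its signs stripped:

    // Sketch: apply the track's preferredTransform to its naturalSize to get
    // the size the video is meant to be displayed at.
    let videoTrack  = videoAsset.tracksWithMediaType(AVMediaTypeVideo)[0]
    let transformed = CGSizeApplyAffineTransform(videoTrack.naturalSize, videoTrack.preferredTransform)
    let displaySize = CGSizeMake(abs(transformed.width), abs(transformed.height))

    // prints "1080.0 , 1920.0" for a clip recorded in portrait
    print("\(displaySize.width) , \(displaySize.height)")
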
Here is my complete method.

  func combineImageVid() {

        let path = NSBundle.mainBundle().pathForResource("SampleMovie", ofType:"MOV")
        let fileURL = NSURL(fileURLWithPath: path!)

        let videoAsset = AVURLAsset(URL: fileURL)
        let mixComposition = AVMutableComposition()

        let compositionVideoTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)

        var clipVideoTrack = videoAsset.tracksWithMediaType(AVMediaTypeVideo)

        do {
            try compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), ofTrack: clipVideoTrack[0], atTime: kCMTimeZero)
        }
        catch _ {
            print("failed to insertTimeRange")
        }


        compositionVideoTrack.preferredTransform = videoAsset.preferredTransform

        // I feel like this loading here is the problem
        let videoTrack = videoAsset.tracksWithMediaType(AVMediaTypeVideo)[0]

        // because it makes our parentLayer and videoLayer sizes wrong
        let videoSize       = videoTrack.naturalSize

        // this is returning 1920x1080, so the video is being treated as landscape
        print("\(videoSize.width) , \(videoSize.height)")

        let aLayer = CALayer()
        aLayer.contents = UIImage(named: "OverlayTestImageOverlay")?.CGImage
        aLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height)
        aLayer.opacity = 1


        let parentLayer     = CALayer()
        let videoLayer      = CALayer()

        parentLayer.frame   = CGRectMake(0, 0, videoSize.width, videoSize.height)
        videoLayer.frame    = CGRectMake(0, 0, videoSize.width, videoSize.height)

        parentLayer.addSublayer(videoLayer)
        parentLayer.addSublayer(aLayer)


        let videoComp = AVMutableVideoComposition()
        videoComp.renderSize = videoSize
        videoComp.frameDuration = CMTimeMake(1, 30)
        videoComp.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, inLayer: parentLayer)

        let instruction = AVMutableVideoCompositionInstruction()

        instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration)

        let mixVideoTrack = mixComposition.tracksWithMediaType(AVMediaTypeVideo)[0]
        mixVideoTrack.preferredTransform = CGAffineTransformMakeRotation(CGFloat(M_PI * 90.0 / 180))

        let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: mixVideoTrack)
        instruction.layerInstructions = [layerInstruction]
        videoComp.instructions = [instruction]


        //  create new file to receive data
        let dirPaths = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)
        let docsDir: AnyObject = dirPaths[0]
        let movieFilePath = docsDir.stringByAppendingPathComponent("result.mov")
        let movieDestinationUrl = NSURL(fileURLWithPath: movieFilePath)

        do {
            try NSFileManager.defaultManager().removeItemAtPath(movieFilePath)
        }
        catch _ {}


        // use AVAssetExportSession to export video
        let assetExport = AVAssetExportSession(asset: mixComposition, presetName:AVAssetExportPresetHighestQuality)
        assetExport?.videoComposition = videoComp
        assetExport!.outputFileType = AVFileTypeQuickTimeMovie
        assetExport!.outputURL = movieDestinationUrl
        assetExport!.exportAsynchronouslyWithCompletionHandler({
            switch assetExport!.status{
            case  AVAssetExportSessionStatus.Failed:
                print("failed \(assetExport!.error)")
            case AVAssetExportSessionStatus.Cancelled:
                print("cancelled \(assetExport!.error)")
            default:
                print("Movie complete")


                // play video
                NSOperationQueue.mainQueue().addOperationWithBlock({ () -> Void in
                    print(movieDestinationUrl)
                })
            }
        })
    }

This is what I'm getting exported: [screenshot: the exported video is rotated sideways]


I tried adding these two methods in order to rotate the video:

class func videoCompositionInstructionForTrack(track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction {

    let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)

    let assetTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0]

    let transform = assetTrack.preferredTransform
    let assetInfo = orientationFromTransform(transform)
    var scaleToFitRatio = UIScreen.mainScreen().bounds.width / assetTrack.naturalSize.width

    if assetInfo.isPortrait {

        scaleToFitRatio = UIScreen.mainScreen().bounds.width / assetTrack.naturalSize.height
        let scaleFactor = CGAffineTransformMakeScale(scaleToFitRatio, scaleToFitRatio)
        instruction.setTransform(CGAffineTransformConcat(assetTrack.preferredTransform, scaleFactor),
            atTime: kCMTimeZero)
    } else {

        let scaleFactor = CGAffineTransformMakeScale(scaleToFitRatio, scaleToFitRatio)
        var concat = CGAffineTransformConcat(CGAffineTransformConcat(assetTrack.preferredTransform, scaleFactor), CGAffineTransformMakeTranslation(0, UIScreen.mainScreen().bounds.width / 2))
        if assetInfo.orientation == .Down {
            let fixUpsideDown = CGAffineTransformMakeRotation(CGFloat(M_PI))
            let windowBounds = UIScreen.mainScreen().bounds
            let yFix = assetTrack.naturalSize.height + windowBounds.height
            let centerFix = CGAffineTransformMakeTranslation(assetTrack.naturalSize.width, yFix)
            concat = CGAffineTransformConcat(CGAffineTransformConcat(fixUpsideDown, centerFix), scaleFactor)
        }
        instruction.setTransform(concat, atTime: kCMTimeZero)
    }

    return instruction
}

class func orientationFromTransform(transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) {
    var assetOrientation = UIImageOrientation.Up
    var isPortrait = false
    if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
        assetOrientation = .Right
        isPortrait = true
    } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
        assetOrientation = .Left
        isPortrait = true
    } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
        assetOrientation = .Up
    } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
        assetOrientation = .Down
    }
    return (assetOrientation, isPortrait)
}

Then I updated my combineImageVid() method, adding this in:

let instruction = AVMutableVideoCompositionInstruction()

instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration)

let mixVideoTrack = mixComposition.tracksWithMediaType(AVMediaTypeVideo)[0]

//let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: mixVideoTrack)
//layerInstruction.setTransform(videoAsset.preferredTransform, atTime: kCMTimeZero)

let layerInstruction = videoCompositionInstructionForTrack(compositionVideoTrack, asset: videoAsset)

Which gives me this output:

[screenshot: the video is now rotated, but a large black box covers part of the frame]

So I'm getting closer; however, I feel that because the track is being loaded the wrong way to begin with, I need to address the issue there. Also, I don't know why the huge black box is there now. I thought maybe it was due to my image layer taking the bounds of the loaded video asset here:

aLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height)

However, changing that to some small width/height doesn't make a difference. I then thought about adding a crop rect to get rid of the black square, but that didn't work either :(


Following Allen's suggestion, I stopped using these two methods:

class func videoCompositionInstructionForTrack(track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction

class func orientationFromTransform(transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) 

and instead updated my original method to look like this:

videoLayer.frame    = CGRectMake(0, 0, videoSize.height, videoSize.width) // notice the switched width and height
...
videoComp.renderSize = CGSizeMake(videoSize.height, videoSize.width) // this makes the final video portrait
...
layerInstruction.setTransform(videoTrack.preferredTransform, atTime: kCMTimeZero) // important: this lets the composition know you want to rotate the original video in the output

We are getting really close; however, the problem now seems to be editing the renderSize. If I change it to anything other than the landscape size, I get this:

[GIF: the output when renderSize is set to anything other than the landscape size]

  • This link here might help you http://stackoverflow.com/questions/10034337/how-to-export-video-asset-via-avassetexportsession-in-portrait-mode – super handsum Feb 09 '16 at 03:54
  • I've tried that as well :( I appreciate the suggestion though. – random Feb 09 '16 at 14:58
  • Can you try changing this in the combineImageVid() method: compositionVideoTrack.preferredTransform = CGAffineTransformMakeRotation(M_PI_2); – Evol Gate Feb 12 '16 at 05:08
  • @AnkitGupta that doesn't help at all :( it never seems to respect that setting – random Feb 12 '16 at 22:00

2 Answers


Here is Apple's documentation on video orientation:

https://developer.apple.com/library/ios/qa/qa1744/_index.html

If your original video was shot in portrait mode on iOS, its natural size will still be landscape, but the .mov file carries rotation metadata. In order to rotate your video, you need to make the following changes to your first piece of code:

videoLayer.frame    = CGRectMake(0, 0, videoSize.height, videoSize.width) // notice the switched width and height
...
videoComp.renderSize = CGSizeMake(videoSize.height, videoSize.width) // this makes the final video portrait
...
layerInstruction.setTransform(videoTrack.preferredTransform, atTime: kCMTimeZero) // important: this lets the composition know you want to rotate the original video in the output

Yes, you are really close!
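Putting the three changes together, the relevant portion of combineImageVid() might look roughly like the sketch below. It reuses the question's variable names; the overlay and parent layer frames are also switched to match the portrait render size, which the "..." above leaves implicit:

    // Sketch of the adjusted middle section of combineImageVid()
    let videoTrack = videoAsset.tracksWithMediaType(AVMediaTypeVideo)[0]
    let videoSize  = videoTrack.naturalSize // still 1920 x 1080 for a portrait clip

    let aLayer = CALayer()
    aLayer.contents = UIImage(named: "OverlayTestImageOverlay")?.CGImage
    aLayer.frame = CGRectMake(0, 0, videoSize.height, videoSize.width) // portrait frame
    aLayer.opacity = 1

    let parentLayer = CALayer()
    let videoLayer  = CALayer()
    parentLayer.frame = CGRectMake(0, 0, videoSize.height, videoSize.width) // switched width and height
    videoLayer.frame  = CGRectMake(0, 0, videoSize.height, videoSize.width)
    parentLayer.addSublayer(videoLayer)
    parentLayer.addSublayer(aLayer)

    let videoComp = AVMutableVideoComposition()
    videoComp.renderSize = CGSizeMake(videoSize.height, videoSize.width) // portrait render size
    videoComp.frameDuration = CMTimeMake(1, 30)
    videoComp.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, inLayer: parentLayer)

    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration)

    let mixVideoTrack = mixComposition.tracksWithMediaType(AVMediaTypeVideo)[0]
    let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: mixVideoTrack)
    // let the composition know you want to rotate the original video in the output
    layerInstruction.setTransform(videoTrack.preferredTransform, atTime: kCMTimeZero)

    instruction.layerInstructions = [layerInstruction]
    videoComp.instructions = [instruction]
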

Allen
  • I appreciate the help! We are getting really close, however there seems to be an issue when changing the `renderSize`. I've updated my question with a .gif of what's going on. – random Feb 12 '16 at 22:24
  • @random, are you running it in the simulator? If so, don't: run it on a real iOS device; the simulator can't handle the transform well. – Allen Feb 12 '16 at 23:35
  • yup, running it in the simulator was the problem! You're awesome, thank you so much for the help!! – random Feb 16 '16 at 22:00
  • I tried this code and it works with the back camera, but with the front camera I'm still getting the same result: the video is in the right orientation but cut in half. – Alvin John May 06 '17 at 06:54
  • Check this link for the front camera: https://stackoverflow.com/questions/43818429/avassetexportsession-wrong-orientation-in-front-camera?noredirect=1&lq=1 – Dania Delbani Jan 30 '18 at 14:10
  • if self.currentCamera == .front { movieFileOutputConnection?.isVideoMirrored = false } – Dania Delbani Jan 31 '18 at 07:45
  • @Dania Yes, but how can we do this with a mirrored front camera? – thelearner Jun 16 '18 at 08:12

Maybe you should check the videoTrack's preferredTransform so you can give the composition an exact renderSize and transform:

    CGAffineTransform transform = assetVideoTrack.preferredTransform;
    CGFloat rotation = [self rotationWithTransform:transform];

    // if the track has been rotated
    if (rotation != 0)
    {
        // and the rotation is not a full 360°
        // (valueOfError is a small tolerance defined elsewhere)
        if (fabs(rotation - M_PI * 2) >= valueOfError) {

            CGFloat m = rotation / M_PI;
            CGAffineTransform t1 = CGAffineTransformIdentity;
            // rotation is 90° or 270°
            if (fabs(m - 1/2.0) < valueOfError || fabs(m - 3/2.0) < valueOfError) {
                self.mutableVideoComposition.renderSize = CGSizeMake(assetVideoTrack.naturalSize.height, assetVideoTrack.naturalSize.width);
                t1 = CGAffineTransformMakeTranslation(assetVideoTrack.naturalSize.height, 0);
            }
            // rotation is 180°
            if (fabs(m - 1.0) < valueOfError) {
                t1 = CGAffineTransformMakeTranslation(assetVideoTrack.naturalSize.width, assetVideoTrack.naturalSize.height);
            }
            CGAffineTransform t2 = CGAffineTransformRotate(t1, rotation);
            [passThroughLayer setTransform:t2 atTime:kCMTimeZero];
        }
    }

    // extract the rotation angle (in radians) from a transform
    - (CGFloat)rotationWithTransform:(CGAffineTransform)t
    {
        return atan2f(t.b, t.a);
    }
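For anyone working in Swift like the asker, a rough Swift 2 equivalent of the same idea might look like this. It is only a sketch: it reuses the videoTrack, videoComp and layerInstruction names from the question's combineImageVid(), and the 0.01 tolerance is an arbitrary stand-in for valueOfError:

    // Sketch: read the rotation out of preferredTransform, then pick a matching
    // renderSize and layer-instruction transform.
    let transform   = videoTrack.preferredTransform
    let rotation    = CGFloat(atan2f(Float(transform.b), Float(transform.a)))
    let tolerance: CGFloat = 0.01
    let naturalSize = videoTrack.naturalSize

    if abs(rotation - CGFloat(M_PI_2)) < tolerance {
        // rotated 90° (typical portrait capture): render portrait and translate
        // so the rotated frame lands inside the render area
        videoComp.renderSize = CGSizeMake(naturalSize.height, naturalSize.width)
        let translate = CGAffineTransformMakeTranslation(naturalSize.height, 0)
        layerInstruction.setTransform(CGAffineTransformRotate(translate, rotation), atTime: kCMTimeZero)
    } else if abs(abs(rotation) - CGFloat(M_PI)) < tolerance {
        // rotated 180°
        let translate = CGAffineTransformMakeTranslation(naturalSize.width, naturalSize.height)
        layerInstruction.setTransform(CGAffineTransformRotate(translate, rotation), atTime: kCMTimeZero)
    } else {
        // no rotation (or an orientation this sketch doesn't handle): keep the track's transform
        layerInstruction.setTransform(transform, atTime: kCMTimeZero)
    }
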
JonphyChen