2

The following code was working perfectly to add a logo and a text overlay to a video with AVVideoCompositionCoreAnimationTool. Then Swift 3 came! Now sometimes the exported video shows with the logo and text, and sometimes the video does not show at all when it is exported.

let videoComposition: AVMutableVideoComposition = AVMutableVideoComposition()

    // 60 fps output; the render size is a square (height × height) because the
    // source track is rotated into portrait below and cropped to a square.
    videoComposition.frameDuration = CMTimeMake(1, 60)
    videoComposition.renderSize = CGSize(width: clipVideoTrack.naturalSize.height, height: clipVideoTrack.naturalSize.height)


    let instruction: AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()

    // NOTE(review): range is hard-coded to 60 s at a 30 Hz timescale; for clips
    // longer than a minute consider CMTimeRangeMake(kCMTimeZero, asset.duration).
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 30))

    // transformer is applied to set the video in portrait otherwise it is rotated by 90 degrees
    let transformer: AVMutableVideoCompositionLayerInstruction =
        AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack)

    let t1: CGAffineTransform = CGAffineTransform(translationX: clipVideoTrack.naturalSize.height, y: -(clipVideoTrack.naturalSize.width - clipVideoTrack.naturalSize.height)/2)

    // CGFloat.pi replaces the deprecated C constant M_PI_2.
    let finalTransform: CGAffineTransform = t1.rotated(by: CGFloat.pi / 2)

    transformer.setTransform(finalTransform, at: kCMTimeZero)

    // Native Swift array literals instead of NSArray + force casts (`as!`),
    // which were an unnecessary crash risk in Swift 3.
    instruction.layerInstructions = [transformer]

    videoComposition.instructions = [instruction]



    // NOTE(review): mixComposition is assembled here but `asset` — not
    // mixComposition — is handed to the export session below, so this
    // composition is never actually exported. Confirm which is intended.
    let mixComposition = AVMutableComposition()
    let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)


    do {
        try compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, asset.duration), of: clipVideoTrack, at: kCMTimeZero)
    } catch {
        print(error)
    }


    // MARK: - Watermark (logo + title text) overlay layers

    // Optional chaining instead of force-unwrapping: a missing "logo" asset
    // now yields an empty layer rather than a crash.
    let logoLayer = CALayer()
    logoLayer.contents = UIImage(named: "logo")?.cgImage
    // Frame values are scaled from view coordinates to video pixels.
    logoLayer.frame = CGRect(x: (clipVideoTrack.naturalSize.height*(self.view.bounds.width-45))/self.view.bounds.width, y: (clipVideoTrack.naturalSize.height*(self.view.bounds.width-40))/self.view.bounds.width, width: (clipVideoTrack.naturalSize.height*40)/self.view.bounds.width, height: (clipVideoTrack.naturalSize.height*40)/self.view.bounds.width)

    let titleLayer = CATextLayer()
    titleLayer.string = "text"
    // The UIFont only selects the face; the point size is taken from fontSize.
    titleLayer.font = UIFont(name: "helvetica", size: 0)
    titleLayer.fontSize = clipVideoTrack.naturalSize.height/16
    titleLayer.shadowOpacity = 0.5
    titleLayer.alignmentMode = kCAAlignmentCenter
    titleLayer.frame = CGRect(x: 0, y: 0, width: clipVideoTrack.naturalSize.height, height: clipVideoTrack.naturalSize.height/6)
    titleLayer.display()


    // Parent layer hosts the video frames plus both overlay layers, all sized
    // to the square render size chosen above.
    let videoSize = asset.tracks(withMediaType: AVMediaTypeVideo)[0].naturalSize
    let parentLayer = CALayer()
    let videoLayer = CALayer()
    parentLayer.frame = CGRect(x: 0, y: 0, width: videoSize.height, height: videoSize.height)
    videoLayer.frame = CGRect(x: 0, y: 0, width: videoSize.height, height: videoSize.height)

    parentLayer.addSublayer(videoLayer)
    parentLayer.addSublayer(logoLayer)
    parentLayer.addSublayer(titleLayer)


    videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer)


    // Remove any previous export at the destination; a missing file is fine.
    do { try FileManager.default.removeItem(at: filePath) }
    catch let error as NSError {
        NSLog("\(error), \(error.localizedDescription)")
    }


    let exportUrl: URL = filePath
    self.videoUrl = filePath as NSURL


    // Guard instead of repeated force-unwraps: the failable initializer can
    // return nil (e.g. unsupported preset/asset combination).
    // NOTE(review): assumes the enclosing method returns Void — confirm.
    guard let exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetMediumQuality) else {
        NSLog("Could not create AVAssetExportSession")
        return
    }

    exporter.videoComposition = videoComposition
    exporter.outputFileType = AVFileTypeQuickTimeMovie
    exporter.outputURL = URL(fileURLWithPath: exportUrl.path)


    exporter.exportAsynchronously(completionHandler: {

        DispatchQueue.main.async {

            // Only attempt playback when the export actually succeeded;
            // previously a failed export was silently played (and showed
            // nothing), masking the error.
            guard exporter.status == .completed else {
                NSLog("Export failed: \(String(describing: exporter.error))")
                return
            }

            self.view.layer.addSublayer(self.avPlayerLayer)

            let item = AVPlayerItem(url: exportUrl)
            self.player.replaceCurrentItem(with: item)

            if (self.player.currentItem != nil) {
                print("Starting playback!")
                self.player.play()
            }

        }

    })

This was working flawlessly with the previous version of Swift, but with Swift 3 it is not working anymore.

PLEASE NOTE: if I comment out videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer) then the video is exported and plays successfully every time, but without any overlay.

Sam
  • 1,101
  • 2
  • 13
  • 26
  • what is the `transformer` in `NSArray(object: transformer)`? and have you tried running the code on iOS9 devices? – Rhythmic Fistman Oct 08 '16 at 02:21
  • @RhythmicFistman I edited the question regarding transformer. You made a good point about iOS9 devices, in fact I've just tried on iOS9 devices and it seems to work just fine. Why did you ask about iOS9 devices? Do you thing it's a iOS10 related issue? Thanks a lot :) – Sam Oct 11 '16 at 13:13
  • 1
    I asked because there seem to be bugs in iOS 10 with `AVAssetExportSession`s and video compositing. http://stackoverflow.com/a/39605744/22147 Apart from waiting for iOS 10.1 http://stackoverflow.com/a/39746140/22147, there are some workarounds: http://stackoverflow.com/a/39780044/22147 – Rhythmic Fistman Oct 11 '16 at 13:30
  • @RhythmicFistman Ok thanks a lot for helping out ! If you want to make an answer so I can mark it as the correct answer you are welcome. Thanks! :) – Sam Oct 11 '16 at 13:42
  • Is there any possible way to share full code ? I am looking for `clipVideoTrack` and `asset` – iOS.Lover Sep 04 '17 at 13:28

1 Answer

1

Seeing as the code works on iOS 9, this is probably a bug in iOS 10.0 where AVAssetExportSessions don't work properly when they have a videoComposition set.

Some have reported that things look better in the iOS 10.1 beta and others have worked around the problem.

Community
  • 1
  • 1
Rhythmic Fistman
  • 34,352
  • 5
  • 87
  • 159