
I am working with an image overlay for a watermark effect in a video using Swift. I am using AVFoundation for this, but somehow I am not succeeding.

Following is my code for overlaying the image/text:

    let path = NSBundle.mainBundle().pathForResource("sample_movie", ofType:"mp4")
    let fileURL = NSURL(fileURLWithPath: path!)

    let composition = AVMutableComposition()
    var vidAsset = AVURLAsset(URL: fileURL, options: nil)

    // get video track
    let vtrack =  vidAsset.tracksWithMediaType(AVMediaTypeVideo)
    let videoTrack:AVAssetTrack = vtrack[0] as! AVAssetTrack
    let vid_duration = videoTrack.timeRange.duration
    let vid_timerange = CMTimeRangeMake(kCMTimeZero, vidAsset.duration)

    var error: NSError?
    let compositionvideoTrack:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
    compositionvideoTrack.insertTimeRange(vid_timerange, ofTrack: videoTrack, atTime: kCMTimeZero, error: &error)

    compositionvideoTrack.preferredTransform = videoTrack.preferredTransform

    // Watermark Effect
    let size = videoTrack.naturalSize

    let imglogo = UIImage(named: "image.png")
    let imglayer = CALayer()
    imglayer.contents = imglogo?.CGImage
    imglayer.frame = CGRectMake(5, 5, 100, 100)
    imglayer.opacity = 0.6

    // create text Layer
    let titleLayer = CATextLayer()
    titleLayer.backgroundColor = UIColor.whiteColor().CGColor
    titleLayer.string = "Dummy text"
    titleLayer.font = UIFont(name: "Helvetica", size: 28)
    titleLayer.shadowOpacity = 0.5
    titleLayer.alignmentMode = kCAAlignmentCenter
    titleLayer.frame = CGRectMake(0, 50, size.width, size.height / 6)

    let videolayer = CALayer()
    videolayer.frame = CGRectMake(0, 0, size.width, size.height)

    let parentlayer = CALayer()
    parentlayer.frame = CGRectMake(0, 0, size.width, size.height)
    parentlayer.addSublayer(videolayer)
    parentlayer.addSublayer(imglayer)
    parentlayer.addSublayer(titleLayer)

    let layercomposition = AVMutableVideoComposition()
    layercomposition.frameDuration = CMTimeMake(1, 30)
    layercomposition.renderSize = size
    layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, inLayer: parentlayer)

    // instruction for watermark
    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration)
    let videotrack = composition.tracksWithMediaType(AVMediaTypeVideo)[0] as! AVAssetTrack
    let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
    instruction.layerInstructions = NSArray(object: layerinstruction) as [AnyObject]
    layercomposition.instructions = NSArray(object: instruction) as [AnyObject]

    //  create new file to receive data
    let dirPaths = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)
    let docsDir: AnyObject = dirPaths[0]
    let movieFilePath = docsDir.stringByAppendingPathComponent("result.mov")
    let movieDestinationUrl = NSURL(fileURLWithPath: movieFilePath)       

    // use AVAssetExportSession to export video
    let assetExport = AVAssetExportSession(asset: composition, presetName:AVAssetExportPresetHighestQuality)
    assetExport.outputFileType = AVFileTypeQuickTimeMovie 
    assetExport.outputURL = movieDestinationUrl
    assetExport.exportAsynchronouslyWithCompletionHandler({
        switch assetExport.status{
        case  AVAssetExportSessionStatus.Failed:
            println("failed \(assetExport.error)")
        case AVAssetExportSessionStatus.Cancelled:
            println("cancelled \(assetExport.error)")
        default:
            println("Movie complete")


            // play video
            NSOperationQueue.mainQueue().addOperationWithBlock({ () -> Void in
                self.playVideo(movieDestinationUrl!)
            })
        }
    })    

With this code I am not achieving the overlay, and I don't know what I am doing wrong.

Questions:

  • Is there anything missing in this code, or any problem with it?
  • Does this code work only with recorded video, or with all videos, including videos picked from the gallery?

5 Answers


The code provided by @El Captain would work. It's only missing:

    assetExport.videoComposition = layercomposition

You can add this right after the instantiation of the AVAssetExportSession.
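
For reference, the export setup from the question would then read like this (a minimal sketch; only the `videoComposition` line is new):

    // use AVAssetExportSession to export video -- now with the layer composition attached
    let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)
    assetExport.videoComposition = layercomposition  // without this, the overlay layers are ignored
    assetExport.outputFileType = AVFileTypeQuickTimeMovie
    assetExport.outputURL = movieDestinationUrl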

NOTE: The code originally provided would only export the video track but not the audio track. If you need the audio track, you could add something like this after you configure the compositionvideoTrack:

    // grab the audio tracks from the source asset
    let audioTracks = vidAsset.tracksWithMediaType(AVMediaTypeAudio)
    let compositionAudioTrack: AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
    for audioTrack in audioTracks {
        try! compositionAudioTrack.insertTimeRange(audioTrack.timeRange, ofTrack: audioTrack, atTime: kCMTimeZero)
    }
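
(No extra layer instruction is needed for the audio track; the video composition and its animation tool only affect video tracks.)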

Here's an update that's working in Swift 4:

import UIKit
import AVFoundation
import AVKit
import Photos

class ViewController: UIViewController {

var myurl: URL?

override func viewDidLoad() {
    super.viewDidLoad()
    // Do any additional setup after loading the view, typically from a nib.

}

@IBAction func saveVideoTapper(_ sender: Any) {

    let path = Bundle.main.path(forResource: "sample_video", ofType:"mp4")
    let fileURL = NSURL(fileURLWithPath: path!)

    let composition = AVMutableComposition()
    let vidAsset = AVURLAsset(url: fileURL as URL, options: nil)

    // get video track
    let vtrack =  vidAsset.tracks(withMediaType: AVMediaType.video)
    let videoTrack: AVAssetTrack = vtrack[0]
    let vid_timerange = CMTimeRangeMake(start: CMTime.zero, duration: vidAsset.duration)

    let tr: CMTimeRange = CMTimeRange(start: CMTime.zero, duration: CMTime(seconds: 10.0, preferredTimescale: 600))
    composition.insertEmptyTimeRange(tr)

    let trackID:CMPersistentTrackID = CMPersistentTrackID(kCMPersistentTrackID_Invalid)

    if let compositionvideoTrack: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: trackID) {

        do {
            try compositionvideoTrack.insertTimeRange(vid_timerange, of: videoTrack, at: CMTime.zero)
        } catch {
            print("error inserting video track: \(error)")
        }

        compositionvideoTrack.preferredTransform = videoTrack.preferredTransform

    } else {
        print("unable to add video track")
        return
    }


    // Watermark Effect
    let size = videoTrack.naturalSize

    let imglogo = UIImage(named: "image.png")
    let imglayer = CALayer()
    imglayer.contents = imglogo?.cgImage
    imglayer.frame = CGRect(x: 5, y: 5, width: 100, height: 100)
    imglayer.opacity = 0.6

    // create text Layer
    let titleLayer = CATextLayer()
    titleLayer.backgroundColor = UIColor.white.cgColor
    titleLayer.string = "Dummy text"
    titleLayer.font = UIFont(name: "Helvetica", size: 28)
    titleLayer.shadowOpacity = 0.5
    titleLayer.alignmentMode = CATextLayerAlignmentMode.center
    titleLayer.frame = CGRect(x: 0, y: 50, width: size.width, height: size.height / 6)


    let videolayer = CALayer()
    videolayer.frame = CGRect(x: 0, y: 0, width: size.width, height: size.height)

    let parentlayer = CALayer()
    parentlayer.frame = CGRect(x: 0, y: 0, width: size.width, height: size.height)
    parentlayer.addSublayer(videolayer)
    parentlayer.addSublayer(imglayer)
    parentlayer.addSublayer(titleLayer)

    let layercomposition = AVMutableVideoComposition()
    layercomposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
    layercomposition.renderSize = size
    layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, in: parentlayer)

    // instruction for watermark
    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: composition.duration)
    let videotrack = composition.tracks(withMediaType: AVMediaType.video)[0] as AVAssetTrack
    let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
    instruction.layerInstructions = [layerinstruction]
    layercomposition.instructions = [instruction]

    //  create new file to receive data
    let dirPaths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
    let docsDir = dirPaths[0] as NSString
    let movieFilePath = docsDir.appendingPathComponent("result.mov")
    let movieDestinationUrl = NSURL(fileURLWithPath: movieFilePath)

    // use AVAssetExportSession to export video
    let assetExport = AVAssetExportSession(asset: composition, presetName:AVAssetExportPresetHighestQuality)
    assetExport?.outputFileType = AVFileType.mov
    assetExport?.videoComposition = layercomposition

    // Check exist and remove old file
    FileManager.default.removeItemIfExisted(movieDestinationUrl as URL)

    assetExport?.outputURL = movieDestinationUrl as URL
    assetExport?.exportAsynchronously(completionHandler: {
        switch assetExport!.status {
        case AVAssetExportSession.Status.failed:
            print("failed")
            print(assetExport?.error ?? "unknown error")
        case AVAssetExportSession.Status.cancelled:
            print("cancelled")
            print(assetExport?.error ?? "unknown error")
        default:
            print("Movie complete")

            self.myurl = movieDestinationUrl as URL

            PHPhotoLibrary.shared().performChanges({
                PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: movieDestinationUrl as URL)
            }) { saved, error in
                if saved {
                    print("Saved")
                }
            }

            self.playVideo()

        }
    })

}


func playVideo() {
    let player = AVPlayer(url: myurl!)
    let playerLayer = AVPlayerLayer(player: player)
    playerLayer.frame = self.view.bounds
    self.view.layer.addSublayer(playerLayer)
    player.play()
    print("playing...")
}



}


extension FileManager {
func removeItemIfExisted(_ url:URL) -> Void {
    if FileManager.default.fileExists(atPath: url.path) {
        do {
            try FileManager.default.removeItem(atPath: url.path)
        }
        catch {
            print("Failed to delete file")
        }
    }
}
}
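
One caveat worth adding: `PHAssetChangeRequest.creationRequestForAssetFromVideo` fails if the app lacks photo library permission. A minimal pre-flight sketch, assuming the `NSPhotoLibraryUsageDescription` key (or `NSPhotoLibraryAddUsageDescription` on iOS 11+) is already declared in Info.plist:

    // Request photo library access up front; only save the export once granted.
    PHPhotoLibrary.requestAuthorization { status in
        guard status == .authorized else {
            print("Photo library access denied; not saving")
            return
        }
        // Safe to run the export and PHPhotoLibrary.shared().performChanges from here.
    }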

From what I see in your code, you are not adding the parentlayer to the screen.

You create a CALayer() to hold videolayer, imglayer and titleLayer, but you never add this parent layer to the screen:

yourView.layer.addSublayer(parentlayer)

Hope this helps you.

  • check the line `layercomposition.animationTool` ...where it processes the video with parentlayer – Bhavin Bhadani Sep 08 '15 at 12:51
  • Try using `layercomposition` instead of `composition` in `let assetExport = AVAssetExportSession(asset: composition, presetName:AVAssetExportPresetHighestQuality)` – Jérôme Demyttenaere Sep 09 '15 at 08:52
  • Could you add your complete solution as an answer? – SheffieldKevin Sep 09 '15 at 10:21
  • If someone with a similar problem in the future finds this question, having the answer may be useful for them. You asked the question, you now have the answer. – SheffieldKevin Sep 10 '15 at 11:03
  • Hi @Dolwen do you happen to know how to correct blurriness in a video? Question here: http://stackoverflow.com/questions/34912050/avoiding-blurriness-at-start-end-of-video-even-after-using-setpreferredvideos. Thanks for your time! – Crashalot Jan 21 '16 at 11:14
  • hi @Crashalot, I think this depends on the equipment used to record the video; here is a piece on when to correct blurring and when not to (http://www.bhphotovideo.com/explora/photography/tips-and-solutions/image-stabilization-when-use-it-and-when-turn-it). I'm not an expert in this :) – Jérôme Demyttenaere Jan 21 '16 at 14:58

@Rey Hernandez this just helped me a lot! If anyone wants further clarification on how to add an audio asset to the video, here is the code to combine them:

    let vtrack =  vidAsset.tracksWithMediaType(AVMediaTypeVideo)
    let videoTrack:AVAssetTrack = vtrack[0] 
    let vid_duration = videoTrack.timeRange.duration
    let vid_timerange = CMTimeRangeMake(kCMTimeZero, vidAsset.duration)

    let atrack =  vidAsset.tracksWithMediaType(AVMediaTypeAudio)
    let audioTrack:AVAssetTrack = atrack[0]
    let audio_duration = audioTrack.timeRange.duration
    let audio_timerange = CMTimeRangeMake(kCMTimeZero, vidAsset.duration)

    do {
        let compositionvideoTrack:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())

        try compositionvideoTrack.insertTimeRange(vid_timerange, ofTrack: videoTrack, atTime: kCMTimeZero)

        compositionvideoTrack.preferredTransform = videoTrack.preferredTransform



        let compositionAudioTrack:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
        try compositionAudioTrack.insertTimeRange(audio_timerange, ofTrack: audioTrack, atTime: kCMTimeZero)

    } catch {
        print(error)
    }

To supplement, here's a function that creates CATextLayers from an array of UITextViews, copying over their rotation, scale and font. Just add these to the container layer supplied to AVVideoCompositionCoreAnimationTool:

private static func createTextLayer(totalSize: CGSize,
                                        textView: UITextView) -> CATextLayer {
        let textLayer: CACenteredTextLayer = CACenteredTextLayer()
        textLayer.backgroundColor = UIColor.clear.cgColor
        textLayer.foregroundColor = textView.textColor?.cgColor
        textLayer.masksToBounds = false
        textLayer.isWrapped = true

        let scale: CGFloat = UIScreen.main.scale

        if let font: UIFont = textView.font {
            let upscaledFont: UIFont = font.withSize(font.pointSize * scale)
            let attributedString = NSAttributedString(
                string: textView.text,
                attributes: [NSAttributedString.Key.font: upscaledFont,
                             NSAttributedString.Key.foregroundColor: textView.textColor ?? UIColor.white])
            textLayer.string = attributedString
        }

        // Set text alignment
        let alignment: CATextLayerAlignmentMode
        switch textView.textAlignment {
        case NSTextAlignment.left:
            alignment = CATextLayerAlignmentMode.left
        case NSTextAlignment.center:
            alignment = CATextLayerAlignmentMode.center
        default:
            alignment = CATextLayerAlignmentMode.right
        }
        textLayer.alignmentMode = alignment

        let originalFrame: CGRect = textView.frame

        // Also take scale into consideration
        let targetSize: CGSize = CGSize(width: originalFrame.width * scale,
                                        height: originalFrame.height * scale)

        // The CALayer positioning is inverted on the Y-axes, so apply this
        let origin: CGPoint = CGPoint(x: originalFrame.origin.x * scale,
                                      y: (totalSize.height - (originalFrame.origin.y * scale)) - targetSize.height)

        textLayer.frame = CGRect(x: origin.x,
                                 y: origin.y,
                                 width: targetSize.width,
                                 height: targetSize.height)

        // Determine the scale
        textLayer.anchorPoint = CGPoint(x: 0.5,
                                        y: 0.5)

        var newTransform: CATransform3D = CATransform3DMakeScale(textView.transform.xScale,
                                                                 textView.transform.yScale,
                                                                 1)

        // Convert to degrees, invert the amount and convert back to radians to apply
        newTransform = CATransform3DRotate(newTransform,
                                           textView.transform.radiansFor3DTransform,
                                           0,
                                           0,
                                           1)
        textLayer.transform = newTransform

        return textLayer
}

Combine this with this subclassing of CATextLayer to center the text vertically:

final class CACenteredTextLayer: CATextLayer {
    override func draw(in ctx: CGContext) {
        guard let attributedString = string as? NSAttributedString else { return }

        let height = self.bounds.size.height
        let boundingRect: CGRect = attributedString.boundingRect(
            with: CGSize(width: bounds.width,
                         height: CGFloat.greatestFiniteMagnitude),
            options: NSStringDrawingOptions.usesLineFragmentOrigin,
            context: nil)
        let yDiff: CGFloat = (height - boundingRect.size.height) / 2

        ctx.saveGState()
        ctx.translateBy(x: 0.0, y: yDiff)
        super.draw(in: ctx)
        ctx.restoreGState()
    }
}

private extension CGAffineTransform {
    var xScale: CGFloat {
        return sqrt((a*a) + (c*c))
    }

    var yScale: CGFloat {
        return sqrt((b*b) + (d*d))
    }

    var radiansFor3DTransform: CGFloat {
        // Invert the rotation angle to match the flipped layer coordinate space
        return -atan2(b, a)
    }
}
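
Usage is then a matter of looping over the text views. A hedged sketch, where `textViews` is assumed to be your array of on-screen UITextViews and `parentlayer`/`renderSize` come from the watermark setup in the answers above:

    // Convert each UITextView into a CATextLayer and stack it on the
    // container layer handed to AVVideoCompositionCoreAnimationTool.
    for textView in textViews {
        let textLayer = createTextLayer(totalSize: renderSize, textView: textView)
        parentlayer.addSublayer(textLayer)
    }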