Here is a complete project if you care to run this yourself: https://www.dropbox.com/s/5p384mogjzflvqk/AVPlayerLayerSoundOnlyBug_iOS10.zip?dl=0
This is a new problem on iOS 10 (it has since been fixed as of iOS 10.2): after exporting a video using AVAssetExportSession and AVVideoCompositionCoreAnimationTool to composite a layer on top of the video during export, videos subsequently played in an AVPlayerLayer show no video, though the audio plays fine. This doesn't seem to be caused by hitting the AV encode/decode pipeline limit, because it often happens after a single export, which as far as I know spins up only 2 pipelines: 1 for the AVAssetExportSession and another for the AVPlayer. I am also setting the layer's frame properly; the code below gives the layer a blue background, which you can plainly see while the video fails to appear.
After an export, waiting for some time before playing a video seems to make playback far more reliable, but that's not an acceptable workaround to ship to users.
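For what it's worth, the closest thing to a workaround I have is to check whether the layer ever becomes able to render, and swap in a fresh player item after a delay if it doesn't. A rough sketch of that idea (the retry count and one-second delay are arbitrary, and playWhenReady is just a name I made up; isReadyForDisplay is the standard AVPlayerLayer property):

// Hacky mitigation sketch: if the layer never becomes ready to render video,
// replace the AVPlayerItem with a fresh one and retry a limited number of times.
private func playWhenReady(_ layer: AVPlayerLayer, asset: AVAsset, retriesLeft: Int = 3) {
    layer.player?.play()
    DispatchQueue.main.asyncAfter(deadline: .now() + 1.0) {
        // isReadyForDisplay stays false while the layer has no video to show
        if !layer.isReadyForDisplay && retriesLeft > 0 {
            layer.player?.replaceCurrentItem(with: AVPlayerItem(asset: asset))
            self.playWhenReady(layer, asset: asset, retriesLeft: retriesLeft - 1)
        }
    }
}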
Any ideas on what's causing this, or how I can fix or work around it? Have I messed something up, or am I missing an important step or detail? Any help or pointers to documentation are much appreciated.
import UIKit
import AVFoundation
/* After exporting an AVAsset using AVAssetExportSession with AVVideoCompositionCoreAnimationTool, we
 * will attempt to play a video using an AVPlayerLayer with a blue background.
 *
 * If you see the blue background and hear audio you're experiencing the missing-video bug. Otherwise
 * try hitting the button again.
 */
class ViewController: UIViewController {
    private var playerLayer: AVPlayerLayer?
    private let button = UIButton()
    private let indicator = UIActivityIndicatorView(activityIndicatorStyle: .gray)

    override func viewDidLoad() {
        super.viewDidLoad()
        view.backgroundColor = UIColor.white

        button.setTitle("Cause Trouble", for: .normal)
        button.setTitleColor(UIColor.black, for: .normal)
        button.addTarget(self, action: #selector(ViewController.buttonTapped), for: .touchUpInside)
        view.addSubview(button)
        button.translatesAutoresizingMaskIntoConstraints = false
        NSLayoutConstraint.activate([
            button.centerXAnchor.constraint(equalTo: view.centerXAnchor),
            button.bottomAnchor.constraint(equalTo: view.bottomAnchor, constant: -16),
        ])

        indicator.hidesWhenStopped = true
        view.insertSubview(indicator, belowSubview: button)
        indicator.translatesAutoresizingMaskIntoConstraints = false
        NSLayoutConstraint.activate([
            indicator.centerXAnchor.constraint(equalTo: button.centerXAnchor),
            indicator.centerYAnchor.constraint(equalTo: button.centerYAnchor),
        ])
    }

    func buttonTapped() {
        button.isHidden = true
        indicator.startAnimating()
        playerLayer?.removeFromSuperlayer()

        let sourcePath = Bundle.main.path(forResource: "video.mov", ofType: nil)!
        let sourceURL = URL(fileURLWithPath: sourcePath)
        let sourceAsset = AVURLAsset(url: sourceURL)
        //////////////////////////////////////////////////////////////////////
        // STEP 1: Export a video using AVVideoCompositionCoreAnimationTool //
        //////////////////////////////////////////////////////////////////////
        let exportSession = { () -> AVAssetExportSession in
            let sourceTrack = sourceAsset.tracks(withMediaType: AVMediaTypeVideo).first!

            // The animation tool renders the source video into videoLayer, then
            // composites parentLayer (video plus any overlays) into the output.
            let parentLayer = CALayer()
            parentLayer.frame = CGRect(origin: .zero, size: CGSize(width: 1280, height: 720))
            let videoLayer = CALayer()
            videoLayer.frame = parentLayer.bounds
            parentLayer.addSublayer(videoLayer)

            let composition = AVMutableVideoComposition(propertiesOf: sourceAsset)
            composition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer)

            let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: sourceTrack)
            layerInstruction.setTransform(sourceTrack.preferredTransform, at: kCMTimeZero)
            let instruction = AVMutableVideoCompositionInstruction()
            instruction.timeRange = CMTimeRange(start: kCMTimeZero, duration: sourceAsset.duration)
            instruction.layerInstructions = [layerInstruction]
            composition.instructions = [instruction]

            let e = AVAssetExportSession(asset: sourceAsset, presetName: AVAssetExportPreset1280x720)!
            e.videoComposition = composition
            e.outputFileType = AVFileTypeQuickTimeMovie
            e.timeRange = CMTimeRange(start: kCMTimeZero, duration: sourceAsset.duration)

            // The export fails if a file already exists at the output URL.
            let outputURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("out2.mov")
            _ = try? FileManager.default.removeItem(at: outputURL)
            e.outputURL = outputURL
            return e
        }()
print("Exporting asset...")
exportSession.exportAsynchronously {
assert(exportSession.status == .completed)
//////////////////////////////////////////////
// STEP 2: Play a video in an AVPlayerLayer //
//////////////////////////////////////////////
DispatchQueue.main.async {
// Reuse player layer, shouldn't be hitting the AV pipeline limit
let playerItem = AVPlayerItem(asset: sourceAsset)
let layer = self.playerLayer ?? AVPlayerLayer()
if layer.player == nil {
layer.player = AVPlayer(playerItem: playerItem)
}
else {
layer.player?.replaceCurrentItem(with: playerItem)
}
layer.backgroundColor = UIColor.blue.cgColor
if UIDeviceOrientationIsPortrait(UIDevice.current.orientation) {
layer.frame = self.view.bounds
layer.bounds.size.height = layer.bounds.width * 9.0 / 16.0
}
else {
layer.frame = self.view.bounds.insetBy(dx: 0, dy: 60)
layer.bounds.size.width = layer.bounds.height * 16.0 / 9.0
}
self.view.layer.insertSublayer(layer, at: 0)
self.playerLayer = layer
layer.player?.play()
print("Playing a video in an AVPlayerLayer...")
self.button.isHidden = false
self.indicator.stopAnimating()
}
}
}
}
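If it helps anyone reproduce this: a check along these lines, dropped in right after the layer.player?.play() call above, should distinguish whether the item fails outright or simply never renders (a sketch; the one-second delay is arbitrary):

// Diagnostic sketch: does the item report an error, or does the layer
// just never become ready to display video?
DispatchQueue.main.asyncAfter(deadline: .now() + 1.0) {
    if let item = layer.player?.currentItem {
        print("item status: \(item.status.rawValue), error: \(String(describing: item.error))")
    }
    print("layer isReadyForDisplay: \(layer.isReadyForDisplay)")
}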