In my app I am using AVCaptureDataOutputSynchronizerDelegate
for the video output when recording through the camera.
The data I receive is AVCaptureSynchronizedSampleBufferData,
and I need to convert it into Data or a URL so that I can play the video preview. I have gone through many questions on SO but have not found any related code. Please help me convert it into the required format. I am sharing my code and output.
Code:
/// Configures the capture session end-to-end: selects the input device,
/// optionally attaches a live preview layer, and wires up the video, depth,
/// and metadata outputs behind an AVCaptureDataOutputSynchronizer.
/// - Parameters:
///   - cameraType: Which camera (front/back) to capture from.
///   - preferredSpec: NOTE(review): not referenced anywhere in this initializer —
///     confirm whether it should drive the session preset or device format.
///   - previewContainer: Host layer for the live preview; when nil, no preview layer is created.
init(cameraType: CameraType, preferredSpec: VideoSpec?, previewContainer: CALayer?)
{
super.init()
// Batch every change between beginConfiguration/commitConfiguration so the
// session applies them atomically.
captureSession.beginConfiguration()
captureSession.sessionPreset = AVCaptureSession.Preset.photo
setupCaptureVideoDevice(with: cameraType)
// Preview: insert at index 0 so the camera feed sits behind any overlay layers.
if let previewContainer = previewContainer {
let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer.frame = previewContainer.bounds
previewLayer.contentsGravity = CALayerContentsGravity.resizeAspectFill
previewLayer.videoGravity = .resizeAspectFill
previewContainer.insertSublayer(previewLayer, at: 0)
self.previewLayer = previewLayer
}
// Outputs. NOTE(review): the movie-file output is commented out, so nothing
// here records to disk — only per-frame buffers are delivered.
do {
// Movie output
// if captureSession.canAddOutput(movieOutput) {
// captureSession.addOutput(movieOutput)
// }
// Video output: uncompressed BGRA frames; late frames are discarded to keep latency low.
videoDataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)]
videoDataOutput.alwaysDiscardsLateVideoFrames = true
videoDataOutput.setSampleBufferDelegate(self, queue: dataOutputQueue)
guard captureSession.canAddOutput(videoDataOutput) else { fatalError() }
captureSession.addOutput(videoDataOutput)
print("-------",videoDataOutput)
videoConnection = videoDataOutput.connection(with: .video)
// Depth output: unfiltered depth maps, delivered on the same serial queue.
guard captureSession.canAddOutput(depthDataOutput) else { fatalError() }
captureSession.addOutput(depthDataOutput)
depthDataOutput.setDelegate(self, callbackQueue: dataOutputQueue)
depthDataOutput.isFilteringEnabled = false
guard let connection = depthDataOutput.connection(with: .depthData) else { fatalError() }
connection.isEnabled = true
// Metadata output: face detection, only if the device/session supports it.
guard captureSession.canAddOutput(metadataOutput) else { fatalError() }
captureSession.addOutput(metadataOutput)
if metadataOutput.availableMetadataObjectTypes.contains(.face) {
metadataOutput.metadataObjectTypes = [.face]
}
// Synchronizer: delivers time-aligned video + depth + metadata to the
// AVCaptureDataOutputSynchronizerDelegate instead of the per-output delegates.
dataOutputSynchronizer = AVCaptureDataOutputSynchronizer(dataOutputs: [videoDataOutput, depthDataOutput, metadataOutput])
dataOutputSynchronizer.setDelegate(self, queue: dataOutputQueue)
}
setupConnections(with: cameraType)
captureSession.commitConfiguration()
}
/// Selects the capture device for `cameraType`, switches it to a depth-capable
/// format, and installs it as the session's sole input.
/// - Parameter cameraType: Which camera (front/back) to use.
private func setupCaptureVideoDevice(with cameraType: CameraType)
{
videoDevice = cameraType.captureDevice()
print("selected video device: \(String(describing: videoDevice))")
// Project helper — presumably picks a format that supports depth delivery; TODO confirm.
videoDevice.selectDepthFormat()
// Remove any previously attached inputs so the session ends up with exactly one.
captureSession.inputs.forEach { (captureInput) in
captureSession.removeInput(captureInput)
}
// NOTE(review): try! crashes if the device cannot be opened (e.g. camera in
// use, permission denied) — consider do/catch with a recoverable error path.
let videoDeviceInput = try! AVCaptureDeviceInput(device: videoDevice)
guard captureSession.canAddInput(videoDeviceInput) else { fatalError() }
captureSession.addInput(videoDeviceInput)
}
/// Applies mirroring (front camera only) and portrait orientation to the
/// video and depth connections.
/// - Parameter cameraType: Which camera is active; front cameras are mirrored.
private func setupConnections(with cameraType: CameraType)
{
    videoConnection = videoDataOutput.connection(with: .video)!
    let depthConnection = depthDataOutput.connection(with: .depthData)

    // Mirror the front camera so the preview behaves like a mirror.
    if case .front = cameraType {
        videoConnection.isVideoMirrored = true
        depthConnection?.isVideoMirrored = true
    }

    videoConnection.videoOrientation = .portrait
    depthConnection?.videoOrientation = .portrait
}
/// Builds a unique `.mp4` file URL inside the app's temporary directory.
///
/// Replaces the NSString/NSTemporaryDirectory/NSUUID bridging dance with the
/// modern Foundation API, and builds the URL once instead of twice (the old
/// code also had a dead `directory != ""` branch — NSTemporaryDirectory()
/// never returns an empty string on iOS/macOS).
/// - Returns: A fresh, unique file URL; kept optional only for interface
///   compatibility with existing callers.
func tempURL() -> URL?
{
    let url = FileManager.default.temporaryDirectory
        .appendingPathComponent(UUID().uuidString)
        .appendingPathExtension("mp4")
    print("URL", url)
    return url
}
/// Starts the capture session unless it is already running.
func startCapture()
{
    print("\(self.classForCoder)/" + #function)

    // Guard against redundant starts; startRunning is expensive and blocking.
    guard !captureSession.isRunning else {
        print("already running")
        return
    }

    captureSession.startRunning()
}
/// Stops the capture session unless it is already stopped.
func stopCapture()
{
    print("\(self.classForCoder)/" + #function)

    // Nothing to do when the session is not running.
    guard captureSession.isRunning else {
        print("already stopped")
        return
    }

    captureSession.stopRunning()
}
// MARK: - AVCaptureVideoDataOutputSampleBufferDelegate
// Only fires when the AVCaptureDataOutputSynchronizer is NOT installed;
// with the synchronizer active, frames arrive via dataOutputSynchronizer(_:didOutput:).
extension VideoCapture: AVCaptureVideoDataOutputSampleBufferDelegate
{
    func captureOutput(_ output: AVCaptureOutput, didDrop sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection)
    {
        print("\(self.classForCoder)/" + #function)
    }

    /// Forwards each video frame's pixel buffer and timestamp to `imageBufferHandler`.
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection)
    {
        print("===Method called===")

        // Only the video connection is of interest, and only when a handler is set.
        guard connection == videoConnection, let handler = imageBufferHandler else { return }
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { fatalError() }

        let presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
        handler(pixelBuffer, presentationTime, nil)
    }
}
// MARK: - AVCaptureDepthDataOutputDelegate
// Logging-only conformance: depth data is actually consumed in the
// synchronizer callback; these fire only when the synchronizer is not installed.
extension VideoCapture: AVCaptureDepthDataOutputDelegate
{
// Called when a depth frame is dropped (e.g. late delivery); log only.
func depthDataOutput(_ output: AVCaptureDepthDataOutput, didDrop depthData: AVDepthData, timestamp: CMTime, connection: AVCaptureConnection, reason: AVCaptureOutput.DataDroppedReason)
{
print("\(self.classForCoder)/\(#function)")
}
// Per-frame depth delivery; intentionally unused beyond logging.
func depthDataOutput(_ output: AVCaptureDepthDataOutput, didOutput depthData: AVDepthData, timestamp: CMTime, connection: AVCaptureConnection)
{
print("\(self.classForCoder)/\(#function)")
}
}
// MARK: - AVCaptureDataOutputSynchronizerDelegate
extension VideoCapture: AVCaptureDataOutputSynchronizerDelegate
{
    /// Delivers one time-aligned bundle of video pixel buffer, optional depth
    /// data, and optional face metadata to `syncedDataBufferHandler`.
    ///
    /// Fixes vs. the previous version:
    /// - `print("=====", syncedDataBufferHandler!)` force-unwrapped the optional
    ///   closure and crashed whenever no handler was installed; removed.
    /// - A frame with no attached pixel buffer called `fatalError()`; now the
    ///   frame is skipped instead of killing the app.
    func dataOutputSynchronizer(_ synchronizer: AVCaptureDataOutputSynchronizer, didOutput synchronizedDataCollection: AVCaptureSynchronizedDataCollection)
    {
        // Video is mandatory: skip the callback entirely if the video sample
        // is absent or was dropped (e.g. late delivery under load).
        guard let syncedVideoData = synchronizedDataCollection.synchronizedData(for: videoDataOutput) as? AVCaptureSynchronizedSampleBufferData else { return }
        guard !syncedVideoData.sampleBufferWasDropped else {
            print("dropped video:\(syncedVideoData)")
            return
        }
        let videoSampleBuffer = syncedVideoData.sampleBuffer

        // Depth is best-effort: pass nil downstream when missing or dropped.
        let syncedDepthData = synchronizedDataCollection.synchronizedData(for: depthDataOutput) as? AVCaptureSynchronizedDepthData
        var depthData = syncedDepthData?.depthData
        if let syncedDepthData = syncedDepthData, syncedDepthData.depthDataWasDropped {
            print("dropped depth:\(syncedDepthData)")
            depthData = nil
        }

        // Face metadata is best-effort: translate the first detected face into
        // the video output's coordinate space, if any.
        let syncedMetaData = synchronizedDataCollection.synchronizedData(for: metadataOutput) as? AVCaptureSynchronizedMetadataObjectData
        var face: AVMetadataObject? = nil
        if let firstFace = syncedMetaData?.metadataObjects.first {
            face = videoDataOutput.transformedMetadataObject(for: firstFace, connection: videoConnection)
        }

        // A sample buffer without an image buffer is a recoverable per-frame
        // condition — skip the frame rather than crashing.
        guard let imagePixelBuffer = CMSampleBufferGetImageBuffer(videoSampleBuffer) else {
            print("no image buffer attached to synced video sample; skipping frame")
            return
        }

        syncedDataBufferHandler?(imagePixelBuffer, depthData, face)
    }
}
And I got this output in the console:
.............. <AVCaptureVideoDataOutput: 0x281d11900>
dropped video:<AVCaptureSynchronizedSampleBufferData: 0x2808aa3a0>
I need to convert this data to a URL or Data format, display the video preview, and then upload it to the server in Data format. Please suggest some code or a method to resolve this — I have been stuck on it for the last week.