3

In my app I am using AVCaptureDataOutputSynchronizerDelegate for the video output of a camera recording. The data I get is AVCaptureSynchronizedSampleBufferData, and I have to convert it into Data or a URL so that I can play the video preview. I have gone through many questions on SO but have not found any related code. Please help me convert it into the required format. I am sharing my code and output below.

Code:

/// Configures the whole capture pipeline: input device, optional live
/// preview layer, and the synchronized video / depth / metadata outputs.
///
/// - Parameters:
///   - cameraType: Which camera (front/back) to use; resolved to a concrete
///     device in `setupCaptureVideoDevice(with:)`.
///   - preferredSpec: NOTE(review): not referenced anywhere in this
///     initializer — confirm whether it was meant to drive the preset/format.
///   - previewContainer: Layer that receives an `AVCaptureVideoPreviewLayer`
///     showing the live camera feed; pass nil for no on-screen preview.
init(cameraType: CameraType, preferredSpec: VideoSpec?, previewContainer: CALayer?)
    {
        super.init()
        
        // All topology changes below are batched atomically between
        // beginConfiguration() and commitConfiguration().
        captureSession.beginConfiguration()
        captureSession.sessionPreset = AVCaptureSession.Preset.photo
        setupCaptureVideoDevice(with: cameraType)
        
        // setup preview — the AVCaptureVideoPreviewLayer initializer
        // attaches the layer to the session; no extra wiring needed.
        if let previewContainer = previewContainer {
            let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
            previewLayer.frame = previewContainer.bounds
            previewLayer.contentsGravity = CALayerContentsGravity.resizeAspectFill
            previewLayer.videoGravity = .resizeAspectFill
            previewContainer.insertSublayer(previewLayer, at: 0)
            self.previewLayer = previewLayer
        }
        
        // setup outputs
        do {
            // Movie output — left disabled: AVCaptureMovieFileOutput cannot
            // run alongside AVCaptureVideoDataOutput on the same session.
//            if captureSession.canAddOutput(movieOutput) {
//                captureSession.addOutput(movieOutput)
//            }
            
            // video output: BGRA frames, late frames dropped so the delegate
            // queue never backs up.
            videoDataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)]
            videoDataOutput.alwaysDiscardsLateVideoFrames = true
            videoDataOutput.setSampleBufferDelegate(self, queue: dataOutputQueue)
            guard captureSession.canAddOutput(videoDataOutput) else { fatalError() }
            captureSession.addOutput(videoDataOutput)
            print("-------",videoDataOutput)
            // NOTE(review): connection(with:) may still be nil here until
            // setupConnections(with:) re-reads it after configuration.
            videoConnection = videoDataOutput.connection(with: .video)
            
            
            // depth output: raw (unfiltered) depth frames on the same queue.
            guard captureSession.canAddOutput(depthDataOutput) else { fatalError() }
            captureSession.addOutput(depthDataOutput)
            depthDataOutput.setDelegate(self, callbackQueue: dataOutputQueue)
            depthDataOutput.isFilteringEnabled = false
            guard let connection = depthDataOutput.connection(with: .depthData) else { fatalError() }
            connection.isEnabled = true
            
            // metadata output: face detection only, when the device offers it.
            guard captureSession.canAddOutput(metadataOutput) else { fatalError() }
            captureSession.addOutput(metadataOutput)
            if metadataOutput.availableMetadataObjectTypes.contains(.face) {
                metadataOutput.metadataObjectTypes = [.face]
            }
            // synchronize outputs — once the synchronizer owns these outputs,
            // frames arrive via dataOutputSynchronizer(_:didOutput:) instead
            // of the individual per-output delegate callbacks.
            dataOutputSynchronizer = AVCaptureDataOutputSynchronizer(dataOutputs: [videoDataOutput, depthDataOutput, metadataOutput])
            dataOutputSynchronizer.setDelegate(self, queue: dataOutputQueue)
        }
        setupConnections(with: cameraType)
        captureSession.commitConfiguration()
    }
    
    
    /// Resolves the capture device for `cameraType`, enables its depth
    /// format, and replaces the session's inputs with a fresh device input.
    /// Must be called between beginConfiguration()/commitConfiguration().
    private func setupCaptureVideoDevice(with cameraType: CameraType)
    {
        videoDevice = cameraType.captureDevice()
        print("selected video device: \(String(describing: videoDevice))")
        
        videoDevice.selectDepthFormat()

        // Remove any previous inputs so switching cameras never leaves a
        // stale input attached to the session.
        captureSession.inputs.forEach { captureSession.removeInput($0) }

        // `try!` crashed with no context; surface the underlying error
        // (camera permission denied, device in use, ...) in the crash log.
        let videoDeviceInput: AVCaptureDeviceInput
        do {
            videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice)
        } catch {
            fatalError("Could not create AVCaptureDeviceInput: \(error)")
        }
        guard captureSession.canAddInput(videoDeviceInput) else {
            fatalError("Capture session cannot accept input for device \(String(describing: videoDevice))")
        }
        captureSession.addInput(videoDeviceInput)
    }
    
    
    /// Applies orientation and mirroring to the video and depth connections
    /// after the outputs have been added to the session.
    private func setupConnections(with cameraType: CameraType)
    {
        // Replace the bare force-unwrap with a diagnosable failure: a nil
        // connection means the output was never attached to the session.
        guard let connection = videoDataOutput.connection(with: .video) else {
            fatalError("videoDataOutput has no video connection — was it added to the session?")
        }
        videoConnection = connection
        // Depth may be unavailable on this device; treat it as optional.
        let depthConnection = depthDataOutput.connection(with: .depthData)

        // The front camera is mirrored by platform convention.
        if case .front = cameraType {
            videoConnection.isVideoMirrored = true
            depthConnection?.isVideoMirrored = true
        }

        videoConnection.videoOrientation = .portrait
        depthConnection?.videoOrientation = .portrait
    }
    
    
    /// Builds a unique file URL in the temporary directory for an .mp4
    /// recording, e.g. `.../tmp/<UUID>.mp4`.
    ///
    /// - Returns: A fresh file URL. With the modern FileManager API the
    ///   temporary directory always exists, so this no longer returns nil;
    ///   the optional return type is kept for existing callers.
    func tempURL() -> URL?
    {
        // FileManager.temporaryDirectory replaces the legacy
        // NSTemporaryDirectory()/NSString path juggling (and the dead
        // empty-string check that came with it).
        let url = FileManager.default
            .temporaryDirectory
            .appendingPathComponent(UUID().uuidString)
            .appendingPathExtension("mp4")
        print("URL", url)
        return url
    }
    
    
    /// Starts the capture session unless it is already running.
    func startCapture()
    {
        print("\(self.classForCoder)/" + #function)
        guard !captureSession.isRunning else {
            print("already running")
            return
        }
        captureSession.startRunning()
    }
    
    
    /// Stops the capture session unless it is already stopped.
    func stopCapture()
    {
        print("\(self.classForCoder)/" + #function)
        guard captureSession.isRunning else {
            print("already stopped")
            return
        }
        captureSession.stopRunning()
    }


// MARK: - AVCaptureVideoDataOutputSampleBufferDelegate
extension VideoCapture: AVCaptureVideoDataOutputSampleBufferDelegate
{
    /// Logged when a frame is dropped (e.g. the delegate queue was busy).
    func captureOutput(_ output: AVCaptureOutput, didDrop sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection)
    {
        print("\(self.classForCoder)/" + #function)
    }
    
    /// Per-frame callback. NOTE(review): once the outputs are owned by an
    /// AVCaptureDataOutputSynchronizer, frames are delivered through
    /// dataOutputSynchronizer(_:didOutput:) and this callback is bypassed —
    /// likely why "===Method called===" never appears in the console.
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection)
    {
        print("===Method called===")
        guard let imageBufferHandler = imageBufferHandler, connection == videoConnection else { return }

        // A sample buffer without pixel data is rare but possible; skip the
        // frame instead of crashing the whole app (was fatalError()).
        guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            print("sample buffer has no image buffer — frame skipped")
            return
        }

        let timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
        imageBufferHandler(imageBuffer, timestamp, nil)
    }
}


// MARK: - AVCaptureDepthDataOutputDelegate
extension VideoCapture: AVCaptureDepthDataOutputDelegate
{
    /// Logs dropped depth frames.
    func depthDataOutput(_ output: AVCaptureDepthDataOutput, didDrop depthData: AVDepthData, timestamp: CMTime, connection: AVCaptureConnection, reason: AVCaptureOutput.DataDroppedReason)
    {
        let tag = "\(self.classForCoder)/\(#function)"
        print(tag)
    }

    /// Logs delivered depth frames; the synchronizer delegate does the
    /// actual per-frame work.
    func depthDataOutput(_ output: AVCaptureDepthDataOutput, didOutput depthData: AVDepthData, timestamp: CMTime, connection: AVCaptureConnection)
    {
        let tag = "\(self.classForCoder)/\(#function)"
        print(tag)
    }
}



// MARK: - AVCaptureDataOutputSynchronizerDelegate
extension VideoCapture: AVCaptureDataOutputSynchronizerDelegate
{
    /// Delivers a time-aligned batch of video, depth, and face-metadata
    /// samples to `syncedDataBufferHandler`.
    func dataOutputSynchronizer(_ synchronizer: AVCaptureDataOutputSynchronizer, didOutput synchronizedDataCollection: AVCaptureSynchronizedDataCollection)
    {
        // Video is mandatory; bail out if this batch has no kept frame.
        guard let syncedVideoData = synchronizedDataCollection.synchronizedData(for: videoDataOutput) as? AVCaptureSynchronizedSampleBufferData else { return }
        guard !syncedVideoData.sampleBufferWasDropped else {
            print("dropped video:\(syncedVideoData)")
            return
        }
        
        let videoSampleBuffer = syncedVideoData.sampleBuffer
        
        // Depth is best-effort: a dropped depth frame degrades to nil.
        let syncedDepthData = synchronizedDataCollection.synchronizedData(for: depthDataOutput) as? AVCaptureSynchronizedDepthData
        var depthData = syncedDepthData?.depthData
        if let syncedDepthData = syncedDepthData, syncedDepthData.depthDataWasDropped {
            print("dropped depth:\(syncedDepthData)")
            depthData = nil
        }
        
        // First detected face, converted into the video output's
        // coordinate space so it lines up with the pixel buffer.
        let syncedMetaData = synchronizedDataCollection.synchronizedData(for: metadataOutput) as? AVCaptureSynchronizedMetadataObjectData
        var face: AVMetadataObject? = nil
        if let firstFace = syncedMetaData?.metadataObjects.first {
            face = videoDataOutput.transformedMetadataObject(for: firstFace, connection: videoConnection)
        }

        // Skip the batch rather than crash if the frame somehow carries no
        // pixel data (was fatalError()).
        guard let imagePixelBuffer = CMSampleBufferGetImageBuffer(videoSampleBuffer) else {
            print("synchronized sample has no image buffer — batch skipped")
            return
        }

        // BUG FIX: the old code called the handler via optional chaining and
        // then force-unwrapped it in a print — a guaranteed crash on every
        // frame whenever no handler was installed. The force unwrap is gone.
        syncedDataBufferHandler?(imagePixelBuffer, depthData, face)
    }
}

And i got this output in console

.............. <AVCaptureVideoDataOutput: 0x281d11900>
dropped video:<AVCaptureSynchronizedSampleBufferData: 0x2808aa3a0>

I need to convert this data to a URL or Data format, display the video preview, and then upload it to the server in Data format. Please suggest some code or a method to resolve it. I have been stuck on this for the last week.

Abhinandan Pratap
  • 2,142
  • 1
  • 18
  • 39
  • Do you just want to show a preview of what the camera is capturing live? You haven't hooked up your `AVCaptureVideoPreviewLayer` to the session. Add `previewLayer.session = captureSession` – mani Aug 19 '22 at 07:39
  • yes i want to show the preview as well as upload it to server by converting it into data – Abhinandan Pratap Aug 20 '22 at 04:33
  • Add a `AVCaptureMovieFileOutput` to output to a temporary location/documents directory. Then upload the file to your server. This way you don't have to deal with raw buffers. – mani Aug 21 '22 at 13:06
  • if you have some related code please share as i already converted it to url with temporary location and convert it to data. but the data i got is of 0 bytes – Abhinandan Pratap Aug 22 '22 at 04:20
  • This answer sheds some light: https://stackoverflow.com/a/4986032/2303348. Seems like you cannot use AVCaptureVideoDataOutput + AVCaptureMovieFileOutput simultaneously. – mani Aug 22 '22 at 11:35

0 Answers