This is an extension to an unanswered question I had before: AVCaptureSession is not recording audio from the mic in Swift
I'm unclear on how to simultaneously write video that is being modified in real time and audio that is being recorded from the microphone. I've been searching for months and have found nothing. What seems to set my problem apart from others is that I take the image buffer from the sampleBuffer in the captureOutput function, turn it into an image, modify it, and then write it back out through an AVAssetWriterInputPixelBufferAdaptor, rather than recording the output as normal video. From there, I have no idea how to get audio from a sampleBuffer, or whether that's even the correct approach, although I've seen others get an AudioBufferList from captureOutput.
Minimally, this is what I have in my main class:
class CaptureVC: UIViewController, AVCapturePhotoCaptureDelegate, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate, UIImagePickerControllerDelegate, UINavigationControllerDelegate, UIPickerViewDataSource, UIPickerViewDelegate {

    var captureSession: AVCaptureSession?
    var stillImageOutput: AVCapturePhotoOutput?
    var videoPreviewLayer: AVCaptureVideoPreviewLayer?

    let videoOutput = AVCaptureVideoDataOutput()
    let audioOutput = AVCaptureAudioDataOutput()

    var assetWriter: AVAssetWriter?
    var assetWriterPixelBufferInput: AVAssetWriterInputPixelBufferAdaptor?
    var assetWriterAudioInput: AVAssetWriterInput?

    var currentSampleTime: CMTime?
    var currentVideoDimensions: CMVideoDimensions?
    var videoIsRecording = false

    override func viewDidLoad() {
        super.viewDidLoad()

        captureSession = AVCaptureSession()

        let backCamera = AVCaptureDevice.default(for: AVMediaType.video)
        let microphone = AVCaptureDevice.default(.builtInMicrophone, for: AVMediaType.audio, position: .unspecified)

        var error: NSError?
        var videoInput: AVCaptureDeviceInput!
        var micInput: AVCaptureDeviceInput!
        do {
            videoInput = try AVCaptureDeviceInput(device: backCamera!)
            micInput = try AVCaptureDeviceInput(device: microphone!)
        } catch let error1 as NSError {
            error = error1
            videoInput = nil
            micInput = nil
            print(error!.localizedDescription)
        }

        if error == nil &&
            captureSession!.canAddInput(videoInput) &&
            captureSession!.canAddInput(micInput) {

            captureSession!.addInput(videoInput)
            captureSession!.addInput(micInput)

            stillImageOutput = AVCapturePhotoOutput()

            if captureSession!.canAddOutput(stillImageOutput!) {
                captureSession!.addOutput(stillImageOutput!)

                let q = DispatchQueue(label: "sample buffer delegate", qos: .default)

                videoOutput.setSampleBufferDelegate(self, queue: q)
                if captureSession!.canAddOutput(videoOutput) {
                    captureSession!.addOutput(videoOutput)
                }

                // The class must declare AVCaptureAudioDataOutputSampleBufferDelegate
                // conformance (above) for this to take effect; a conditional
                // `self as?` cast evaluates to nil when the conformance is missing,
                // which leaves the audio output with no delegate at all.
                audioOutput.setSampleBufferDelegate(self, queue: q)
                if captureSession!.canAddOutput(audioOutput) {
                    captureSession!.addOutput(audioOutput)
                }

                captureSession!.startRunning()
            }
        }
    }
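In case it matters: I also activate an audio session before starting capture. I'm not sure AVCaptureSession even requires this, so treat it as a sketch of what I've been trying rather than something I know is needed:

    // Sketch: activate the shared audio session before capture starts.
    // I'm not certain AVCaptureSession actually requires this.
    let audioSession = AVAudioSession.sharedInstance()
    do {
        try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord, with: [.defaultToSpeaker])
        try audioSession.setActive(true)
    } catch {
        print("Audio session error: \(error.localizedDescription)")
    }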
The function that creates the asset writer:
func createWriter() {
    self.checkForAndDeleteFile()

    do {
        assetWriter = try AVAssetWriter(outputURL: movieURL() as URL, fileType: AVFileType.mov)
    } catch let error as NSError {
        print(error.localizedDescription)
        return
    }

    let videoSettings = [
        AVVideoCodecKey : AVVideoCodecType.h264,
        AVVideoWidthKey : Int(currentVideoDimensions!.height), // width/height are swapped: the camera reports landscape dimensions while I record portrait
        AVVideoHeightKey : Int(currentVideoDimensions!.width)
    ] as [String : Any]

    let assetWriterVideoInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: videoSettings)
    assetWriterVideoInput.expectsMediaDataInRealTime = true

    let sourcePixelBufferAttributesDictionary = [
        String(kCVPixelBufferPixelFormatTypeKey) : Int(kCVPixelFormatType_32BGRA),
        String(kCVPixelBufferWidthKey) : Int(currentVideoDimensions!.width),
        String(kCVPixelBufferHeightKey) : Int(currentVideoDimensions!.height),
        String(kCVPixelBufferOpenGLESCompatibilityKey) : true
    ] as [String : Any]

    assetWriterPixelBufferInput = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: assetWriterVideoInput,
                                                                       sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary)

    var channelLayout = AudioChannelLayout()
    memset(&channelLayout, 0, MemoryLayout<AudioChannelLayout>.size)
    channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo

    let audioSettings: [String: Any] = [
        AVFormatIDKey: kAudioFormatMPEG4AAC,
        AVSampleRateKey: 44100,
        AVNumberOfChannelsKey: 2,
        // pass along the stereo layout built above
        AVChannelLayoutKey: Data(bytes: &channelLayout, count: MemoryLayout<AudioChannelLayout>.size)
    ]

    assetWriterAudioInput = AVAssetWriterInput(mediaType: AVMediaType.audio, outputSettings: audioSettings)
    assetWriterAudioInput?.expectsMediaDataInRealTime = true

    if assetWriter!.canAdd(assetWriterVideoInput) {
        assetWriter!.add(assetWriterVideoInput)
    } else {
        print("cannot add \(assetWriterVideoInput)")
    }

    if assetWriter!.canAdd(assetWriterAudioInput!) {
        assetWriter!.add(assetWriterAudioInput!)
    } else {
        print("cannot add \(String(describing: assetWriterAudioInput))")
    }
}
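For reference, movieURL() and checkForAndDeleteFile() are trivial helpers; simplified versions (approximate, not my exact code) look like:

    // Approximate versions of the helpers referenced above.
    func movieURL() -> NSURL {
        let documentsDir = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
        return documentsDir.appendingPathComponent("output.mov") as NSURL
    }

    func checkForAndDeleteFile() {
        let url = movieURL() as URL
        if FileManager.default.fileExists(atPath: url.path) {
            try? FileManager.default.removeItem(at: url)
        }
    }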
When I record, I create the writer, start writing, and start the writer session:
videoIsRecording = true
createWriter()
assetWriter?.startWriting()
assetWriter?.startSession(atSourceTime: currentSampleTime!)
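One thing I'm not sure about is whether starting the session with the last currentSampleTime cached by the video callback is safe. An alternative I've seen is to start it lazily inside captureOutput, from the first buffer that arrives after recording begins (sketch; sessionStarted would be a new Bool property that isn't in my code above):

    // Sketch only: start the writer session from the first sample buffer
    // that arrives after recording begins. `sessionStarted` is hypothetical.
    if videoIsRecording && !sessionStarted {
        assetWriter?.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
        sessionStarted = true
    }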
When I stop the recording, it goes to another view to display the video:
assetWriter?.finishWriting(completionHandler: { [unowned self] () -> Void in

    let firstAsset = AVURLAsset(url: self.movieURL() as URL)

    guard let exporter = AVAssetExportSession(asset: firstAsset, presetName: AVAssetExportPresetHighestQuality) else { return }

    guard let vidComp = self.getVideoComposition(asset: firstAsset,
                                                 videoSize: CGSize(width: 1280,
                                                                   height: 720)) else {
        print("Unable to create video composition")
        return
    }

    print(vidComp.instructions)

    exporter.videoComposition = vidComp
    exporter.outputURL = self.movieURL() as URL
    exporter.outputFileType = AVFileType.mov

    exporter.exportAsynchronously {
        DispatchQueue.main.async {
            self.activityTextStatus.text = ""

            fileURLSenderVal = self.movieURL() as URL

            let manageCaptureVC = self.storyboard?.instantiateViewController(withIdentifier: "ManageCaptureVC") as! ManageCaptureVC
            manageCaptureVC.fileURL = fileURLSenderVal
            manageCaptureVC.imageCaptureMode = ManageCaptureVC.imageCaptureModes(rawValue: self.imageCaptureMode.rawValue)!
            manageCaptureVC.delegate = self
            self.present(manageCaptureVC, animated: true, completion: nil)
        }
    }
})
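As an aside, I suspect exporting back into the same movieURL() the asset was loaded from is its own problem, since AVAssetExportSession fails when a file already exists at its outputURL. If so, I'd need a separate destination, something like this (exportURL is hypothetical):

    // Hypothetical: export to a separate temporary file rather than
    // writing over the recording the exporter is still reading from.
    let exportURL = FileManager.default.temporaryDirectory.appendingPathComponent("export.mov")
    try? FileManager.default.removeItem(at: exportURL)
    exporter.outputURL = exportURL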
But this is where I'm stuck: where and how do I record my microphone's input?
// live output from camera
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {

    // Only the video path is handled here; audio presumably
    // needs its own branch somewhere in this method.
    if output == videoOutput {

        let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
        let cameraImage = CIImage(cvPixelBuffer: pixelBuffer!)

        var orientation = UIImageOrientation.right
        if isFrontCamera {
            orientation = UIImageOrientation.leftMirrored
        }

        image = UIImage(ciImage: cameraImage)

        if let ciImage = image?.ciImage {
            image = applyFilterAndReturnImage(ciImage: ciImage, orientation: orientation, currentCameraRes: currentCameraRes!)

            let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer)!
            self.currentVideoDimensions = CMVideoFormatDescriptionGetDimensions(formatDescription)
            self.currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer)

            if videoIsRecording && self.assetWriterPixelBufferInput?.assetWriterInput.isReadyForMoreMediaData == true {
                let filteredBuffer = buffer(from: image!)
                let success = self.assetWriterPixelBufferInput?.append(filteredBuffer!, withPresentationTime: self.currentSampleTime!)
                if success == false {
                    print("Pixel Buffer failed")
                }
            }

            DispatchQueue.main.async {
                imageView!.image = image
            }
        }
    }
}
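The closest thing to an approach I've pieced together: both data outputs call this same delegate method, so I would branch on which output the buffer came from and append audio sample buffers directly to assetWriterAudioInput, with no pixel-buffer detour needed for audio. Something like this at the top of captureOutput, though it's untested:

    // Untested sketch: handle the audio path before the video path.
    // An audio CMSampleBuffer can be appended to the writer input as-is.
    if output == audioOutput {
        if videoIsRecording,
            let audioInput = assetWriterAudioInput,
            audioInput.isReadyForMoreMediaData {
            if !audioInput.append(sampleBuffer) {
                print("Audio buffer failed")
            }
        }
        return
    }

Is that the right idea, or do I really need to go through an AudioBufferList as I've seen others do?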
Again, I've been at this on and off for months, and no documentation I've found helps. Thank you.