AVSampleRateKey specifies the desired sample rate when creating audio files with AVFoundation. However, setting it to a value that differs from the engine's rendering format (for example 48000 when the source and engine run at 44100) causes AVAudioFile writes to fail or crash, because the output file's sample rate must match the format of the buffers being written to it.
/// Renders `avAudioFile` offline through the audio engine with `effect` applied,
/// writing the processed audio to the file URL provided by `PersistenceManager`.
///
/// - Parameter effect: The effect to apply; `effect.rate` scales the expected
///   output length (e.g. a faster rate produces a shorter file).
/// - Throws: Errors from enabling manual rendering mode, starting the engine,
///   creating the output file, rendering, or writing rendered buffers.
func manualAudioRender(effect: Effect) throws {
    prepareAudioEngine(forEffect: effect)

    // Enable offline manual rendering *before* starting the engine and
    // scheduling playback (the order Apple's offline-rendering sample uses).
    // The render format mirrors the source file's processing format.
    try audioEngine.enableManualRenderingMode(.offline,
                                              format: avAudioFile.processingFormat,
                                              maximumFrameCount: maxNumberOfFrames)
    try audioEngine.start()
    audioPlayerNode.scheduleFile(avAudioFile, at: nil)
    audioPlayerNode.play()

    // Tear the engine down on every exit path, including the error return
    // below (the original leaked a running engine when rendering failed).
    defer {
        audioPlayerNode.stop()
        audioEngine.stop()
        audioEngine.disableManualRenderingMode()
    }

    let url = try PersistenceManager.shared.urlForEffect(named: effect.name)

    // The output file's sample rate MUST match the engine's manual rendering
    // format. Overriding AVSampleRateKey with an unrelated constant (e.g.
    // 48000 while rendering at 44100) makes AVAudioFile.write(from:) fail,
    // so derive the rate from the rendering format instead of hard-coding it.
    var recordSettings = avAudioFile.fileFormat.settings
    recordSettings[AVSampleRateKey] = audioEngine.manualRenderingFormat.sampleRate

    let outputFile = try AVAudioFile(forWriting: url, settings: recordSettings)

    // Force-unwrap is safe: the format and capacity come straight from the
    // engine's own manual rendering configuration.
    let buffer = AVAudioPCMBuffer(pcmFormat: audioEngine.manualRenderingFormat,
                                  frameCapacity: audioEngine.manualRenderingMaximumFrameCount)!

    // Time-stretching by `effect.rate` changes the rendered length relative
    // to the source file.
    let outputFileLength = Int64(Double(avAudioFile.length) / effect.rate)

    while audioEngine.manualRenderingSampleTime < outputFileLength {
        let remainingFrames = AVAudioFrameCount(outputFileLength - audioEngine.manualRenderingSampleTime)
        let framesToRender = min(buffer.frameCapacity, remainingFrames)

        let status = try audioEngine.renderOffline(framesToRender, to: buffer)
        switch status {
        case .success:
            try outputFile.write(from: buffer)
        case .error:
            print("Error rendering offline audio")
            return
        case .insufficientDataFromInputNode, .cannotDoInCurrentContext:
            // Not expected in .offline mode; retry on the next iteration.
            break
        @unknown default:
            break
        }
    }
}