0

The AVSampleRateKey is used to specify the desired sample rate when working with audio using AVFoundation. However, setting AVSampleRateKey to certain values might lead to crashes, particularly when using values other than 44100.

  func manualAudioRender(effect: Effect) throws {
        prepareAudioEngine(forEffect: effect)
        
        audioPlayerNode.scheduleFile(avAudioFile, at: nil)
        try audioEngine.enableManualRenderingMode(.offline, format: avAudioFile.processingFormat, maximumFrameCount: maxNumberOfFrames)
        
        try audioEngine.start()
        audioPlayerNode.play()
        
        let outputFile: AVAudioFile
        let url = try PersistenceManager.shared.urlForEffect(named: effect.name)
        var recordSettings = avAudioFile.fileFormat.settings
        
        print("------>>>. \(recordSettings)")
        
        recordSettings[AVSampleRateKey] = 48000.0
        
        print("------>>>. \(recordSettings)")
      
//        var bitRate = audioBitrate
//        var sampleRate = 44100.0
//        var channels = 1
//
//        let settings: [String: AnyObject] = [
//                    AVFormatIDKey : NSNumber(value: Int32(kAudioFormatAppleLossless) as Int32),
//                    AVEncoderAudioQualityKey: AVAudioQuality.max.rawValue as AnyObject,
//                    AVEncoderBitRateKey: bitRate as AnyObject,
//                    AVNumberOfChannelsKey: channels as AnyObject,
//                    AVSampleRateKey: sampleRate as AnyObject
//                ]
        
        
        outputFile = try AVAudioFile(forWriting: url, settings: recordSettings)
        
        let buffer = AVAudioPCMBuffer(pcmFormat: audioEngine.manualRenderingFormat, frameCapacity: audioEngine.manualRenderingMaximumFrameCount)!
        
        // Adjust the file size based on the effect rate
        let outputFileLength = Int64(Double(avAudioFile.length) / effect.rate)
        
        while audioEngine.manualRenderingSampleTime < outputFileLength {
            let framesToRender = min(buffer.frameCapacity, AVAudioFrameCount(outputFileLength - audioEngine.manualRenderingSampleTime))
            let status = try audioEngine.renderOffline(framesToRender, to: buffer)
            switch status {
            case .success:
                try outputFile.write(from: buffer)
            case .error:
                print("Error rendering offline audio")
                return
            default:
                break
            }
        }
        
        audioPlayerNode.stop()
        audioEngine.stop()
        audioEngine.disableManualRenderingMode()
    }
Rezwan
  • 1
  • 1

1 Answers1

0

As pointed out in this discussion, you can’t convert sample rate with AVAudioEngine nodes or taps directly.

When playing buffers, there is an implicit assumption that the buffers are at the same sample rate as the node's output format.

You need to use AVAudioConverter. An example is found in this answer.

soundflix
  • 928
  • 9
  • 22