I would like to know how to get the bytes of an AVAudioCompressedBuffer and then reconstruct an AVAudioCompressedBuffer from the bytes.
The code below takes an AVAudioPCMBuffer, compresses it with AVAudioConverter into an AVAudioCompressedBuffer (iLBC), extracts the bytes, then converts those bytes back into an AVAudioCompressedBuffer (iLBC), decompresses that back to an AVAudioPCMBuffer, and plays the buffer. Something goes wrong in the conversion from AVAudioCompressedBuffer to bytes and back: if I skip that step, the audio plays as expected.
The contents of compressedBuffer and compressedBuffer2 even appear to match, which leads me to believe that the way I am getting the bytes out (and putting them back) is not quite right.
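For reference, the comparison is essentially a byte-for-byte check of the two buffers' data, roughly like this (a sketch; the 152-byte count assumes 4 packets × 38 bytes, matching the sizes used below):

// Sketch: compare the raw data of the two compressed buffers byte for byte
let sameBytes = memcmp(compressedBuffer.data, compressedBuffer2.data, 152) == 0
print("compressed data matches: \(sameBytes)")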
// Do iLBC Compression
let compressedBuffer: AVAudioCompressedBuffer = self.compress(inBuffer: buffer)
// packetCapacity: 4, maximumPacketSize: 38
// self.player.scheduleBuffer(self.uncompress(inBuffer: compressedBuffer)) // This works perfectly
// Convert Buffer to Byte Array
let pointer1: UnsafeMutablePointer<UInt8> = compressedBuffer.data.bindMemory(to: UInt8.self, capacity: 152)
var audioByteArray = [UInt8](repeating: 0, count: 152)
pointer1.withMemoryRebound(to: UInt8.self, capacity: 152) { srcByteData in
    audioByteArray.withUnsafeMutableBufferPointer {
        $0.baseAddress!.initialize(from: srcByteData, count: 152)
    }
}
// Convert Byte Array to Buffer
let compressedBuffer2: AVAudioCompressedBuffer = AVAudioCompressedBuffer(format: AVAudioFormat(streamDescription: &self.compressedFormatDescriptor)!, packetCapacity: 4, maximumPacketSize: 38)
let destination = compressedBuffer2.data
audioByteArray.withUnsafeBufferPointer {
    let src = UnsafeRawPointer($0.baseAddress!).bindMemory(to: UInt8.self, capacity: 152)
    destination.copyMemory(from: src, byteCount: 152)
}
// Do iLBC Decompression
let uncompressedBuffer: AVAudioPCMBuffer = self.uncompress(inBuffer: compressedBuffer2)
// Works perfectly with inBuffer: compressedBuffer
// Play Buffer
self.player.scheduleBuffer(uncompressedBuffer)
// Plays fine when 'let uncompressedBuffer: AVAudioPCMBuffer = self.uncompress(inBuffer: compressedBuffer)'
The compress and uncompress functions:
let format = AVAudioFormat.init(commonFormat: AVAudioCommonFormat.pcmFormatFloat32, sampleRate: 16000, channels: 1, interleaved: false)
var compressedFormatDescriptor = AudioStreamBasicDescription(mSampleRate: 8000, mFormatID: kAudioFormatiLBC, mFormatFlags: 0, mBytesPerPacket: 0, mFramesPerPacket: 0, mBytesPerFrame: 0, mChannelsPerFrame: 1, mBitsPerChannel: 0, mReserved: 0)
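// Note: compressedFormat is not shown here; it is assumed to be built from this descriptor elsewhere (e.g. in init),
// roughly: self.compressedFormat = AVAudioFormat(streamDescription: &self.compressedFormatDescriptor)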
func compress(inBuffer: AVAudioPCMBuffer) -> AVAudioCompressedBuffer {
    let inputFormat = inBuffer.format
    let converter = AVAudioConverter(from: inputFormat, to: self.compressedFormat!)
    // 4 iLBC packets of up to 38 bytes each
    let outBuffer = AVAudioCompressedBuffer(format: self.compressedFormat!, packetCapacity: 4, maximumPacketSize: 38)
    let inputBlock: AVAudioConverterInputBlock = { inNumPackets, outStatus in
        outStatus.pointee = AVAudioConverterInputStatus.haveData
        return inBuffer
    }
    var error: NSError?
    converter!.convert(to: outBuffer, error: &error, withInputFrom: inputBlock)
    return outBuffer
}
func uncompress(inBuffer: AVAudioCompressedBuffer) -> AVAudioPCMBuffer {
    let inputFormat = inBuffer.format
    let outputFormat = format
    let converter = AVAudioConverter(from: inputFormat, to: outputFormat!)
    let inputBlock: AVAudioConverterInputBlock = { inNumPackets, outStatus in
        outStatus.pointee = AVAudioConverterInputStatus.haveData
        return inBuffer
    }
    var error: NSError?
    // 1600 frames = 100 ms at 16 kHz
    let outBuffer: AVAudioPCMBuffer = AVAudioPCMBuffer(pcmFormat: outputFormat!, frameCapacity: 1600)!
    converter!.convert(to: outBuffer, error: &error, withInputFrom: inputBlock)
    return outBuffer
}