I have a Swift version that is working with two channels by setting the channel map property. I haven't tested it on a full multichannel system, but the principles should be the same.
import AVFoundation
import AudioToolbox

let engine = AVAudioEngine()
let player = AVAudioPlayerNode()
func testCode() {
    // get the output hardware format
    let output = engine.outputNode
    let outputHWFormat = output.outputFormatForBus(0)

    // connect the mixer to the output
    let mixer = engine.mainMixerNode
    engine.connect(mixer, to: output, format: outputHWFormat)

    // then work on the player end by first attaching the player to the engine
    engine.attachNode(player)

    // find the audio file
    guard let audioFileURL = NSBundle.mainBundle().URLForResource("tones", withExtension: "wav") else {
        fatalError("audio file is not in bundle.")
    }
    var songFile: AVAudioFile?
    do {
        songFile = try AVAudioFile(forReading: audioFileURL)
        print(songFile!.processingFormat)

        // connect the player to the mixer using the file's processing format
        engine.connect(player, to: mixer, format: songFile!.processingFormat)
    } catch {
        fatalError("cannot create AVAudioFile \(error)")
    }
    let channelMap: [Int32] = [0, 1] // left out left, right out right
    //let channelMap: [Int32] = [1, 0] // right out left, left out right
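    // Untested guess for a multichannel interface: the map should have one
    // entry per hardware output channel, each holding the source channel
    // index (or -1 for no source), so something like this would send the
    // file's left/right to outputs 3 and 4:
    //let channelMap: [Int32] = [-1, -1, 0, 1]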
    let propSize: UInt32 = UInt32(channelMap.count) * UInt32(sizeof(Int32))
    let code: OSStatus = AudioUnitSetProperty((engine.inputNode?.audioUnit)!,
                                              kAudioOutputUnitProperty_ChannelMap,
                                              kAudioUnitScope_Global,
                                              1,
                                              channelMap,
                                              propSize)
    print(code) // noErr (0) means the channel map was set
    do {
        try engine.start()
    } catch {
        fatalError("Could not start engine. error: \(error).")
    }
    player.scheduleFile(songFile!, atTime: nil) {
        // called once the scheduled file has finished playing
        print("done")
    }
    player.play()
}
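The snippet above is Swift 2. On a current toolchain the channel-map call itself translates roughly like this (a minimal, untested sketch; it assumes a recent SDK where engine.inputNode is non-optional, and it replaces sizeof with MemoryLayout):

    let channelMap: [Int32] = [0, 1]
    if let audioUnit = engine.inputNode.audioUnit {
        let propSize = UInt32(channelMap.count * MemoryLayout<Int32>.size)
        let status = AudioUnitSetProperty(audioUnit,
                                          kAudioOutputUnitProperty_ChannelMap,
                                          kAudioUnitScope_Global,
                                          1,
                                          channelMap,
                                          propSize)
        print(status) // expect noErr (0)
    }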