I have the following code for generating an audio tone of a given frequency and duration. It's loosely based on this answer for doing the same thing on Android (thanks, @Steve Pomeroy):
https://stackoverflow.com/a/3731075/973364
import Foundation
import AVFoundation

class AudioUtil {

    class func play(frequency: Int, durationMs: Int) {
        let sampleRateHz: Double = 8000.0
        let numberOfSamples = Int(Double(durationMs) / 1000 * sampleRateHz)
        // Angular step per sample: 2π * frequency / sampleRate.
        let factor: Double = 2 * Double.pi * Double(frequency) / sampleRateHz

        // Generate an array of Doubles.
        var samples = [Double](repeating: 0.0, count: numberOfSamples)
        for i in 0..<numberOfSamples {
            samples[i] = sin(factor * Double(i))
        }

        // Convert to a 16-bit PCM byte array (little-endian).
        var index = 0
        var sound = [UInt8](repeating: 0, count: 2 * numberOfSamples)
        for doubleValue in samples {
            // Scale to maximum amplitude. Int16.max is 32,767.
            let value = Int16(doubleValue * Double(Int16.max))
            // In 16-bit PCM WAV data, the first byte is the low-order byte.
            sound[index] = UInt8(truncatingIfNeeded: value)
            sound[index + 1] = UInt8(truncatingIfNeeded: value >> 8)
            index += 2
        }

        let format = AVAudioFormat(commonFormat: .pcmFormatInt16, sampleRate: sampleRateHz, channels: 1, interleaved: false)!
        // Note: these bytes are never copied into pcmBuffer below.
        let buffer = AudioBuffer(mNumberChannels: 1, mDataByteSize: UInt32(sound.count), mData: &sound)
        let pcmBuffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: AVAudioFrameCount(numberOfSamples))!

        let audioEngine = AVAudioEngine()
        let audioPlayer = AVAudioPlayerNode()
        audioEngine.attach(audioPlayer)
        // Runtime error occurs here:
        audioEngine.connect(audioPlayer, to: audioEngine.mainMixerNode, format: format)
        try? audioEngine.start()

        audioPlayer.play()
        audioPlayer.scheduleBuffer(pcmBuffer, at: nil, options: [], completionHandler: nil)
    }
}
The error I get at runtime when calling connect() on the AVAudioEngine is this:
ERROR: [0x3bfcb9dc] AVAudioNode.mm:521: AUSetFormat: error -10868
*** Terminating app due to uncaught exception 'com.apple.coreaudio.avfaudio', reason: 'error -10868'
Is what I'm generating not really AVAudioCommonFormat.pcmFormatInt16?
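For what it's worth, this little probe prints the mixer's native output format next to mine so the two can be compared. (Error -10868 appears to be kAudioUnitErr_FormatNotSupported, which is why I suspect a format mismatch; that interpretation is my assumption, not something the log states.)

import AVFoundation

// Compare the mixer's native output format with the Int16 format I'm building.
let engine = AVAudioEngine()
let mixerFormat = engine.mainMixerNode.outputFormat(forBus: 0)
let myFormat = AVAudioFormat(commonFormat: .pcmFormatInt16, sampleRate: 8000.0, channels: 1, interleaved: false)!
print("Mixer format: \(mixerFormat)")  // reports a 32-bit float format here
print("My format: \(myFormat)")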
[EDIT]
Here's another, simpler attempt that uses a single buffer in pcmFormatFloat32. There's no error, but there's no sound either.
import AVFoundation

class AudioManager: NSObject {

    let audioPlayer = AVAudioPlayerNode()

    lazy var audioEngine: AVAudioEngine = {
        let engine = AVAudioEngine()
        // Must happen only once.
        engine.attach(self.audioPlayer)
        return engine
    }()

    func play(frequency: Int, durationMs: Int, completionBlock: AVAudioNodeCompletionHandler?) {
        let mixer = audioEngine.mainMixerNode
        let sampleRateHz = Float(mixer.outputFormat(forBus: 0).sampleRate)
        let numberOfSamples = AVAudioFrameCount(Float(durationMs) / 1000 * sampleRateHz)

        let format = AVAudioFormat(commonFormat: .pcmFormatFloat32, sampleRate: Double(sampleRateHz), channels: 1, interleaved: false)!
        let buffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: numberOfSamples)!
        buffer.frameLength = numberOfSamples

        // Generate a sine wave at half amplitude.
        for i in 0..<Int(buffer.frameLength) {
            let val = sinf(Float(frequency) * Float(i) * 2 * Float.pi / sampleRateHz)
            // log.debug("val: \(val)")
            buffer.floatChannelData![0][i] = val * 0.5
        }

        // Audio engine
        audioEngine.connect(audioPlayer, to: mixer, format: format)
        log.debug("Sample rate: \(sampleRateHz), samples: \(numberOfSamples), format: \(format)")

        do {
            try audioEngine.start()
        } catch {
            log.debug("Error: \(error)")
        }

        // Play the player and schedule the buffer.
        audioPlayer.play()
        audioPlayer.scheduleBuffer(buffer, at: nil, options: [], completionHandler: completionBlock)
    }
}
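In case the calling context matters, this is roughly how I drive it. The view controller and the 440 Hz values are just illustrative; only play(frequency:durationMs:completionBlock:) comes from the class above.

import UIKit

class ToneViewController: UIViewController {

    // Held as a property so the manager (and its engine) isn't deallocated while playing.
    let audioManager = AudioManager()

    override func viewDidLoad() {
        super.viewDidLoad()
        // 440 Hz tone for half a second.
        audioManager.play(frequency: 440, durationMs: 500) {
            print("Tone finished")
        }
    }
}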
Thanks: Thomas Royal (http://www.tmroyal.com/playing-sounds-in-swift-audioengine.html)