Hi,
I'm getting confused about the proper configuration of AVAudioEngine for 3D sound on a surround system.
My setup is an iPad Air 2 running iOS 9.2 with a 5.1 system connected through USB. I tried to set up a simple sound scene with one sound source changing its position relative to the listener. AVAudioEngine appears to treat the output as a stereo system: no matter where I place the sound source in the scene, there is no output through the rear speakers.
Am I missing a step required to enable 5.1 output with AVAudioEngine?
The most relevant parts of the code follow:
let kOutputNumberOfChannels = "outputNumberOfChannels"

func setupAudioSession() {
    let sess = AVAudioSession.sharedInstance()
    sess.addObserver(self, forKeyPath: kOutputNumberOfChannels,
                     options: NSKeyValueObservingOptions.New, context: nil)
    let nc = NSNotificationCenter.defaultCenter()
    nc.addObserver(self, selector: "audioRouteChanged:",
                   name: AVAudioSessionRouteChangeNotification,
                   object: nil)
    reconfigureSession()
    do {
        // Set the category before activating the session.
        try sess.setCategory(AVAudioSessionCategoryPlayback)
        try sess.setActive(true)
    } catch {
        NSLog("error when setting up audio session \(error)")
        return
    }
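    // Build the engine and the 3D environment node, and connect the
    // environment to the hardware output.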
    engine = AVAudioEngine()
    environment = AVAudioEnvironmentNode()
    environment.listenerPosition = AVAudioMake3DPoint(0, 0, 0)
    environment.listenerVectorOrientation =
        AVAudioMake3DVectorOrientation(AVAudioMake3DVector(0, 0, -1),
                                       AVAudioMake3DVector(0, 1, 0))
    engine.attachNode(environment)
    engine.connect(environment, to: engine.outputNode,
                   format: outputFormatForEnvironment())
    do {
        try engine.start()
    } catch {
        NSLog("Failed to start engine: \(error)")
        return
    }
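    // Create the player, load the test file into a buffer, and
    // schedule it as an endless loop.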
    player = AVAudioPlayerNode()
    let path = NSBundle.mainBundle()
        .pathForResource("sound", ofType: "caf")!
    do {
        let file =
            try AVAudioFile(forReading: NSURL(fileURLWithPath: path))
        let audioBuffer = AVAudioPCMBuffer(PCMFormat: file.processingFormat,
                                           frameCapacity: AVAudioFrameCount(file.length))
        try file.readIntoBuffer(audioBuffer)
        player.renderingAlgorithm = .SoundField
        engine.attachNode(player)
        setSoundSourcePosition()
        engine.connect(player, to: environment,
                       format: audioBuffer.format)
        player.scheduleBuffer(audioBuffer, atTime: nil,
                              options: .Loops,
                              completionHandler: nil)
        player.play()
    } catch {
        NSLog("player initialization failed: \(error)")
    }
}
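// Picks a connection format for the environment node that matches the
// channel count reported by the hardware output.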
func outputFormatForEnvironment() -> AVAudioFormat {
    let outputFormat = engine.outputNode.outputFormatForBus(0)
    let nChannels = outputFormat.channelCount
    let sampleRate = outputFormat.sampleRate
    var layoutTag: AudioChannelLayoutTag
    if nChannels > 5 {
        layoutTag = kAudioChannelLayoutTag_MPEG_5_1_A
    } else if nChannels < 3 {
        return AVAudioFormat(standardFormatWithSampleRate: sampleRate,
                             channels: 2)
    } else {
        layoutTag = kAudioChannelLayoutTag_Stereo
    }
    let layout = AVAudioChannelLayout(layoutTag: layoutTag)
    return AVAudioFormat(standardFormatWithSampleRate: sampleRate,
                         channelLayout: layout)
}
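// Asks the session for as many output channels as the current route allows.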
func reconfigureSession() {
    let sess = AVAudioSession.sharedInstance()
    let reqChannels = sess.maximumOutputNumberOfChannels
    if sess.outputNumberOfChannels != reqChannels {
        NSLog("Requesting \(reqChannels) output channels")
        do {
            try sess.setPreferredOutputNumberOfChannels(reqChannels)
        } catch {
            NSLog("request failed: \(error)")
        }
    }
    lblNumberOfChannels.text = String(format:
        "Current number of channels: %d",
        sess.outputNumberOfChannels)
}
// Used to manipulate the sound source position from the UI.
func setSoundSourcePosition() {
    let coords: [Float] = [-5.0, 0.0, 5.0]
    let x = coords[xSegment.selectedSegmentIndex]
    let y = coords[ySegment.selectedSegmentIndex]
    let z = coords[zSegment.selectedSegmentIndex]
    NSLog("Setting sound source position to (\(x), \(y), \(z))")
    player.position = AVAudioMake3DPoint(x, y, z)
}
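For what it's worth, a small diagnostic along these lines (a sketch, not part of the code above; logOutputRoute is my own name) can confirm whether the USB route actually exposes six channels to the session:

// Hypothetical helper: dump what the current route and engine report.
func logOutputRoute() {
    let sess = AVAudioSession.sharedInstance()
    for port in sess.currentRoute.outputs {
        // Each output port reports its type and its channel descriptions.
        NSLog("port \(port.portType): \(port.channels?.count ?? 0) channels")
    }
    NSLog("session output channels: \(sess.outputNumberOfChannels)")
    NSLog("outputNode format: \(engine.outputNode.outputFormatForBus(0))")
}

If the port only ever reports two channels, the limitation would be in the route/session configuration rather than in the engine graph.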