Question
How do I access additional audio hardware outputs other than 1-2 using AVFoundation? I'm writing Swift code for a macOS app which plays MP3 files through various output devices (USB interface, Dante, Soundflower), which looks like the following:
// Create a player for the local media file (Swift 2-era initializer label `URL:`).
myPlayer = AVPlayer(URL: myFilePathURL)
// Route playback to a specific Core Audio device via its unique ID
// (`audioOutputDeviceUniqueID` is a macOS-only AVPlayer property).
// NOTE(review): `myAudioOutputDevices` is presumably an enumerated device
// list built elsewhere — index 1 hard-codes the second device; confirm.
myPlayer.audioOutputDeviceUniqueID = myAudioOutputDevices[1].deviceUID()
// Start playback on the selected device.
myPlayer.play()
But I'm not sure how to play the audio file to channels other than 1-2. For instance, I'd like to play an MP3 to outputs 3-4.
Can I do this through AVPlayer? Or do I need to look elsewhere? Maybe AVAudioEngine along with mixer nodes? I looked through the AVAudioEngine examples, and couldn't find hardware channels referenced anywhere. Thanks for any help!
Answer 1:
I've iterated this code over time - but the basic outline will work. Here is my current setup code to send audio to multi channel setups. I'm currently doing this with Dante Virtual Soundcard with 16 stereo instanced streams using the code below:
/// Builds the engine graph (player -> mixer -> output) at the hardware's own
/// format and installs a channel map on the output audio unit so the stereo
/// source lands on the hardware pair starting at `leftChannel`.
/// Relies on instance state: `engine`, `player`, `mixer`, `outputHWFormat`,
/// `numberOfStreams` (stereo stream count), `leftChannel` (1-based index).
func setupAudioPath(){
    //print("setupAudioPath")

    // get output hardware format
    let output = engine.outputNode
    outputHWFormat = output.outputFormat(forBus: 0)
    //print("outputHWFormat = \(outputHWFormat)")
    //print("outputHWFormat.channelCount = \(outputHWFormat.channelCount)")

    // connect mixer to output
    mixer = engine.mainMixerNode

    // then work on the player end by first attaching the player to the engine
    engine.attach(player)
    engine.connect(mixer, to: output, format: outputHWFormat)

    // One slot per hardware output channel; -1 leaves that channel silent.
    // (fixed: `sint32` is not a Swift type — the element type is Int32)
    var channelMap: [Int32] = []
    let numOfChannels: UInt32 = UInt32(numberOfStreams) * UInt32(2) // number of output device channels
    let mapSize: UInt32 = numOfChannels * UInt32(MemoryLayout<Int32>.size)

    // `0..<` instead of `0...(n-1)`: the closed range traps when n == 0
    for _ in 0..<numOfChannels {
        channelMap.append(-1)
    }

    // Per Apple's channel-map convention: channelMap[deviceOutputChannel] = sourceChannel.
    // Route source channels 0/1 to the hardware pair (leftChannel is 1-based).
    channelMap[leftChannel - 1] = 0
    channelMap[leftChannel] = 1
    //print(channelMap)
    //print("number of channels in my map: \(channelMap.count)")

    // NOTE(review): global scope + element 1 is what the author reports working
    // against Dante Virtual Soundcard — confirm before changing either value.
    let code: OSStatus = AudioUnitSetProperty((engine.outputNode.audioUnit)!,
                                              kAudioOutputUnitProperty_ChannelMap,
                                              kAudioUnitScope_Global,
                                              1,
                                              channelMap,
                                              mapSize)
    print("osstatus = \(code)")
}
Answer 2:
I have a Swift version that is working with 2 channels, setting the channel-map property. I haven't tested it with a full multichannel system, but the principles should be the same.
// Shared engine/player instances referenced by testCode() below; declared at
// type scope so they presumably outlive the call and playback can continue.
let engine = AVAudioEngine()
let player = AVAudioPlayerNode()
/// Demonstrates routing a bundled stereo file through AVAudioEngine with an
/// explicit output channel map. Modernized from Swift 2 API names
/// (`outputFormatForBus`, `attachNode`, `NSBundle`, `sizeof`) to the Swift 3+
/// forms that Answer 1 already uses; `sint32` was not a valid Swift type.
func testCode(){
    // get output hardware format so the mixer -> output link carries the
    // device's full channel count
    let output = engine.outputNode
    let outputHWFormat = output.outputFormat(forBus: 0)

    // connect mixer to output
    let mixer = engine.mainMixerNode
    engine.connect(mixer, to: output, format: outputHWFormat)

    // then work on the player end by first attaching the player to the engine
    engine.attach(player)

    // find the audio file in the app bundle
    guard let audioFileURL = Bundle.main.url(forResource: "tones", withExtension: "wav") else {
        fatalError("audio file is not in bundle.")
    }

    var songFile: AVAudioFile?
    do {
        songFile = try AVAudioFile(forReading: audioFileURL)
        print(songFile!.processingFormat)
        // connect player to mixer at the file's processing format
        engine.connect(player, to: mixer, format: songFile!.processingFormat)
    } catch {
        fatalError("cannot create AVAudioFile \(error)")
    }

    // channelMap[deviceOutputChannel] = sourceChannel
    let channelMap: [Int32] = [0, 1] //left out left, right out right
    //let channelMap: [Int32] = [1, 0] //right out left, left out right
    let propSize: UInt32 = UInt32(channelMap.count) * UInt32(MemoryLayout<Int32>.size)

    // NOTE(review): the original set the map on engine.inputNode, but for
    // routing *playback* the map belongs on the output unit — Answer 1's
    // working code does the same. Confirm on a multichannel device.
    let code: OSStatus = AudioUnitSetProperty(engine.outputNode.audioUnit!,
                                              kAudioOutputUnitProperty_ChannelMap,
                                              kAudioUnitScope_Global,
                                              1,
                                              channelMap,
                                              propSize)
    print(code)

    do {
        try engine.start()
    } catch {
        fatalError("Could not start engine. error: \(error).")
    }

    // Completion fires when the file has been fully scheduled/played;
    // the original re-triggers play() there, looping the demo tone.
    player.scheduleFile(songFile!, at: nil) {
        print("done")
        self.player.play()
    }
    player.play()
}
Source: https://stackoverflow.com/questions/38601656/accessing-multiple-audio-hardware-outputs-channels-using-avfoundation-and-swift