When a Mac is connected to an audio interface, I can use AVAudioEngine to obtain audio data from the MIC and OTG channels of the audio interface. Part of the code is as follows:
// Allocate a 2-channel ring buffer sized for 512 render cycles of `framesPerSample` frames.
// NOTE(review): `framesPerSample!` and `nominalSampleRate!` will crash if nil — consider
// folding them into the guard below. Left as-is here to preserve control flow.
buffer = CircularBuffer<Float>(channelCount: 2, capacity: Int(framesPerSample!) * 512)
// Route the engine's input to the selected hardware device.
engine.setInputDevice(device)
// Non-interleaved Float32 at the device's native rate; 2 channels to match the ring buffer.
format = AVAudioFormat(commonFormat: .pcmFormatFloat32,
                       sampleRate: device.nominalSampleRate!,
                       channels: 2,
                       interleaved: false)!
engine.connect(engine.inputNode, to: engine.mainMixerNode, format: format)

// Build a channel map with one slot per hardware input channel; -1 leaves a slot unmapped.
// An even hardware channel index is routed to output slot 0, an odd one to slot 1.
let channels = device.channels(scope: .input)
var channelArray = [Int32](repeating: -1, count: channels.intValue)
if index % 2 == 0 {
    channelArray[0] = index.int32Value
} else {
    channelArray[1] = index.int32Value
}

guard let audioUnit = engine.inputNode.audioUnit else {
    return
}
audioUnit.setPropertyChannelMap(&channelArray, .input, 1)
// Fix: use the `audioUnit` bound by the guard above instead of force-unwrapping
// `engine.inputNode.audioUnit!` a second time.
checkErr(AudioUnitAddRenderNotify(audioUnit,
                                  renderCallback,
                                  Unmanaged.passUnretained(self).toOpaque()))
/// Post-render notify callback: after the input unit finishes a render cycle, copy the
/// rendered frames into the owner's ring buffer and update volume/timestamp bookkeeping.
/// Must return `noErr` in all paths so the audio unit keeps rendering.
let renderCallback: AURenderCallback = {
    (inRefCon: UnsafeMutableRawPointer,
     ioActionFlags: UnsafeMutablePointer<AudioUnitRenderActionFlags>,
     inTimeStamp: UnsafePointer<AudioTimeStamp>,
     inBusNumber: UInt32,
     inNumberFrames: UInt32,
     ioData: UnsafeMutablePointer<AudioBufferList>?) -> OSStatus in
    // Fix: AudioUnitRenderActionFlags is an OptionSet — other bits (e.g.
    // unitRenderAction_OutputIsSilence) can be set alongside postRender, so an
    // equality test would silently skip those cycles. Test membership instead.
    if ioActionFlags.pointee.contains(.unitRenderAction_PostRender) {
        // Recover the owning Engine; it was passed unretained through inRefCon.
        let inSelf = Unmanaged<Engine>.fromOpaque(inRefCon).takeUnretainedValue()
        // Fix: guard the optional buffer list instead of force-unwrapping it.
        guard let ioData = ioData else { return noErr }
        let sampleTime = inTimeStamp.pointee.mSampleTime
        let start = sampleTime.int64Value
        let end = start + Int64(inNumberFrames)
        // On a ring-buffer write failure, drop this cycle but keep the unit running.
        if inSelf.buffer.write(from: ioData, start: start, end: end) != .noError {
            return noErr
        }
        inSelf.getVolumeIntensity(from: ioData, inNumberFrames: inNumberFrames)
        inSelf.lastSampleTime = sampleTime
    }
    return noErr
}
If the audio interface is used as an output device to play audio, how can I use AVAudioEngine to obtain the audio data from each output channel of the audio interface?
Nick
642 reputation · 3 gold badges · 10 silver badges · 32 bronze badges
Use `AudioHardwareCreateProcessTap()` (Core Audio, macOS 14.2+): it creates a tap on a device's (or process's) output audio, which you can then read from an aggregate device's I/O proc to capture each output channel.