Swift - AudioUnit Audio Capture and Playback
AudioConst
import Foundation
import AudioUnit
//var WSBufferDuration: Int {
//    get {
//        var value = UserDefaults.standard.integer(forKey: "key_WSBufferDuration")
//        if value == 0 {
//            value = 16
//        }
//        return value
//    }
//    set { UserDefaults.standard.set(newValue, forKey: "key_WSBufferDuration") }
//}
let sampleMinValue = 64
var WSBufferDuration = 16
var audioPlayCacheBufferLen = 5
var WSmDataByteSize: Int = 4096
struct AudioConst {
    static let SampleRate: Int = 48000 // 44100
    static let Channels: UInt32 = 1
    static let InputBus: AudioUnitElement = 1
    static let OutputBus: AudioUnitElement = 0
    static let mBitsPerChannel: UInt32 = 16
}
PointerConvert
import Foundation
func bridge<T: AnyObject>(ptr: UnsafeRawPointer) -> T {
    return Unmanaged<T>.fromOpaque(ptr).takeUnretainedValue()
}
func bridge<T: AnyObject>(obj: T) -> UnsafeRawPointer {
    return UnsafeRawPointer(Unmanaged.passUnretained(obj).toOpaque())
}
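These two helpers move self across the C render-callback boundary through the inRefCon pointer without transferring ownership, so the object must be kept alive elsewhere. A minimal round-trip sketch, with a purely illustrative Foo class:
final class Foo {
    let name = "demo"
}

let foo = Foo()
// Object -> raw pointer; passUnretained means foo must outlive the pointer
let rawPtr: UnsafeRawPointer = bridge(obj: foo)
// Raw pointer -> object again, e.g. inside an AURenderCallback
let recovered: Foo = bridge(ptr: rawPtr)
print(recovered.name) // "demo"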
setupAudioSession
import AVFoundation
func setupAudioSession() {
    let session: AVAudioSession = AVAudioSession.sharedInstance()
    do {
        try session.setCategory(.playAndRecord, options: [.allowBluetooth, .allowBluetoothA2DP])
        try session.setPreferredSampleRate(Double(AudioConst.SampleRate))
        try session.setPreferredIOBufferDuration(Double(WSBufferDuration) / 1000.0)
        try session.setActive(true)
    } catch {
        print(error.localizedDescription)
    }
}
Setting WSBufferDuration adjusts how many sample frames are delivered per callback, i.e. the size of each audio packet. The exact value has to be worked out from the sample rate, bit depth, and channel count, as the sketch below shows.
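For the constants above (48 kHz, mono, 16-bit), a 16 ms preferred I/O buffer works out to roughly 768 frames, or about 1536 bytes per callback, which is why WSmDataByteSize = 4096 leaves plenty of headroom. A quick back-of-the-envelope check (variable names here are illustrative only):
// Approximate per-callback size for the configuration above
let framesPerCallback = AudioConst.SampleRate * WSBufferDuration / 1000            // 48000 * 16 / 1000 = 768
let bytesPerFrame = Int(AudioConst.mBitsPerChannel / 8) * Int(AudioConst.Channels) // 2 bytes: 16-bit, mono
let bytesPerCallback = framesPerCallback * bytesPerFrame                           // about 1536 bytes
print(framesPerCallback, bytesPerCallback)
Note that setPreferredIOBufferDuration is only a hint; the system may round the duration, so the actual callback size should be treated as approximate.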
Recording - QAAudioRecorder
import Foundation
import AudioUnit
import AVKit
protocol QAAudioRecordDelegate: AnyObject {
    func audioRecorder(recorder: QAAudioRecorder, didUpdate volume: Double)
    func audioRecorder(recorder: QAAudioRecorder, didRecieve buffer: AudioBufferList)
}
extension QAAudioRecordDelegate {
    func audioRecorder(recorder: QAAudioRecorder, didUpdate volume: Double) {}
    func audioRecorder(recorder: QAAudioRecorder, didRecieve buffer: AudioBufferList) {}
}
class QAAudioRecorder: NSObject {
    private var ioUnit: AudioComponentInstance? = nil
    weak var delegate: QAAudioRecordDelegate? = nil
    // Reusable buffer that AudioUnitRender fills on every input callback
    private var bufferList: AudioBufferList = AudioBufferList(
        mNumberBuffers: 1,
        mBuffers: AudioBuffer(
            mNumberChannels: UInt32(AudioConst.Channels),
            mDataByteSize: UInt32(WSmDataByteSize),
            mData: UnsafeMutableRawPointer.allocate(byteCount: WSmDataByteSize,
                                                    alignment: MemoryLayout<Int16>.alignment)))
    override init() {
        super.init()
        WSAppManager.shared.setupAudioSession()
        let _ = self.setupIoUnit()
    }
    private func setupIoUnit() -> Bool {
        var ioDes: AudioComponentDescription = AudioComponentDescription(
            componentType: kAudioUnitType_Output,
            componentSubType: kAudioUnitSubType_RemoteIO,
            componentManufacturer: kAudioUnitManufacturer_Apple,
            componentFlags: 0,
            componentFlagsMask: 0)
        guard let inputComp: AudioComponent = AudioComponentFindNext(nil, &ioDes) else {
            print("AudioComponentFindNext error")
            return false
        }
        if AudioComponentInstanceNew(inputComp, &ioUnit) != noErr {
            print("io AudioComponentInstanceNew error")
            return false
        }
        var ioFormat: AudioStreamBasicDescription = AudioStreamBasicDescription(
            mSampleRate: Float64(AudioConst.SampleRate),
            mFormatID: kAudioFormatLinearPCM,
            mFormatFlags: kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked,
            mBytesPerPacket: UInt32(2 * AudioConst.Channels),
            mFramesPerPacket: 1,
            mBytesPerFrame: UInt32(2 * AudioConst.Channels),
            mChannelsPerFrame: UInt32(AudioConst.Channels),
            mBitsPerChannel: AudioConst.mBitsPerChannel,
            mReserved: 0)
        // Output scope of the input bus: the format in which the mic data is delivered to us
        if AudioUnitSetProperty(self.ioUnit!, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, AudioConst.InputBus, &ioFormat, UInt32(MemoryLayout.size(ofValue: ioFormat))) != noErr {
            print("set StreamFormat error")
            return false
        }
        var value: UInt32 = 1
        if AudioUnitSetProperty(self.ioUnit!, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, AudioConst.InputBus, &value, UInt32(MemoryLayout.size(ofValue: value))) != noErr {
            print("can't enable input io")
            return false
        }
        // Approach 1: attach a render callback to the output bus and pull the mic data from inside it
//        if AudioUnitSetProperty(self.ioUnit!, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, AudioConst.OutputBus, &ioFormat, UInt32(MemoryLayout.size(ofValue: ioFormat))) != noErr {
//            print("set StreamFormat error")
//            return false
//        }
//        var recordCallback: AURenderCallbackStruct = AURenderCallbackStruct(inputProc: { (inRefCon, ioActionFlags, inTimeStamp, inBusNumber, inNumberFrames, ioData) -> OSStatus in
//            let bridgeSelf: QAAudioRecorder = bridge(ptr: UnsafeRawPointer(inRefCon))
//            let error: OSStatus = AudioUnitRender(bridgeSelf.ioUnit!, ioActionFlags, inTimeStamp, AudioConst.InputBus, inNumberFrames, ioData!)
//            if error == noErr {
//                bridgeSelf.updateVolumeValue(buffer: ioData!.pointee.mBuffers)
//                bridgeSelf.delegate?.audioRecorder(recorder: bridgeSelf, didRecieve: ioData!.pointee)
//            }
//            // Playback is actually happening on this bus, so zero the buffer to keep it silent
//            let mdata = ioData!.pointee.mBuffers.mData
//            memset(mdata, 0, Int(ioData!.pointee.mBuffers.mDataByteSize))
//            ioData?.pointee.mBuffers.mData = mdata
//            return noErr
//        }, inputProcRefCon: UnsafeMutableRawPointer(mutating: bridge(obj: self)))
//
//        if AudioUnitSetProperty(self.ioUnit!, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, AudioConst.OutputBus, &recordCallback, UInt32(MemoryLayout.size(ofValue: recordCallback))) != noErr {
//            print("SetRenderCallback error")
//            return false
//        }
        // Approach 2: register an input callback on the input bus (no playback involved)
        var recordCallback1: AURenderCallbackStruct = AURenderCallbackStruct(inputProc: { (inRefCon, ioActionFlags, inTimeStamp, inBusNumber, inNumberFrames, ioData) -> OSStatus in
            let bridgeSelf: QAAudioRecorder = bridge(ptr: UnsafeRawPointer(inRefCon))
            // Pull the captured samples from the input bus into our own buffer list
            let error: OSStatus = AudioUnitRender(bridgeSelf.ioUnit!, ioActionFlags, inTimeStamp, AudioConst.InputBus, inNumberFrames, &bridgeSelf.bufferList)
            if error == noErr {
                let bufferData: AudioBuffer = bridgeSelf.bufferList.mBuffers
                // Copy the samples out so the volume calculation works on a stable snapshot
                let rawPointer = UnsafeMutableRawPointer.allocate(byteCount: Int(bufferData.mDataByteSize), alignment: MemoryLayout<Int16>.alignment)
                if let mData = bufferData.mData {
                    rawPointer.copyMemory(from: mData, byteCount: Int(bufferData.mDataByteSize))
                    let tempBuf = AudioBuffer(mNumberChannels: bufferData.mNumberChannels, mDataByteSize: bufferData.mDataByteSize, mData: rawPointer)
                    bridgeSelf.updateVolumeValue(buffer: tempBuf)
                }
                bridgeSelf.delegate?.audioRecorder(recorder: bridgeSelf, didRecieve: bridgeSelf.bufferList)
                rawPointer.deallocate()
            }
            return noErr
        }, inputProcRefCon: UnsafeMutableRawPointer(mutating: bridge(obj: self)))
        if AudioUnitSetProperty(self.ioUnit!, kAudioOutputUnitProperty_SetInputCallback, kAudioUnitScope_Global, AudioConst.InputBus, &recordCallback1, UInt32(MemoryLayout.size(ofValue: recordCallback1))) != noErr {
            print("SetInputCallback error")
            return false
        }
        return true
    }
    private func updateVolumeValue(buffer: AudioBuffer) {
        var pcmAll: Int = 0
        let bufferPoint = UnsafeMutableBufferPointer<Int16>(buffer)
        let bufferArray = Array(bufferPoint)
        // Only look at 5 samples to save CPU
        let len = min(5, bufferArray.count)
        for index in 0..<len {
            let value = bufferArray[index]
            pcmAll += Int(value) * Int(value)
        }
        let mean: Double = Double(pcmAll) / Double(len)
        let volume: Double = 10 * log10(mean)
        guard volume.isFinite else { return }
        // print(volume)
        // 0-42 42-97
        delegate?.audioRecorder(recorder: self, didUpdate: volume)
//        var pcmAll: Double = 0
//        let bufferPoint = UnsafeMutableBufferPointer<Int16>(buffer)
//        let bufferArray = Array(bufferPoint)
//        // Only look at 5 samples to save CPU
//        let len = min(5, bufferArray.count)
//        for index in 0..<len {
//            let value = bufferArray[index]
//            pcmAll += Double(value) / 65536
//        }
//        let mean: Double = pcmAll / Double(len)
//        let volume: Double = 20 * log10(mean)
//        guard volume.isFinite else { return }
//        // print(volume)
//        // 0-42 42-97
//        delegate?.audioRecorder(recorder: self, didUpdate: volume)
    }
    public func startRecord() {
        var error = AudioUnitInitialize(self.ioUnit!)
        if error != noErr {
            print("AudioUnitInitialize error: \(error)")
        }
        error = AudioOutputUnitStart(self.ioUnit!)
        if error != noErr {
            print("AudioOutputUnitStart error: \(error)")
        }
    }
    public func stopRecord() {
        // Stop the unit before uninitializing it
        AudioOutputUnitStop(self.ioUnit!)
        AudioUnitUninitialize(self.ioUnit!)
    }
}
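A minimal usage sketch for the recorder on its own, here only metering the input level. The VolumeMeter class is illustrative, the default protocol extension supplies the buffer callback, and both delegate methods fire on the audio I/O thread, so they should stay lightweight:
final class VolumeMeter: QAAudioRecordDelegate {
    let recorder = QAAudioRecorder()

    func start() {
        recorder.delegate = self
        recorder.startRecord()
    }

    func stop() {
        recorder.stopRecord()
    }

    func audioRecorder(recorder: QAAudioRecorder, didUpdate volume: Double) {
        // Values as computed in updateVolumeValue
        print("input level: \(volume)")
    }
}
Because the delegate is weak, the VolumeMeter instance has to be retained by its owner for as long as recording runs.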
Playback - QAAudioUnitPlayer
import Foundation
import AudioUnit
import AVKit
class QAAudioUnitPlayer: NSObject {
    var ioUnit: AudioComponentInstance? = nil
    private(set) var isPlaying = false
    private var cacheBufferData = Data()
    private let semaphore = DispatchSemaphore(value: 1)
    // private var bufferList: AudioBufferList = AudioBufferList(mNumberBuffers: 1, mBuffers: AudioBuffer(mNumberChannels: UInt32(AudioConst.Channels), mDataByteSize: UInt32(WSmDataByteSize), mData: UnsafeMutableRawPointer.allocate(byteCount: WSmDataByteSize, alignment: 1)))
    override init() {
        super.init()
        WSAppManager.shared.setupAudioSession()
        let _ = self.setupIoUnit()
    }
    func addAudioData(data: Data) {
        guard isPlaying else { return }
        // print("packet size: \(data.count)")
        // Drop the oldest data when the cache grows beyond the configured limit
        if cacheBufferData.count > WSmDataByteSize * audioPlayCacheBufferLen {
            removeCacheData(count: cacheBufferData.count - WSmDataByteSize * audioPlayCacheBufferLen)
        }
        semaphore.wait()
        cacheBufferData.append(data)
        semaphore.signal()
    }
    private func setupIoUnit() -> Bool {
        var ioDes: AudioComponentDescription = AudioComponentDescription(
            componentType: kAudioUnitType_Output,
            componentSubType: kAudioUnitSubType_RemoteIO,
            componentManufacturer: kAudioUnitManufacturer_Apple,
            componentFlags: 0,
            componentFlagsMask: 0)
        guard let inputComp: AudioComponent = AudioComponentFindNext(nil, &ioDes) else {
            print("AudioComponentFindNext error")
            return false
        }
        if AudioComponentInstanceNew(inputComp, &ioUnit) != noErr {
            print("io AudioComponentInstanceNew error")
            return false
        }
        // EnableIO expects a UInt32 flag
        var value: UInt32 = 1
        if AudioUnitSetProperty(self.ioUnit!, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Output, AudioConst.OutputBus, &value, UInt32(MemoryLayout.size(ofValue: value))) != noErr {
            print("can't enable output io")
            return false
        }
        var ioFormat: AudioStreamBasicDescription = AudioStreamBasicDescription(
            mSampleRate: Float64(AudioConst.SampleRate),
            mFormatID: kAudioFormatLinearPCM,
            mFormatFlags: kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked,
            mBytesPerPacket: UInt32(2 * AudioConst.Channels),
            mFramesPerPacket: 1,
            mBytesPerFrame: UInt32(2 * AudioConst.Channels),
            mChannelsPerFrame: UInt32(AudioConst.Channels),
            mBitsPerChannel: AudioConst.mBitsPerChannel,
            mReserved: 0)
        // Input scope of the output bus: the format of the PCM we feed to the speaker
        if AudioUnitSetProperty(self.ioUnit!, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, AudioConst.OutputBus, &ioFormat, UInt32(MemoryLayout.size(ofValue: ioFormat))) != noErr {
            print("set StreamFormat error")
            return false
        }
        var playCallback: AURenderCallbackStruct = AURenderCallbackStruct(inputProc: { (inRefCon, ioActionFlags, inTimeStamp, inBusNumber, inNumberFrames, ioData) -> OSStatus in
            let bridgeSelf: QAAudioUnitPlayer = bridge(ptr: UnsafeRawPointer(inRefCon))
            let cacheData = bridgeSelf.cacheBufferData as NSData
            let bufferData: AudioBuffer = ioData!.pointee.mBuffers
            let len = Int(bufferData.mDataByteSize)
            if len <= cacheData.count {
                // Enough cached PCM: copy one hardware buffer's worth and drop it from the cache
                cacheData.getBytes(bufferData.mData!, range: NSMakeRange(0, len))
                bridgeSelf.removeCacheData(count: len)
            } else {
                // Not enough data yet: output silence
                let mdata = ioData!.pointee.mBuffers.mData
                memset(mdata, 0, Int(ioData!.pointee.mBuffers.mDataByteSize))
            }
            return noErr
        }, inputProcRefCon: UnsafeMutableRawPointer(mutating: bridge(obj: self)))
        if AudioUnitSetProperty(self.ioUnit!, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, AudioConst.OutputBus, &playCallback, UInt32(MemoryLayout.size(ofValue: playCallback))) != noErr {
            print("SetRenderCallback error")
            return false
        }
        return true
    }
    private func removeCacheData(count: Int) {
        guard count >= 0 else { return }
        semaphore.wait()
        guard cacheBufferData.count >= count else {
            semaphore.signal()
            return
        }
        let startIndex = cacheBufferData.startIndex
        let endIndex = cacheBufferData.index(startIndex, offsetBy: count)
        cacheBufferData.removeSubrange(startIndex..<endIndex)
        semaphore.signal()
    }
    public func startPlay() {
        isPlaying = true
        var error = AudioUnitInitialize(self.ioUnit!)
        if error != noErr {
            print("AudioUnitInitialize error: \(error)")
        }
        error = AudioOutputUnitStart(self.ioUnit!)
        if error != noErr {
            print("AudioOutputUnitStart error: \(error)")
        }
    }
    public func stopPlay() {
        isPlaying = false
        // Stop the unit before uninitializing it
        AudioOutputUnitStop(self.ioUnit!)
        AudioUnitUninitialize(self.ioUnit!)
        cacheBufferData.removeAll()
    }
}
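The two classes can be wired together for a simple record-and-play loopback: the recorder's delegate copies each captured buffer into Data and hands it to the player's cache, which the render callback then drains. A rough sketch under that assumption (the LoopbackDemo class and its wiring are illustrative only):
final class LoopbackDemo: QAAudioRecordDelegate {
    let recorder = QAAudioRecorder()
    let player = QAAudioUnitPlayer()

    func start() {
        recorder.delegate = self
        player.startPlay()
        recorder.startRecord()
    }

    func stop() {
        recorder.stopRecord()
        player.stopPlay()
    }

    func audioRecorder(recorder: QAAudioRecorder, didRecieve buffer: AudioBufferList) {
        let audioBuffer = buffer.mBuffers
        guard let mData = audioBuffer.mData else { return }
        // Hand the captured 16-bit PCM to the player's cache
        player.addAudioData(data: Data(bytes: mData, count: Int(audioBuffer.mDataByteSize)))
    }
}
Since both classes use the same sample rate, bit depth, and channel count from AudioConst, no format conversion is needed in between; with headphones connected this plays the microphone input back with roughly the latency implied by audioPlayCacheBufferLen.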