问题描述
我正在尝试在 iOS 中使用 Audio Unit 播放音频缓冲区。音频缓冲区来自一个 C 库,它从 Playstation 4 接收音频并使用 Opus 对其进行解码。音频缓冲区的格式为 PCM int16_t。
使用 Audio Unit 和 TPCircularBuffer,我已经实现了声音播放。然而,播放出的声音严重失真,非常不干净。
这是我的音频单元类的设置
init() {
    // BUG FIX: the original allocated only 960 bytes. At 48 kHz stereo
    // Int16 (4 bytes/frame, see setupAudio's ASBD) that is ~5 ms of audio —
    // the producer and the render callback constantly collide, which is a
    // direct contributor to the distortion. Allocate ~0.5 s instead
    // (48000 frames/s * 4 bytes/frame / 2 = 96000 bytes).
    _TPCircularBufferInit(&buffer, 96000, MemoryLayout<TPCircularBuffer>.size)
    setupAudio()
}
/// Appends the raw PCM bytes held by `data` to the circular buffer,
/// where the render callback will consume them.
func play(data: NSMutableData) {
    let byteCount = UInt32(data.length)
    TPCircularBufferProduceBytes(&buffer, data.bytes, byteCount)
}
/// Creates and starts a RemoteIO output unit that pulls interleaved
/// 16-bit stereo PCM at 48 kHz (the Opus decoder's output) from the
/// circular buffer via `performPlayback`.
private func setupAudio() {
    do {
        // BUG FIX: set a playback category before activating the session;
        // the default category may route/duck audio unexpectedly.
        try AVAudioSession.sharedInstance().setCategory(.playback, options: [])
        try AVAudioSession.sharedInstance().setActive(true, options: [])
    } catch {
        // BUG FIX: don't silently swallow session errors.
        print("AVAudioSession setup error: \(error)")
    }

    var audioComponentDesc = AudioComponentDescription(
        componentType: OSType(kAudioUnitType_Output),
        componentSubType: OSType(kAudioUnitSubType_RemoteIO),
        componentManufacturer: OSType(kAudioUnitManufacturer_Apple),
        componentFlags: 0,
        componentFlagsMask: 0)

    let inputComponent = AudioComponentFindNext(nil, &audioComponentDesc)
    status = AudioComponentInstanceNew(inputComponent!, &audioUnit)
    if status != noErr {
        print("Audio Component Instance New Error \(status.debugDescription)")
    }

    // Interleaved stereo 16-bit signed PCM @ 48 kHz.
    var audioDescription = AudioStreamBasicDescription()
    audioDescription.mSampleRate = 48000
    audioDescription.mFormatID = kAudioFormatLinearPCM
    audioDescription.mFormatFlags = kAudioFormatFlagIsPacked | kAudioFormatFlagIsSignedInteger
    audioDescription.mChannelsPerFrame = 2
    audioDescription.mFramesPerPacket = 1
    audioDescription.mBitsPerChannel = 16
    audioDescription.mBytesPerFrame = (audioDescription.mBitsPerChannel / 8) * audioDescription.mChannelsPerFrame
    audioDescription.mBytesPerPacket = audioDescription.mBytesPerFrame * audioDescription.mFramesPerPacket
    audioDescription.mReserved = 0

    // BUG FIX: AudioUnitSetProperty takes (unit, propertyID, scope, ELEMENT,
    // data, size) — the original call dropped the element argument and did
    // not compile. Element 0 is the output bus.
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioUnitProperty_StreamFormat,
                                  kAudioUnitScope_Input,
                                  0,
                                  &audioDescription,
                                  UInt32(MemoryLayout<AudioStreamBasicDescription>.size))
    if status != noErr {
        print("Set stream format error \(status.debugDescription)")
    }

    // Disable recording on the input element (bus 1); this unit only plays.
    // BUG FIX: the original call was missing the scope argument.
    var flag: UInt32 = 0
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioOutputUnitProperty_EnableIO,
                                  kAudioUnitScope_Input,
                                  1,
                                  &flag,
                                  UInt32(MemoryLayout.size(ofValue: flag)))
    if status != noErr {
        print("Enable IO for playback error \(status.debugDescription)")
    }

    // Install the render callback on the output bus.
    // BUG FIX: the original call was missing the element argument.
    var outputCallbackStruct = AURenderCallbackStruct()
    outputCallbackStruct.inputProc = performPlayback
    outputCallbackStruct.inputProcRefCon = UnsafeMutableRawPointer(Unmanaged.passUnretained(self).toOpaque())
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioUnitProperty_SetRenderCallback,
                                  kAudioUnitScope_Global,
                                  0,
                                  &outputCallbackStruct,
                                  UInt32(MemoryLayout<AURenderCallbackStruct>.size))
    if status != noErr {
        print("Set render callback error \(status.debugDescription)")
    }

    // BUG FIX: "AudioUnitinitialize" is not an API symbol; the function is
    // AudioUnitInitialize.
    status = AudioUnitInitialize(audioUnit)
    if status != noErr {
        print("Failed to initialize audio unit \(status!)")
    }

    status = AudioOutputUnitStart(audioUnit)
    if status != noErr {
        print("Failed to start output unit \(status!)")
    }
}
播放功能
/// Render callback: copies PCM from the player's circular buffer into
/// Core Audio's output buffer for each render cycle.
/// NOTE(review): to be used as a C function pointer for AURenderCallback,
/// this must be a top-level function or closure, not an instance method —
/// confirm how it is declared in the enclosing file.
private func performPlayback(clientData: UnsafeMutableRawPointer,
                             _ ioActionFlags: UnsafeMutablePointer<AudioUnitRenderActionFlags>,
                             inTimeStamp: UnsafePointer<AudioTimeStamp>,
                             inBufNumber: UInt32,
                             inNumberFrames: UInt32,
                             ioData: UnsafeMutablePointer<AudioBufferList>?) -> OSStatus {
    // BUG FIX: the original types "audiobufferlist"/"Osstatus" do not exist —
    // they are AudioBufferList and OSStatus. Also, UnsafeRawPointer(clientData)
    // is non-optional, so the original force-unwrap was invalid.
    let player = Unmanaged<AudioPlayer>.fromOpaque(UnsafeRawPointer(clientData)).takeUnretainedValue()

    // BUG FIX: an AudioBufferList pointer is dereferenced with .pointee,
    // not subscripted with [0].
    let outBuffer = ioData!.pointee.mBuffers
    let bytesRequested = outBuffer.mDataByteSize

    var availableBytes: UInt32 = 0
    let tail = TPCircularBufferTail(&player.buffer, &availableBytes)

    let bytesToCopy = min(bytesRequested, availableBytes)
    memcpy(outBuffer.mData, tail, Int(bytesToCopy))
    TPCircularBufferConsume(&player.buffer, bytesToCopy)

    // BUG FIX (distortion): on underrun the original left the remainder of
    // the output buffer holding stale garbage, which plays as noise. Fill
    // the shortfall with silence instead.
    if bytesToCopy < bytesRequested, let base = outBuffer.mData {
        memset(base.advanced(by: Int(bytesToCopy)), 0, Int(bytesRequested - bytesToCopy))
    }

    return noErr
}
/// Bridge from the C decoder callback: wraps the decoded Opus PCM samples
/// in an NSMutableData and hands them to the shared player.
private func StreamAudioFrameCallback(buffers: UnsafeMutablePointer<Int16>?,
                                      samplesCount: Int,
                                      user: UnsafeMutableRawPointer?) {
    let decodedData = NSMutableData()
    if let buffer = buffers, samplesCount > 0 {
        // NOTE(review): if samplesCount counts per-channel frames rather than
        // total interleaved samples, this size must also be multiplied by the
        // channel count (2) — confirm against the C library's contract.
        let decodedDataSize = samplesCount * MemoryLayout<opus_int16>.size
        decodedData.append(buffer, length: decodedDataSize)
        AudioPlayer.shared.play(data: decodedData)
    }
} // BUG FIX: the function's closing brace was missing in the original.
有人熟悉这个吗?任何帮助表示赞赏。
解决方法
在循环缓冲区中积累的数据量足以覆盖传入数据速率的最大时间抖动之前,您可能不应该开始播放。可以先等循环缓冲区中积累约半秒的音频再开始播放,然后通过实验来调整这个预填充量。