How do I convert AAC to PCM using AVAudioConverter, AVAudioCompressedBuffer and AVAudioPCMBuffer in Swift?
At WWDC 2015, Session 507 said that AVAudioConverter can encode and decode PCM buffers, and it showed an encoding example, but no decoding example.
I tried to decode, and something doesn't work. I don't know what :(
The calls:
//buffer - it's AVAudioPCMBuffer from AVAudioInputNode(AVAudioEngine)

let aacBuffer = AudioBufferConverter.convertToAAC(from: buffer, error: nil) //has data
let data = Data(bytes: aacBuffer!.data, count: Int(aacBuffer!.byteLength)) //has data
let aacReverseBuffer = AudioBufferConverter.convertToAAC(from: data) //has data
let pcmReverseBuffer = AudioBufferConverter.convertToPCM(from: aacBuffer2!, error: nil) //zeros data. data object exists, but filled by zeros
Here is the conversion code:
class AudioBufferFormatHelper {

    static func PCMFormat() -> AVAudioFormat? {
        return AVAudioFormat(commonFormat: .pcmFormatFloat32, sampleRate: 44100, channels: 1, interleaved: false)
    }

    static func AACFormat() -> AVAudioFormat? {

        var outDesc = AudioStreamBasicDescription(
            mSampleRate: 44100,
            mFormatID: kAudioFormatMPEG4AAC,
            mFormatFlags: 0,
            mBytesPerPacket: 0,
            mFramesPerPacket: 0,
            mBytesPerFrame: 0,
            mChannelsPerFrame: 1,
            mBitsPerChannel: 0,
            mReserved: 0)

        let outFormat = AVAudioFormat(streamDescription: &outDesc)
        return outFormat
    }
}

class AudioBufferConverter {

    static func convertToAAC(from buffer: AVAudioBuffer, error outError: NSErrorPointer) -> AVAudioCompressedBuffer? {

        let outputFormat = AudioBufferFormatHelper.AACFormat()
        let outBuffer = AVAudioCompressedBuffer(format: outputFormat!, packetCapacity: 8, maximumPacketSize: 768)

        self.convert(from: buffer, to: outBuffer, error: outError)

        return outBuffer
    }

    static func convertToPCM(from buffer: AVAudioBuffer, error outError: NSErrorPointer) -> AVAudioPCMBuffer? {

        let outputFormat = AudioBufferFormatHelper.PCMFormat()
        guard let outBuffer = AVAudioPCMBuffer(pcmFormat: outputFormat!, frameCapacity: 4410) else {
            return nil
        }

        outBuffer.frameLength = 4410
        self.convert(from: buffer, to: outBuffer, error: outError)

        return outBuffer
    }

    static func convertToAAC(from data: Data) -> AVAudioCompressedBuffer? {

        let nsData = NSData(data: data)
        let inputFormat = AudioBufferFormatHelper.AACFormat()
        let buffer = AVAudioCompressedBuffer(format: inputFormat!, packetCapacity: 8, maximumPacketSize: 768)
        buffer.byteLength = UInt32(data.count)
        buffer.packetCount = 8

        buffer.data.copyMemory(from: nsData.bytes, byteCount: nsData.length)
        buffer.packetDescriptions!.pointee.mDataByteSize = 4

        return buffer
    }

    private static func convert(from sourceBuffer: AVAudioBuffer, to destinationBuffer: AVAudioBuffer, error outError: NSErrorPointer) {
        //init converter
        let inputFormat = sourceBuffer.format
        let outputFormat = destinationBuffer.format

        let converter = AVAudioConverter(from: inputFormat, to: outputFormat)
        converter!.bitRate = 32000

        let inputBlock: AVAudioConverterInputBlock = { inNumPackets, outStatus in
            outStatus.pointee = AVAudioConverterInputStatus.haveData
            return sourceBuffer
        }

        _ = converter!.convert(to: destinationBuffer, error: outError, withInputFrom: inputBlock)
    }
}
The resulting AVAudioPCMBuffer has data, but it is all zeros.
In the console I see these errors:
AACDecoder.cpp:192:Deserialize: Unmatched number of channel elements in payload
AACDecoder.cpp:220:DecodeFrame: Error deserializing packet
[ac] ACMP4AACBaseDecoder.cpp:1337:ProduceOutputBufferList: (0x14f81b840) Error decoding packet 1: err = -1, packet length: 0
AACDecoder.cpp:192:Deserialize: Unmatched number of channel elements in payload
AACDecoder.cpp:220:DecodeFrame: Error deserializing packet
[ac] ACMP4AACBaseDecoder.cpp:1337:ProduceOutputBufferList: (0x14f81b840) Error decoding packet 3: err = -1, packet length: 0
AACDecoder.cpp:192:Deserialize: Unmatched number of channel elements in payload
AACDecoder.cpp:220:DecodeFrame: Error deserializing packet
[ac] ACMP4AACBaseDecoder.cpp:1337:ProduceOutputBufferList: (0x14f81b840) Error decoding packet 5: err = -1, packet length: 0
AACDecoder.cpp:192:Deserialize: Unmatched number of channel elements in payload
AACDecoder.cpp:220:DecodeFrame: Error deserializing packet
[ac] ACMP4AACBaseDecoder.cpp:1337:ProduceOutputBufferList: (0x14f81b840) Error decoding packet 7: err = -1, packet length: 0
Solution
There were a few problems with your attempt:

> You're not setting the multiple packet descriptions when you convert Data -> AVAudioCompressedBuffer. You need to create them, as AAC packets are of variable size. You can either copy them from the original AAC buffer, or parse them out of your data by hand (ouch) or by using the AudioFileStream API (see the sketch after this list).
> You recreate your AVAudioConverters over and over again - once for each buffer, throwing away their state. e.g. the AAC encoder, for its own personal reasons, needs to add 2112 frames of silence before it can get around to reproducing your audio, so recreating the converter gets you a whole lot of silence.
> You repeatedly present the same buffer to the AVAudioConverter's input block. You should present each buffer only once.
> The bit rate of 32000 didn't work (for me)
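(To illustrate the first point, here's a minimal sketch of copying the packet descriptions out of an encoder's AVAudioCompressedBuffer so they can travel alongside the raw AAC bytes. The serializeAAC helper name is just for illustration, not an existing API.)

import AVFoundation

// Illustrative helper (hypothetical name): the AudioStreamPacketDescription array
// has to be kept with the raw AAC bytes, because AAC packets are variable-sized.
func serializeAAC(_ aacBuffer: AVAudioCompressedBuffer) -> (bytes: Data, packets: [AudioStreamPacketDescription]) {
    // Copy the compressed payload out of the buffer.
    let bytes = Data(bytes: aacBuffer.data, count: Int(aacBuffer.byteLength))
    // packetDescriptions points at packetCount entries describing each packet's offset and size.
    let packets = Array(UnsafeBufferPointer(start: aacBuffer.packetDescriptions,
                                            count: Int(aacBuffer.packetCount)))
    return (bytes, packets)
}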
That's all I can think of right now. Try the following modifications to your code instead, which you now call like so:
(p.s. I changed some of the mono to stereo so I could play the round-trip buffers on my Mac, whose microphone input is strangely stereo - you may need to change that back)
(p.p.s. there's obviously some round trip / serialisation / deserialisation attempt going on here, but what exactly are you trying to do? Do you want to stream AAC audio from one device to another? Because it might be easier to let another API like AVPlayer play the resulting stream, instead of dealing with the data packets yourself.)
let aacBuffer = AudioBufferConverter.convertToAAC(from: buffer, error: nil)!
let data = Data(bytes: aacBuffer.data, count: Int(aacBuffer.byteLength))
let packetDescriptions = Array(UnsafeBufferPointer(start: aacBuffer.packetDescriptions, count: Int(aacBuffer.packetCount)))

let aacReverseBuffer = AudioBufferConverter.convertToAAC(from: data, packetDescriptions: packetDescriptions)! // was aacBuffer2
let pcmReverseBuffer = AudioBufferConverter.convertToPCM(from: aacReverseBuffer, error: nil)

class AudioBufferFormatHelper {

    static func PCMFormat() -> AVAudioFormat? {
        return AVAudioFormat(commonFormat: .pcmFormatFloat32, sampleRate: 44100, channels: 1, interleaved: false)
    }

    static func AACFormat() -> AVAudioFormat? {

        var outDesc = AudioStreamBasicDescription(
            mSampleRate: 44100,
            mFormatID: kAudioFormatMPEG4AAC,
            mFormatFlags: 0,
            mBytesPerPacket: 0,
            mFramesPerPacket: 0,
            mBytesPerFrame: 0,
            mChannelsPerFrame: 1,
            mBitsPerChannel: 0,
            mReserved: 0)

        let outFormat = AVAudioFormat(streamDescription: &outDesc)
        return outFormat
    }
}

class AudioBufferConverter {
    static var lpcmToAACConverter: AVAudioConverter! = nil

    static func convertToAAC(from buffer: AVAudioBuffer, error outError: NSErrorPointer) -> AVAudioCompressedBuffer? {

        let outputFormat = AudioBufferFormatHelper.AACFormat()
        let outBuffer = AVAudioCompressedBuffer(format: outputFormat!, packetCapacity: 8, maximumPacketSize: 768)

        //init converter once
        if lpcmToAACConverter == nil {
            let inputFormat = buffer.format

            lpcmToAACConverter = AVAudioConverter(from: inputFormat, to: outputFormat!)
            // print("available rates \(lpcmToAACConverter.applicableEncodeBitRates)")
            // lpcmToAACConverter!.bitRate = 96000
            lpcmToAACConverter.bitRate = 32000 // have end of stream problems with this, not sure why
        }

        self.convert(withConverter: lpcmToAACConverter, from: buffer, to: outBuffer, error: outError)

        return outBuffer
    }

    static var aacToLPCMConverter: AVAudioConverter! = nil

    static func convertToPCM(from buffer: AVAudioBuffer, error outError: NSErrorPointer) -> AVAudioPCMBuffer? {

        let outputFormat = AudioBufferFormatHelper.PCMFormat()
        guard let outBuffer = AVAudioPCMBuffer(pcmFormat: outputFormat!, frameCapacity: 4410) else {
            return nil
        }

        //init converter once
        if aacToLPCMConverter == nil {
            let inputFormat = buffer.format

            aacToLPCMConverter = AVAudioConverter(from: inputFormat, to: outputFormat!)
        }

        self.convert(withConverter: aacToLPCMConverter, from: buffer, to: outBuffer, error: outError)

        return outBuffer
    }

    static func convertToAAC(from data: Data, packetDescriptions: [AudioStreamPacketDescription]) -> AVAudioCompressedBuffer? {

        let nsData = NSData(data: data)
        let inputFormat = AudioBufferFormatHelper.AACFormat()
        let maximumPacketSize = packetDescriptions.map { $0.mDataByteSize }.max()!
        let buffer = AVAudioCompressedBuffer(format: inputFormat!, packetCapacity: AVAudioPacketCount(packetDescriptions.count), maximumPacketSize: Int(maximumPacketSize))
        buffer.byteLength = UInt32(data.count)
        buffer.packetCount = AVAudioPacketCount(packetDescriptions.count)

        buffer.data.copyMemory(from: nsData.bytes, byteCount: nsData.length)
        buffer.packetDescriptions!.pointee.mDataByteSize = UInt32(data.count)
        buffer.packetDescriptions!.initialize(from: packetDescriptions, count: packetDescriptions.count)

        return buffer
    }

    private static func convert(withConverter: AVAudioConverter, from sourceBuffer: AVAudioBuffer, to destinationBuffer: AVAudioBuffer, error outError: NSErrorPointer) {
        // input each buffer only once
        var newBufferAvailable = true

        let inputBlock: AVAudioConverterInputBlock = { inNumPackets, outStatus in
            if newBufferAvailable {
                outStatus.pointee = .haveData
                newBufferAvailable = false
                return sourceBuffer
            } else {
                outStatus.pointee = .noDataNow
                return nil
            }
        }

        let status = withConverter.convert(to: destinationBuffer, error: outError, withInputFrom: inputBlock)
        print("status: \(status.rawValue)")
    }
}
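(For completeness, a rough sketch of how these helpers might be driven from an AVAudioEngine input tap - the tap buffer plays the role of buffer in the calls above. The bus number and buffer size below are just placeholder assumptions.)

import AVFoundation

let engine = AVAudioEngine()
let input = engine.inputNode
let inputFormat = input.outputFormat(forBus: 0)

// Feed each captured PCM buffer through the AAC round trip.
input.installTap(onBus: 0, bufferSize: 4410, format: inputFormat) { buffer, _ in
    guard let aacBuffer = AudioBufferConverter.convertToAAC(from: buffer, error: nil) else { return }
    let data = Data(bytes: aacBuffer.data, count: Int(aacBuffer.byteLength))
    let packetDescriptions = Array(UnsafeBufferPointer(start: aacBuffer.packetDescriptions,
                                                       count: Int(aacBuffer.packetCount)))
    if let rebuilt = AudioBufferConverter.convertToAAC(from: data, packetDescriptions: packetDescriptions),
       let pcm = AudioBufferConverter.convertToPCM(from: rebuilt, error: nil) {
        print("round-tripped \(pcm.frameLength) frames")
    }
}
try? engine.start()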