我想知道如何获取AVAudioCompressedBuffer的字节,然后
从字节重建一个AVAudioCompressedBuffer。
下面的代码获取一个AVAudioPCMBuffer,将其用AVAudioConverter压缩为AVAudioCompressedBuffer(iLBC)并获取字节,然后将字节转换回AVAudioCompressedBuffer(iLBC),然后解压缩为AVAudioPCMBuffer并播放缓冲区。将AVAudioCompressedBuffer转换为字节然后返回时出了问题。如果我跳过此转换,音频将按预期播放。
我甚至可以匹配compressedBuffer和compressedBuffer2的内容,这使我相信获取字节的机制并不完全正确。
// Do iLBC Compression
// 4 packets x 38 bytes = 152 bytes of compressed payload per 80 ms of audio.
let compressedBuffer: AVAudioCompressedBuffer = self.compress(inBuffer: buffer)
// packetCapacity: 4, maximumPacketSize: 38
// self.player.scheduleBuffer(self.uncompress(inBuffer: compressedBuffer)) // This works perfectly

// Convert Buffer to Byte Array
// Use the buffer's actual payload size rather than a hard-coded 152.
let byteCount = Int(compressedBuffer.byteLength)
let srcBytes = compressedBuffer.data.bindMemory(to: UInt8.self, capacity: byteCount)
let audioByteArray = [UInt8](UnsafeBufferPointer(start: srcBytes, count: byteCount))

// Convert Byte Array to Buffer
let compressedBuffer2: AVAudioCompressedBuffer = AVAudioCompressedBuffer(
    format: AVAudioFormat(streamDescription: &self.descriptor)!,
    packetCapacity: 4,
    maximumPacketSize: 38)
// FIX: copying raw bytes alone is not enough — the converter treats the
// buffer as empty unless byteLength and packetCount are set to describe
// the payload that was copied in.
compressedBuffer2.byteLength = compressedBuffer.byteLength
compressedBuffer2.packetCount = compressedBuffer.packetCount
audioByteArray.withUnsafeBufferPointer {
    compressedBuffer2.data.copyMemory(from: $0.baseAddress!, byteCount: byteCount)
}

// Do iLBC Decompression
let uncompressedBuffer: AVAudioPCMBuffer = self.uncompress(inBuffer: compressedBuffer2)

// Play Buffer
self.player.scheduleBuffer(uncompressedBuffer)
压缩和解压缩功能
// 16 kHz mono Float32 non-interleaved PCM: the capture/playback format.
let format = AVAudioFormat.init(commonFormat: AVAudioCommonFormat.pcmFormatFloat32, sampleRate: 16000, channels: 1, interleaved: false)
// iLBC stream descriptor: 8 kHz mono; bytes/frames-per-packet left at 0 so
// the codec supplies them. NOTE(review): other snippets reference
// `self.descriptor` and `self.compressedFormat` — presumably derived from
// this descriptor elsewhere in the class; confirm against the full source.
var compressedFormatDescriptor = AudioStreamBasicDescription(mSampleRate: 8000, mFormatID: kAudioFormatiLBC, mFormatFlags: 0, mBytesPerPacket: 0, mFramesPerPacket: 0, mBytesPerFrame: 0, mChannelsPerFrame: 1, mBitsPerChannel: 0, mReserved: 0)
/// Compresses a PCM buffer to iLBC.
/// - Parameter inBuffer: PCM input in the capture format (`inBuffer.format`).
/// - Returns: An iLBC `AVAudioCompressedBuffer` (up to 4 packets x 38 bytes).
func compress(inBuffer : AVAudioPCMBuffer) -> AVAudioCompressedBuffer {
    let inputFormat = inBuffer.format
    guard let converter = AVAudioConverter(from: inputFormat, to: self.compressedFormat!) else {
        fatalError("Unable to create PCM -> iLBC AVAudioConverter")
    }
    let outBuffer = AVAudioCompressedBuffer(format: self.compressedFormat!, packetCapacity: 4, maximumPacketSize: 38)
    // FIX: hand the input buffer to the converter exactly once, then signal
    // end-of-stream. Unconditionally returning .haveData lets the converter
    // pull the same buffer again and again if it asks for more input.
    var delivered = false
    let inputBlock : AVAudioConverterInputBlock = { inNumPackets, outStatus in
        if delivered {
            outStatus.pointee = .endOfStream
            return nil
        }
        delivered = true
        outStatus.pointee = .haveData
        return inBuffer
    }
    var error : NSError?
    converter.convert(to: outBuffer, error: &error, withInputFrom: inputBlock)
    if let error = error {
        // Surface failures instead of silently returning an empty buffer.
        print("iLBC compression failed: \(error)")
    }
    return outBuffer
}
/// Decompresses an iLBC buffer back to the PCM playback format (`format`).
/// - Parameter inBuffer: iLBC compressed input with `byteLength`/`packetCount` set.
/// - Returns: A PCM buffer ready to schedule on the player.
func uncompress(inBuffer : AVAudioCompressedBuffer) -> AVAudioPCMBuffer {
    let inputFormat = inBuffer.format
    let outputFormat = format
    guard let converter = AVAudioConverter(from: inputFormat, to: outputFormat!) else {
        fatalError("Unable to create iLBC -> PCM AVAudioConverter")
    }
    // FIX: deliver the input once, then report end-of-stream (see compress).
    var delivered = false
    let inputBlock : AVAudioConverterInputBlock = { inNumPackets, outStatus in
        if delivered {
            outStatus.pointee = .endOfStream
            return nil
        }
        delivered = true
        outStatus.pointee = .haveData
        return inBuffer
    }
    var error : NSError?
    // 1600 frames = 100 ms at 16 kHz, matching the tap's buffer duration.
    let outBuffer: AVAudioPCMBuffer = AVAudioPCMBuffer(pcmFormat: outputFormat!, frameCapacity: 1600)!
    converter.convert(to: outBuffer, error: &error, withInputFrom: inputBlock)
    if let error = error {
        // Surface failures instead of silently returning an empty buffer.
        print("iLBC decompression failed: \(error)")
    }
    return outBuffer
}
最佳答案
这就是我最终要做的。
最主要的是在第二个AVAudioCompressedBuffer上设置byteLength和packetCount。
iLBC具有固定的帧大小(每块304位,适用于20ms帧)
304 * 50 = 15200 = 15.2 kbit / s
AVAudioInputNode的AVAudioNodeTapBlock每100毫秒触发一次。
byteLength = 304bits / 8 = 38bytes
packetCount = 5
这将导致5 * 38 * 10 * 8 = 15200 = 15.2 kbit / s。
下面的代码显示了所有写出的内容。
所以,发生的是
1)将AVAudioPCMBuffer从麦克风转换为AVAudioCompressedBuffer(iLBC)
2)将AVAudioCompressedBuffer(iLBC)转换为[UInt8](5 * 38 = 190字节)。
3)将[UInt8]转换为AVAudioCompressedBuffer(iLBC)
4)将AVAudioCompressedBuffer(iLBC)转换为AVAudioPCMBuffer
5)播放AVAudioPCMBuffer
// Compress to iLBC
// iLBC at 8 kHz: one 20 ms frame = 304 bits = 38 bytes. A 100 ms mic tap
// therefore yields 5 packets x 38 bytes = 190 bytes per callback.
let packetCapacity: AVAudioPacketCount = 5
let maximumPacketSize = 38
let capacity = Int(packetCapacity) * maximumPacketSize // 190
var descriptor = AudioStreamBasicDescription(mSampleRate: 8000, mFormatID: kAudioFormatiLBC, mFormatFlags: 0, mBytesPerPacket: 0, mFramesPerPacket: 0, mBytesPerFrame: 0, mChannelsPerFrame: 1, mBitsPerChannel: 0, mReserved: 0)
// FIX: AVAudioFormat(streamDescription:) is failable — unwrap it once here
// instead of passing an optional where a non-optional format is required.
guard let ilbcformat = AVAudioFormat(streamDescription: &descriptor) else {
    fatalError("Could not build iLBC AVAudioFormat")
}
let compressor = AVAudioConverter(from: self.format!, to: ilbcformat)!
let inputBlock : AVAudioConverterInputBlock = { inNumPackets, outStatus in
    outStatus.pointee = AVAudioConverterInputStatus.haveData
    return buffer
}
let compressedBuffer = AVAudioCompressedBuffer(format: ilbcformat, packetCapacity: packetCapacity, maximumPacketSize: maximumPacketSize)
// FIX: capture the error instead of passing nil, so failures are visible.
var compressError: NSError?
compressor.convert(to: compressedBuffer, error: &compressError, withInputFrom: inputBlock)
if let compressError = compressError {
    print("iLBC compression failed: \(compressError)")
}

// Convert to Bytes — this 190-byte array is what would travel over the wire.
var compressedBytes = [UInt8](repeating: 0, count: capacity)
compressedBytes.withUnsafeMutableBufferPointer {
    $0.baseAddress!.initialize(from: compressedBuffer.data.bindMemory(to: UInt8.self, capacity: capacity), count: capacity)
}

// Convert back to a buffer (the receiving side).
let compressedBuffer2 = AVAudioCompressedBuffer(format: ilbcformat, packetCapacity: packetCapacity, maximumPacketSize: maximumPacketSize)
// Essential: without byteLength/packetCount the converter treats the buffer
// as empty even though the payload bytes were copied in.
compressedBuffer2.byteLength = UInt32(capacity) // 190
compressedBuffer2.packetCount = packetCapacity  // 5
compressedBytes.withUnsafeBufferPointer {
    compressedBuffer2.data.copyMemory(from: $0.baseAddress!, byteCount: capacity)
}

// Uncompress to PCM
let uncompressor = AVAudioConverter(from: ilbcformat, to: self.format!)!
let inputBlock2 : AVAudioConverterInputBlock = { inNumPackets, outStatus in
    outStatus.pointee = AVAudioConverterInputStatus.haveData
    return compressedBuffer2
}
let uncompressedBuffer = AVAudioPCMBuffer(pcmFormat: self.format!, frameCapacity: 4410)!
var uncompressError: NSError?
uncompressor.convert(to: uncompressedBuffer, error: &uncompressError, withInputFrom: inputBlock2)
if let uncompressError = uncompressError {
    print("iLBC decompression failed: \(uncompressError)")
}

// Play Buffer
self.player.scheduleBuffer(uncompressedBuffer)