Hi,
I'm trying to convert an audio stream into an AVAudioPCMBuffer and then use ShazamKit to match it. Shazam always fails to match. My theory is that it "listens" to the playback at double speed or faster — as if the buffer's declared sample rate doesn't match the stream's actual sample rate.
Starts from here:
...
let format = audioEngine.outputNode.inputFormat(forBus: 0)
guard let pcmBuffer = format.toPCMBuffer(frame: currentFrame) else {
return
}
session.matchStreamingBuffer(pcmBuffer, at: nil)
Where toPCMBuffer
is:
extension AVAudioFormat {
    /// Copies the raw sample data of `frame` into a newly allocated
    /// `AVAudioPCMBuffer` described by this format.
    ///
    /// - Parameter frame: Decoded frame whose `dataWrap` holds one byte
    ///   buffer per channel. Assumes non-interleaved Float32 planes —
    ///   TODO(review): confirm against the decoder's output format.
    /// - Returns: A filled buffer, or `nil` if the format reports zero
    ///   bytes per frame or buffer allocation fails.
    ///
    /// NOTE(review): this format's `sampleRate` must equal the stream's
    /// real rate; if they differ, downstream consumers (e.g. Shazam)
    /// effectively hear the audio at the wrong speed.
    func toPCMBuffer(frame: AudioFrame) -> AVAudioPCMBuffer? {
        let bytesPerFrame = streamDescription.pointee.mBytesPerFrame
        // Guard the division: compressed/malformed formats can report
        // mBytesPerFrame == 0, which would crash below.
        guard bytesPerFrame > 0,
              let pcmBuffer = AVAudioPCMBuffer(pcmFormat: self,
                                               frameCapacity: UInt32(frame.dataWrap.size[0]) / bytesPerFrame)
        else {
            return nil
        }
        pcmBuffer.frameLength = pcmBuffer.frameCapacity
        let frameCount = Int(pcmBuffer.frameLength)
        // Copy each channel plane; never touch more planes than either
        // the buffer's format or the source frame actually has.
        for channel in 0 ..< min(Int(pcmBuffer.format.channelCount), frame.dataWrap.size.count) {
            frame.dataWrap.data[channel]?.withMemoryRebound(to: Float.self, capacity: frameCount) { src in
                // update(from:count:) supersedes the deprecated assign(from:count:).
                pcmBuffer.floatChannelData?[channel].update(from: src, count: frameCount)
            }
        }
        return pcmBuffer
    }
}
AudioFrame
is:
/// One decoded audio frame: per-channel raw sample bytes plus the timing
/// fields needed to place the frame on the stream's timeline.
final class AudioFrame: MEFrame {
// Time base used to convert `position` ticks into a CMTime (see MEFrame).
var timebase = Timebase.defaultValue
var duration: Int64 = 0
var size: Int64 = 0
// Stream position in `timebase` ticks; consumed by `MEFrame.cmtime`.
var position: Int64 = 0
var numberOfSamples = 0
// Pooled byte storage, one buffer per channel (judging by the
// per-channel `size` array written below).
let dataWrap: ByteDataWrap
public init(bufferSize: Int32, channels: Int32) {
// NOTE(review): the pool key depends only on `channels`, so a reused
// wrap may carry sizes from an earlier frame; they are rewritten only
// when the first buffer is too small — confirm the pool resets state.
dataWrap = ObjectPool.share.object(class: ByteDataWrap.self, key: "AudioData_\(channels)") { ByteDataWrap() }
if dataWrap.size[0] < bufferSize {
dataWrap.size = Array(repeating: Int(bufferSize), count: Int(channels))
}
}
...
}
and MEFrame
is:
extension MEFrame {
    /// Presentation timestamp of this frame as a `CMTime`, derived from
    /// `position` through the frame's `timebase`.
    public var cmtime: CMTime {
        return timebase.cmtime(for: position)
    }

    /// Presentation timestamp in seconds — convenience over `cmtime`.
    public var seconds: TimeInterval {
        return cmtime.seconds
    }
}