Thank you.
So this is where the decoded audio frames are consumed and copied into the output buffers:
private func audioPlayerShouldInputData(ioData: UnsafeMutableAudioBufferListPointer, numberOfFrames: UInt32) {
    var ioDataWriteOffset = 0
    var numberOfSamples = Int(numberOfFrames)
    while numberOfSamples > 0 {
        // Fetch the next decoded render once the current one is exhausted.
        if currentRender == nil {
            currentRender = renderSource?.getAudioOutputRender()
        }
        guard let currentRender = currentRender else {
            break
        }
        let residueLinesize = currentRender.numberOfSamples - currentRenderReadOffset
        guard residueLinesize > 0 else {
            self.currentRender = nil
            continue
        }
        let framesToCopy = min(numberOfSamples, residueLinesize)
        let bytesToCopy = framesToCopy * MemoryLayout<Float>.size
        let offset = currentRenderReadOffset * MemoryLayout<Float>.size
        // Copy the same slice out of every plane of the current render.
        for i in 0 ..< min(ioData.count, currentRender.dataWrap.data.count) {
            (ioData[i].mData! + ioDataWriteOffset).copyMemory(from: currentRender.dataWrap.data[i]! + offset, byteCount: bytesToCopy)
        }
        numberOfSamples -= framesToCopy
        ioDataWriteOffset += bytesToCopy
        currentRenderReadOffset += framesToCopy
    }
    // Zero-fill whatever part of the output buffers was not written.
    let sizeCopied = (Int(numberOfFrames) - numberOfSamples) * MemoryLayout<Float>.size
    for i in 0 ..< ioData.count {
        let sizeLeft = Int(ioData[i].mDataByteSize) - sizeCopied
        if sizeLeft > 0 {
            memset(ioData[i].mData! + sizeCopied, 0, sizeLeft)
        }
    }
}
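One thing I notice is that the copy loop trusts bytesToCopy without checking it against each destination buffer's mDataByteSize. Purely as a sketch (the clamp is my own addition, not part of the original code), a defensive variant of the loop could look like this:

// Hypothetical defensive version of the copy loop above; the clamp is my addition.
for i in 0 ..< min(ioData.count, currentRender.dataWrap.data.count) {
    // Never write past the end of the destination AudioBuffer.
    let destCapacity = Int(ioData[i].mDataByteSize) - ioDataWriteOffset
    let safeBytes = min(bytesToCopy, max(destCapacity, 0))
    if safeBytes > 0 {
        (ioData[i].mData! + ioDataWriteOffset).copyMemory(from: currentRender.dataWrap.data[i]! + offset, byteCount: safeBytes)
    }
}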
I've followed getAudioOutputRender back through the pipeline, and I believe the audio frame is first generated here:
let result = avcodec_receive_frame(codecContext, coreFrame)
if result == 0, let avframe = coreFrame {
    // Resample (and optionally filter) the raw AVFrame into the player's frame type.
    var frame = try swresample.transfer(avframe: filter?.filter(inputFrame: avframe) ?? avframe)
    frame.timebase = packet.assetTrack.timebase
    frame.duration = avframe.pointee.pkt_duration
    frame.size = Int64(avframe.pointee.pkt_size)
    if packet.assetTrack.mediaType == .audio {
        bestEffortTimestamp = max(bestEffortTimestamp, avframe.pointee.pts)
        frame.position = bestEffortTimestamp
        // Fall back to computing the duration from the sample count when the packet has none.
        if frame.duration == 0 {
            frame.duration = Int64(avframe.pointee.nb_samples) * Int64(frame.timebase.den) / (Int64(avframe.pointee.sample_rate) * Int64(frame.timebase.num))
        }
        bestEffortTimestamp += frame.duration
    } else {
        frame.position = avframe.pointee.best_effort_timestamp
    }
    delegate?.decodeResult(frame: frame)
}
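As a sanity check on the fallback duration math, here is the same rescale as a standalone snippet with hypothetical numbers (1024 samples at 44100 Hz in a 1/44100 timebase):

// Standalone check of the fallback duration formula; all values are hypothetical.
let nbSamples: Int64 = 1024      // avframe.pointee.nb_samples
let sampleRate: Int64 = 44100    // avframe.pointee.sample_rate
let timebaseNum: Int64 = 1       // frame.timebase.num
let timebaseDen: Int64 = 44100   // frame.timebase.den
// nbSamples / sampleRate seconds, converted into timebase ticks:
let duration = nbSamples * timebaseDen / (sampleRate * timebaseNum)
// duration == 1024: one 1024-sample frame spans 1024 ticks of a 1/44100 timebase.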
The exception is Fatal Exception: NSInvalidArgumentException. It's worth noting that these streams usually play fine; however, when I tested in the wild for a while, it would sometimes crash. Maybe the buffer is occasionally malformed in some way.
Thank you.
Can you point me to the right way to catch the exception without crashing?
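For context, the only route I'm aware of is a small Objective-C shim, since Swift's do/catch cannot intercept an NSException; is something like this sketch the recommended approach? (tryObjC and the file name are my own hypothetical names, not an existing API.)

// ExceptionCatcher.h — Objective-C shim exposed to Swift via the bridging header.
// (Hypothetical; catching NSException requires Objective-C @try/@catch.)
#import <Foundation/Foundation.h>

static inline NSException * _Nullable tryObjC(void (NS_NOESCAPE ^ _Nonnull block)(void)) {
    @try {
        block();
        return nil;
    } @catch (NSException *exception) {
        return exception;
    }
}

// Swift call site:
if let exception = tryObjC({
    // code that may raise NSInvalidArgumentException
}) {
    print("Caught \(exception.name.rawValue): \(exception.reason ?? "")")
}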
Thanks again.