Why is my AVAssetWriterInput failing to append CMSampleBuffers?

The beginRecording() and endRecording() funcs matter the most. interpretRawFrameData(_:) just decodes the video stream the entire time; I want to record parts of that stream. In decodeFrameData(_:) I attempt, and fail, to append a CMSampleBuffer to my AVAssetWriterInput object.


import Foundation
import VideoToolbox
import AVFoundation
import Photos

typealias FrameData = Array<UInt8>

protocol VideoFrameDecoderDelegate {
    func receivedDisplayableFrame(_ frame: CVPixelBuffer)
}

class VideoFrameDecoder {
   
    static var delegate: VideoFrameDecoderDelegate?
   
    private var formatDesc: CMVideoFormatDescription?
    private var decompressionSession: VTDecompressionSession?
   
    var isRecording: Bool = false {
        didSet { isRecording ? beginRecording() : endRecording() }
    }
    var outputURL: URL?
    var path = ""
    var videoWriter: AVAssetWriter?
    var videoWriterInput: AVAssetWriterInput?
   
    func interpretRawFrameData(_ frameData: inout FrameData) {
        var naluType = frameData[4] & 0x1F
        if naluType != 7 && formatDesc == nil { return }
       
        // Replace start code with the size
        var frameSize = CFSwapInt32HostToBig(UInt32(frameData.count - 4))
        memcpy(&frameData, &frameSize, 4)
       
        // The start indices for nested packets. Default to 0.
        var ppsStartIndex = 0
        var frameStartIndex = 0
       
        var sps: Array<UInt8>?
        var pps: Array<UInt8>?
       
        // SPS parameters
        if naluType == 7 {
            for i in 4..<40 {
                if frameData[i] == 0 && frameData[i+1] == 0 && frameData[i+2] == 0 && frameData[i+3] == 1 {
                    ppsStartIndex = i // Includes the start header
                    sps = Array(frameData[4..<i])
                   
                    // Set naluType to the nested packet's NALU type
                    naluType = frameData[i + 4] & 0x1F
                    break
                }
            }
        }
        // PPS parameters
        if naluType == 8 {
            for i in ppsStartIndex+4..<ppsStartIndex+34 {
                if frameData[i] == 0 && frameData[i+1] == 0 && frameData[i+2] == 0 && frameData[i+3] == 1 {
                    frameStartIndex = i
                    pps = Array(frameData[ppsStartIndex+4..<i])
                   
                    // Set naluType to the nested packet's NALU type
                    naluType = frameData[i+4] & 0x1F
                    break
                }
            }
           
            guard let sps = sps,
                let pps = pps,
                createFormatDescription(sps: sps, pps: pps) else {
                    print("===== ===== Failed to create formatDesc")
                    return
            }
            guard createDecompressionSession() else {
                print("===== ===== Failed to create decompressionSession")
                return
            }
        }
       
        if (naluType == 1 || naluType == 5) && decompressionSession != nil {
            // If this is successful, the callback will be called
            // The callback will send the full decoded and decompressed frame to the delegate
            decodeFrameData(Array(frameData[frameStartIndex...frameData.count - 1]))
        }
    }
   
    private func decodeFrameData(_ frameData: FrameData) {
        let bufferPointer = UnsafeMutableRawPointer(mutating: frameData)
        // Replace the start code with the size of the NALU
        var frameSize = CFSwapInt32HostToBig(UInt32(frameData.count - 4))
        memcpy(bufferPointer, &frameSize, 4)
        // Create a memory location to store the processed image
        var outputBuffer: CVPixelBuffer?
        var blockBuffer: CMBlockBuffer?
        var status = CMBlockBufferCreateWithMemoryBlock(
            allocator: kCFAllocatorDefault,
            memoryBlock: bufferPointer,
            blockLength: frameData.count,
            blockAllocator: kCFAllocatorNull,
            customBlockSource: nil,
            offsetToData: 0,
            dataLength: frameData.count,
            flags: 0, blockBufferOut: &blockBuffer)
        // Return if there was an error allocating processed image location
        guard status == kCMBlockBufferNoErr else { return }
        // Do some image processing
        var sampleBuffer: CMSampleBuffer?
        let sampleSizeArray = [frameData.count]
        status = CMSampleBufferCreateReady(
            allocator: kCFAllocatorDefault,
            dataBuffer: blockBuffer,
            formatDescription: formatDesc,
            sampleCount: 1, sampleTimingEntryCount: 0,
            sampleTimingArray: nil,
            sampleSizeEntryCount: 1,
            sampleSizeArray: sampleSizeArray,
            sampleBufferOut: &sampleBuffer)
       
        // Return if there was an error
        if let buffer = sampleBuffer,
            status == kCMBlockBufferNoErr {
            let attachments: CFArray? = CMSampleBufferGetSampleAttachmentsArray(buffer, createIfNecessary: true)
            if let attachmentsArray = attachments {
                let dic = unsafeBitCast(CFArrayGetValueAtIndex(attachmentsArray, 0), to: CFMutableDictionary.self)
                CFDictionarySetValue(
                    dic,
                    Unmanaged.passUnretained(kCMSampleAttachmentKey_DisplayImmediately).toOpaque(),
                    Unmanaged.passUnretained(kCFBooleanTrue).toOpaque())
               
                // Decompress with VideoToolbox
                var flagOut = VTDecodeInfoFlags(rawValue: 0)
                status = VTDecompressionSessionDecodeFrame(
                    decompressionSession!,
                    sampleBuffer: buffer,
                    flags: [],
                    frameRefcon: &outputBuffer,
                    infoFlagsOut: &flagOut)
               
                // Record CMSampleBuffer with AVFoundation
                if isRecording,
                    let vidInput = videoWriterInput,
                    vidInput.isReadyForMoreMediaData {
                    print("Appended: \(vidInput.append(buffer))")
                }
            }
        }
    }
   
    private func createFormatDescription(sps: [UInt8], pps: [UInt8]) -> Bool {
       
        let pointerSPS = UnsafePointer(sps)
        let pointerPPS = UnsafePointer(pps)
       
        let dataParamArray = [pointerSPS, pointerPPS]
        let parameterSetPointers = UnsafePointer(dataParamArray)
       
        let sizeParamArray = [sps.count, pps.count]
        let parameterSetSizes = UnsafePointer(sizeParamArray)
       
        let status = CMVideoFormatDescriptionCreateFromH264ParameterSets(
            allocator: kCFAllocatorDefault,
            parameterSetCount: 2,
            parameterSetPointers: parameterSetPointers,
            parameterSetSizes: parameterSetSizes,
            nalUnitHeaderLength: 4,
            formatDescriptionOut: &formatDesc)
       
        return status == noErr
    }
   
    private func createDecompressionSession() -> Bool {
        guard let desc = formatDesc else { return false }
       
        if let session = decompressionSession {
            VTDecompressionSessionInvalidate(session)
            decompressionSession = nil
        }
       
        let decoderParameters = NSMutableDictionary()
        let destinationPixelBufferAttributes = NSMutableDictionary()
       
        var outputCallback = VTDecompressionOutputCallbackRecord()
        outputCallback.decompressionOutputCallback = callback
        outputCallback.decompressionOutputRefCon = UnsafeMutableRawPointer(Unmanaged.passUnretained(self).toOpaque())
       
        let status = VTDecompressionSessionCreate(
            allocator: kCFAllocatorDefault,
            formatDescription: desc,
            decoderSpecification: decoderParameters,
            imageBufferAttributes: destinationPixelBufferAttributes,
            outputCallback: &outputCallback,
            decompressionSessionOut: &decompressionSession)
       
        return status == noErr
    }
   
    private var callback: VTDecompressionOutputCallback = {(
        decompressionOutputRefCon: UnsafeMutableRawPointer?,
        sourceFrameRefCon: UnsafeMutableRawPointer?,
        status: OSStatus,
        infoFlags: VTDecodeInfoFlags,
        imageBuffer: CVPixelBuffer?,
        presentationTimeStamp: CMTime,
        duration: CMTime) in
        guard let newImage = imageBuffer,
            status == noErr else {
                // -12909 is Bad Video Error, nothing too bad unless there's no feed
                if status != -12909 {
                    print("===== Failed to decompress. VT Error \(status)")
                }
                return
        }
        // print("===== Image successfully decompressed")
        VideoFrameDecoder.delegate?.receivedDisplayableFrame(newImage)
    }
   
    private func handlePhotoLibraryAuth() {
        if PHPhotoLibrary.authorizationStatus() != .authorized {
            PHPhotoLibrary.requestAuthorization { _ in }
        }
    }
   
    private func createFilePath() {
        let fileManager = FileManager.default
        let urls = fileManager.urls(for: .documentDirectory, in: .userDomainMask)
        guard let documentDirectory: NSURL = urls.first as NSURL? else {
            fatalError("documentDir Error")
        }
        guard let videoOutputURL = documentDirectory.appendingPathComponent("iTello-\(Date()).mp4") else {
            return
        }
        outputURL = videoOutputURL
        path = videoOutputURL.path
        if FileManager.default.fileExists(atPath: path) {
            do {
                try FileManager.default.removeItem(atPath: path)
            } catch {
                print("Unable to delete file: \(error) : \(#function).")
                return
            }
        }
    }
   
    private func beginRecording() {
        handlePhotoLibraryAuth()
        createFilePath()
        guard let videoOutputURL = outputURL,
            let vidWriter = try? AVAssetWriter(outputURL: videoOutputURL, fileType: AVFileType.mp4) else {
                fatalError("AVAssetWriter error")
        }
        if formatDesc == nil { print("Warning: No Format For Video") }
        // outputSettings: nil means passthrough: appended samples are written as-is,
        // with formatDesc as the source format hint
        let vidInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: nil, sourceFormatHint: formatDesc)
        guard vidWriter.canAdd(vidInput) else {
            print("Error: Cant add video writer input")
            return
        }
        vidInput.expectsMediaDataInRealTime = true
        vidWriter.add(vidInput)
        guard vidWriter.startWriting() else {
            print("Error: Cant write with vid writer")
            return
        }
        vidWriter.startSession(atSourceTime: CMTime.zero)
        self.videoWriter = vidWriter
        self.videoWriterInput = vidInput
        print("Recording Video Stream")
    }
   
    private func saveRecordingToPhotoLibrary() {
        let fileManager = FileManager.default
        guard fileManager.fileExists(atPath: self.path) else {
            print("Error: The file: \(self.path) not exists, so cannot move this file camera roll")
            return
        }
        print("The file: \(self.path) has been save into documents folder, and is ready to be moved to camera roll")
        PHPhotoLibrary.shared().performChanges({
            PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: URL(fileURLWithPath: self.path))
        }) { completed, error in
            guard completed else {
                print ("Error: Cannot move the video \(self.path) to camera roll, error: \(String(describing: error?.localizedDescription))")
                return
            }
            print("Video \(self.path) has been moved to camera roll")
        }
    }
   
    private func endRecording() {
        guard let vidInput = videoWriterInput, let vidWriter = videoWriter else {
            print("Error, no video writer or video input")
            return
        }
        vidInput.markAsFinished()
        if !vidInput.isReadyForMoreMediaData {
            vidWriter.finishWriting {
                print("Finished Recording")
                guard vidWriter.status == .completed else {
                    print("Warning: The Video Writer status is not completed, status: \(vidWriter.status.rawValue)")
                    print(vidWriter.error.debugDescription)
                    return
                }
                print("VideoWriter status is completed")
                self.saveRecordingToPhotoLibrary()
                self.videoWriterInput = nil
                self.videoWriter = nil
            }
        }
    }
}



I'm getting AVFoundation error -11800 and the writer is failing for a reason I can't determine. I'm streaming H.264/MP4 video from a non-Apple device. The video decodes and displays properly but does not record: the append operation fails every time, and the error doesn't tell me why.
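For anyone debugging the same thing, one way to surface the underlying failure is to check the writer's status and error right after a failed append. This is only a sketch, reusing the videoWriter / videoWriterInput properties and the `buffer` local from decodeFrameData(_:) above:

Code Block
// Sketch: surface the writer's own error when the append fails.
if isRecording,
    let vidInput = videoWriterInput,
    vidInput.isReadyForMoreMediaData,
    !vidInput.append(buffer) {
    // A failed append usually moves the writer to .failed (status 3);
    // the underlying NSError explains what -11800 is wrapping.
    print("Writer status: \(videoWriter?.status.rawValue ?? -1)")
    print("Writer error: \(String(describing: videoWriter?.error))")
}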

  • -11800 is a generic "unknown error". The full error text should provide more context on what is going wrong. Can you provide the full error text?

I apologize for the extremely delayed response; this was for a side project that I'm only now getting back to, and I'm still getting the same error. The writer status was 3 (.failed), and this is the output from endRecording(), the last function in the original post's code block:

Full Error:

Code Block
Error Domain=AVFoundationErrorDomain Code=-11800 "Media format - sample description is unsupported for the specified format flavor" UserInfo={NSUnderlyingError=0x282636100 {Error Domain=NSOSStatusErrorDomain Code=-12717 "(null)"}, NSLocalizedFailureReason=An unknown error occurred (-12717), NSDebugDescription=Media format - sample description is unsupported for the specified format flavor, NSLocalizedDescription=The operation could not be completed}

Accepted Answer
Seems there is a mismatch between the format of the data you are appending and what the file format you're writing to (looks like MPEG-4) can support. It looks like you are writing uncompressed video, which may not be valid for MPEG-4. You could try writing to a QuickTime Movie file instead, or writing the compressed (or recompressed) video to the output file.
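Roughly, the two directions suggested above look like this. This is only a sketch against the beginRecording() code in the question, reusing its videoOutputURL and formatDesc names; the width and height in option 2 are placeholders:

Code Block
// Option 1: keep appending the compressed H.264 sample buffers unchanged
// (outputSettings: nil + sourceFormatHint), but write a QuickTime movie
// instead of MPEG-4.
let movWriter = try? AVAssetWriter(outputURL: videoOutputURL, fileType: .mov)
let passthroughInput = AVAssetWriterInput(mediaType: .video,
                                          outputSettings: nil,
                                          sourceFormatHint: formatDesc)

// Option 2: keep the .mp4 container, but append *decoded* frames and let the
// writer re-encode them. The input gets real H.264 compression settings and
// the decoded CVPixelBuffers go through a pixel buffer adaptor instead of
// append(_ sampleBuffer:).
let settings: [String: Any] = [
    AVVideoCodecKey: AVVideoCodecType.h264,
    AVVideoWidthKey: 1280,   // placeholder; use the stream's real dimensions
    AVVideoHeightKey: 720
]
let compressedInput = AVAssetWriterInput(mediaType: .video, outputSettings: settings)
let adaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: compressedInput,
                                                   sourcePixelBufferAttributes: nil)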
Thank you for your help! I'm able to record videos now but they appear as gray frames only. If you're able to point me in the right direction I would appreciate it.

Here is the append function:

Code Block
// Record CMSampleBuffer with AVFoundation
      if self.videoStreamManager.isRecording,
        let videoPixelBuffer = self.videoStreamManager.videoWriterInputPixelBufferAdaptor {
        if videoPixelBuffer.append(frame, withPresentationTime: CMTimeMake(value: self.videoStreamManager.videoFrameCounter, timescale: self.videoStreamManager.videoFPS)) {
          self.videoStreamManager.videoFrameCounter += 1
        } else {
          print("Warning: Did not append video frame!")
        }
      }
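Two things that commonly produce all-gray output with this kind of pipeline, sketched below as assumptions rather than confirmed fixes: the decompression session may be handing back pixel buffers in a format the H.264 encoder does not accept, and the presentation times passed to the adaptor need to start at (or after) the time given to startSession(atSourceTime:). The snippet reuses vidInput from beginRecording() in the question:

Code Block
// Assumption: constrain the decoder output and the writer's pixel buffer pool
// to the same format, so the frames handed to the adaptor are something the
// H.264 encoder actually understands.
let pixelFormat = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
let bufferAttributes: [String: Any] = [
    kCVPixelBufferPixelFormatTypeKey as String: pixelFormat
]

// Pass bufferAttributes as imageBufferAttributes when creating the
// VTDecompressionSession, and as sourcePixelBufferAttributes here
// (videoWriterInputPixelBufferAdaptor in the snippet above).
let adaptor = AVAssetWriterInputPixelBufferAdaptor(
    assetWriterInput: vidInput,
    sourcePixelBufferAttributes: bufferAttributes)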

If you don't mind, please also report the less-than-informative error message you get. If it's not too much trouble, including a test app we can use to reproduce the issue would help a lot. Thanks.