Using videoComposition breaks export the second time (The video could not be composed).

Hello guys.

I'm using Metal with a custom video composition instruction and a custom video compositor to change each frame of a video. It works perfectly with AVPlayer, and it works perfectly when exporting the original AVAsset for the first time, but the export fails if I use a previously exported AVAsset as the input.

For example, I export original.mov to exported.mov – that works perfectly. But when I then use exported.mov as the input (original) AVAsset, I get the error "The video could not be composed" and I don't know how to fix it. Maybe someone has run into the same issue and can help?

export function

func export(arguments: Arguments) throws {
    let timeRange = CMTimeRange(start: .zero, duration: arguments.asset.duration)
    let mixComposition = AVMutableComposition()
    guard
        let sourceVideoTrack = arguments.asset.tracks(withMediaType: .video).first,
        let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
    else {
        print("composition.addMutableTrack is nil")
        return
    }
    try compositionVideoTrack.insertTimeRange(timeRange, of: sourceVideoTrack, at: .zero)

    let audioMix = AVMutableAudioMix()
    if
        let sourceAudioTrack = arguments.asset.tracks(withMediaType: .audio).first,
        let compositionAudioTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid)
    {
        let param = AVMutableAudioMixInputParameters(track: sourceAudioTrack)
        param.trackID = compositionAudioTrack.trackID
        try compositionAudioTrack.insertTimeRange(timeRange, of: sourceAudioTrack, at: .zero)
        audioMix.inputParameters = [param]
    }

    let presetName = sourceVideoTrack.hasMediaCharacteristic(.containsHDRVideo) ? AVAssetExportPresetHEVCHighestQuality : AVAssetExportPresetHighestQuality
    exportSession = AVAssetExportSession(asset: mixComposition, presetName: presetName)
    guard let exportSession = exportSession
    else {
        print("AVAssetExportSession returned nil")
        return
    }

    guard let compositionInstruction = try CustomVideoCompositionInstruction(videoTrack: sourceVideoTrack, usedForExport: true, dependency: arguments.dependency, timeRange: timeRange)
    else {
        print("CustomVideoCompositionInstruction is nil")
        return
    }
    compositionInstruction.timeRange = CMTimeRange(start: .zero, duration: mixComposition.duration)
    compositionInstruction.videoColorStrength = arguments.colorStrength
    let mutableComposition = AVMutableVideoComposition()
    mutableComposition.renderSize = sourceVideoTrack.orientation.size(for: sourceVideoTrack.naturalSize)
    mutableComposition.frameDuration = CMTime(value: 1, timescale: CMTimeScale(sourceVideoTrack.nominalFrameRate))
    mutableComposition.customVideoCompositorClass = CustomVideoCompositor.self
    mutableComposition.instructions = [compositionInstruction]

    exportSession.timeRange = timeRange
    // 1. works perfectly with an original AVAsset that was not exported earlier (original.mov is exported to exported.mov)
    // 2. gives an error when using exported.mov as the original AVAsset if exportSession.videoComposition is not nil
    exportSession.videoComposition = mutableComposition
    exportSession.outputFileType = .mov
    exportSession.outputURL = arguments.url
    exportSession.audioMix = audioMix

    timer = Timer.scheduledTimer(withTimeInterval: 0.5, repeats: true, block: { [weak self] timer in
        arguments.progressChanged(exportSession.progress)
        if exportSession.progress == 1 {
            timer.invalidate()
            self?.timer = nil
        }
    })

    // 1. works perfectly with an original AVAsset that was not exported earlier (original.mov is exported to exported.mov)
    // 2. gives an error when using exported.mov as the original AVAsset if exportSession.videoComposition is not nil
    self.exportSession?.exportAsynchronously { [weak self] in
        guard
            let self = self,
            let session = self.exportSession
        else { return }

        if let error = session.error {
            print("session.error", error) // "The video could not be composed" when using a previously exported AVAsset
        }

        self.timer?.invalidate()
        self.timer = nil
        self.exportSession = nil
        arguments.finished(session.status, session.error)
    }
}
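
Not part of the original post, but a pre-flight check for this class of failure: an AVVideoComposition can be validated against the asset it will be rendered from before the export starts, and an AVVideoCompositionValidationHandling delegate can additionally report the specific problem spots. A minimal sketch, assuming the mixComposition, mutableComposition and timeRange from the function above:

// Hypothetical diagnostic, not from the original post: if an instruction references a
// track ID that does not exist in mixComposition, validation fails here instead of the
// export later failing with "The video could not be composed".
let compositionIsValid = mutableComposition.isValid(
    for: mixComposition,
    timeRange: timeRange,
    validationDelegate: nil // or an AVVideoCompositionValidationHandling object for details
)
print("video composition is valid:", compositionIsValid)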

Accepted Reply

Fixed.

Instead of

CustomVideoCompositionInstruction(videoTrack: sourceVideoTrack, usedForExport: true, dependency: arguments.dependency, timeRange: timeRange)

it should be

CustomVideoCompositionInstruction(videoTrack: compositionVideoTrack, usedForExport: true, dependency: arguments.dependency, timeRange: timeRange)
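
For context, a hedged explanation of why this fix works: the instruction builds its requiredSourceTrackIDs from the track it is given, and that ID has to belong to the composition the export session actually renders. With the original recording the source asset's track ID presumably happened to match the composition track's ID; in the previously exported file it didn't, so the composition was invalid and the export failed. A quick hypothetical check, using the names from the export function above:

// Hypothetical diagnostic: with a previously exported file these two IDs typically differ,
// and requiredSourceTrackIDs must reference a track of the composition being rendered.
print("source video track ID:", sourceVideoTrack.trackID)
print("composition video track ID:", compositionVideoTrack.trackID)
print("instruction requires:", compositionInstruction.requiredSourceTrackIDs ?? [])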

Replies

custom compositor

import AVFoundation

final class CustomVideoCompositor: NSObject, AVVideoCompositing {
    enum CustomVideoCompositorError: Int, Error, LocalizedError { case sourcePixelBuffer }

    // HDR ?
    var sourcePixelBufferAttributes: [String: Any]? = [String(kCVPixelBufferPixelFormatTypeKey): [kCVPixelFormatType_32BGRA]]
    // HDR ?
    var requiredPixelBufferAttributesForRenderContext: [String: Any] = [String(kCVPixelBufferPixelFormatTypeKey): [kCVPixelFormatType_32BGRA]]

//    var supportsHDRSourceFrames: Bool = true

    func renderContextChanged(_ newRenderContext: AVVideoCompositionRenderContext) {
        print(#function)
    }

    func startRequest(_ request: AVAsynchronousVideoCompositionRequest) {
        guard let sourcePixelBuffer = request.sourcePixelBuffer
        else {
            print("request.sourcePixelBuffer is nil")
            request.finish(with: CustomVideoCompositorError.sourcePixelBuffer)
            return
        }
        guard let instruction = (request.videoCompositionInstruction as? CustomVideoCompositionInstruction)
        else {
            print("CustomVideoCompositionInstruction is nil")
            request.finish(withComposedVideoFrame: sourcePixelBuffer)
            return
        }
        guard let sourceTexture = sourcePixelBuffer.createTexture2(dependency: instruction.dependency)
        else {
            print("sourceTexture is nil")
            request.finish(withComposedVideoFrame: sourcePixelBuffer)
            return
        }
        guard let outputPixelBuffer = request.renderContext.newPixelBuffer()
        else {
            print("request.renderContext.newPixelBuffer()")
            request.finish(withComposedVideoFrame: sourcePixelBuffer)
            return
        }
        guard let outputTexture = outputPixelBuffer.createTexture2(dependency: instruction.dependency)
        else {
            print("request.renderContext.newPixelBuffer()")
            request.finish(withComposedVideoFrame: sourcePixelBuffer)
            return
        }
        instruction.offscreenRenderer.render(
            background: OffscreenRenderer.Background(
                model: instruction.textureModel,
                texture: sourceTexture,
                colorStrength: instruction.videoColorStrength
            ),
            outputTexture: outputTexture
        )
        request.finish(withComposedVideoFrame: outputPixelBuffer)
        instruction.dependency.clearCache() // CVMetalTextureCacheFlush
    }
}
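
A side note on how the compositor gets its frames (request.sourcePixelBuffer above is presumably a convenience extension): the stock API hands out source frames per track ID, which is exactly why the instruction's requiredSourceTrackIDs has to point at a real track of the composition. A minimal sketch of the standard lookup, assuming a single video track:

import AVFoundation

// Hedged sketch of the standard AVFoundation lookup that a helper like
// request.sourcePixelBuffer is presumably built on. Frames are requested per track ID,
// so the ID must belong to a track of the composition being rendered.
func firstSourceFrame(of request: AVAsynchronousVideoCompositionRequest) -> CVPixelBuffer? {
    guard let trackID = request.sourceTrackIDs.first?.int32Value else { return nil }
    return request.sourceFrame(byTrackID: trackID)
}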

custom instruction

final class CustomVideoCompositionInstruction: NSObject, AVVideoCompositionInstructionProtocol {

    /// Custom
    var videoColorStrength = ColorStrength(red: 1, green: 1, blue: 1)
    let textureModel: TextureModel
    let dependency: Dependency
    let offscreenRenderer: OffscreenRenderer


    /// AVVideoCompositionInstructionProtocol
    var timeRange = CMTimeRange()
    var enablePostProcessing: Bool = false // to enable/disable AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: outputLayer)
    var containsTweening: Bool = false
    var requiredSourceTrackIDs: [NSValue]?
    var passthroughTrackID: CMPersistentTrackID = kCMPersistentTrackID_Invalid

    init?(videoTrack: AVAssetTrack, usedForExport: Bool, dependency: Dependency, timeRange: CMTimeRange) throws {
        let orientation: AVAssetTrack.VideoTrackOrientation = usedForExport ? videoTrack.orientation : .upSideDown

        guard
            let textureModel = try TextureModel(dependency: dependency, orientation: orientation)
        else {
            print("TextureModel is nil")
            return nil
        }
        guard let offscreenRenderer = OffscreenRenderer(device: dependency.device)
        else {
            print("OffscreenRenderer is nil")
            return nil
        }

        self.textureModel = textureModel
        self.dependency = dependency
        self.offscreenRenderer = offscreenRenderer
        self.requiredSourceTrackIDs = [NSNumber(value: videoTrack.trackID)] // this ID must refer to a track of the asset/composition being rendered (see the accepted reply)
        self.timeRange = timeRange

        containsTweening = true
    }
}
