AVVideoCompositor Renders Live Changes Slowly

I am working on a video editing app and I recently changed my code to render frames using a custom compositor. Filters are rendered well, but when I try to change a property of the filter, for example the intensity, the updates are laggy. I didn't have this problem before using the custom compositor. The problem (I'm assuming) is that the renderer object now lives inside the compositor, so when I bind its values to a slider from outside the compositor class, the changes don't take effect instantly. I am using SwiftUI. Here is part of my custom compositor:

/// Custom `AVVideoCompositing` implementation that renders per-frame filter
/// chains and cross-track transitions through a shared Metal context.
///
/// NOTE(review): the original excerpt does not show the required
/// `AVVideoCompositing` properties (`sourcePixelBufferAttributes`,
/// `requiredPixelBufferAttributesForRenderContext`) or
/// `renderContextChanged(_:)` — presumably they exist elsewhere; confirm.
class CustomVideoCompositor: NSObject, AVVideoCompositing {

    /// Shared Metal state (device, command queue, texture cache) handed to
    /// filters and transitions. `nil` when Metal setup failed at init time,
    /// in which case frames pass through unrendered.
    var metalContext: RendererContext?

    // NOTE(review): these two members are used below but were not declared in
    // the original excerpt ("part of my custom compositor"); declared here so
    // the class is self-contained. A serial queue matches the original
    // sync/async cancellation pattern — confirm against the full source.
    private let renderingQueue = DispatchQueue(label: "CustomVideoCompositor.renderingQueue")
    private var shouldCancelAllRequests = false

    override init() {
        // Bail out (leaving metalContext nil) if the device has no Metal
        // support or a command queue cannot be created.
        guard let device = MTLCreateSystemDefaultDevice(),
              let commandQueue = device.makeCommandQueue() else {
                  super.init()
                  return
              }

        var newTextureCache: CVMetalTextureCache?
        CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, device, nil, &newTextureCache)

        // Fixed typo: `texureCache` -> `textureCache`.
        guard let textureCache = newTextureCache else {
            super.init()
            return
        }

        metalContext = RendererContext(device: device, commandQueue: commandQueue, textureCache: textureCache)

        super.init()
    }//init

    /// Renders one composition request asynchronously on `renderingQueue`.
    /// Dispatches on plain-filter vs transition instructions; any failure to
    /// obtain buffers or edits finishes the request with an error.
    func startRequest(_ request: AVAsynchronousVideoCompositionRequest) {
        renderingQueue.async {
            // BUG FIX: the autoreleasepool originally wrapped the *dispatch*
            // (`autoreleasepool { renderingQueue.async { ... } }`), so the
            // pool drained immediately after the work was scheduled — before
            // any per-frame Core Video objects existed. The pool must live
            // inside the queue's closure to actually bound frame lifetimes.
            autoreleasepool {
                if self.shouldCancelAllRequests {
                    request.finishCancelledRequest()
                } else {

                    if let currentInstruction = request.videoCompositionInstruction as? CustomVideoCompositionInstruction {
                        guard let inputBuffer = request.sourceFrame(byTrackID: currentInstruction.trackID),
                              let videoEdits = currentInstruction.videoEdits
                        else {
                            request.finish(with: PixelBufferRequestError.newRenderedPixelBufferForRequestFailure)
                            return
                        }

                        request.finish(withComposedVideoFrame: self.renderVideoEdits(request: request, videoEdits: videoEdits, inputBuffer: inputBuffer))

                    } else if let currentInstruction = request.videoCompositionInstruction as? TransitionInstruction {
                        // Transitions need both track frames, a destination
                        // buffer, both edit lists, and a live Metal context.
                        guard let fromBuffer = request.sourceFrame(byTrackID: currentInstruction.fromTrackID),
                              let toBuffer = request.sourceFrame(byTrackID: currentInstruction.toTrackID),
                              let outputBuffer = request.renderContext.newPixelBuffer(),
                              let fromVideoEdits = currentInstruction.fromVideoEdits,
                              let toVideoEdits = currentInstruction.toVideoEdits,
                              let transitionEdit = currentInstruction.transitionEdit,
                              let metalContext = self.metalContext
                        else {
                            request.finish(with: PixelBufferRequestError.newRenderedPixelBufferForRequestFailure)
                            return
                        }

                        // Lazily attach the shared Metal context on first use.
                        if transitionEdit.transition.context == nil {
                            transitionEdit.transition.setContext(context: metalContext)
                        }

                        transitionEdit.transition.prepare()

                        // Apply each side's filter chain, then blend the two
                        // rendered frames into the destination buffer.
                        let renderedFromBuffer = self.renderVideoEdits(request: request, videoEdits: fromVideoEdits, inputBuffer: fromBuffer)

                        let renderedToBuffer = self.renderVideoEdits(request: request, videoEdits: toVideoEdits, inputBuffer: toBuffer)

                        let renderedOutputBuffer = transitionEdit.transition.render(fromBuffer: renderedFromBuffer, toBuffer: renderedToBuffer, destinationBuffer: outputBuffer)

                        request.finish(withComposedVideoFrame: renderedOutputBuffer)

                    } else {
                        // Unknown instruction type: fail the request rather
                        // than stall the composition.
                        request.finish(with: PixelBufferRequestError.newRenderedPixelBufferForRequestFailure)
                    }

                }
            }//autoreleasepool
        }//renderingQueue.async
    }//startRequest

    /// Applies `videoEdits.filters` in order to `inputBuffer`, threading each
    /// filter's output into the next. Returns the last successfully rendered
    /// buffer; falls back to the input (or partial result) when the Metal
    /// context is missing or a destination buffer cannot be allocated.
    func renderVideoEdits(request: AVAsynchronousVideoCompositionRequest, videoEdits: VideoEdits, inputBuffer: CVPixelBuffer) -> CVPixelBuffer {
        guard let metalContext = self.metalContext else {
            // No Metal context: pass the frame through untouched.
            return inputBuffer
        }

        var renderedBuffer: CVPixelBuffer = inputBuffer

        for filter in videoEdits.filters {
            // Lazily attach the shared Metal context on first use.
            if filter.context == nil {
                filter.setContext(context: metalContext)
            }
            filter.prepare()

            guard let outputBuffer = request.renderContext.newPixelBuffer() else {
                // Pool exhausted: return what has been rendered so far.
                return renderedBuffer
            }

            renderedBuffer = filter.render(inputBuffer: renderedBuffer, outputBuffer: outputBuffer)
        }

        return renderedBuffer
    }//renderVideoEdits

    /// `AVVideoCompositing` cancellation hook. The `sync` barrier guarantees
    /// any in-flight request sees the flag before this method returns; the
    /// trailing `async` re-arms the compositor once queued requests have
    /// drained (they were enqueued before the reset block).
    func cancelAllPendingVideoCompositionRequests() {
        renderingQueue.sync {
            shouldCancelAllRequests = true
        }
        renderingQueue.async {
            self.shouldCancelAllRequests = false
        }
    }//cancelAllPendingVideoCompositionRequests
}//CustomVideoCompositor

I access the renderer in a swiftUI view by doing something like this:

@State var renderer: FilterRenderer
renderer = videoComposition.instructions[currentInstruction].videoEdits.filter
Slider(value: $renderer.intensity, in: 0.0...1.0)

I used to render filters using an AVPlayerItemVideoOutput and this implementation worked just fine. It was fast and efficient. Any idea why this is happening? I needed to switch to using a custom compositor so I can source separate frames for transitions.

Answered by developingCamel in 701227022

Problem was with the videoComposition preparing frames ahead of time. To get the filters to update in real time, I need to reset the composition every time I make a change.

playerItem.videoComposition = playerItem.videoComposition.mutableCopy() as? AVVideoComposition

The problem now is that the slider makes so many changes at a rate faster than the CPU can handle resulting in another lag. This time the filter intensity changes smoothly, but the video lags and skips frames. Any thoughts?

Accepted Answer

Problem was with the videoComposition preparing frames ahead of time. To get the filters to update in real time, I need to reset the composition every time I make a change.

playerItem.videoComposition = playerItem.videoComposition.mutableCopy() as? AVVideoComposition

The problem now is that the slider makes so many changes at a rate faster than the CPU can handle resulting in another lag. This time the filter intensity changes smoothly, but the video lags and skips frames. Any thoughts?

AVVideoCompositor Renders Live Changes Slow
 
 
Q