Fixed.
Instead of
`CustomVideoCompositionInstruction(videoTrack: sourceVideoTrack, usedForExport: true, dependency: arguments.dependency, timeRange: timeRange)`
it should be
`CustomVideoCompositionInstruction(videoTrack: compositionVideoTrack, usedForExport: true, dependency: arguments.dependency, timeRange: timeRange)`
Post
Replies
Boosts
Views
Activity
The custom video composition instruction:
/// Per-time-range instruction consumed by `CustomVideoCompositor`: it owns the
/// Metal-side objects (texture model, offscreen renderer) needed to render the
/// frames of one video track over `timeRange`.
final class CustomVideoCompositionInstruction: NSObject, AVVideoCompositionInstructionProtocol {
    /// Custom
    // Color multiplier forwarded to the offscreen renderer for each frame;
    // defaults to identity (1, 1, 1).
    var videoColorStrength = ColorStrength(red: 1, green: 1, blue: 1)
    let textureModel: TextureModel
    let dependency: Dependency
    let offscreenRenderer: OffscreenRenderer

    /// AVVideoCompositionInstructionProtocol
    var timeRange = CMTimeRange()
    var enablePostProcessing: Bool = false // to enable/disable AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: outputLayer)
    var containsTweening: Bool = false
    var requiredSourceTrackIDs: [NSValue]?
    var passthroughTrackID: CMPersistentTrackID = kCMPersistentTrackID_Invalid

    /// Fails (returns `nil`) when either the texture model or the offscreen
    /// renderer cannot be created; rethrows any error from `TextureModel`'s init.
    init?(videoTrack: AVAssetTrack, usedForExport: Bool, dependency: Dependency, timeRange: CMTimeRange) throws {
        // Export keeps the track's own orientation; playback forces .upSideDown —
        // presumably compensating for render-target coordinate flip; TODO confirm.
        let orientation: AVAssetTrack.VideoTrackOrientation
        if usedForExport {
            orientation = videoTrack.orientation
        } else {
            orientation = .upSideDown
        }

        guard let model = try TextureModel(dependency: dependency, orientation: orientation) else {
            print("TextureModel is nil")
            return nil
        }
        guard let renderer = OffscreenRenderer(device: dependency.device) else {
            print("OffscreenRenderer is nil")
            return nil
        }

        textureModel = model
        self.dependency = dependency
        offscreenRenderer = renderer
        requiredSourceTrackIDs = [NSNumber(value: videoTrack.trackID)]
        self.timeRange = timeRange
        containsTweening = true
    }
}
The custom video compositor:
import AVFoundation
/// Metal-backed compositor: for each frame request it pulls the source pixel
/// buffer, renders it through the instruction's `OffscreenRenderer`, and hands
/// the rendered buffer back to AVFoundation.
final class CustomVideoCompositor: NSObject, AVVideoCompositing {
    /// Failure surfaced to AVFoundation via `request.finish(with:)` when the
    /// source frame itself is unavailable.
    enum CustomVideoCompositorError: Int, Error, LocalizedError { case sourcePixelBuffer }

    // HDR ? — NOTE(review): fixed to 32BGRA; confirm whether HDR (10-bit) sources need support.
    var sourcePixelBufferAttributes: [String: Any]? = [String(kCVPixelBufferPixelFormatTypeKey): [kCVPixelFormatType_32BGRA]]
    // HDR ?
    var requiredPixelBufferAttributesForRenderContext: [String: Any] = [String(kCVPixelBufferPixelFormatTypeKey): [kCVPixelFormatType_32BGRA]]
    // var supportsHDRSourceFrames: Bool = true

    /// Only logs; nothing is cached per-context, so there is nothing to rebuild here.
    func renderContextChanged(_ newRenderContext: AVVideoCompositionRenderContext) {
        print(#function)
    }

    /// Renders one frame. Every failure after the source buffer is obtained
    /// degrades gracefully by passing the unmodified source frame through
    /// rather than failing the whole request.
    func startRequest(_ request: AVAsynchronousVideoCompositionRequest) {
        guard let sourcePixelBuffer = request.sourcePixelBuffer else {
            print("request.sourcePixelBuffer is nil")
            request.finish(with: CustomVideoCompositorError.sourcePixelBuffer)
            return
        }
        guard let instruction = request.videoCompositionInstruction as? CustomVideoCompositionInstruction else {
            print("CustomVideoCompositionInstruction is nil")
            request.finish(withComposedVideoFrame: sourcePixelBuffer)
            return
        }
        guard let sourceTexture = sourcePixelBuffer.createTexture2(dependency: instruction.dependency) else {
            print("sourceTexture is nil")
            request.finish(withComposedVideoFrame: sourcePixelBuffer)
            return
        }
        guard let outputPixelBuffer = request.renderContext.newPixelBuffer() else {
            print("request.renderContext.newPixelBuffer()")
            request.finish(withComposedVideoFrame: sourcePixelBuffer)
            return
        }
        guard let outputTexture = outputPixelBuffer.createTexture2(dependency: instruction.dependency) else {
            // BUGFIX: this log message previously duplicated the newPixelBuffer()
            // message above, making the two failure modes indistinguishable in logs.
            print("outputTexture is nil")
            request.finish(withComposedVideoFrame: sourcePixelBuffer)
            return
        }
        instruction.offscreenRenderer.render(
            background: OffscreenRenderer.Background(
                model: instruction.textureModel,
                texture: sourceTexture,
                colorStrength: instruction.videoColorStrength
            ),
            outputTexture: outputTexture
        )
        request.finish(withComposedVideoFrame: outputPixelBuffer)
        instruction.dependency.clearCache() // CVMetalTextureCacheFlush
    }
}