Hello @zissou_,
That definitely simplifies things if you can apply the filter to everything. In that case — and since it appears that you are using RealityKit — I recommend that you make use of the post-processing render callback together with Core Image to implement this. Here is a short example:
import UIKit
import RealityKit
import CoreImage.CIFilterBuiltins
/// Renders a RealityKit scene and applies a Core Image mono filter to every
/// frame via ARView's post-processing render callback.
class ViewController: UIViewController {
    @IBOutlet var arView: ARView!

    // Initialize once and re-use to avoid expensive operations in the
    // post-processing callback, which runs every frame.
    let ciContext = CIContext()

    // The filter is likewise created once: allocating a CIFilter per frame
    // inside the render callback is unnecessary work in a hot path.
    let monoFilter = CIFilter.photoEffectMono()

    override func viewDidLoad() {
        super.viewDidLoad()

        // Load the "Box" scene from the "Experience" Reality File.
        // `try!` is tolerable only because the scene ships in the app bundle;
        // a missing resource is a programmer error worth crashing on.
        let boxAnchor = try! Experience.loadBox()

        // Add the box anchor to the scene.
        arView.scene.anchors.append(boxAnchor)

        // Use `weak` rather than `unowned`: the ARView stores this escaping
        // closure, and `unowned` would crash if the callback ever outlived
        // the view controller.
        arView.renderCallbacks.postProcess = { [weak self] postProcessingContext in
            guard let self = self else { return }

            // Make a CIImage from the rendered frame buffer. The orientation
            // is essential so that Core Image interprets the Metal texture
            // contents correctly.
            guard let source = CIImage(mtlTexture: postProcessingContext.sourceColorTexture)?
                .oriented(.downMirrored) else { return }

            // Set the source image as the input to the mono filter.
            self.monoFilter.inputImage = source

            // Request the filtered output image; bail out of this frame's
            // post-processing rather than force-unwrapping on failure.
            guard let filteredSource = self.monoFilter.outputImage else { return }

            // Render the filtered output image to the target color texture
            // (this is the texture that ultimately gets displayed).
            let destination = CIRenderDestination(
                mtlTexture: postProcessingContext.targetColorTexture,
                commandBuffer: nil
            )
            do {
                let renderTask = try self.ciContext.startTask(toRender: filteredSource, to: destination)
                // You must waitUntilCompleted here. RealityKit expects all
                // post-processing work to be finished by the end of this closure.
                try renderTask.waitUntilCompleted()
            } catch {
                fatalError(error.localizedDescription)
            }
        }
    }
}