How to apply a depth blur effect to the live preview like the Apple Camera app

I want to apply a depth blur effect to the real-time camera preview, like the system Camera app's Portrait mode.


I tried to implement this with the following code:


- CameraViewController

import AVFoundation
import CoreImage

extension CameraViewController: AVCaptureDataOutputSynchronizerDelegate {
    func dataOutputSynchronizer(_ synchronizer: AVCaptureDataOutputSynchronizer, didOutput synchronizedDataCollection: AVCaptureSynchronizedDataCollection) {
        guard let syncedDepthData = synchronizedDataCollection.synchronizedData(for: depthDataOutput) as? AVCaptureSynchronizedDepthData,
              let syncedVideoData = synchronizedDataCollection.synchronizedData(for: videoDataOutput) as? AVCaptureSynchronizedSampleBufferData else {
            print("Could not get data from synchronizedDataCollection")
            return
        }

        let sampleBuffer = syncedVideoData.sampleBuffer
        let depthData = syncedDepthData.depthData

        guard let videoPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }

        var finalImage = CIImage(cvPixelBuffer: videoPixelBuffer)

        // Scale & origin: aspect-fit the image into the drawable, centered.
        // previewView is a subclass of MTKView.
        let drawableSize = previewView.drawableSize

        let scaleX = drawableSize.width / finalImage.extent.width
        let scaleY = drawableSize.height / finalImage.extent.height
        let scale = min(scaleX, scaleY)

        finalImage = finalImage.transformed(by: CGAffineTransform(scaleX: scale, y: scale))

        let originX = (drawableSize.width - finalImage.extent.size.width) / 2
        let originY = (drawableSize.height - finalImage.extent.size.height) / 2

        finalImage = finalImage.transformed(by: CGAffineTransform(translationX: originX, y: originY))

        // Invert the depth map (so near pixels stay sharp) and upscale it
        // to the video image's size before using it as the blur mask.
        var depthImage = CIImage(cvPixelBuffer: depthData.depthDataMap).applyingFilter("CIColorInvert")
        let scaleFactor = Float(finalImage.extent.width) / Float(depthImage.extent.width)
        depthImage = depthImage.applyingFilter("CIBicubicScaleTransform", parameters: ["inputScale": scaleFactor])

        finalImage = finalImage.applyingFilter("CIMaskedVariableBlur", parameters: ["inputMask": depthImage, "inputRadius": 8.0])

        previewView.image = finalImage
    }
}
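
The capture session setup isn't shown above. For context, the outputs and synchronizer referenced by the delegate are configured roughly like this (an abbreviated sketch; the queue label, pixel format, and filtering flag are from my setup):

let dataOutputQueue = DispatchQueue(label: "data output queue")

// Both outputs are added to the same AVCaptureSession (created elsewhere).
videoDataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
session.addOutput(videoDataOutput)

depthDataOutput.isFilteringEnabled = true   // fill holes in the depth map
session.addOutput(depthDataOutput)

// Deliver video and depth together, matched by timestamp.
outputSynchronizer = AVCaptureDataOutputSynchronizer(dataOutputs: [videoDataOutput, depthDataOutput])
outputSynchronizer?.setDelegate(self, queue: dataOutputQueue)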
        
   

- Preview

import MetalKit
import CoreImage

class Preview: MTKView {

     ...

    override func draw(_ rect: CGRect) {
        guard let currentDrawable = currentDrawable,
              let commandBuffer = commandQueue?.makeCommandBuffer(),
              let previewImage = image else { return }

        // Render into the drawable's texture; the provider closure defers
        // the texture fetch until Core Image actually needs it.
        let destination = CIRenderDestination(width: Int(drawableSize.width),
                                              height: Int(drawableSize.height),
                                              pixelFormat: colorPixelFormat,
                                              commandBuffer: commandBuffer) { () -> MTLTexture in
            return currentDrawable.texture
        }

        do {
            try context.startTask(toRender: previewImage, to: destination)
        } catch {
            print("Failed to start render task: \(error)")
            return
        }

        commandBuffer.present(currentDrawable)
        commandBuffer.commit()
    }
}


I chose the CIMaskedVariableBlur filter instead of CIDepthBlurEffect because it needs less computation.
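
For reference, the CIDepthBlurEffect route I decided against would look roughly like this (a sketch only; the aperture value is arbitrary, and the filter wants disparity rather than depth):

// Sketch of the heavier CIDepthBlurEffect alternative (not what I use).
// `videoImage` is the CIImage from the video buffer; 4.0 is an arbitrary aperture.
let disparityData = depthData.converting(toDepthDataType: kCVPixelFormatType_DisparityFloat16)
let disparityImage = CIImage(cvPixelBuffer: disparityData.depthDataMap)
let blurred = videoImage.applyingFilter("CIDepthBlurEffect", parameters: [
    "inputDisparityImage": disparityImage,
    "inputAperture": 4.0
])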

However, applying CIMaskedVariableBlur on every frame of the MTKView (which runs at 30 FPS) is too slow and causes huge CPU usage (almost 50–60%).


I used CIRenderDestination and CIContext.startTask(toRender:to:) because they enqueue the render without making the CPU wait on the GPU.

Also, I create the CIContext from an MTLDevice that is shared between the CIContext and the MTKView.
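
The shared-device setup looks roughly like this (a minimal sketch; the .cacheIntermediates option is just something I set because the image changes every frame):

let device = MTLCreateSystemDefaultDevice()!
previewView.device = device
previewView.framebufferOnly = false   // let Core Image write into the drawable's texture

let context = CIContext(mtlDevice: device, options: [
    .cacheIntermediates: false        // don't cache intermediates that change every frame
])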


I can't find any further way to improve performance so that I can apply the depth blur effect in real time.


Do I have to write a custom filter, or find some way to do this with Metal Performance Shaders?
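
For example, is something like the following the right direction? This is only a rough sketch of a uniform MPSImageGaussianBlur pass (the device, queue, and texture names are assumed); it doesn't vary the blur by depth, which is exactly the part I don't know how to do efficiently:

import Metal
import MetalPerformanceShaders

// Rough sketch: a uniform Gaussian blur with MPS. `device`, `commandQueue`,
// and the textures are assumed to exist; this does NOT vary the blur radius
// by depth, which is the part I'm unsure how to implement.
func blurTexture(source: MTLTexture, destination: MTLTexture,
                 device: MTLDevice, commandQueue: MTLCommandQueue) {
    guard let commandBuffer = commandQueue.makeCommandBuffer() else { return }
    let gaussian = MPSImageGaussianBlur(device: device, sigma: 8.0)
    gaussian.encode(commandBuffer: commandBuffer,
                    sourceTexture: source,
                    destinationTexture: destination)
    commandBuffer.commit()
}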
