I'm developing a macOS application in Swift that uses a camera extension. I'm using the Vision framework's VNGeneratePersonSegmentationRequest to apply a background-blur effect to the camera feed, but the video lags noticeably. I've tried to optimize the request (fast quality level, processing only every other frame), but the issue persists. Could anyone provide insights or suggestions on how to resolve the lag?

Details:
Platform: macOS
Language: Swift
Framework: Vision
The code snippet I am using is below:

```swift
import AVFoundation
import Cocoa
import CoreImage
import CoreImage.CIFilterBuiltins
import Vision

class ViewController: NSViewController, AVCaptureVideoDataOutputSampleBufferDelegate {
    var frameCounter = 0
    let frameSkipRate = 2
    private let visionQueue = DispatchQueue(label: "com.example.visionQueue")
    private let context = CIContext()

    // needToStream, enqueued, readyToEnqueue, sinkQueue, image and enqueue(_:_:) are part of
    // the camera-extension streaming code and are defined elsewhere in the project.

    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        // Process only every other frame to reduce the Vision workload
        frameCounter += 1
        if frameCounter % frameSkipRate != 0 {
            return
        }
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
        let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
        performPersonSegmentation(on: ciImage) { [self] mask in
            guard let mask = mask else { return }
            let blurredBackground = self.applyBlur(to: ciImage)
            let resultImage = self.composeImage(with: blurredBackground, mask: mask, original: ciImage)
            let nsImage = ciImageToNSImage(ciImage: resultImage)
            DispatchQueue.main.async { [self] in
                // Update the NSImageView or other UI elements with the composite image,
                // then forward the frame to the camera-extension sink
                if needToStream {
                    if (enqueued == false || readyToEnqueue == true), let queue = self.sinkQueue {
                        enqueued = true
                        readyToEnqueue = false
                        if let _ = image, let cgImage = nsImage.cgImage(forProposedRect: nil, context: nil, hints: nil) {
                            enqueue(queue, cgImage)
                        }
                    }
                }
            }
        }
    }

    private func performPersonSegmentation(on image: CIImage, completion: @escaping (CIImage?) -> Void) {
        let request = VNGeneratePersonSegmentationRequest()
        request.qualityLevel = .fast // Adjust quality level as needed
        request.outputPixelFormat = kCVPixelFormatType_OneComponent8
        let handler = VNImageRequestHandler(ciImage: image, options: [:])
        visionQueue.async {
            do {
                try handler.perform([request])
                guard let result = request.results?.first as? VNPixelBufferObservation else {
                    completion(nil)
                    return
                }
                let maskPixelBuffer = result.pixelBuffer
                let maskImage = CIImage(cvPixelBuffer: maskPixelBuffer)
                completion(maskImage)
            } catch {
                print("Error performing segmentation: \(error)")
                completion(nil)
            }
        }
    }

    private func composeImage(with blurredBackground: CIImage, mask: CIImage, original: CIImage) -> CIImage {
        // Invert the mask to blur the background
        let invertedMask = mask.applyingFilter("CIColorInvert")
        // Ensure the mask is correctly resized to match the original image
        let resizedMask = invertedMask.transformed(by: CGAffineTransform(
            scaleX: original.extent.width / invertedMask.extent.width,
            y: original.extent.height / invertedMask.extent.height))
        // Blend the images using the mask
        let blendFilter = CIFilter(name: "CIBlendWithMask")!
        blendFilter.setValue(blurredBackground, forKey: kCIInputImageKey)
        blendFilter.setValue(original, forKey: kCIInputBackgroundImageKey)
        blendFilter.setValue(resizedMask, forKey: kCIInputMaskImageKey)
        return blendFilter.outputImage ?? original
    }

    private func ciImageToNSImage(ciImage: CIImage) -> NSImage {
        let cgImage = context.createCGImage(ciImage, from: ciImage.extent)!
        return NSImage(cgImage: cgImage, size: ciImage.extent.size)
    }

    private func applyBlur(to image: CIImage) -> CIImage {
        let blurFilter = CIFilter.gaussianBlur()
        blurFilter.inputImage = image
        blurFilter.radius = 7.0 // Adjust the blur radius as needed
        return blurFilter.outputImage ?? image
    }
}
```
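
For context, frames reach captureOutput(_:didOutput:from:) from a standard AVCaptureSession delegate setup along these lines (a simplified sketch; makeCaptureSession and the queue label are placeholder names, not the exact project code):

```swift
import AVFoundation

// Simplified capture-session sketch (placeholder names; the real setup may differ).
func makeCaptureSession(delegate: AVCaptureVideoDataOutputSampleBufferDelegate) -> AVCaptureSession {
    let session = AVCaptureSession()
    session.sessionPreset = .high

    // Default camera as input
    if let camera = AVCaptureDevice.default(for: .video),
       let input = try? AVCaptureDeviceInput(device: camera),
       session.canAddInput(input) {
        session.addInput(input)
    }

    // BGRA frames are convenient for Core Image / Vision processing
    let output = AVCaptureVideoDataOutput()
    output.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
    output.alwaysDiscardsLateVideoFrames = true
    // Deliver sample buffers to the view controller on a background queue
    output.setSampleBufferDelegate(delegate, queue: DispatchQueue(label: "com.example.cameraQueue"))
    if session.canAddOutput(output) {
        session.addOutput(output)
    }

    session.startRunning()
    return session
}
```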