CoreML Hand Pose classification: effects don't appear on the camera.

I created a Hand Pose model using CreateML and integrated it into my SwiftUI app.

While coding, I referred to the Apple Developer documentation for the necessary code.

However, when I ran the app on an iPhone 14, the camera didn't display any effects or finger numbers as expected.

Note: I've already tested the ML model separately, and it works fine.
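
For reference, the standalone test looked roughly like this (a simplified sketch: the testHandPoseModel function name, the image parameter, and the .up orientation are just placeholders here, and modelHand is the class generated by CreateML):

import CoreML
import UIKit
import Vision

func testHandPoseModel(on image: UIImage) {
    // `image` is any still photo containing a hand (placeholder input).
    guard let cgImage = image.cgImage else { return }

    // Detect up to one hand in the still image.
    let request = VNDetectHumanHandPoseRequest()
    request.maximumHandCount = 1
    let handler = VNImageRequestHandler(cgImage: cgImage, orientation: .up, options: [:])

    do {
        try handler.perform([request])
        guard let observation = request.results?.first else {
            print("No hand detected")
            return
        }

        // Feed the detected keypoints into the CreateML classifier.
        let keypoints = try observation.keypointsMultiArray()
        let model = try modelHand(configuration: MLModelConfiguration())
        let prediction = try model.prediction(poses: keypoints)
        print("Label: \(prediction.label), probabilities: \(prediction.labelProbabilities)")
    } catch {
        print("Hand pose test failed: \(error)")
    }
}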

The app code:

import ARKit
import CoreML
import SceneKit
import SwiftUI
import Vision

struct ARViewContainer: UIViewControllerRepresentable {
let arViewController: ARViewController
let model: modelHand

func makeUIViewController(context: UIViewControllerRepresentableContext<ARViewContainer>) -> ARViewController {
    arViewController.model = model
    return arViewController
}

func updateUIViewController(_ uiViewController: ARViewController, context: UIViewControllerRepresentableContext<ARViewContainer>) {
    // Update the view controller if needed
}

}

class ARViewController: UIViewController, ARSessionDelegate {
var frameCounter = 0
let handPosePredictionInterval = 10
var model: modelHand!
var effectNode: SCNNode?

override func viewDidLoad() {
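    // Set up the AR scene view and start a world-tracking session with person segmentation.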
    super.viewDidLoad()
    
    let arView = ARSCNView(frame: view.bounds)
    view.addSubview(arView)
    
    let session = ARSession()
    session.delegate = self
    let configuration = ARWorldTrackingConfiguration()
    configuration.frameSemantics = .personSegmentationWithDepth
    arView.session.run(configuration)
}

func session(_ session: ARSession, didUpdate frame: ARFrame) {
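    // Detect hand poses in the captured camera frame and run the CreateML classifier on the result.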
    let pixelBuffer = frame.capturedImage
    let handPoseRequest = VNDetectHumanHandPoseRequest()
    handPoseRequest.maximumHandCount = 1
    handPoseRequest.revision = VNDetectHumanHandPoseRequestRevision1
    
    let handler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, options: [:])
    
    do {
        try handler.perform([handPoseRequest])
    } catch {
        assertionFailure("Hand Pose Request failed: \(error)")
    }
    
    guard let handPoses = handPoseRequest.results, !handPoses.isEmpty else {
        return
    }
    
    if frameCounter % handPosePredictionInterval == 0 {
        if let handObservation = handPoses.first as? VNHumanHandPoseObservation {
            do {
                let keypointsMultiArray = try handObservation.keypointsMultiArray()
                let handPosePrediction = try model.prediction(poses: keypointsMultiArray)
                let confidence = handPosePrediction.labelProbabilities[handPosePrediction.label]!
                
                print("Confidence: \(confidence)")
                
                if confidence > 0.9 {
                    print("Rendering hand pose effect: \(handPosePrediction.label)")
                    renderHandPoseEffect(name: handPosePrediction.label)
                }
            } catch {
                fatalError("Failed to perform hand pose prediction: \(error)")
            }
        }
    }
}

func renderHandPoseEffect(name: String) {
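    // Show the particle effect while the "One" pose is detected; remove it for any other label.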
    switch name {
    case "One":
        print("Rendering effect for One")
        if effectNode == nil {
            effectNode = addParticleNode(for: "One")
        }
    default:
        print("Removing all particle nodes")
        removeAllParticleNode()
    }
}

func removeAllParticleNode() {
    effectNode?.removeFromParentNode()
    effectNode = nil
}

func addParticleNode(for poseName: String) -> SCNNode {
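    // Currently returns an empty SCNNode; no particle system is attached and the node is never added to a scene.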
    print("Adding particle node for pose: \(poseName)")
    let particleNode = SCNNode()
    return particleNode
}

}

struct ContentView: View {
let model = modelHand()

var body: some View {
    ARViewContainer(arViewController: ARViewController(), model: model)
}

}

#Preview {
    ContentView()
}
