import ARKit

arScnView = ARSCNView(frame: CGRect.zero, options: nil)
arScnView.delegate = self
arScnView.automaticallyUpdatesLighting = true
arScnView.allowsCameraControl = true
addSubview(arScnView)

arSession = arScnView.session
arSession.delegate = self

config = ARWorldTrackingConfiguration()
// Scene reconstruction requires a LiDAR-equipped device.
if ARWorldTrackingConfiguration.supportsSceneReconstruction(.meshWithClassification) {
    config.sceneReconstruction = .meshWithClassification
}
config.environmentTexturing = .automatic
// The session never starts without this call.
arSession.run(config)
func session(_ session: ARSession, didAdd anchors: [ARAnchor]) {
    for anchor in anchors {
        if let meshAnchor = anchor as? ARMeshAnchor {
            // toSCNNode() is my own extension that converts the mesh
            // anchor's geometry into SceneKit geometry.
            let node = meshAnchor.toSCNNode()
            self.arScnView.scene.rootNode.addChildNode(node)
        }
        if let environmentProbeAnchor = anchor as? AREnvironmentProbeAnchor {
            // Can I retrieve the texture map corresponding to an ARMeshAnchor
            // from the environment probe anchor? Or how else can I get the
            // texture map for an ARMeshAnchor?
        }
    }
}
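
What I have found so far: the probe anchor only exposes its captured lighting as a cube map via environmentTexture; it does not appear to be a per-mesh texture map. A minimal sketch of reading it (applying it to the scene's lighting environment is my guess at the intended use, not a way to texture the mesh):

import ARKit
import SceneKit

// Sketch: feed a probe's captured cube map into SceneKit's lighting environment.
func apply(_ probeAnchor: AREnvironmentProbeAnchor, to scene: SCNScene) {
    // environmentTexture is a cube-map MTLTexture describing incoming light
    // around the probe; it is lighting data, not a surface texture that maps
    // onto an ARMeshAnchor's geometry.
    if let cubeMap = probeAnchor.environmentTexture {
        scene.lightingEnvironment.contents = cubeMap
    }
}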
How can I scan a 3D scene and save it as a USDZ file? That is the scenario I want to achieve.
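
For the export step, SceneKit can write a scene to .usdz directly; a minimal sketch, assuming the reconstructed mesh nodes have already been added to arScnView.scene (error handling omitted):

import SceneKit

// Sketch: export the current SceneKit scene as a USDZ file.
func exportScene(_ scene: SCNScene, to directory: URL) {
    let url = directory.appendingPathComponent("scan.usdz")
    // SCNScene.write(to:options:delegate:progressHandler:) infers the format
    // from the .usdz extension (iOS 12+) and returns false on failure.
    let ok = scene.write(to: url, options: nil, delegate: nil, progressHandler: nil)
    print("USDZ export \(ok ? "succeeded" : "failed"): \(url.path)")
}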
In larger scenes, I need to record motion trajectories. RoomCaptureSession always starts from (0,0,0), so I use the last tracked point as an offset to connect the trajectory points from multiple sessions, much like StructureBuilder merges models (a simplified sketch follows below).
But when StructureBuilder merges, it discards some of the models, which makes my saved trajectory points lose accuracy, and I cannot tell how much of the scene was removed between them.
Is there any way to solve this?
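
Here is a simplified sketch of the offset bookkeeping I am describing (names are placeholders; the real code records camera poses from RoomCaptureSession):

import simd

// Sketch: chain per-session trajectories that each start at the origin by
// offsetting every new session with the last point of the previous one.
struct TrajectoryRecorder {
    private(set) var points: [SIMD3<Float>] = []
    private var offset = SIMD3<Float>.zero

    // Call when a new RoomCaptureSession starts: it begins at (0,0,0),
    // so shift its points by where the previous session ended.
    mutating func beginNewSession() {
        offset = points.last ?? .zero
    }

    // Record a point given in the new session's local coordinates.
    mutating func record(_ localPoint: SIMD3<Float>) {
        points.append(localPoint + offset)
    }
}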
invalidValue(-nan, Swift.EncodingError.Context(codingPath: [CapturedVolumeCodingKeys(stringValue: "rooms", intValue: nil), _JSONKey(stringValue: "Index 0", intValue: 0), CapturedVolumeCodingKeys(stringValue: "openings", intValue: nil), _JSONKey(stringValue: "Index 0", intValue: 0), CodingKeys(stringValue: "dimensions", intValue: nil), _JSONKey(stringValue: "Index 0", intValue: 0)], debugDescription: "Unable to encode Float.nan directly in JSON.", underlyingError: nil))
Why does this exception occur during encoding? All of the scan data comes from CapturedRoom and has not been modified.
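
The error says one of the opening dimensions is NaN, which JSONEncoder cannot represent by default. As a diagnostic workaround (it lets the encode succeed so the bad value can be inspected, but does not fix the underlying NaN), the non-conforming float strategy can be used; capturedStructure below is a placeholder for the actual encoded value:

import Foundation

// Sketch: make JSONEncoder tolerate NaN/infinity by encoding them as strings.
let encoder = JSONEncoder()
encoder.nonConformingFloatEncodingStrategy = .convertToString(
    positiveInfinity: "inf",
    negativeInfinity: "-inf",
    nan: "nan"
)
let data = try encoder.encode(capturedStructure)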