RealityView to show two screens of AR in iOS 18/macOS 15 using SwiftUI

I have an issue using RealityView to show two screens of AR. I did succeed in making it work as non-AR, but now my code is not working.

It also works using Storyboard and Swift with SceneKit, so why is it not working with RealityView?

import SwiftUI
import RealityKit


// Root view: lays out two independent AR views side by side with no gap,
// over a black background (visible only if the AR views don't fill the space).
struct ContentView : View {
    var body: some View {
        HStack (spacing: 0){
            MainView()
            MainView()
        }
        .background(.black)
    }
}

// One AR pane: a RealityView with spatial-tracking camera showing a single box.
// NOTE(review): this is the non-working version from the question. Each view
// creates its own default AnchorEntity() (world target); the accepted answer
// below instead shares AnchorEntity(.camera) instances created in the parent —
// presumably that difference is why this version fails in AR. Verify against
// the accepted answer.
struct MainView : View {
    // Anchor owned by the view; default initializer, i.e. NOT a .camera anchor.
    @State var anchor = AnchorEntity()
    
    var body: some View {
        RealityView { content in
            // 0.2 m box with a default (plain) material.
            let item = ModelEntity(mesh: .generateBox(size: 0.2), materials: [SimpleMaterial()])
            
            // Use the device's AR (spatial tracking) camera rather than a virtual one.
            content.camera = .spatialTracking
            anchor.addChild(item)
            // Place the anchor 1 m in front of the origin and tilt the box 45°
            // about the diagonal [0,1,1] axis so two faces are visible.
            anchor.position = [0.0, 0.0, -1.0]
            anchor.orientation = .init(angle: .pi/4, axis:[0,1,1])
            
            // Add the horizontal plane anchor to the scene
            content.add(anchor)
        }
    }
}

Answered by ostoura in 812891022
import SwiftUI
import RealityKit


// Root view for the working solution: creates the two camera-tracked anchors
// HERE (one per pane) and hands one to each MainView, so each RealityView
// gets its own AnchorEntity(.camera) instead of a default world anchor.
struct ContentView : View {
    // Anchors that follow the AR camera; one per AR pane.
    let anchor1 = AnchorEntity(.camera)
    let anchor2 = AnchorEntity(.camera)
    
    var body: some View {
        HStack (spacing: 0){
            MainView(anchor: anchor1)
            MainView(anchor: anchor2)
        }
        .background(.black)
    }
}

// One AR pane: shows a single box attached to the camera anchor injected by
// the parent (via the memberwise initializer's `anchor:` argument).
struct MainView : View {
    // Default value exists, but the working setup passes AnchorEntity(.camera)
    // in from ContentView rather than using this default.
    @State var anchor = AnchorEntity()
    
    var body: some View {
        RealityView { content in
            // Switch to the AR (spatial tracking) camera first — note this now
            // happens BEFORE any entity setup, unlike the broken version above.
            content.camera = .spatialTracking
            
            // 0.25 m box with a default (plain) material.
            let item = ModelEntity(mesh: .generateBox(size: 0.25), materials: [SimpleMaterial()])
            anchor.addChild(item)
            content.add(anchor)

            // 1 m in front of the anchor's origin, tilted 45° about [0,1,1].
            anchor.position.z = -1.0
            anchor.orientation = .init(angle: .pi/4, axis:[0,1,1])
        }
    }
}

Credit for this solution goes to Andy Jazz.

The only remaining issue is that the entity in the left view is a bit bigger — or, to be precise, it appears nearer than the one in the right view. I guess it is a bug, so I reported it in a separate post and will raise an issue with Apple about it.

Accepted Answer
import SwiftUI
import RealityKit


// Accepted answer (same code as above): the parent view owns the two
// camera-tracked anchors and injects one into each AR pane, so each
// RealityView uses an AnchorEntity(.camera) rather than a world anchor.
struct ContentView : View {
    // Anchors that follow the AR camera; one per AR pane.
    let anchor1 = AnchorEntity(.camera)
    let anchor2 = AnchorEntity(.camera)
    
    var body: some View {
        HStack (spacing: 0){
            MainView(anchor: anchor1)
            MainView(anchor: anchor2)
        }
        .background(.black)
    }
}

// Accepted answer's AR pane: a single box parented to the anchor supplied by
// ContentView via the memberwise initializer's `anchor:` argument.
struct MainView : View {
    // Default exists, but the working configuration passes AnchorEntity(.camera)
    // in from the parent instead of relying on this default.
    @State var anchor = AnchorEntity()
    
    var body: some View {
        RealityView { content in
            // Enable the AR (spatial tracking) camera before any entity setup.
            content.camera = .spatialTracking
            
            // 0.25 m box with a default (plain) material.
            let item = ModelEntity(mesh: .generateBox(size: 0.25), materials: [SimpleMaterial()])
            anchor.addChild(item)
            content.add(anchor)

            // 1 m in front of the anchor's origin, tilted 45° about [0,1,1].
            anchor.position.z = -1.0
            anchor.orientation = .init(angle: .pi/4, axis:[0,1,1])
        }
    }
}

Credit for this solution goes to Andy Jazz.

The only remaining issue is that the entity in the left view is a bit bigger — or, to be precise, it appears nearer than the one in the right view. I guess it is a bug, so I reported it in a separate post and will raise an issue with Apple about it.

RealityView to show two screens of AR in iOS 18/macOS 15 using SwiftUI
 
 
Q