guard let url = Bundle.main.url(forResource: "demoVideo2", withExtension: "m4v") else { return }
// Create the AVPlayer
let player = AVPlayer(url: url)
// Create the video material
let videoMaterial = VideoMaterial(avPlayer: player)
// Create a sphere and set its material to the video material
let sphere = MeshResource.generateSphere(radius: 0.5) // Adjust the radius as needed
let sphereEntity = ModelEntity(mesh: sphere, materials: [videoMaterial])
sphereEntity.transform = Transform(scale: [1, 1, 1], rotation: simd_quatf(angle: 0, axis: [0, 1, 0]))
sphereEntity.position = [0, 0, -2]
// Add the sphere entity to the scene
let anchor = AnchorEntity()
anchor.addChild(sphereEntity)
content.add(anchor)
// Play the video
player.play()
Here is the code I have been using to try to turn a simulated scene into a 360-degree panoramic video. Unfortunately, the closest I have got is a sphere with the video on it.
Is there anything you can do to help me with the code? I could use some help, and I can't use ARKit because it is not available in xrOS.
Also, is there an option in the build settings that makes the app fully immersive as soon as you enter it, so that you don't see the simulated surroundings around you? Thank you.
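For context, one pattern that is often suggested for 360-degree playback in RealityKit is to map the VideoMaterial onto the inside of a large sphere by flipping the mesh with a negative scale, and to present it from an ImmersiveSpace with the .full immersion style so no surroundings are visible. Below is a minimal sketch of that idea; the file name "panorama.mp4", the scene id, and the type names are placeholders, not anything from the original project.

import SwiftUI
import RealityKit
import AVFoundation

struct PanoramaImmersiveView: View {
    var body: some View {
        RealityView { content in
            // Hypothetical bundled 360° video; substitute your own resource name.
            guard let url = Bundle.main.url(forResource: "panorama", withExtension: "mp4") else { return }
            let player = AVPlayer(url: url)

            // A large sphere so the viewer sits inside it.
            let sphere = MeshResource.generateSphere(radius: 1000)
            let material = VideoMaterial(avPlayer: player)
            let entity = ModelEntity(mesh: sphere, materials: [material])

            // Flip the sphere so the video renders on its inner surface.
            entity.scale = [1, 1, -1]

            content.add(entity)
            player.play()
        }
    }
}

// Presenting the view from an ImmersiveSpace with a .full style hides the passthrough
// or simulated room while the space is open.
@main
struct PanoramaApp: App {
    var body: some Scene {
        ImmersiveSpace(id: "Panorama") {
            PanoramaImmersiveView()
        }
        .immersionStyle(selection: .constant(.full), in: .full)
    }
}

Whether the app opens straight into the immersive space at launch is governed by the scene configuration in Info.plist rather than by a build setting, so that part is worth checking in the target's settings rather than in code.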
Hi
I was wondering whether there are any packages that can properly connect Unity to visionOS and let the project build cleanly, without UIScreen errors. Bing Chat suggested the package xr.sdk.visionOS, but when I downloaded the Unity XR SDK from the manual documentation, that package was not in the unity-xr-sdk-v1 folder, and I haven't been able to find anything about it online.
Is there anything you can do to help me find the visionOS package, or another way to build a Unity project into an Xcode project without errors using visionOS (experimental)?
Regards
Angad
Hi
I have built a Unity project into Xcode beta 15.2 and run it. Unfortunately, the app does not display in full screen, meaning it doesn't cover the simulated scene, and I can't find anything relevant in the Xcode build settings. Is there anything you can do to help me with this? Also, is there a way to connect an Oculus headset to Xcode and run the app in the simulator with it? Thank you.
Hello, here is my code below.
import SwiftUI
import RealityKit
import AVFoundation

struct ImmersiveView: View {
    @Environment(\.presentationMode) var presentationMode
    @GestureState private var scale: CGFloat = 1.0
    @State private var showMenu = false

    var body: some View {
        ZStack {
            RealityView { content in
                // Create an entity for the video
                let videoEntity = Entity()
                // Get the video URL from the bundle
                guard let url = Bundle.main.url(forResource: "harmandir-sahib-sarovar", withExtension: "mp4") else { fatalError("Video was not found!") }
                // Create a simple AVPlayer
                let asset = AVURLAsset(url: url)
                let playerItem = AVPlayerItem(asset: asset)
                let player = AVPlayer()
                // Create a VideoMaterial
                let material = VideoMaterial(avPlayer: player)
                // Make a sphere on the videoEntity and assign the VideoMaterial to it
                videoEntity.components.set(ModelComponent(mesh: .generateSphere(radius: 1E3), materials: [material]))
                // Adjust the properties of the videoEntity (sphere) if needed
                videoEntity.scale = .init(x: 1, y: 1, z: -1)
                videoEntity.transform.translation += SIMD3<Float>(0.0, 10.0, 0.0)
                let angle = Angle.degrees(90)
                // Rotate about the y-axis; a zero-length axis produces an invalid quaternion
                let rotation = simd_quatf(angle: Float(angle.radians), axis: .init(x: 0, y: 1, z: 0))
                videoEntity.transform.rotation = rotation
                // Add the videoEntity to the RealityView
                content.add(videoEntity)
                // Start the video player
                player.replaceCurrentItem(with: playerItem)
                // Set the actionAtItemEnd property to .none
                player.actionAtItemEnd = .none
                // Subscribe to the end-of-item notification and seek back to the start
                NotificationCenter.default.addObserver(forName: .AVPlayerItemDidPlayToEndTime, object: player.currentItem, queue: .main) { _ in
                    player.seek(to: CMTime.zero)
                    player.play()
                }
                player.play()
            }
            Color.clear // An invisible view that covers the whole screen
                .gesture(MagnificationGesture().updating($scale) { value, state, transaction in
                    state = value
                }.onEnded { value in
                    showMenu.toggle()
                })
        }
        .overlay(
            Group {
                if showMenu {
                    VideoView(dismissAction: {
                        presentationMode.wrappedValue.dismiss()
                    })
                }
            }
        )
    }
}

struct VideoView: View {
    let dismissAction: () -> Void

    var body: some View {
        VStack {
            Button("Back") {
                dismissAction()
            }
        }
        .frame(maxWidth: .infinity, maxHeight: .infinity, alignment: .bottom)
        .background(Color.white)
    }
}
I am working on a way to pinch anywhere on the screen in the visionOS simulator to open up a menu. Unfortunately, the code above isn't working. Can you please help me out? Thank you.
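For what it's worth, one approach that is often suggested on visionOS is to make the video sphere itself the gesture target instead of an invisible Color.clear layer: look-and-pinch input only reaches RealityKit entities that carry both an InputTargetComponent and a CollisionComponent, and a fully transparent SwiftUI view gives the system nothing to target. The sketch below follows that pattern; the SimpleMaterial is a stand-in for the VideoMaterial above, and whether hit-testing registers from inside such a large collision sphere is worth verifying on device.

import SwiftUI
import RealityKit

struct PinchAnywhereView: View {
    @State private var showMenu = false

    var body: some View {
        RealityView { content in
            let videoEntity = Entity()
            videoEntity.components.set(
                ModelComponent(mesh: .generateSphere(radius: 1E3),
                               materials: [SimpleMaterial()]) // stand-in for the VideoMaterial
            )
            videoEntity.scale = .init(x: 1, y: 1, z: -1)

            // Both components are needed before the sphere can receive look-and-pinch input.
            videoEntity.components.set(InputTargetComponent())
            videoEntity.components.set(
                CollisionComponent(shapes: [.generateSphere(radius: 1E3)])
            )

            content.add(videoEntity)
        }
        // On visionOS a plain pinch arrives as a tap on whatever entity the user is looking at.
        .gesture(
            SpatialTapGesture()
                .targetedToAnyEntity()
                .onEnded { _ in
                    showMenu.toggle()
                }
        )
    }
}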
Here is my button code:
Button("Map") {
    openWindow(id: "Navigate")
    dismissWindow(id: "Begin")
}
And here is my main app code with all the scenes:
var body: some Scene {
    WindowGroup(id: "Begin") {
        MainMenuView()
    }
    WindowGroup(id: "Navigate") {
        MapView()
    }
    ImmersiveSpace(id: "ImmersiveSpace") {
        ImmersiveView()
    }
    .immersionStyle(selection: .constant(.full), in: .full)
}
It does not dismiss the Begin window when I click the button; the window stays open, and the Navigate window just opens alongside it. Can you please update my code to switch between views in the same window and fix this problem?
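As an aside, openWindow and dismissWindow only work when they are read from the environment (@Environment(\.openWindow) and @Environment(\.dismissWindow)) in the view that owns the button. If the goal is simply to swap content rather than juggle two windows, one common alternative is a single WindowGroup whose root view switches between screens with local state. The sketch below is illustrative only; AppScreen, RootView, and the simplified MainMenuView/MapView are stand-ins for the real views.

import SwiftUI

enum AppScreen {
    case mainMenu
    case map
}

struct RootView: View {
    @State private var screen: AppScreen = .mainMenu

    var body: some View {
        switch screen {
        case .mainMenu:
            // Switching state replaces the "Begin" -> "Navigate" window hop.
            MainMenuView(onMap: { screen = .map })
        case .map:
            MapView()
        }
    }
}

// Simplified stand-ins for the project's real views.
struct MainMenuView: View {
    let onMap: () -> Void

    var body: some View {
        Button("Map", action: onMap)
    }
}

struct MapView: View {
    var body: some View {
        Text("Map")
    }
}

The app would then declare a single WindowGroup(id: "Begin") { RootView() } alongside the ImmersiveSpace, and no dismissWindow call is needed.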
Hello, here is my code for the immersive view that the Xcode beta SwiftUI Vision Pro template generates when I first create a new project.
struct ImmersiveView: View {
    @Environment(\.openWindow) var openWindow

    var body: some View {
        RealityView { content in
            // Add the initial RealityKit content
            if let immersiveContentEntity = try? await Entity(named: "Immersive", in: realityKitContentBundle) {
                content.add(immersiveContentEntity)

                // Add an ImageBasedLight for the immersive content
                guard let resource = try? await EnvironmentResource(named: "ImageBasedLight") else { return }
                let iblComponent = ImageBasedLightComponent(source: .single(resource), intensityExponent: 0.25)
                immersiveContentEntity.components.set(iblComponent)
                immersiveContentEntity.components.set(ImageBasedLightReceiverComponent(imageBasedLight: immersiveContentEntity))

                // Put skybox here. See example in World project available at
                // https://developer.apple.com/
            }
        }
        .onLongPressGesture {
            openWindow(id: "A")
        }
    }
}
I have just added a gesture to the immersive view (onLongPressGesture) because all I want is to test whether it opens a window when I press anywhere on the screen in the simulator. Unfortunately, the gesture does not fire here, even though the same kind of gesture works in a normal window group. Can you explain why these gestures don't work in immersive views? If they do work, could you send me the updated code with a working gesture? Thank you.
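For reference, gestures over a RealityView generally only fire when the input hits an entity that is hit-testable, meaning it has collision shapes and an InputTargetComponent; the template's "Immersive" content has neither, so a press over it never registers. Below is a minimal sketch along those lines, using a tap targeted at the loaded entity; the components added here (and the rough box collision shape) are assumptions about what the scene needs, not part of the template.

import SwiftUI
import RealityKit
import RealityKitContent

struct TappableImmersiveView: View {
    @Environment(\.openWindow) var openWindow

    var body: some View {
        RealityView { content in
            if let immersiveContentEntity = try? await Entity(named: "Immersive", in: realityKitContentBundle) {
                // Make the entity hit-testable. The box is a rough placeholder shape and
                // should be sized and positioned to cover the visible content.
                immersiveContentEntity.components.set(CollisionComponent(shapes: [.generateBox(size: [2, 2, 2])]))
                // Allow the entity (and its descendants) to receive input.
                immersiveContentEntity.components.set(InputTargetComponent())
                content.add(immersiveContentEntity)
            }
        }
        .gesture(
            TapGesture()
                .targetedToAnyEntity()
                .onEnded { _ in
                    openWindow(id: "A")
                }
        )
    }
}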
Hi, I would like to ask a question regarding Xcode.
When I test my FlutterFlow app from runner.xcworkspace, it builds and runs on my phone, but once I close the app or stop the test, it no longer runs; it just goes back to the menu when I tap its icon. Can you explain why?
Also, can you give me some help on making sure my phone runs the FlutterFlow app even when testing is turned off in Xcode? Thanks.