I've a little trouble with implement a changeText function. PlaneDetection works, the first text displayed correctly. Once I hit on the button which linked to the changeText() function and it stop working. The error shows something about upwarpping optional value with arView, I tried everything I could still didn't work at all. So anyone please help me, I'm new in here.
Error:
AR3DText/ContentView.swift:18: Fatal error: Unexpectedly found nil while implicitly unwrapping an Optional value
2021-10-24 13:18:53.331758-0700 AR3DText[8026:2401109] AR3DText/ContentView.swift:18: Fatal error: Unexpectedly found nil while implicitly unwrapping an Optional value
(lldb)
import SwiftUI
import RealityKit
import ARKit
struct ContentView : View {
var body: some View {
return ARViewContainer()
.overlay(
VStack{
Spacer()
Button(action:{arView.changeText("Frank is awesome")}) {
Text("Change Text")
.frame(width:120,height:40)
.font(.body)
.foregroundColor(.black)
.background(Color.white)
.opacity(0.6)
}
.offset(y:-30)
.padding(.bottom, 30)
}
)
.edgesIgnoringSafeArea(.all)
}
}
struct ARViewContainer: UIViewRepresentable {
func makeUIView(context: Context) -> ARView {
let arView = ARView(frame: .zero)
let config = ARWorldTrackingConfiguration()
config.planeDetection = .horizontal
arView.session.run(config, options: [])
arView.session.delegate = arView
arView.createPlane()
return arView
}
func updateUIView(_ uiView: ARView, context: Context) {}
}
var arView: ARView!
var textElement = TextElements()
var textEntity: ModelEntity!
var planeAnchor: AnchorEntity!
extension ARView: ARSessionDelegate {
func createPlane() {
let planeAcnchor = AnchorEntity(plane: .horizontal)
let textMesh: MeshResource = .generateText("Hello Frank!", extrusionDepth: textElement.extrusionDepth, font: textElement.font, containerFrame: CGRect(), alignment: .left, lineBreakMode: .byWordWrapping)
let textMaterial = SimpleMaterial(color: textElement.textColor, isMetallic: true)
let textEntity = ModelEntity(mesh: textMesh, materials: [textMaterial])
textEntity.generateCollisionShapes(recursive: false)
planeAcnchor.addChild(textEntity)
self.scene.addAnchor(planeAcnchor)
self.installGestures(.all, for: textEntity)
}
func changeText(_ textContent: String) {
let textMesh: MeshResource = .generateText(textContent, extrusionDepth: textElement.extrusionDepth, font: textElement.font, containerFrame: CGRect(), alignment: .left, lineBreakMode: .byWordWrapping)
let textMaterial = SimpleMaterial(color: .blue, isMetallic: false)
textEntity.removeFromParent()
let textEntity = ModelEntity(mesh: textMesh, materials: [textMaterial])
planeAnchor.addChild(textEntity)
self.scene.addAnchor(planeAnchor)
self.installGestures(.all, for: textEntity)
}
}
#if DEBUG
struct ContentView_Previews : PreviewProvider {
static var previews: some View {
ContentView()
}
}
#endif
Post
Replies
Boosts
Views
Activity
https://stackoverflow.com/questions/70270830/picker-doesnt-show-and-hide-the-selection-after-item-being-selected-in-swiftui?noredirect=1#comment124226535_70270830
Hi, I'm doing a practise project about Image Filter. I would like to show a horizontal scrollView at the bottom of screen which showing all filtered images, but it displayed an error when using ForEach.
Error: Referencing initializer 'init(_:content:)' on 'ForEach' requires that 'Image' conform to 'Identifiable'
Can someone help me out before Christmas?
Happy holiday! Wish all of you have a wonderful Christmas!
ContentView
import CoreImage
import CoreImage.CIFilterBuiltins
import SwiftUI
struct ContentView: View {
@State private var showingImagePicker = false
@State private var image: Image?
@State private var inputImage: UIImage?
@State private var showingFilterView = false
@State private var filterAmount = 0.5
@State private var filters: [CIFilter] = [
CIFilter.sepiaTone(),
CIFilter.vignette(),
CIFilter.discBlur(),
CIFilter.pixellate(),
CIFilter.crystallize(),
CIFilter.twirlDistortion()
]
let context = CIContext()
var body: some View {
ZStack {
Image("background")
.resizable()
.scaledToFill()
.ignoresSafeArea(.all)
VStack {
Button {
showingImagePicker = true
} label: {
VStack {
Image(systemName: "plus.square.fill.on.square.fill")
.resizable()
.frame(width: 40, height: 40)
Text("Add photo")
.font(.title3)
}
.foregroundColor(.white)
}
}
}
.preferredColorScheme(.dark)
.sheet(isPresented: $showingImagePicker) {
ImagePicker(image: $inputImage)
}
.onChange(of: inputImage) { _ in loadImage();loadFilter()}
.navigate(to: FilterView(image: image), when: $showingFilterView)
}
func loadImage() {
guard let inputImage = inputImage else {return}
image = Image(uiImage: inputImage)
showingFilterView = true
}
func loadFilter() {
filters.forEach { (filters) in
guard let inputImage = inputImage else { return }
let beginImage = CIImage(image: inputImage)
filters.setValue(beginImage, forKey: kCIInputImageKey)
guard let outputImage = filters.outputImage else {return}
let cgimg = context.createCGImage(outputImage, from: outputImage.extent)
let uiImage = UIImage(cgImage: cgimg!)
let filteredImage = Image(uiImage: uiImage)
let filterView = FilterView()
filterView.filteredImage.append(filteredImage)
}
}
}
extension View {
/// Navigate to a new view.
/// - Parameters:
/// - view: View to navigate to.
/// - binding: Only navigates when this condition is `true`.
func navigate<NewView: View>(to view: NewView, when binding: Binding<Bool>) -> some View {
NavigationView {
ZStack {
self
//.navigationBarTitle("")
//.navigationBarHidden(true)
NavigationLink(
destination: view,
//.navigationBarTitle(""),
//.navigationBarHidden(true),
isActive: binding
) {
EmptyView()
}
}
}
}
}
struct ContentView_Previews: PreviewProvider {
static var previews: some View {
ContentView()
}
}
FilterView
import SwiftUI
struct FilterView: View{
@State var image: Image?
@State var filteredImage: [Image] = []
var body: some View {
NavigationView{
ZStack{
Image("background")
.resizable()
.scaledToFill()
.ignoresSafeArea(.all)
VStack {
image?
.resizable()
.scaledToFit()
ScrollView(.horizontal, showsIndicators: false) {
HStack{
ForEach(filteredImage) { image in
Button {
} label: {
image
}
}
}
}
}
}
}
.toolbar {
ToolbarItem(placement: .navigationBarTrailing) {
Button {
save()
} label: {
Image(systemName: "square.and.arrow.down")
}
}
}
}
func save() {
}
}
struct FilterView_Previews: PreviewProvider {
static var previews: some View {
FilterView()
}
}
I’m trying to show the camera tracking state info on top of device screen, but still get errors for all my efforts. I’m very confused with how ARView works with SwiftUI, for showing AR experience and adding button icons on top of it is simple, but to getting data from session just looks more work need to be done, could someone help me this? Thanks!
Error in ViewModel: ‘description is a get-only property’
ContentView
import SwiftUI
import ARKit
import RealityKit
struct ContentView: View {
@StateObject var vm = ViewModel()
var body: some View {
ZStack{
ARViewContainer()
.edgesIgnoringSafeArea(.all)
.environmentObject(vm)
VStack{
Text(vm.sessionInfoLabel)
.font(.title3)
.foregroundColor(.red)
Spacer()
HStack{
Button{
} label: {
Image(systemName: "person.2")
.padding()
.font(.title)
}
Spacer()
Button{
} label: {
Image(systemName: "target")
.padding()
.font(.title)
}
}
.padding()
}
}
}
}
struct ARViewContainer: UIViewRepresentable {
@EnvironmentObject var vm: ViewModel
func makeUIView(context: Context) -> ARView {
return vm.arView
}
func updateUIView(_ uiView: ARView, context: Context) {
}
}
ViewModel
import SwiftUI
import ARKit
import RealityKit
class ViewModel: ObservableObject {
@Published var arView: ARView
var sessionInfoLabel = ""
init() {
arView = ARView.init(frame: .zero)
let config = ARWorldTrackingConfiguration()
config.planeDetection = .horizontal
arView.session.delegate = arView
arView.session.run(config)
}
}
extension ARView: ARSessionDelegate {
public func session(_ session: ARSession, cameraDidChangeTrackingState camera: ARCamera) {
camera.trackingState.description = ViewModel().sessionInfoLabel
}
}
extension ARCamera.TrackingState: CustomStringConvertible {
public var description: String {
switch self {
case .normal:
return "Normal"
case .notAvailable:
return "Not Available"
case .limited(.initializing):
return "Initializing"
case .limited(.excessiveMotion):
return "Excessive Motion"
case .limited(.insufficientFeatures):
return "Insufficient Features"
case .limited(.relocalizing):
return "Relocalizing"
case .limited:
return "Unspecified Reason"
}
}
}
So once the Lidar detected scene with mesh, and tap anywhere to instantiate an object. Implement installGesture for object, scale and rotation works fine, only translation doesn't work. Why it doesn't work? How can I make it works?
Thanks
Hi apple engineer, I’m facing a trouble to run the “Capturing Depth Using the LiDAR Camera” on my 5th iPad Pro with Lidar on, and 15.4 iPadOS. Sample project download link: https://developer.apple.com/documentation/avfoundation/cameras_and_media_capture/capturing_depth_using_the_lidar_camera.
I didn’t make any changes on the code, and the error shows here:
Terminating app due to uncaught exception 'NSInvalidArgumentException', reason: 'Could not find a storyboard named 'Main' in bundle NSBundle </private/var/containers/Bundle/Application/7E21BAE3-4F36-4ED8-9D91-01D1E0D5C008/LiDARDepth.app> (loaded)' terminating with uncaught exception of type NSException
Can someone help me out? Thank you!
My app has a problem updating location after resetting location service to enable. So when the system location service is enabled, my app has no problem updating the location. But if I closed the app and disabled the system location service, then restart the app, it will show an alert to enable the system location service. After enabling location service, then going back to the app, its location has no updating at all. I have the longitude, latitude, heading..etc UIs to indicate whether the location is updated or not. Here are my view's code and location manager class, could someone help me out? Thanks
View
struct ContentView: View {
@StateObject private var locationManager = LocationManager()
@State var trackingInfo = ARViewContainer.TrackingInfo()
@State var isLocationServiceOff = false
var body: some View {
ZStack {
ARViewContainer(trackingInfo: $trackingInfo)
.edgesIgnoringSafeArea(.all)
.onAppear{
if CLLocationManager.locationServicesEnabled() {
locationManager.checkIfLocationServicesIsEnabled()
} else {
isLocationServiceOff = true
}
}
.alert(isPresented: $isLocationServiceOff) {
Alert(title: Text("Location Service Disabled"),
message: Text("Geospatial experience requires location service enabled"),
primaryButton: .default(Text("Setting")){
if #available(iOS 10.0, *) {
if let url = URL(string: "App-Prefs:root=Privacy&path=LOCATION") {
UIApplication.shared.open(url, options: [:], completionHandler: nil)
}
} else {
if let url = URL(string: "prefs:root=LOCATION_SERVICE") {
UIApplication.shared.open(url)
}
}
}, secondaryButton: .cancel(Text("Cancel")))
}
}
}
}
}
LocationManager
class LocationManager: NSObject, ObservableObject, CLLocationManagerDelegate {
//User could turn location service off, so it's an optional.
var locationManager: CLLocationManager?
func checkIfLocationServicesIsEnabled() {
if CLLocationManager.locationServicesEnabled() {
locationManager = CLLocationManager()
locationManager!.delegate = self
}
}
private func checkLocationAuthorization() {
guard let locationManager = locationManager else {
return
}
switch locationManager.authorizationStatus {
case .notDetermined:
locationManager.requestWhenInUseAuthorization()
case .restricted:
print("Restricted location services due to parental controls")
case .denied:
print("You have denied this app location permission. Go into settings to change it")
case .authorizedAlways, .authorizedWhenInUse:
locationManager.desiredAccuracy = kCLLocationAccuracyBest
locationManager.allowsBackgroundLocationUpdates = true
@unknown default:
break
}
}
func locationManagerDidChangeAuthorization(_ manager: CLLocationManager) {
checkLocationAuthorization()
}
}
I'm using the newest Geospatial API from ARCore, and trying to built it with SwiftUI and RealityKit. I have all SDK and api key set up properly, all coordinates and accuracy info are updated every frame properly. But Whenever I use GARSession.creatAnchor method, it returns a GARAnchor. I used GARAnchor's transform property to create an ARAnchor(transform: GARAnchor.transform), then I created AnchorEntity with this ARAnchor, and add AnchorEntity to ARView.scene. However, the model never showed up. I have checked coordinates, altitude, still no luck at all. So is there anyone could help me out? Thank you so much.
do {
let garAnchor = try parent.garSession.createAnchor(coordinate: CLLocationCoordinate2D(latitude: xx.xxxxxxxxx, longitude: xx.xxxxxxx), altitude: xx, eastUpSouthQAnchor: simd_quatf(ix: 0, iy: 0, iz: 0, r: 0))
if garAnchor.hasValidTransform && garAnchor.trackingState == .tracking {
let arAnchor = ARAnchor(transform: garAnchor.transform)
let anchorEntity = AnchorEntity(anchor: arAnchor)
let mesh = MeshResource.generateSphere(radius: 2)
let material = SimpleMaterial(color: .red, isMetallic: true)
let sephere = ModelEntity(mesh: mesh, materials: [material])
anchorEntity.addChild(sephere)
parent.arView.scene.addAnchor(anchorEntity)
print("Anchor has valid transform, and anchor is tracking")
} else {
print("Anchor has invalid transform")
}
} catch {
print("Add garAnchor failed: \(error.localizedDescription)")
}
}