Hello,
I'm building an AR image-tracking application that detects a set of images and plays a video on top of each detected image.
In most cases there is no problem, but some images are not detected.
So, I tried these.
When I initialize each ARReferenceImage, I set its physicalWidth to match the actual printed size (10 cm).
I also increased the contrast of the images.
But it didn't work very well.
I would be very grateful if you could give me a small hint.
Here is my code.
///Model
/// A photo card fetched from the server: the image to track in AR plus the
/// video to play on top of it once that image is detected.
struct PhotoCard {
// Identifier for this card; used to match AR detections back to the card.
var imageName: String
// Image to detect; it must expose a `cgImage` to build an ARReferenceImage.
var image: UIImage
// URL of the video to play over the detected card.
var videoURL: URL
}
/// Pairs a ready-to-track ARKit reference image with the video to play
/// when that image is detected.
struct TrackingItem {
// Reference image handed to the AR session's tracking configuration.
var referenceImage: ARReferenceImage
// Video to play on the plane rendered over the detected image.
var videoURL: URL
}
/// Builds the list of AR tracking items from the photo cards provided by
/// the shared data manager.
final class CameraModel {
    private let dataManger = DataManger.shared()

    /// Fetches the photo cards (image + video URL pairs) from the server.
    private func getPhotoCards() -> [PhotoCard] {
        return dataManger.getPhotoCards()
    }

    /// Converts every photo card into a `TrackingItem` for the AR session.
    ///
    /// - Parameter physicalWidth: Real-world width of the printed card in
    ///   meters. Defaults to 0.1 (10 cm). An accurate value matters for
    ///   detection quality and for sizing the overlay plane correctly.
    /// - Returns: One item per card whose `UIImage` exposes a `cgImage`;
    ///   cards backed by CIImage-only images are silently skipped, exactly
    ///   as the previous `guard … return` did.
    func trackingItems(physicalWidth: CGFloat = 0.1) -> [TrackingItem] {
        return getPhotoCards().compactMap { photoCard in
            // ARReferenceImage requires raw CGImage pixel data; a UIImage
            // created from a CIImage has no `cgImage` and cannot be tracked.
            guard let cgImage = photoCard.image.cgImage else { return nil }
            let referenceImage = ARReferenceImage(cgImage,
                                                  orientation: .up,
                                                  physicalWidth: physicalWidth)
            referenceImage.name = photoCard.imageName
            return TrackingItem(referenceImage: referenceImage,
                                videoURL: photoCard.videoURL)
        }
    }
}
///viewController
import SceneKit
import ARKit
import AVFoundation
import SpriteKit
import RxSwift
/// Runs an AR image-tracking session and, for each detected photo card,
/// overlays a plane that plays the card's associated video.
final class CameraViewController: VideoPlayerPresentableViewController, ARSCNViewDelegate {
    @IBOutlet private var sceneView: ARSCNView!
    private let model = CameraModel()
    // Reference images plus their video URLs, rebuilt each session start.
    private var data: [TrackingItem] = []

    override func viewDidLoad() {
        super.viewDidLoad()
        setupSceneView()
        setupTrackingConfiguration()
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        // Stop the camera/AR session whenever the screen goes away.
        sceneView.session.pause()
    }

    private func setupSceneView() {
        sceneView.delegate = self
        // Force-unwrap is acceptable for a bundled resource: a missing scene
        // file is a packaging bug that should crash in development.
        let scene = SCNScene(named: "SceneAssets.scnassets/photocard.scn")!
        sceneView.scene = scene
    }

    private func setupTrackingConfiguration() {
        data = model.trackingItems()
        let configuration = ARImageTrackingConfiguration()
        configuration.isAutoFocusEnabled = true
        let trackingImages = data.map { $0.referenceImage }
        configuration.trackingImages = Set(trackingImages)
        // NOTE(review): ARKit tracks only a small number of images
        // simultaneously (device-dependent); asking for the full count may
        // degrade detection when the set is large. Consider a small constant
        // here — confirm against the ARImageTrackingConfiguration docs.
        configuration.maximumNumberOfTrackedImages = trackingImages.count
        sceneView.session.run(configuration)
    }

    func renderer(_ renderer: SCNSceneRenderer, didAdd node: SCNNode, for anchor: ARAnchor) {
        // FIX: the original used `imageAnchor` and `imageName` without ever
        // defining them, so it could not compile. Downcast the anchor and
        // read the reference image's name before looking up the item.
        guard let imageAnchor = anchor as? ARImageAnchor,
              let imageName = imageAnchor.referenceImage.name,
              let current = data.first(where: { $0.referenceImage.name == imageName }) else { return }
        let referenceImage = imageAnchor.referenceImage
        // Size the video plane to the detected image's physical footprint
        // (in meters), so the video exactly covers the printed card.
        let planeGeometry = SCNPlane(width: referenceImage.physicalSize.width,
                                     height: referenceImage.physicalSize.height)
        let plane = SCNNode(geometry: planeGeometry)
        // SCNPlane is vertical by default; rotate -90° about X to lay it
        // flat on the detected image.
        plane.transform = SCNMatrix4MakeRotation(-.pi / 2, 1, 0, 0)
        let (videoScene, player) = makeVideoSceneAndPlayer(with: current.videoURL)
        planeGeometry.materials.first?.diffuse.contents = videoScene
        node.addChildNode(plane)
        // `anchorAndPlayerMap` / `updatePlayYN` presumably live in
        // VideoPlayerPresentableViewController / CameraModel — not visible here.
        anchorAndPlayerMap[anchor] = player
        model.updatePlayYN(of: current)
    }

    func renderer(_ renderer: SCNSceneRenderer, didUpdate node: SCNNode, for anchor: ARAnchor) {
        // Drop anchors whose image has left the frame; ARKit re-adds the
        // anchor (triggering didAdd again) when the image is re-detected.
        guard let imageAnchor = anchor as? ARImageAnchor,
              imageAnchor.isTracked == false else { return }
        sceneView.session.remove(anchor: anchor)
    }

    /// Builds a SpriteKit scene hosting a video node for the given URL and
    /// starts playback immediately.
    ///
    /// - Returns: The scene (to use as a material's `diffuse.contents`) and
    ///   the player (so callers can pause/observe it later).
    private func makeVideoSceneAndPlayer(with url: URL) -> (SKScene, AVPlayer) {
        let size = CGSize(width: 500, height: 500)
        let scene = SKScene(size: size)
        scene.scaleMode = .aspectFit
        let player = AVPlayer(url: url)
        let videoSpriteNode = SKVideoNode(avPlayer: player)
        videoSpriteNode.position = CGPoint(x: size.width / 2, y: size.height / 2)
        videoSpriteNode.size = size
        // An SKScene used as a SceneKit material renders upside down;
        // flipping the node's Y scale compensates.
        videoSpriteNode.yScale = -1
        scene.addChild(videoSpriteNode)
        addObserver(of: player)
        player.play()
        return (scene, player)
    }
}