Hello everyone! In my project I don't have the ability to link a ci.metallib into the build, but I have a ci.metallib Core Image shader like this one:
#define TRACKING_SEVERITY 0.025
#define TRACKING_SPEED 0.2
#define SHIMMER_SPEED 30.0
#define RGB_MASK_SIZE 2.0
// The original snippet uses these two constants without defining them;
// the values below are assumptions, not from the original post.
#define INTERLACING_SEVERITY 0.00125
#define TRACKING_HEIGHT 0.1
#include <metal_stdlib>
#include <CoreImage/CoreImage.h>
using namespace metal;

extern "C" { namespace coreimage {

float mod(float x, float y) {
    return float(x - y * floor(x / y));
}

float4 mainImage(sampler_h src, float time, float amount) {
    // const float magnitude = sin(time) * 0.1 * amount;
    float2 greenCoord = src.coord();
    greenCoord.x -= sin(greenCoord.y * 500.0 + time) * INTERLACING_SEVERITY * amount;
    float scan = mod(greenCoord.y, 3.0);

    // RGB pixel mask
    float yOffset = floor(sin(time * SHIMMER_SPEED));
    float pix = floor((greenCoord.y + yOffset) * src.size().x + greenCoord.x);
    float4 colMask = float4(mod(pix, RGB_MASK_SIZE),
                            mod(pix + 1.0, RGB_MASK_SIZE),
                            mod(pix + 2.0, RGB_MASK_SIZE),
                            1.0);
    colMask = colMask / (RGB_MASK_SIZE - 1.0) + 0.5;

    // Tracking
    float t = -time * TRACKING_SPEED;
    float fractionalTime = (t - floor(t)) * 1.3 - TRACKING_HEIGHT;
    if (fractionalTime + TRACKING_HEIGHT >= greenCoord.y && fractionalTime <= greenCoord.y) {
        greenCoord.x -= fractionalTime * TRACKING_SEVERITY;
    }

    return src.sample(greenCoord).b * colMask * scan;
}

}}
How do I translate this code into the string-based CIKernel form below?
let kernel = CIKernel(source: """
kernel vec4 mainImage(sampler image, float time, float amount) {
    float mod(float x, float y) {
        return float(x - y * floor(x/y));
    }?
    vec2 greenCoord = destCoord();?
    .......????
}
""")
How exactly do the types change?
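For reference, here is a hedged sketch of how the kernel might look in the legacy Core Image Kernel Language. The rough type mapping is: float2 → vec2, float4 → vec4, sampler_h → sampler, src.coord() → destCoord(), src.size() → samplerSize(src), and src.sample(c) → sample(src, samplerTransform(src, c)). mod() is built into the GLSL-like kernel language, so the helper function goes away (CIKernel source cannot contain nested function definitions anyway). The constants are inlined; the INTERLACING_SEVERITY and TRACKING_HEIGHT values are assumptions, since the original post never defines them.

let kernel = CIKernel(source: """
kernel vec4 mainImage(sampler src, float time, float amount) {
    vec2 greenCoord = destCoord();
    greenCoord.x -= sin(greenCoord.y * 500.0 + time) * 0.00125 * amount; // INTERLACING_SEVERITY (assumed)
    float scan = mod(greenCoord.y, 3.0);
    float yOffset = floor(sin(time * 30.0));                             // SHIMMER_SPEED
    float pix = floor((greenCoord.y + yOffset) * samplerSize(src).x + greenCoord.x);
    vec4 colMask = vec4(mod(pix, 2.0), mod(pix + 1.0, 2.0), mod(pix + 2.0, 2.0), 1.0); // RGB_MASK_SIZE = 2.0
    colMask = colMask / (2.0 - 1.0) + 0.5;
    float t = -time * 0.2;                                               // TRACKING_SPEED
    float fractionalTime = (t - floor(t)) * 1.3 - 0.1;                   // TRACKING_HEIGHT (assumed)
    if (fractionalTime + 0.1 >= greenCoord.y && fractionalTime <= greenCoord.y) {
        greenCoord.x -= fractionalTime * 0.025;                          // TRACKING_SEVERITY
    }
    return sample(src, samplerTransform(src, greenCoord)).b * colMask * scan;
}
""")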
Hi all! I'm trying to make a button showing the most recent image from the phone's photo gallery, but the image comes back at the wrong size, together with an error.
info.plist
Privacy - Photo Library Additions Usage Description 👌
Privacy - Photo Library Usage Description 👌
Privacy - Media Library Usage Description 👌
ViewModelFile
import Photos
...
func queryLastPhoto(resizeTo size: CGSize?, queryCallback: @escaping ((UIImage?) -> Void)) {
    let fetchOptions = PHFetchOptions()
    fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
    let requestOptions = PHImageRequestOptions()
    requestOptions.isSynchronous = true
    let fetchResult = PHAsset.fetchAssets(with: PHAssetMediaType.image, options: fetchOptions)
    if let asset = fetchResult.firstObject {
        let manager = PHImageManager.default()
        let targetSize = size ?? CGSize(width: asset.pixelWidth, height: asset.pixelHeight)
        manager.requestImage(for: asset,
                             targetSize: targetSize,
                             contentMode: .aspectFit,
                             options: requestOptions,
                             resultHandler: { image, info in
            queryCallback(image)
        })
    }
}
...
ViewFile (the image appears, but is not resized)
viewModel.queryLastPhoto(resizeTo: CGSize(width: 20, height: 20)) { image in
    self.imagePickerButton.setImage(image, for: .normal)
}
Console output
"Error returned from daemon: Error Domain=com.apple.accounts Code=7 "(null)""
2021-08-16 10:42:59.018957+0700 MetalFilter[2067:494516] [PAAccessLogger] Failed to log access with error: access=<PATCCAccess 0x283e82ee0> accessor:<<PAApplication 0x283094ba0> identifierType:auditToken identifier:{pid:2067, version:5804} parentApplication:(null)> identifier:4CBF8D4D-ABAB-4A08-BC26-471EBED8DA19 kind:intervalEnd timestampAdjustment:0 tccService:kTCCServicePhotos, error=Error Domain=PAErrorDomain Code=11 "Possibly incomplete access interval automatically ended by daemon"
What am I doing wrong? I hope you can give me a hand.
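A hedged guess at what is going on: with the default options, PHImageManager can hand back a degraded, approximately sized image first, and the TCC/accounts messages in the console often appear when the fetch runs before photo-library authorization has completed. A minimal sketch of the settings I would try (same API, stricter options):

import Photos

// Request authorization first, then fetch; running the fetch before the
// user has granted access can produce the daemon errors from the console.
PHPhotoLibrary.requestAuthorization { status in
    guard status == .authorized else { return }
    let requestOptions = PHImageRequestOptions()
    requestOptions.isSynchronous = true
    // Deliver one final-quality image instead of a fast degraded pass,
    // and resize it to exactly the requested target size.
    requestOptions.deliveryMode = .highQualityFormat
    requestOptions.resizeMode = .exact
    // ... pass these options to PHImageManager.requestImage as above.
}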
I'm trying to show a focus-point entity while the camera does not see an ARImageAnchor, remove it once the camera detects the image anchor, and show it again when the anchor is lost. I used arView.session.delegate, but the delegate method seems to be called only once; I'm not sure. How can I do this? Thank you!
// Inside CameraViewController:
var focusEntity: FocusEntity! // (FocusEntity: Entity, HasAnchoring)

override func viewDidLoad() {
    super.viewDidLoad()
    // ...
    focusEntity = FocusEntity(on: arView, style: .classic(color: .systemGray4))
    arView.session.delegate = self
}
extension CameraViewController: ARSessionDelegate {
    func session(_ session: ARSession, didAdd anchors: [ARAnchor]) {
        for anchor in anchors {
            if let imageAnchor = anchor as? ARImageAnchor {
                focusEntity.destroy()
                focusEntity = nil
                // ... attach the entity to the image anchor
            }
        }
    }

    func session(_ session: ARSession, didUpdate frame: ARFrame) {
        // ... ???
    }
}
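One possible approach (a sketch, not from the original post): ARImageAnchor conforms to ARTrackable, so instead of destroying the focus entity you can keep it alive and toggle its visibility from isTracked inside the anchor-update callback:

// Sketch: add to the ARSessionDelegate extension above. Assumes hiding
// the FocusEntity via isEnabled is acceptable for this app.
func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {
    for anchor in anchors {
        guard let imageAnchor = anchor as? ARImageAnchor else { continue }
        // Hidden while the image is tracked; shown again as soon as
        // ARKit reports the image as lost.
        focusEntity?.isEnabled = !imageAnchor.isTracked
    }
}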
I'm trying to use CustomMaterial with a surfaceShader, following this article:
https://developer.apple.com/documentation/realitykit/modifying_realitykit_rendering_using_custom_materials
but my model does not appear, and when I try to apply the custom material to the modelComponent I hit an error. How do I use a surfaceShader on a usdz model? Have a good day!
private func obtainUSDZModel() {
    guard let doll = try? Entity.load(named: "Doll") else { return }
    guard var modelComponent = doll.components[ModelComponent.self] as? ModelComponent else {
        print("Error Model Component")
        return
    }
    let surfaceShader = CustomMaterial.SurfaceShader(named: "rainbow", in: MetalLibLoader.library)
    guard let customMaterials = try? modelComponent.materials.map({ material -> CustomMaterial in
        try CustomMaterial(from: material, surfaceShader: surfaceShader)
    }) else { return }
    modelComponent.materials = customMaterials
    doll.generateCollisionShapes(recursive: true)
    doll.components[ModelComponent.self] = modelComponent
    doll.scale = .init(repeating: 1.0)
    anchorEntity.addChild(doll)
    arView.scene.anchors.append(anchorEntity)
}
Console output:
Error Model Component
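A likely explanation (my assumption; the post does not confirm it): Entity.load(named:) returns a hierarchy whose root typically carries no ModelComponent — the component lives on a descendant entity, so the guard fails. Entity.loadModel(named:) flattens the file into a single ModelEntity, which guarantees the component is on the entity you hold:

// Sketch: load the usdz flattened into one ModelEntity, so its
// ModelComponent is directly available via `model`.
guard let doll = try? Entity.loadModel(named: "Doll") else { return }
guard var modelComponent = doll.model else {
    print("Error Model Component")
    return
}
// ... map modelComponent.materials to CustomMaterials exactly as above.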
I'm trying to attach an entity with a custom material to an ARMeshAnchor using RealityKit, like this example:
https://www.dropbox.com/s/b0u9mbsxqaobnrf/Screen%20Shot%202021-10-29%20at%205.25.57%20PM.png?dl=0
I found a small solution for doing this via SceneKit geometry, but I can't work out how to do it via RealityKit. Is it possible, and if so, how? I tried, but it looks like I'm doing something wrong.
I'm also interested in how to stretch mesh entities along the boundaries of the space the camera captures, since right now everything goes through .generatePlane(width: 1, height: 1).
// Inside ARViewController:
private lazy var arView = ARView().do {
    $0.frame = view.bounds
}

override func viewDidLoad() {
    super.viewDidLoad()
    MetalLibLoader.initializeMetal()
    setupSubviews()
}

override func viewDidAppear(_ animated: Bool) {
    super.viewDidAppear(animated)
    arView.session.delegate = self
    configureWorldTracking()
}

override func viewDidDisappear(_ animated: Bool) {
    super.viewDidDisappear(animated)
    arView.session.pause()
}

private func setupSubviews() {
    view.addSubview(arView)
}

private func configureWorldTracking() {
    let configuration = ARWorldTrackingConfiguration()
    let personSegmentation: ARWorldTrackingConfiguration.FrameSemantics = .personSegmentationWithDepth
    if ARWorldTrackingConfiguration.supportsFrameSemantics(personSegmentation) {
        configuration.frameSemantics.insert(personSegmentation)
    }
    let sceneReconstruction: ARWorldTrackingConfiguration.SceneReconstruction = .mesh
    if ARWorldTrackingConfiguration.supportsSceneReconstruction(sceneReconstruction) {
        configuration.sceneReconstruction = sceneReconstruction
    }
    configuration.planeDetection.insert(.horizontal)
    arView.renderOptions.insert(.disableMotionBlur)
    arView.session.run(configuration)
}

private func updateAnchors(anchors: [ARAnchor]) {
    for anchor in anchors.compactMap({ $0 as? ARMeshAnchor }) {
        let anchorEntity = AnchorEntity(anchor: anchor)
        anchorEntity.addChild(plasmaEntity())
        arView.scene.addAnchor(anchorEntity)
    }
}

private func plasmaEntity() -> ModelEntity {
    let customMaterial: CustomMaterial
    let surfaceShader = CustomMaterial.SurfaceShader(named: "plasma", in: MetalLibLoader.library)
    do {
        customMaterial = try CustomMaterial(surfaceShader: surfaceShader, lightingModel: .lit)
    } catch {
        fatalError(error.localizedDescription)
    }
    return ModelEntity(mesh: .generatePlane(width: 1, height: 1), materials: [customMaterial])
}
extension ARViewController: ARSessionDelegate {
    func session(_ session: ARSession, didAdd anchors: [ARAnchor]) {
        updateAnchors(anchors: anchors)
    }

    func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {
        updateAnchors(anchors: anchors)
    }
}
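One thing that stands out (my observation, not from the post): updateAnchors runs for both didAdd and didUpdate, and it creates and adds a fresh AnchorEntity every time, so entities pile up on every callback. A sketch that keeps one entity per mesh anchor, keyed by identifier:

// Sketch: reuse a single AnchorEntity per ARMeshAnchor instead of
// adding a new one on every delegate callback.
private var meshEntities: [UUID: AnchorEntity] = [:]

private func updateAnchors(anchors: [ARAnchor]) {
    for anchor in anchors.compactMap({ $0 as? ARMeshAnchor }) {
        guard meshEntities[anchor.identifier] == nil else { continue }
        let anchorEntity = AnchorEntity(anchor: anchor)
        anchorEntity.addChild(plasmaEntity())
        arView.scene.addAnchor(anchorEntity)
        meshEntities[anchor.identifier] = anchorEntity
    }
}
// To make the material follow the reconstructed surface rather than a
// 1 x 1 plane, the ARMeshAnchor geometry still has to be converted into
// a MeshResource, as the MeshAnchorTracker in the next post does.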
RealityKit ARImageAnchor with VideoMaterial problems
When I move the camera closer, sometimes the printed image from ARResources shows through and overlaps the video. What could be the problem?
Links:
https://www.dropbox.com/s/b8yaczq4xjk9v1p/IMG_9429.PNG?dl=0
https://www.dropbox.com/s/59dj4ldf6l3yj4u/RPReplay_Final1637392988.mov?dl=0
VideoEntity class
final class VideoEntity {
    var videoPlayer = AVPlayer()

    func videoModelEntity(width: Float?, height: Float?) -> ModelEntity {
        let plane = MeshResource.generatePlane(width: width ?? Float(), height: height ?? Float())
        let videoItem = createVideoItem(with: "Cooperation")
        let videoMaterial = createVideoMaterial(with: videoItem)
        return ModelEntity(mesh: plane, materials: [videoMaterial])
    }

    func placeVideoScreen(videoEntity: ModelEntity, imageAnchor: ARImageAnchor, arView: ARView) {
        let anchorEntity = AnchorEntity(anchor: imageAnchor)
        let rotationAngle = simd_quatf(angle: GLKMathDegreesToRadians(-90), axis: SIMD3<Float>(x: 1, y: 0, z: 0))
        videoEntity.setOrientation(rotationAngle, relativeTo: anchorEntity)
        videoEntity.setPosition(SIMD3<Float>(x: 0, y: 0.015, z: 0), relativeTo: anchorEntity)
        anchorEntity.addChild(videoEntity)
        arView.scene.addAnchor(anchorEntity)
    }

    private func createVideoItem(with filename: String) -> AVPlayerItem {
        guard let url = Bundle.main.url(forResource: filename, withExtension: "mov") else {
            fatalError("Fatal Error: - No file source.")
        }
        return AVPlayerItem(url: url)
    }

    private func createVideoMaterial(with videoItem: AVPlayerItem) -> VideoMaterial {
        let videoMaterial = VideoMaterial(avPlayer: videoPlayer)
        videoPlayer.replaceCurrentItem(with: videoItem)
        videoPlayer.actionAtItemEnd = .none
        videoPlayer.play()
        NotificationCenter.default.addObserver(self,
                                               selector: #selector(loopVideo),
                                               name: NSNotification.Name.AVPlayerItemDidPlayToEndTime,
                                               object: videoPlayer.currentItem)
        return videoMaterial
    }

    @objc
    private func loopVideo(notification: Notification) {
        guard let playerItem = notification.object as? AVPlayerItem else { return }
        playerItem.seek(to: CMTime.zero, completionHandler: nil)
        videoPlayer.play()
    }
}
ViewModel class
func startImageTracking(arView: ARView) {
    guard let arReferenceImage = ARReferenceImage.referenceImages(inGroupNamed: "ARResources", bundle: Bundle.main) else { return }
    let configuration = ARImageTrackingConfiguration().do {
        $0.trackingImages = arReferenceImage
        $0.maximumNumberOfTrackedImages = 1
    }
    // Query support on the configuration class that actually runs,
    // not on ARWorldTrackingConfiguration.
    let personSegmentation: ARConfiguration.FrameSemantics = .personSegmentationWithDepth
    if ARImageTrackingConfiguration.supportsFrameSemantics(personSegmentation) {
        configuration.frameSemantics.insert(personSegmentation)
    }
    arView.session.run(configuration, options: [.resetTracking])
}
ARSessionDelegate protocol
func session(_ session: ARSession, didAdd anchors: [ARAnchor]) {
    for anchor in anchors {
        if let imageAnchor = anchor as? ARImageAnchor {
            let videoEntity = viewModel.videoEntity.videoModelEntity(
                width: Float(imageAnchor.referenceImage.physicalSize.width),
                height: Float(imageAnchor.referenceImage.physicalSize.height))
            viewModel.videoEntity.placeVideoScreen(videoEntity: videoEntity, imageAnchor: imageAnchor, arView: arView)
        }
    }
}
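A hedged guess at the overlap: the video plane sits only 1.5 cm above the printed image, so at close range the limited depth precision can let the camera image win over the plane (z-fighting-style artifacts). One cheap experiment is simply a larger offset; the value below is a guess to tune, not a confirmed fix:

// Sketch: lift the video plane a bit further off the printed image to
// reduce depth fighting when the camera gets close.
videoEntity.setPosition(SIMD3<Float>(x: 0, y: 0.03, z: 0), relativeTo: anchorEntity)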
I'm trying to put a custom material on ARMeshAnchors, but during the session the FPS starts to drop, and the following warning appears.
Link to a video example of the FPS drop
https://www.dropbox.com/s/p7g7qgvb5o95gdf/RPReplay_Final1637641112.mov?dl=0
Console Warning
ARSessionDelegate is retaining 11 ARFrames. This can lead to future camera frames being dropped
CameraViewController
final class CameraViewController: UIViewController {
    private let arView = ARView().do {
        $0.automaticallyConfigureSession = false
    }
    private var meshAnchorTracker: MeshAnchorTracker?

    override func viewDidLoad() {
        super.viewDidLoad()
        MetalLibLoader.initializeMetal()
        setupSubviews()
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        setupARSession()
    }

    private func setupSubviews() {
        view.addSubview(arView)
        arView.frame = view.bounds
    }

    private func setupARSession() {
        configureWorldTracking()
        setupPhysicsOrigin()
    }

    private func configureWorldTracking() {
        let configuration = ARWorldTrackingConfiguration()
        let sceneReconstruction: ARWorldTrackingConfiguration.SceneReconstruction = .mesh
        if ARWorldTrackingConfiguration.supportsSceneReconstruction(sceneReconstruction) {
            configuration.sceneReconstruction = sceneReconstruction
            meshAnchorTracker = .init(arView: arView)
        }
        configuration.planeDetection = .horizontal
        arView.session.run(configuration, options: [.resetSceneReconstruction])
        arView.renderOptions.insert(.disableMotionBlur)
        arView.session.delegate = self
    }

    private func setupPhysicsOrigin() {
        let physicsOrigin = Entity()
        physicsOrigin.scale = .init(repeating: 0.1)
        let anchor = AnchorEntity(world: SIMD3<Float>())
        anchor.addChild(physicsOrigin)
        arView.scene.addAnchor(anchor)
        arView.physicsOrigin = physicsOrigin
    }

    func updateAnchors(anchors: [ARAnchor]) {
        for anchor in anchors.compactMap({ $0 as? ARMeshAnchor }) {
            meshAnchorTracker?.update(anchor)
        }
    }
}

extension CameraViewController: ARSessionDelegate {
    func session(_ session: ARSession, didAdd anchors: [ARAnchor]) {
        updateAnchors(anchors: anchors)
    }

    func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {
        updateAnchors(anchors: anchors)
    }

    func session(_ session: ARSession, didRemove anchors: [ARAnchor]) {
        for anchor in anchors.compactMap({ $0 as? ARMeshAnchor }) {
            meshAnchorTracker?.remove(anchor)
        }
    }
}
MeshAnchorTracker
struct MeshAnchorTracker {
    var entries: [ARMeshAnchor: Entry] = [:]
    weak var arView: ARView?

    init(arView: ARView) {
        self.arView = arView
    }

    class Entry {
        var entity: AnchorEntity
        private var currentTask: Cancellable?

        var nextTask: LoadRequest<MeshResource>? {
            didSet {
                scheduleNextTask()
            }
        }

        static let material: RealityKit.CustomMaterial = {
            let customMaterial: CustomMaterial
            let surfaceShader = CustomMaterial.SurfaceShader(named: "plasma", in: MetalLibLoader.library)
            do {
                customMaterial = try CustomMaterial(surfaceShader: surfaceShader, lightingModel: .lit)
            } catch {
                fatalError(error.localizedDescription)
            }
            return customMaterial
        }()

        func scheduleNextTask() {
            guard let task = nextTask else { return }
            guard currentTask == nil else { return }
            self.nextTask = nil
            currentTask = task
                .sink(
                    receiveCompletion: { result in
                        switch result {
                        case .failure(let error): assertionFailure("\(error)")
                        default: return
                        }
                    },
                    receiveValue: { [weak self] mesh in
                        self?.currentTask = nil
                        self?.entity.components[ModelComponent.self] = ModelComponent(
                            mesh: mesh,
                            materials: [Self.material]
                        )
                        self?.scheduleNextTask()
                    }
                )
        }

        init(entity: AnchorEntity) {
            self.entity = entity
        }
    }

    mutating func update(_ anchor: ARMeshAnchor) {
        let tracker: Entry = {
            if let tracker = entries[anchor] { return tracker }
            let entity = AnchorEntity(world: SIMD3<Float>())
            let tracker = Entry(entity: entity)
            entries[anchor] = tracker
            arView?.scene.addAnchor(entity)
            return tracker
        }()
        let entity = tracker.entity
        entity.transform = .init(matrix: anchor.transform)

        let geom = anchor.geometry
        var desc = MeshDescriptor()
        let posValues = geom.vertices.asSIMD3(ofType: Float.self)
        desc.positions = .init(posValues)
        let normalValues = geom.normals.asSIMD3(ofType: Float.self)
        desc.normals = .init(normalValues)
        desc.primitives = .polygons(
            // Every face carries the same number of indices (normally 3).
            (0..<geom.faces.count).map { _ in UInt8(geom.faces.indexCountPerPrimitive) },
            (0..<geom.faces.count * geom.faces.indexCountPerPrimitive).map {
                geom.faces.buffer.contents()
                    .advanced(by: $0 * geom.faces.bytesPerIndex)
                    .assumingMemoryBound(to: UInt32.self).pointee
            }
        )
        tracker.nextTask = MeshResource.generateAsync(from: [desc])
    }

    mutating func remove(_ anchor: ARMeshAnchor) {
        if let entry = self.entries[anchor] {
            entry.entity.removeFromParent()
            self.entries[anchor] = nil
        }
    }
}

extension ARGeometrySource {
    func asArray<T>(ofType: T.Type) -> [T] {
        dispatchPrecondition(condition: .onQueue(.main))
        assert(MemoryLayout<T>.stride == stride, "Invalid stride \(MemoryLayout<T>.stride); expected \(stride)")
        return (0..<self.count).map {
            buffer.contents().advanced(by: offset + stride * Int($0)).assumingMemoryBound(to: T.self).pointee
        }
    }

    func asSIMD3<T>(ofType: T.Type) -> [SIMD3<T>] {
        return asArray(ofType: (T, T, T).self).map { .init($0.0, $0.1, $0.2) }
    }
}
What could the problem be? 🥵
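A hedged mitigation for the warning: ARKit retains each ARFrame until the delegate callback that delivered it returns, so heavy per-frame mesh rebuilding on the main queue is the usual culprit. Returning quickly and throttling per-anchor rebuilds often helps; lastUpdate here is a hypothetical addition, not part of the original code:

import QuartzCore

// Sketch: rebuild each anchor's mesh at most twice per second so the
// delegate returns quickly and ARKit can recycle its frames.
private var lastUpdate: [UUID: CFTimeInterval] = [:]

func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {
    let now = CACurrentMediaTime()
    for anchor in anchors.compactMap({ $0 as? ARMeshAnchor }) {
        if now - (lastUpdate[anchor.identifier] ?? 0) < 0.5 { continue }
        lastUpdate[anchor.identifier] = now
        meshAnchorTracker?.update(anchor)
    }
}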
Hello everyone! How do I apply a custom shader, based on depth, only to the floor?
I'm trying to use the scene depth to restrict the shader to the floor alone, but apparently I'm doing something wrong.
Links
https://www.dropbox.com/s/4ghun92frlcg7hz/IMG_9960.PNG?dl=0
https://www.dropbox.com/home?preview=-2362988581602429186.MP4
PostProcess.metal
float linearizeDepth(float sampleDepth, float4x4 viewMatrix) {
    constexpr float kDepthEpsilon = 1e-5f;
    float d = max(kDepthEpsilon, sampleDepth);
    d = abs(-viewMatrix[3][2] / d);
    return d;
}

// Metal samplers only support nearest/linear filtering; the original
// `filter::bicubic` is not a valid MSL filter mode.
constexpr sampler textureSampler(address::clamp_to_edge, filter::linear);

float getDepth(float2 coords, constant InputArgs *args,
               texture2d<float, access::sample> inDepth,
               depth2d<float, access::sample> arDepth) {
    float2 arDepthCoords = args->orientationTransform * coords + args->orientationOffset;
    float realDepth = arDepth.sample(textureSampler, arDepthCoords);
    float virtualDepth = linearizeDepth(inDepth.sample(textureSampler, coords)[0], args->viewMatrix);
    bool realFragment = (virtualDepth <= FLT_EPSILON);
    if (realFragment) { virtualDepth = realDepth; }
    return min(virtualDepth, realDepth);
}

float3 getDirection(float2 screenCoord, constant InputArgs *args) {
    float3 top = mix(args->topLeft.xyz, args->topRight.xyz, screenCoord.x);
    float3 bottom = mix(args->bottomLeft.xyz, args->bottomRight.xyz, screenCoord.x);
    return normalize(mix(bottom, top, screenCoord.y));
}

float3 worldCoordsForDepth(float depth, float2 screenCords, constant InputArgs *args) {
    float3 centerDirection = getDirection(float2(0.5, 0.5), args);
    float3 direction = getDirection(screenCords, args);
    float depth2 = depth / dot(direction, centerDirection);
    return direction * depth2 + args->viewTranslation.xyz;
}

// Helpers used below but not shown in the original post; these are
// standard stand-in implementations (assumptions).
float rand(float2 co) {
    return fract(sin(dot(co, float2(12.9898, 78.233))) * 43758.5453);
}

float3 hsv2rgb(float3 c) {
    float4 K = float4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);
    float3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);
    return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);
}

[[kernel]]
void postProcess(uint2 gid [[thread_position_in_grid]],
                 texture2d<half, access::read> inputTexture [[texture(0)]],
                 texture2d<float, access::sample> inDepth [[texture(1)]],
                 texture2d<half, access::write> outputTexture [[texture(2)]],
                 depth2d<float, access::sample> arDepth [[texture(3)]],
                 constant InputArgs *args [[buffer(0)]]) {
    float2 screenCoords = float2(float(gid[0]) / float(outputTexture.get_width()),
                                 float(gid[1]) / float(outputTexture.get_height()));
    float rawDepth = getDepth(screenCoords, args, inDepth, arDepth);
    float3 worldCoords = worldCoordsForDepth(rawDepth, screenCoords, args);
    float depth = rawDepth;
    depth = 1 - pow(1 / (pow(depth, args->intensity) + 1), args->falloff);
    depth = clamp(depth, 0.0, 1.0);
    half4 nearColor = inputTexture.read(gid);
    float blend = pow(1 - depth, args->exponent);
    half4 color = half4(0.0);
    float2 frag = worldCoords.xz;
    frag *= 1.0 - 0.2 * cos(frag) * sin(3.14159 * 0.5 * inDepth.sample(textureSampler, float2(0.0)).x);
    frag *= 5.0;
    float random = rand(floor(frag));
    float2 black = smoothstep(1.0, 0.8, cos(frag * 3.14159 * 2.0));
    float3 finalColor = hsv2rgb(float3(random, 1.0, 1.0));
    finalColor *= black.x * black.y * smoothstep(1.0, 0.0, length(fract(frag) - 0.5));
    finalColor *= 0.5 + 0.5 * cos(random + random * args->time + args->time + 3.14159 * 0.5 * inDepth.sample(textureSampler, float2(0.7)).x);
    color = half(blend) * nearColor + (1.0h - half(blend)) * half4(half3(finalColor), 1.0h);
    // The original snippet never wrote the result back; without this the
    // kernel has no visible effect.
    outputTexture.write(color, gid);
}
I really hope for help understanding this one.
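One way to restrict the effect to the floor (a sketch under assumptions the post does not confirm): track the lowest detected horizontal plane on the Swift side, pass its world-space height into the shader through a new InputArgs field (say floorY, a hypothetical name), and early-out in the kernel when abs(worldCoords.y - args->floorY) exceeds a few centimeters, writing nearColor unchanged.

// Sketch: treat the lowest horizontal ARPlaneAnchor as the floor and
// remember its world-space height for the post-process args buffer.
// `floorY` is a hypothetical stored property on the view controller.
func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {
    for anchor in anchors.compactMap({ $0 as? ARPlaneAnchor }) where anchor.alignment == .horizontal {
        floorY = min(floorY, anchor.transform.columns.3.y)
    }
}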
As I understand it, Metal can only be used in fully immersive ("VR") mode. But is there any way to do a custom post-process in mixed reality mode?
Reality Composer Pro has a triplanar projection node that takes images as input. Is there a way to drive a triplanar projection with dynamic material input?
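A speculative sketch for this last question: if the triplanar node's image input is promoted to a material input in Reality Composer Pro, it becomes a named parameter that can be swapped at runtime through ShaderGraphMaterial.setParameter. The material path, scene file, bundle, and "TriplanarImage" parameter name below are all hypothetical:

// Sketch: load the graph material authored in Reality Composer Pro and
// feed its (promoted) triplanar image input a texture chosen at runtime.
var material = try await ShaderGraphMaterial(named: "/Root/TriplanarMaterial",
                                             from: "Scene.usda",
                                             in: realityKitContentBundle)
let texture = try TextureResource.load(named: "DynamicTexture")
try material.setParameter(name: "TriplanarImage", value: .textureResource(texture))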