Tags: swiftui · anchor · object-detection · realitykit · visionos

How to add onTapGesture on a mesh?


I'm encountering a slight problem with my current setup. The tracking code operates effectively in isolation; I'm able to detect and draw a sphere on the tracked object. My goal is that when my Apple Vision Pro detects the image, it launches the Model3D at the position of the tracked object.

Here's the relevant code snippet:

 @Observable
 @MainActor
 class ImageTrackingModel {

    /// The ARKit session that drives image tracking.
    private let session = ARKitSession()

    /// True while the reference image is currently tracked; read via `isDetected()`.
    private var isImageDetected = false

    // Load the images that we want to track from the "ref" asset group.
    private let imageTrackingProvider = ImageTrackingProvider(
        referenceImages: ReferenceImage.loadReferenceImages(inGroupNamed: "ref")
    )

    /// Root entity; anchored entities are added as children of this node.
    private var contentEntity = Entity()

    /// Maps each anchor's identifier to the entity placed for it.
    private var entityMap: [UUID: Entity] = [:]

    /// Returns the root entity to install into the RealityView content.
    func setupContentEntity() -> Entity {
        return contentEntity
    }

    /// Starts the ARKit session with the image-tracking provider.
    /// Logs (instead of silently doing nothing) when tracking is unsupported,
    /// e.g. when running in the simulator.
    func runSession() async {
        guard ImageTrackingProvider.isSupported else {
            // BUGFIX: previously this case fell through silently.
            print("ImageTrackingProvider is not supported on this device")
            return
        }
        do {
            try await session.run([imageTrackingProvider])
            print("[\(type(of: self))] [\(#function)] session.run")
            print("Session is running")
        } catch {
            print(error)
            print("Error running session")
        }
    }

    /// Consumes the provider's anchor-update stream and forwards each
    /// anchor to `updateImage(_:)`. Runs until the stream finishes.
    func processImageTrackingUpdates() async {
        print("[\(type(of: self))] [\(#function)] called")

        for await update in imageTrackingProvider.anchorUpdates {
            print("[\(type(of: self))] [\(#function)] anchorUpdates")

            updateImage(update.anchor)
        }
    }

    /// Observes session events and logs authorization / provider-state changes.
    func monitorSessionEvents() async {
        for await event in session.events {
            switch event {
            case .authorizationChanged(type: _, status: let status):
                print("Authorization changed to: \(status)")
                if status == .denied {
                    print("Authorization status: denied")
                }
            case .dataProviderStateChanged(dataProviders: let providers, newState: let state, error: let error):
                print("Data provider changed: \(providers), \(state)")
                if let error {
                    print("Data provider reached an error state: \(error)")
                }
            @unknown default:
                fatalError("Unhandled new event type \(event)")
            }
        }
    }

    /// Creates (once per anchor) a sphere entity for the recognized image and
    /// keeps its transform in sync with the anchor while it is tracked.
    private func updateImage(_ anchor: ImageAnchor) {
        if entityMap[anchor.id] == nil {
            let imageID = anchor.referenceImage.name
            print("Recognized image with ID: \(imageID)")

            // BUGFIX: removed unused `width`/`height`/`depth` locals (they were
            // computed but never referenced). The physical size is still
            // available via anchor.referenceImage.physicalSize if the entity
            // should ever be sized to match the printed image.
            let material = UnlitMaterial(color: UIColor.blue.withAlphaComponent(0.65))
            // Pass materials at init instead of mutating `entity.model?` after
            // construction — same result, no optional chain.
            let entity = ModelEntity(mesh: .generateSphere(radius: 0.05), materials: [material])

            entityMap[anchor.id] = entity
            contentEntity.addChild(entity)
        }

        if anchor.isTracked {
            entityMap[anchor.id]?.transform = Transform(matrix: anchor.originFromAnchorTransform)
            isImageDetected = true
        } else {
            isImageDetected = false
        }
    }

    /// Whether the reference image is currently being tracked.
    func isDetected() -> Bool {
        return isImageDetected
    }
}

Can you help me integrate this view so that it launches the Model3D at the anchor's position instead of drawing the sphere mesh?

Here is the Model3D view that I want to integrate:

// Displays the "Scene" model from the RealityKit content bundle and toggles
// a companion window ("thirdWindow") when the model's frame is tapped.
ZStack {
            
                // NOTE(review): onTapGesture here hits the SwiftUI view's 2D
                // frame, not the 3D mesh itself — for mesh-level hit testing a
                // CollisionComponent/InputTargetComponent is needed (see answer).
                Model3D(named: "Scene", bundle: realityKitContentBundle)
                    .frame(width: 100, height: 100)
                    .opacity(1)
                    .onTapGesture {
                        // Flip the sheet state, then open or dismiss the
                        // window so it matches the new state.
                        isOptionsSheetPresented.toggle()
                        if isOptionsSheetPresented {
                            openWindow(id: "thirdWindow")
                        } else {
                            dismissWindow(id: "thirdWindow")
                        }
                    }
                    .onAppear {
                        // NOTE(review): withAnimation wraps only a print; no
                        // animatable state is mutated inside — confirm intent.
                        withAnimation(Animation.easeInOut(duration: 1).repeatForever()) {
                            print("Image is detected")
                        }
                    }
            
        }

Solution

  • You'll have to add a CollisionComponent to your entity to detect gestures. Then add the gesture with a modifier on your RealityView:

        RealityView { content in
        ...
        model.components.set(InputTargetComponent())
        model.collision = CollisionComponent(shapes: [.generateSphere(...)])
        } update: { content in
        ...
        }
        .gesture(
            TapGesture(count: 1)
               .onEnded { value in