Hey all — if I'm understanding the question right, you can use `WorldTrackingProvider.queryDeviceAnchor(atTimestamp:)` to do this, passing `CACurrentMediaTime()` as the timestamp.
This gives you the transform of the device's position in world space. Use this if, for instance, you want to place UI at the user's head level.
Here's a sample of this:
First, create a component:
/// Marker component that tags an entity whose transform should follow the
/// device (head) pose. Attach it to any entity that `RelativeUISystem`
/// should reposition each frame.
public struct RelativeUIComponent: Component, Codable {
    // Add any per-entity configuration you need (e.g. a distance offset).

    /// Explicit public initializer: without it, the synthesized memberwise
    /// initializer is `internal`, so clients outside this module could not
    /// create the component.
    public init() {}
}
Then, create a system:
/// RealityKit system that repositions every entity tagged with
/// `RelativeUIComponent` relative to the current device (head) pose.
public struct RelativeUISystem: System {
    /// Matches all entities carrying a `RelativeUIComponent`.
    static let query = EntityQuery(where: .has(RelativeUIComponent.self))

    private let arkitSession = ARKitSession()
    private let worldTrackingProvider = WorldTrackingProvider()

    /// RealityKit calls this once when the system is registered for a scene.
    public init(scene: RealityKit.Scene) {
        runSession()
    }

    /// Starts the ARKit session with the world-tracking provider.
    /// Runs asynchronously; `update(context:)` is a no-op until the
    /// provider reports `.running`.
    func runSession() {
        Task {
            do {
                try await arkitSession.run([worldTrackingProvider])
            } catch {
                print("Error: \(error)")
            }
        }
    }

    /// Per-frame update: queries the device pose once and applies it to
    /// every matching entity.
    public func update(context: SceneUpdateContext) {
        // Only act once the world tracking provider is running.
        guard worldTrackingProvider.state == .running else { return }

        // The device pose is loop-invariant, so query it once per update
        // rather than once per entity. (The original queried inside the
        // loop and `return`ed on a nil anchor, which also skipped any
        // remaining entities mid-iteration.)
        guard let deviceAnchor = worldTrackingProvider.queryDeviceAnchor(
            atTimestamp: CACurrentMediaTime()
        ) else { return }

        // Transform of the device in world space.
        let cameraTransform = Transform(matrix: deviceAnchor.originFromAnchorTransform)

        for entity in context.entities(
            matching: Self.query,
            updatingSystemWhen: .rendering
        ) {
            // Make per-update changes to each entity here.
            entity.transform.translation.z = cameraTransform.translation.z + 0.5 // or whatever you need from here
        }
    }
}
Register the system and component in your app:
@main
struct MyApp: App {
    init() {
        // Register the custom component and system with RealityKit before
        // any scene content is created. (Registration order is independent.)
        RelativeUIComponent.registerComponent()
        RelativeUISystem.registerSystem()
    }

    var body: some Scene {
        ImmersiveSpace {
            ContentView()
        }
    }
}
Lastly, add the component to the attachment entity inside the RealityView:
// Look up the "devicePose" attachment entity, tag it with the component so
// RelativeUISystem updates it each frame, then place it in the scene.
if let attachmentEntity = attachments.entity(for: "devicePose") {
    attachmentEntity.components[RelativeUIComponent.self] = RelativeUIComponent()
    content.add(attachmentEntity)
}