I’m looking to replicate the reticle behavior from the Immersive Web hit-test demo.
The only way I know to do hit-testing with PlayCanvas is to start it when a new inputSource is detected (e.g. on a screen tap), as Moka did in this sample project.
// in AR touch on screen adds new input source every time
this.app.xr.input.on('add', function(inputSource) {
// start hit test for added input source
inputSource.hitTestStart({
callback: function(err, hitTestSource) {
// if result is reported
hitTestSource.on('result', function(position, rotation) {
...
});
}
});
});
But I want to hit-test in the update function while moving my phone around, so I can show a reticle where the floor is detected.
In the Immersive Web demo, they can access a hitTestSource from the XRSession like this:
// In this sample we want to cast a ray straight out from the viewer's
// position and render a reticle where it intersects with a real world
// surface. To do this we first get the viewer space, then create a
// hitTestSource that tracks it.
// In this sample we want to cast a ray straight out from the viewer's
// position and render a reticle where it intersects with a real world
// surface. To do this we first get the viewer space, then create a
// hitTestSource that tracks it. The chain is flattened (no nested
// .then) and terminated with a .catch so a rejection — e.g. the
// 'hit-test' feature not being granted for this session — is surfaced
// instead of becoming an unhandled promise rejection.
session.requestReferenceSpace('viewer')
    .then((refSpace) => {
        xrViewerSpace = refSpace;
        return session.requestHitTestSource({ space: xrViewerSpace });
    })
    .then((hitTestSource) => {
        xrHitTestSource = hitTestSource;
    })
    .catch((err) => {
        console.error('Failed to create XR hit-test source:', err);
    });
And hit-test every frame to position the reticle:
// Called every time a XRSession requests that a new frame be drawn.
function onXRFrame(t, frame) {
let session = frame.session;
let pose = frame.getViewerPose(xrRefSpace);
reticle.visible = false;
// If we have a hit test source, get its results for the frame
// and use the pose to display a reticle in the scene.
if (xrHitTestSource && pose) {
let hitTestResults = frame.getHitTestResults(xrHitTestSource);
if (hitTestResults.length > 0) {
let pose = hitTestResults[0].getPose(xrRefSpace);
reticle.visible = true;
reticle.matrix = pose.transform.matrix;
}
}
[...]
}
Is it possible to do this with the current state of the WebXR implementation in PlayCanvas?