WebXR Depth Camera #5848

Merged · 30 commits · Jan 19, 2024

Commits
123d045
XrViews, XR Raw Camera Access
Maksims Nov 4, 2023
4087765
docs and lint
Maksims Nov 5, 2023
7b0007b
xr views events
Maksims Nov 5, 2023
a01c763
Merge branch 'main' into webxr-raw-camera
Maksims Nov 9, 2023
368d551
implement camera color texture copying
Maksims Nov 9, 2023
cb05a1b
merge
Maksims Nov 9, 2023
4312136
Merge branch 'main' into webxr-raw-camera
Maksims Nov 14, 2023
b51f640
Merge branch 'main' into webxr-raw-camera
Maksims Nov 17, 2023
0d2525b
Merge branch 'main' into webxr-raw-camera
Maksims Nov 23, 2023
fc1f31d
PR comments
Maksims Nov 24, 2023
8ffa427
Merge branch 'main' into webxr-raw-camera
Maksims Nov 24, 2023
e1e27f2
wip
Maksims Nov 24, 2023
5f0a805
avoid FBO checks
Maksims Nov 24, 2023
b57595b
wip
Maksims Nov 24, 2023
70067e5
fix
Maksims Nov 24, 2023
a1470b9
depth sensing refactor
Maksims Nov 24, 2023
eb76669
handle webgl device lost
Maksims Nov 25, 2023
3806bbe
merge
Maksims Nov 25, 2023
54c3db8
deprecate XrDepthSensing
Maksims Nov 25, 2023
25e19d4
better backwards compatibility
Maksims Nov 25, 2023
708dfc5
depth information should not be available outside of its XRFrame
Maksims Nov 25, 2023
ba9c19d
clean objects
Maksims Nov 25, 2023
39b7b56
merge
Maksims Nov 29, 2023
3ebb5e6
fixes
Maksims Nov 29, 2023
44b9d86
GPU path for XR Depth Sensing
Maksims Dec 25, 2023
4ca8293
merge
Maksims Dec 25, 2023
cbe64ec
Merge branch 'main' into webxr-depth-camera
Maksims Jan 18, 2024
ad30af7
example of depth sensing for object placement
Maksims Jan 18, 2024
0a2e17d
Merge branch 'main' into webxr-depth-camera
Maksims Jan 18, 2024
cf22917
fixes based on Martin's feedback
Maksims Jan 19, 2024
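
Alongside the GPU path exercised in the example below, this changeset adds a CPU path to the new XrViews API. A minimal sketch of a CPU-side depth query, assuming the usage/format constants and the per-view getDepth method introduced here (getDepth takes normalized screen coordinates and is expected to return a value in meters, or null when no depth is available):

// request the CPU depth-sensing path when starting an AR session
camera.camera.startXr(pc.XRTYPE_AR, pc.XRSPACE_LOCALFLOOR, {
    depthSensing: {
        usagePreference: pc.XRDEPTHSENSINGUSAGE_CPU,
        dataFormatPreference: pc.XRDEPTHSENSINGFORMAT_L8A8
    }
});

// each frame, sample depth at the center of the screen
app.on('update', () => {
    if (!app.xr.views.availableDepth) return;
    for (const view of app.xr.views.list) {
        // normalized screen coordinates (0..1); returns meters,
        // or null when depth is not available in the current XRFrame
        const depth = view.getDepth(0.5, 0.5);
        if (depth !== null) console.log('depth (m):', depth);
    }
});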
265 changes: 265 additions & 0 deletions examples/src/examples/xr/ar-camera-depth.mjs
@@ -0,0 +1,265 @@
import * as pc from 'playcanvas';

/**
 * @typedef {import('../../options.mjs').ExampleOptions} ExampleOptions
 * @param {import('../../options.mjs').ExampleOptions} options - The example options.
 * @returns {Promise<pc.AppBase>} The example application.
 */
async function example({ canvas }) {
    /**
     * @param {string} msg - The message.
     */
    const message = function (msg) {
        /** @type {HTMLDivElement} */
        let el = document.querySelector('.message');
        if (!el) {
            el = document.createElement('div');
            el.classList.add('message');
            el.style.position = 'absolute';
            el.style.bottom = '96px';
            el.style.right = '0';
            el.style.padding = '8px 16px';
            el.style.fontFamily = 'Helvetica, Arial, sans-serif';
            el.style.color = '#fff';
            el.style.backgroundColor = 'rgba(0, 0, 0, 0.5)';
            document.body.append(el);
        }
        el.textContent = msg;
    };

    const app = new pc.Application(canvas, {
        mouse: new pc.Mouse(canvas),
        touch: new pc.TouchDevice(canvas),
        keyboard: new pc.Keyboard(window),
        graphicsDeviceOptions: { alpha: true }
    });

    app.setCanvasFillMode(pc.FILLMODE_FILL_WINDOW);
    app.setCanvasResolution(pc.RESOLUTION_AUTO);

    // Ensure canvas is resized when window changes size
    const resize = () => app.resizeCanvas();
    window.addEventListener('resize', resize);
    app.on('destroy', () => {
        window.removeEventListener('resize', resize);
    });

    // use device pixel ratio
    app.graphicsDevice.maxPixelRatio = window.devicePixelRatio;

    app.start();

    // create camera
    const camera = new pc.Entity();
    camera.addComponent('camera', {
        clearColor: new pc.Color(0, 0, 0, 0),
        farClip: 10000
    });
    app.root.addChild(camera);

    let shaderUpdated = false;
    let shaderDepthArray = null;
    let shaderDepthFloat = null;

    const vertShader = /* glsl */ `
        attribute vec3 aPosition;
        attribute vec2 aUv0;
        uniform mat4 matrix_model;
        uniform mat4 matrix_viewProjection;
        varying vec2 vUv0;
        void main(void)
        {
            vec4 screenPosition = matrix_viewProjection * matrix_model * vec4(aPosition, 1.0);
            gl_Position = screenPosition;
            vUv0 = screenPosition.xy;
        }
    `;

    const fragShader = /* glsl */ `
        varying vec2 vUv0;
        uniform vec4 uScreenSize;
        uniform mat4 matrix_depth_uv;
        uniform float depth_raw_to_meters;

        #ifdef XRDEPTH_ARRAY
            uniform int view_index;
            uniform highp sampler2DArray depthMap;
        #else
            uniform sampler2D depthMap;
        #endif

        void main (void) {
            vec2 uvScreen = gl_FragCoord.xy * uScreenSize.zw;

            // use texture array for multi-view
            #ifdef XRDEPTH_ARRAY
                uvScreen = uvScreen * vec2(2.0, 1.0) - vec2(view_index, 0.0);
                vec3 uv = vec3((matrix_depth_uv * vec4(uvScreen.xy, 0.0, 1.0)).xy, view_index);
            #else
                vec2 uv = (matrix_depth_uv * vec4(uvScreen.x, 1.0 - uvScreen.y, 0.0, 1.0)).xy;
            #endif

            #ifdef XRDEPTH_FLOAT
                float depth = texture2D(depthMap, uv).r;
            #else
                // unpack from AlphaLuminance
                vec2 packedDepth = texture2D(depthMap, uv).ra;
                float depth = dot(packedDepth, vec2(255.0, 256.0 * 255.0));
            #endif

            depth *= depth_raw_to_meters;

            // depth = 1.0 - min(depth / 2.0, 1.0); // 0..1 = 0m..4m
            gl_FragColor = vec4(depth, depth, depth, 1.0);
        }`;
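
    // Note: in the packed (luminance-alpha) path above, the 16-bit raw depth
    // value is reconstructed from two 8-bit channels (r * 255 + a * 65280),
    // and depth_raw_to_meters then converts that raw value to meters.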

    const materialDepth = new pc.Material();

    /**
     * @param {boolean} array - If the depth information uses array texture.
     * @param {boolean} float - If the depth information uses F32R texture.
     */
    const updateShader = (array, float) => {
        if (shaderDepthArray === array && shaderDepthFloat === float)
            return;

        shaderDepthArray = array;
        shaderDepthFloat = float;

        const key = 'textureDepthSensing_' + array + float;
        let frag = fragShader;

        if (shaderDepthArray)
            frag = '#define XRDEPTH_ARRAY\n' + frag;

        if (shaderDepthFloat)
            frag = '#define XRDEPTH_FLOAT\n' + frag;

        materialDepth.shader = pc.createShaderFromCode(app.graphicsDevice,
            vertShader,
            frag,
            key, {
                aPosition: pc.SEMANTIC_POSITION,
                aUv0: pc.SEMANTIC_TEXCOORD0
            });
        materialDepth.clearVariants();
        materialDepth.update();
    };

    updateShader(false, false);

    const plane = new pc.Entity();
    plane.addComponent('render', {
        type: 'plane'
    });
    plane.render.material = materialDepth;
    plane.render.meshInstances[0].cull = false;
    plane.setLocalPosition(0, 0, -1);
    plane.setLocalEulerAngles(90, 0, 0);
    camera.addChild(plane);

    if (app.xr.supported) {
        const activate = function () {
            if (app.xr.isAvailable(pc.XRTYPE_AR)) {
                camera.camera.startXr(pc.XRTYPE_AR, pc.XRSPACE_LOCALFLOOR, {
                    depthSensing: { // request access to camera depth
                        usagePreference: pc.XRDEPTHSENSINGUSAGE_GPU,
                        dataFormatPreference: pc.XRDEPTHSENSINGFORMAT_F32
                    },
                    callback: function (err) {
                        if (err) message("WebXR Immersive AR failed to start: " + err.message);
                    }
                });
            } else {
                message("Immersive AR is not available");
            }
        };

        app.mouse.on("mousedown", function () {
            if (!app.xr.active)
                activate();
        });

        if (app.touch) {
            app.touch.on("touchend", function (evt) {
                if (!app.xr.active) {
                    // if not in AR, activate
                    activate();
                } else {
                    // otherwise end the XR session
                    camera.camera.endXr();
                }

                evt.event.preventDefault();
                evt.event.stopPropagation();
            });
        }

        // end session by keyboard ESC
        app.keyboard.on('keydown', function (evt) {
            if (evt.key === pc.KEY_ESCAPE && app.xr.active) {
                app.xr.end();
            }
        });

        app.xr.on('start', function () {
            message("Immersive AR session has started");
            console.log('depth gpu optimized', app.xr.views.depthGpuOptimized);
            console.log('depth texture format', app.xr.views.depthPixelFormat);
        });
        app.xr.on('end', function () {
            shaderUpdated = false;
            message("Immersive AR session has ended");
        });
        app.xr.on('available:' + pc.XRTYPE_AR, function (available) {
            if (available) {
                if (!app.xr.views.supportedDepth) {
                    message("AR Camera Depth is not supported");
                } else {
                    message("Touch screen to start AR session");
                }
            } else {
                message("Immersive AR is not available");
            }
        });

        app.on('update', () => {
            // if camera depth is available
            if (app.xr.views.availableDepth) {
                if (!shaderUpdated && app.xr.active) {
                    shaderUpdated = true;
                    updateShader(app.xr.views.list.length > 1, app.xr.views.depthPixelFormat === pc.PIXELFORMAT_R32F);
                }

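                // feed each view's depth texture, uv transform and raw-value scale to the material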
                for (let i = 0; i < app.xr.views.list.length; i++) {
                    const view = app.xr.views.list[i];
                    if (!view.textureDepth) // check if depth texture is available
                        continue;

                    materialDepth.setParameter('depthMap', view.textureDepth);
                    materialDepth.setParameter('matrix_depth_uv', view.depthUvMatrix.data);
                    materialDepth.setParameter('depth_raw_to_meters', view.depthValueToMeters);
                }
            }
        });

        if (!app.xr.isAvailable(pc.XRTYPE_AR)) {
            message("Immersive AR is not available");
        } else if (!app.xr.views.supportedDepth) {
            message("AR Camera Depth is not supported");
        } else {
            message("Touch screen to start AR session");
        }
    } else {
        message("WebXR is not supported");
    }
    return app;
}

class ArCameraDepthExample {
    static CATEGORY = 'XR';
    static NAME = 'AR Camera Depth';
    static example = example;
}

export { ArCameraDepthExample };