diff --git a/examples/src/examples/xr/ar-camera-depth.mjs b/examples/src/examples/xr/ar-camera-depth.mjs new file mode 100644 index 00000000000..3c4f6acd356 --- /dev/null +++ b/examples/src/examples/xr/ar-camera-depth.mjs @@ -0,0 +1,265 @@ +import * as pc from 'playcanvas'; + +/** + * @typedef {import('../../options.mjs').ExampleOptions} ExampleOptions + * @param {import('../../options.mjs').ExampleOptions} options - The example options. + * @returns {Promise} The example application. + */ +async function example({ canvas }) { + /** + * @param {string} msg - The message. + */ + const message = function (msg) { + /** @type {HTMLDivElement} */ + let el = document.querySelector('.message'); + if (!el) { + el = document.createElement('div'); + el.classList.add('message'); + el.style.position = 'absolute'; + el.style.bottom = '96px'; + el.style.right = '0'; + el.style.padding = '8px 16px'; + el.style.fontFamily = 'Helvetica, Arial, sans-serif'; + el.style.color = '#fff'; + el.style.backgroundColor = 'rgba(0, 0, 0, 0.5)'; + document.body.append(el); + } + el.textContent = msg; + }; + + const app = new pc.Application(canvas, { + mouse: new pc.Mouse(canvas), + touch: new pc.TouchDevice(canvas), + keyboard: new pc.Keyboard(window), + graphicsDeviceOptions: { alpha: true } + }); + + app.setCanvasFillMode(pc.FILLMODE_FILL_WINDOW); + app.setCanvasResolution(pc.RESOLUTION_AUTO); + + // Ensure canvas is resized when window changes size + const resize = () => app.resizeCanvas(); + window.addEventListener('resize', resize); + app.on('destroy', () => { + window.removeEventListener('resize', resize); + }); + + // use device pixel ratio + app.graphicsDevice.maxPixelRatio = window.devicePixelRatio; + + app.start(); + + // create camera + const camera = new pc.Entity(); + camera.addComponent('camera', { + clearColor: new pc.Color(0, 0, 0, 0), + farClip: 10000 + }); + app.root.addChild(camera); + + let shaderUpdated = false; + let shaderDepthArray = null; + let shaderDepthFloat = null; + + const vertShader = /* glsl */ ` + attribute vec3 aPosition; + attribute vec2 aUv0; + uniform mat4 matrix_model; + uniform mat4 matrix_viewProjection; + varying vec2 vUv0; + void main(void) + { + vec4 screenPosition = matrix_viewProjection * matrix_model * vec4(aPosition, 1.0); + gl_Position = screenPosition; + vUv0 = screenPosition.xy; + } + `; + + const fragShader = /* glsl */ ` + varying vec2 vUv0; + uniform vec4 uScreenSize; + uniform mat4 matrix_depth_uv; + uniform float depth_raw_to_meters; + + #ifdef XRDEPTH_ARRAY + uniform int view_index; + uniform highp sampler2DArray depthMap; + #else + uniform sampler2D depthMap; + #endif + + void main (void) { + vec2 uvScreen = gl_FragCoord.xy * uScreenSize.zw; + + // use texture array for multi-view + #ifdef XRDEPTH_ARRAY + uvScreen = uvScreen * vec2(2.0, 1.0) - vec2(view_index, 0.0); + vec3 uv = vec3((matrix_depth_uv * vec4(uvScreen.xy, 0.0, 1.0)).xy, view_index); + #else + vec2 uv = (matrix_depth_uv * vec4(uvScreen.x, 1.0 - uvScreen.y, 0.0, 1.0)).xy; + #endif + + #ifdef XRDEPTH_FLOAT + float depth = texture2D(depthMap, uv).r; + #else + // unpack from AlphaLuminance + vec2 packedDepth = texture2D(depthMap, uv).ra; + float depth = dot(packedDepth, vec2(255.0, 256.0 * 255.0)); + #endif + + depth *= depth_raw_to_meters; + + // depth = 1.0 - min(depth / 2.0, 1.0); // 0..1 = 0m..4m + gl_FragColor = vec4(depth, depth, depth, 1.0); + }`; + + const materialDepth = new pc.Material(); + + /** + * @param {boolean} array - If the depth information uses array texture. 
+     * @param {boolean} float - If the depth information uses an R32F (float) texture.
+     */
+    const updateShader = (array, float) => {
+        if (shaderDepthArray === array && shaderDepthFloat === float)
+            return;
+
+        shaderDepthArray = array;
+        shaderDepthFloat = float;
+
+        const key = 'textureDepthSensing_' + array + float;
+        let frag = fragShader;
+
+        if (shaderDepthArray)
+            frag = '#define XRDEPTH_ARRAY\n' + frag;
+
+        if (shaderDepthFloat)
+            frag = '#define XRDEPTH_FLOAT\n' + frag;
+
+        materialDepth.shader = pc.createShaderFromCode(app.graphicsDevice,
+            vertShader,
+            frag,
+            key, {
+                aPosition: pc.SEMANTIC_POSITION,
+                aUv0: pc.SEMANTIC_TEXCOORD0
+            });
+        materialDepth.clearVariants();
+        materialDepth.update();
+    };
+
+    updateShader(false, false);
+
+    const plane = new pc.Entity();
+    plane.addComponent('render', {
+        type: 'plane'
+    });
+    plane.render.material = materialDepth;
+    plane.render.meshInstances[0].cull = false;
+    plane.setLocalPosition(0, 0, -1);
+    plane.setLocalEulerAngles(90, 0, 0);
+    camera.addChild(plane);
+
+    if (app.xr.supported) {
+        const activate = function () {
+            if (app.xr.isAvailable(pc.XRTYPE_AR)) {
+                camera.camera.startXr(pc.XRTYPE_AR, pc.XRSPACE_LOCALFLOOR, {
+                    depthSensing: { // request access to camera depth
+                        usagePreference: pc.XRDEPTHSENSINGUSAGE_GPU,
+                        dataFormatPreference: pc.XRDEPTHSENSINGFORMAT_F32
+                    },
+                    callback: function (err) {
+                        if (err) message("WebXR Immersive AR failed to start: " + err.message);
+                    }
+                });
+            } else {
+                message("Immersive AR is not available");
+            }
+        };
+
+        app.mouse.on("mousedown", function () {
+            if (!app.xr.active)
+                activate();
+        });
+
+        if (app.touch) {
+            app.touch.on("touchend", function (evt) {
+                if (!app.xr.active) {
+                    // if not in an AR session, activate
+                    activate();
+                } else {
+                    // otherwise end the session
+                    camera.camera.endXr();
+                }
+
+                evt.event.preventDefault();
+                evt.event.stopPropagation();
+            });
+        }
+
+        // end session by keyboard ESC
+        app.keyboard.on('keydown', function (evt) {
+            if (evt.key === pc.KEY_ESCAPE && app.xr.active) {
+                app.xr.end();
+            }
+        });
+
+        app.xr.on('start', function () {
+            message("Immersive AR session has started");
+            console.log('depth gpu optimized', app.xr.views.depthGpuOptimized);
+            console.log('depth texture format', app.xr.views.depthPixelFormat);
+        });
+        app.xr.on('end', function () {
+            shaderUpdated = false;
+            message("Immersive AR session has ended");
+        });
+        app.xr.on('available:' + pc.XRTYPE_AR, function (available) {
+            if (available) {
+                if (!app.xr.views.supportedDepth) {
+                    message("AR Camera Depth is not supported");
+                } else {
+                    message("Touch screen to start AR session");
+                }
+            } else {
+                message("Immersive AR is not available");
+            }
+        });
+
+        app.on('update', () => {
+            // if camera depth is available
+            if (app.xr.views.availableDepth) {
+                if (!shaderUpdated && app.xr.active) {
+                    shaderUpdated = true;
+                    updateShader(app.xr.views.list.length > 1, app.xr.views.depthPixelFormat === pc.PIXELFORMAT_R32F);
+                }
+
+                for (let i = 0; i < app.xr.views.list.length; i++) {
+                    const view = app.xr.views.list[i];
+                    if (!view.textureDepth) // check if depth texture is available
+                        continue;
+
+                    materialDepth.setParameter('depthMap', view.textureDepth);
+                    materialDepth.setParameter('matrix_depth_uv', view.depthUvMatrix.data);
+                    materialDepth.setParameter('depth_raw_to_meters', view.depthValueToMeters);
+                }
+            }
+        });
+
+        if (!app.xr.isAvailable(pc.XRTYPE_AR)) {
+            message("Immersive AR is not available");
+        } else if (!app.xr.views.supportedDepth) {
+            message("AR Camera Depth is not supported");
+        } else {
+            message("Touch screen 
to start AR session"); + } + } else { + message("WebXR is not supported"); + } + return app; +} + +class ArCameraDepthExample { + static CATEGORY = 'XR'; + static NAME = 'AR Camera Depth'; + static example = example; +} + +export { ArCameraDepthExample }; diff --git a/examples/src/examples/xr/ar-depth-sensing-placer.mjs b/examples/src/examples/xr/ar-depth-sensing-placer.mjs new file mode 100644 index 00000000000..0656008ab8e --- /dev/null +++ b/examples/src/examples/xr/ar-depth-sensing-placer.mjs @@ -0,0 +1,200 @@ +import * as pc from 'playcanvas'; + +/** + * @typedef {import('../../options.mjs').ExampleOptions} ExampleOptions + * @param {import('../../options.mjs').ExampleOptions} options - The example options. + * @returns {Promise} The example application. + */ +async function example({ canvas }) { + /** + * @param {string} msg - The message. + */ + const message = function (msg) { + /** @type {HTMLDivElement} */ + let el = document.querySelector('.message'); + if (!el) { + el = document.createElement('div'); + el.classList.add('message'); + el.style.position = 'absolute'; + el.style.bottom = '96px'; + el.style.right = '0'; + el.style.padding = '8px 16px'; + el.style.fontFamily = 'Helvetica, Arial, sans-serif'; + el.style.color = '#fff'; + el.style.backgroundColor = 'rgba(0, 0, 0, 0.5)'; + document.body.append(el); + } + el.textContent = msg; + }; + + const app = new pc.Application(canvas, { + mouse: new pc.Mouse(canvas), + touch: new pc.TouchDevice(canvas), + keyboard: new pc.Keyboard(window), + graphicsDeviceOptions: { alpha: true } + }); + + app.setCanvasFillMode(pc.FILLMODE_FILL_WINDOW); + app.setCanvasResolution(pc.RESOLUTION_AUTO); + + // Ensure canvas is resized when window changes size + const resize = () => app.resizeCanvas(); + window.addEventListener('resize', resize); + app.on('destroy', () => { + window.removeEventListener('resize', resize); + }); + + // use device pixel ratio + app.graphicsDevice.maxPixelRatio = window.devicePixelRatio; + + app.start(); + + // create camera + const camera = new pc.Entity(); + camera.addComponent('camera', { + clearColor: new pc.Color(0, 0, 0, 0), + farClip: 10000 + }); + app.root.addChild(camera); + + // light + const l = new pc.Entity(); + l.addComponent("light", { + type: "spot", + range: 30 + }); + l.translate(0, 10, 0); + app.root.addChild(l); + + // placeable cone + const cone = new pc.Entity(); + cone.addComponent('render', { + type: 'cone' + }); + cone.setLocalScale(0.1, 0.1, 0.1); + app.root.addChild(cone); + + const tmpVec3A = new pc.Vec3(); + + if (app.xr.supported) { + const activate = function () { + if (app.xr.isAvailable(pc.XRTYPE_AR)) { + camera.camera.startXr(pc.XRTYPE_AR, pc.XRSPACE_LOCALFLOOR, { + depthSensing: { // request access to camera depth + usagePreference: pc.XRDEPTHSENSINGUSAGE_GPU, + dataFormatPreference: pc.XRDEPTHSENSINGFORMAT_F32 + }, + callback: function (err) { + if (err) message("WebXR Immersive AR failed to start: " + err.message); + } + }); + } else { + message("Immersive AR is not available"); + } + }; + + app.mouse.on("mousedown", function () { + if (!app.xr.active) + activate(); + }); + + if (app.touch) { + app.touch.on("touchend", function (evt) { + if (!app.xr.active) { + // if not in VR, activate + activate(); + } else { + // otherwise reset camera + camera.camera.endXr(); + } + + evt.event.preventDefault(); + evt.event.stopPropagation(); + }); + } + + // end session by keyboard ESC + app.keyboard.on('keydown', function (evt) { + if (evt.key === pc.KEY_ESCAPE && app.xr.active) { + app.xr.end(); + 
} + }); + + app.xr.on('start', function () { + message("Immersive AR session has started"); + console.log('depth gpu optimized', app.xr.views.depthGpuOptimized); + console.log('depth texture format', app.xr.views.depthPixelFormat); + }); + app.xr.on('end', function () { + message("Immersive AR session has ended"); + }); + app.xr.on('available:' + pc.XRTYPE_AR, function (available) { + if (available) { + if (!app.xr.views.supportedDepth) { + message("AR Camera Depth is not supported"); + } else { + message("Touch screen to start AR session"); + } + } else { + message("Immersive AR is not available"); + } + }); + + let selecting = false; + let selectingTime = 0; + const selectingDelay = 100; + + app.xr.input.on('select', () => { + selecting = true; + selectingTime = Date.now(); + }); + + app.on('update', () => { + // if camera depth is available + if (app.xr.views.availableDepth) { + const view = app.xr.views.list[0]; + const depth = view.getDepth(0.5, 0.5); + + if (depth) { + tmpVec3A.copy(camera.forward); + tmpVec3A.mulScalar(depth); + tmpVec3A.add(camera.getPosition()); + tmpVec3A.y += 0.05; // offset based on cone scale + + cone.enabled = true; + cone.setLocalPosition(tmpVec3A); + + if (selecting && (Date.now() - selectingTime) < selectingDelay) { + selecting = false; + const obj = cone.clone(); + app.root.addChild(obj); + } + } else { + cone.enabled = false; + } + } else { + cone.enabled = false; + } + }); + + if (!app.xr.isAvailable(pc.XRTYPE_AR)) { + message("Immersive AR is not available"); + } else if (!app.xr.views.supportedDepth) { + message("AR Camera Depth is not supported"); + } else { + message("Touch screen to start AR session"); + } + } else { + message("WebXR is not supported"); + + } + return app; +} + +class ArDepthSensingPlacerExample { + static CATEGORY = 'XR'; + static NAME = 'AR Depth Sensing Placer'; + static example = example; +} + +export { ArDepthSensingPlacerExample }; diff --git a/examples/src/examples/xr/index.mjs b/examples/src/examples/xr/index.mjs index 95a9e9181a4..b7b1280f5a2 100644 --- a/examples/src/examples/xr/index.mjs +++ b/examples/src/examples/xr/index.mjs @@ -1,5 +1,7 @@ export * from "./ar-basic.mjs"; export * from "./ar-camera-color.mjs"; +export * from "./ar-camera-depth.mjs"; +export * from "./ar-depth-sensing-placer.mjs"; export * from "./ar-hit-test.mjs"; export * from "./ar-hit-test-anchors.mjs"; export * from "./ar-anchors-persistence.mjs"; diff --git a/src/framework/xr/xr-depth-sensing.js b/src/framework/xr/xr-depth-sensing.js index affc289b964..c385a79fa32 100644 --- a/src/framework/xr/xr-depth-sensing.js +++ b/src/framework/xr/xr-depth-sensing.js @@ -1,69 +1,11 @@ import { EventHandler } from '../../core/event-handler.js'; -import { platform } from '../../core/platform.js'; import { Mat4 } from '../../core/math/mat4.js'; -import { ADDRESS_CLAMP_TO_EDGE, PIXELFORMAT_LA8, FILTER_LINEAR } from '../../platform/graphics/constants.js'; -import { Texture } from '../../platform/graphics/texture.js'; - -import { XRDEPTHSENSINGUSAGE_CPU, XRDEPTHSENSINGUSAGE_GPU } from './constants.js'; - /** - * Depth Sensing provides depth information which is reconstructed using the underlying AR system. - * It provides the ability to query depth values (CPU path) or access a depth texture (GPU path). - * Depth information can be used (not limited to) for reconstructing real world geometry, virtual - * object placement, occlusion of virtual objects by real world geometry and more. 
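The placer's update loop converts one CPU depth sample into a world position. Because the sample is taken at the screen centre (u = v = 0.5), the sampled distance lies along the camera's forward vector, which is why the example avoids a full unprojection. A sketch restating just that math (names as in the example; note that `XrView#getDepth` returns `null` whenever depth arrived GPU-optimized, so this path relies on the UA granting CPU usage):

```javascript
const view = app.xr.views.list[0];
const depth = view ? view.getDepth(0.5, 0.5) : null; // meters, or null

if (depth !== null) {
    // world point = camera position + forward * sampled distance
    const point = new pc.Vec3()
        .copy(camera.forward)
        .mulScalar(depth)
        .add(camera.getPosition());
}
```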
- * - * ```javascript - * // CPU path - * const depthSensing = app.xr.depthSensing; - * if (depthSensing.available) { - * // get depth in the middle of the screen, value is in meters - * const depth = depthSensing.getDepth(depthSensing.width / 2, depthSensing.height / 2); - * } - * ``` - * - * ```javascript - * // GPU path, attaching texture to material - * material.diffuseMap = depthSensing.texture; - * material.setParameter('matrix_depth_uv', depthSensing.uvMatrix.data); - * material.setParameter('depth_raw_to_meters', depthSensing.rawValueToMeters); - * material.update(); - * - * // update UV transformation matrix on depth texture resize - * depthSensing.on('resize', function () { - * material.setParameter('matrix_depth_uv', depthSensing.uvMatrix.data); - * material.setParameter('depth_raw_to_meters', depthSensing.rawValueToMeters); - * }); - * ``` - * - * ```javascript - * // GLSL shader to unpack depth texture - * varying vec2 vUv0; - * - * uniform sampler2D texture_depthSensingMap; - * uniform mat4 matrix_depth_uv; - * uniform float depth_raw_to_meters; - * - * void main(void) { - * // transform UVs using depth matrix - * vec2 texCoord = (matrix_depth_uv * vec4(vUv0.xy, 0.0, 1.0)).xy; - * - * // get luminance alpha components from depth texture - * vec2 packedDepth = texture2D(texture_depthSensingMap, texCoord).ra; - * - * // unpack into single value in millimeters - * float depth = dot(packedDepth, vec2(255.0, 256.0 * 255.0)) * depth_raw_to_meters; // m - * - * // normalize: 0m to 8m distance - * depth = min(depth / 8.0, 1.0); // 0..1 = 0..8 - * - * // paint scene from black to white based on distance - * gl_FragColor = vec4(depth, depth, depth, 1.0); - * } - * ``` - * * @augments EventHandler * @category XR + * @deprecated + * @ignore */ class XrDepthSensing extends EventHandler { /** @@ -72,392 +14,191 @@ class XrDepthSensing extends EventHandler { */ _manager; - /** - * @type {boolean} - * @private - */ - _available = false; - - /** - * @type {XRCPUDepthInformation|null} - * @private - */ - _depthInfoCpu = null; - /** - * @type {XRCPUDepthInformation|null} + * @type {import('./xr-views.js').XrViews} * @private */ - _depthInfoGpu = null; - - /** - * @type {string|null} - * @private - */ - _usage = null; - - /** - * @type {string|null} - * @private - */ - _dataFormat = null; + _views; /** * @type {boolean} * @private */ - _matrixDirty = false; - - /** - * @type {Mat4} - * @private - */ - _matrix = new Mat4(); - - /** - * @type {Uint8Array} - * @private - */ - _emptyBuffer = new Uint8Array(32); + _available = false; /** - * @type {Uint8Array|null} + * @type {import('../../core/event-handle.js').EventHandle|null} * @private */ - _depthBuffer = null; + _evtDepthResize = null; /** - * @type {Texture} + * @type {Mat4} * @private */ - _texture; + _uvMatrix = Mat4.IDENTITY.clone(); /** - * Create a new XrDepthSensing instance. - * - * @param {import('./xr-manager.js').XrManager} manager - WebXR Manager. 
+ * @param {import('./xr-manager.js').XrManager} manager - manager * @hideconstructor */ constructor(manager) { super(); this._manager = manager; + this._views = manager.views; - // TODO: data format can be different - this._texture = new Texture(this._manager.app.graphicsDevice, { - format: PIXELFORMAT_LA8, - mipmaps: false, - addressU: ADDRESS_CLAMP_TO_EDGE, - addressV: ADDRESS_CLAMP_TO_EDGE, - minFilter: FILTER_LINEAR, - magFilter: FILTER_LINEAR, - name: 'XRDepthSensing' - }); - - if (this.supported) { + if (this._views.supportedDepth) { this._manager.on('start', this._onSessionStart, this); this._manager.on('end', this._onSessionEnd, this); } } /** - * Fired when depth sensing data becomes available. - * * @event XrDepthSensing#available + * @deprecated + * @ignore */ /** - * Fired when depth sensing data becomes unavailable. - * * @event XrDepthSensing#unavailable + * @deprecated + * @ignore */ /** - * Fired when the depth sensing texture been resized. The {@link XrDepthSensing#uvMatrix} needs - * to be updated for relevant shaders. - * * @event XrDepthSensing#resize - * @param {number} width - The new width of the depth texture in pixels. - * @param {number} height - The new height of the depth texture in pixels. - * @example - * depthSensing.on('resize', function () { - * material.setParameter('matrix_depth_uv', depthSensing.uvMatrix); - * }); + * @param {number} width + * @param {number} height + * @deprecated + * @ignore */ - /** @ignore */ - destroy() { - this._texture.destroy(); - this._texture = null; - } - /** @private */ _onSessionStart() { - const session = this._manager.session; - - try { - this._usage = session.depthUsage; - this._dataFormat = session.depthDataFormat; - } catch (ex) { - this._usage = null; - this._dataFormat = null; - this._available = false; - - this.fire('error', ex); - } + if (this._views.availableDepth) + this._evtDepthResize = this._views.list[0]?.on('depth:resize', this._onDepthResize, this); } /** @private */ _onSessionEnd() { - this._depthInfoCpu = null; - this._depthInfoGpu = null; - - this._usage = null; - this._dataFormat = null; + if (this._evtDepthResize) { + this._evtDepthResize.off(); + this._evtDepthResize = null; + } if (this._available) { this._available = false; this.fire('unavailable'); } - - this._depthBuffer = null; - this._texture._width = 4; - this._texture._height = 4; - this._texture._levels[0] = this._emptyBuffer; - this._texture.upload(); } /** @private */ - _updateTexture() { - const depthInfo = this._depthInfoCpu || this._depthInfoGpu; - - if (depthInfo) { - let resized = false; - - // changed resolution - if (depthInfo.width !== this._texture.width || depthInfo.height !== this._texture.height) { - this._texture._width = depthInfo.width; - this._texture._height = depthInfo.height; - this._matrixDirty = true; - resized = true; - } - - if (this._depthInfoCpu) { - const dataBuffer = this._depthInfoCpu.data; - this._depthBuffer = new Uint8Array(dataBuffer); - this._texture._levels[0] = this._depthBuffer; - this._texture.upload(); - } else if (this._depthInfoGpu) { - this._texture._levels[0] = this._depthInfoGpu.texture; - this._texture.upload(); - } - - if (resized) this.fire('resize', depthInfo.width, depthInfo.height); - } else if (this._depthBuffer) { - // depth info not available anymore - this._depthBuffer = null; - this._texture._width = 4; - this._texture._height = 4; - this._texture._levels[0] = this._emptyBuffer; - this._texture.upload(); - } + _onDepthResize(width, height) { + this.fire('resize', width, height); } /** - 
* @param {*} frame - XRFrame from requestAnimationFrame callback. - * @param {*} view - First XRView of viewer XRPose. + * @param {number} u - u + * @param {number} v - v + * @deprecated * @ignore + * @returns {number|null} number */ - update(frame, view) { - if (!this._usage) - return; - - let depthInfoCpu = null; - let depthInfoGpu = null; - if (this._usage === XRDEPTHSENSINGUSAGE_CPU && view) { - depthInfoCpu = frame.getDepthInformation(view); - } else if (this._usage === XRDEPTHSENSINGUSAGE_GPU && view) { - depthInfoGpu = frame.getDepthInformation(view); - } - - if ((this._depthInfoCpu && !depthInfoCpu) || (!this._depthInfoCpu && depthInfoCpu) || (this.depthInfoGpu && !depthInfoGpu) || (!this._depthInfoGpu && depthInfoGpu)) { - this._matrixDirty = true; - } - this._depthInfoCpu = depthInfoCpu; - this._depthInfoGpu = depthInfoGpu; - - this._updateTexture(); - - if (this._matrixDirty) { - this._matrixDirty = false; - - const depthInfo = this._depthInfoCpu || this._depthInfoGpu; - - if (depthInfo) { - this._matrix.data.set(depthInfo.normDepthBufferFromNormView.matrix); - } else { - this._matrix.setIdentity(); - } - } - - if ((this._depthInfoCpu || this._depthInfoGpu) && !this._available) { - this._available = true; - this.fire('available'); - } else if (!this._depthInfoCpu && !this._depthInfoGpu && this._available) { - this._available = false; - this.fire('unavailable'); - } + getDepth(u, v) { + return this._views.list[0]?.getDepth(u, v) ?? null; } /** - * Get depth value from depth information in meters. UV is in range of 0..1, with origin in - * top-left corner of a texture. - * - * @param {number} u - U coordinate of pixel in depth texture, which is in range from 0.0 to - * 1.0 (left to right). - * @param {number} v - V coordinate of pixel in depth texture, which is in range from 0.0 to - * 1.0 (top to bottom). - * @returns {number|null} Depth in meters or null if depth information is currently not - * available. - * @example - * const depth = app.xr.depthSensing.getDepth(u, v); - * if (depth !== null) { - * // depth in meters - * } + * @deprecated + * @ignore */ - getDepth(u, v) { - // TODO - // GPU usage - - if (!this._depthInfoCpu) - return null; - - return this._depthInfoCpu.getDepthInMeters(u, v); + update() { + if (this._manager.session && this.supported && this._views.availableDepth && this._views.list.length && !this._available) { + this._available = true; + this.fire('available'); + } } /** - * True if Depth Sensing is supported. - * * @type {boolean} + * @deprecated + * @ignore */ get supported() { - return platform.browser && !!window.XRDepthInformation; + return this._views.supportedDepth; } /** - * True if depth sensing information is available. - * * @type {boolean} - * @example - * if (app.xr.depthSensing.available) { - * const depth = app.xr.depthSensing.getDepth(x, y); - * } + * @deprecated + * @ignore */ get available() { - return this._available; + return this._views.availableDepth; } /** - * Whether the usage is CPU or GPU. - * * @type {string} + * @deprecated * @ignore */ get usage() { - return this._usage; + return this._views.depthUsage; } /** - * The depth sensing data format. - * * @type {string} + * @deprecated * @ignore */ get dataFormat() { - return this._dataFormat; + return this._views.depthFormat; } /** - * Width of depth texture or 0 if not available. 
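With the class reduced to a deprecated facade, every member above forwards to `XrViews` and, through it, to the first `XrView`. For callers migrating off `app.xr.depthSensing`, the mapping is mechanical (a sketch grounded in the forwarding code above; `material` stands for any material consuming the depth map, and the single-view assumption matches the facade itself, which only ever reads `views.list[0]`):

```javascript
// old (deprecated)                        new
// app.xr.depthSensing.supported        -> app.xr.views.supportedDepth
// app.xr.depthSensing.available        -> app.xr.views.availableDepth
// app.xr.depthSensing.getDepth(u, v)   -> view.getDepth(u, v)
// app.xr.depthSensing.texture          -> view.textureDepth
// app.xr.depthSensing.uvMatrix         -> view.depthUvMatrix
// app.xr.depthSensing.rawValueToMeters -> view.depthValueToMeters
const view = app.xr.views.list[0];
if (view && view.textureDepth) {
    material.setParameter('depthMap', view.textureDepth);
    material.setParameter('matrix_depth_uv', view.depthUvMatrix.data);
    material.setParameter('depth_raw_to_meters', view.depthValueToMeters);
    material.update();
}
```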
- * * @type {number} + * @deprecated + * @ignore */ get width() { - const depthInfo = this._depthInfoCpu || this._depthInfoGpu; - return depthInfo && depthInfo.width || 0; + return this._views.list[0]?.textureDepth?.width ?? 0; } /** - * Height of depth texture or 0 if not available. - * * @type {number} + * @deprecated + * @ignore */ get height() { - const depthInfo = this._depthInfoCpu || this._depthInfoGpu; - return depthInfo && depthInfo.height || 0; + return this._views.list[0]?.textureDepth?.height ?? 0; } - /* eslint-disable jsdoc/check-examples */ /** - * Texture that contains packed depth information. The format of this texture is - * {@link PIXELFORMAT_LA8}. It is UV transformed based on the underlying AR system which can - * be normalized using {@link XrDepthSensing#uvMatrix}. - * - * @type {Texture} - * @example - * material.diffuseMap = depthSensing.texture; - * @example - * // GLSL shader to unpack depth texture - * varying vec2 vUv0; - * - * uniform sampler2D texture_depthSensingMap; - * uniform mat4 matrix_depth_uv; - * uniform float depth_raw_to_meters; - * - * void main(void) { - * // transform UVs using depth matrix - * vec2 texCoord = (matrix_depth_uv * vec4(vUv0.xy, 0.0, 1.0)).xy; - * - * // get luminance alpha components from depth texture - * vec2 packedDepth = texture2D(texture_depthSensingMap, texCoord).ra; - * - * // unpack into single value in millimeters - * float depth = dot(packedDepth, vec2(255.0, 256.0 * 255.0)) * depth_raw_to_meters; // m - * - * // normalize: 0m to 8m distance - * depth = min(depth / 8.0, 1.0); // 0..1 = 0m..8m - * - * // paint scene from black to white based on distance - * gl_FragColor = vec4(depth, depth, depth, 1.0); - * } + * @type {import('../../platform/graphics/texture.js').Texture|null} + * @deprecated + * @ignore */ get texture() { - return this._texture; + return this._views.list[0]?.textureDepth; } - /* eslint-enable jsdoc/check-examples */ /** - * 4x4 matrix that should be used to transform depth texture UVs to normalized UVs in a shader. - * It is updated when the depth texture is resized. Refer to {@link XrDepthSensing#resize}. - * * @type {Mat4} - * @example - * material.setParameter('matrix_depth_uv', depthSensing.uvMatrix.data); + * @deprecated + * @ignore */ get uvMatrix() { - return this._matrix; + return this._views.list[0]?.depthUvMatrix ?? this._uvMatrix; } /** - * Multiply this coefficient number by raw depth value to get depth in meters. - * * @type {number} - * @example - * material.setParameter('depth_raw_to_meters', depthSensing.rawValueToMeters); + * @deprecated + * @ignore */ get rawValueToMeters() { - const depthInfo = this._depthInfoCpu || this._depthInfoGpu; - return depthInfo && depthInfo.rawValueToMeters || 0; + return this._views.list[0]?.depthValueToMeters ?? 
0; } } diff --git a/src/framework/xr/xr-manager.js b/src/framework/xr/xr-manager.js index 95ff60fdaa5..d136d4da246 100644 --- a/src/framework/xr/xr-manager.js +++ b/src/framework/xr/xr-manager.js @@ -230,6 +230,7 @@ class XrManager extends EventHandler { this._available[XRTYPE_VR] = false; this._available[XRTYPE_AR] = false; + this.views = new XrViews(this); this.depthSensing = new XrDepthSensing(this); this.domOverlay = new XrDomOverlay(this); this.hitTest = new XrHitTest(this); @@ -329,10 +330,7 @@ class XrManager extends EventHandler { * * @ignore */ - destroy() { - this.depthSensing.destroy(); - this.depthSensing = null; - } + destroy() { } /** * Attempts to start XR session for provided {@link CameraComponent} and optionally fires @@ -876,9 +874,6 @@ class XrManager extends EventHandler { if (this.lightEstimation.supported) this.lightEstimation.update(frame); - if (this.depthSensing.supported) - this.depthSensing.update(frame, pose && pose.views[0]); - if (this.imageTracking.supported) this.imageTracking.update(frame); @@ -888,6 +883,9 @@ class XrManager extends EventHandler { if (this.planeDetection.supported) this.planeDetection.update(frame); + if (this.depthSensing.supported) + this.depthSensing.update(); + if (this.meshDetection.supported) this.meshDetection.update(frame); } diff --git a/src/framework/xr/xr-view.js b/src/framework/xr/xr-view.js index ecb9b688188..e766c4f9b8a 100644 --- a/src/framework/xr/xr-view.js +++ b/src/framework/xr/xr-view.js @@ -1,3 +1,4 @@ +import { EventHandler } from '../../core/event-handler.js'; import { Texture } from '../../platform/graphics/texture.js'; import { Vec4 } from "../../core/math/vec4.js"; import { Mat3 } from "../../core/math/mat3.js"; @@ -7,10 +8,12 @@ import { ADDRESS_CLAMP_TO_EDGE, FILTER_LINEAR, PIXELFORMAT_RGB8 } from '../../pl /** * Represents XR View which represents a screen (mobile phone context) or an eye (HMD context). + * It provides access to view's color and depth information based on capabilities of underlying + * AR system. * * @category XR */ -class XrView { +class XrView extends EventHandler { /** * @type {import('./xr-manager.js').XrManager} * @private @@ -89,24 +92,53 @@ class XrView { */ _textureColor = null; + /** + * @type {Texture|null} + * @private + */ + _textureDepth = null; + + /** + * @type {XRDepthInformation|null} + * @private + */ + _depthInfo = null; + + /** + * @type {Uint8Array} + * @private + */ + _emptyDepthBuffer = new Uint8Array(32); + + /** + * @type {Mat4} + * @private + */ + _depthMatrix = new Mat4(); + /** * Create a new XrView instance. * * @param {import('./xr-manager.js').XrManager} manager - WebXR Manager. * @param {XRView} xrView - [XRView](https://developer.mozilla.org/en-US/docs/Web/API/XRView) * object that is created by WebXR API. + * @param {number} viewsCount - Number of views available for the session. 
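The new `viewsCount` argument decides the depth texture's layout in the constructor below: a single view (phone AR) gets a plain 2D texture (`arrayLength: 0`), while multi-view sessions (stereo HMDs) get a texture array with one layer per view. Shaders must agree with that choice, which is exactly what the `XRDEPTH_ARRAY` branch in the first example selects. A sketch of the array-path GLSL, written as the examples write it (assumes WebGL2/GLSL ES 3.00 and the `view_index` uniform supplied by the renderer changes at the end of this diff):

```javascript
const fragDepthArray = /* glsl */ `
    uniform highp sampler2DArray depthMap; // one layer per XR view
    uniform int view_index;                // set per-view by the forward renderer

    float sampleRawDepth(vec2 uv) {
        // select this view's layer; .r holds the raw depth value
        return texture(depthMap, vec3(uv, float(view_index))).r;
    }
`;
```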
 * @hideconstructor
 */
-    constructor(manager, xrView) {
+    constructor(manager, xrView, viewsCount) {
+        super();
+
         this._manager = manager;
         this._xrView = xrView;
 
+        const device = this._manager.app.graphicsDevice;
+
         if (this._manager.views.supportedColor) {
             this._xrCamera = this._xrView.camera;
 
             // color texture
             if (this._manager.views.availableColor && this._xrCamera) {
-                this._textureColor = new Texture(this._manager.app.graphicsDevice, {
+                this._textureColor = new Texture(device, {
                     format: PIXELFORMAT_RGB8,
                     mipmaps: false,
                     addressU: ADDRESS_CLAMP_TO_EDGE,
@@ -117,12 +149,45 @@ class XrView {
                     height: this._xrCamera.height,
                     name: `XrView-${this._xrView.eye}-Color`
                 });
+            }
+        }
 
-                this._manager.app.graphicsDevice?.on('devicelost', this._onDeviceLost, this);
+        if (this._manager.views.supportedDepth && this._manager.views.availableDepth) {
+            this._textureDepth = new Texture(device, {
+                format: this._manager.views.depthPixelFormat,
+                arrayLength: (viewsCount === 1) ? 0 : viewsCount,
+                mipmaps: false,
+                addressU: ADDRESS_CLAMP_TO_EDGE,
+                addressV: ADDRESS_CLAMP_TO_EDGE,
+                minFilter: FILTER_LINEAR,
+                magFilter: FILTER_LINEAR,
+                width: 4,
+                height: 4,
+                name: `XrView-${this._xrView.eye}-Depth`
+            });
+
+            for (let i = 0; i < this._textureDepth._levels.length; i++) {
+                this._textureDepth._levels[i] = this._emptyDepthBuffer;
             }
         }
+
+        if (this._textureColor || this._textureDepth)
+            device.on('devicelost', this._onDeviceLost, this);
     }
 
+    /**
+     * Fired when the depth sensing texture has been resized. The {@link XrView#depthUvMatrix}
+     * needs to be updated for relevant shaders.
+     *
+     * @event XrView#depth:resize
+     * @param {number} width - The new width of the depth texture in pixels.
+     * @param {number} height - The new height of the depth texture in pixels.
+     * @example
+     * view.on('depth:resize', function () {
+     *     material.setParameter('matrix_depth_uv', view.depthUvMatrix);
+     * });
+     */
+
     /**
      * Texture associated with this view's camera color. Equals to null if camera color is
      * not available or not supported.
@@ -133,6 +198,76 @@ class XrView {
         return this._textureColor;
     }
 
+    /* eslint-disable jsdoc/check-examples */
+    /**
+     * Texture that contains packed depth information which is reconstructed using the
+     * underlying AR system. This texture can be used, among other things, for reconstructing
+     * real world geometry, virtual object placement and occlusion of virtual objects by the
+     * real world geometry.
+     * The format of this texture is {@link PIXELFORMAT_LA8} or {@link PIXELFORMAT_R32F}
+     * based on {@link XrViews#depthFormat}. It is UV transformed based on the underlying AR
+     * system, which can be normalized using {@link XrView#depthUvMatrix}. Equals null if
+     * camera depth is not supported.
+     *
+     * @type {Texture|null}
+     * @example
+     * // GPU path, attaching texture to material
+     * material.setParameter('texture_depthSensingMap', view.textureDepth);
+     * material.setParameter('matrix_depth_uv', view.depthUvMatrix.data);
+     * material.setParameter('depth_to_meters', view.depthValueToMeters);
+     * @example
+     * // GLSL shader to unpack depth texture
+     * varying vec2 vUv0;
+     *
+     * uniform sampler2D texture_depthSensingMap;
+     * uniform mat4 matrix_depth_uv;
+     * uniform float depth_to_meters;
+     *
+     * void main(void) {
+     *     // transform UVs using depth matrix
+     *     vec2 texCoord = (matrix_depth_uv * vec4(vUv0.xy, 0.0, 1.0)).xy;
+     *
+     *     // get luminance alpha components from depth texture
+     *     vec2 packedDepth = texture2D(texture_depthSensingMap, texCoord).ra;
+     *
+     *     // unpack into single value in millimeters
+     *     float depth = dot(packedDepth, vec2(255.0, 256.0 * 255.0)) * depth_to_meters; // m
+     *
+     *     // normalize: 0m to 8m distance
+     *     depth = min(depth / 8.0, 1.0); // 0..1 = 0m..8m
+     *
+     *     // paint scene from black to white based on distance
+     *     gl_FragColor = vec4(depth, depth, depth, 1.0);
+     * }
+     */
+    get textureDepth() {
+        return this._textureDepth;
+    }
+    /* eslint-enable jsdoc/check-examples */
+
+    /**
+     * 4x4 matrix that should be used to transform depth texture UVs to normalized UVs in a
+     * shader. It is updated when the depth texture is resized. Refer to the
+     * {@link XrView#depth:resize} event.
+     *
+     * @type {Mat4}
+     * @example
+     * material.setParameter('matrix_depth_uv', view.depthUvMatrix.data);
+     */
+    get depthUvMatrix() {
+        return this._depthMatrix;
+    }
+
+    /**
+     * Multiply a raw depth value by this coefficient to get depth in meters.
+     *
+     * @type {number}
+     * @example
+     * material.setParameter('depth_to_meters', view.depthValueToMeters);
+     */
+    get depthValueToMeters() {
+        return this._depthInfo?.rawValueToMeters || 0;
+    }
+
     /**
      * An eye with which this view is associated. Can be any of:
      *
@@ -206,7 +341,7 @@ class XrView {
     }
 
     /**
-     * @param {*} frame - XRFrame from requestAnimationFrame callback.
+     * @param {XRFrame} frame - XRFrame from requestAnimationFrame callback.
      * @param {XRView} xrView - XRView from WebXR API.
      * @ignore
      */
@@ -230,6 +365,7 @@ class XrView {
         this._viewInvMat.set(this._xrView.transform.matrix);
 
         this._updateTextureColor();
+        this._updateDepth(frame);
     }
 
     /**
@@ -290,6 +426,74 @@
         }
     }
 
+    /**
+     * @param {XRFrame} frame - XRFrame from requestAnimationFrame callback.
+     * @private
+     */
+    _updateDepth(frame) {
+        if (!this._manager.views.availableDepth || !this._textureDepth)
+            return;
+
+        const gpu = this._manager.views.depthGpuOptimized;
+
+        const infoSource = gpu ? 
this._manager.webglBinding : frame; + if (!infoSource) { + this._depthInfo = null; + return; + } + + const depthInfo = infoSource.getDepthInformation(this._xrView); + if (!depthInfo) { + this._depthInfo = null; + return; + } + + let matrixDirty = !this._depthInfo !== !depthInfo; + this._depthInfo = depthInfo; + + const width = this._depthInfo?.width || 4; + const height = this._depthInfo?.height || 4; + + let resized = false; + + // resizing + if (this._textureDepth.width !== width || this._textureDepth.height !== height) { + this._textureDepth._width = width; + this._textureDepth._height = height; + matrixDirty = true; + resized = true; + } + + // update depth matrix + if (matrixDirty) { + if (this._depthInfo) { + this._depthMatrix.data.set(this._depthInfo.normDepthBufferFromNormView.matrix); + } else { + this._depthMatrix.setIdentity(); + } + } + + // update texture + if (this._depthInfo) { + if (gpu) { + // gpu + if (this._depthInfo.texture) { + this._textureDepth.impl._glTexture = this._depthInfo.texture; + } + } else { + // cpu + this._textureDepth._levels[0] = new Uint8Array(this._depthInfo.data); + this._textureDepth.upload(); + } + } else { + // clear + this._textureDepth._levels[0] = this._emptyDepthBuffer; + this._textureDepth.upload(); + } + + if (resized) this.fire('depth:resize', width, height); + } + /** * @param {Mat4|null} transform - World Transform of a parents GraphNode. * @ignore @@ -314,17 +518,48 @@ class XrView { _onDeviceLost() { this._frameBufferSource = null; this._frameBuffer = null; + this._depthInfo = null; + } + + /** + * Get depth value from depth information in meters. UV is in range of 0..1, with origin in + * top-left corner of a texture. + * + * @param {number} u - U coordinate of pixel in depth texture, which is in range from 0.0 to + * 1.0 (left to right). + * @param {number} v - V coordinate of pixel in depth texture, which is in range from 0.0 to + * 1.0 (top to bottom). + * @returns {number|null} Depth in meters or null if depth information is currently not + * available. + * @example + * const depth = view.getDepth(u, v); + * if (depth !== null) { + * // depth in meters + * } + */ + getDepth(u, v) { + if (this._manager.views.depthGpuOptimized) + return null; + + return this._depthInfo?.getDepthInMeters(u, v) ?? null; } /** * @ignore */ destroy() { + this._depthInfo = null; + if (this._textureColor) { this._textureColor.destroy(); this._textureColor = null; } + if (this._textureDepth) { + this._textureDepth.destroy(); + this._textureDepth = null; + } + if (this._frameBufferSource) { const gl = this._manager.app.graphicsDevice.gl; diff --git a/src/framework/xr/xr-views.js b/src/framework/xr/xr-views.js index 5d34b59a768..f89596ac31c 100644 --- a/src/framework/xr/xr-views.js +++ b/src/framework/xr/xr-views.js @@ -1,7 +1,8 @@ import { platform } from '../../core/platform.js'; import { EventHandler } from "../../core/event-handler.js"; import { XrView } from "./xr-view.js"; -import { XRTYPE_AR } from "./constants.js"; +import { XRTYPE_AR, XRDEPTHSENSINGUSAGE_GPU, XRDEPTHSENSINGFORMAT_L8A8, XRDEPTHSENSINGFORMAT_F32 } from "./constants.js"; +import { PIXELFORMAT_LA8, PIXELFORMAT_R32F } from '../../platform/graphics/constants.js'; /** * Provides access to list of {@link XrView}'s. 
And information about their capabilities, @@ -40,12 +41,45 @@ class XrViews extends EventHandler { */ _supportedColor = platform.browser && !!window.XRCamera && !!window.XRWebGLBinding; + /** + * @type {boolean} + * @private + */ + _supportedDepth = platform.browser && !!window.XRDepthInformation; + /** * @type {boolean} * @private */ _availableColor = false; + /** + * @type {boolean} + * @private + */ + _availableDepth = false; + + /** + * @type {string} + * @private + */ + _depthUsage = ''; + + /** + * @type {string} + * @private + */ + _depthFormat = ''; + + /** + * @type {object} + * @private + */ + _depthFormats = { + [XRDEPTHSENSINGFORMAT_L8A8]: PIXELFORMAT_LA8, + [XRDEPTHSENSINGFORMAT_F32]: PIXELFORMAT_R32F + }; + /** * @param {import('./xr-manager.js').XrManager} manager - WebXR Manager. * @hideconstructor @@ -102,6 +136,16 @@ class XrViews extends EventHandler { return this._supportedColor; } + /** + * Check if Camera Depth is supported. It might be still unavailable even if requested, + * based on hardware capabilities and granted permissions. + * + * @type {boolean} + */ + get supportedDepth() { + return this._supportedDepth; + } + /** * Check if Camera Color is available. This information becomes available only after * session has started. @@ -112,6 +156,52 @@ class XrViews extends EventHandler { return this._availableColor; } + /** + * Check if Camera Depth is available. This information becomes available only after + * session has started. + * + * @type {boolean} + */ + get availableDepth() { + return this._availableDepth; + } + + /** + * Whether the depth sensing is GPU optimized. + * + * @type {boolean} + * @ignore + */ + get depthGpuOptimized() { + return this._depthUsage === XRDEPTHSENSINGUSAGE_GPU; + } + + /** + * @type {string} + * @ignore + */ + get depthFormat() { + return this._depthFormat; + } + + /** + * The depth sensing pixel format. Currently supported either: + * {@link PIXELFORMAT_LA8} or {@link PIXELFORMAT_R32F} + * + * @type {number|null} + */ + get depthPixelFormat() { + return this._depthFormats[this._depthFormat] ?? null; + } + + /** + * @type {string} + * @ignore + */ + get depthUsage() { + return this._depthUsage; + } + /** * @param {*} frame - XRFrame from requestAnimationFrame callback. * @param {XRView} xrView - XRView from WebXR API. 
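For context on the `_updateDepth` split earlier in this diff: the WebXR Depth Sensing module deliberately exposes two retrieval paths, and the engine simply wraps them. In raw WebXR terms (no engine involved; `glBinding` is the session's `XRWebGLBinding`):

```javascript
// CPU-optimized usage: depth comes from the XRFrame as a CPU-readable buffer.
const cpuInfo = frame.getDepthInformation(xrView); // XRCPUDepthInformation | null
if (cpuInfo) {
    const meters = cpuInfo.getDepthInMeters(0.5, 0.5); // what XrView#getDepth wraps
}

// GPU-optimized usage: depth comes from the XRWebGLBinding as an opaque texture.
const gpuInfo = glBinding.getDepthInformation(xrView); // XRWebGLDepthInformation | null
if (gpuInfo) {
    const glTexture = gpuInfo.texture; // what XrView binds into textureDepth
}

// Both variants carry rawValueToMeters and normDepthBufferFromNormView — the
// sources of XrView#depthValueToMeters and XrView#depthUvMatrix respectively.
```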
@@ -127,7 +217,7 @@ class XrViews extends EventHandler { if (!view) { // add new view - view = new XrView(this._manager, xrView); + view = new XrView(this._manager, xrView, xrViews.length); this._index.set(eye, view); this._list.push(view); view.update(frame, xrView); @@ -167,7 +257,15 @@ class XrViews extends EventHandler { _onSessionStart() { if (this._manager.type !== XRTYPE_AR) return; + this._availableColor = this._manager.session.enabledFeatures.indexOf('camera-access') !== -1; + this._availableDepth = this._manager.session.enabledFeatures.indexOf('depth-sensing') !== -1; + + if (this._availableDepth) { + const session = this._manager.session; + this._depthUsage = session.depthUsage; + this._depthFormat = session.depthDataFormat; + } } /** @@ -179,6 +277,9 @@ class XrViews extends EventHandler { } this._index.clear(); this._availableColor = false; + this._availableDepth = false; + this._depthUsage = ''; + this._depthFormat = ''; this._list.length = 0; } } diff --git a/src/scene/renderer/forward-renderer.js b/src/scene/renderer/forward-renderer.js index 40690c6ba27..313bfd55f09 100644 --- a/src/scene/renderer/forward-renderer.js +++ b/src/scene/renderer/forward-renderer.js @@ -628,6 +628,7 @@ class ForwardRenderer extends Renderer { this.viewId3.setValue(view.viewMat3.data); this.viewProjId.setValue(view.projViewOffMat.data); this.viewPosId.setValue(view.positionData); + this.viewIndexId.setValue(v); if (v === 0) { this.drawInstance(device, drawCall, mesh, style, true); diff --git a/src/scene/renderer/renderer.js b/src/scene/renderer/renderer.js index 1c1165f990e..2cd6ed2c21f 100644 --- a/src/scene/renderer/renderer.js +++ b/src/scene/renderer/renderer.js @@ -220,6 +220,7 @@ class Renderer { this.farClipId = scope.resolve('camera_far'); this.cameraParams = new Float32Array(4); this.cameraParamsId = scope.resolve('camera_params'); + this.viewIndexId = scope.resolve('view_index'); this.blueNoiseJitterId = scope.resolve('blueNoiseJitter'); this.blueNoiseTextureId = scope.resolve('blueNoiseTex32');
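The renderer changes are what make the multi-view shader path usable from user code: `view_index` is resolved once as a scope uniform and set per view inside the per-view render loop, so it is only meaningful while an XR session with multiple views is rendering. A hypothetical debug material showing how any custom shader can consume it (the `vert`/`frag` shaders here are illustrative, not part of this PR):

```javascript
const vert = /* glsl */ `
    attribute vec3 aPosition;
    uniform mat4 matrix_model;
    uniform mat4 matrix_viewProjection;
    void main(void) {
        gl_Position = matrix_viewProjection * matrix_model * vec4(aPosition, 1.0);
    }
`;

const frag = /* glsl */ `
    uniform int view_index;
    void main(void) {
        // tint the left view red and the right view green to visualize the index
        gl_FragColor = (view_index == 0) ? vec4(1.0, 0.0, 0.0, 1.0) : vec4(0.0, 1.0, 0.0, 1.0);
    }
`;

const material = new pc.Material();
material.shader = pc.createShaderFromCode(app.graphicsDevice, vert, frag, 'viewIndexDebug', {
    aPosition: pc.SEMANTIC_POSITION
});
material.update();
```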