From 123d045e227dba295dcc75b36f30e5680fba0a98 Mon Sep 17 00:00:00 2001 From: mrmaxm Date: Sat, 4 Nov 2023 13:39:25 +0200 Subject: [PATCH 01/19] XrViews, XR Raw Camera Access --- examples/package-lock.json | 46 ++--- examples/src/examples/xr/ar-basic.mjs | 7 + examples/src/examples/xr/ar-camera-color.mjs | 197 +++++++++++++++++++ examples/src/examples/xr/index.mjs | 1 + src/framework/xr/constants.js | 4 + src/framework/xr/xr-depth-sensing.js | 4 +- src/framework/xr/xr-manager.js | 113 +++++------ src/framework/xr/xr-view.js | 164 +++++++++++++++ src/framework/xr/xr-views.js | 94 +++++++++ src/scene/renderer/forward-renderer.js | 8 +- src/scene/renderer/renderer.js | 31 +-- 11 files changed, 557 insertions(+), 112 deletions(-) create mode 100644 examples/src/examples/xr/ar-camera-color.mjs create mode 100644 src/framework/xr/xr-view.js create mode 100644 src/framework/xr/xr-views.js diff --git a/examples/package-lock.json b/examples/package-lock.json index 59943da9d95..8be323a30c2 100644 --- a/examples/package-lock.json +++ b/examples/package-lock.json @@ -57,23 +57,25 @@ "version": "1.67.0-dev", "dev": true, "license": "MIT", + "dependencies": { + "@types/webxr": "^0.5.7", + "@webgpu/types": "^0.1.38" + }, "devDependencies": { - "@babel/core": "^7.23.0", + "@babel/core": "^7.23.2", "@babel/eslint-parser": "^7.22.15", - "@babel/preset-env": "^7.22.20", + "@babel/preset-env": "^7.23.2", "@playcanvas/canvas-mock": "^1.0.1", "@playcanvas/eslint-config": "^1.7.1", "@playcanvas/jsdoc-template": "^1.1.2", - "@rollup/plugin-babel": "^6.0.3", - "@rollup/plugin-node-resolve": "^15.2.1", - "@rollup/plugin-strip": "^3.0.2", - "@rollup/plugin-terser": "^0.4.3", - "@rollup/pluginutils": "^5.0.4", - "@types/webxr": "^0.5.5", - "@webgpu/types": "^0.1.35", + "@rollup/plugin-babel": "^6.0.4", + "@rollup/plugin-node-resolve": "^15.2.3", + "@rollup/plugin-strip": "^3.0.4", + "@rollup/plugin-terser": "^0.4.4", + "@rollup/pluginutils": "^5.0.5", "c8": "^8.0.0", "chai": "^4.3.10", - "eslint": "^8.50.0", + "eslint": "^8.52.0", "fflate": "^0.8.1", "jsdoc": "^4.0.2", "jsdoc-tsimport-plugin": "^1.0.5", @@ -87,7 +89,7 @@ "rollup-plugin-jscc": "2.0.0", "rollup-plugin-visualizer": "^5.9.2", "serve": "^14.2.1", - "sinon": "^16.0.0", + "sinon": "^17.0.0", "typedoc": "^0.25.1", "typedoc-plugin-mdn-links": "^3.1.0", "typescript": "^5.2.2", @@ -9820,22 +9822,22 @@ "playcanvas": { "version": "file:..", "requires": { - "@babel/core": "^7.23.0", + "@babel/core": "^7.23.2", "@babel/eslint-parser": "^7.22.15", - "@babel/preset-env": "^7.22.20", + "@babel/preset-env": "^7.23.2", "@playcanvas/canvas-mock": "^1.0.1", "@playcanvas/eslint-config": "^1.7.1", "@playcanvas/jsdoc-template": "^1.1.2", - "@rollup/plugin-babel": "^6.0.3", - "@rollup/plugin-node-resolve": "^15.2.1", - "@rollup/plugin-strip": "^3.0.2", - "@rollup/plugin-terser": "^0.4.3", - "@rollup/pluginutils": "^5.0.4", - "@types/webxr": "^0.5.5", - "@webgpu/types": "^0.1.35", + "@rollup/plugin-babel": "^6.0.4", + "@rollup/plugin-node-resolve": "^15.2.3", + "@rollup/plugin-strip": "^3.0.4", + "@rollup/plugin-terser": "^0.4.4", + "@rollup/pluginutils": "^5.0.5", + "@types/webxr": "^0.5.7", + "@webgpu/types": "^0.1.38", "c8": "^8.0.0", "chai": "^4.3.10", - "eslint": "^8.50.0", + "eslint": "^8.52.0", "fflate": "^0.8.1", "jsdoc": "^4.0.2", "jsdoc-tsimport-plugin": "^1.0.5", @@ -9849,7 +9851,7 @@ "rollup-plugin-jscc": "2.0.0", "rollup-plugin-visualizer": "^5.9.2", "serve": "^14.2.1", - "sinon": "^16.0.0", + "sinon": "^17.0.0", "typedoc": "^0.25.1", "typedoc-plugin-mdn-links": 
"^3.1.0", "typescript": "^5.2.2", diff --git a/examples/src/examples/xr/ar-basic.mjs b/examples/src/examples/xr/ar-basic.mjs index ad68776a507..c41851c4806 100644 --- a/examples/src/examples/xr/ar-basic.mjs +++ b/examples/src/examples/xr/ar-basic.mjs @@ -15,6 +15,13 @@ async function example({ canvas }) { if (!el) { el = document.createElement('div'); el.classList.add('message'); + el.style.position = 'absolute'; + el.style.bottom = '96px'; + el.style.right = '0'; + el.style.padding = '8px 16px'; + el.style.fontFamily = 'Helvetica, Arial, sans-serif'; + el.style.color = '#fff'; + el.style.backgroundColor = 'rgba(0, 0, 0, 0.5)'; document.body.append(el); } el.textContent = msg; diff --git a/examples/src/examples/xr/ar-camera-color.mjs b/examples/src/examples/xr/ar-camera-color.mjs new file mode 100644 index 00000000000..00c1b5941f3 --- /dev/null +++ b/examples/src/examples/xr/ar-camera-color.mjs @@ -0,0 +1,197 @@ +import * as pc from 'playcanvas'; + +/** + * @typedef {import('../../options.mjs').ExampleOptions} ExampleOptions + * @param {import('../../options.mjs').ExampleOptions} options - The example options. + * @returns {Promise} The example application. + */ +async function example({ canvas }) { + /** + * @param {string} msg - The message. + */ + const message = function (msg) { + /** @type {HTMLDivElement} */ + let el = document.querySelector('.message'); + if (!el) { + el = document.createElement('div'); + el.classList.add('message'); + el.style.position = 'absolute'; + el.style.bottom = '96px'; + el.style.right = '0'; + el.style.padding = '8px 16px'; + el.style.fontFamily = 'Helvetica, Arial, sans-serif'; + el.style.color = '#fff'; + el.style.backgroundColor = 'rgba(0, 0, 0, 0.5)'; + document.body.append(el); + } + el.textContent = msg; + }; + + const app = new pc.Application(canvas, { + mouse: new pc.Mouse(canvas), + touch: new pc.TouchDevice(canvas), + keyboard: new pc.Keyboard(window), + graphicsDeviceOptions: { alpha: true } + }); + + app.setCanvasFillMode(pc.FILLMODE_FILL_WINDOW); + app.setCanvasResolution(pc.RESOLUTION_AUTO); + + // Ensure canvas is resized when window changes size + const resize = () => app.resizeCanvas(); + window.addEventListener('resize', resize); + app.on('destroy', () => { + window.removeEventListener('resize', resize); + }); + + // use device pixel ratio + app.graphicsDevice.maxPixelRatio = window.devicePixelRatio; + + app.start(); + + // create camera + const c = new pc.Entity(); + c.addComponent('camera', { + clearColor: new pc.Color(0, 0, 0, 0), + farClip: 10000 + }); + app.root.addChild(c); + + const l = new pc.Entity(); + l.addComponent("light", { + type: "spot", + range: 30 + }); + l.translate(0, 10, 0); + app.root.addChild(l); + + const material = new pc.StandardMaterial(); + + /** + * @param {number} x - The x coordinate. + * @param {number} y - The y coordinate. + * @param {number} z - The z coordinate. 
+ */ + const createCube = function (x, y, z) { + const cube = new pc.Entity(); + cube.addComponent("render", { + type: "box" + }); + cube.render.material = material; + cube.setLocalScale(0.5, 0.5, 0.5); + cube.translate(x * 0.5, y, z * 0.5); + app.root.addChild(cube); + }; + + // create a grid of cubes + const SIZE = 4; + for (let x = 0; x < SIZE; x++) { + for (let y = 0; y < SIZE; y++) { + createCube(2 * x - SIZE, 0.25, 2 * y - SIZE); + } + } + + if (app.xr.supported) { + const activate = function () { + if (app.xr.isAvailable(pc.XRTYPE_AR)) { + c.camera.startXr(pc.XRTYPE_AR, pc.XRSPACE_LOCALFLOOR, { + cameraColor: true, + callback: function (err) { + if (err) message("WebXR Immersive AR failed to start: " + err.message); + } + }); + } else { + message("Immersive AR is not available"); + } + }; + + app.mouse.on("mousedown", function () { + if (!app.xr.active) + activate(); + }); + + if (app.touch) { + app.touch.on("touchend", function (evt) { + if (!app.xr.active) { + // if not in VR, activate + activate(); + } else { + // otherwise reset camera + c.camera.endXr(); + } + + evt.event.preventDefault(); + evt.event.stopPropagation(); + }); + } + + // end session by keyboard ESC + app.keyboard.on('keydown', function (evt) { + if (evt.key === pc.KEY_ESCAPE && app.xr.active) { + app.xr.end(); + } + }); + + app.xr.on('start', function () { + message("Immersive AR session has started"); + }); + app.xr.on('end', function () { + message("Immersive AR session has ended"); + }); + app.xr.on('available:' + pc.XRTYPE_AR, function (available) { + if (available) { + if (!app.xr.views.supportedColor) { + message("AR Camera Color is not supported"); + } else { + message("Touch screen to start AR session"); + } + } else { + message("Immersive AR is not available"); + } + }); + + app.on('update', () => { + if (app.xr.views.availableColor) { + for(let i = 0; i < app.xr.views.size; i++) { + const view = app.xr.views.list[i]; + if (!view.textureColor) + continue; + + if (!material.diffuseMap) { + material.diffuseMap = view.textureColor; + material.update(); + } + + app.drawTexture(0.5, -0.5, 1, -1, view.textureColor); + } + } + }); + + app.xr.on('end', () => { + if (!material.diffuseMap) + return; + + material.diffuseMap = null; + material.update(); + }) + + if (!app.xr.isAvailable(pc.XRTYPE_AR)) { + message("Immersive AR is not available"); + } else if (!app.xr.views.supportedColor) { + message("AR Camera Color is not supported"); + } else { + message("Touch screen to start AR session"); + } + } else { + message("WebXR is not supported"); + } + return app; +} + +class ArCameraColorExample { + static CATEGORY = 'XR'; + static NAME = 'AR Camera Color'; + static example = example; +} + +export { ArCameraColorExample }; diff --git a/examples/src/examples/xr/index.mjs b/examples/src/examples/xr/index.mjs index c40b96d7c91..1c9b6f43ad3 100644 --- a/examples/src/examples/xr/index.mjs +++ b/examples/src/examples/xr/index.mjs @@ -1,4 +1,5 @@ export * from "./ar-basic.mjs"; +export * from "./ar-camera-color.mjs"; export * from "./ar-hit-test.mjs"; export * from "./vr-basic.mjs"; export * from './vr-controllers.mjs'; diff --git a/src/framework/xr/constants.js b/src/framework/xr/constants.js index 0c3c0f01895..5309074cc28 100644 --- a/src/framework/xr/constants.js +++ b/src/framework/xr/constants.js @@ -100,6 +100,10 @@ export const XRTARGETRAY_SCREEN = 'screen'; */ export const XRTARGETRAY_POINTER = 'tracked-pointer'; +export const XREYE_NONE = 'none'; +export const XREYE_LEFT = 'left'; +export const XREYE_RIGHT = 
'right'; + /** * None - input source is not meant to be held in hands. * diff --git a/src/framework/xr/xr-depth-sensing.js b/src/framework/xr/xr-depth-sensing.js index affc289b964..eeb232d54e4 100644 --- a/src/framework/xr/xr-depth-sensing.js +++ b/src/framework/xr/xr-depth-sensing.js @@ -268,10 +268,12 @@ class XrDepthSensing extends EventHandler { * @param {*} view - First XRView of viewer XRPose. * @ignore */ - update(frame, view) { + update(frame, pose) { if (!this._usage) return; + const view = pose.views[0]; + let depthInfoCpu = null; let depthInfoGpu = null; if (this._usage === XRDEPTHSENSINGUSAGE_CPU && view) { diff --git a/src/framework/xr/xr-manager.js b/src/framework/xr/xr-manager.js index 465b9ee8092..6a8a03e1f19 100644 --- a/src/framework/xr/xr-manager.js +++ b/src/framework/xr/xr-manager.js @@ -9,6 +9,7 @@ import { Vec3 } from '../../core/math/vec3.js'; import { Vec4 } from '../../core/math/vec4.js'; import { XRTYPE_INLINE, XRTYPE_VR, XRTYPE_AR, XRDEPTHSENSINGUSAGE_CPU, XRDEPTHSENSINGFORMAT_L8A8 } from './constants.js'; +import { DEVICETYPE_WEBGL1, DEVICETYPE_WEBGL2 } from '../../platform/graphics/constants.js'; import { XrDepthSensing } from './xr-depth-sensing.js'; import { XrDomOverlay } from './xr-dom-overlay.js'; import { XrHitTest } from './xr-hit-test.js'; @@ -17,6 +18,7 @@ import { XrInput } from './xr-input.js'; import { XrLightEstimation } from './xr-light-estimation.js'; import { XrPlaneDetection } from './xr-plane-detection.js'; import { XrAnchors } from './xr-anchors.js'; +import { XrViews } from './xr-views.js'; /** * Callback used by {@link XrManager#endXr} and {@link XrManager#startXr}. @@ -74,6 +76,12 @@ class XrManager extends EventHandler { */ _baseLayer = null; + /** + * @type {XRWebGLBinding|null} + * @ignore + */ + webglBinding = null; + /** * @type {XRReferenceSpace|null} * @ignore @@ -136,22 +144,30 @@ class XrManager extends EventHandler { lightEstimation; /** - * @type {import('../components/camera/component.js').CameraComponent} - * @private - */ - _camera = null; - - /** - * @type {Array<*>} + * Provides access to views. + * + * @type {XrViews} * @ignore */ - views = []; + views; /** - * @type {Array<*>} - * @ignore + * @type {import('../components/camera/component.js').CameraComponent|null} + * @private */ - viewsPool = []; + _camera = null; + + // /** + // * @type {Array<*>} + // * @ignore + // */ + // views = []; + + // /** + // * @type {Array<*>} + // * @ignore + // */ + // viewsPool = []; /** * @type {Vec3} @@ -213,6 +229,7 @@ class XrManager extends EventHandler { this.input = new XrInput(this); this.lightEstimation = new XrLightEstimation(this); this.anchors = new XrAnchors(this); + this.views = new XrViews(this); // TODO // 1. 
HMD class with its params @@ -451,6 +468,10 @@ class XrManager extends EventHandler { dataFormatPreference: dataFormatPreference }; } + + if (options && options.cameraColor && this.views.supportedColor) { + opts.optionalFeatures.push('camera-access'); + } } else if (type === XRTYPE_VR) { opts.optionalFeatures.push('hand-tracking'); } @@ -603,7 +624,7 @@ class XrManager extends EventHandler { this._session = null; this._referenceSpace = null; - this.views = []; + // this.views = []; this._width = 0; this._height = 0; this._type = null; @@ -611,7 +632,8 @@ class XrManager extends EventHandler { // old requestAnimationFrame will never be triggered, // so queue up new tick - this.app.tick(); + if (this.app.systems) + this.app.tick(); }; session.addEventListener('end', onEnd); @@ -636,6 +658,17 @@ class XrManager extends EventHandler { antialias: false }); + if (platform.browser) { + const deviceType = this.app.graphicsDevice.deviceType; + if ((deviceType === DEVICETYPE_WEBGL1 || deviceType === DEVICETYPE_WEBGL2) && window.XRWebGLBinding) { + try { + this.webglBinding = new XRWebGLBinding(session, this.app.graphicsDevice.gl); + } catch(ex) { + this.fire('error', ex); + } + } + } + session.updateRenderState({ baseLayer: this._baseLayer, depthNear: this._depthNear, @@ -705,32 +738,11 @@ class XrManager extends EventHandler { if (!pose) return false; - const lengthOld = this.views.length; - const lengthNew = pose.views.length; - - while (lengthNew > this.views.length) { - let view = this.viewsPool.pop(); - if (!view) { - view = { - viewport: new Vec4(), - projMat: new Mat4(), - viewMat: new Mat4(), - viewOffMat: new Mat4(), - viewInvMat: new Mat4(), - viewInvOffMat: new Mat4(), - projViewOffMat: new Mat4(), - viewMat3: new Mat3(), - position: new Float32Array(3), - rotation: new Quat() - }; - } + const lengthOld = this.views.size; + // const lengthNew = pose.views.length; - this.views.push(view); - } - // remove views from list into pool - while (lengthNew < this.views.length) { - this.viewsPool.push(this.views.pop()); - } + // add views + this.views.update(frame, pose.views); // reset position const posePosition = pose.transform.position; @@ -738,28 +750,10 @@ class XrManager extends EventHandler { this._localPosition.set(posePosition.x, posePosition.y, posePosition.z); this._localRotation.set(poseOrientation.x, poseOrientation.y, poseOrientation.z, poseOrientation.w); - const layer = frame.session.renderState.baseLayer; - - for (let i = 0; i < pose.views.length; i++) { - // for each view, calculate matrices - const viewRaw = pose.views[i]; - const view = this.views[i]; - const viewport = layer.getViewport(viewRaw); - - view.viewport.x = viewport.x; - view.viewport.y = viewport.y; - view.viewport.z = viewport.width; - view.viewport.w = viewport.height; - - view.projMat.set(viewRaw.projectionMatrix); - view.viewMat.set(viewRaw.transform.inverse.matrix); - view.viewInvMat.set(viewRaw.transform.matrix); - } - // update the camera fov properties only when we had 0 views - if (lengthOld === 0 && this.views.length > 0) { + if (lengthOld === 0 && this.views.size > 0) { const viewProjMat = new Mat4(); - const view = this.views[0]; + const view = this.views.list[0]; viewProjMat.copy(view.projMat); const data = viewProjMat.data; @@ -770,7 +764,6 @@ class XrManager extends EventHandler { const nearClip = data[14] / (data[10] - 1); const horizontalFov = false; - const camera = this._camera.camera; camera.setXrProperties({ aspectRatio, @@ -795,7 +788,7 @@ class XrManager extends EventHandler { 
this.lightEstimation.update(frame); if (this.depthSensing.supported) - this.depthSensing.update(frame, pose && pose.views[0]); + this.depthSensing.update(frame, pose); if (this.imageTracking.supported) this.imageTracking.update(frame); diff --git a/src/framework/xr/xr-view.js b/src/framework/xr/xr-view.js new file mode 100644 index 00000000000..b2df2808316 --- /dev/null +++ b/src/framework/xr/xr-view.js @@ -0,0 +1,164 @@ +import { EventHandler } from "../../core/event-handler.js"; +import { Texture } from '../../platform/graphics/texture.js'; +import { Vec4 } from "../../core/math/vec4.js"; +import { Mat3 } from "../../core/math/mat3.js"; +import { Mat4 } from "../../core/math/mat4.js"; + +import { ADDRESS_CLAMP_TO_EDGE, PIXELFORMAT_RGB8, FILTER_LINEAR, PIXELFORMAT_RGBA8 } from '../../platform/graphics/constants.js'; + +class XrView extends EventHandler { + _manager; + _xrView; + + _positionData = new Float32Array(3); + _viewport = new Vec4(); + + _projMat = new Mat4(); + _projViewOffMat = new Mat4(); + _viewMat = new Mat4(); + _viewOffMat = new Mat4(); + _viewMat3 = new Mat3(); + _viewInvMat = new Mat4(); + _viewInvOffMat = new Mat4(); + + _xrCamera = null; + _textureColor = null; + + constructor(manager, xrView) { + super(); + + this._manager = manager; + this._xrView = xrView; + + if (this._manager.views.supportedColor) + this._xrCamera = this._xrView.camera; + + this._updateTextureColor(); + } + + _updateTextureColor() { + if (!this._manager.views.availableColor || !this._xrCamera) + return; + + const binding = this._manager.webglBinding; + if (!binding) + return; + + const texture = binding.getCameraImage(this._xrCamera); + if (!texture) + return; + + if (!this._textureColor) { + this._textureColor = new Texture(this._manager.app.graphicsDevice, { + format: PIXELFORMAT_RGBA8, + mipmaps: false, + flipY: true, + addressU: ADDRESS_CLAMP_TO_EDGE, + addressV: ADDRESS_CLAMP_TO_EDGE, + minFilter: FILTER_LINEAR, + magFilter: FILTER_LINEAR, + width: this._xrCamera.width, + height: this._xrCamera.height, + name: `XrView-${this._xrView.eye}-Color` + }); + this._textureColor.upload(); + } + + // force texture initialization + if (!this._textureColor.impl._glTexture) { + this._textureColor.impl.initialize(this._manager.app.graphicsDevice, this._textureColor); + this._textureColor.impl.upload = () => { }; + this._textureColor._needsUpload = false; + } + + this._textureColor.impl._glCreated = true; + this._textureColor.impl._glTexture = texture; + } + + updateTransforms(transform) { + if (transform) { + this._viewInvOffMat.mul2(transform, this._viewInvMat); + this.viewOffMat.copy(this._viewInvOffMat).invert(); + } else { + this._viewInvOffMat.copy(this._viewInvMat); + this.viewOffMat.copy(this._viewMat); + } + + this._viewMat3.setFromMat4(this._viewOffMat); + this._projViewOffMat.mul2(this._projMat, this._viewOffMat); + + this._positionData[0] = this._viewInvOffMat.data[12]; + this._positionData[1] = this._viewInvOffMat.data[13]; + this._positionData[2] = this._viewInvOffMat.data[14]; + } + + update(frame, xrView) { + this._xrView = xrView; + if (this._manager.views.supportedColor) + this._xrCamera = this._xrView.camera; + + const layer = frame.session.renderState.baseLayer; + + // viewport + const viewport = layer.getViewport(this._xrView); + this._viewport.x = viewport.x; + this._viewport.y = viewport.y; + this._viewport.z = viewport.width; + this._viewport.w = viewport.height; + + // matrices + this._projMat.set(this._xrView.projectionMatrix); + 
this._viewMat.set(this._xrView.transform.inverse.matrix); + this._viewInvMat.set(this._xrView.transform.matrix); + + this._updateTextureColor(); + } + + destroy() { + if (this._textureColor) { + // TODO + // ensure there is no use of this texture after session ended + this._textureColor.impl._glTexture = null; + this._textureColor.destroy(); + this._textureColor = null; + } + } + + get textureColor() { + return this._textureColor; + } + + get eye() { + return this._xrView.eye; + } + + get viewport() { + return this._viewport; + } + + get projMat() { + return this._projMat; + } + + get projViewOffMat() { + return this._projViewOffMat; + } + + get viewOffMat() { + return this._viewOffMat; + } + + get viewInvOffMat() { + return this._viewInvOffMat; + } + + get viewMat3() { + return this._viewMat3; + } + + get positionData() { + return this._positionData; + } +} + +export { XrView }; diff --git a/src/framework/xr/xr-views.js b/src/framework/xr/xr-views.js new file mode 100644 index 00000000000..ee5d6e2465f --- /dev/null +++ b/src/framework/xr/xr-views.js @@ -0,0 +1,94 @@ +import { platform } from '../../core/platform.js'; +import { EventHandler } from "../../core/event-handler.js"; +import { XrView } from "./xr-view.js"; +import { XRTYPE_AR } from "./constants.js"; + +class XrViews extends EventHandler { + _manager; + _index = new Map(); + _list = []; + _indexTemporary = new Map(); + _supportedColor = platform.browser && !!window.XRCamera && !!window.XRWebGLBinding; + _availableColor = false; + + constructor(manager) { + super(); + + this._manager = manager; + this._manager.on('start', this._onSessionStart, this); + this._manager.on('end', this._onSessionEnd, this); + } + + _onSessionStart() { + if (this._manager.type !== XRTYPE_AR) + return; + this._availableColor = this._manager.session.enabledFeatures.indexOf('camera-access') !== -1; + } + + _onSessionEnd() { + for(const view of this._index.values()) { + view.destroy(); + } + this._index.clear(); + this._availableColor = false; + this._list.length = 0; + } + + update(frame, xrViews) { + for(let i = 0; i < xrViews.length; i++) { + this._indexTemporary.set(xrViews[i].eye, xrViews[i]); + } + + for(const [ eye, xrView ] of this._indexTemporary) { + let view = this._index.get(eye); + + if (!view) { + // add new view + view = new XrView(this._manager, xrView); + this._index.set(eye, view); + this._list.push(view); + view.update(frame, xrView); + this.fire('add', view); + } else { + // update existing view0 + view.update(frame, xrView); + } + } + + // remove views + for(const [ eye, view ] of this._index) { + if (this._indexTemporary.has(eye)) + continue; + + view.destroy(); + this._index.delete(eye); + const ind = this._list.indexOf(view); + if (ind !== -1) this._list.splice(ind, 1); + this.fire('remove', view); + } + + this._indexTemporary.clear(); + } + + get(name) { + return this._index.get(name) || null; + } + + get list() { + return this._list; + } + + get size() { + return this._list.length; + } + + get supportedColor() { + return this._supportedColor; + } + + get availableColor() { + return this._availableColor; + } +} + +export { XrViews }; diff --git a/src/scene/renderer/forward-renderer.js b/src/scene/renderer/forward-renderer.js index 79191c46b12..6b6d14c0cce 100644 --- a/src/scene/renderer/forward-renderer.js +++ b/src/scene/renderer/forward-renderer.js @@ -621,11 +621,11 @@ class ForwardRenderer extends Renderer { drawCallback?.(drawCall, i); - if (camera.xr && camera.xr.session && camera.xr.views.length) { + if (camera.xr && 
camera.xr.session && camera.xr.views.size) { const views = camera.xr.views; - for (let v = 0; v < views.length; v++) { - const view = views[v]; + for (let v = 0; v < views.size; v++) { + const view = views.list[v]; device.setViewport(view.viewport.x, view.viewport.y, view.viewport.z, view.viewport.w); @@ -635,7 +635,7 @@ class ForwardRenderer extends Renderer { this.viewInvId.setValue(view.viewInvOffMat.data); this.viewId3.setValue(view.viewMat3.data); this.viewProjId.setValue(view.projViewOffMat.data); - this.viewPosId.setValue(view.position); + this.viewPosId.setValue(view.positionData); if (v === 0) { this.drawInstance(device, drawCall, mesh, style, true); diff --git a/src/scene/renderer/renderer.js b/src/scene/renderer/renderer.js index a5c08aa1cc3..842d4079077 100644 --- a/src/scene/renderer/renderer.js +++ b/src/scene/renderer/renderer.js @@ -295,31 +295,12 @@ class Renderer { let viewCount = 1; if (camera.xr && camera.xr.session) { - let transform; - const parent = camera._node.parent; - if (parent) - transform = parent.getWorldTransform(); - + const transform = camera._node?.parent?.getWorldTransform() || null; const views = camera.xr.views; - viewCount = views.length; + viewCount = views.size; for (let v = 0; v < viewCount; v++) { - const view = views[v]; - - if (parent) { - view.viewInvOffMat.mul2(transform, view.viewInvMat); - view.viewOffMat.copy(view.viewInvOffMat).invert(); - } else { - view.viewInvOffMat.copy(view.viewInvMat); - view.viewOffMat.copy(view.viewMat); - } - - view.viewMat3.setFromMat4(view.viewOffMat); - view.projViewOffMat.mul2(view.projMat, view.viewOffMat); - - view.position[0] = view.viewInvOffMat.data[12]; - view.position[1] = view.viewInvOffMat.data[13]; - view.position[2] = view.viewInvOffMat.data[14]; - + const view = views.list[v]; + view.updateTransforms(transform); camera.frustum.setFromMat4(view.projViewOffMat); } } else { @@ -497,9 +478,9 @@ class Renderer { updateCameraFrustum(camera) { - if (camera.xr && camera.xr.views.length) { + if (camera.xr && camera.xr.views.size) { // calculate frustum based on XR view - const view = camera.xr.views[0]; + const view = camera.xr.views.list[0]; viewProjMat.mul2(view.projMat, view.viewOffMat); camera.frustum.setFromMat4(viewProjMat); return; From 40877655d129ff58f227d0a68840644283ee0960 Mon Sep 17 00:00:00 2001 From: mrmaxm Date: Sun, 5 Nov 2023 15:46:38 +0200 Subject: [PATCH 02/19] docs and lint --- examples/src/examples/xr/ar-basic.mjs | 7 - examples/src/examples/xr/ar-camera-color.mjs | 8 +- src/framework/xr/constants.js | 17 ++ src/framework/xr/xr-depth-sensing.js | 4 +- src/framework/xr/xr-manager.js | 24 +- src/framework/xr/xr-view.js | 267 ++++++++++++++----- src/framework/xr/xr-views.js | 148 +++++++--- 7 files changed, 346 insertions(+), 129 deletions(-) diff --git a/examples/src/examples/xr/ar-basic.mjs b/examples/src/examples/xr/ar-basic.mjs index c41851c4806..ad68776a507 100644 --- a/examples/src/examples/xr/ar-basic.mjs +++ b/examples/src/examples/xr/ar-basic.mjs @@ -15,13 +15,6 @@ async function example({ canvas }) { if (!el) { el = document.createElement('div'); el.classList.add('message'); - el.style.position = 'absolute'; - el.style.bottom = '96px'; - el.style.right = '0'; - el.style.padding = '8px 16px'; - el.style.fontFamily = 'Helvetica, Arial, sans-serif'; - el.style.color = '#fff'; - el.style.backgroundColor = 'rgba(0, 0, 0, 0.5)'; document.body.append(el); } el.textContent = msg; diff --git a/examples/src/examples/xr/ar-camera-color.mjs b/examples/src/examples/xr/ar-camera-color.mjs 
index 00c1b5941f3..5c1b9ae5ec3 100644 --- a/examples/src/examples/xr/ar-camera-color.mjs +++ b/examples/src/examples/xr/ar-camera-color.mjs @@ -95,7 +95,7 @@ async function example({ canvas }) { const activate = function () { if (app.xr.isAvailable(pc.XRTYPE_AR)) { c.camera.startXr(pc.XRTYPE_AR, pc.XRSPACE_LOCALFLOOR, { - cameraColor: true, + cameraColor: true, // request access to camera color callback: function (err) { if (err) message("WebXR Immersive AR failed to start: " + err.message); } @@ -151,17 +151,20 @@ async function example({ canvas }) { }); app.on('update', () => { + // if camera color is available if (app.xr.views.availableColor) { for(let i = 0; i < app.xr.views.size; i++) { const view = app.xr.views.list[i]; - if (!view.textureColor) + if (!view.textureColor) // check if color texture is available continue; + // apply camera color texture to material diffuse channel if (!material.diffuseMap) { material.diffuseMap = view.textureColor; material.update(); } + // debug draw camera color texture on the screen app.drawTexture(0.5, -0.5, 1, -1, view.textureColor); } } @@ -171,6 +174,7 @@ async function example({ canvas }) { if (!material.diffuseMap) return; + // clear camera color texture when XR session ends material.diffuseMap = null; material.update(); }) diff --git a/src/framework/xr/constants.js b/src/framework/xr/constants.js index 5309074cc28..fe464eed166 100644 --- a/src/framework/xr/constants.js +++ b/src/framework/xr/constants.js @@ -100,8 +100,25 @@ export const XRTARGETRAY_SCREEN = 'screen'; */ export const XRTARGETRAY_POINTER = 'tracked-pointer'; +/** + * None - view associated with a monoscopic screen, such as mobile phone screens. + * + * @type {string} + */ export const XREYE_NONE = 'none'; + +/** + * Left - view associated with left eye. + * + * @type {string} + */ export const XREYE_LEFT = 'left'; + +/** + * Right - view associated with right eye. + * + * @type {string} + */ export const XREYE_RIGHT = 'right'; /** diff --git a/src/framework/xr/xr-depth-sensing.js b/src/framework/xr/xr-depth-sensing.js index eeb232d54e4..affc289b964 100644 --- a/src/framework/xr/xr-depth-sensing.js +++ b/src/framework/xr/xr-depth-sensing.js @@ -268,12 +268,10 @@ class XrDepthSensing extends EventHandler { * @param {*} view - First XRView of viewer XRPose. * @ignore */ - update(frame, pose) { + update(frame, view) { if (!this._usage) return; - const view = pose.views[0]; - let depthInfoCpu = null; let depthInfoGpu = null; if (this._usage === XRDEPTHSENSINGUSAGE_CPU && view) { diff --git a/src/framework/xr/xr-manager.js b/src/framework/xr/xr-manager.js index 6a8a03e1f19..11b6fdb71b3 100644 --- a/src/framework/xr/xr-manager.js +++ b/src/framework/xr/xr-manager.js @@ -2,11 +2,9 @@ import { Debug } from "../../core/debug.js"; import { EventHandler } from '../../core/event-handler.js'; import { platform } from '../../core/platform.js'; -import { Mat3 } from '../../core/math/mat3.js'; import { Mat4 } from '../../core/math/mat4.js'; import { Quat } from '../../core/math/quat.js'; import { Vec3 } from '../../core/math/vec3.js'; -import { Vec4 } from '../../core/math/vec4.js'; import { XRTYPE_INLINE, XRTYPE_VR, XRTYPE_AR, XRDEPTHSENSINGUSAGE_CPU, XRDEPTHSENSINGFORMAT_L8A8 } from './constants.js'; import { DEVICETYPE_WEBGL1, DEVICETYPE_WEBGL2 } from '../../platform/graphics/constants.js'; @@ -144,7 +142,7 @@ class XrManager extends EventHandler { lightEstimation; /** - * Provides access to views. + * Provides access to views and their capabilities. 
* * @type {XrViews} * @ignore @@ -157,18 +155,6 @@ class XrManager extends EventHandler { */ _camera = null; - // /** - // * @type {Array<*>} - // * @ignore - // */ - // views = []; - - // /** - // * @type {Array<*>} - // * @ignore - // */ - // viewsPool = []; - /** * @type {Vec3} * @private @@ -624,7 +610,6 @@ class XrManager extends EventHandler { this._session = null; this._referenceSpace = null; - // this.views = []; this._width = 0; this._height = 0; this._type = null; @@ -662,8 +647,8 @@ class XrManager extends EventHandler { const deviceType = this.app.graphicsDevice.deviceType; if ((deviceType === DEVICETYPE_WEBGL1 || deviceType === DEVICETYPE_WEBGL2) && window.XRWebGLBinding) { try { - this.webglBinding = new XRWebGLBinding(session, this.app.graphicsDevice.gl); - } catch(ex) { + this.webglBinding = new XRWebGLBinding(session, this.app.graphicsDevice.gl); // eslint-disable-line no-undef + } catch (ex) { this.fire('error', ex); } } @@ -739,7 +724,6 @@ class XrManager extends EventHandler { if (!pose) return false; const lengthOld = this.views.size; - // const lengthNew = pose.views.length; // add views this.views.update(frame, pose.views); @@ -788,7 +772,7 @@ class XrManager extends EventHandler { this.lightEstimation.update(frame); if (this.depthSensing.supported) - this.depthSensing.update(frame, pose); + this.depthSensing.update(frame, pose && pose.views[0]); if (this.imageTracking.supported) this.imageTracking.update(frame); diff --git a/src/framework/xr/xr-view.js b/src/framework/xr/xr-view.js index b2df2808316..ce764487942 100644 --- a/src/framework/xr/xr-view.js +++ b/src/framework/xr/xr-view.js @@ -1,32 +1,103 @@ -import { EventHandler } from "../../core/event-handler.js"; import { Texture } from '../../platform/graphics/texture.js'; import { Vec4 } from "../../core/math/vec4.js"; import { Mat3 } from "../../core/math/mat3.js"; import { Mat4 } from "../../core/math/mat4.js"; -import { ADDRESS_CLAMP_TO_EDGE, PIXELFORMAT_RGB8, FILTER_LINEAR, PIXELFORMAT_RGBA8 } from '../../platform/graphics/constants.js'; - -class XrView extends EventHandler { +import { ADDRESS_CLAMP_TO_EDGE, FILTER_LINEAR, PIXELFORMAT_RGB8 } from '../../platform/graphics/constants.js'; + +/** + * Represents XR View which represents a screen (mobile phone context) or an eye (HMD context). + * + * @category XR + */ +class XrView { + /** + * @type {import('./xr-manager.js').XrManager} + * @private + */ _manager; + + /** + * @type {XRView} + * @private + */ _xrView; + /** + * @type {Float32Array} + * @private + */ _positionData = new Float32Array(3); + + /** + * @type {Vec4} + * @private + */ _viewport = new Vec4(); + /** + * @type {Mat4} + * @private + */ _projMat = new Mat4(); + + /** + * @type {Mat4} + * @private + */ _projViewOffMat = new Mat4(); + + /** + * @type {Mat4} + * @private + */ _viewMat = new Mat4(); + + /** + * @type {Mat4} + * @private + */ _viewOffMat = new Mat4(); + + /** + * @type {Mat3} + * @private + */ _viewMat3 = new Mat3(); + + /** + * @type {Mat4} + * @private + */ _viewInvMat = new Mat4(); + + /** + * @type {Mat4} + * @private + */ _viewInvOffMat = new Mat4(); + /** + * @type {XRCamera} + * @private + */ _xrCamera = null; + + /** + * @type {Texture|null} + * @private + */ _textureColor = null; + /** + * Create a new XrView instance. + * + * @param {import('./xr-manager.js').XrManager} manager - WebXR Manager. + * @param {XRView} xrView - [XRView](https://developer.mozilla.org/en-US/docs/Web/API/XRView) + * object that is created by WebXR API. 
+ * @hideconstructor + */ constructor(manager, xrView) { - super(); - this._manager = manager; this._xrView = xrView; @@ -36,6 +107,121 @@ class XrView extends EventHandler { this._updateTextureColor(); } + /** + * Texture associated with this view's camera color. Equals to null if camera color is + * not available or not supported. + * + * @type {Texture|null} + * @readonly + */ + get textureColor() { + return this._textureColor; + } + + /** + * An eye with which this view is associated. Can be any of: + * + * - {@link XREYE_NONE}: None - inidcates a monoscopic view (likely mobile phone screen). + * - {@link XREYE_LEFT}: Left - indicates left eye view. + * - {@link XREYE_RIGHT}: Right - indicates a right eye view. + * + * @type {string} + * @readonly + */ + get eye() { + return this._xrView.eye; + } + + /** + * A Vec4 (x, y, width, height) that represents a view's viewport. For monoscopic screen + * it will define fullscreen view, but for stereoscopic views (left/right eye) it will define + * a part of a whole screen that view is occupying. + * + * @type {Vec4} + * @readonly + */ + get viewport() { + return this._viewport; + } + + /** + * @type {Mat4} + * @ignore + */ + get projMat() { + return this._projMat; + } + + /** + * @type {Mat4} + * @ignore + */ + get projViewOffMat() { + return this._projViewOffMat; + } + + /** + * @type {Mat4} + * @ignore + */ + get viewOffMat() { + return this._viewOffMat; + } + + /** + * @type {Mat4} + * @ignore + */ + get viewInvOffMat() { + return this._viewInvOffMat; + } + + /** + * @type {Mat3} + * @ignore + */ + get viewMat3() { + return this._viewMat3; + } + + /** + * @type {Float32Array} + * @ignore + */ + get positionData() { + return this._positionData; + } + + /** + * @param {*} frame - XRFrame from requestAnimationFrame callback. + * @param {XRView} xrView - XRView from WebXR API. + * @ignore + */ + update(frame, xrView) { + this._xrView = xrView; + if (this._manager.views.supportedColor) + this._xrCamera = this._xrView.camera; + + const layer = frame.session.renderState.baseLayer; + + // viewport + const viewport = layer.getViewport(this._xrView); + this._viewport.x = viewport.x; + this._viewport.y = viewport.y; + this._viewport.z = viewport.width; + this._viewport.w = viewport.height; + + // matrices + this._projMat.set(this._xrView.projectionMatrix); + this._viewMat.set(this._xrView.transform.inverse.matrix); + this._viewInvMat.set(this._xrView.transform.matrix); + + this._updateTextureColor(); + } + + /** + * @private + */ _updateTextureColor() { if (!this._manager.views.availableColor || !this._xrCamera) return; @@ -50,9 +236,9 @@ class XrView extends EventHandler { if (!this._textureColor) { this._textureColor = new Texture(this._manager.app.graphicsDevice, { - format: PIXELFORMAT_RGBA8, + format: PIXELFORMAT_RGB8, mipmaps: false, - flipY: true, + flipY: false, addressU: ADDRESS_CLAMP_TO_EDGE, addressV: ADDRESS_CLAMP_TO_EDGE, minFilter: FILTER_LINEAR, @@ -75,6 +261,10 @@ class XrView extends EventHandler { this._textureColor.impl._glTexture = texture; } + /** + * @param {Mat4|null} transform - World Transform of a parents GraphNode. 
+ * @ignore + */ updateTransforms(transform) { if (transform) { this._viewInvOffMat.mul2(transform, this._viewInvMat); @@ -92,28 +282,9 @@ class XrView extends EventHandler { this._positionData[2] = this._viewInvOffMat.data[14]; } - update(frame, xrView) { - this._xrView = xrView; - if (this._manager.views.supportedColor) - this._xrCamera = this._xrView.camera; - - const layer = frame.session.renderState.baseLayer; - - // viewport - const viewport = layer.getViewport(this._xrView); - this._viewport.x = viewport.x; - this._viewport.y = viewport.y; - this._viewport.z = viewport.width; - this._viewport.w = viewport.height; - - // matrices - this._projMat.set(this._xrView.projectionMatrix); - this._viewMat.set(this._xrView.transform.inverse.matrix); - this._viewInvMat.set(this._xrView.transform.matrix); - - this._updateTextureColor(); - } - + /** + * @ignore + */ destroy() { if (this._textureColor) { // TODO @@ -123,42 +294,6 @@ class XrView extends EventHandler { this._textureColor = null; } } - - get textureColor() { - return this._textureColor; - } - - get eye() { - return this._xrView.eye; - } - - get viewport() { - return this._viewport; - } - - get projMat() { - return this._projMat; - } - - get projViewOffMat() { - return this._projViewOffMat; - } - - get viewOffMat() { - return this._viewOffMat; - } - - get viewInvOffMat() { - return this._viewInvOffMat; - } - - get viewMat3() { - return this._viewMat3; - } - - get positionData() { - return this._positionData; - } } export { XrView }; diff --git a/src/framework/xr/xr-views.js b/src/framework/xr/xr-views.js index ee5d6e2465f..f88176e9b01 100644 --- a/src/framework/xr/xr-views.js +++ b/src/framework/xr/xr-views.js @@ -3,14 +3,53 @@ import { EventHandler } from "../../core/event-handler.js"; import { XrView } from "./xr-view.js"; import { XRTYPE_AR } from "./constants.js"; +/** + * Provides access to list of {@link XrView}'s. And information about their capabilities, + * such as support and availability of view's camera color texture. + * + * @category XR + */ class XrViews extends EventHandler { + /** + * @type {import('./xr-manager.js').XrManager} + * @private + */ _manager; + + /** + * @type {Map} + * @private + */ _index = new Map(); + + /** + * @type {Map} + * @private + */ + _indexTmp = new Map(); + + /** + * @type {XrView[]} + * @private + */ _list = []; - _indexTemporary = new Map(); + + /** + * @type {boolean} + * @private + */ _supportedColor = platform.browser && !!window.XRCamera && !!window.XRWebGLBinding; + + /** + * @type {boolean} + * @private + */ _availableColor = false; + /** + * @param {import('./xr-manager.js').XrManager} manager - WebXR Manager. + * @hideconstructor + */ constructor(manager) { super(); @@ -19,27 +58,65 @@ class XrViews extends EventHandler { this._manager.on('end', this._onSessionEnd, this); } - _onSessionStart() { - if (this._manager.type !== XRTYPE_AR) - return; - this._availableColor = this._manager.session.enabledFeatures.indexOf('camera-access') !== -1; + // TODO + // events + + /** + * An array of {@link XrView}'s of this session. Views are not available straight + * away on session start, and can be added/removed mid-session. So use of add/remove + * events is required for accessing views. + * + * @type {XrView[]} + * @readonly + */ + get list() { + return this._list; } - _onSessionEnd() { - for(const view of this._index.values()) { - view.destroy(); - } - this._index.clear(); - this._availableColor = false; - this._list.length = 0; + /** + * How many views are available. 
Views can be added/removed mid-session by underlying + * WebXR system. + * + * @type {number} + * @readonly + */ + get size() { + return this._list.length; } + /** + * Check if Camera Color is supported. It might be still unavailable even if requested, + * based on hardware capabilities and granted permissions. + * + * @type {boolean} + * @readonly + */ + get supportedColor() { + return this._supportedColor; + } + + /** + * Check if Camera Color is available. This information becomes available only after + * session has started. + * + * @type {boolean} + * @readonly + */ + get availableColor() { + return this._availableColor; + } + + /** + * @param {*} frame - XRFrame from requestAnimationFrame callback. + * @param {XRView} xrView - XRView from WebXR API. + * @ignore + */ update(frame, xrViews) { - for(let i = 0; i < xrViews.length; i++) { - this._indexTemporary.set(xrViews[i].eye, xrViews[i]); + for (let i = 0; i < xrViews.length; i++) { + this._indexTmp.set(xrViews[i].eye, xrViews[i]); } - for(const [ eye, xrView ] of this._indexTemporary) { + for (const [eye, xrView] of this._indexTmp) { let view = this._index.get(eye); if (!view) { @@ -56,8 +133,8 @@ class XrViews extends EventHandler { } // remove views - for(const [ eye, view ] of this._index) { - if (this._indexTemporary.has(eye)) + for (const [eye, view] of this._index) { + if (this._indexTmp.has(eye)) continue; view.destroy(); @@ -67,27 +144,36 @@ class XrViews extends EventHandler { this.fire('remove', view); } - this._indexTemporary.clear(); + this._indexTmp.clear(); } - get(name) { + /** + * @param {string} eye - An XREYE_* view is associated with. Can be 'none' for monoscope views. + * @returns {XrView|null} View or null if view of such eye is not available. + */ + get(eye) { return this._index.get(name) || null; } - get list() { - return this._list; - } - - get size() { - return this._list.length; - } - - get supportedColor() { - return this._supportedColor; + /** + * @private + */ + _onSessionStart() { + if (this._manager.type !== XRTYPE_AR) + return; + this._availableColor = this._manager.session.enabledFeatures.indexOf('camera-access') !== -1; } - get availableColor() { - return this._availableColor; + /** + * @private + */ + _onSessionEnd() { + for (const view of this._index.values()) { + view.destroy(); + } + this._index.clear(); + this._availableColor = false; + this._list.length = 0; } } From 7b0007b94ae33598dd8f57bb673056ca4c05bcf0 Mon Sep 17 00:00:00 2001 From: mrmaxm Date: Sun, 5 Nov 2023 15:53:35 +0200 Subject: [PATCH 03/19] xr views events --- src/framework/xr/xr-views.js | 24 ++++++++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/src/framework/xr/xr-views.js b/src/framework/xr/xr-views.js index f88176e9b01..bf46c249081 100644 --- a/src/framework/xr/xr-views.js +++ b/src/framework/xr/xr-views.js @@ -58,8 +58,28 @@ class XrViews extends EventHandler { this._manager.on('end', this._onSessionEnd, this); } - // TODO - // events + /** + * Fired when view has been added. Views are not available straight away on session start + * and are added mid-session. They can be added/removed mid session by underlyng system. + * + * @event XrViews#add + * @param {XrView} view - XrView that has been added. + * @example + * xr.views.on('add', function (view) { + * // view that has been added + * }); + */ + + /** + * Fired when view has been removed. They can be added/removed mid session by underlyng system. + * + * @event XrViews#remove + * @param {XrView} view - XrView that has been removed. 
+ * @example + * xr.views.on('remove', function (view) { + * // view that has been added + * }); + */ /** * An array of {@link XrView}'s of this session. Views are not available straight From 368d551006da6f7db3ec02212976ed98fb79e271 Mon Sep 17 00:00:00 2001 From: mrmaxm Date: Thu, 9 Nov 2023 13:30:07 +0200 Subject: [PATCH 04/19] implement camera color texture copying --- examples/src/examples/xr/ar-camera-color.mjs | 2 +- src/framework/xr/xr-manager.js | 4 +- src/framework/xr/xr-view.js | 75 +++++++++++++++----- src/framework/xr/xr-views.js | 4 -- 4 files changed, 61 insertions(+), 24 deletions(-) diff --git a/examples/src/examples/xr/ar-camera-color.mjs b/examples/src/examples/xr/ar-camera-color.mjs index 5c1b9ae5ec3..ddfb4f369cb 100644 --- a/examples/src/examples/xr/ar-camera-color.mjs +++ b/examples/src/examples/xr/ar-camera-color.mjs @@ -165,7 +165,7 @@ async function example({ canvas }) { } // debug draw camera color texture on the screen - app.drawTexture(0.5, -0.5, 1, -1, view.textureColor); + app.drawTexture(0.5, -0.5, 1, 1, view.textureColor); } } }); diff --git a/src/framework/xr/xr-manager.js b/src/framework/xr/xr-manager.js index 64bcfade0e3..15d02155783 100644 --- a/src/framework/xr/xr-manager.js +++ b/src/framework/xr/xr-manager.js @@ -417,6 +417,8 @@ class XrManager extends EventHandler { optionalFeatures: [] }; + const webgl = this.app.graphicsDevice?.isWebGL1 || this.app.graphicsDevice?.isWebGL2; + if (type === XRTYPE_AR) { opts.optionalFeatures.push('light-estimation'); opts.optionalFeatures.push('hit-test'); @@ -462,7 +464,7 @@ class XrManager extends EventHandler { }; } - if (options && options.cameraColor && this.views.supportedColor) { + if (webgl && options && options.cameraColor && this.views.supportedColor) { opts.optionalFeatures.push('camera-access'); } } else if (type === XRTYPE_VR) { diff --git a/src/framework/xr/xr-view.js b/src/framework/xr/xr-view.js index ce764487942..6eae5da01c9 100644 --- a/src/framework/xr/xr-view.js +++ b/src/framework/xr/xr-view.js @@ -112,7 +112,6 @@ class XrView { * not available or not supported. * * @type {Texture|null} - * @readonly */ get textureColor() { return this._textureColor; @@ -126,7 +125,6 @@ class XrView { * - {@link XREYE_RIGHT}: Right - indicates a right eye view. * * @type {string} - * @readonly */ get eye() { return this._xrView.eye; @@ -138,7 +136,6 @@ class XrView { * a part of a whole screen that view is occupying. * * @type {Vec4} - * @readonly */ get viewport() { return this._viewport; @@ -199,7 +196,7 @@ class XrView { */ update(frame, xrView) { this._xrView = xrView; - if (this._manager.views.supportedColor) + if (this._manager.views.availableColor) this._xrCamera = this._xrView.camera; const layer = frame.session.renderState.baseLayer; @@ -234,31 +231,68 @@ class XrView { if (!texture) return; + const device = this._manager.app.graphicsDevice; + const gl = device.gl; + const attachmentBaseConstant = device.isWebGL2 ? gl.COLOR_ATTACHMENT0 : (device.extDrawBuffers?.COLOR_ATTACHMENT0_WEBGL ?? 
gl.COLOR_ATTACHMENT0); + + const width = this._xrCamera.width; + const height = this._xrCamera.height; + if (!this._textureColor) { + // color texture this._textureColor = new Texture(this._manager.app.graphicsDevice, { format: PIXELFORMAT_RGB8, mipmaps: false, - flipY: false, addressU: ADDRESS_CLAMP_TO_EDGE, addressV: ADDRESS_CLAMP_TO_EDGE, minFilter: FILTER_LINEAR, magFilter: FILTER_LINEAR, - width: this._xrCamera.width, - height: this._xrCamera.height, + width: width, + height: height, name: `XrView-${this._xrView.eye}-Color` }); + + // force initialize texture this._textureColor.upload(); - } - // force texture initialization - if (!this._textureColor.impl._glTexture) { - this._textureColor.impl.initialize(this._manager.app.graphicsDevice, this._textureColor); - this._textureColor.impl.upload = () => { }; - this._textureColor._needsUpload = false; + // create frame buffer to read from + this._frameBufferSource = gl.createFramebuffer(); + + // create frame buffer to write to + this._frameBuffer = gl.createFramebuffer(); } - this._textureColor.impl._glCreated = true; - this._textureColor.impl._glTexture = texture; + // set frame buffer to read from + device.setFramebuffer(this._frameBufferSource); + gl.framebufferTexture2D( + gl.FRAMEBUFFER, + attachmentBaseConstant, + gl.TEXTURE_2D, + texture, + 0 + ); + + // set frame buffer to write to + device.setFramebuffer(this._frameBuffer); + gl.framebufferTexture2D( + gl.FRAMEBUFFER, + attachmentBaseConstant, + gl.TEXTURE_2D, + this._textureColor.impl._glTexture, + 0 + ); + + // bind buffers + gl.bindFramebuffer(gl.READ_FRAMEBUFFER, this._frameBufferSource); + let ready = gl.checkFramebufferStatus(gl.FRAMEBUFFER) === gl.FRAMEBUFFER_COMPLETE; + + gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, this._frameBuffer); + if (ready) ready = gl.checkFramebufferStatus(gl.FRAMEBUFFER) === gl.FRAMEBUFFER_COMPLETE; + + if (ready) { + // copy buffers with flip Y + gl.blitFramebuffer(0, height, width, 0, 0, 0, width, height, gl.COLOR_BUFFER_BIT, gl.NEAREST); + } } /** @@ -287,11 +321,16 @@ class XrView { */ destroy() { if (this._textureColor) { - // TODO - // ensure there is no use of this texture after session ended - this._textureColor.impl._glTexture = null; this._textureColor.destroy(); this._textureColor = null; + + const gl = this._manager.app.graphicsDevice.gl; + + gl.deleteFramebuffer(this._frameBufferSource); + this._frameBufferSource = null; + + gl.deleteFramebuffer(this._frameBuffer); + this._frameBuffer = null; } } } diff --git a/src/framework/xr/xr-views.js b/src/framework/xr/xr-views.js index bf46c249081..670d48dab4f 100644 --- a/src/framework/xr/xr-views.js +++ b/src/framework/xr/xr-views.js @@ -87,7 +87,6 @@ class XrViews extends EventHandler { * events is required for accessing views. * * @type {XrView[]} - * @readonly */ get list() { return this._list; @@ -98,7 +97,6 @@ class XrViews extends EventHandler { * WebXR system. * * @type {number} - * @readonly */ get size() { return this._list.length; @@ -109,7 +107,6 @@ class XrViews extends EventHandler { * based on hardware capabilities and granted permissions. * * @type {boolean} - * @readonly */ get supportedColor() { return this._supportedColor; @@ -120,7 +117,6 @@ class XrViews extends EventHandler { * session has started. 
* * @type {boolean} - * @readonly */ get availableColor() { return this._availableColor; From fc1f31d64c8c9f962d6331833ccf253dc4ea6afc Mon Sep 17 00:00:00 2001 From: mrmaxm Date: Fri, 24 Nov 2023 11:21:03 +0200 Subject: [PATCH 05/19] PR comments --- examples/src/examples/xr/ar-camera-color.mjs | 2 +- src/framework/xr/xr-manager.js | 4 ++-- src/framework/xr/xr-view.js | 2 +- src/framework/xr/xr-views.js | 10 ---------- src/scene/renderer/forward-renderer.js | 4 ++-- src/scene/renderer/renderer.js | 4 ++-- 6 files changed, 8 insertions(+), 18 deletions(-) diff --git a/examples/src/examples/xr/ar-camera-color.mjs b/examples/src/examples/xr/ar-camera-color.mjs index ddfb4f369cb..ffc06325413 100644 --- a/examples/src/examples/xr/ar-camera-color.mjs +++ b/examples/src/examples/xr/ar-camera-color.mjs @@ -153,7 +153,7 @@ async function example({ canvas }) { app.on('update', () => { // if camera color is available if (app.xr.views.availableColor) { - for(let i = 0; i < app.xr.views.size; i++) { + for(let i = 0; i < app.xr.views.list.length; i++) { const view = app.xr.views.list[i]; if (!view.textureColor) // check if color texture is available continue; diff --git a/src/framework/xr/xr-manager.js b/src/framework/xr/xr-manager.js index c16fa12d499..b7353c862a1 100644 --- a/src/framework/xr/xr-manager.js +++ b/src/framework/xr/xr-manager.js @@ -773,7 +773,7 @@ class XrManager extends EventHandler { if (!pose) return false; - const lengthOld = this.views.size; + const lengthOld = this.views.list.length; // add views this.views.update(frame, pose.views); @@ -785,7 +785,7 @@ class XrManager extends EventHandler { this._localRotation.set(poseOrientation.x, poseOrientation.y, poseOrientation.z, poseOrientation.w); // update the camera fov properties only when we had 0 views - if (lengthOld === 0 && this.views.size > 0) { + if (lengthOld === 0 && this.views.list.length > 0) { const viewProjMat = new Mat4(); const view = this.views.list[0]; diff --git a/src/framework/xr/xr-view.js b/src/framework/xr/xr-view.js index 6eae5da01c9..845322ca964 100644 --- a/src/framework/xr/xr-view.js +++ b/src/framework/xr/xr-view.js @@ -240,7 +240,7 @@ class XrView { if (!this._textureColor) { // color texture - this._textureColor = new Texture(this._manager.app.graphicsDevice, { + this._textureColor = new Texture(device, { format: PIXELFORMAT_RGB8, mipmaps: false, addressU: ADDRESS_CLAMP_TO_EDGE, diff --git a/src/framework/xr/xr-views.js b/src/framework/xr/xr-views.js index 670d48dab4f..5ccb265712f 100644 --- a/src/framework/xr/xr-views.js +++ b/src/framework/xr/xr-views.js @@ -92,16 +92,6 @@ class XrViews extends EventHandler { return this._list; } - /** - * How many views are available. Views can be added/removed mid-session by underlying - * WebXR system. - * - * @type {number} - */ - get size() { - return this._list.length; - } - /** * Check if Camera Color is supported. It might be still unavailable even if requested, * based on hardware capabilities and granted permissions. 
diff --git a/src/scene/renderer/forward-renderer.js b/src/scene/renderer/forward-renderer.js index 6b6d14c0cce..8e459a3f20e 100644 --- a/src/scene/renderer/forward-renderer.js +++ b/src/scene/renderer/forward-renderer.js @@ -621,10 +621,10 @@ class ForwardRenderer extends Renderer { drawCallback?.(drawCall, i); - if (camera.xr && camera.xr.session && camera.xr.views.size) { + if (camera.xr && camera.xr.session && camera.xr.views.list.length) { const views = camera.xr.views; - for (let v = 0; v < views.size; v++) { + for (let v = 0; v < views.list.length; v++) { const view = views.list[v]; device.setViewport(view.viewport.x, view.viewport.y, view.viewport.z, view.viewport.w); diff --git a/src/scene/renderer/renderer.js b/src/scene/renderer/renderer.js index 842d4079077..e2219712fed 100644 --- a/src/scene/renderer/renderer.js +++ b/src/scene/renderer/renderer.js @@ -297,7 +297,7 @@ class Renderer { if (camera.xr && camera.xr.session) { const transform = camera._node?.parent?.getWorldTransform() || null; const views = camera.xr.views; - viewCount = views.size; + viewCount = views.list.length; for (let v = 0; v < viewCount; v++) { const view = views.list[v]; view.updateTransforms(transform); @@ -478,7 +478,7 @@ class Renderer { updateCameraFrustum(camera) { - if (camera.xr && camera.xr.views.size) { + if (camera.xr && camera.xr.views.list.length) { // calculate frustum based on XR view const view = camera.xr.views.list[0]; viewProjMat.mul2(view.projMat, view.viewOffMat); From e1e27f2bb936b8025ec8aaa2d0d1fdc8642f4fbc Mon Sep 17 00:00:00 2001 From: mrmaxm Date: Fri, 24 Nov 2023 14:31:13 +0200 Subject: [PATCH 06/19] wip --- examples/src/examples/xr/ar-camera-depth.mjs | 221 +++++++++++++++++++ examples/src/examples/xr/index.mjs | 1 + src/framework/xr/xr-depth-sensing.js | 40 ++-- src/framework/xr/xr-view.js | 204 +++++++++++++---- src/framework/xr/xr-views.js | 86 ++++++++ 5 files changed, 494 insertions(+), 58 deletions(-) create mode 100644 examples/src/examples/xr/ar-camera-depth.mjs diff --git a/examples/src/examples/xr/ar-camera-depth.mjs b/examples/src/examples/xr/ar-camera-depth.mjs new file mode 100644 index 00000000000..9bdfeaf162b --- /dev/null +++ b/examples/src/examples/xr/ar-camera-depth.mjs @@ -0,0 +1,221 @@ +import * as pc from 'playcanvas'; + +/** + * @typedef {import('../../options.mjs').ExampleOptions} ExampleOptions + * @param {import('../../options.mjs').ExampleOptions} options - The example options. + * @returns {Promise} The example application. + */ +async function example({ canvas }) { + /** + * @param {string} msg - The message. 
+ */ + const message = function (msg) { + /** @type {HTMLDivElement} */ + let el = document.querySelector('.message'); + if (!el) { + el = document.createElement('div'); + el.classList.add('message'); + el.style.position = 'absolute'; + el.style.bottom = '96px'; + el.style.right = '0'; + el.style.padding = '8px 16px'; + el.style.fontFamily = 'Helvetica, Arial, sans-serif'; + el.style.color = '#fff'; + el.style.backgroundColor = 'rgba(0, 0, 0, 0.5)'; + document.body.append(el); + } + el.textContent = msg; + }; + + const app = new pc.Application(canvas, { + mouse: new pc.Mouse(canvas), + touch: new pc.TouchDevice(canvas), + keyboard: new pc.Keyboard(window), + graphicsDeviceOptions: { alpha: true } + }); + + app.setCanvasFillMode(pc.FILLMODE_FILL_WINDOW); + app.setCanvasResolution(pc.RESOLUTION_AUTO); + + // Ensure canvas is resized when window changes size + const resize = () => app.resizeCanvas(); + window.addEventListener('resize', resize); + app.on('destroy', () => { + window.removeEventListener('resize', resize); + }); + + // use device pixel ratio + app.graphicsDevice.maxPixelRatio = window.devicePixelRatio; + + app.start(); + + // create camera + const c = new pc.Entity(); + c.addComponent('camera', { + clearColor: new pc.Color(0, 0, 0, 0), + farClip: 10000 + }); + app.root.addChild(c); + + const l = new pc.Entity(); + l.addComponent("light", { + type: "spot", + range: 30 + }); + l.translate(0, 10, 0); + app.root.addChild(l); + + const material = new pc.StandardMaterial(); + + const materialDepth = new pc.Material(); + materialDepth.cull = pc.CULLFACE_NONE; + materialDepth.shader = app.scene.immediate.getShader('textureDepthSensing', /* glsl */ ` + varying vec2 uv0; + uniform sampler2D colorMap; + uniform mat4 matrix_depth_uv; + uniform float depth_raw_to_meters; + + void main (void) { + vec2 texCoord = (matrix_depth_uv * vec4(uv0.xy, 0.0, 1.0)).xy; + vec2 packedDepth = texture2D(colorMap, texCoord).ra; + float depth = dot(packedDepth, vec2(255.0, 256.0 * 255.0)) * depth_raw_to_meters; // m + depth = 1.0 - min(depth / 8.0, 1.0); // 0..1 = 0m..8m + gl_FragColor = vec4(depth, depth, depth, 1.0); + }`); + materialDepth.update(); + + /** + * @param {number} x - The x coordinate. + * @param {number} y - The y coordinate. + * @param {number} z - The z coordinate. 
+ */ + const createCube = function (x, y, z) { + const cube = new pc.Entity(); + cube.addComponent("render", { + type: "box" + }); + cube.render.material = material; + cube.setLocalScale(0.5, 0.5, 0.5); + cube.translate(x * 0.5, y, z * 0.5); + app.root.addChild(cube); + }; + + // create a grid of cubes + const SIZE = 4; + for (let x = 0; x < SIZE; x++) { + for (let y = 0; y < SIZE; y++) { + createCube(2 * x - SIZE, 0.25, 2 * y - SIZE); + } + } + + if (app.xr.supported) { + const activate = function () { + if (app.xr.isAvailable(pc.XRTYPE_AR)) { + c.camera.startXr(pc.XRTYPE_AR, pc.XRSPACE_LOCALFLOOR, { + depthSensing: { // request access to camera depth + usagePreference: pc.XRDEPTHSENSINGUSAGE_GPU, + dataFormatPreference: pc.XRDEPTHSENSINGFORMAT_F32 + }, + callback: function (err) { + if (err) message("WebXR Immersive AR failed to start: " + err.message); + } + }); + } else { + message("Immersive AR is not available"); + } + }; + + app.mouse.on("mousedown", function () { + if (!app.xr.active) + activate(); + }); + + if (app.touch) { + app.touch.on("touchend", function (evt) { + if (!app.xr.active) { + // if not in VR, activate + activate(); + } else { + // otherwise reset camera + c.camera.endXr(); + } + + evt.event.preventDefault(); + evt.event.stopPropagation(); + }); + } + + // end session by keyboard ESC + app.keyboard.on('keydown', function (evt) { + if (evt.key === pc.KEY_ESCAPE && app.xr.active) { + app.xr.end(); + } + }); + + app.xr.on('start', function () { + message("Immersive AR session has started"); + console.log('depth gpu optimized', app.xr.views.depthGpuOptimized); + console.log('depth texture format', app.xr.views.depthFormat); + }); + app.xr.on('end', function () { + message("Immersive AR session has ended"); + }); + app.xr.on('available:' + pc.XRTYPE_AR, function (available) { + if (available) { + if (!app.xr.views.supportedDepth) { + message("AR Camera Depth is not supported"); + } else { + message("Touch screen to start AR session"); + } + } else { + message("Immersive AR is not available"); + } + }); + + app.on('update', () => { + // if camera depth is available + if (app.xr.views.availableDepth) { + for(let i = 0; i < app.xr.views.list.length; i++) { + const view = app.xr.views.list[i]; + if (!view.textureDepth) // check if depth texture is available + continue; + + materialDepth.setParameter('colorMap', view.textureDepth) + materialDepth.setParameter('matrix_depth_uv', app.xr.depthSensing.uvMatrix.data); + materialDepth.setParameter('depth_raw_to_meters', app.xr.depthSensing.rawValueToMeters); + + // debug draw camera depth texture on the screen + app.drawTexture(0.5, -0.5, 1, 1, view.textureDepth, materialDepth); + } + } + }); + + app.xr.on('end', () => { + if (!material.diffuseMap) + return; + + // clear camera depth texture when XR session ends + material.diffuseMap = null; + material.update(); + }); + + if (!app.xr.isAvailable(pc.XRTYPE_AR)) { + message("Immersive AR is not available"); + } else if (!app.xr.views.supportedDepth) { + message("AR Camera Depth is not supported"); + } else { + message("Touch screen to start AR session"); + } + } else { + message("WebXR is not supported"); + } + return app; +} + +class ArCameraDepthExample { + static CATEGORY = 'XR'; + static NAME = 'AR Camera Depth'; + static example = example; +} + +export { ArCameraDepthExample }; diff --git a/examples/src/examples/xr/index.mjs b/examples/src/examples/xr/index.mjs index 27fd04e8eb5..7eed1d7c986 100644 --- a/examples/src/examples/xr/index.mjs +++ 
b/examples/src/examples/xr/index.mjs @@ -1,5 +1,6 @@ export * from "./ar-basic.mjs"; export * from "./ar-camera-color.mjs"; +export * from "./ar-camera-depth.mjs"; export * from "./ar-hit-test.mjs"; export * from "./ar-hit-test-anchors.mjs"; export * from "./ar-anchors-persistence.mjs"; diff --git a/src/framework/xr/xr-depth-sensing.js b/src/framework/xr/xr-depth-sensing.js index affc289b964..7162d1efce4 100644 --- a/src/framework/xr/xr-depth-sensing.js +++ b/src/framework/xr/xr-depth-sensing.js @@ -72,6 +72,12 @@ class XrDepthSensing extends EventHandler { */ _manager; + /** + * @type {import('./xr-views.js').XrViews} + * @private + */ + _views; + /** * @type {boolean} * @private @@ -142,17 +148,18 @@ class XrDepthSensing extends EventHandler { super(); this._manager = manager; - - // TODO: data format can be different - this._texture = new Texture(this._manager.app.graphicsDevice, { - format: PIXELFORMAT_LA8, - mipmaps: false, - addressU: ADDRESS_CLAMP_TO_EDGE, - addressV: ADDRESS_CLAMP_TO_EDGE, - minFilter: FILTER_LINEAR, - magFilter: FILTER_LINEAR, - name: 'XRDepthSensing' - }); + this._views = manager.views; + + // // TODO: data format can be different + // this._texture = new Texture(this._manager.app.graphicsDevice, { + // format: PIXELFORMAT_LA8, + // mipmaps: false, + // addressU: ADDRESS_CLAMP_TO_EDGE, + // addressV: ADDRESS_CLAMP_TO_EDGE, + // minFilter: FILTER_LINEAR, + // magFilter: FILTER_LINEAR, + // name: 'XRDepthSensing' + // }); if (this.supported) { this._manager.on('start', this._onSessionStart, this); @@ -221,14 +228,15 @@ class XrDepthSensing extends EventHandler { } this._depthBuffer = null; - this._texture._width = 4; - this._texture._height = 4; - this._texture._levels[0] = this._emptyBuffer; - this._texture.upload(); + // this._texture._width = 4; + // this._texture._height = 4; + // this._texture._levels[0] = this._emptyBuffer; + // this._texture.upload(); } /** @private */ _updateTexture() { + return; const depthInfo = this._depthInfoCpu || this._depthInfoGpu; if (depthInfo) { @@ -361,6 +369,7 @@ class XrDepthSensing extends EventHandler { * Whether the usage is CPU or GPU. * * @type {string} + * @deprecated * @ignore */ get usage() { @@ -371,6 +380,7 @@ class XrDepthSensing extends EventHandler { * The depth sensing data format. * * @type {string} + * @deprecated * @ignore */ get dataFormat() { diff --git a/src/framework/xr/xr-view.js b/src/framework/xr/xr-view.js index 845322ca964..e4da83f2055 100644 --- a/src/framework/xr/xr-view.js +++ b/src/framework/xr/xr-view.js @@ -1,3 +1,4 @@ +import { EventHandler } from '../../core/event-handler.js'; import { Texture } from '../../platform/graphics/texture.js'; import { Vec4 } from "../../core/math/vec4.js"; import { Mat3 } from "../../core/math/mat3.js"; @@ -10,7 +11,7 @@ import { ADDRESS_CLAMP_TO_EDGE, FILTER_LINEAR, PIXELFORMAT_RGB8 } from '../../pl * * @category XR */ -class XrView { +class XrView extends EventHandler { /** * @type {import('./xr-manager.js').XrManager} * @private @@ -89,6 +90,12 @@ class XrView { */ _textureColor = null; + /** + * @type {Texture|null} + * @private + */ + _textureDepth = null; + /** * Create a new XrView instance. 
* @@ -98,13 +105,29 @@ class XrView { * @hideconstructor */ constructor(manager, xrView) { + super(); + this._manager = manager; this._xrView = xrView; - if (this._manager.views.supportedColor) + if (this._manager.views.supportedColor) { this._xrCamera = this._xrView.camera; - this._updateTextureColor(); + // color texture + if (this._manager.views.availableColor && this._xrCamera) { + this._textureColor = new Texture(this._manager.app.graphicsDevice, { + format: PIXELFORMAT_RGB8, + mipmaps: false, + addressU: ADDRESS_CLAMP_TO_EDGE, + addressV: ADDRESS_CLAMP_TO_EDGE, + minFilter: FILTER_LINEAR, + magFilter: FILTER_LINEAR, + width: this._xrCamera.width, + height: this._xrCamera.height, + name: `XrView-${this._xrView.eye}-Color` + }); + } + } } /** @@ -117,6 +140,16 @@ class XrView { return this._textureColor; } + /** + * Texture associated with this view's camera depth. Equals to null if camera depth is + * not available or not supported. + * + * @type {Texture|null} + */ + get textureDepth() { + return this._textureDepth; + } + /** * An eye with which this view is associated. Can be any of: * @@ -214,13 +247,14 @@ class XrView { this._viewInvMat.set(this._xrView.transform.matrix); this._updateTextureColor(); + this._updateTextureDepth(frame); } /** * @private */ _updateTextureColor() { - if (!this._manager.views.availableColor || !this._xrCamera) + if (!this._manager.views.availableColor || !this._xrCamera || !this._textureColor) return; const binding = this._manager.webglBinding; @@ -231,17 +265,84 @@ class XrView { if (!texture) return; + const device = this._manager.app.graphicsDevice; + const gl = device.gl; + + if (!this._frameBufferSource) { + // create frame buffer to read from + this._frameBufferSource = gl.createFramebuffer(); + + // create frame buffer to write to + this._frameBuffer = gl.createFramebuffer(); + } else { + const attachmentBaseConstant = device.isWebGL2 ? gl.COLOR_ATTACHMENT0 : (device.extDrawBuffers?.COLOR_ATTACHMENT0_WEBGL ?? gl.COLOR_ATTACHMENT0); + const width = this._xrCamera.width; + const height = this._xrCamera.height; + + // set frame buffer to read from + device.setFramebuffer(this._frameBufferSource); + gl.framebufferTexture2D( + gl.FRAMEBUFFER, + attachmentBaseConstant, + gl.TEXTURE_2D, + texture, + 0 + ); + + // set frame buffer to write to + device.setFramebuffer(this._frameBuffer); + gl.framebufferTexture2D( + gl.FRAMEBUFFER, + attachmentBaseConstant, + gl.TEXTURE_2D, + this._textureColor.impl._glTexture, + 0 + ); + + // bind buffers + gl.bindFramebuffer(gl.READ_FRAMEBUFFER, this._frameBufferSource); + gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, this._frameBuffer); + + // copy buffers with flip Y + gl.blitFramebuffer(0, height, width, 0, 0, 0, width, height, gl.COLOR_BUFFER_BIT, gl.NEAREST); + } + } + + /** + * @private + */ + _updateTextureDepth(frame) { + if (!this._manager.views.availableDepth) + return; + + const binding = this._manager.webglBinding; + if (!binding) + return; + + const gpu = this._manager.views.depthGpuOptimized; + let info, texture; + + if (gpu) { + info = binding.getDepthInformation(this._xrView); + } else { + info = frame.getDepthInformation(this._xrView); + } + + if (!info) return; + + if (gpu) texture = info.texture; + const device = this._manager.app.graphicsDevice; const gl = device.gl; const attachmentBaseConstant = device.isWebGL2 ? gl.COLOR_ATTACHMENT0 : (device.extDrawBuffers?.COLOR_ATTACHMENT0_WEBGL ?? 
gl.COLOR_ATTACHMENT0); - const width = this._xrCamera.width; - const height = this._xrCamera.height; + const width = info.width; + const height = info.height; - if (!this._textureColor) { + if (!this._textureDepth) { // color texture - this._textureColor = new Texture(device, { - format: PIXELFORMAT_RGB8, + this._textureDepth = new Texture(device, { + format: this._manager.views.depthFormat, mipmaps: false, addressU: ADDRESS_CLAMP_TO_EDGE, addressV: ADDRESS_CLAMP_TO_EDGE, @@ -249,49 +350,66 @@ class XrView { magFilter: FILTER_LINEAR, width: width, height: height, - name: `XrView-${this._xrView.eye}-Color` + name: `XrView-${this._xrView.eye}-Depth` }); // force initialize texture - this._textureColor.upload(); + // this._textureDepth.upload(); // create frame buffer to read from - this._frameBufferSource = gl.createFramebuffer(); + // this._frameBufferSource = gl.createFramebuffer(); // create frame buffer to write to - this._frameBuffer = gl.createFramebuffer(); + // this._frameBuffer = gl.createFramebuffer(); } - // set frame buffer to read from - device.setFramebuffer(this._frameBufferSource); - gl.framebufferTexture2D( - gl.FRAMEBUFFER, - attachmentBaseConstant, - gl.TEXTURE_2D, - texture, - 0 - ); - - // set frame buffer to write to - device.setFramebuffer(this._frameBuffer); - gl.framebufferTexture2D( - gl.FRAMEBUFFER, - attachmentBaseConstant, - gl.TEXTURE_2D, - this._textureColor.impl._glTexture, - 0 - ); - - // bind buffers - gl.bindFramebuffer(gl.READ_FRAMEBUFFER, this._frameBufferSource); - let ready = gl.checkFramebufferStatus(gl.FRAMEBUFFER) === gl.FRAMEBUFFER_COMPLETE; - - gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, this._frameBuffer); - if (ready) ready = gl.checkFramebufferStatus(gl.FRAMEBUFFER) === gl.FRAMEBUFFER_COMPLETE; - - if (ready) { - // copy buffers with flip Y - gl.blitFramebuffer(0, height, width, 0, 0, 0, width, height, gl.COLOR_BUFFER_BIT, gl.NEAREST); + let resized = false; + + if (this._textureDepth.width !== width || this._textureDepth.height !== height) { + this._textureDepth._width = width; + this._textureDepth._height = height; + resized = true; + } + + if (gpu) { + // set frame buffer to read from + // device.setFramebuffer(this._frameBufferSource); + // gl.framebufferTexture2D( + // gl.FRAMEBUFFER, + // attachmentBaseConstant, + // gl.TEXTURE_2D, + // texture, + // 0 + // ); + + // // set frame buffer to write to + // device.setFramebuffer(this._frameBuffer); + // gl.framebufferTexture2D( + // gl.FRAMEBUFFER, + // attachmentBaseConstant, + // gl.TEXTURE_2D, + // this._textureDepth.impl._glTexture, + // 0 + // ); + + // // bind buffers + // gl.bindFramebuffer(gl.READ_FRAMEBUFFER, this._frameBufferSource); + // let ready = gl.checkFramebufferStatus(gl.FRAMEBUFFER) === gl.FRAMEBUFFER_COMPLETE; + + // gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, this._frameBuffer); + // if (ready) ready = gl.checkFramebufferStatus(gl.FRAMEBUFFER) === gl.FRAMEBUFFER_COMPLETE; + + // if (ready) { + // // copy buffers with flip Y + // gl.blitFramebuffer(0, height, width, 0, 0, 0, width, height, gl.COLOR_BUFFER_BIT, gl.NEAREST); + // } + } else { + this._textureDepth._levels[0] = new Uint8Array(info.data); + this._textureDepth.upload(); + } + + if (resized) { + this.fire('depthResize', width, height); } } diff --git a/src/framework/xr/xr-views.js b/src/framework/xr/xr-views.js index 5ccb265712f..d8456d274cc 100644 --- a/src/framework/xr/xr-views.js +++ b/src/framework/xr/xr-views.js @@ -2,6 +2,7 @@ import { platform } from '../../core/platform.js'; import { EventHandler } from 
"../../core/event-handler.js"; import { XrView } from "./xr-view.js"; import { XRTYPE_AR } from "./constants.js"; +import { PIXELFORMAT_LA8, PIXELFORMAT_R32F } from '../../platform/graphics/constants.js'; /** * Provides access to list of {@link XrView}'s. And information about their capabilities, @@ -40,12 +41,45 @@ class XrViews extends EventHandler { */ _supportedColor = platform.browser && !!window.XRCamera && !!window.XRWebGLBinding; + /** + * @type {boolean} + * @private + */ + _supportedDepth = platform.browser && !!window.XRDepthInformation; + /** * @type {boolean} * @private */ _availableColor = false; + /** + * @type {boolean} + * @private + */ + _availableDepth = false; + + /** + * @type {string} + * @private + */ + _depthUsage = ''; + + /** + * @type {string} + * @private + */ + _depthFormat = ''; + + /** + * @type {object} + * @private + */ + _depthFormats = { + 'luminance-alpha': PIXELFORMAT_LA8, + 'float32': PIXELFORMAT_R32F + }; + /** * @param {import('./xr-manager.js').XrManager} manager - WebXR Manager. * @hideconstructor @@ -102,6 +136,16 @@ class XrViews extends EventHandler { return this._supportedColor; } + /** + * Check if Camera Depth is supported. It might be still unavailable even if requested, + * based on hardware capabilities and granted permissions. + * + * @type {boolean} + */ + get supportedDepth() { + return this._supportedDepth; + } + /** * Check if Camera Color is available. This information becomes available only after * session has started. @@ -112,6 +156,37 @@ class XrViews extends EventHandler { return this._availableColor; } + /** + * Check if Camera Depth is available. This information becomes available only after + * session has started. + * + * @type {boolean} + */ + get availableDepth() { + return this._availableDepth; + } + + /** + * Whether the depth sensing is GPU optimized. + * + * @type {boolean} + * @ignore + */ + get depthGpuOptimized() { + return this._depthUsage === 'gpu-optimized'; + } + + /** + * The depth sensing data format. Currently supported either: + * {@link PIXELFORMAT_LA8} or {@link PIXELFORMAT_R32F} + * + * @type {number} + * @ignore + */ + get depthFormat() { + return this._depthFormats[this._depthFormat]; + } + /** * @param {*} frame - XRFrame from requestAnimationFrame callback. * @param {XRView} xrView - XRView from WebXR API. 
@@ -167,7 +242,15 @@ class XrViews extends EventHandler { _onSessionStart() { if (this._manager.type !== XRTYPE_AR) return; + this._availableColor = this._manager.session.enabledFeatures.indexOf('camera-access') !== -1; + this._availableDepth = this._manager.session.enabledFeatures.indexOf('depth-sensing') !== -1; + + if (this._availableDepth) { + const session = this._manager.session; + this._depthUsage = session.depthUsage; + this._depthFormat = session.depthDataFormat; + } } /** @@ -179,6 +262,9 @@ class XrViews extends EventHandler { } this._index.clear(); this._availableColor = false; + this._availableDepth = false; + this._depthUsage = ''; + this._depthFormat = ''; this._list.length = 0; } } From 5f0a8057a0cc610dec3278286a34d8f48c28e67f Mon Sep 17 00:00:00 2001 From: mrmaxm Date: Fri, 24 Nov 2023 14:36:29 +0200 Subject: [PATCH 07/19] avoid FBO checks --- src/framework/xr/xr-view.js | 101 +++++++++++++++++------------------- 1 file changed, 48 insertions(+), 53 deletions(-) diff --git a/src/framework/xr/xr-view.js b/src/framework/xr/xr-view.js index 845322ca964..78cbcd37a57 100644 --- a/src/framework/xr/xr-view.js +++ b/src/framework/xr/xr-view.js @@ -101,10 +101,24 @@ class XrView { this._manager = manager; this._xrView = xrView; - if (this._manager.views.supportedColor) + if (this._manager.views.supportedColor) { this._xrCamera = this._xrView.camera; - this._updateTextureColor(); + // color texture + if (this._manager.views.availableColor && this._xrCamera) { + this._textureColor = new Texture(this._manager.app.graphicsDevice, { + format: PIXELFORMAT_RGB8, + mipmaps: false, + addressU: ADDRESS_CLAMP_TO_EDGE, + addressV: ADDRESS_CLAMP_TO_EDGE, + minFilter: FILTER_LINEAR, + magFilter: FILTER_LINEAR, + width: this._xrCamera.width, + height: this._xrCamera.height, + name: `XrView-${this._xrView.eye}-Color` + }); + } + } } /** @@ -220,7 +234,7 @@ class XrView { * @private */ _updateTextureColor() { - if (!this._manager.views.availableColor || !this._xrCamera) + if (!this._manager.views.availableColor || !this._xrCamera || !this._textureColor) return; const binding = this._manager.webglBinding; @@ -233,63 +247,42 @@ class XrView { const device = this._manager.app.graphicsDevice; const gl = device.gl; - const attachmentBaseConstant = device.isWebGL2 ? gl.COLOR_ATTACHMENT0 : (device.extDrawBuffers?.COLOR_ATTACHMENT0_WEBGL ?? gl.COLOR_ATTACHMENT0); - - const width = this._xrCamera.width; - const height = this._xrCamera.height; - - if (!this._textureColor) { - // color texture - this._textureColor = new Texture(device, { - format: PIXELFORMAT_RGB8, - mipmaps: false, - addressU: ADDRESS_CLAMP_TO_EDGE, - addressV: ADDRESS_CLAMP_TO_EDGE, - minFilter: FILTER_LINEAR, - magFilter: FILTER_LINEAR, - width: width, - height: height, - name: `XrView-${this._xrView.eye}-Color` - }); - - // force initialize texture - this._textureColor.upload(); + if (!this._frameBufferSource) { // create frame buffer to read from this._frameBufferSource = gl.createFramebuffer(); // create frame buffer to write to this._frameBuffer = gl.createFramebuffer(); - } + } else { + const attachmentBaseConstant = device.isWebGL2 ? gl.COLOR_ATTACHMENT0 : (device.extDrawBuffers?.COLOR_ATTACHMENT0_WEBGL ?? 
gl.COLOR_ATTACHMENT0); + const width = this._xrCamera.width; + const height = this._xrCamera.height; + + // set frame buffer to read from + device.setFramebuffer(this._frameBufferSource); + gl.framebufferTexture2D( + gl.FRAMEBUFFER, + attachmentBaseConstant, + gl.TEXTURE_2D, + texture, + 0 + ); + + // set frame buffer to write to + device.setFramebuffer(this._frameBuffer); + gl.framebufferTexture2D( + gl.FRAMEBUFFER, + attachmentBaseConstant, + gl.TEXTURE_2D, + this._textureColor.impl._glTexture, + 0 + ); + + // bind buffers + gl.bindFramebuffer(gl.READ_FRAMEBUFFER, this._frameBufferSource); + gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, this._frameBuffer); - // set frame buffer to read from - device.setFramebuffer(this._frameBufferSource); - gl.framebufferTexture2D( - gl.FRAMEBUFFER, - attachmentBaseConstant, - gl.TEXTURE_2D, - texture, - 0 - ); - - // set frame buffer to write to - device.setFramebuffer(this._frameBuffer); - gl.framebufferTexture2D( - gl.FRAMEBUFFER, - attachmentBaseConstant, - gl.TEXTURE_2D, - this._textureColor.impl._glTexture, - 0 - ); - - // bind buffers - gl.bindFramebuffer(gl.READ_FRAMEBUFFER, this._frameBufferSource); - let ready = gl.checkFramebufferStatus(gl.FRAMEBUFFER) === gl.FRAMEBUFFER_COMPLETE; - - gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, this._frameBuffer); - if (ready) ready = gl.checkFramebufferStatus(gl.FRAMEBUFFER) === gl.FRAMEBUFFER_COMPLETE; - - if (ready) { // copy buffers with flip Y gl.blitFramebuffer(0, height, width, 0, 0, 0, width, height, gl.COLOR_BUFFER_BIT, gl.NEAREST); } @@ -323,7 +316,9 @@ class XrView { if (this._textureColor) { this._textureColor.destroy(); this._textureColor = null; + } + if (this._frameBufferSource) { const gl = this._manager.app.graphicsDevice.gl; gl.deleteFramebuffer(this._frameBufferSource); From b57595b09fd2d6b5107bd7210408462e797b5313 Mon Sep 17 00:00:00 2001 From: mrmaxm Date: Fri, 24 Nov 2023 14:45:28 +0200 Subject: [PATCH 08/19] wip --- src/framework/xr/xr-view.js | 70 ++++++++++++++++++++++-------------- src/framework/xr/xr-views.js | 2 +- 2 files changed, 44 insertions(+), 28 deletions(-) diff --git a/src/framework/xr/xr-view.js b/src/framework/xr/xr-view.js index e4da83f2055..1bed67c1620 100644 --- a/src/framework/xr/xr-view.js +++ b/src/framework/xr/xr-view.js @@ -128,6 +128,20 @@ class XrView extends EventHandler { }); } } + + if (this._manager.views.supportedDepth && this._manager.views.availableDepth) { + this._textureDepth = new Texture(this._manager.app.graphicsDevice, { + format: this._manager.views.depthFormat, + mipmaps: false, + addressU: ADDRESS_CLAMP_TO_EDGE, + addressV: ADDRESS_CLAMP_TO_EDGE, + minFilter: FILTER_LINEAR, + magFilter: FILTER_LINEAR, + width: 4, + height: 4, + name: `XrView-${this._xrView.eye}-Depth` + }); + } } /** @@ -312,7 +326,7 @@ class XrView extends EventHandler { * @private */ _updateTextureDepth(frame) { - if (!this._manager.views.availableDepth) + if (!this._manager.views.availableDepth || !this._textureDepth) return; const binding = this._manager.webglBinding; @@ -332,36 +346,36 @@ class XrView extends EventHandler { if (gpu) texture = info.texture; - const device = this._manager.app.graphicsDevice; - const gl = device.gl; - const attachmentBaseConstant = device.isWebGL2 ? gl.COLOR_ATTACHMENT0 : (device.extDrawBuffers?.COLOR_ATTACHMENT0_WEBGL ?? gl.COLOR_ATTACHMENT0); + // const device = this._manager.app.graphicsDevice; + // const gl = device.gl; + // const attachmentBaseConstant = device.isWebGL2 ? gl.COLOR_ATTACHMENT0 : (device.extDrawBuffers?.COLOR_ATTACHMENT0_WEBGL ?? 
gl.COLOR_ATTACHMENT0); const width = info.width; const height = info.height; - if (!this._textureDepth) { - // color texture - this._textureDepth = new Texture(device, { - format: this._manager.views.depthFormat, - mipmaps: false, - addressU: ADDRESS_CLAMP_TO_EDGE, - addressV: ADDRESS_CLAMP_TO_EDGE, - minFilter: FILTER_LINEAR, - magFilter: FILTER_LINEAR, - width: width, - height: height, - name: `XrView-${this._xrView.eye}-Depth` - }); - - // force initialize texture - // this._textureDepth.upload(); - - // create frame buffer to read from - // this._frameBufferSource = gl.createFramebuffer(); - - // create frame buffer to write to - // this._frameBuffer = gl.createFramebuffer(); - } + // if (!this._textureDepth) { + // // color texture + // this._textureDepth = new Texture(device, { + // format: this._manager.views.depthFormat, + // mipmaps: false, + // addressU: ADDRESS_CLAMP_TO_EDGE, + // addressV: ADDRESS_CLAMP_TO_EDGE, + // minFilter: FILTER_LINEAR, + // magFilter: FILTER_LINEAR, + // width: width, + // height: height, + // name: `XrView-${this._xrView.eye}-Depth` + // }); + + // // force initialize texture + // // this._textureDepth.upload(); + + // // create frame buffer to read from + // // this._frameBufferSource = gl.createFramebuffer(); + + // // create frame buffer to write to + // // this._frameBuffer = gl.createFramebuffer(); + // } let resized = false; @@ -441,7 +455,9 @@ class XrView extends EventHandler { if (this._textureColor) { this._textureColor.destroy(); this._textureColor = null; + } + if (this._frameBufferSource) { const gl = this._manager.app.graphicsDevice.gl; gl.deleteFramebuffer(this._frameBufferSource); diff --git a/src/framework/xr/xr-views.js b/src/framework/xr/xr-views.js index d8456d274cc..0800ad8cc82 100644 --- a/src/framework/xr/xr-views.js +++ b/src/framework/xr/xr-views.js @@ -233,7 +233,7 @@ class XrViews extends EventHandler { * @returns {XrView|null} View or null if view of such eye is not available. */ get(eye) { - return this._index.get(name) || null; + return this._index.get(eye) || null; } /** From 70067e5cfea17d854ad6e05115fc14e101083dfe Mon Sep 17 00:00:00 2001 From: mrmaxm Date: Fri, 24 Nov 2023 14:46:00 +0200 Subject: [PATCH 09/19] fix --- src/framework/xr/xr-views.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/framework/xr/xr-views.js b/src/framework/xr/xr-views.js index 5ccb265712f..5d34b59a768 100644 --- a/src/framework/xr/xr-views.js +++ b/src/framework/xr/xr-views.js @@ -158,7 +158,7 @@ class XrViews extends EventHandler { * @returns {XrView|null} View or null if view of such eye is not available. 
*/ get(eye) { - return this._index.get(name) || null; + return this._index.get(eye) || null; } /** From a1470b9e1a4fa5ff34e71bb575aad6e6b61d1b9b Mon Sep 17 00:00:00 2001 From: mrmaxm Date: Fri, 24 Nov 2023 15:29:53 +0200 Subject: [PATCH 10/19] depth sensing refactor --- examples/src/examples/xr/ar-camera-depth.mjs | 8 +- src/framework/xr/xr-depth-sensing.js | 231 ++++--------------- src/framework/xr/xr-manager.js | 5 +- src/framework/xr/xr-view.js | 182 ++++++++------- src/framework/xr/xr-views.js | 16 +- 5 files changed, 155 insertions(+), 287 deletions(-) diff --git a/examples/src/examples/xr/ar-camera-depth.mjs b/examples/src/examples/xr/ar-camera-depth.mjs index 9bdfeaf162b..e1d24ad3c91 100644 --- a/examples/src/examples/xr/ar-camera-depth.mjs +++ b/examples/src/examples/xr/ar-camera-depth.mjs @@ -155,7 +155,7 @@ async function example({ canvas }) { app.xr.on('start', function () { message("Immersive AR session has started"); console.log('depth gpu optimized', app.xr.views.depthGpuOptimized); - console.log('depth texture format', app.xr.views.depthFormat); + console.log('depth texture format', app.xr.views.depthPixelFormat); }); app.xr.on('end', function () { message("Immersive AR session has ended"); @@ -180,9 +180,9 @@ async function example({ canvas }) { if (!view.textureDepth) // check if depth texture is available continue; - materialDepth.setParameter('colorMap', view.textureDepth) - materialDepth.setParameter('matrix_depth_uv', app.xr.depthSensing.uvMatrix.data); - materialDepth.setParameter('depth_raw_to_meters', app.xr.depthSensing.rawValueToMeters); + materialDepth.setParameter('colorMap', view.textureDepth); + materialDepth.setParameter('matrix_depth_uv', view.depthUvMatrix.data); + materialDepth.setParameter('depth_raw_to_meters', view.depthValueToMeters); // debug draw camera depth texture on the screen app.drawTexture(0.5, -0.5, 1, 1, view.textureDepth, materialDepth); diff --git a/src/framework/xr/xr-depth-sensing.js b/src/framework/xr/xr-depth-sensing.js index 7162d1efce4..e8bbde24d42 100644 --- a/src/framework/xr/xr-depth-sensing.js +++ b/src/framework/xr/xr-depth-sensing.js @@ -64,6 +64,8 @@ import { XRDEPTHSENSINGUSAGE_CPU, XRDEPTHSENSINGUSAGE_GPU } from './constants.js * * @augments EventHandler * @category XR + * @deprecated + * @ignore */ class XrDepthSensing extends EventHandler { /** @@ -78,65 +80,11 @@ class XrDepthSensing extends EventHandler { */ _views; - /** - * @type {boolean} - * @private - */ - _available = false; - - /** - * @type {XRCPUDepthInformation|null} - * @private - */ - _depthInfoCpu = null; - - /** - * @type {XRCPUDepthInformation|null} - * @private - */ - _depthInfoGpu = null; - - /** - * @type {string|null} - * @private - */ - _usage = null; - - /** - * @type {string|null} - * @private - */ - _dataFormat = null; - /** * @type {boolean} * @private */ - _matrixDirty = false; - - /** - * @type {Mat4} - * @private - */ - _matrix = new Mat4(); - - /** - * @type {Uint8Array} - * @private - */ - _emptyBuffer = new Uint8Array(32); - - /** - * @type {Uint8Array|null} - * @private - */ - _depthBuffer = null; - - /** - * @type {Texture} - * @private - */ - _texture; + _available = false; /** * Create a new XrDepthSensing instance. 
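A sketch of how the deprecated XrDepthSensing accessors map onto the per-view API introduced by this refactor; `materialDepth` is the depth-visualisation material from the example above, `u` and `v` are illustrative sample coordinates, and none of this is part of the patch itself:

    const u = 0.5, v = 0.5; // sample at the centre of the depth texture

    // before: single, globally managed depth state
    // const depth = app.xr.depthSensing.getDepth(u, v);
    // materialDepth.setParameter('matrix_depth_uv', app.xr.depthSensing.uvMatrix.data);
    // materialDepth.setParameter('depth_raw_to_meters', app.xr.depthSensing.rawValueToMeters);

    // after: each XrView owns its depth data
    for (const view of app.xr.views.list) {
        if (!view.textureDepth) continue;

        const depth = view.getDepth(u, v); // meters with cpu usage, null when gpu-optimized
        if (depth !== null) console.log('centre depth (m)', depth);

        materialDepth.setParameter('colorMap', view.textureDepth);
        materialDepth.setParameter('matrix_depth_uv', view.depthUvMatrix.data);
        materialDepth.setParameter('depth_raw_to_meters', view.depthValueToMeters);
    }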
@@ -150,18 +98,7 @@ class XrDepthSensing extends EventHandler { this._manager = manager; this._views = manager.views; - // // TODO: data format can be different - // this._texture = new Texture(this._manager.app.graphicsDevice, { - // format: PIXELFORMAT_LA8, - // mipmaps: false, - // addressU: ADDRESS_CLAMP_TO_EDGE, - // addressV: ADDRESS_CLAMP_TO_EDGE, - // minFilter: FILTER_LINEAR, - // magFilter: FILTER_LINEAR, - // name: 'XRDepthSensing' - // }); - - if (this.supported) { + if (this._views.supportedDepth) { this._manager.on('start', this._onSessionStart, this); this._manager.on('end', this._onSessionEnd, this); } @@ -171,12 +108,16 @@ class XrDepthSensing extends EventHandler { * Fired when depth sensing data becomes available. * * @event XrDepthSensing#available + * @deprecated + * @ignore */ /** * Fired when depth sensing data becomes unavailable. * * @event XrDepthSensing#unavailable + * @deprecated + * @ignore */ /** @@ -186,6 +127,8 @@ class XrDepthSensing extends EventHandler { * @event XrDepthSensing#resize * @param {number} width - The new width of the depth texture in pixels. * @param {number} height - The new height of the depth texture in pixels. + * @deprecated + * @ignore * @example * depthSensing.on('resize', function () { * material.setParameter('matrix_depth_uv', depthSensing.uvMatrix); @@ -200,121 +143,18 @@ class XrDepthSensing extends EventHandler { /** @private */ _onSessionStart() { - const session = this._manager.session; - - try { - this._usage = session.depthUsage; - this._dataFormat = session.depthDataFormat; - } catch (ex) { - this._usage = null; - this._dataFormat = null; - this._available = false; - - this.fire('error', ex); + if (this._views.availableDepth) { + this._available = true; + this.fire('available') } } /** @private */ _onSessionEnd() { - this._depthInfoCpu = null; - this._depthInfoGpu = null; - - this._usage = null; - this._dataFormat = null; - if (this._available) { this._available = false; this.fire('unavailable'); } - - this._depthBuffer = null; - // this._texture._width = 4; - // this._texture._height = 4; - // this._texture._levels[0] = this._emptyBuffer; - // this._texture.upload(); - } - - /** @private */ - _updateTexture() { - return; - const depthInfo = this._depthInfoCpu || this._depthInfoGpu; - - if (depthInfo) { - let resized = false; - - // changed resolution - if (depthInfo.width !== this._texture.width || depthInfo.height !== this._texture.height) { - this._texture._width = depthInfo.width; - this._texture._height = depthInfo.height; - this._matrixDirty = true; - resized = true; - } - - if (this._depthInfoCpu) { - const dataBuffer = this._depthInfoCpu.data; - this._depthBuffer = new Uint8Array(dataBuffer); - this._texture._levels[0] = this._depthBuffer; - this._texture.upload(); - } else if (this._depthInfoGpu) { - this._texture._levels[0] = this._depthInfoGpu.texture; - this._texture.upload(); - } - - if (resized) this.fire('resize', depthInfo.width, depthInfo.height); - } else if (this._depthBuffer) { - // depth info not available anymore - this._depthBuffer = null; - this._texture._width = 4; - this._texture._height = 4; - this._texture._levels[0] = this._emptyBuffer; - this._texture.upload(); - } - } - - /** - * @param {*} frame - XRFrame from requestAnimationFrame callback. - * @param {*} view - First XRView of viewer XRPose. 
- * @ignore - */ - update(frame, view) { - if (!this._usage) - return; - - let depthInfoCpu = null; - let depthInfoGpu = null; - if (this._usage === XRDEPTHSENSINGUSAGE_CPU && view) { - depthInfoCpu = frame.getDepthInformation(view); - } else if (this._usage === XRDEPTHSENSINGUSAGE_GPU && view) { - depthInfoGpu = frame.getDepthInformation(view); - } - - if ((this._depthInfoCpu && !depthInfoCpu) || (!this._depthInfoCpu && depthInfoCpu) || (this.depthInfoGpu && !depthInfoGpu) || (!this._depthInfoGpu && depthInfoGpu)) { - this._matrixDirty = true; - } - this._depthInfoCpu = depthInfoCpu; - this._depthInfoGpu = depthInfoGpu; - - this._updateTexture(); - - if (this._matrixDirty) { - this._matrixDirty = false; - - const depthInfo = this._depthInfoCpu || this._depthInfoGpu; - - if (depthInfo) { - this._matrix.data.set(depthInfo.normDepthBufferFromNormView.matrix); - } else { - this._matrix.setIdentity(); - } - } - - if ((this._depthInfoCpu || this._depthInfoGpu) && !this._available) { - this._available = true; - this.fire('available'); - } else if (!this._depthInfoCpu && !this._depthInfoGpu && this._available) { - this._available = false; - this.fire('unavailable'); - } } /** @@ -325,6 +165,8 @@ class XrDepthSensing extends EventHandler { * 1.0 (left to right). * @param {number} v - V coordinate of pixel in depth texture, which is in range from 0.0 to * 1.0 (top to bottom). + * @deprecated + * @ignore * @returns {number|null} Depth in meters or null if depth information is currently not * available. * @example @@ -334,35 +176,33 @@ class XrDepthSensing extends EventHandler { * } */ getDepth(u, v) { - // TODO - // GPU usage - - if (!this._depthInfoCpu) - return null; - - return this._depthInfoCpu.getDepthInMeters(u, v); + return this._views.list[0]?.getDepth(u, v) ?? null; } /** * True if Depth Sensing is supported. * * @type {boolean} + * @deprecated + * @ignore */ get supported() { - return platform.browser && !!window.XRDepthInformation; + return this._views.supportedDepth; } /** * True if depth sensing information is available. * * @type {boolean} + * @deprecated + * @ignore * @example * if (app.xr.depthSensing.available) { * const depth = app.xr.depthSensing.getDepth(x, y); * } */ get available() { - return this._available; + return this._views.availableDepth; } /** @@ -373,7 +213,7 @@ class XrDepthSensing extends EventHandler { * @ignore */ get usage() { - return this._usage; + return this._views.depthUsage; } /** @@ -384,27 +224,29 @@ class XrDepthSensing extends EventHandler { * @ignore */ get dataFormat() { - return this._dataFormat; + return this._views.depthFormat; } /** * Width of depth texture or 0 if not available. * * @type {number} + * @deprecated + * @ignore */ get width() { - const depthInfo = this._depthInfoCpu || this._depthInfoGpu; - return depthInfo && depthInfo.width || 0; + return this._views.list[0]?.textureDepth?.width ?? 0; } /** * Height of depth texture or 0 if not available. * * @type {number} + * @deprecated + * @ignore */ get height() { - const depthInfo = this._depthInfoCpu || this._depthInfoGpu; - return depthInfo && depthInfo.height || 0; + return this._views.list[0]?.textureDepth?.height ?? 0; } /* eslint-disable jsdoc/check-examples */ @@ -414,6 +256,8 @@ class XrDepthSensing extends EventHandler { * be normalized using {@link XrDepthSensing#uvMatrix}. 
* * @type {Texture} + * @deprecated + * @ignore * @example * material.diffuseMap = depthSensing.texture; * @example @@ -442,7 +286,7 @@ class XrDepthSensing extends EventHandler { * } */ get texture() { - return this._texture; + return this._views.list[0]?.textureDepth; } /* eslint-enable jsdoc/check-examples */ @@ -451,23 +295,26 @@ class XrDepthSensing extends EventHandler { * It is updated when the depth texture is resized. Refer to {@link XrDepthSensing#resize}. * * @type {Mat4} + * @deprecated + * @ignore * @example * material.setParameter('matrix_depth_uv', depthSensing.uvMatrix.data); */ get uvMatrix() { - return this._matrix; + return this._views.list[0]?.depthUvMatrix; } /** * Multiply this coefficient number by raw depth value to get depth in meters. * * @type {number} + * @deprecated + * @ignore * @example * material.setParameter('depth_raw_to_meters', depthSensing.rawValueToMeters); */ get rawValueToMeters() { - const depthInfo = this._depthInfoCpu || this._depthInfoGpu; - return depthInfo && depthInfo.rawValueToMeters || 0; + return this._views.list[0]?.depthValueToMeters ?? 0; } } diff --git a/src/framework/xr/xr-manager.js b/src/framework/xr/xr-manager.js index b7353c862a1..38686dcd790 100644 --- a/src/framework/xr/xr-manager.js +++ b/src/framework/xr/xr-manager.js @@ -221,6 +221,7 @@ class XrManager extends EventHandler { this._available[XRTYPE_VR] = false; this._available[XRTYPE_AR] = false; + this.views = new XrViews(this); this.depthSensing = new XrDepthSensing(this); this.domOverlay = new XrDomOverlay(this); this.hitTest = new XrHitTest(this); @@ -229,7 +230,6 @@ class XrManager extends EventHandler { this.input = new XrInput(this); this.lightEstimation = new XrLightEstimation(this); this.anchors = new XrAnchors(this); - this.views = new XrViews(this); // TODO // 1. HMD class with its params @@ -821,9 +821,6 @@ class XrManager extends EventHandler { if (this.lightEstimation.supported) this.lightEstimation.update(frame); - if (this.depthSensing.supported) - this.depthSensing.update(frame, pose && pose.views[0]); - if (this.imageTracking.supported) this.imageTracking.update(frame); diff --git a/src/framework/xr/xr-view.js b/src/framework/xr/xr-view.js index 1bed67c1620..6550e861ce1 100644 --- a/src/framework/xr/xr-view.js +++ b/src/framework/xr/xr-view.js @@ -96,6 +96,24 @@ class XrView extends EventHandler { */ _textureDepth = null; + /** + * @type {XRDepthInformation|null} + * @private + */ + _depthInfo = null; + + /** + * @type {Uint8Array} + * @private + */ + _emptyDepthBuffer = new Uint8Array(32); + + /** + * @type {Mat4} + * @private + */ + _depthMatrix = new Mat4(); + /** * Create a new XrView instance. * @@ -131,7 +149,7 @@ class XrView extends EventHandler { if (this._manager.views.supportedDepth && this._manager.views.availableDepth) { this._textureDepth = new Texture(this._manager.app.graphicsDevice, { - format: this._manager.views.depthFormat, + format: this._manager.views.depthPixelFormat, mipmaps: false, addressU: ADDRESS_CLAMP_TO_EDGE, addressV: ADDRESS_CLAMP_TO_EDGE, @@ -164,6 +182,29 @@ class XrView extends EventHandler { return this._textureDepth; } + /** + * 4x4 matrix that should be used to transform depth texture UVs to normalized UVs in a shader. + * It is updated when the depth texture is resized. Refer to {@link XrView#depthResize}. 
+ * + * @type {Mat4} + * @example + * material.setParameter('matrix_depth_uv', view.depthUvMatrix.data); + */ + get depthUvMatrix() { + return this._depthMatrix; + } + + /** + * Multiply this coefficient number by raw depth value to get depth in meters. + * + * @type {number} + * @example + * material.setParameter('depth_raw_to_meters', view.depthValueToMeters); + */ + get depthValueToMeters() { + return this._depthInfo?.rawValueToMeters || 0; + } + /** * An eye with which this view is associated. Can be any of: * @@ -261,7 +302,7 @@ class XrView extends EventHandler { this._viewInvMat.set(this._xrView.transform.matrix); this._updateTextureColor(); - this._updateTextureDepth(frame); + this._updateDepth(frame); } /** @@ -325,106 +366,58 @@ class XrView extends EventHandler { /** * @private */ - _updateTextureDepth(frame) { + _updateDepth(frame) { if (!this._manager.views.availableDepth || !this._textureDepth) return; - const binding = this._manager.webglBinding; - if (!binding) - return; - const gpu = this._manager.views.depthGpuOptimized; - let info, texture; - if (gpu) { - info = binding.getDepthInformation(this._xrView); - } else { - info = frame.getDepthInformation(this._xrView); - } + const infoSource = gpu ? this._manager.webglBinding : frame; + if (!infoSource) return; - if (!info) return; - - if (gpu) texture = info.texture; - - // const device = this._manager.app.graphicsDevice; - // const gl = device.gl; - // const attachmentBaseConstant = device.isWebGL2 ? gl.COLOR_ATTACHMENT0 : (device.extDrawBuffers?.COLOR_ATTACHMENT0_WEBGL ?? gl.COLOR_ATTACHMENT0); - - const width = info.width; - const height = info.height; - - // if (!this._textureDepth) { - // // color texture - // this._textureDepth = new Texture(device, { - // format: this._manager.views.depthFormat, - // mipmaps: false, - // addressU: ADDRESS_CLAMP_TO_EDGE, - // addressV: ADDRESS_CLAMP_TO_EDGE, - // minFilter: FILTER_LINEAR, - // magFilter: FILTER_LINEAR, - // width: width, - // height: height, - // name: `XrView-${this._xrView.eye}-Depth` - // }); - - // // force initialize texture - // // this._textureDepth.upload(); - - // // create frame buffer to read from - // // this._frameBufferSource = gl.createFramebuffer(); - - // // create frame buffer to write to - // // this._frameBuffer = gl.createFramebuffer(); - // } + const depthInfo = infoSource.getDepthInformation(this._xrView); + let matrixDirty = !this._depthInfo !== !depthInfo; + this._depthInfo = depthInfo; + + const width = this._depthInfo?.width || 4; + const height = this._depthInfo?.height || 4; let resized = false; + // resizing if (this._textureDepth.width !== width || this._textureDepth.height !== height) { this._textureDepth._width = width; this._textureDepth._height = height; + matrixDirty = true; resized = true; } - if (gpu) { - // set frame buffer to read from - // device.setFramebuffer(this._frameBufferSource); - // gl.framebufferTexture2D( - // gl.FRAMEBUFFER, - // attachmentBaseConstant, - // gl.TEXTURE_2D, - // texture, - // 0 - // ); - - // // set frame buffer to write to - // device.setFramebuffer(this._frameBuffer); - // gl.framebufferTexture2D( - // gl.FRAMEBUFFER, - // attachmentBaseConstant, - // gl.TEXTURE_2D, - // this._textureDepth.impl._glTexture, - // 0 - // ); - - // // bind buffers - // gl.bindFramebuffer(gl.READ_FRAMEBUFFER, this._frameBufferSource); - // let ready = gl.checkFramebufferStatus(gl.FRAMEBUFFER) === gl.FRAMEBUFFER_COMPLETE; - - // gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, this._frameBuffer); - // if (ready) ready = 
gl.checkFramebufferStatus(gl.FRAMEBUFFER) === gl.FRAMEBUFFER_COMPLETE; - - // if (ready) { - // // copy buffers with flip Y - // gl.blitFramebuffer(0, height, width, 0, 0, 0, width, height, gl.COLOR_BUFFER_BIT, gl.NEAREST); - // } + // update depth matrix + if (matrixDirty) { + if (this._depthInfo) { + this._depthMatrix.data.set(this._depthInfo.normDepthBufferFromNormView.matrix); + } else { + this._depthMatrix.setIdentity(); + } + } + + // update texture + if (this._depthInfo) { + if (gpu) { + // gpu + console.log('not implemented') + } else { + // cpu + this._textureDepth._levels[0] = new Uint8Array(this._depthInfo.data); + this._textureDepth.upload(); + } } else { - this._textureDepth._levels[0] = new Uint8Array(info.data); + // clear + this._textureDepth._levels[0] = this._emptyDepthBuffer; this._textureDepth.upload(); } - if (resized) { - this.fire('depthResize', width, height); - } + if (resized) this.fire('depthResize', width, height); } /** @@ -448,6 +441,29 @@ class XrView extends EventHandler { this._positionData[2] = this._viewInvOffMat.data[14]; } + /** + * Get depth value from depth information in meters. UV is in range of 0..1, with origin in + * top-left corner of a texture. + * + * @param {number} u - U coordinate of pixel in depth texture, which is in range from 0.0 to + * 1.0 (left to right). + * @param {number} v - V coordinate of pixel in depth texture, which is in range from 0.0 to + * 1.0 (top to bottom). + * @returns {number|null} Depth in meters or null if depth information is currently not + * available. + * @example + * const depth = view.getDepth(u, v); + * if (depth !== null) { + * // depth in meters + * } + */ + getDepth(u, v) { + if (this._manager.views.depthGpuOptimized) + return null; + + return this._depthInfo?.getDepthInMeters(u, v) ?? null; + } + /** * @ignore */ diff --git a/src/framework/xr/xr-views.js b/src/framework/xr/xr-views.js index 0800ad8cc82..897c5b24d5e 100644 --- a/src/framework/xr/xr-views.js +++ b/src/framework/xr/xr-views.js @@ -177,14 +177,22 @@ class XrViews extends EventHandler { } /** - * The depth sensing data format. Currently supported either: + * The depth sensing pixel format. Currently supported either: * {@link PIXELFORMAT_LA8} or {@link PIXELFORMAT_R32F} * - * @type {number} + * @type {number|null} * @ignore */ - get depthFormat() { - return this._depthFormats[this._depthFormat]; + get depthPixelFormat() { + return this._depthFormats[this._depthFormat] ?? 
null; + } + + /** + * @type {string} + * @ignore + */ + get depthUsage() { + return this._depthUsage; } /** From eb766698dc453fdeb807bedc47d81c428041122a Mon Sep 17 00:00:00 2001 From: mrmaxm Date: Sat, 25 Nov 2023 13:41:51 +0200 Subject: [PATCH 11/19] handle webgl device lost --- src/framework/xr/xr-manager.js | 34 ++++++++++++++++++++++++++++++++++ src/framework/xr/xr-view.js | 7 +++++++ 2 files changed, 41 insertions(+) diff --git a/src/framework/xr/xr-manager.js b/src/framework/xr/xr-manager.js index b7353c862a1..29d6c108899 100644 --- a/src/framework/xr/xr-manager.js +++ b/src/framework/xr/xr-manager.js @@ -241,6 +241,9 @@ class XrManager extends EventHandler { this._deviceAvailabilityCheck(); }); this._deviceAvailabilityCheck(); + + this.app.graphicsDevice.on('devicelost', this._onDeviceLost, this); + this.app.graphicsDevice.on('devicerestored', this._onDeviceRestored, this); } } @@ -520,6 +523,35 @@ class XrManager extends EventHandler { }); } + /** + * @private + */ + _onDeviceLost() { + if (this.webglBinding) + this.webglBinding = null; + } + + /** + * @private + */ + _onDeviceRestored() { + if (!this._session) + return; + + this.webglBinding = null; + + if (platform.browser) { + const deviceType = this.app.graphicsDevice.deviceType; + if ((deviceType === DEVICETYPE_WEBGL1 || deviceType === DEVICETYPE_WEBGL2) && window.XRWebGLBinding) { + try { + this.webglBinding = new XRWebGLBinding(this._session, this.app.graphicsDevice.gl); // eslint-disable-line no-undef + } catch (ex) { + this.fire('error', ex); + } + } + } + } + /** * Attempts to end XR session and optionally fires callback when session is ended or failed to * end. @@ -540,6 +572,8 @@ class XrManager extends EventHandler { return; } + this.webglBinding = null; + if (callback) this.once('end', callback); this._session.end(); diff --git a/src/framework/xr/xr-view.js b/src/framework/xr/xr-view.js index 78cbcd37a57..ecb9b688188 100644 --- a/src/framework/xr/xr-view.js +++ b/src/framework/xr/xr-view.js @@ -117,6 +117,8 @@ class XrView { height: this._xrCamera.height, name: `XrView-${this._xrView.eye}-Color` }); + + this._manager.app.graphicsDevice?.on('devicelost', this._onDeviceLost, this); } } } @@ -309,6 +311,11 @@ class XrView { this._positionData[2] = this._viewInvOffMat.data[14]; } + _onDeviceLost() { + this._frameBufferSource = null; + this._frameBuffer = null; + } + /** * @ignore */ From 54c3db8ecb219d5f673cfa6ac386b0baa3e30767 Mon Sep 17 00:00:00 2001 From: mrmaxm Date: Sat, 25 Nov 2023 14:30:04 +0200 Subject: [PATCH 12/19] deprecate XrDepthSensing --- examples/package-lock.json | 2 +- src/framework/xr/xr-depth-sensing.js | 183 ++++----------------------- src/framework/xr/xr-manager.js | 5 +- src/framework/xr/xr-view.js | 67 +++++++++- src/framework/xr/xr-views.js | 18 ++- 5 files changed, 102 insertions(+), 173 deletions(-) diff --git a/examples/package-lock.json b/examples/package-lock.json index 8be323a30c2..e56ae315723 100644 --- a/examples/package-lock.json +++ b/examples/package-lock.json @@ -54,7 +54,7 @@ }, "..": { "name": "playcanvas", - "version": "1.67.0-dev", + "version": "1.68.0-dev", "dev": true, "license": "MIT", "dependencies": { diff --git a/src/framework/xr/xr-depth-sensing.js b/src/framework/xr/xr-depth-sensing.js index e8bbde24d42..7edd1bc2402 100644 --- a/src/framework/xr/xr-depth-sensing.js +++ b/src/framework/xr/xr-depth-sensing.js @@ -1,67 +1,6 @@ import { EventHandler } from '../../core/event-handler.js'; -import { platform } from '../../core/platform.js'; -import { Mat4 } from 
'../../core/math/mat4.js'; - -import { ADDRESS_CLAMP_TO_EDGE, PIXELFORMAT_LA8, FILTER_LINEAR } from '../../platform/graphics/constants.js'; -import { Texture } from '../../platform/graphics/texture.js'; - -import { XRDEPTHSENSINGUSAGE_CPU, XRDEPTHSENSINGUSAGE_GPU } from './constants.js'; /** - * Depth Sensing provides depth information which is reconstructed using the underlying AR system. - * It provides the ability to query depth values (CPU path) or access a depth texture (GPU path). - * Depth information can be used (not limited to) for reconstructing real world geometry, virtual - * object placement, occlusion of virtual objects by real world geometry and more. - * - * ```javascript - * // CPU path - * const depthSensing = app.xr.depthSensing; - * if (depthSensing.available) { - * // get depth in the middle of the screen, value is in meters - * const depth = depthSensing.getDepth(depthSensing.width / 2, depthSensing.height / 2); - * } - * ``` - * - * ```javascript - * // GPU path, attaching texture to material - * material.diffuseMap = depthSensing.texture; - * material.setParameter('matrix_depth_uv', depthSensing.uvMatrix.data); - * material.setParameter('depth_raw_to_meters', depthSensing.rawValueToMeters); - * material.update(); - * - * // update UV transformation matrix on depth texture resize - * depthSensing.on('resize', function () { - * material.setParameter('matrix_depth_uv', depthSensing.uvMatrix.data); - * material.setParameter('depth_raw_to_meters', depthSensing.rawValueToMeters); - * }); - * ``` - * - * ```javascript - * // GLSL shader to unpack depth texture - * varying vec2 vUv0; - * - * uniform sampler2D texture_depthSensingMap; - * uniform mat4 matrix_depth_uv; - * uniform float depth_raw_to_meters; - * - * void main(void) { - * // transform UVs using depth matrix - * vec2 texCoord = (matrix_depth_uv * vec4(vUv0.xy, 0.0, 1.0)).xy; - * - * // get luminance alpha components from depth texture - * vec2 packedDepth = texture2D(texture_depthSensingMap, texCoord).ra; - * - * // unpack into single value in millimeters - * float depth = dot(packedDepth, vec2(255.0, 256.0 * 255.0)) * depth_raw_to_meters; // m - * - * // normalize: 0m to 8m distance - * depth = min(depth / 8.0, 1.0); // 0..1 = 0..8 - * - * // paint scene from black to white based on distance - * gl_FragColor = vec4(depth, depth, depth, 1.0); - * } - * ``` - * * @augments EventHandler * @category XR * @deprecated @@ -87,9 +26,13 @@ class XrDepthSensing extends EventHandler { _available = false; /** - * Create a new XrDepthSensing instance. - * - * @param {import('./xr-manager.js').XrManager} manager - WebXR Manager. + * @type {import('../../core/event-handle.js').EventHandle|null} + * @private + */ + _evtDepthResize = null; + + /** + * @param {import('./xr-manager.js').XrManager} manager - manager * @hideconstructor */ constructor(manager) { @@ -105,83 +48,64 @@ class XrDepthSensing extends EventHandler { } /** - * Fired when depth sensing data becomes available. - * * @event XrDepthSensing#available * @deprecated * @ignore */ /** - * Fired when depth sensing data becomes unavailable. - * * @event XrDepthSensing#unavailable * @deprecated * @ignore */ /** - * Fired when the depth sensing texture been resized. The {@link XrDepthSensing#uvMatrix} needs - * to be updated for relevant shaders. - * * @event XrDepthSensing#resize - * @param {number} width - The new width of the depth texture in pixels. - * @param {number} height - The new height of the depth texture in pixels. 
+ * @param {number} width + * @param {number} height * @deprecated * @ignore - * @example - * depthSensing.on('resize', function () { - * material.setParameter('matrix_depth_uv', depthSensing.uvMatrix); - * }); */ - /** @ignore */ - destroy() { - this._texture.destroy(); - this._texture = null; - } - /** @private */ _onSessionStart() { if (this._views.availableDepth) { this._available = true; - this.fire('available') + this._evtDepthResize = this._views.list[0]?.on('depth:resize', this._onDepthResize, this); + this.fire('available'); } } /** @private */ _onSessionEnd() { + if (this._evtDepthResize) { + this._evtDepthResize.off(); + this._evtDepthResize = null; + } + if (this._available) { this._available = false; this.fire('unavailable'); } } + /** @private */ + _onDepthResize(width, height) { + this.fire('resize', width, height); + } + /** - * Get depth value from depth information in meters. UV is in range of 0..1, with origin in - * top-left corner of a texture. - * - * @param {number} u - U coordinate of pixel in depth texture, which is in range from 0.0 to - * 1.0 (left to right). - * @param {number} v - V coordinate of pixel in depth texture, which is in range from 0.0 to - * 1.0 (top to bottom). + * @param {number} u - u + * @param {number} v - v * @deprecated * @ignore - * @returns {number|null} Depth in meters or null if depth information is currently not - * available. - * @example - * const depth = app.xr.depthSensing.getDepth(u, v); - * if (depth !== null) { - * // depth in meters - * } + * @returns {number|null} number */ getDepth(u, v) { return this._views.list[0]?.getDepth(u, v) ?? null; } /** - * True if Depth Sensing is supported. - * * @type {boolean} * @deprecated * @ignore @@ -191,23 +115,15 @@ class XrDepthSensing extends EventHandler { } /** - * True if depth sensing information is available. - * * @type {boolean} * @deprecated * @ignore - * @example - * if (app.xr.depthSensing.available) { - * const depth = app.xr.depthSensing.getDepth(x, y); - * } */ get available() { return this._views.availableDepth; } /** - * Whether the usage is CPU or GPU. - * * @type {string} * @deprecated * @ignore @@ -217,8 +133,6 @@ class XrDepthSensing extends EventHandler { } /** - * The depth sensing data format. - * * @type {string} * @deprecated * @ignore @@ -228,8 +142,6 @@ class XrDepthSensing extends EventHandler { } /** - * Width of depth texture or 0 if not available. - * * @type {number} * @deprecated * @ignore @@ -239,8 +151,6 @@ class XrDepthSensing extends EventHandler { } /** - * Height of depth texture or 0 if not available. - * * @type {number} * @deprecated * @ignore @@ -249,69 +159,28 @@ class XrDepthSensing extends EventHandler { return this._views.list[0]?.textureDepth?.height ?? 0; } - /* eslint-disable jsdoc/check-examples */ /** - * Texture that contains packed depth information. The format of this texture is - * {@link PIXELFORMAT_LA8}. It is UV transformed based on the underlying AR system which can - * be normalized using {@link XrDepthSensing#uvMatrix}. 
- * - * @type {Texture} + * @type {import('../../platform/graphics/texture.js').Texture|null} * @deprecated * @ignore - * @example - * material.diffuseMap = depthSensing.texture; - * @example - * // GLSL shader to unpack depth texture - * varying vec2 vUv0; - * - * uniform sampler2D texture_depthSensingMap; - * uniform mat4 matrix_depth_uv; - * uniform float depth_raw_to_meters; - * - * void main(void) { - * // transform UVs using depth matrix - * vec2 texCoord = (matrix_depth_uv * vec4(vUv0.xy, 0.0, 1.0)).xy; - * - * // get luminance alpha components from depth texture - * vec2 packedDepth = texture2D(texture_depthSensingMap, texCoord).ra; - * - * // unpack into single value in millimeters - * float depth = dot(packedDepth, vec2(255.0, 256.0 * 255.0)) * depth_raw_to_meters; // m - * - * // normalize: 0m to 8m distance - * depth = min(depth / 8.0, 1.0); // 0..1 = 0m..8m - * - * // paint scene from black to white based on distance - * gl_FragColor = vec4(depth, depth, depth, 1.0); - * } */ get texture() { return this._views.list[0]?.textureDepth; } - /* eslint-enable jsdoc/check-examples */ /** - * 4x4 matrix that should be used to transform depth texture UVs to normalized UVs in a shader. - * It is updated when the depth texture is resized. Refer to {@link XrDepthSensing#resize}. - * - * @type {Mat4} + * @type {import('../../core/math/mat4.js').Mat4} * @deprecated * @ignore - * @example - * material.setParameter('matrix_depth_uv', depthSensing.uvMatrix.data); */ get uvMatrix() { return this._views.list[0]?.depthUvMatrix; } /** - * Multiply this coefficient number by raw depth value to get depth in meters. - * * @type {number} * @deprecated * @ignore - * @example - * material.setParameter('depth_raw_to_meters', depthSensing.rawValueToMeters); */ get rawValueToMeters() { return this._views.list[0]?.depthValueToMeters ?? 0; diff --git a/src/framework/xr/xr-manager.js b/src/framework/xr/xr-manager.js index 27f625c0d55..0f3358ec8ad 100644 --- a/src/framework/xr/xr-manager.js +++ b/src/framework/xr/xr-manager.js @@ -319,10 +319,7 @@ class XrManager extends EventHandler { * * @ignore */ - destroy() { - this.depthSensing.destroy(); - this.depthSensing = null; - } + destroy() { } /** * Attempts to start XR session for provided {@link CameraComponent} and optionally fires diff --git a/src/framework/xr/xr-view.js b/src/framework/xr/xr-view.js index 759dad70bb9..f1c1b2cd981 100644 --- a/src/framework/xr/xr-view.js +++ b/src/framework/xr/xr-view.js @@ -8,6 +8,8 @@ import { ADDRESS_CLAMP_TO_EDGE, FILTER_LINEAR, PIXELFORMAT_RGB8 } from '../../pl /** * Represents XR View which represents a screen (mobile phone context) or an eye (HMD context). + * It provides access to view's color and depth information based on capabilities of underlying + * AR system. * * @category XR */ @@ -166,6 +168,19 @@ class XrView extends EventHandler { this._manager.app.graphicsDevice?.on('devicelost', this._onDeviceLost, this); } + /** + * Fired when the depth sensing texture been resized. The {@link XrView#depthUvMatrix} needs + * to be updated for relevant shaders. + * + * @event XrView#depth:resize + * @param {number} width - The new width of the depth texture in pixels. + * @param {number} height - The new height of the depth texture in pixels. + * @example + * view.on('depth:resize', function () { + * material.setParameter('matrix_depth_uv', view.depthUvMatrix); + * }); + */ + /** * Texture associated with this view's camera color. Equals to null if camera color is * not available or not supported. 
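A short sketch of consuming a view's camera color texture and the 'depth:resize' event documented above; `material` is assumed to exist in the hosting application, and the snippet is illustrative rather than applied by this patch:

    let bound = false;
    app.xr.on('end', () => { bound = false; });

    app.on('update', () => {
        if (bound || !app.xr.active) return;
        const view = app.xr.views.list[0];
        if (!view) return;
        bound = true;

        if (view.textureColor) {              // present when 'camera-access' was granted
            material.diffuseMap = view.textureColor;
            material.update();
        }

        // the depth UV transform only changes when the depth texture is resized
        view.on('depth:resize', () => {
            material.setParameter('matrix_depth_uv', view.depthUvMatrix.data);
        });
    });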
@@ -176,15 +191,52 @@ class XrView extends EventHandler { return this._textureColor; } + /* eslint-disable jsdoc/check-examples */ /** - * Texture associated with this view's camera depth. Equals to null if camera depth is - * not available or not supported. + * Texture that contains packed depth information which is reconstructed using the underlying + * AR system. This texture can be used (not limited to) for reconstructing real world + * geometry, virtual object placement, occlusion of virtual object by the real world geometry, + * and more. + * The format of this texture is {@link PIXELFORMAT_LA8} or {@link PIXELFORMAT_R32F} + * based on {@link XrViews#depthFormat}. It is UV transformed based on the underlying AR + * system which can be normalized using {@link XrView#depthUvMatrix}. Equals to null if camera + * depth is not supported. * * @type {Texture|null} + * @example + * // GPU path, attaching texture to material + * material.setParameter('texture_depthSensingMap', view.textureDepth); + * material.setParameter('matrix_depth_uv', view.depthUvMatrix.data); + * material.setParameter('depth_to_meters', view.depthValueToMeters); + * @example + * // GLSL shader to unpack depth texture + * varying vec2 vUv0; + * + * uniform sampler2D texture_depthSensingMap; + * uniform mat4 matrix_depth_uv; + * uniform float depth_to_meters; + * + * void main(void) { + * // transform UVs using depth matrix + * vec2 texCoord = (matrix_depth_uv * vec4(vUv0.xy, 0.0, 1.0)).xy; + * + * // get luminance alpha components from depth texture + * vec2 packedDepth = texture2D(texture_depthSensingMap, texCoord).ra; + * + * // unpack into single value in millimeters + * float depth = dot(packedDepth, vec2(255.0, 256.0 * 255.0)) * depth_to_meters; // m + * + * // normalize: 0m to 8m distance + * depth = min(depth / 8.0, 1.0); // 0..1 = 0m..8m + * + * // paint scene from black to white based on distance + * gl_FragColor = vec4(depth, depth, depth, 1.0); + * } */ get textureDepth() { return this._textureDepth; } + /* eslint-enable jsdoc/check-examples */ /** * 4x4 matrix that should be used to transform depth texture UVs to normalized UVs in a shader. @@ -203,7 +255,7 @@ class XrView extends EventHandler { * * @type {number} * @example - * material.setParameter('depth_raw_to_meters', view.depthValueToMeters); + * material.setParameter('depth_to_meters', view.depthValueToMeters); */ get depthValueToMeters() { return this._depthInfo?.rawValueToMeters || 0; @@ -282,7 +334,7 @@ class XrView extends EventHandler { } /** - * @param {*} frame - XRFrame from requestAnimationFrame callback. + * @param {XRFrame} frame - XRFrame from requestAnimationFrame callback. * @param {XRView} xrView - XRView from WebXR API. * @ignore */ @@ -368,6 +420,7 @@ class XrView extends EventHandler { } /** + * @param {XRFrame} frame - XRFrame from requestAnimationFrame callback. 
* @private */ _updateDepth(frame) { @@ -380,6 +433,8 @@ class XrView extends EventHandler { if (!infoSource) return; const depthInfo = infoSource.getDepthInformation(this._xrView); + if (!depthInfo) return; + let matrixDirty = !this._depthInfo !== !depthInfo; this._depthInfo = depthInfo; @@ -409,7 +464,7 @@ class XrView extends EventHandler { if (this._depthInfo) { if (gpu) { // gpu - console.log('not implemented') + console.log('not implemented'); } else { // cpu this._textureDepth._levels[0] = new Uint8Array(this._depthInfo.data); @@ -421,7 +476,7 @@ class XrView extends EventHandler { this._textureDepth.upload(); } - if (resized) this.fire('depthResize', width, height); + if (resized) this.fire('depth:resize', width, height); } /** diff --git a/src/framework/xr/xr-views.js b/src/framework/xr/xr-views.js index 897c5b24d5e..90bc30e84f5 100644 --- a/src/framework/xr/xr-views.js +++ b/src/framework/xr/xr-views.js @@ -1,7 +1,7 @@ import { platform } from '../../core/platform.js'; import { EventHandler } from "../../core/event-handler.js"; import { XrView } from "./xr-view.js"; -import { XRTYPE_AR } from "./constants.js"; +import { XRTYPE_AR, XRDEPTHSENSINGUSAGE_GPU, XRDEPTHSENSINGFORMAT_L8A8, XRDEPTHSENSINGFORMAT_F32 } from "./constants.js"; import { PIXELFORMAT_LA8, PIXELFORMAT_R32F } from '../../platform/graphics/constants.js'; /** @@ -76,8 +76,8 @@ class XrViews extends EventHandler { * @private */ _depthFormats = { - 'luminance-alpha': PIXELFORMAT_LA8, - 'float32': PIXELFORMAT_R32F + [XRDEPTHSENSINGFORMAT_L8A8]: PIXELFORMAT_LA8, + [XRDEPTHSENSINGFORMAT_F32]: PIXELFORMAT_R32F }; /** @@ -173,9 +173,17 @@ class XrViews extends EventHandler { * @ignore */ get depthGpuOptimized() { - return this._depthUsage === 'gpu-optimized'; + return this._depthUsage === XRDEPTHSENSINGUSAGE_GPU; } - + + /** + * @type {string} + * @ignore + */ + get depthFormat() { + return this._depthFormat; + } + /** * The depth sensing pixel format. Currently supported either: * {@link PIXELFORMAT_LA8} or {@link PIXELFORMAT_R32F} From 25e19d485c7b892c4c6dcef7b503ec4e5a899845 Mon Sep 17 00:00:00 2001 From: mrmaxm Date: Sat, 25 Nov 2023 14:55:42 +0200 Subject: [PATCH 13/19] better backwards compatibility --- src/framework/xr/xr-depth-sensing.js | 27 +++++++++++++++++++++------ src/framework/xr/xr-manager.js | 3 +++ 2 files changed, 24 insertions(+), 6 deletions(-) diff --git a/src/framework/xr/xr-depth-sensing.js b/src/framework/xr/xr-depth-sensing.js index 7edd1bc2402..c385a79fa32 100644 --- a/src/framework/xr/xr-depth-sensing.js +++ b/src/framework/xr/xr-depth-sensing.js @@ -1,4 +1,5 @@ import { EventHandler } from '../../core/event-handler.js'; +import { Mat4 } from '../../core/math/mat4.js'; /** * @augments EventHandler @@ -31,6 +32,12 @@ class XrDepthSensing extends EventHandler { */ _evtDepthResize = null; + /** + * @type {Mat4} + * @private + */ + _uvMatrix = Mat4.IDENTITY.clone(); + /** * @param {import('./xr-manager.js').XrManager} manager - manager * @hideconstructor @@ -69,11 +76,8 @@ class XrDepthSensing extends EventHandler { /** @private */ _onSessionStart() { - if (this._views.availableDepth) { - this._available = true; + if (this._views.availableDepth) this._evtDepthResize = this._views.list[0]?.on('depth:resize', this._onDepthResize, this); - this.fire('available'); - } } /** @private */ @@ -105,6 +109,17 @@ class XrDepthSensing extends EventHandler { return this._views.list[0]?.getDepth(u, v) ?? 
null; } + /** + * @deprecated + * @ignore + */ + update() { + if (this._manager.session && this.supported && this._views.availableDepth && this._views.list.length && !this._available) { + this._available = true; + this.fire('available'); + } + } + /** * @type {boolean} * @deprecated @@ -169,12 +184,12 @@ class XrDepthSensing extends EventHandler { } /** - * @type {import('../../core/math/mat4.js').Mat4} + * @type {Mat4} * @deprecated * @ignore */ get uvMatrix() { - return this._views.list[0]?.depthUvMatrix; + return this._views.list[0]?.depthUvMatrix ?? this._uvMatrix; } /** diff --git a/src/framework/xr/xr-manager.js b/src/framework/xr/xr-manager.js index 0f3358ec8ad..b1d5b14bccc 100644 --- a/src/framework/xr/xr-manager.js +++ b/src/framework/xr/xr-manager.js @@ -860,6 +860,9 @@ class XrManager extends EventHandler { if (this.planeDetection.supported) this.planeDetection.update(frame); + + if (this.depthSensing.supported) + this.depthSensing.update(); } this.fire('update', frame); From 708dfc58132e25ad05114a538fa707c6d1a31c27 Mon Sep 17 00:00:00 2001 From: mrmaxm Date: Sat, 25 Nov 2023 15:11:59 +0200 Subject: [PATCH 14/19] depth information should not be available outside of its XRFrame --- src/framework/xr/xr-view.js | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/framework/xr/xr-view.js b/src/framework/xr/xr-view.js index f1c1b2cd981..10ba0bb1c74 100644 --- a/src/framework/xr/xr-view.js +++ b/src/framework/xr/xr-view.js @@ -433,7 +433,10 @@ class XrView extends EventHandler { if (!infoSource) return; const depthInfo = infoSource.getDepthInformation(this._xrView); - if (!depthInfo) return; + if (!depthInfo) { + this._depthInfo = null; + return; + } let matrixDirty = !this._depthInfo !== !depthInfo; this._depthInfo = depthInfo; From ba9c19d1e1bbcd3060531ca4cf6324f6fa1b2722 Mon Sep 17 00:00:00 2001 From: mrmaxm Date: Sat, 25 Nov 2023 17:58:47 +0200 Subject: [PATCH 15/19] clean objects --- src/framework/xr/xr-view.js | 15 +++++++++++++-- src/framework/xr/xr-views.js | 1 - 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/src/framework/xr/xr-view.js b/src/framework/xr/xr-view.js index 10ba0bb1c74..275eb17c035 100644 --- a/src/framework/xr/xr-view.js +++ b/src/framework/xr/xr-view.js @@ -430,7 +430,10 @@ class XrView extends EventHandler { const gpu = this._manager.views.depthGpuOptimized; const infoSource = gpu ? 
this._manager.webglBinding : frame; - if (!infoSource) return; + if (!infoSource) { + this._depthInfo = null; + return; + } const depthInfo = infoSource.getDepthInformation(this._xrView); if (!depthInfo) { @@ -467,7 +470,7 @@ class XrView extends EventHandler { if (this._depthInfo) { if (gpu) { // gpu - console.log('not implemented'); + // TODO } else { // cpu this._textureDepth._levels[0] = new Uint8Array(this._depthInfo.data); @@ -506,6 +509,7 @@ class XrView extends EventHandler { _onDeviceLost() { this._frameBufferSource = null; this._frameBuffer = null; + this._depthInfo = null; } /** @@ -535,11 +539,18 @@ class XrView extends EventHandler { * @ignore */ destroy() { + this._depthInfo = null; + if (this._textureColor) { this._textureColor.destroy(); this._textureColor = null; } + if (this._textureDepth) { + this._textureDepth.destroy(); + this._textureDepth = null; + } + if (this._frameBufferSource) { const gl = this._manager.app.graphicsDevice.gl; diff --git a/src/framework/xr/xr-views.js b/src/framework/xr/xr-views.js index 90bc30e84f5..7cc2e478900 100644 --- a/src/framework/xr/xr-views.js +++ b/src/framework/xr/xr-views.js @@ -189,7 +189,6 @@ class XrViews extends EventHandler { * {@link PIXELFORMAT_LA8} or {@link PIXELFORMAT_R32F} * * @type {number|null} - * @ignore */ get depthPixelFormat() { return this._depthFormats[this._depthFormat] ?? null; From 3ebb5e6295fa78b23645c83a262f4a0074f9979f Mon Sep 17 00:00:00 2001 From: mrmaxm Date: Wed, 29 Nov 2023 13:31:12 +0200 Subject: [PATCH 16/19] fixes --- src/framework/xr/xr-manager.js | 29 ----------------------------- src/framework/xr/xr-view.js | 8 +++++--- 2 files changed, 5 insertions(+), 32 deletions(-) diff --git a/src/framework/xr/xr-manager.js b/src/framework/xr/xr-manager.js index 976f8e17e73..f4ee3912b36 100644 --- a/src/framework/xr/xr-manager.js +++ b/src/framework/xr/xr-manager.js @@ -521,35 +521,6 @@ class XrManager extends EventHandler { }); } - /** - * @private - */ - _onDeviceLost() { - if (this.webglBinding) - this.webglBinding = null; - } - - /** - * @private - */ - _onDeviceRestored() { - if (!this._session) - return; - - this.webglBinding = null; - - if (platform.browser) { - const deviceType = this.app.graphicsDevice.deviceType; - if ((deviceType === DEVICETYPE_WEBGL1 || deviceType === DEVICETYPE_WEBGL2) && window.XRWebGLBinding) { - try { - this.webglBinding = new XRWebGLBinding(this._session, this.app.graphicsDevice.gl); // eslint-disable-line no-undef - } catch (ex) { - this.fire('error', ex); - } - } - } - } - /** * Attempts to end XR session and optionally fires callback when session is ended or failed to * end. 
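
Note on usage: depth usage and data format are negotiated with the user agent, so the values exposed by XrViews only settle once a session is running. A sketch of requesting GPU-optimized depth sensing and inspecting the result, using only the session options and getters added in this patch (the `camera` entity is assumed to carry a camera component, as in the examples):

// Request AR with GPU-optimized depth sensing, preferring the float32 format.
camera.camera.startXr(pc.XRTYPE_AR, pc.XRSPACE_LOCALFLOOR, {
    depthSensing: {
        usagePreference: pc.XRDEPTHSENSINGUSAGE_GPU,
        dataFormatPreference: pc.XRDEPTHSENSINGFORMAT_F32
    },
    callback: (err) => {
        if (err) console.error('failed to start AR session:', err.message);
    }
});

app.xr.on('start', () => {
    // depthFormat is the negotiated WebXR string ('luminance-alpha' or 'float32');
    // depthPixelFormat maps it to PIXELFORMAT_LA8 / PIXELFORMAT_R32F, or null when
    // depth sensing was not granted.
    console.log('usage is GPU optimized:', app.xr.views.depthGpuOptimized);
    console.log('format:', app.xr.views.depthFormat, '->', app.xr.views.depthPixelFormat);
});
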
diff --git a/src/framework/xr/xr-view.js b/src/framework/xr/xr-view.js index 275eb17c035..ad5cd0057f1 100644 --- a/src/framework/xr/xr-view.js +++ b/src/framework/xr/xr-view.js @@ -130,12 +130,14 @@ class XrView extends EventHandler { this._manager = manager; this._xrView = xrView; + const device = this._manager.app.graphicsDevice; + if (this._manager.views.supportedColor) { this._xrCamera = this._xrView.camera; // color texture if (this._manager.views.availableColor && this._xrCamera) { - this._textureColor = new Texture(this._manager.app.graphicsDevice, { + this._textureColor = new Texture(device, { format: PIXELFORMAT_RGB8, mipmaps: false, addressU: ADDRESS_CLAMP_TO_EDGE, @@ -151,7 +153,7 @@ class XrView extends EventHandler { } if (this._manager.views.supportedDepth && this._manager.views.availableDepth) { - this._textureDepth = new Texture(this._manager.app.graphicsDevice, { + this._textureDepth = new Texture(device, { format: this._manager.views.depthPixelFormat, mipmaps: false, addressU: ADDRESS_CLAMP_TO_EDGE, @@ -165,7 +167,7 @@ class XrView extends EventHandler { } if (this._textureColor || this._textureDepth) - this._manager.app.graphicsDevice?.on('devicelost', this._onDeviceLost, this); + device.on('devicelost', this._onDeviceLost, this); } /** From 44b9d8613a63c933884b14d8a67dd793376e5907 Mon Sep 17 00:00:00 2001 From: mrmaxm Date: Mon, 25 Dec 2023 18:10:43 +0200 Subject: [PATCH 17/19] GPU path for XR Depth Sensing --- examples/src/examples/xr/ar-camera-depth.mjs | 156 ++++++++++++------- src/framework/xr/xr-view.js | 13 +- src/framework/xr/xr-views.js | 2 +- src/scene/renderer/forward-renderer.js | 1 + src/scene/renderer/renderer.js | 1 + 5 files changed, 113 insertions(+), 60 deletions(-) diff --git a/examples/src/examples/xr/ar-camera-depth.mjs b/examples/src/examples/xr/ar-camera-depth.mjs index e1d24ad3c91..3c4f6acd356 100644 --- a/examples/src/examples/xr/ar-camera-depth.mjs +++ b/examples/src/examples/xr/ar-camera-depth.mjs @@ -50,68 +50,118 @@ async function example({ canvas }) { app.start(); // create camera - const c = new pc.Entity(); - c.addComponent('camera', { + const camera = new pc.Entity(); + camera.addComponent('camera', { clearColor: new pc.Color(0, 0, 0, 0), farClip: 10000 }); - app.root.addChild(c); + app.root.addChild(camera); - const l = new pc.Entity(); - l.addComponent("light", { - type: "spot", - range: 30 - }); - l.translate(0, 10, 0); - app.root.addChild(l); + let shaderUpdated = false; + let shaderDepthArray = null; + let shaderDepthFloat = null; - const material = new pc.StandardMaterial(); + const vertShader = /* glsl */ ` + attribute vec3 aPosition; + attribute vec2 aUv0; + uniform mat4 matrix_model; + uniform mat4 matrix_viewProjection; + varying vec2 vUv0; + void main(void) + { + vec4 screenPosition = matrix_viewProjection * matrix_model * vec4(aPosition, 1.0); + gl_Position = screenPosition; + vUv0 = screenPosition.xy; + } + `; - const materialDepth = new pc.Material(); - materialDepth.cull = pc.CULLFACE_NONE; - materialDepth.shader = app.scene.immediate.getShader('textureDepthSensing', /* glsl */ ` - varying vec2 uv0; - uniform sampler2D colorMap; + const fragShader = /* glsl */ ` + varying vec2 vUv0; + uniform vec4 uScreenSize; uniform mat4 matrix_depth_uv; uniform float depth_raw_to_meters; + #ifdef XRDEPTH_ARRAY + uniform int view_index; + uniform highp sampler2DArray depthMap; + #else + uniform sampler2D depthMap; + #endif + void main (void) { - vec2 texCoord = (matrix_depth_uv * vec4(uv0.xy, 0.0, 1.0)).xy; - vec2 packedDepth = 
texture2D(colorMap, texCoord).ra; - float depth = dot(packedDepth, vec2(255.0, 256.0 * 255.0)) * depth_raw_to_meters; // m - depth = 1.0 - min(depth / 8.0, 1.0); // 0..1 = 0m..8m + vec2 uvScreen = gl_FragCoord.xy * uScreenSize.zw; + + // use texture array for multi-view + #ifdef XRDEPTH_ARRAY + uvScreen = uvScreen * vec2(2.0, 1.0) - vec2(view_index, 0.0); + vec3 uv = vec3((matrix_depth_uv * vec4(uvScreen.xy, 0.0, 1.0)).xy, view_index); + #else + vec2 uv = (matrix_depth_uv * vec4(uvScreen.x, 1.0 - uvScreen.y, 0.0, 1.0)).xy; + #endif + + #ifdef XRDEPTH_FLOAT + float depth = texture2D(depthMap, uv).r; + #else + // unpack from AlphaLuminance + vec2 packedDepth = texture2D(depthMap, uv).ra; + float depth = dot(packedDepth, vec2(255.0, 256.0 * 255.0)); + #endif + + depth *= depth_raw_to_meters; + + // depth = 1.0 - min(depth / 2.0, 1.0); // 0..1 = 0m..4m gl_FragColor = vec4(depth, depth, depth, 1.0); - }`); - materialDepth.update(); + }`; + + const materialDepth = new pc.Material(); /** - * @param {number} x - The x coordinate. - * @param {number} y - The y coordinate. - * @param {number} z - The z coordinate. + * @param {boolean} array - If the depth information uses array texture. + * @param {boolean} float - If the depth information uses F32R texture. */ - const createCube = function (x, y, z) { - const cube = new pc.Entity(); - cube.addComponent("render", { - type: "box" - }); - cube.render.material = material; - cube.setLocalScale(0.5, 0.5, 0.5); - cube.translate(x * 0.5, y, z * 0.5); - app.root.addChild(cube); + const updateShader = (array, float) => { + if (shaderDepthArray === array && shaderDepthFloat === float) + return; + + shaderDepthArray = array; + shaderDepthFloat = float; + + const key = 'textureDepthSensing_' + array + float; + let frag = fragShader; + + if (shaderDepthArray) + frag = '#define XRDEPTH_ARRAY\n' + frag; + + if (shaderDepthArray) + frag = '#define XRDEPTH_FLOAT\n' + frag; + + materialDepth.shader = pc.createShaderFromCode(app.graphicsDevice, + vertShader, + frag, + key, { + aPosition: pc.SEMANTIC_POSITION, + aUv0: pc.SEMANTIC_TEXCOORD0 + }); + materialDepth.clearVariants(); + materialDepth.update(); }; - // create a grid of cubes - const SIZE = 4; - for (let x = 0; x < SIZE; x++) { - for (let y = 0; y < SIZE; y++) { - createCube(2 * x - SIZE, 0.25, 2 * y - SIZE); - } - } + updateShader(false, false); + + const plane = new pc.Entity(); + plane.addComponent('render', { + type: 'plane' + }); + plane.render.material = materialDepth; + plane.render.meshInstances[0].cull = false; + plane.setLocalPosition(0, 0, -1); + plane.setLocalEulerAngles(90, 0, 0); + camera.addChild(plane); if (app.xr.supported) { const activate = function () { if (app.xr.isAvailable(pc.XRTYPE_AR)) { - c.camera.startXr(pc.XRTYPE_AR, pc.XRSPACE_LOCALFLOOR, { + camera.camera.startXr(pc.XRTYPE_AR, pc.XRSPACE_LOCALFLOOR, { depthSensing: { // request access to camera depth usagePreference: pc.XRDEPTHSENSINGUSAGE_GPU, dataFormatPreference: pc.XRDEPTHSENSINGFORMAT_F32 @@ -137,7 +187,7 @@ async function example({ canvas }) { activate(); } else { // otherwise reset camera - c.camera.endXr(); + camera.camera.endXr(); } evt.event.preventDefault(); @@ -158,6 +208,7 @@ async function example({ canvas }) { console.log('depth texture format', app.xr.views.depthPixelFormat); }); app.xr.on('end', function () { + shaderUpdated = false; message("Immersive AR session has ended"); }); app.xr.on('available:' + pc.XRTYPE_AR, function (available) { @@ -175,30 +226,23 @@ async function example({ canvas }) { app.on('update', 
() => { // if camera depth is available if (app.xr.views.availableDepth) { + if (!shaderUpdated && app.xr.active) { + shaderUpdated = true; + updateShader(app.xr.views.list.length > 1, app.xr.views.depthPixelFormat === pc.PIXELFORMAT_R32F); + } + for(let i = 0; i < app.xr.views.list.length; i++) { const view = app.xr.views.list[i]; if (!view.textureDepth) // check if depth texture is available continue; - materialDepth.setParameter('colorMap', view.textureDepth); + materialDepth.setParameter('depthMap', view.textureDepth); materialDepth.setParameter('matrix_depth_uv', view.depthUvMatrix.data); materialDepth.setParameter('depth_raw_to_meters', view.depthValueToMeters); - - // debug draw camera depth texture on the screen - app.drawTexture(0.5, -0.5, 1, 1, view.textureDepth, materialDepth); } } }); - app.xr.on('end', () => { - if (!material.diffuseMap) - return; - - // clear camera depth texture when XR session ends - material.diffuseMap = null; - material.update(); - }); - if (!app.xr.isAvailable(pc.XRTYPE_AR)) { message("Immersive AR is not available"); } else if (!app.xr.views.supportedDepth) { diff --git a/src/framework/xr/xr-view.js b/src/framework/xr/xr-view.js index ad5cd0057f1..e766c4f9b8a 100644 --- a/src/framework/xr/xr-view.js +++ b/src/framework/xr/xr-view.js @@ -122,9 +122,10 @@ class XrView extends EventHandler { * @param {import('./xr-manager.js').XrManager} manager - WebXR Manager. * @param {XRView} xrView - [XRView](https://developer.mozilla.org/en-US/docs/Web/API/XRView) * object that is created by WebXR API. + * @param {number} viewsCount - Number of views available for the session. * @hideconstructor */ - constructor(manager, xrView) { + constructor(manager, xrView, viewsCount) { super(); this._manager = manager; @@ -148,13 +149,13 @@ class XrView extends EventHandler { height: this._xrCamera.height, name: `XrView-${this._xrView.eye}-Color` }); - } } if (this._manager.views.supportedDepth && this._manager.views.availableDepth) { this._textureDepth = new Texture(device, { format: this._manager.views.depthPixelFormat, + arrayLength: (viewsCount === 1) ? 
0 : viewsCount, mipmaps: false, addressU: ADDRESS_CLAMP_TO_EDGE, addressV: ADDRESS_CLAMP_TO_EDGE, @@ -164,6 +165,10 @@ class XrView extends EventHandler { height: 4, name: `XrView-${this._xrView.eye}-Depth` }); + + for (let i = 0; i < this._textureDepth._levels.length; i++) { + this._textureDepth._levels[i] = this._emptyDepthBuffer; + } } if (this._textureColor || this._textureDepth) @@ -472,7 +477,9 @@ class XrView extends EventHandler { if (this._depthInfo) { if (gpu) { // gpu - // TODO + if (this._depthInfo.texture) { + this._textureDepth.impl._glTexture = this._depthInfo.texture; + } } else { // cpu this._textureDepth._levels[0] = new Uint8Array(this._depthInfo.data); diff --git a/src/framework/xr/xr-views.js b/src/framework/xr/xr-views.js index 7cc2e478900..f89596ac31c 100644 --- a/src/framework/xr/xr-views.js +++ b/src/framework/xr/xr-views.js @@ -217,7 +217,7 @@ class XrViews extends EventHandler { if (!view) { // add new view - view = new XrView(this._manager, xrView); + view = new XrView(this._manager, xrView, xrViews.length); this._index.set(eye, view); this._list.push(view); view.update(frame, xrView); diff --git a/src/scene/renderer/forward-renderer.js b/src/scene/renderer/forward-renderer.js index e888fb0802e..84e632466d4 100644 --- a/src/scene/renderer/forward-renderer.js +++ b/src/scene/renderer/forward-renderer.js @@ -636,6 +636,7 @@ class ForwardRenderer extends Renderer { this.viewId3.setValue(view.viewMat3.data); this.viewProjId.setValue(view.projViewOffMat.data); this.viewPosId.setValue(view.positionData); + this.viewIndex.setValue(v); if (v === 0) { this.drawInstance(device, drawCall, mesh, style, true); diff --git a/src/scene/renderer/renderer.js b/src/scene/renderer/renderer.js index 33b8a32c87f..576d3e1bc12 100644 --- a/src/scene/renderer/renderer.js +++ b/src/scene/renderer/renderer.js @@ -191,6 +191,7 @@ class Renderer { this.farClipId = scope.resolve('camera_far'); this.cameraParams = new Float32Array(4); this.cameraParamsId = scope.resolve('camera_params'); + this.viewIndex = scope.resolve('view_index'); this.alphaTestId = scope.resolve('alpha_ref'); this.opacityMapId = scope.resolve('texture_opacityMap'); From ad30af70d44a02ee15ca75d373b35bf4e9c32a56 Mon Sep 17 00:00:00 2001 From: mrmaxm Date: Thu, 18 Jan 2024 13:56:28 +0200 Subject: [PATCH 18/19] example of depth sensing for object placement --- .../examples/xr/ar-depth-sensing-placer.mjs | 200 ++++++++++++++++++ examples/src/examples/xr/index.mjs | 1 + 2 files changed, 201 insertions(+) create mode 100644 examples/src/examples/xr/ar-depth-sensing-placer.mjs diff --git a/examples/src/examples/xr/ar-depth-sensing-placer.mjs b/examples/src/examples/xr/ar-depth-sensing-placer.mjs new file mode 100644 index 00000000000..0656008ab8e --- /dev/null +++ b/examples/src/examples/xr/ar-depth-sensing-placer.mjs @@ -0,0 +1,200 @@ +import * as pc from 'playcanvas'; + +/** + * @typedef {import('../../options.mjs').ExampleOptions} ExampleOptions + * @param {import('../../options.mjs').ExampleOptions} options - The example options. + * @returns {Promise} The example application. + */ +async function example({ canvas }) { + /** + * @param {string} msg - The message. 
+ */ + const message = function (msg) { + /** @type {HTMLDivElement} */ + let el = document.querySelector('.message'); + if (!el) { + el = document.createElement('div'); + el.classList.add('message'); + el.style.position = 'absolute'; + el.style.bottom = '96px'; + el.style.right = '0'; + el.style.padding = '8px 16px'; + el.style.fontFamily = 'Helvetica, Arial, sans-serif'; + el.style.color = '#fff'; + el.style.backgroundColor = 'rgba(0, 0, 0, 0.5)'; + document.body.append(el); + } + el.textContent = msg; + }; + + const app = new pc.Application(canvas, { + mouse: new pc.Mouse(canvas), + touch: new pc.TouchDevice(canvas), + keyboard: new pc.Keyboard(window), + graphicsDeviceOptions: { alpha: true } + }); + + app.setCanvasFillMode(pc.FILLMODE_FILL_WINDOW); + app.setCanvasResolution(pc.RESOLUTION_AUTO); + + // Ensure canvas is resized when window changes size + const resize = () => app.resizeCanvas(); + window.addEventListener('resize', resize); + app.on('destroy', () => { + window.removeEventListener('resize', resize); + }); + + // use device pixel ratio + app.graphicsDevice.maxPixelRatio = window.devicePixelRatio; + + app.start(); + + // create camera + const camera = new pc.Entity(); + camera.addComponent('camera', { + clearColor: new pc.Color(0, 0, 0, 0), + farClip: 10000 + }); + app.root.addChild(camera); + + // light + const l = new pc.Entity(); + l.addComponent("light", { + type: "spot", + range: 30 + }); + l.translate(0, 10, 0); + app.root.addChild(l); + + // placeable cone + const cone = new pc.Entity(); + cone.addComponent('render', { + type: 'cone' + }); + cone.setLocalScale(0.1, 0.1, 0.1); + app.root.addChild(cone); + + const tmpVec3A = new pc.Vec3(); + + if (app.xr.supported) { + const activate = function () { + if (app.xr.isAvailable(pc.XRTYPE_AR)) { + camera.camera.startXr(pc.XRTYPE_AR, pc.XRSPACE_LOCALFLOOR, { + depthSensing: { // request access to camera depth + usagePreference: pc.XRDEPTHSENSINGUSAGE_GPU, + dataFormatPreference: pc.XRDEPTHSENSINGFORMAT_F32 + }, + callback: function (err) { + if (err) message("WebXR Immersive AR failed to start: " + err.message); + } + }); + } else { + message("Immersive AR is not available"); + } + }; + + app.mouse.on("mousedown", function () { + if (!app.xr.active) + activate(); + }); + + if (app.touch) { + app.touch.on("touchend", function (evt) { + if (!app.xr.active) { + // if not in VR, activate + activate(); + } else { + // otherwise reset camera + camera.camera.endXr(); + } + + evt.event.preventDefault(); + evt.event.stopPropagation(); + }); + } + + // end session by keyboard ESC + app.keyboard.on('keydown', function (evt) { + if (evt.key === pc.KEY_ESCAPE && app.xr.active) { + app.xr.end(); + } + }); + + app.xr.on('start', function () { + message("Immersive AR session has started"); + console.log('depth gpu optimized', app.xr.views.depthGpuOptimized); + console.log('depth texture format', app.xr.views.depthPixelFormat); + }); + app.xr.on('end', function () { + message("Immersive AR session has ended"); + }); + app.xr.on('available:' + pc.XRTYPE_AR, function (available) { + if (available) { + if (!app.xr.views.supportedDepth) { + message("AR Camera Depth is not supported"); + } else { + message("Touch screen to start AR session"); + } + } else { + message("Immersive AR is not available"); + } + }); + + let selecting = false; + let selectingTime = 0; + const selectingDelay = 100; + + app.xr.input.on('select', () => { + selecting = true; + selectingTime = Date.now(); + }); + + app.on('update', () => { + // if camera depth is available + 
if (app.xr.views.availableDepth) { + const view = app.xr.views.list[0]; + const depth = view.getDepth(0.5, 0.5); + + if (depth) { + tmpVec3A.copy(camera.forward); + tmpVec3A.mulScalar(depth); + tmpVec3A.add(camera.getPosition()); + tmpVec3A.y += 0.05; // offset based on cone scale + + cone.enabled = true; + cone.setLocalPosition(tmpVec3A); + + if (selecting && (Date.now() - selectingTime) < selectingDelay) { + selecting = false; + const obj = cone.clone(); + app.root.addChild(obj); + } + } else { + cone.enabled = false; + } + } else { + cone.enabled = false; + } + }); + + if (!app.xr.isAvailable(pc.XRTYPE_AR)) { + message("Immersive AR is not available"); + } else if (!app.xr.views.supportedDepth) { + message("AR Camera Depth is not supported"); + } else { + message("Touch screen to start AR session"); + } + } else { + message("WebXR is not supported"); + + } + return app; +} + +class ArDepthSensingPlacerExample { + static CATEGORY = 'XR'; + static NAME = 'AR Depth Sensing Placer'; + static example = example; +} + +export { ArDepthSensingPlacerExample }; diff --git a/examples/src/examples/xr/index.mjs b/examples/src/examples/xr/index.mjs index b474fe04beb..b7b1280f5a2 100644 --- a/examples/src/examples/xr/index.mjs +++ b/examples/src/examples/xr/index.mjs @@ -1,6 +1,7 @@ export * from "./ar-basic.mjs"; export * from "./ar-camera-color.mjs"; export * from "./ar-camera-depth.mjs"; +export * from "./ar-depth-sensing-placer.mjs"; export * from "./ar-hit-test.mjs"; export * from "./ar-hit-test-anchors.mjs"; export * from "./ar-anchors-persistence.mjs"; From cf2291780a31fe31ce31fffee913bacd9a2177e6 Mon Sep 17 00:00:00 2001 From: mrmaxm Date: Fri, 19 Jan 2024 15:06:22 +0200 Subject: [PATCH 19/19] fixes based on Martin's feedback --- src/scene/renderer/forward-renderer.js | 2 +- src/scene/renderer/renderer.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/scene/renderer/forward-renderer.js b/src/scene/renderer/forward-renderer.js index c5e2c5013c7..313bfd55f09 100644 --- a/src/scene/renderer/forward-renderer.js +++ b/src/scene/renderer/forward-renderer.js @@ -628,7 +628,7 @@ class ForwardRenderer extends Renderer { this.viewId3.setValue(view.viewMat3.data); this.viewProjId.setValue(view.projViewOffMat.data); this.viewPosId.setValue(view.positionData); - this.viewIndex.setValue(v); + this.viewIndexId.setValue(v); if (v === 0) { this.drawInstance(device, drawCall, mesh, style, true); diff --git a/src/scene/renderer/renderer.js b/src/scene/renderer/renderer.js index 8f06ff953f2..2cd6ed2c21f 100644 --- a/src/scene/renderer/renderer.js +++ b/src/scene/renderer/renderer.js @@ -220,7 +220,7 @@ class Renderer { this.farClipId = scope.resolve('camera_far'); this.cameraParams = new Float32Array(4); this.cameraParamsId = scope.resolve('camera_params'); - this.viewIndex = scope.resolve('view_index'); + this.viewIndexId = scope.resolve('view_index'); this.blueNoiseJitterId = scope.resolve('blueNoiseJitter'); this.blueNoiseTextureId = scope.resolve('blueNoiseTex32');
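
Note on usage: on a stereo HMD the depth data arrives as a texture array indexed by the new view_index uniform, while on a phone it is a single 2D texture, and the pixel format depends on what the user agent granted; ar-camera-depth.mjs therefore rebuilds its shader only after the session is live. A condensed sketch of that deferral, assuming an updateShader(usesArray, usesFloat) helper like the one in the example (the helper is the only assumed symbol here):

// Defer shader-variant selection until XR is active and depth is available, since
// the view count and depth format are unknown before the session starts.
let shaderConfigured = false;

app.xr.on('end', () => {
    shaderConfigured = false; // re-detect for the next session
});

app.on('update', () => {
    if (shaderConfigured || !app.xr.active || !app.xr.views.availableDepth || !app.xr.views.list.length)
        return;

    shaderConfigured = true;
    updateShader(
        app.xr.views.list.length > 1,                          // multi-view: depth is a texture array
        app.xr.views.depthPixelFormat === pc.PIXELFORMAT_R32F  // float32 vs. packed luminance-alpha
    );
});
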