From 998f0e8bcad956a1029a44cc469a5c7d476babbb Mon Sep 17 00:00:00 2001 From: Maksims Mihejevs Date: Mon, 16 Nov 2020 17:06:48 +0100 Subject: [PATCH 1/9] depth sensing --- src/xr/xr-depth-sensing.js | 23 +++++++++++++++++++++++ src/xr/xr-manager.js | 4 ++++ 2 files changed, 27 insertions(+) create mode 100644 src/xr/xr-depth-sensing.js diff --git a/src/xr/xr-depth-sensing.js b/src/xr/xr-depth-sensing.js new file mode 100644 index 00000000000..9248fdd8694 --- /dev/null +++ b/src/xr/xr-depth-sensing.js @@ -0,0 +1,23 @@ +import { EventHandler } from '../core/event-handler.js'; + +function XrDepthSensing(manager) { + EventHandler.call(this); + + this._manager = manager; + this._depthInfo = null; + this._frame = null; +} +XrDepthSensing.prototype = Object.create(EventHandler.prototype); +XrDepthSensing.prototype.constructor = XrDepthSensing; + +XrDepthSensing.prototype.update = function(frame) { + this._frame = frame; +}; + +Object.defineProperty(XrDepthSensing.prototype, 'supported', { + get: function () { + return !! window.XRDepthInformation; + } +}); + +export { XrDepthSensing }; diff --git a/src/xr/xr-manager.js b/src/xr/xr-manager.js index bfd3af8a68d..8a18215f9a6 100644 --- a/src/xr/xr-manager.js +++ b/src/xr/xr-manager.js @@ -10,6 +10,7 @@ import { XRTYPE_INLINE, XRTYPE_VR, XRTYPE_AR } from './constants.js'; import { XrHitTest } from './xr-hit-test.js'; import { XrInput } from './xr-input.js'; import { XrLightEstimation } from './xr-light-estimation.js'; +import { XrDepthSensing } from './xr-depth-sensing.js'; /** * @class @@ -50,10 +51,12 @@ function XrManager(app) { this._session = null; this._baseLayer = null; this._referenceSpace = null; + this._frame = null; this.input = new XrInput(this); this.hitTest = new XrHitTest(this); this.lightEstimation = new XrLightEstimation(this); + this.depthSensing = new XrDepthSensing(this); this._camera = null; this.views = []; @@ -212,6 +215,7 @@ XrManager.prototype.start = function (camera, type, spaceType, options) { if (type === XRTYPE_AR) { optionalFeatures.push('light-estimation'); optionalFeatures.push('hit-test'); + optionalFeatures.push('depth-sensing'); } else if (type === XRTYPE_VR) { optionalFeatures.push('hand-tracking'); } From b35f01accfd2bbcfe44eda9655aa6f9a78e7d20f Mon Sep 17 00:00:00 2001 From: Maksims Mihejevs Date: Mon, 16 Nov 2020 20:39:09 +0100 Subject: [PATCH 2/9] depth sensing API, CPU path --- externs.js | 1 + src/index.js | 1 + src/xr/xr-depth-sensing.js | 99 ++++++++++++++++++++++++++++++++++++-- src/xr/xr-manager.js | 10 ++-- 4 files changed, 104 insertions(+), 7 deletions(-) diff --git a/externs.js b/externs.js index 8da42a7128b..106cdd0af61 100644 --- a/externs.js +++ b/externs.js @@ -16,6 +16,7 @@ var WebAssembly = {}; var XRWebGLLayer = {}; var XRRay = {}; var XRHand = {}; +var XRDepthInformation = {}; var DOMPoint = {}; // extras requires this diff --git a/src/index.js b/src/index.js index 91b9aa46a6e..a29c0489592 100644 --- a/src/index.js +++ b/src/index.js @@ -268,6 +268,7 @@ export * from './xr/constants.js'; export { XrInput } from './xr/xr-input.js'; export { XrInputSource } from './xr/xr-input-source.js'; export { XrLightEstimation } from './xr/xr-light-estimation.js'; +export { XrDepthSensing } from './xr/xr-depth-sensing.js'; export { XrManager } from './xr/xr-manager.js'; export { XrHitTest } from './xr/xr-hit-test.js'; export { XrHitTestSource } from './xr/xr-hit-test-source.js'; diff --git a/src/xr/xr-depth-sensing.js b/src/xr/xr-depth-sensing.js index 9248fdd8694..8762519518c 100644 --- 
a/src/xr/xr-depth-sensing.js +++ b/src/xr/xr-depth-sensing.js @@ -1,17 +1,83 @@ import { EventHandler } from '../core/event-handler.js'; +/** + * @class + * @name pc.XrDepthSensing + * @augments pc.EventHandler + * @classdesc Depth Sensing provides depth information which is reconstructed using underlying AR system. It provides ability to query depth value (CPU path) or access a depth texture (GPU path). Depth information can be used (not limited to) for: reconstructing real world geometry; virtual object placement; occlusion of virtual objects by real world geometry; and more. + * @description + * @param {pc.XrManager} manager - WebXR Manager. + * @property {boolean} supported True if Depth Sensing is supported. + * @property {number} width Width of depth texture or 0 if not available. + * @property {number} height Height of depth texture or 0 if not available. + */ function XrDepthSensing(manager) { EventHandler.call(this); this._manager = manager; this._depthInfo = null; - this._frame = null; + this._available = false; + + this._manager.on('end', this._onSessionEnd, this); } XrDepthSensing.prototype = Object.create(EventHandler.prototype); XrDepthSensing.prototype.constructor = XrDepthSensing; -XrDepthSensing.prototype.update = function(frame) { - this._frame = frame; +/** + * @event + * @name pc.XrDepthSensing#available + * @description Fired when depth sensing data becomes available. + */ + +/** + * @event + * @name pc.XrDepthSensing#unavailable + * @description Fired when depth sensing data becomes unavailable. + */ + +XrDepthSensing.prototype._onSessionEnd = function () { + this._depthInfo = null; + + if (this._available) { + this._available = false; + this.fire('unavailable'); + } +}; + +XrDepthSensing.prototype.update = function (frame, view) { + if (view) { + this._depthInfo = frame.getDepthInformation(view); + } else { + this._depthInfo = null; + } + + if (this._depthInfo && ! this._available) { + this._available = true; + this.fire('available'); + } else if (! this._depthInfo && this._available) { + this._available = false; + this.fire('unavailable'); + } +}; + +/** + * @function + * @name pc.XrDepthSensing#getDepth + * @param {number} x - x coordinate of pixel in depth texture. + * @param {number} y - y coordinate of pixel in depth texture. + * @description Get depth value from depth information in meters. X and Y coordinates are in depth texture space, use {@link pc.XrDepthSensing#width} and {@link pc.XrDepthSensing#height}. This is not using GPU texture, and is a CPU path. + * @example + * var depth = app.xr.depthSensing.getDepth(x, y); + * if (depth !== null) { + * // depth in meters + * } + * @returns {number|null} Depth in meters or null if depth information is not available. + */ +XrDepthSensing.prototype.getDepth = function (x, y) { + if (! this._depthInfo) + return null; + + return this._depthInfo.getDepth(x, y); }; Object.defineProperty(XrDepthSensing.prototype, 'supported', { @@ -20,4 +86,31 @@ Object.defineProperty(XrDepthSensing.prototype, 'supported', { } }); +/** + * @name pc.XrDepthSensing#available + * @type {boolean} + * @description True if depth sensing information is available. 
+ * @example + * if (app.xr.depthSensing.available) { + * var depth = app.xr.depthSensing.getDepth(x, y); + * } + */ +Object.defineProperty(XrDepthSensing.prototype, 'available', { + get: function () { + return this._available; + } +}); + +Object.defineProperty(XrDepthSensing.prototype, 'width', { + get: function () { + return this._depthInfo && this._depthInfo.width || 0; + } +}); + +Object.defineProperty(XrDepthSensing.prototype, 'height', { + get: function () { + return this._depthInfo && this._depthInfo.height || 0; + } +}); + export { XrDepthSensing }; diff --git a/src/xr/xr-manager.js b/src/xr/xr-manager.js index 8a18215f9a6..32d856d8ce6 100644 --- a/src/xr/xr-manager.js +++ b/src/xr/xr-manager.js @@ -475,12 +475,14 @@ XrManager.prototype.update = function (frame) { this.input.update(frame); if (this._type === XRTYPE_AR) { - if (this.hitTest.supported) { + if (this.hitTest.supported) this.hitTest.update(frame); - } - if (this.lightEstimation.supported) { + + if (this.lightEstimation.supported) this.lightEstimation.update(frame); - } + + if (this.depthSensing.supported) + this.depthSensing.update(frame, pose && pose.views[0]); } this.fire('update', frame); From d4134091fef46017aa4c7ba6853ac1a503161303 Mon Sep 17 00:00:00 2001 From: Maksims Mihejevs Date: Mon, 16 Nov 2020 20:53:10 +0100 Subject: [PATCH 3/9] test:tsd fix --- src/xr/xr-depth-sensing.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/xr/xr-depth-sensing.js b/src/xr/xr-depth-sensing.js index 8762519518c..eafc7bc4e59 100644 --- a/src/xr/xr-depth-sensing.js +++ b/src/xr/xr-depth-sensing.js @@ -5,7 +5,7 @@ import { EventHandler } from '../core/event-handler.js'; * @name pc.XrDepthSensing * @augments pc.EventHandler * @classdesc Depth Sensing provides depth information which is reconstructed using underlying AR system. It provides ability to query depth value (CPU path) or access a depth texture (GPU path). Depth information can be used (not limited to) for: reconstructing real world geometry; virtual object placement; occlusion of virtual objects by real world geometry; and more. - * @description + * @description Depth Sensing provides depth information which is reconstructed using underlying AR system. It provides ability to query depth value (CPU path) or access a depth texture (GPU path). Depth information can be used (not limited to) for: reconstructing real world geometry; virtual object placement; occlusion of virtual objects by real world geometry; and more. * @param {pc.XrManager} manager - WebXR Manager. * @property {boolean} supported True if Depth Sensing is supported. * @property {number} width Width of depth texture or 0 if not available. 
From e12428e4f2d3ba39492fee78300111b137942879 Mon Sep 17 00:00:00 2001 From: Maksims Mihejevs Date: Tue, 17 Nov 2020 20:25:08 +0100 Subject: [PATCH 4/9] WebXR depth sensing GPU path --- src/xr/xr-depth-sensing.js | 165 +++++++++++++++++++++++++++++++++++++ 1 file changed, 165 insertions(+) diff --git a/src/xr/xr-depth-sensing.js b/src/xr/xr-depth-sensing.js index eafc7bc4e59..db2d19e7729 100644 --- a/src/xr/xr-depth-sensing.js +++ b/src/xr/xr-depth-sensing.js @@ -1,4 +1,7 @@ import { EventHandler } from '../core/event-handler.js'; +import { Mat4 } from '../math/mat4.js'; +import { Texture } from '../graphics/texture.js'; +import { ADDRESS_CLAMP_TO_EDGE, PIXELFORMAT_L8_A8 } from '../graphics/graphics.js'; /** * @class @@ -10,6 +13,46 @@ import { EventHandler } from '../core/event-handler.js'; * @property {boolean} supported True if Depth Sensing is supported. * @property {number} width Width of depth texture or 0 if not available. * @property {number} height Height of depth texture or 0 if not available. + * @example + * // CPU path + * var depthSensing = app.xr.depthSensing; + * if (depthSensing.available) { + * // get depth in the middle of the screen, value is in meters + * var depth = depthSensing.getDepth(depthSensing.width / 2, depthSensing.height / 2); + * } + * @example + * // GPU path, attaching texture to material + * material.diffuseMap = depthSensing.texture; + * material.setParameter('matrix_depth_uv', depthSensing.uvMatrix.data); + * material.update(); + * + * // update UV transformation matrix on depth texture resize + * depthSensing.on('resize', function () { + * material.setParameter('matrix_depth_uv', depthSensing.uvMatrix.data); + * }); + * @example + * // // GLSL shader to unpack depth texture + * // varying vec2 vUv0; + * // + * // uniform sampler2D texture_depthSensingMap; + * // uniform mat4 matrix_depth_uv; + * // + * // void main(void) { + * // // transform UVs using depth matrix + * // vec2 texCoord = (matrix_depth_uv * vec4(vUv0.xy, 0.0, 1.0)).xy; + * // + * // // get luminance alpha components from depth texture + * // vec2 packedDepth = texture2D(texture_depthSensingMap, texCoord).ra; + * // + * // // unpack into single value in millimeters + * // float depth = dot(packedDepth, vec2(255.0, 256.0 * 255.0)); // mm + * // + * // // normalize: 0m to 8m distance + * // depth = min(depth / 8000.0, 1.0); // 0..1 = 0m..8m + * // + * // // paint scene from black to white based on distance + * // gl_FragColor = vec4(depth, depth, depth, 1.0); + * // } */ function XrDepthSensing(manager) { EventHandler.call(this); @@ -18,6 +61,18 @@ function XrDepthSensing(manager) { this._depthInfo = null; this._available = false; + this._matrixDirty = false; + this._matrix = new Mat4(); + this._emptyBuffer = new Uint8Array(32); + this._depthBuffer = null; + + this._texture = new Texture(this._manager.app.graphicsDevice, { + format: PIXELFORMAT_L8_A8, + mipmaps: false, + addressU: ADDRESS_CLAMP_TO_EDGE, + addressV: ADDRESS_CLAMP_TO_EDGE + }); + this._manager.on('end', this._onSessionEnd, this); } XrDepthSensing.prototype = Object.create(EventHandler.prototype); @@ -35,6 +90,18 @@ XrDepthSensing.prototype.constructor = XrDepthSensing; * @description Fired when depth sensing data becomes unavailable. */ +/** + * @event + * @name pc.XrDepthSensing#resize + * @description Fired when depth sensing texture been resized. So {@link pc.XrDepthSensing#uvMatrix} needs to be updated for relevant shaders. + * @param {number} width - The new width of the depth texture in pixels. 
+ * @param {number} height - The new height of the depth texture in pixels. + * @example + * depthSensing.on('resize', function () { + * material.setParameter('matrix_depth_uv', depthSensing.uvMatrix); + * }); + */ + XrDepthSensing.prototype._onSessionEnd = function () { this._depthInfo = null; @@ -42,15 +109,64 @@ XrDepthSensing.prototype._onSessionEnd = function () { this._available = false; this.fire('unavailable'); } + + this._depthBuffer = null; + this._texture._width = 4; + this._texture._height = 4; + this._texture._levels[0] = this._emptyBuffer; + this._texture.upload(); +}; + +XrDepthSensing.prototype._updateTexture = function () { + if (this._depthInfo) { + var resized = false; + + // changed resolution + if (this._depthInfo.width !== this._texture.width || this._depthInfo.height !== this._texture.height) { + this._texture._width = this._depthInfo.width; + this._texture._height = this._depthInfo.height; + this._matrixDirty = true; + resized = true; + } + + var dataBuffer = this._depthInfo.data; + this._depthBuffer = new Uint8Array(dataBuffer.buffer, dataBuffer.byteOffset, dataBuffer.byteLength); + this._texture._levels[0] = this._depthBuffer; + this._texture.upload(); + + if (resized) this.fire('resize', this._depthInfo.width, this._depthInfo.height); + + } else if (this._depthBuffer) { + // depth info not available anymore + this._depthBuffer = null; + this._texture._width = 4; + this._texture._height = 4; + this._texture._levels[0] = this._emptyBuffer; + this._texture.upload(); + } }; XrDepthSensing.prototype.update = function (frame, view) { if (view) { + if (! this._depthInfo) this._matrixDirty = true; this._depthInfo = frame.getDepthInformation(view); } else { + if (this._depthInfo) this._matrixDirty = true; this._depthInfo = null; } + this._updateTexture(); + + if (this._matrixDirty) { + this._matrixDirty = false; + + if (this._depthInfo) { + this._matrix.data.set(this._depthInfo.normTextureFromNormView.matrix); + } else { + this._matrix.setIdentity(); + } + } + if (this._depthInfo && ! this._available) { this._available = true; this.fire('available'); @@ -113,4 +229,53 @@ Object.defineProperty(XrDepthSensing.prototype, 'height', { } }); +/** + * @name pc.XrDepthSensing#texture + * @type {pc.Texture} + * @description Texture that contains packed depth information. Format of this texture is {@link pc.PIXELFORMAT_L8_A8}. And is UV transformed based on onderlying AR system, which can be normalized using {@link pc.XrDepthSensing#uvMatrix}. 
+ * @example + * material.diffuseMap = depthSensing.texture; + * @example + * // // GLSL shader to unpack depth texture + * // varying vec2 vUv0; + * // + * // uniform sampler2D texture_depthSensingMap; + * // uniform mat4 matrix_depth_uv; + * // + * // void main(void) { + * // // transform UVs using depth matrix + * // vec2 texCoord = (matrix_depth_uv * vec4(vUv0.xy, 0.0, 1.0)).xy; + * // + * // // get luminance alpha components from depth texture + * // vec2 packedDepth = texture2D(texture_depthSensingMap, texCoord).ra; + * // + * // // unpack into single value in millimeters + * // float depth = dot(packedDepth, vec2(255.0, 256.0 * 255.0)); // mm + * // + * // // normalize: 0m to 8m distance + * // depth = min(depth / 8000.0, 1.0); // 0..1 = 0m..8m + * // + * // // paint scene from black to white based on distance + * // gl_FragColor = vec4(depth, depth, depth, 1.0); + * // } + */ +Object.defineProperty(XrDepthSensing.prototype, 'texture', { + get: function () { + return this._texture; + } +}); + +/** + * @name pc.XrDepthSensing#uvMatrix + * @type {pc.Mat4} + * @description Data of 4x4 matrix that should be used to transform depth texture UVs to normalized UVs in a shader. It is updated when depth texture is resized, refer to {@link pc.XrDepthSensing#resize} + * @example + * material.setParameter('matrix_depth_uv', depthSensing.uvMatrix.data); + */ +Object.defineProperty(XrDepthSensing.prototype, 'uvMatrix', { + get: function () { + return this._matrix; + } +}); + export { XrDepthSensing }; From d4f03279ae26e041f1e0e951891193bfbd0488ea Mon Sep 17 00:00:00 2001 From: Maksims Mihejevs Date: Tue, 17 Nov 2020 21:08:16 +0100 Subject: [PATCH 5/9] clear --- src/xr/xr-manager.js | 1 - 1 file changed, 1 deletion(-) diff --git a/src/xr/xr-manager.js b/src/xr/xr-manager.js index 32d856d8ce6..d680bd0f8b1 100644 --- a/src/xr/xr-manager.js +++ b/src/xr/xr-manager.js @@ -51,7 +51,6 @@ function XrManager(app) { this._session = null; this._baseLayer = null; this._referenceSpace = null; - this._frame = null; this.input = new XrInput(this); this.hitTest = new XrHitTest(this); From 966d34d91ce62b29f26fa8187ca33167fb929615 Mon Sep 17 00:00:00 2001 From: Maksims Mihejevs Date: Wed, 30 Dec 2020 21:55:15 +0200 Subject: [PATCH 6/9] disable linter for glsl @example jsdoc code; more right texture filtering; --- src/xr/xr-depth-sensing.js | 96 ++++++++++++++++++++------------------ 1 file changed, 51 insertions(+), 45 deletions(-) diff --git a/src/xr/xr-depth-sensing.js b/src/xr/xr-depth-sensing.js index db2d19e7729..b75890343ec 100644 --- a/src/xr/xr-depth-sensing.js +++ b/src/xr/xr-depth-sensing.js @@ -3,6 +3,7 @@ import { Mat4 } from '../math/mat4.js'; import { Texture } from '../graphics/texture.js'; import { ADDRESS_CLAMP_TO_EDGE, PIXELFORMAT_L8_A8 } from '../graphics/graphics.js'; +/* eslint-disable */ /** * @class * @name pc.XrDepthSensing @@ -31,29 +32,30 @@ import { ADDRESS_CLAMP_TO_EDGE, PIXELFORMAT_L8_A8 } from '../graphics/graphics.j * material.setParameter('matrix_depth_uv', depthSensing.uvMatrix.data); * }); * @example - * // // GLSL shader to unpack depth texture - * // varying vec2 vUv0; - * // - * // uniform sampler2D texture_depthSensingMap; - * // uniform mat4 matrix_depth_uv; - * // - * // void main(void) { - * // // transform UVs using depth matrix - * // vec2 texCoord = (matrix_depth_uv * vec4(vUv0.xy, 0.0, 1.0)).xy; - * // - * // // get luminance alpha components from depth texture - * // vec2 packedDepth = texture2D(texture_depthSensingMap, texCoord).ra; - * // - * // // unpack into single 
value in millimeters - * // float depth = dot(packedDepth, vec2(255.0, 256.0 * 255.0)); // mm - * // - * // // normalize: 0m to 8m distance - * // depth = min(depth / 8000.0, 1.0); // 0..1 = 0m..8m - * // - * // // paint scene from black to white based on distance - * // gl_FragColor = vec4(depth, depth, depth, 1.0); - * // } + * // GLSL shader to unpack depth texture + * varying vec2 vUv0; + * + * uniform sampler2D texture_depthSensingMap; + * uniform mat4 matrix_depth_uv; + * + * void main(void) { + * // transform UVs using depth matrix + * vec2 texCoord = (matrix_depth_uv * vec4(vUv0.xy, 0.0, 1.0)).xy; + * + * // get luminance alpha components from depth texture + * vec2 packedDepth = texture2D(texture_depthSensingMap, texCoord).ra; + * + * // unpack into single value in millimeters + * float depth = dot(packedDepth, vec2(255.0, 256.0 * 255.0)); // mm + * + * // normalize: 0m to 8m distance + * depth = min(depth / 8000.0, 1.0); // 0..1 = 0m..8m + * + * // paint scene from black to white based on distance + * gl_FragColor = vec4(depth, depth, depth, 1.0); + * } */ +/* eslint-enable */ function XrDepthSensing(manager) { EventHandler.call(this); @@ -70,7 +72,9 @@ function XrDepthSensing(manager) { format: PIXELFORMAT_L8_A8, mipmaps: false, addressU: ADDRESS_CLAMP_TO_EDGE, - addressV: ADDRESS_CLAMP_TO_EDGE + addressV: ADDRESS_CLAMP_TO_EDGE, + minFilter: FILTER_LINEAR, + magFilter: FILTER_LINEAR }); this._manager.on('end', this._onSessionEnd, this); @@ -229,6 +233,7 @@ Object.defineProperty(XrDepthSensing.prototype, 'height', { } }); +/* eslint-disable */ /** * @name pc.XrDepthSensing#texture * @type {pc.Texture} @@ -236,29 +241,30 @@ Object.defineProperty(XrDepthSensing.prototype, 'height', { * @example * material.diffuseMap = depthSensing.texture; * @example - * // // GLSL shader to unpack depth texture - * // varying vec2 vUv0; - * // - * // uniform sampler2D texture_depthSensingMap; - * // uniform mat4 matrix_depth_uv; - * // - * // void main(void) { - * // // transform UVs using depth matrix - * // vec2 texCoord = (matrix_depth_uv * vec4(vUv0.xy, 0.0, 1.0)).xy; - * // - * // // get luminance alpha components from depth texture - * // vec2 packedDepth = texture2D(texture_depthSensingMap, texCoord).ra; - * // - * // // unpack into single value in millimeters - * // float depth = dot(packedDepth, vec2(255.0, 256.0 * 255.0)); // mm - * // - * // // normalize: 0m to 8m distance - * // depth = min(depth / 8000.0, 1.0); // 0..1 = 0m..8m - * // - * // // paint scene from black to white based on distance - * // gl_FragColor = vec4(depth, depth, depth, 1.0); - * // } + * // GLSL shader to unpack depth texture + * varying vec2 vUv0; + * + * uniform sampler2D texture_depthSensingMap; + * uniform mat4 matrix_depth_uv; + * + * void main(void) { + * // transform UVs using depth matrix + * vec2 texCoord = (matrix_depth_uv * vec4(vUv0.xy, 0.0, 1.0)).xy; + * + * // get luminance alpha components from depth texture + * vec2 packedDepth = texture2D(texture_depthSensingMap, texCoord).ra; + * + * // unpack into single value in millimeters + * float depth = dot(packedDepth, vec2(255.0, 256.0 * 255.0)); // mm + * + * // normalize: 0m to 8m distance + * depth = min(depth / 8000.0, 1.0); // 0..1 = 0m..8m + * + * // paint scene from black to white based on distance + * gl_FragColor = vec4(depth, depth, depth, 1.0); + * } */ +/* eslint-enable */ Object.defineProperty(XrDepthSensing.prototype, 'texture', { get: function () { return this._texture; From 560f0bcb262f607ced33e33b36843b59d4cb8ffd Mon Sep 17 00:00:00 
2001 From: Maksims Mihejevs Date: Wed, 30 Dec 2020 22:25:05 +0200 Subject: [PATCH 7/9] fix --- src/xr/xr-depth-sensing.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/xr/xr-depth-sensing.js b/src/xr/xr-depth-sensing.js index b75890343ec..e78f44e87fa 100644 --- a/src/xr/xr-depth-sensing.js +++ b/src/xr/xr-depth-sensing.js @@ -1,7 +1,7 @@ import { EventHandler } from '../core/event-handler.js'; import { Mat4 } from '../math/mat4.js'; import { Texture } from '../graphics/texture.js'; -import { ADDRESS_CLAMP_TO_EDGE, PIXELFORMAT_L8_A8 } from '../graphics/graphics.js'; +import { ADDRESS_CLAMP_TO_EDGE, PIXELFORMAT_L8_A8, FILTER_LINEAR } from '../graphics/graphics.js'; /* eslint-disable */ /** From 786f6c94ca94f7137c017aa446851ad171dc92ce Mon Sep 17 00:00:00 2001 From: Maksims Mihejevs Date: Thu, 31 Dec 2020 00:50:24 +0200 Subject: [PATCH 8/9] jslint disable on few examples --- src/xr/xr-depth-sensing.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/xr/xr-depth-sensing.js b/src/xr/xr-depth-sensing.js index e78f44e87fa..8affbdd1d30 100644 --- a/src/xr/xr-depth-sensing.js +++ b/src/xr/xr-depth-sensing.js @@ -3,7 +3,7 @@ import { Mat4 } from '../math/mat4.js'; import { Texture } from '../graphics/texture.js'; import { ADDRESS_CLAMP_TO_EDGE, PIXELFORMAT_L8_A8, FILTER_LINEAR } from '../graphics/graphics.js'; -/* eslint-disable */ +/* eslint-disable jsdoc/check-examples */ /** * @class * @name pc.XrDepthSensing @@ -55,7 +55,7 @@ import { ADDRESS_CLAMP_TO_EDGE, PIXELFORMAT_L8_A8, FILTER_LINEAR } from '../grap * gl_FragColor = vec4(depth, depth, depth, 1.0); * } */ -/* eslint-enable */ +/* eslint-enable jsdoc/check-examples */ function XrDepthSensing(manager) { EventHandler.call(this); @@ -233,7 +233,7 @@ Object.defineProperty(XrDepthSensing.prototype, 'height', { } }); -/* eslint-disable */ +/* eslint-disable jsdoc/check-examples */ /** * @name pc.XrDepthSensing#texture * @type {pc.Texture} @@ -264,7 +264,7 @@ Object.defineProperty(XrDepthSensing.prototype, 'height', { * gl_FragColor = vec4(depth, depth, depth, 1.0); * } */ -/* eslint-enable */ +/* eslint-enable jsdoc/check-examples */ Object.defineProperty(XrDepthSensing.prototype, 'texture', { get: function () { return this._texture; From 38f5ce8b2032fb1c921cd6722d14e32433359da3 Mon Sep 17 00:00:00 2001 From: Will Eastcott Date: Thu, 31 Dec 2020 00:11:52 +0000 Subject: [PATCH 9/9] Edit docs --- src/xr/xr-depth-sensing.js | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/xr/xr-depth-sensing.js b/src/xr/xr-depth-sensing.js index 8affbdd1d30..f0591cda3ad 100644 --- a/src/xr/xr-depth-sensing.js +++ b/src/xr/xr-depth-sensing.js @@ -8,8 +8,8 @@ import { ADDRESS_CLAMP_TO_EDGE, PIXELFORMAT_L8_A8, FILTER_LINEAR } from '../grap * @class * @name pc.XrDepthSensing * @augments pc.EventHandler - * @classdesc Depth Sensing provides depth information which is reconstructed using underlying AR system. It provides ability to query depth value (CPU path) or access a depth texture (GPU path). Depth information can be used (not limited to) for: reconstructing real world geometry; virtual object placement; occlusion of virtual objects by real world geometry; and more. - * @description Depth Sensing provides depth information which is reconstructed using underlying AR system. It provides ability to query depth value (CPU path) or access a depth texture (GPU path). 
Depth information can be used (not limited to) for: reconstructing real world geometry; virtual object placement; occlusion of virtual objects by real world geometry; and more. + * @classdesc Depth Sensing provides depth information which is reconstructed using the underlying AR system. It provides the ability to query depth values (CPU path) or access a depth texture (GPU path). Depth information can be used (not limited to) for reconstructing real world geometry, virtual object placement, occlusion of virtual objects by real world geometry and more. + * @description Depth Sensing provides depth information which is reconstructed using the underlying AR system. It provides the ability to query depth values (CPU path) or access a depth texture (GPU path). Depth information can be used (not limited to) for reconstructing real world geometry, virtual object placement, occlusion of virtual objects by real world geometry and more. * @param {pc.XrManager} manager - WebXR Manager. * @property {boolean} supported True if Depth Sensing is supported. * @property {number} width Width of depth texture or 0 if not available. @@ -97,7 +97,7 @@ XrDepthSensing.prototype.constructor = XrDepthSensing; /** * @event * @name pc.XrDepthSensing#resize - * @description Fired when depth sensing texture been resized. So {@link pc.XrDepthSensing#uvMatrix} needs to be updated for relevant shaders. + * @description Fired when the depth sensing texture been resized. {@link pc.XrDepthSensing#uvMatrix} needs to be updated for relevant shaders. * @param {number} width - The new width of the depth texture in pixels. * @param {number} height - The new height of the depth texture in pixels. * @example @@ -185,7 +185,7 @@ XrDepthSensing.prototype.update = function (frame, view) { * @name pc.XrDepthSensing#getDepth * @param {number} x - x coordinate of pixel in depth texture. * @param {number} y - y coordinate of pixel in depth texture. - * @description Get depth value from depth information in meters. X and Y coordinates are in depth texture space, use {@link pc.XrDepthSensing#width} and {@link pc.XrDepthSensing#height}. This is not using GPU texture, and is a CPU path. + * @description Get depth value from depth information in meters. X and Y coordinates are in depth texture space, use {@link pc.XrDepthSensing#width} and {@link pc.XrDepthSensing#height}. This is not using a GPU texture and is a CPU path. * @example * var depth = app.xr.depthSensing.getDepth(x, y); * if (depth !== null) { @@ -237,7 +237,7 @@ Object.defineProperty(XrDepthSensing.prototype, 'height', { /** * @name pc.XrDepthSensing#texture * @type {pc.Texture} - * @description Texture that contains packed depth information. Format of this texture is {@link pc.PIXELFORMAT_L8_A8}. And is UV transformed based on onderlying AR system, which can be normalized using {@link pc.XrDepthSensing#uvMatrix}. + * @description Texture that contains packed depth information. The format of this texture is {@link pc.PIXELFORMAT_L8_A8}. It is UV transformed based on the underlying AR system which can be normalized using {@link pc.XrDepthSensing#uvMatrix}. * @example * material.diffuseMap = depthSensing.texture; * @example @@ -274,7 +274,7 @@ Object.defineProperty(XrDepthSensing.prototype, 'texture', { /** * @name pc.XrDepthSensing#uvMatrix * @type {pc.Mat4} - * @description Data of 4x4 matrix that should be used to transform depth texture UVs to normalized UVs in a shader. 
It is updated when depth texture is resized, refer to {@link pc.XrDepthSensing#resize} + * @description 4x4 matrix that should be used to transform depth texture UVs to normalized UVs in a shader. It is updated when the depth texture is resized. Refer to {@link pc.XrDepthSensing#resize}. * @example * material.setParameter('matrix_depth_uv', depthSensing.uvMatrix.data); */
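The GPU path added by this series can be wired into a material roughly as in the sketch below. It follows the @example blocks in the JSDoc above; the pc.StandardMaterial instance (and whatever mesh it is assigned to) is an assumption of the sketch, not something added by the patches.

// GPU path: bind the packed depth texture and its UV transform to a material (sketch)
var depthSensing = app.xr.depthSensing;

var material = new pc.StandardMaterial();
material.diffuseMap = depthSensing.texture;
material.setParameter('matrix_depth_uv', depthSensing.uvMatrix.data);
material.update();

// re-upload the UV transform whenever the underlying depth texture is resized
depthSensing.on('resize', function () {
    material.setParameter('matrix_depth_uv', depthSensing.uvMatrix.data);
});

Passing uvMatrix.data (the flat array backing the pc.Mat4) matches the classdesc example above and is the form material.setParameter expects for a mat4 uniform.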