import { EventHandler } from '../core/event-handler.js';
import { Mat4 } from '../math/mat4.js';
import { Texture } from '../graphics/texture.js';
import { ADDRESS_CLAMP_TO_EDGE, PIXELFORMAT_L8_A8, FILTER_LINEAR } from '../graphics/graphics.js';

/* eslint-disable jsdoc/check-examples */
/**
 * @class
 * @name pc.XrDepthSensing
 * @augments pc.EventHandler
 * @classdesc Depth Sensing provides depth information reconstructed by the underlying AR system. Depth values can be queried per pixel (CPU path) or accessed as a depth texture (GPU path). This information can be used for (but is not limited to) reconstructing real world geometry, placing virtual objects and occluding virtual objects behind real world geometry.
 * @description Depth Sensing provides depth information reconstructed by the underlying AR system. Depth values can be queried per pixel (CPU path) or accessed as a depth texture (GPU path). This information can be used for (but is not limited to) reconstructing real world geometry, placing virtual objects and occluding virtual objects behind real world geometry.
 * @param {pc.XrManager} manager - WebXR Manager.
 * @property {boolean} supported True if Depth Sensing is supported.
 * @property {number} width Width of depth texture or 0 if not available.
 * @property {number} height Height of depth texture or 0 if not available.
 * @example
 * // CPU path
 * var depthSensing = app.xr.depthSensing;
 * if (depthSensing.available) {
 *     // get depth in the middle of the screen, value is in meters
 *     var depth = depthSensing.getDepth(depthSensing.width / 2, depthSensing.height / 2);
 * }
 * @example
 * // GPU path, attaching texture to material
 * material.diffuseMap = depthSensing.texture;
 * material.setParameter('matrix_depth_uv', depthSensing.uvMatrix.data);
 * material.update();
 *
 * // update UV transformation matrix on depth texture resize
 * depthSensing.on('resize', function () {
 *     material.setParameter('matrix_depth_uv', depthSensing.uvMatrix.data);
 * });
 * @example
 * // GLSL shader to unpack depth texture
 * varying vec2 vUv0;
 *
 * uniform sampler2D texture_depthSensingMap;
 * uniform mat4 matrix_depth_uv;
 *
 * void main(void) {
 *     // transform UVs using depth matrix
 *     vec2 texCoord = (matrix_depth_uv * vec4(vUv0.xy, 0.0, 1.0)).xy;
 *
 *     // get luminance alpha components from depth texture
 *     vec2 packedDepth = texture2D(texture_depthSensingMap, texCoord).ra;
 *
 *     // unpack into single value in millimeters
 *     float depth = dot(packedDepth, vec2(255.0, 256.0 * 255.0)); // mm
 *
 *     // normalize: 0m to 8m distance
 *     depth = min(depth / 8000.0, 1.0); // 0..1 = 0m..8m
 *
 *     // paint scene from black to white based on distance
 *     gl_FragColor = vec4(depth, depth, depth, 1.0);
 * }
 */
/* eslint-enable jsdoc/check-examples */
function XrDepthSensing(manager) {
    EventHandler.call(this);

    this._manager = manager;
    this._depthInfo = null;
    this._available = false;

    this._matrixDirty = false;
    this._matrix = new Mat4();
    this._emptyBuffer = new Uint8Array(32);
    this._depthBuffer = null;

    this._texture = new Texture(this._manager.app.graphicsDevice, {
        format: PIXELFORMAT_L8_A8,
        mipmaps: false,
        addressU: ADDRESS_CLAMP_TO_EDGE,
        addressV: ADDRESS_CLAMP_TO_EDGE,
        minFilter: FILTER_LINEAR,
        magFilter: FILTER_LINEAR
    });

    this._manager.on('end', this._onSessionEnd, this);
}
XrDepthSensing.prototype = Object.create(EventHandler.prototype);
XrDepthSensing.prototype.constructor = XrDepthSensing;

/**
 * @event
 * @name pc.XrDepthSensing#available
 * @description Fired when depth sensing data becomes available.
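 * @example
 * // a minimal usage sketch: start reading depth once data is available
 * app.xr.depthSensing.on('available', function () {
 *     console.log('depth sensing data is now available');
 * });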
 */

/**
 * @event
 * @name pc.XrDepthSensing#unavailable
 * @description Fired when depth sensing data becomes unavailable.
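 * @example
 * // a minimal usage sketch: react when depth data stops being provided
 * app.xr.depthSensing.on('unavailable', function () {
 *     console.log('depth sensing data is no longer available');
 * });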
 */

/**
 * @event
 * @name pc.XrDepthSensing#resize
 * @description Fired when the depth sensing texture has been resized. The {@link pc.XrDepthSensing#uvMatrix} needs to be updated for relevant shaders.
 * @param {number} width - The new width of the depth texture in pixels.
 * @param {number} height - The new height of the depth texture in pixels.
 * @example
 * depthSensing.on('resize', function () {
 *     material.setParameter('matrix_depth_uv', depthSensing.uvMatrix.data);
 * });
 */

XrDepthSensing.prototype._onSessionEnd = function () {
    this._depthInfo = null;

    if (this._available) {
        this._available = false;
        this.fire('unavailable');
    }

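    // reset texture to an empty 4x4 placeholder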
    this._depthBuffer = null;
    this._texture._width = 4;
    this._texture._height = 4;
    this._texture._levels[0] = this._emptyBuffer;
    this._texture.upload();
};

XrDepthSensing.prototype._updateTexture = function () {
    if (this._depthInfo) {
        var resized = false;

        // changed resolution
        if (this._depthInfo.width !== this._texture.width || this._depthInfo.height !== this._texture.height) {
            this._texture._width = this._depthInfo.width;
            this._texture._height = this._depthInfo.height;
            this._matrixDirty = true;
            resized = true;
        }

        var dataBuffer = this._depthInfo.data;
        this._depthBuffer = new Uint8Array(dataBuffer.buffer, dataBuffer.byteOffset, dataBuffer.byteLength);
        this._texture._levels[0] = this._depthBuffer;
        this._texture.upload();

        if (resized) this.fire('resize', this._depthInfo.width, this._depthInfo.height);

    } else if (this._depthBuffer) {
        // depth info not available anymore
        this._depthBuffer = null;
        this._texture._width = 4;
        this._texture._height = 4;
        this._texture._levels[0] = this._emptyBuffer;
        this._texture.upload();
    }
};

XrDepthSensing.prototype.update = function (frame, view) {
    if (view) {
        if (! this._depthInfo) this._matrixDirty = true;
        this._depthInfo = frame.getDepthInformation(view);
    } else {
        if (this._depthInfo) this._matrixDirty = true;
        this._depthInfo = null;
    }

    this._updateTexture();

    if (this._matrixDirty) {
        this._matrixDirty = false;

        if (this._depthInfo) {
            this._matrix.data.set(this._depthInfo.normTextureFromNormView.matrix);
        } else {
            this._matrix.setIdentity();
        }
    }

    if (this._depthInfo && ! this._available) {
        this._available = true;
        this.fire('available');
    } else if (! this._depthInfo && this._available) {
        this._available = false;
        this.fire('unavailable');
    }
};

/**
 * @function
 * @name pc.XrDepthSensing#getDepth
 * @param {number} x - X coordinate of the pixel in the depth texture.
 * @param {number} y - Y coordinate of the pixel in the depth texture.
 * @description Get the depth value at the given pixel coordinates, in meters. X and Y are in depth texture space; see {@link pc.XrDepthSensing#width} and {@link pc.XrDepthSensing#height}. This is a CPU path and does not use the GPU depth texture.
 * @example
 * var depth = app.xr.depthSensing.getDepth(x, y);
 * if (depth !== null) {
 *     // depth in meters
 * }
 * @returns {number|null} Depth in meters or null if depth information is not available.
 */
XrDepthSensing.prototype.getDepth = function (x, y) {
    if (! this._depthInfo)
        return null;

    return this._depthInfo.getDepth(x, y);
};

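/**
 * @name pc.XrDepthSensing#supported
 * @type {boolean}
 * @description True if Depth Sensing is supported.
 * @example
 * // a minimal usage sketch: check support before using depth information
 * if (app.xr.depthSensing.supported) {
 *     // depth sensing related code
 * }
 */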
Object.defineProperty(XrDepthSensing.prototype, 'supported', {
    get: function () {
        return !! window.XRDepthInformation;
    }
});

/**
 * @name pc.XrDepthSensing#available
 * @type {boolean}
 * @description True if depth sensing information is available.
 * @example
 * if (app.xr.depthSensing.available) {
 *     var depth = app.xr.depthSensing.getDepth(x, y);
 * }
 */
Object.defineProperty(XrDepthSensing.prototype, 'available', {
    get: function () {
        return this._available;
    }
});

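/**
 * @name pc.XrDepthSensing#width
 * @type {number}
 * @description Width of the depth texture in pixels, or 0 if depth information is not available.
 * @example
 * // a minimal usage sketch: sample depth in the middle of the depth texture
 * var depthSensing = app.xr.depthSensing;
 * var depth = depthSensing.getDepth(depthSensing.width / 2, depthSensing.height / 2);
 */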
Object.defineProperty(XrDepthSensing.prototype, 'width', {
    get: function () {
        return this._depthInfo && this._depthInfo.width || 0;
    }
});

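/**
 * @name pc.XrDepthSensing#height
 * @type {number}
 * @description Height of the depth texture in pixels, or 0 if depth information is not available.
 */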
Object.defineProperty(XrDepthSensing.prototype, 'height', {
    get: function () {
        return this._depthInfo && this._depthInfo.height || 0;
    }
});

/* eslint-disable jsdoc/check-examples */
/**
 * @name pc.XrDepthSensing#texture
 * @type {pc.Texture}
 * @description Texture that contains packed depth information. The format of this texture is {@link pc.PIXELFORMAT_L8_A8}. Its UVs are transformed by the underlying AR system and can be normalized using {@link pc.XrDepthSensing#uvMatrix}.
 * @example
 * material.diffuseMap = depthSensing.texture;
 * @example
 * // GLSL shader to unpack depth texture
 * varying vec2 vUv0;
 *
 * uniform sampler2D texture_depthSensingMap;
 * uniform mat4 matrix_depth_uv;
 *
 * void main(void) {
 *     // transform UVs using depth matrix
 *     vec2 texCoord = (matrix_depth_uv * vec4(vUv0.xy, 0.0, 1.0)).xy;
 *
 *     // get luminance alpha components from depth texture
 *     vec2 packedDepth = texture2D(texture_depthSensingMap, texCoord).ra;
 *
 *     // unpack into single value in millimeters
 *     float depth = dot(packedDepth, vec2(255.0, 256.0 * 255.0)); // mm
 *
 *     // normalize: 0m to 8m distance
 *     depth = min(depth / 8000.0, 1.0); // 0..1 = 0m..8m
 *
 *     // paint scene from black to white based on distance
 *     gl_FragColor = vec4(depth, depth, depth, 1.0);
 * }
 */
/* eslint-enable jsdoc/check-examples */
Object.defineProperty(XrDepthSensing.prototype, 'texture', {
    get: function () {
        return this._texture;
    }
});

/**
 * @name pc.XrDepthSensing#uvMatrix
 * @type {pc.Mat4}
 * @description 4x4 matrix that should be used to transform depth texture UVs to normalized UVs in a shader. It is updated when the depth texture is resized. Refer to {@link pc.XrDepthSensing#resize}.
 * @example
 * material.setParameter('matrix_depth_uv', depthSensing.uvMatrix.data);
 */
Object.defineProperty(XrDepthSensing.prototype, 'uvMatrix', {
    get: function () {
        return this._matrix;
    }
});

export { XrDepthSensing };