Skip to content

Commit e4a50dd

Browse files
WebXR depth sensing API (#2561)
* depth sensing * depth sensing API, CPU path * test:tsd fix * WebXR depth sensing GPU path * clear * disable linter for glsl @example jsdoc code; more right texture filtering; * fix * jslint disable on few examples * Edit docs Co-authored-by: Will Eastcott <[email protected]>
1 parent 0f403ee commit e4a50dd

File tree

4 files changed

+303
-9
lines changed

4 files changed

+303
-9
lines changed

externs.js

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@ var WebAssembly = {};
1616
var XRWebGLLayer = {};
1717
var XRRay = {};
1818
var XRHand = {};
19+
var XRDepthInformation = {};
1920
var XRImageTrackingResult = {};
2021
var DOMPoint = {};
2122

src/index.js

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -273,6 +273,7 @@ export * from './xr/constants.js';
273273
export { XrInput } from './xr/xr-input.js';
274274
export { XrInputSource } from './xr/xr-input-source.js';
275275
export { XrLightEstimation } from './xr/xr-light-estimation.js';
276+
export { XrDepthSensing } from './xr/xr-depth-sensing.js';
276277
export { XrManager } from './xr/xr-manager.js';
277278
export { XrHitTest } from './xr/xr-hit-test.js';
278279
export { XrHitTestSource } from './xr/xr-hit-test-source.js';

src/xr/xr-depth-sensing.js

Lines changed: 287 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,287 @@
1+
import { EventHandler } from '../core/event-handler.js';
2+
import { Mat4 } from '../math/mat4.js';
3+
import { Texture } from '../graphics/texture.js';
4+
import { ADDRESS_CLAMP_TO_EDGE, PIXELFORMAT_L8_A8, FILTER_LINEAR } from '../graphics/graphics.js';
5+
6+
/* eslint-disable jsdoc/check-examples */
/**
 * @class
 * @name pc.XrDepthSensing
 * @augments pc.EventHandler
 * @classdesc Depth Sensing provides depth information which is reconstructed using the underlying AR system. It provides the ability to query depth values (CPU path) or access a depth texture (GPU path). Depth information can be used (not limited to) for reconstructing real world geometry, virtual object placement, occlusion of virtual objects by real world geometry and more.
 * @description Depth Sensing provides depth information which is reconstructed using the underlying AR system. It provides the ability to query depth values (CPU path) or access a depth texture (GPU path). Depth information can be used (not limited to) for reconstructing real world geometry, virtual object placement, occlusion of virtual objects by real world geometry and more.
 * @param {pc.XrManager} manager - WebXR Manager.
 * @property {boolean} supported True if Depth Sensing is supported.
 * @property {number} width Width of depth texture or 0 if not available.
 * @property {number} height Height of depth texture or 0 if not available.
 * @example
 * // CPU path
 * var depthSensing = app.xr.depthSensing;
 * if (depthSensing.available) {
 *     // get depth in the middle of the screen, value is in meters
 *     var depth = depthSensing.getDepth(depthSensing.width / 2, depthSensing.height / 2);
 * }
 * @example
 * // GPU path, attaching texture to material
 * material.diffuseMap = depthSensing.texture;
 * material.setParameter('matrix_depth_uv', depthSensing.uvMatrix.data);
 * material.update();
 *
 * // update UV transformation matrix on depth texture resize
 * depthSensing.on('resize', function () {
 *     material.setParameter('matrix_depth_uv', depthSensing.uvMatrix.data);
 * });
 * @example
 * // GLSL shader to unpack depth texture
 * varying vec2 vUv0;
 *
 * uniform sampler2D texture_depthSensingMap;
 * uniform mat4 matrix_depth_uv;
 *
 * void main(void) {
 *     // transform UVs using depth matrix
 *     vec2 texCoord = (matrix_depth_uv * vec4(vUv0.xy, 0.0, 1.0)).xy;
 *
 *     // get luminance alpha components from depth texture
 *     vec2 packedDepth = texture2D(texture_depthSensingMap, texCoord).ra;
 *
 *     // unpack into single value in millimeters
 *     float depth = dot(packedDepth, vec2(255.0, 256.0 * 255.0)); // mm
 *
 *     // normalize: 0m to 8m distance
 *     depth = min(depth / 8000.0, 1.0); // 0..1 = 0m..8m
 *
 *     // paint scene from black to white based on distance
 *     gl_FragColor = vec4(depth, depth, depth, 1.0);
 * }
 */
/* eslint-enable jsdoc/check-examples */
function XrDepthSensing(manager) {
    EventHandler.call(this);

    // reset state when the XR session ends
    this._manager = manager;
    this._manager.on('end', this._onSessionEnd, this);

    // XRDepthInformation for the current frame, null while unavailable
    this._depthInfo = null;
    this._available = false;

    // UV transform from normalized view coords to depth texture coords
    this._matrix = new Mat4();
    this._matrixDirty = false;

    // CPU-side copy of the packed depth data; placeholder used when no data
    this._depthBuffer = null;
    this._emptyBuffer = new Uint8Array(32);

    // L8_A8 so that 16-bit packed depth can be read as luminance/alpha in shaders
    var textureOptions = {
        format: PIXELFORMAT_L8_A8,
        mipmaps: false,
        addressU: ADDRESS_CLAMP_TO_EDGE,
        addressV: ADDRESS_CLAMP_TO_EDGE,
        minFilter: FILTER_LINEAR,
        magFilter: FILTER_LINEAR
    };
    this._texture = new Texture(this._manager.app.graphicsDevice, textureOptions);
}
XrDepthSensing.prototype = Object.create(EventHandler.prototype);
XrDepthSensing.prototype.constructor = XrDepthSensing;
84+
85+
/**
86+
* @event
87+
* @name pc.XrDepthSensing#available
88+
* @description Fired when depth sensing data becomes available.
89+
*/
90+
91+
/**
92+
* @event
93+
* @name pc.XrDepthSensing#unavailable
94+
* @description Fired when depth sensing data becomes unavailable.
95+
*/
96+
97+
/**
98+
* @event
99+
* @name pc.XrDepthSensing#resize
100+
 * @description Fired when the depth sensing texture has been resized. {@link pc.XrDepthSensing#uvMatrix} needs to be updated for relevant shaders.
101+
* @param {number} width - The new width of the depth texture in pixels.
102+
* @param {number} height - The new height of the depth texture in pixels.
103+
* @example
104+
* depthSensing.on('resize', function () {
105+
 *     material.setParameter('matrix_depth_uv', depthSensing.uvMatrix.data);
106+
* });
107+
*/
108+
109+
// Handles XR session end: drops depth data, notifies listeners, and resets
// the depth texture to a small placeholder.
XrDepthSensing.prototype._onSessionEnd = function () {
    this._depthInfo = null;

    var wasAvailable = this._available;
    this._available = false;
    if (wasAvailable)
        this.fire('unavailable');

    // shrink the texture back to a 4x4 placeholder (writes Texture internals directly)
    this._depthBuffer = null;
    var texture = this._texture;
    texture._width = 4;
    texture._height = 4;
    texture._levels[0] = this._emptyBuffer;
    texture.upload();
};
123+
124+
// Uploads the latest depth data into the GPU texture, resizing it when the
// underlying depth buffer resolution changes; resets the texture to a
// placeholder once depth information goes away.
XrDepthSensing.prototype._updateTexture = function () {
    var depthInfo = this._depthInfo;
    var texture = this._texture;

    if (depthInfo) {
        // resolution change requires a texture resize and a new UV matrix
        var resized = (depthInfo.width !== texture.width) || (depthInfo.height !== texture.height);
        if (resized) {
            texture._width = depthInfo.width;
            texture._height = depthInfo.height;
            this._matrixDirty = true;
        }

        // wrap the raw data as bytes and upload as level 0
        var source = depthInfo.data;
        this._depthBuffer = new Uint8Array(source.buffer, source.byteOffset, source.byteLength);
        texture._levels[0] = this._depthBuffer;
        texture.upload();

        if (resized)
            this.fire('resize', depthInfo.width, depthInfo.height);

    } else if (this._depthBuffer) {
        // depth info not available anymore - fall back to the 4x4 placeholder
        this._depthBuffer = null;
        texture._width = 4;
        texture._height = 4;
        texture._levels[0] = this._emptyBuffer;
        texture.upload();
    }
};
152+
153+
// Per-frame update: queries depth information from the XRFrame for the given
// view, refreshes the GPU texture and UV matrix, and fires availability events.
XrDepthSensing.prototype.update = function (frame, view) {
    if (view) {
        // transitioning from no-data to data invalidates the UV matrix
        if (! this._depthInfo)
            this._matrixDirty = true;
        this._depthInfo = frame.getDepthInformation(view);
    } else {
        if (this._depthInfo)
            this._matrixDirty = true;
        this._depthInfo = null;
    }

    this._updateTexture();

    if (this._matrixDirty) {
        this._matrixDirty = false;

        if (this._depthInfo) {
            this._matrix.data.set(this._depthInfo.normTextureFromNormView.matrix);
        } else {
            this._matrix.setIdentity();
        }
    }

    // fire 'available' / 'unavailable' only on state transitions
    var hasDepth = !! this._depthInfo;
    if (hasDepth !== this._available) {
        this._available = hasDepth;
        this.fire(hasDepth ? 'available' : 'unavailable');
    }
};
182+
183+
/**
 * @function
 * @name pc.XrDepthSensing#getDepth
 * @param {number} x - x coordinate of pixel in depth texture.
 * @param {number} y - y coordinate of pixel in depth texture.
 * @description Get depth value from depth information in meters. X and Y coordinates are in depth texture space, use {@link pc.XrDepthSensing#width} and {@link pc.XrDepthSensing#height}. This is not using a GPU texture and is a CPU path.
 * @example
 * var depth = app.xr.depthSensing.getDepth(x, y);
 * if (depth !== null) {
 *     // depth in meters
 * }
 * @returns {number|null} Depth in meters or null if depth information is not available.
 */
XrDepthSensing.prototype.getDepth = function (x, y) {
    // no depth information this frame - nothing to sample
    return this._depthInfo ? this._depthInfo.getDepth(x, y) : null;
};
202+
203+
Object.defineProperty(XrDepthSensing.prototype, 'supported', {
    get: function () {
        // browser exposes XRDepthInformation only when the Depth Sensing API is implemented
        return Boolean(window.XRDepthInformation);
    }
});
208+
209+
/**
 * @name pc.XrDepthSensing#available
 * @type {boolean}
 * @description True if depth sensing information is available.
 * @example
 * if (app.xr.depthSensing.available) {
 *     var depth = app.xr.depthSensing.getDepth(x, y);
 * }
 */
Object.defineProperty(XrDepthSensing.prototype, 'available', {
    get: function () {
        // flag maintained by update() / _onSessionEnd()
        return this._available;
    }
});
223+
224+
Object.defineProperty(XrDepthSensing.prototype, 'width', {
    get: function () {
        // 0 while depth information is unavailable
        return this._depthInfo ? this._depthInfo.width : 0;
    }
});
229+
230+
Object.defineProperty(XrDepthSensing.prototype, 'height', {
    get: function () {
        // 0 while depth information is unavailable
        return this._depthInfo ? this._depthInfo.height : 0;
    }
});
235+
236+
/* eslint-disable jsdoc/check-examples */
/**
 * @name pc.XrDepthSensing#texture
 * @type {pc.Texture}
 * @description Texture that contains packed depth information. The format of this texture is {@link pc.PIXELFORMAT_L8_A8}. It is UV transformed based on the underlying AR system which can be normalized using {@link pc.XrDepthSensing#uvMatrix}.
 * @example
 * material.diffuseMap = depthSensing.texture;
 * @example
 * // GLSL shader to unpack depth texture
 * varying vec2 vUv0;
 *
 * uniform sampler2D texture_depthSensingMap;
 * uniform mat4 matrix_depth_uv;
 *
 * void main(void) {
 *     // transform UVs using depth matrix
 *     vec2 texCoord = (matrix_depth_uv * vec4(vUv0.xy, 0.0, 1.0)).xy;
 *
 *     // get luminance alpha components from depth texture
 *     vec2 packedDepth = texture2D(texture_depthSensingMap, texCoord).ra;
 *
 *     // unpack into single value in millimeters
 *     float depth = dot(packedDepth, vec2(255.0, 256.0 * 255.0)); // mm
 *
 *     // normalize: 0m to 8m distance
 *     depth = min(depth / 8000.0, 1.0); // 0..1 = 0m..8m
 *
 *     // paint scene from black to white based on distance
 *     gl_FragColor = vec4(depth, depth, depth, 1.0);
 * }
 */
/* eslint-enable jsdoc/check-examples */
Object.defineProperty(XrDepthSensing.prototype, 'texture', {
    get: function () {
        // same Texture instance for the lifetime of this component; its
        // contents/size change as depth information updates
        return this._texture;
    }
});
273+
274+
/**
275+
* @name pc.XrDepthSensing#uvMatrix
276+
* @type {pc.Mat4}
277+
* @description 4x4 matrix that should be used to transform depth texture UVs to normalized UVs in a shader. It is updated when the depth texture is resized. Refer to {@link pc.XrDepthSensing#resize}.
278+
* @example
279+
* material.setParameter('matrix_depth_uv', depthSensing.uvMatrix.data);
280+
*/
281+
Object.defineProperty(XrDepthSensing.prototype, 'uvMatrix', {
282+
get: function () {
283+
return this._matrix;
284+
}
285+
});
286+
287+
export { XrDepthSensing };

src/xr/xr-manager.js

Lines changed: 14 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@ import { XrInput } from './xr-input.js';
1212
import { XrLightEstimation } from './xr-light-estimation.js';
1313
import { XrImageTracking } from './xr-image-tracking.js';
1414
import { XrDomOverlay } from './xr-dom-overlay.js';
15+
import { XrDepthSensing } from './xr-depth-sensing.js';
1516

1617
/**
1718
* @class
@@ -53,11 +54,12 @@ function XrManager(app) {
5354
this._baseLayer = null;
5455
this._referenceSpace = null;
5556

56-
this.input = new XrInput(this);
57+
this.depthSensing = new XrDepthSensing(this);
58+
this.domOverlay = new XrDomOverlay(this);
5759
this.hitTest = new XrHitTest(this);
58-
this.lightEstimation = new XrLightEstimation(this);
5960
this.imageTracking = new XrImageTracking(this);
60-
this.domOverlay = new XrDomOverlay(this);
61+
this.input = new XrInput(this);
62+
this.lightEstimation = new XrLightEstimation(this);
6163

6264
this._camera = null;
6365
this.views = [];
@@ -218,6 +220,7 @@ XrManager.prototype.start = function (camera, type, spaceType, options) {
218220
if (type === XRTYPE_AR) {
219221
opts.optionalFeatures.push('light-estimation');
220222
opts.optionalFeatures.push('hit-test');
223+
opts.optionalFeatures.push('depth-sensing');
221224

222225
if (options && options.imageTracking) {
223226
opts.optionalFeatures.push('image-tracking');
@@ -503,15 +506,17 @@ XrManager.prototype.update = function (frame) {
503506
this.input.update(frame);
504507

505508
if (this._type === XRTYPE_AR) {
506-
if (this.hitTest.supported) {
509+
if (this.hitTest.supported)
507510
this.hitTest.update(frame);
508-
}
509-
if (this.lightEstimation.supported) {
511+
512+
if (this.lightEstimation.supported)
510513
this.lightEstimation.update(frame);
511-
}
512-
if (this.imageTracking.supported) {
514+
515+
if (this.depthSensing.supported)
516+
this.depthSensing.update(frame, pose && pose.views[0]);
517+
518+
if (this.imageTracking.supported)
513519
this.imageTracking.update(frame);
514-
}
515520
}
516521

517522
this.fire('update', frame);

0 commit comments

Comments
 (0)