UNPKG

@babylonjs/core

Version:

Getting started? Play directly with the Babylon.js API using our [playground](https://playground.babylonjs.com/), which also contains many samples showing how to use the API.

564 lines (561 loc) 22.7 kB
import { RawTexture } from "../../Materials/Textures/rawTexture.js";
import { WebXRFeatureName, WebXRFeaturesManager } from "../webXRFeaturesManager.js";
import { WebXRAbstractFeature } from "./WebXRAbstractFeature.js";
import { Tools } from "../../Misc/tools.js";
import { Texture } from "../../Materials/Textures/texture.js";
import { Observable } from "../../Misc/observable.js";
import { WebGLHardwareTexture } from "../../Engines/WebGL/webGLHardwareTexture.js";
import { MaterialPluginBase } from "../../Materials/materialPluginBase.js";
import { MaterialDefines } from "../../Materials/materialDefines.js";
import { PBRBaseMaterial } from "../../Materials/PBR/pbrBaseMaterial.js";
import { RegisterMaterialPlugin } from "../../Materials/materialPluginManager.js";
import { Matrix } from "../../Maths/math.vector.js";
/**
 * Shader defines controlling how the depth sensing material plugin is compiled.
 */
class DepthSensingMaterialDefines extends MaterialDefines {
    constructor() {
        super(...arguments);
        /**
         * Is the feature enabled
         */
        this.DEPTH_SENSING = false;
        /**
         * Is the texture type provided as a texture array
         */
        this.DEPTH_SENSING_TEXTURE_ARRAY = false;
        /**
         * Is the texture type provided as Alpha-Luminance (unpacked differently on the shader)
         */
        this.DEPTH_SENSING_TEXTURE_AL = false;
        /**
         * Should the shader discard the pixel if the depth is less than the asset depth.
         * Will lead to better performance. The other variant is to change the color based on a tolerance factor.
         */
        this.DEPTH_SENSING_DISCARD = true;
    }
}
// Module-level state shared between the XR feature instance and every material plugin
// instance (the plugins are created per material by the plugin manager, so the feature
// communicates with them through these shared bindings).
let IsPluginEnabled = false;
let DepthTexture = null;
let AlphaLuminanceTexture = false;
const ScreenSize = { width: 512, height: 512 };
const ShaderViewport = { x: 0, y: 0, width: 1, height: 1 };
let GlobalRawValueToMeters = 1;
let ViewIndex = 0;
let EnableDiscard = true;
const UvTransform = Matrix.Identity();
const ManagedMaterialPlugins = [];
/**
 * Material plugin that occludes rendered fragments against the real-world depth
 * texture supplied by the WebXR depth sensing feature.
 * @internal
 */
class WebXRDepthSensingMaterialPlugin extends MaterialPluginBase {
    /** @internal */
    _markAllDefinesAsDirty() {
        this._enable(this._isEnabled);
        this.markAllDefinesAsDirty();
    }
    /**
     * Gets whether the depth sensing plugin is enabled in the material.
     */
    get isEnabled() {
        return this._isEnabled;
    }
    /**
     * Sets whether the depth sensing plugin is enabled in the material.
     * @param value enabled
     */
    set isEnabled(value) {
        if (this._isEnabled === value) {
            return;
        }
        this._isEnabled = value;
        this._markAllDefinesAsDirty();
    }
    /**
     * Gets a boolean indicating that the plugin is compatible with a given shader language.
     * @param shaderLanguage The shader language to use.
     * @returns true if the plugin is compatible with the shader language
     */
    isCompatible(shaderLanguage) {
        switch (shaderLanguage) {
            case 0 /* ShaderLanguage.GLSL */:
                return true;
            default:
                // no webgpu for webxr yet, however - if this is not true the plugin fails to load.
                // webxr is currently only supported on webgl, and the plugin is disabled per default.
                return true;
        }
    }
    constructor(material) {
        super(material, "DepthSensing", 222, new DepthSensingMaterialDefines());
        this._isEnabled = false;
        // PBR materials expose the final fragment color under a different variable name
        this._varColorName = material instanceof PBRBaseMaterial ? "finalColor" : "color";
        ManagedMaterialPlugins.push(this);
    }
    /**
     * Prepare the defines
     * @param defines the defines
     */
    prepareDefines(defines) {
        defines.DEPTH_SENSING = !!DepthTexture && IsPluginEnabled;
        defines.DEPTH_SENSING_TEXTURE_ARRAY = DepthTexture?.is2DArray ?? false;
        defines.DEPTH_SENSING_TEXTURE_AL = AlphaLuminanceTexture;
        defines.DEPTH_SENSING_DISCARD = EnableDiscard;
    }
    getUniforms() {
        return {
            // first, define the UBO with the correct type and size.
            ubo: [
                { name: "ds_invScreenSize", size: 2, type: "vec2" },
                { name: "ds_rawValueToMeters", size: 1, type: "float" },
                { name: "ds_viewIndex", size: 1, type: "float" },
                { name: "ds_shaderViewport", size: 4, type: "vec4" },
                { name: "ds_uvTransform", size: 16, type: "mat4" },
            ],
            // now, on the fragment shader, add the uniform itself in case uniform buffers are not supported by the engine
            fragment: `#ifdef DEPTH_SENSING
uniform vec2 ds_invScreenSize;
uniform float ds_rawValueToMeters;
uniform float ds_viewIndex;
uniform vec4 ds_shaderViewport;
uniform mat4 ds_uvTransform;
#endif
`,
        };
    }
    getSamplers(samplers) {
        samplers.push("ds_depthSampler");
    }
    bindForSubMesh(uniformBuffer) {
        if (IsPluginEnabled && DepthTexture) {
            uniformBuffer.updateFloat2("ds_invScreenSize", 1 / ScreenSize.width, 1 / ScreenSize.height);
            uniformBuffer.updateFloat("ds_rawValueToMeters", GlobalRawValueToMeters);
            uniformBuffer.updateFloat("ds_viewIndex", ViewIndex);
            uniformBuffer.updateFloat4("ds_shaderViewport", ShaderViewport.x, ShaderViewport.y, ShaderViewport.width, ShaderViewport.height);
            uniformBuffer.setTexture("ds_depthSampler", DepthTexture);
            uniformBuffer.updateMatrix("ds_uvTransform", UvTransform);
        }
    }
    getClassName() {
        return "DepthSensingMaterialPlugin";
    }
    getCustomCode(shaderType) {
        return shaderType === "vertex"
            ? {
                CUSTOM_VERTEX_MAIN_BEGIN: `
#ifdef DEPTH_SENSING
#ifdef MULTIVIEW
ds_viewIndexMultiview = float(gl_ViewID_OVR);
#endif
#endif
`,
                CUSTOM_VERTEX_DEFINITIONS: `
#ifdef DEPTH_SENSING
#ifdef MULTIVIEW
varying float ds_viewIndexMultiview;
#endif
#endif
`,
            }
            : {
                CUSTOM_FRAGMENT_DEFINITIONS: `
#ifdef DEPTH_SENSING
#ifdef DEPTH_SENSING_TEXTURE_ARRAY
uniform highp sampler2DArray ds_depthSampler;
#else
uniform sampler2D ds_depthSampler;
#endif
#ifdef MULTIVIEW
varying float ds_viewIndexMultiview;
#endif
#endif
`,
                CUSTOM_FRAGMENT_MAIN_BEGIN: `
#ifdef DEPTH_SENSING
#ifdef MULTIVIEW
float ds_viewIndexSet = ds_viewIndexMultiview;
vec2 ds_compensation = vec2(0.0, 0.0);
#else
float ds_viewIndexSet = ds_viewIndex;
vec2 ds_compensation = vec2(ds_viewIndexSet, 0.0);
#endif
vec2 ds_baseUv = gl_FragCoord.xy * ds_invScreenSize;
#ifdef DEPTH_SENSING_TEXTURE_ARRAY
vec2 ds_uv = ds_baseUv - ds_compensation;
vec3 ds_depthUv = vec3((ds_uvTransform * vec4(ds_uv, 0.0, 1.0)).xy, ds_viewIndexSet);
#else
vec2 ds_depthUv = (ds_uvTransform * vec4(ds_baseUv.x, 1.0 - ds_baseUv.y, 0.0, 1.0)).xy;
#endif
#ifdef DEPTH_SENSING_TEXTURE_AL
// from alpha-luminance - taken from the explainer
vec2 ds_alphaLuminance = texture(ds_depthSampler, ds_depthUv).ra;
float ds_cameraDepth = dot(ds_alphaLuminance, vec2(255.0, 256.0 * 255.0));
#else
float ds_cameraDepth = texture(ds_depthSampler, ds_depthUv).r;
#endif
ds_cameraDepth = ds_cameraDepth * ds_rawValueToMeters;
float ds_assetDepth = gl_FragCoord.z;
#ifdef DEPTH_SENSING_DISCARD
if(ds_cameraDepth < ds_assetDepth) {
discard;
}
#endif
#endif
`,
                CUSTOM_FRAGMENT_BEFORE_FRAGCOLOR: `
#ifdef DEPTH_SENSING
#ifndef DEPTH_SENSING_DISCARD
const float ds_depthTolerancePerM = 0.005;
float ds_occlusion = clamp(1.0 - 0.5 * (ds_cameraDepth - ds_assetDepth) / (ds_depthTolerancePerM * ds_assetDepth) + 0.5, 0.0, 1.0);
${this._varColorName} *= (1.0 - ds_occlusion);
#endif
#endif
`,
            };
    }
    dispose(_forceDisposeTextures) {
        // unregister from the shared plugin list before the base class tears down
        const index = ManagedMaterialPlugins.indexOf(this);
        if (index !== -1) {
            ManagedMaterialPlugins.splice(index, 1);
        }
        super.dispose(_forceDisposeTextures);
    }
}
/**
 * WebXR Feature for WebXR Depth Sensing Module
 * @since 5.49.1
 */
export class WebXRDepthSensing extends WebXRAbstractFeature {
    /**
     * Width of depth data. If depth data is not exist, returns null.
     */
    get width() {
        return this._width;
    }
    /**
     * Height of depth data. If depth data is not exist, returns null.
     */
    get height() {
        return this._height;
    }
    /**
     * Scale factor by which the raw depth values must be multiplied in order to get the depths in meters.
     */
    get rawValueToMeters() {
        return this._rawValueToMeters;
    }
    /**
     * An XRRigidTransform that needs to be applied when indexing into the depth buffer.
     */
    get normDepthBufferFromNormView() {
        return this._normDepthBufferFromNormView;
    }
    /**
     * Describes which depth-sensing usage ("cpu" or "gpu") is used.
     */
    get depthUsage() {
        switch (this._xrSessionManager.session.depthUsage) {
            case "cpu-optimized":
                return "cpu";
            case "gpu-optimized":
                return "gpu";
        }
    }
    /**
     * Describes which depth sensing data format ("ushort" or "float") is used.
     */
    get depthDataFormat() {
        switch (this._xrSessionManager.session.depthDataFormat) {
            case "luminance-alpha":
                return "ushort";
            case "float32":
                return "float";
            case "unsigned-short":
                return "ushort";
        }
    }
    /**
     * Latest cached InternalTexture which contains the depth buffer information.
     * This can be used when the depth usage is "gpu".
     * @deprecated This will be removed in the future. Use latestDepthImageTexture
     */
    get latestInternalTexture() {
        if (!this._cachedWebGLTexture) {
            return null;
        }
        return this._getInternalTextureFromDepthInfo();
    }
    /**
     * cached depth buffer
     */
    get latestDepthBuffer() {
        if (!this._cachedDepthBuffer) {
            return null;
        }
        return this.depthDataFormat === "float" ? new Float32Array(this._cachedDepthBuffer) : new Uint16Array(this._cachedDepthBuffer);
    }
    /**
     * Latest cached Texture of depth image which is made from the depth buffer data.
     */
    get latestDepthImageTexture() {
        return this._cachedDepthImageTexture;
    }
    /**
     * Creates a new instance of the depth sensing feature
     * @param _xrSessionManager the WebXRSessionManager
     * @param options options for WebXR Depth Sensing Feature
     */
    constructor(_xrSessionManager, options) {
        super(_xrSessionManager);
        this.options = options;
        this._width = null;
        this._height = null;
        this._rawValueToMeters = null;
        this._textureType = null;
        this._normDepthBufferFromNormView = null;
        this._cachedDepthBuffer = null;
        this._cachedWebGLTexture = null;
        this._cachedDepthImageTexture = null;
        this._onCameraObserver = null;
        /**
         * Event that notify when `DepthInformation.getDepthInMeters` is available.
         * `getDepthInMeters` method needs active XRFrame (not available for cached XRFrame)
         */
        this.onGetDepthInMetersAvailable = new Observable();
        this.xrNativeFeatureName = "depth-sensing";
        // https://immersive-web.github.io/depth-sensing/
        Tools.Warn("depth-sensing is an experimental and unstable feature.");
        EnableDiscard = !options.useToleranceFactorForDepthSensing;
    }
    /**
     * attach this feature
     * Will usually be called by the features manager
     * @param force should attachment be forced (even when already attached)
     * @returns true if successful.
     */
    attach(force) {
        if (!super.attach(force)) {
            return false;
        }
        // bail out if the session did not negotiate either a depth format or a depth usage
        // (the feature cannot work without both)
        const isUsageOrFormatMissing = this._xrSessionManager.session.depthDataFormat == null || this._xrSessionManager.session.depthUsage == null;
        if (isUsageOrFormatMissing) {
            return false;
        }
        this._glBinding = new XRWebGLBinding(this._xrSessionManager.session, this._xrSessionManager.scene.getEngine()._gl);
        IsPluginEnabled = !this.options.disableDepthSensingOnMaterials;
        if (IsPluginEnabled) {
            for (const plugin of ManagedMaterialPlugins) {
                plugin.isEnabled = true;
            }
            // track per-camera render state so the plugins bind the right viewport/view index
            this._onCameraObserver = this._xrSessionManager.scene.onBeforeCameraRenderObservable.add((camera) => {
                if (!IsPluginEnabled) {
                    return;
                }
                // make sure this is a webxr camera
                if (camera.outputRenderTarget) {
                    const viewport = camera.rigCameras.length > 0 ? camera.rigCameras[0].viewport : camera.viewport;
                    // side-by-side stereo: each rig camera renders to a slice of the full target width
                    ScreenSize.width = camera.outputRenderTarget.getRenderWidth() / (camera.rigParent ? camera.rigParent.rigCameras.length || 1 : 1);
                    ScreenSize.height = camera.outputRenderTarget.getRenderHeight();
                    ShaderViewport.x = viewport.x;
                    ShaderViewport.y = viewport.y;
                    ShaderViewport.width = viewport.width;
                    ShaderViewport.height = viewport.height;
                    // find the viewIndex
                    if (camera.rigParent) {
                        // should use the viewIndexes array!
                        ViewIndex = camera.isLeftCamera ? 0 : 1;
                    }
                }
            });
        }
        return true;
    }
    detach() {
        IsPluginEnabled = false;
        DepthTexture = null;
        this._cachedWebGLTexture = null;
        this._cachedDepthBuffer = null;
        for (const plugin of ManagedMaterialPlugins) {
            plugin.isEnabled = false;
        }
        if (this._onCameraObserver) {
            this._xrSessionManager.scene.onBeforeCameraRenderObservable.remove(this._onCameraObserver);
            // clear the reference so dispose() does not try to remove a stale observer
            this._onCameraObserver = null;
        }
        return super.detach();
    }
    /**
     * Dispose this feature and all of the resources attached
     */
    dispose() {
        this._cachedDepthImageTexture?.dispose();
        this.onGetDepthInMetersAvailable.clear();
        // cleanup
        if (this._onCameraObserver) {
            this._xrSessionManager.scene.onBeforeCameraRenderObservable.remove(this._onCameraObserver);
        }
        for (const plugin of ManagedMaterialPlugins) {
            plugin.dispose();
        }
        ManagedMaterialPlugins.length = 0;
    }
    _onXRFrame(_xrFrame) {
        const referenceSpace = this._xrSessionManager.referenceSpace;
        const pose = _xrFrame.getViewerPose(referenceSpace);
        if (pose == null) {
            return;
        }
        for (const view of pose.views) {
            switch (this.depthUsage) {
                case "cpu":
                    this._updateDepthInformationAndTextureCPUDepthUsage(_xrFrame, view, this.depthDataFormat);
                    break;
                case "gpu":
                    if (!this._glBinding) {
                        break;
                    }
                    this._updateDepthInformationAndTextureWebGLDepthUsage(this._glBinding, view, this.depthDataFormat);
                    break;
                default:
                    Tools.Error("Unknown depth usage");
                    this.detach();
                    break;
            }
        }
    }
    _updateDepthInformationAndTextureCPUDepthUsage(frame, view, dataFormat) {
        const depthInfo = frame.getDepthInformation(view);
        if (depthInfo === null) {
            return;
        }
        const { data, width, height, rawValueToMeters, getDepthInMeters, normDepthBufferFromNormView } = depthInfo;
        this._width = width;
        this._height = height;
        this._rawValueToMeters = rawValueToMeters;
        this._cachedDepthBuffer = data;
        GlobalRawValueToMeters = rawValueToMeters;
        AlphaLuminanceTexture = dataFormat === "luminance-alpha";
        UvTransform.fromArray(normDepthBufferFromNormView.matrix);
        // to avoid Illegal Invocation error, bind `this`
        this.onGetDepthInMetersAvailable.notifyObservers(getDepthInMeters.bind(depthInfo));
        if (!this._cachedDepthImageTexture) {
            this._cachedDepthImageTexture = RawTexture.CreateRTexture(null, width, height, this._xrSessionManager.scene, false, false, Texture.NEAREST_SAMPLINGMODE, 1);
            DepthTexture = this._cachedDepthImageTexture;
        }
        let float32Array = null;
        switch (dataFormat) {
            case "ushort":
            case "luminance-alpha":
                float32Array = Float32Array.from(new Uint16Array(data));
                break;
            case "float":
                float32Array = new Float32Array(data);
                break;
            default:
                break;
        }
        if (float32Array) {
            if (this.options.prepareTextureForVisualization) {
                // scale raw values into meters so the texture can be displayed directly
                float32Array = float32Array.map((val) => val * rawValueToMeters);
            }
            this._cachedDepthImageTexture.update(float32Array);
        }
    }
    _updateDepthInformationAndTextureWebGLDepthUsage(webglBinding, view, dataFormat) {
        const depthInfo = webglBinding.getDepthInformation(view);
        if (depthInfo === null) {
            return;
        }
        const { texture, width, height, textureType, rawValueToMeters, normDepthBufferFromNormView } = depthInfo;
        GlobalRawValueToMeters = rawValueToMeters;
        AlphaLuminanceTexture = dataFormat === "luminance-alpha";
        UvTransform.fromArray(normDepthBufferFromNormView.matrix);
        // the WebGL texture is wrapped only once; subsequent frames reuse the cached wrapper
        if (this._cachedWebGLTexture) {
            return;
        }
        this._width = width;
        this._height = height;
        this._cachedWebGLTexture = texture;
        this._textureType = textureType;
        const scene = this._xrSessionManager.scene;
        const internalTexture = this._getInternalTextureFromDepthInfo();
        if (!this._cachedDepthImageTexture) {
            this._cachedDepthImageTexture = RawTexture.CreateRTexture(null, width, height, scene, false, true, Texture.NEAREST_SAMPLINGMODE, dataFormat === "float" ? 1 : 0);
        }
        this._cachedDepthImageTexture._texture = internalTexture;
        DepthTexture = this._cachedDepthImageTexture;
        this._xrSessionManager.scene.markAllMaterialsAsDirty(1);
    }
    /**
     * Extends the session init object if needed
     * @returns augmentation object for the xr session init object.
     */
    async getXRSessionInitExtension() {
        const isDepthUsageDeclared = this.options.usagePreference != null && this.options.usagePreference.length !== 0;
        const isDataFormatDeclared = this.options.dataFormatPreference != null && this.options.dataFormatPreference.length !== 0;
        // nothing to contribute unless both preference lists are provided
        if (!isDepthUsageDeclared || !isDataFormatDeclared) {
            return {};
        }
        // translate the Babylon-facing names to the WebXR spec values
        const usages = this.options.usagePreference.map((usage) => {
            switch (usage) {
                case "cpu":
                    return "cpu-optimized";
                case "gpu":
                    return "gpu-optimized";
            }
        });
        const dataFormats = this.options.dataFormatPreference.map((format) => {
            switch (format) {
                case "luminance-alpha":
                    return "luminance-alpha";
                case "float":
                    return "float32";
                case "ushort":
                    return "unsigned-short";
            }
        });
        return {
            depthSensing: {
                usagePreference: usages,
                dataFormatPreference: dataFormats,
            },
        };
    }
    _getInternalTextureFromDepthInfo() {
        const engine = this._xrSessionManager.scene.getEngine();
        const dataFormat = this.depthDataFormat;
        const textureType = this._textureType;
        if (!this._width || !this._height || !this._cachedWebGLTexture) {
            throw new Error("Depth information is not available");
        }
        const internalTexture = engine.wrapWebGLTexture(this._cachedWebGLTexture, false, 1, this._width || 256, this._height || 256);
        internalTexture.isCube = false;
        internalTexture.invertY = false;
        internalTexture._useSRGBBuffer = false;
        internalTexture.format = dataFormat === "luminance-alpha" ? 2 : 5;
        internalTexture.generateMipMaps = false;
        internalTexture.type = dataFormat === "float" ? 1 : dataFormat === "ushort" ? 5 : 0;
        internalTexture._cachedWrapU = 1;
        internalTexture._cachedWrapV = 1;
        internalTexture._hardwareTexture = new WebGLHardwareTexture(this._cachedWebGLTexture, engine._gl);
        internalTexture.is2DArray = textureType === "texture-array";
        return internalTexture;
    }
}
/**
 * The module's name
 */
WebXRDepthSensing.Name = WebXRFeatureName.DEPTH_SENSING;
/**
 * The (Babylon) version of this module.
 * This is an integer representing the implementation version.
 * This number does not correspond to the WebXR specs version
 */
WebXRDepthSensing.Version = 1;
WebXRFeaturesManager.AddWebXRFeature(WebXRDepthSensing.Name, (xrSessionManager, options) => {
    return () => new WebXRDepthSensing(xrSessionManager, options);
}, WebXRDepthSensing.Version, false);
RegisterMaterialPlugin("WebXRDepthSensingMaterialPlugin", (material) => new WebXRDepthSensingMaterialPlugin(material));
//# sourceMappingURL=WebXRDepthSensing.js.map