UNPKG

@babylonjs/core

Version:

Getting started? Play directly with the Babylon.js API using our [playground](https://playground.babylonjs.com/). The playground also contains many samples that show how to use the API.

1,249 lines (1,114 loc) 111 kB
import type { Nullable } from "../types.js";
import type { Scene } from "../scene.js";
import { InternalTexture } from "../Materials/Textures/internalTexture.js";
import type { IOfflineProvider } from "../Offline/IOfflineProvider.js";
import type { ILoadingScreen } from "../Loading/loadingScreen.js";
import type { WebGLPipelineContext } from "./WebGL/webGLPipelineContext.js";
import type { IPipelineContext } from "./IPipelineContext.js";
import type { ICustomAnimationFrameRequester } from "../Misc/customAnimationFrameRequester.js";
import type { EngineOptions } from "./thinEngine.js";
import { ThinEngine } from "./thinEngine.js";
import type { IViewportLike, IColor4Like } from "../Maths/math.like.js";
import { PerformanceMonitor } from "../Misc/performanceMonitor.js";
import type { DataBuffer } from "../Buffers/dataBuffer.js";
import type { RenderTargetWrapper } from "./renderTargetWrapper.js";
import "./Extensions/engine.alpha.js";
import "./Extensions/engine.rawTexture.js";
import "./Extensions/engine.readTexture.js";
import "./Extensions/engine.dynamicBuffer.js";
import "./Extensions/engine.cubeTexture.js";
import "./Extensions/engine.renderTarget.js";
import "./Extensions/engine.renderTargetTexture.js";
import "./Extensions/engine.renderTargetCube.js";
import "./Extensions/engine.prefilteredCubeTexture.js";
import "./Extensions/engine.uniformBuffer.js";
import "./AbstractEngine/abstractEngine.loadingScreen.js";
import "./AbstractEngine/abstractEngine.dom.js";
import "./AbstractEngine/abstractEngine.states.js";
import "./AbstractEngine/abstractEngine.renderPass.js";
import "./AbstractEngine/abstractEngine.texture.js";
import { AbstractEngine } from "./abstractEngine.js";
/**
 * The engine class is responsible for interfacing with all lower-level APIs such as WebGL and Audio
 */
export declare class Engine extends ThinEngine {
    /** Defines that alpha blending is disabled */
    static readonly ALPHA_DISABLE = 0;
    /** Defines that alpha blending to SRC ALPHA * SRC + DEST */
    static readonly ALPHA_ADD = 1;
    /** Defines that alpha blending to SRC ALPHA * SRC + (1 - SRC ALPHA) * DEST */
    static readonly ALPHA_COMBINE = 2;
    /** Defines that alpha blending to DEST - SRC * DEST */
    static readonly ALPHA_SUBTRACT = 3;
    /** Defines that alpha blending to SRC * DEST */
    static readonly ALPHA_MULTIPLY = 4;
    /** Defines that alpha blending to SRC ALPHA * SRC + (1 - SRC) * DEST */
    static readonly ALPHA_MAXIMIZED = 5;
    /** Defines that alpha blending to SRC + DEST */
    static readonly ALPHA_ONEONE = 6;
    /** Defines that alpha blending to SRC + (1 - SRC ALPHA) * DEST */
    static readonly ALPHA_PREMULTIPLIED = 7;
    /**
     * Defines that alpha blending to SRC + (1 - SRC ALPHA) * DEST
     * Alpha will be set to (1 - SRC ALPHA) * DEST ALPHA
     */
    static readonly ALPHA_PREMULTIPLIED_PORTERDUFF = 8;
    /** Defines that alpha blending to CST * SRC + (1 - CST) * DEST */
    static readonly ALPHA_INTERPOLATE = 9;
    /**
     * Defines that alpha blending to SRC + (1 - SRC) * DEST
     * Alpha will be set to SRC ALPHA + (1 - SRC ALPHA) * DEST ALPHA
     */
    static readonly ALPHA_SCREENMODE = 10;
    /** Defines that the resource is not delayed */
    static readonly DELAYLOADSTATE_NONE = 0;
    /** Defines that the resource was successfully delay loaded */
    static readonly DELAYLOADSTATE_LOADED = 1;
    /** Defines that the resource is currently delay loading */
    static readonly DELAYLOADSTATE_LOADING = 2;
    /** Defines that the resource is delayed and has not started loading */
    static readonly DELAYLOADSTATE_NOTLOADED = 4;
    /** Passed to depthFunction or stencilFunction to specify depth or stencil tests will never pass. i.e. Nothing will be drawn */
    static readonly NEVER = 512;
    /** Passed to depthFunction or stencilFunction to specify depth or stencil tests will always pass. i.e. Pixels will be drawn in the order they are drawn */
    static readonly ALWAYS = 519;
    /** Passed to depthFunction or stencilFunction to specify depth or stencil tests will pass if the new depth value is less than the stored value */
    static readonly LESS = 513;
    /** Passed to depthFunction or stencilFunction to specify depth or stencil tests will pass if the new depth value is equals to the stored value */
    static readonly EQUAL = 514;
    /** Passed to depthFunction or stencilFunction to specify depth or stencil tests will pass if the new depth value is less than or equal to the stored value */
    static readonly LEQUAL = 515;
    /** Passed to depthFunction or stencilFunction to specify depth or stencil tests will pass if the new depth value is greater than the stored value */
    static readonly GREATER = 516;
    /** Passed to depthFunction or stencilFunction to specify depth or stencil tests will pass if the new depth value is greater than or equal to the stored value */
    static readonly GEQUAL = 518;
    /** Passed to depthFunction or stencilFunction to specify depth or stencil tests will pass if the new depth value is not equal to the stored value */
    static readonly NOTEQUAL = 517;
    /** Passed to stencilOperation to specify that stencil value must be kept */
    static readonly KEEP = 7680;
    /** Passed to stencilOperation to specify that stencil value must be replaced */
    static readonly REPLACE = 7681;
    /** Passed to stencilOperation to specify that stencil value must be incremented */
    static readonly INCR = 7682;
    /** Passed to stencilOperation to specify that stencil value must be decremented */
    static readonly DECR = 7683;
    /** Passed to stencilOperation to specify that stencil value must be inverted */
    static readonly INVERT = 5386;
    /** Passed to stencilOperation to specify that stencil value must be incremented with wrapping */
    static readonly INCR_WRAP = 34055;
    /** Passed to stencilOperation to specify that stencil value must be decremented with wrapping */
    static readonly DECR_WRAP = 34056;
    /** Texture is not repeating outside of 0..1 UVs */
    static readonly TEXTURE_CLAMP_ADDRESSMODE = 0;
    /** Texture is repeating outside of 0..1 UVs */
    static readonly TEXTURE_WRAP_ADDRESSMODE = 1;
    /** Texture is repeating and mirrored */
    static readonly TEXTURE_MIRROR_ADDRESSMODE = 2;
    /** ALPHA */
    static readonly TEXTUREFORMAT_ALPHA = 0;
    /** LUMINANCE */
    static readonly TEXTUREFORMAT_LUMINANCE = 1;
    /** LUMINANCE_ALPHA */
    static readonly TEXTUREFORMAT_LUMINANCE_ALPHA = 2;
    /** RGB */
    static readonly TEXTUREFORMAT_RGB = 4;
    /** RGBA */
    static readonly TEXTUREFORMAT_RGBA = 5;
    /** RED */
    static readonly TEXTUREFORMAT_RED = 6;
    /** RED (2nd reference) */
    static readonly TEXTUREFORMAT_R = 6;
    /** RED unsigned short normed to [0, 1] */
    static readonly TEXTUREFORMAT_R16_UNORM = 33322;
    /** RG unsigned short normed to [0, 1] */
    static readonly TEXTUREFORMAT_RG16_UNORM = 33324;
    /** RGB unsigned short normed to [0, 1] */
    static readonly TEXTUREFORMAT_RGB16_UNORM = 32852;
    /** RGBA unsigned short normed to [0, 1] */
    static readonly TEXTUREFORMAT_RGBA16_UNORM = 32859;
    /** RED signed short normed to [-1, 1] */
    static readonly TEXTUREFORMAT_R16_SNORM = 36760;
    /** RG signed short normed to [-1, 1] */
    static readonly TEXTUREFORMAT_RG16_SNORM = 36761;
    /** RGB signed short normed to [-1, 1] */
    static readonly TEXTUREFORMAT_RGB16_SNORM = 36762;
    /** RGBA signed short normed to [-1, 1] */
    static readonly TEXTUREFORMAT_RGBA16_SNORM = 36763;
    /** RG */
    static readonly TEXTUREFORMAT_RG = 7;
    /** RED_INTEGER */
    static readonly TEXTUREFORMAT_RED_INTEGER = 8;
    /** RED_INTEGER (2nd reference) */
    static readonly TEXTUREFORMAT_R_INTEGER = 8;
    /** RG_INTEGER */
    static readonly TEXTUREFORMAT_RG_INTEGER = 9;
    /** RGB_INTEGER */
    static readonly TEXTUREFORMAT_RGB_INTEGER = 10;
    /** RGBA_INTEGER */
    static readonly TEXTUREFORMAT_RGBA_INTEGER = 11;
    /** UNSIGNED_BYTE */
    static readonly TEXTURETYPE_UNSIGNED_BYTE = 0;
    /** @deprecated use more explicit TEXTURETYPE_UNSIGNED_BYTE instead. Use TEXTURETYPE_UNSIGNED_INTEGER for 32bits values. */
    static readonly TEXTURETYPE_UNSIGNED_INT = 0;
    /** FLOAT */
    static readonly TEXTURETYPE_FLOAT = 1;
    /** HALF_FLOAT */
    static readonly TEXTURETYPE_HALF_FLOAT = 2;
    /** BYTE */
    static readonly TEXTURETYPE_BYTE = 3;
    /** SHORT */
    static readonly TEXTURETYPE_SHORT = 4;
    /** UNSIGNED_SHORT */
    static readonly TEXTURETYPE_UNSIGNED_SHORT = 5;
    /** INT */
    static readonly TEXTURETYPE_INT = 6;
    /** UNSIGNED_INT */
    static readonly TEXTURETYPE_UNSIGNED_INTEGER = 7;
    /** UNSIGNED_SHORT_4_4_4_4 */
    static readonly TEXTURETYPE_UNSIGNED_SHORT_4_4_4_4 = 8;
    /** UNSIGNED_SHORT_5_5_5_1 */
    static readonly TEXTURETYPE_UNSIGNED_SHORT_5_5_5_1 = 9;
    /** UNSIGNED_SHORT_5_6_5 */
    static readonly TEXTURETYPE_UNSIGNED_SHORT_5_6_5 = 10;
    /** UNSIGNED_INT_2_10_10_10_REV */
    static readonly TEXTURETYPE_UNSIGNED_INT_2_10_10_10_REV = 11;
    /** UNSIGNED_INT_24_8 */
    static readonly TEXTURETYPE_UNSIGNED_INT_24_8 = 12;
    /** UNSIGNED_INT_10F_11F_11F_REV */
    static readonly TEXTURETYPE_UNSIGNED_INT_10F_11F_11F_REV = 13;
    /** UNSIGNED_INT_5_9_9_9_REV */
    static readonly TEXTURETYPE_UNSIGNED_INT_5_9_9_9_REV = 14;
    /** FLOAT_32_UNSIGNED_INT_24_8_REV */
    static readonly TEXTURETYPE_FLOAT_32_UNSIGNED_INT_24_8_REV = 15;
    /** nearest is mag = nearest and min = nearest and mip = none */
    static readonly TEXTURE_NEAREST_SAMPLINGMODE = 1;
    /** Bilinear is mag = linear and min = linear and mip = nearest */
    static readonly TEXTURE_BILINEAR_SAMPLINGMODE = 2;
    /** Trilinear is mag = linear and min = linear and mip = linear */
    static readonly TEXTURE_TRILINEAR_SAMPLINGMODE = 3;
    /** nearest is mag = nearest and min = nearest and mip = linear */
    static readonly TEXTURE_NEAREST_NEAREST_MIPLINEAR = 8;
    /** Bilinear is mag = linear and min = linear and mip = nearest */
    static readonly TEXTURE_LINEAR_LINEAR_MIPNEAREST = 11;
    /** Trilinear is mag = linear and min = linear and mip = linear */
    static readonly TEXTURE_LINEAR_LINEAR_MIPLINEAR = 3;
    /** mag = nearest and min = nearest and mip = nearest */
    static readonly TEXTURE_NEAREST_NEAREST_MIPNEAREST = 4;
    /** mag = nearest and min = linear and mip = nearest */
    static readonly TEXTURE_NEAREST_LINEAR_MIPNEAREST = 5;
    /** mag = nearest and min = linear and mip = linear */
    static readonly TEXTURE_NEAREST_LINEAR_MIPLINEAR = 6;
    /** mag = nearest and min = linear and mip = none */
    static readonly TEXTURE_NEAREST_LINEAR = 7;
    /** mag = nearest and min = nearest and mip = none */
    static readonly TEXTURE_NEAREST_NEAREST = 1;
    /** mag = linear and min = nearest and mip = nearest */
    static readonly TEXTURE_LINEAR_NEAREST_MIPNEAREST = 9;
    /** mag = linear and min = nearest and mip = linear */
    static readonly TEXTURE_LINEAR_NEAREST_MIPLINEAR = 10;
    /** mag = linear and min = linear and mip = none */
    static readonly TEXTURE_LINEAR_LINEAR = 2;
    /** mag = linear and min = nearest and mip = none */
    static readonly TEXTURE_LINEAR_NEAREST = 12;
    /** Explicit coordinates mode */
    static readonly TEXTURE_EXPLICIT_MODE = 0;
    /** Spherical coordinates mode */
    static readonly TEXTURE_SPHERICAL_MODE = 1;
    /** Planar coordinates mode */
    static readonly TEXTURE_PLANAR_MODE = 2;
    /** Cubic coordinates mode */
    static readonly TEXTURE_CUBIC_MODE = 3;
    /** Projection coordinates mode */
    static readonly TEXTURE_PROJECTION_MODE = 4;
    /** Skybox coordinates mode */
    static readonly TEXTURE_SKYBOX_MODE = 5;
    /** Inverse Cubic coordinates mode */
    static readonly TEXTURE_INVCUBIC_MODE = 6;
    /** Equirectangular coordinates mode */
    static readonly TEXTURE_EQUIRECTANGULAR_MODE = 7;
    /** Equirectangular Fixed coordinates mode */
    static readonly TEXTURE_FIXED_EQUIRECTANGULAR_MODE = 8;
    /** Equirectangular Fixed Mirrored coordinates mode */
    static readonly TEXTURE_FIXED_EQUIRECTANGULAR_MIRRORED_MODE = 9;
    /** Defines that texture rescaling will use a floor to find the closer power of 2 size */
    static readonly SCALEMODE_FLOOR = 1;
    /** Defines that texture rescaling will look for the nearest power of 2 size */
    static readonly SCALEMODE_NEAREST = 2;
    /** Defines that texture rescaling will use a ceil to find the closer power of 2 size */
    static readonly SCALEMODE_CEILING = 3;
    /**
     * Returns the current npm package of the sdk
     */
    static get NpmPackage(): string;
    /**
     * Returns the current version of the framework
     */
    static get Version(): string;
    /** Gets the list of created engines */
    static get Instances(): AbstractEngine[];
    /**
     * Gets the latest created engine
     */
    static get LastCreatedEngine(): Nullable<AbstractEngine>;
    /**
     * Gets the latest created scene
     */
    static get LastCreatedScene(): Nullable<Scene>;
    /** @internal */
    /**
     * Method called to create the default loading screen.
     * This can be overridden in your own app.
     * @param canvas The rendering canvas element
     * @returns The loading screen
     */
    static DefaultLoadingScreenFactory(canvas: HTMLCanvasElement): ILoadingScreen;
    /**
     * If set, will be used to request the next animation frame for the render loop
     */
    customAnimationFrameRequester: Nullable<ICustomAnimationFrameRequester>;
    private _rescalePostProcess;
    protected get _supportsHardwareTextureRescaling(): boolean;
    private _measureFps;
    private _performanceMonitor;
    /**
     * Gets the performance monitor attached to this engine
     * @see https://doc.babylonjs.com/features/featuresDeepDive/scene/optimize_your_scene#engineinstrumentation
     */
    get performanceMonitor(): PerformanceMonitor;
    /**
     * Creates a new engine
     * @param canvasOrContext defines the canvas or WebGL context to use for rendering. If you provide a WebGL context, Babylon.js will not hook events on the canvas (like pointers, keyboards, etc...) so no event observables will be available. This is mostly used when Babylon.js is used as a plugin on a system which already used the WebGL context
     * @param antialias defines enable antialiasing (default: false)
     * @param options defines further options to be sent to the getContext() function
     * @param adaptToDeviceRatio defines whether to adapt to the device's viewport characteristics (default: false)
     */
    constructor(canvasOrContext: Nullable<HTMLCanvasElement | OffscreenCanvas | WebGLRenderingContext | WebGL2RenderingContext>, antialias?: boolean, options?: EngineOptions, adaptToDeviceRatio?: boolean);
    protected _initGLContext(): void;
    /**
     * Shared initialization across engines types.
     * @param canvas The canvas associated with this instance of the engine.
     */
    protected _sharedInit(canvas: HTMLCanvasElement): void;
    /**
     * Resize an image and returns the image data as an uint8array
     * @param image image to resize
     * @param bufferWidth destination buffer width
     * @param bufferHeight destination buffer height
     * @returns an uint8array containing RGBA values of bufferWidth * bufferHeight size
     */
    resizeImageBitmap(image: HTMLImageElement | ImageBitmap, bufferWidth: number, bufferHeight: number): Uint8Array;
    /**
     * Engine abstraction for loading and creating an image bitmap from a given source string.
     * @param imageSource source to load the image from.
     * @param options An object that sets options for the image's extraction.
     * @returns ImageBitmap
     */
    _createImageBitmapFromSource(imageSource: string, options?: ImageBitmapOptions): Promise<ImageBitmap>;
    /**
     * Toggle full screen mode
     * @param requestPointerLock defines if a pointer lock should be requested from the user
     */
    switchFullscreen(requestPointerLock: boolean): void;
    /**
     * Enters full screen mode
     * @param requestPointerLock defines if a pointer lock should be requested from the user
     */
    enterFullscreen(requestPointerLock: boolean): void;
    /**
     * Exits full screen mode
     */
    exitFullscreen(): void;
    /** States */
    /**
     * Sets a boolean indicating if the dithering state is enabled or disabled
     * @param value defines the dithering state
     */
    setDitheringState(value: boolean): void;
    /**
     * Sets a boolean indicating if the rasterizer state is enabled or disabled
     * @param value defines the rasterizer state
     */
    setRasterizerState(value: boolean): void;
    /**
     * Directly set the WebGL Viewport
     * @param x defines the x coordinate of the viewport (in screen space)
     * @param y defines the y coordinate of the viewport (in screen space)
     * @param width defines the width of the viewport (in screen space)
     * @param height defines the height of the viewport (in screen space)
     * @returns the current viewport Object (if any) that is being replaced by this call. You can restore this viewport later on to go back to the original state
     */
    setDirectViewport(x: number, y: number, width: number, height: number): Nullable<IViewportLike>;
    /**
     * Executes a scissor clear (ie. a clear on a specific portion of the screen)
     * @param x defines the x-coordinate of the bottom left corner of the clear rectangle
     * @param y defines the y-coordinate of the corner of the clear rectangle
     * @param width defines the width of the clear rectangle
     * @param height defines the height of the clear rectangle
     * @param clearColor defines the clear color
     */
    scissorClear(x: number, y: number, width: number, height: number, clearColor: IColor4Like): void;
    /**
     * Enable scissor test on a specific rectangle (ie. render will only be executed on a specific portion of the screen)
     * @param x defines the x-coordinate of the bottom left corner of the clear rectangle
     * @param y defines the y-coordinate of the corner of the clear rectangle
     * @param width defines the width of the clear rectangle
     * @param height defines the height of the clear rectangle
     */
    enableScissor(x: number, y: number, width: number, height: number): void;
    /**
     * Disable previously set scissor test rectangle
     */
    disableScissor(): void;
    /**
     * @internal
     */
    _loadFileAsync(url: string, offlineProvider?: IOfflineProvider, useArrayBuffer?: false): Promise<string>;
    _loadFileAsync(url: string, offlineProvider?: IOfflineProvider, useArrayBuffer?: true): Promise<ArrayBuffer>;
    /**
     * Gets the source code of the vertex shader associated with a specific webGL program
     * @param program defines the program to use
     * @returns a string containing the source code of the vertex shader associated with the program
     */
    getVertexShaderSource(program: WebGLProgram): Nullable<string>;
    /**
     * Gets the source code of the fragment shader associated with a specific webGL program
     * @param program defines the program to use
     * @returns a string containing the source code of the fragment shader associated with the program
     */
    getFragmentShaderSource(program: WebGLProgram): Nullable<string>;
    /**
     * sets the object from which width and height will be taken from when getting render width and height
     * Will fallback to the gl object
     * @param dimensions the framebuffer width and height that will be used.
     */
    set framebufferDimensionsObject(dimensions: Nullable<{
        framebufferWidth: number;
        framebufferHeight: number;
    }>);
    protected _rebuildBuffers(): void;
    /**
     * Get Font size information
     * @param font font name
     * @returns an object containing ascent, height and descent
     */
    getFontOffset(font: string): {
        ascent: number;
        height: number;
        descent: number;
    };
    protected _cancelFrame(): void;
    _renderLoop(timestamp?: number): void;
    /**
     * Enters Pointerlock mode
     */
    enterPointerlock(): void;
    /**
     * Exits Pointerlock mode
     */
    exitPointerlock(): void;
    /**
     * Begin a new frame
     */
    beginFrame(): void;
    _deletePipelineContext(pipelineContext: IPipelineContext): void;
    createShaderProgram(pipelineContext: IPipelineContext, vertexCode: string, fragmentCode: string, defines: Nullable<string>, context?: WebGLRenderingContext, transformFeedbackVaryings?: Nullable<string[]>): WebGLProgram;
    protected _createShaderProgram(pipelineContext: WebGLPipelineContext, vertexShader: WebGLShader, fragmentShader: WebGLShader, context: WebGLRenderingContext, transformFeedbackVaryings?: Nullable<string[]>): WebGLProgram;
    /**
     * @internal
     */
    _releaseTexture(texture: InternalTexture): void;
    /**
     * @internal
     */
    _releaseRenderTargetWrapper(rtWrapper: RenderTargetWrapper): void;
    /**
     * @internal
     * Rescales a texture
     * @param source input texture
     * @param destination destination texture
     * @param scene scene to use to render the resize
     * @param internalFormat format to use when resizing
     * @param onComplete callback to be called when resize has completed
     */
    _rescaleTexture(source: InternalTexture, destination: InternalTexture, scene: Nullable<any>, internalFormat: number, onComplete: () => void): void;
    /**
     * Wraps an external web gl texture in a Babylon texture.
     * @param texture defines the external texture
     * @param hasMipMaps defines whether the external texture has mip maps (default: false)
     * @param samplingMode defines the sampling mode for the external texture (default: Constants.TEXTURE_TRILINEAR_SAMPLINGMODE)
     * @param width defines the width for the external texture (default: 0)
     * @param height defines the height for the external texture (default: 0)
     * @returns the babylon internal texture
     */
    wrapWebGLTexture(texture: WebGLTexture, hasMipMaps?: boolean, samplingMode?: number, width?: number, height?: number): InternalTexture;
    /**
     * @internal
     */
    _uploadImageToTexture(texture: InternalTexture, image: HTMLImageElement | ImageBitmap, faceIndex?: number, lod?: number): void;
    /**
     * Updates a depth texture Comparison Mode and Function.
     * If the comparison Function is equal to 0, the mode will be set to none.
     * Otherwise, this only works in webgl 2 and requires a shadow sampler in the shader.
     * @param texture The texture to set the comparison function for
     * @param comparisonFunction The comparison function to set, 0 if no comparison required
     */
    updateTextureComparisonFunction(texture: InternalTexture, comparisonFunction: number): void;
    /**
     * Creates a webGL buffer to use with instantiation
     * @param capacity defines the size of the buffer
     * @returns the webGL buffer
     */
    createInstancesBuffer(capacity: number): DataBuffer;
    /**
     * Delete a webGL buffer used with instantiation
     * @param buffer defines the webGL buffer to delete
     */
    deleteInstancesBuffer(buffer: WebGLBuffer): void;
    private _clientWaitAsync;
    /**
     * @internal
     */
    _readPixelsAsync(x: number, y: number, w: number, h: number, format: number, type: number, outputBuffer: ArrayBufferView): Nullable<Promise<ArrayBufferView>>;
    dispose(): void;
}
// Mixins
declare global {
    // This file contains native only extensions for WebXR. These APIs are not supported in the browser yet.
// They are intended for use with either Babylon Native https://github.com/BabylonJS/BabylonNative or // Babylon React Native: https://github.com/BabylonJS/BabylonReactNative type XRSceneObjectType = "unknown" | "background" | "wall" | "floor" | "ceiling" | "platform" | "inferred" | "world"; interface XRSceneObject { type: XRSceneObjectType; } interface XRFieldOfView { angleLeft: number; angleRight: number; angleUp: number; angleDown: number; } interface XRFrustum { position: DOMPointReadOnly; orientation: DOMPointReadOnly; fieldOfView: XRFieldOfView; farDistance: number; } interface XRPlane { parentSceneObject?: XRSceneObject; } // extending the webxr XRMesh with babylon native properties interface XRMesh { normals?: Float32Array; parentSceneObject?: XRSceneObject; positions: Float32Array; // Babylon native! } interface XRFrustumDetectionBoundary { type: "frustum"; frustum: XRFrustum; } interface XRSphereDetectionBoundary { type: "sphere"; radius: number; } interface XRBoxDetectionBoundary { type: "box"; extent: DOMPointReadOnly; } type XRDetectionBoundary = XRFrustumDetectionBoundary | XRSphereDetectionBoundary | XRBoxDetectionBoundary; interface XRGeometryDetectorOptions { detectionBoundary?: XRDetectionBoundary; updateInterval?: number; } interface XRSession { trySetFeaturePointCloudEnabled(enabled: boolean): boolean; trySetPreferredPlaneDetectorOptions(preferredOptions: XRGeometryDetectorOptions): boolean; trySetMeshDetectorEnabled(enabled: boolean): boolean; trySetPreferredMeshDetectorOptions(preferredOptions: XRGeometryDetectorOptions): boolean; } interface XRFrame { featurePointCloud?: Array<number> | undefined; } interface XRWorldInformation { detectedMeshes?: XRMeshSet; } /* eslint-disable @typescript-eslint/naming-convention */ // Type definitions for non-npm package webxr 0.5 // Project: https://www.w3.org/TR/webxr/ // Definitions by: Rob Rohan <https://github.com/robrohan> // Raanan Weber <https://github.com/RaananW> // Sean T. 
McBeth <https://github.com/capnmidnight> // Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped // Minimum TypeScript Version: 3.7 // Most of this was hand written and... more or less copied from the following // sites: // https://www.w3.org/TR/webxr/ // https://developer.mozilla.org/en-US/docs/Web/API/WebXR_Device_API // https://www.w3.org/immersive-web/ // https://github.com/immersive-web // /** * ref: https://immersive-web.github.io/webxr/#navigator-xr-attribute */ interface Navigator { /** * An XRSystem object is the entry point to the API, used to query for XR features * available to the user agent and initiate communication with XR hardware via the * creation of XRSessions. */ xr?: XRSystem | undefined; } /** * WebGL Context Compatability * * ref: https://immersive-web.github.io/webxr/#contextcompatibility */ interface WebGLContextAttributes { xrCompatible?: boolean | undefined; } interface WebGLRenderingContextBase { makeXRCompatible(): Promise<void>; } /** * Available session modes * * ref: https://immersive-web.github.io/webxr/#xrsessionmode-enum */ type XRSessionMode = "inline" | "immersive-vr" | "immersive-ar"; /** * Reference space types */ type XRReferenceSpaceType = "viewer" | "local" | "local-floor" | "bounded-floor" | "unbounded"; type XREnvironmentBlendMode = "opaque" | "additive" | "alpha-blend"; /** * ref: https://immersive-web.github.io/webxr/#xrsession-interface */ type XRVisibilityState = "visible" | "visible-blurred" | "hidden"; /** * Handedness types */ type XRHandedness = "none" | "left" | "right"; /** * InputSource target ray modes */ type XRTargetRayMode = "gaze" | "tracked-pointer" | "screen" | "transient-pointer"; /** * Eye types */ type XREye = "none" | "left" | "right"; type XRFrameRequestCallback = (time: DOMHighResTimeStamp, frame: XRFrame) => void; interface XRSystemDeviceChangeEvent extends Event { type: "devicechange"; } interface XRSystemDeviceChangeEventHandler { (event: XRSystemDeviceChangeEvent): any; } interface 
XRSystemEventMap { devicechange: XRSystemDeviceChangeEvent; } /** * An XRSystem object is the entry point to the API, used to query for XR features available * to the user agent and initiate communication with XR hardware via the creation of * XRSessions. * * ref: https://immersive-web.github.io/webxr/#xrsystem-interface */ interface XRSystem extends EventTarget { /** * Attempts to initialize an XRSession for the given mode if possible, entering immersive * mode if necessary. * @param mode * @param options */ requestSession(mode: XRSessionMode, options?: XRSessionInit): Promise<XRSession>; /** * Queries if a given mode may be supported by the user agent and device capabilities. * @param mode */ isSessionSupported(mode: XRSessionMode): Promise<boolean>; ondevicechange: XRSystemDeviceChangeEventHandler | null; addEventListener<K extends keyof XRSystemEventMap>(type: K, listener: (this: XRSystem, ev: XRSystemEventMap[K]) => any, options?: boolean | AddEventListenerOptions): void; addEventListener(type: string, listener: EventListenerOrEventListenerObject, options?: boolean | AddEventListenerOptions): void; removeEventListener<K extends keyof XRSystemEventMap>(type: K, listener: (this: XRSystem, ev: XRSystemEventMap[K]) => any, options?: boolean | EventListenerOptions): void; removeEventListener(type: string, listener: EventListenerOrEventListenerObject, options?: boolean | EventListenerOptions): void; } abstract class XRSystem implements XRSystem {} /** * Describes a viewport, or rectangular region, of a graphics surface. * * ref: https://immersive-web.github.io/webxr/#xrviewport-interface */ interface XRViewport { readonly x: number; readonly y: number; readonly width: number; readonly height: number; } abstract class XRViewport implements XRViewport {} /** * Represents a virtual coordinate system with an origin that corresponds to a physical location. 
* Spatial data that is requested from the API or given to the API is always expressed in relation * to a specific XRSpace at the time of a specific XRFrame. Numeric values such as pose positions * are coordinates in that space relative to its origin. The interface is intentionally opaque. * * ref: https://immersive-web.github.io/webxr/#xrspace-interface */ // tslint:disable-next-line no-empty-interface interface XRSpace extends EventTarget {} abstract class XRSpace implements XRSpace {} interface XRRenderStateInit { baseLayer?: XRWebGLLayer | undefined; depthFar?: number | undefined; depthNear?: number | undefined; inlineVerticalFieldOfView?: number | undefined; } interface XRRenderState { readonly baseLayer?: XRWebGLLayer | undefined; readonly depthFar: number; readonly depthNear: number; readonly inlineVerticalFieldOfView?: number | undefined; } abstract class XRRenderState implements XRRenderState {} interface XRReferenceSpaceEventInit extends EventInit { referenceSpace?: XRReferenceSpace | undefined; transform?: XRRigidTransform | undefined; } /** * XRReferenceSpaceEvents are fired to indicate changes to the state of an XRReferenceSpace. * * ref: https://immersive-web.github.io/webxr/#xrreferencespaceevent-interface */ interface XRReferenceSpaceEvent extends Event { readonly type: "reset"; readonly referenceSpace: XRReferenceSpace; readonly transform?: XRRigidTransform | undefined; } // tslint:disable-next-line no-unnecessary-class class XRReferenceSpaceEvent implements XRReferenceSpaceEvent { constructor(type: "reset", eventInitDict?: XRReferenceSpaceEventInit); } interface XRReferenceSpaceEventHandler { (event: XRReferenceSpaceEvent): any; } interface XRReferenceSpaceEventMap { reset: XRReferenceSpaceEvent; } /** * One of several common XRSpaces that applications can use to establish a spatial relationship * with the user's physical environment. 
* * ref: https://immersive-web.github.io/webxr/#xrreferencespace-interface */ interface XRReferenceSpace extends XRSpace { getOffsetReferenceSpace(originOffset: XRRigidTransform): XRReferenceSpace; onreset: XRReferenceSpaceEventHandler; addEventListener<K extends keyof XRReferenceSpaceEventMap>( type: K, listener: (this: XRReferenceSpace, ev: XRReferenceSpaceEventMap[K]) => any, options?: boolean | AddEventListenerOptions ): void; addEventListener(type: string, listener: EventListenerOrEventListenerObject, options?: boolean | AddEventListenerOptions): void; removeEventListener<K extends keyof XRReferenceSpaceEventMap>( type: K, listener: (this: XRReferenceSpace, ev: XRReferenceSpaceEventMap[K]) => any, options?: boolean | EventListenerOptions ): void; removeEventListener(type: string, listener: EventListenerOrEventListenerObject, options?: boolean | EventListenerOptions): void; } abstract class XRReferenceSpace implements XRReferenceSpace {} /** * Extends XRReferenceSpace to include boundsGeometry, indicating the pre-configured boundaries * of the user's space. * * ref: https://immersive-web.github.io/webxr/#xrboundedreferencespace-interface */ interface XRBoundedReferenceSpace extends XRReferenceSpace { readonly boundsGeometry: DOMPointReadOnly[]; } abstract class XRBoundedReferenceSpace implements XRBoundedReferenceSpace {} /** * Represents an XR input source, which is any input mechanism which allows the user to perform * targeted actions in the same virtual space as the viewer. Example XR input sources include, * but are not limited to, handheld controllers, optically tracked hands, and gaze-based input * methods that operate on the viewer's pose. Input mechanisms which are not explicitly associated * with the XR device, such as traditional gamepads, mice, or keyboards SHOULD NOT be considered * XR input sources. 
* ref: https://immersive-web.github.io/webxr/#xrinputsource-interface */ interface XRInputSource { readonly handedness: XRHandedness; readonly targetRayMode: XRTargetRayMode; readonly targetRaySpace: XRSpace; readonly gripSpace?: XRSpace | undefined; readonly gamepad?: Gamepad | undefined; readonly profiles: string[]; readonly hand?: XRHand; } abstract class XRInputSource implements XRInputSource {} /** * Represents a list of XRInputSources. It is used in favor of a frozen array type when the contents * of the list are expected to change over time, such as with the XRSession inputSources attribute. * ref: https://immersive-web.github.io/webxr/#xrinputsourcearray-interface */ interface XRInputSourceArray { [Symbol.iterator](): IterableIterator<XRInputSource>; [n: number]: XRInputSource; length: number; entries(): IterableIterator<[number, XRInputSource]>; keys(): IterableIterator<number>; values(): IterableIterator<XRInputSource>; forEach(callbackfn: (value: XRInputSource, index: number, array: XRInputSource[]) => void, thisArg?: any): void; } abstract class XRInputSourceArray implements XRInputSourceArray {} /** * Describes a position and orientation in space relative to an XRSpace. * * ref: https://immersive-web.github.io/webxr/#xrpose-interface */ interface XRPose { readonly transform: XRRigidTransform; readonly emulatedPosition: boolean; } abstract class XRPose implements XRPose {} /** * Represents a snapshot of the state of all of the tracked objects for an XRSession. Applications * can acquire an XRFrame by calling requestAnimationFrame() on an XRSession with an * XRFrameRequestCallback. When the callback is called it will be passed an XRFrame. * Events which need to communicate tracking state, such as the select event, will also provide an * XRFrame. 
 *
 * ref: https://immersive-web.github.io/webxr/#xrframe-interface
 */
interface XRFrame {
    readonly session: XRSession;
    // BABYLON CHANGE - switched to optional
    readonly predictedDisplayTime?: DOMHighResTimeStamp;
    /**
     * Provides the pose of space relative to baseSpace as an XRPose, at the time represented by
     * the XRFrame.
     *
     * @param space
     * @param baseSpace
     */
    getPose(space: XRSpace, baseSpace: XRSpace): XRPose | undefined;
    /**
     * Provides the pose of the viewer relative to referenceSpace as an XRViewerPose, at the
     * XRFrame's time.
     *
     * @param referenceSpace
     */
    getViewerPose(referenceSpace: XRReferenceSpace): XRViewerPose | undefined;
}
// Companion ambient class declaration; merges with the interface of the same name.
abstract class XRFrame implements XRFrame {}
/**
 * Type of XR events available
 */
type XRInputSourceEventType = "select" | "selectend" | "selectstart" | "squeeze" | "squeezeend" | "squeezestart";
// Init dictionary accepted by the XRInputSourceEvent constructor.
interface XRInputSourceEventInit extends EventInit {
    frame?: XRFrame | undefined;
    inputSource?: XRInputSource | undefined;
}
/**
 * XRInputSourceEvents are fired to indicate changes to the state of an XRInputSource.
 * ref: https://immersive-web.github.io/webxr/#xrinputsourceevent-interface
 */
class XRInputSourceEvent extends Event {
    readonly type: XRInputSourceEventType;
    readonly frame: XRFrame;
    readonly inputSource: XRInputSource;
    constructor(type: XRInputSourceEventType, eventInitDict?: XRInputSourceEventInit);
}
// Callback signature for XRInputSourceEvent listeners.
interface XRInputSourceEventHandler {
    (evt: XRInputSourceEvent): any;
}
type XRSessionEventType = "end" | "visibilitychange" | "frameratechange";
// Init dictionary accepted by the XRSessionEvent constructor.
interface XRSessionEventInit extends EventInit {
    session: XRSession;
}
/**
 * XRSessionEvents are fired to indicate changes to the state of an XRSession.
 * ref: https://immersive-web.github.io/webxr/#xrsessionevent-interface
 */
class XRSessionEvent extends Event {
    readonly session: XRSession;
    constructor(type: XRSessionEventType, eventInitDict?: XRSessionEventInit);
}
// Callback signature for XRSessionEvent listeners.
interface XRSessionEventHandler {
    (evt: XRSessionEvent): any;
}
/**
 * ref: https://immersive-web.github.io/webxr/#feature-dependencies
 */
interface XRSessionInit {
    optionalFeatures?: string[] | undefined;
    requiredFeatures?: string[] | undefined;
}
// Maps event names to event types for the typed add/removeEventListener overloads on XRSession.
// (The eyetracking* entries are a non-spec extension — see XREyeTrackingSourceEvent.)
interface XRSessionEventMap {
    inputsourceschange: XRInputSourceChangeEvent;
    end: XRSessionEvent;
    visibilitychange: XRSessionEvent;
    frameratechange: XRSessionEvent;
    select: XRInputSourceEvent;
    selectstart: XRInputSourceEvent;
    selectend: XRInputSourceEvent;
    squeeze: XRInputSourceEvent;
    squeezestart: XRInputSourceEvent;
    squeezeend: XRInputSourceEvent;
    eyetrackingstart: XREyeTrackingSourceEvent;
    eyetrackingend: XREyeTrackingSourceEvent;
}
/**
 * Any interaction with XR hardware is done via an XRSession object, which can only be
 * retrieved by calling requestSession() on the XRSystem object. Once a session has been
 * successfully acquired, it can be used to poll the viewer pose, query information about
 * the user's environment, and present imagery to the user.
 *
 * ref: https://immersive-web.github.io/webxr/#xrsession-interface
 */
interface XRSession extends EventTarget {
    /**
     * Returns a list of this session's XRInputSources, each representing an input device
     * used to control the camera and/or scene.
     */
    readonly inputSources: XRInputSourceArray;
    /**
     * object which contains options affecting how the imagery is rendered.
     * This includes things such as the near and far clipping planes
     */
    readonly renderState: XRRenderState;
    readonly environmentBlendMode: XREnvironmentBlendMode;
    readonly visibilityState: XRVisibilityState;
    readonly frameRate?: number | undefined;
    readonly supportedFrameRates?: Float32Array | undefined;
    /**
     * Removes a callback from the animation frame painting callback from
     * XRSession's set of animation frame rendering callbacks, given the
     * identifying handle returned by a previous call to requestAnimationFrame().
     */
    cancelAnimationFrame(id: number): void;
    /**
     * Ends the WebXR session. Returns a promise which resolves when the
     * session has been shut down.
     */
    end(): Promise<void>;
    /**
     * Schedules the specified method to be called the next time the user agent
     * is working on rendering an animation frame for the WebXR device. Returns an
     * integer value which can be used to identify the request for the purposes of
     * canceling the callback using cancelAnimationFrame(). This method is comparable
     * to the Window.requestAnimationFrame() method.
     */
    requestAnimationFrame(callback: XRFrameRequestCallback): number;
    /**
     * Requests that a new XRReferenceSpace of the specified type be created.
     * Returns a promise which resolves with the XRReferenceSpace or
     * XRBoundedReferenceSpace which was requested, or throws a NotSupportedError if
     * the requested space type isn't supported by the device.
     */
    requestReferenceSpace(type: XRReferenceSpaceType): Promise<XRReferenceSpace | XRBoundedReferenceSpace>;
    updateRenderState(renderStateInit?: XRRenderStateInit): Promise<void>;
    updateTargetFrameRate(rate: number): Promise<void>;
    // Event-handler attributes mirroring the typed entries in XRSessionEventMap.
    onend: XRSessionEventHandler;
    oninputsourceschange: XRInputSourceChangeEventHandler;
    onselect: XRInputSourceEventHandler;
    onselectstart: XRInputSourceEventHandler;
    onselectend: XRInputSourceEventHandler;
    onsqueeze: XRInputSourceEventHandler;
    onsqueezestart: XRInputSourceEventHandler;
    onsqueezeend: XRInputSourceEventHandler;
    onvisibilitychange: XRSessionEventHandler;
    onframeratechange: XRSessionEventHandler;
    addEventListener<K extends keyof XRSessionEventMap>(type: K, listener: (this: XRSession, ev: XRSessionEventMap[K]) => any, options?: boolean | AddEventListenerOptions): void;
    addEventListener(type: string, listener: EventListenerOrEventListenerObject, options?: boolean | AddEventListenerOptions): void;
    removeEventListener<K extends keyof XRSessionEventMap>(type: K, listener: (this: XRSession, ev: XRSessionEventMap[K]) => any, options?: boolean | EventListenerOptions): void;
    removeEventListener(type: string, listener: EventListenerOrEventListenerObject, options?: boolean | EventListenerOptions): void;
}
// Companion ambient class declaration; merges with the interface of the same name.
abstract class XRSession implements XRSession {}
/**
 * An XRPose describing the state of a viewer of the XR scene as tracked by the XR
 * device. A viewer may represent a tracked piece of hardware, the observed position
 * of a user's head relative to the hardware, or some other means of computing a series
 * of viewpoints into the XR scene. XRViewerPoses can only be queried relative to an
 * XRReferenceSpace. It provides, in addition to the XRPose values, an array of views
 * which include rigid transforms to indicate the viewpoint and projection matrices.
 * These values should be used by the application when rendering a frame of an XR scene.
* * ref: https://immersive-web.github.io/webxr/#xrviewerpose-interface */ interface XRViewerPose extends XRPose { readonly views: ReadonlyArray<XRView>; } abstract class XRViewerPose implements XRViewerPose {} /** * A transform described by a position and orientation. When interpreting an * XRRigidTransform the orientation is always applied prior to the position. * * ref: https://immersive-web.github.io/webxr/#xrrigidtransform-interface */ class XRRigidTransform { readonly position: DOMPointReadOnly; readonly orientation: DOMPointReadOnly; readonly matrix: Float32Array; readonly inverse: XRRigidTransform; constructor(position?: DOMPointInit, direction?: DOMPointInit); } /** * Describes a single view into an XR scene for a given frame. * * ref: https://immersive-web.github.io/webxr/#xrview-interface */ interface XRView { readonly eye: XREye; readonly projectionMatrix: Float32Array; readonly transform: XRRigidTransform; readonly recommendedViewportScale?: number | undefined; requestViewportScale(scale: number): void; } abstract class XRView implements XRView {} /** * XRInputSourcesChangeEvents are fired to indicate changes to the XRInputSources that are * available to an XRSession. 
 * ref: https://immersive-web.github.io/webxr/#xrinputsourceschangeevent-interface
 */
interface XRInputSourceChangeEvent extends XRSessionEvent {
    readonly removed: ReadonlyArray<XRInputSource>;
    readonly added: ReadonlyArray<XRInputSource>;
}
// Callback signature for XRInputSourceChangeEvent listeners.
interface XRInputSourceChangeEventHandler {
    (evt: XRInputSourceChangeEvent): any;
}
// Experimental/Draft features
// Anchors
type XRAnchorSet = Set<XRAnchor>;
interface XRAnchor {
    anchorSpace: XRSpace;
    delete(): void;
}
abstract class XRAnchor implements XRAnchor {}
// Declaration merge: augments XRFrame with the (draft) anchors-module surface.
interface XRFrame {
    trackedAnchors?: XRAnchorSet | undefined;
    createAnchor?: (pose: XRRigidTransform, space: XRSpace) => Promise<XRAnchor>;
}
// AR Hit testing
class XRRay {
    readonly origin: DOMPointReadOnly;
    readonly direction: DOMPointReadOnly;
    readonly matrix: Float32Array;
    constructor(transformOrOrigin?: XRRigidTransform | DOMPointInit, direction?: DOMPointInit);
}
type XRHitTestTrackableType = "point" | "plane" | "mesh";
interface XRTransientInputHitTestResult {
    readonly inputSource: XRInputSource;
    readonly results: ReadonlyArray<XRHitTestResult>;
}
class XRTransientInputHitTestResult {
    prototype: XRTransientInputHitTestResult;
}
interface XRHitTestResult {
    getPose(baseSpace: XRSpace): XRPose | undefined;
    // When anchor system is enabled
    // NOTE(review): the "| undefined" binds to the return type here, not to the property; this was
    // likely intended simply as an optional method returning Promise<XRAnchor> — confirm before tightening.
    createAnchor?: (pose: XRRigidTransform) => Promise<XRAnchor> | undefined;
}
abstract class XRHitTestResult implements XRHitTestResult {}
interface XRHitTestSource {
    cancel(): void;
}
abstract class XRHitTestSource implements XRHitTestSource {}
interface XRTransientInputHitTestSource {
    cancel(): void;
}
abstract class XRTransientInputHitTestSource implements XRTransientInputHitTestSource {}
interface XRHitTestOptionsInit {
    space: XRSpace;
    entityTypes?: XRHitTestTrackableType[] | undefined;
    offsetRay?: XRRay | undefined;
}
interface XRTransientInputHitTestOptionsInit {
    profile: string;
    entityTypes?: XRHitTestTrackableType[] | undefined;
    offsetRay?: XRRay | undefined;
}
// Declaration merge: augments XRSession with the (draft) hit-test-module surface.
interface XRSession {
    requestHitTestSource?: (options: XRHitTestOptionsInit) => Promise<XRHitTestSource>;
    requestHitTestSourceForTransientInput?: (options: XRTransientInputHitTestOptionsInit) => Promise<XRTransientInputHitTestSource>;
    // Legacy
    requestHitTest?: (ray: XRRay, referenceSpace: XRReferenceSpace) => Promise<XRHitResult[]>;
}
// Declaration merge: per-frame accessors for hit-test results.
interface XRFrame {
    getHitTestResults(hitTestSource: XRHitTestSource): XRHitTestResult[];
    getHitTestResultsForTransientInput(hitTestSource: XRTransientInputHitTestSource): XRTransientInputHitTestResult[];
}
// Legacy
interface XRHitResult {
    hitMatrix: Float32Array;
}
// Plane detection
type XRPlaneSet = Set<XRPlane>;
type XRPlaneOrientation = "horizontal" | "vertical";
interface XRPlane {
    orientation: XRPlaneOrientation;
    planeSpace: XRSpace;
    polygon: DOMPointReadOnly[];
    lastChangedTime: number;
}
abstract class XRPlane implements XRPlane {}
interface XRSession {
    // Legacy
    updateWorldTrackingState?: (options: { planeDetectionState?: { enabled: boolean } | undefined }) => void | undefined;
}
// interface XRFrame {
//     worldInformation?:
//         | {
//               detectedPlanes?: XRPlaneSet | undefined;
//           }
//         | undefined;
// }
// Hand Tracking
type XRHandJoint =
    | "wrist"
    | "thumb-metacarpal"
    | "thumb-phalanx-proximal"
    | "thumb-phalanx-distal"
    | "thumb-tip"
    | "index-finger-metacarpal"
    | "index-finger-phalanx-proximal"
    | "index-finger-phalanx-intermediate"
    | "index-finger-phalanx-distal"
    | "index-finger-tip"
    | "middle-finger-metacarpal"
    | "middle-finger-phalanx-proximal"
    | "middle-finger-phalanx-intermediate"
    | "middle-finger-phalanx-distal"
    | "middle-finger-tip"
    | "ring-finger-metacarpal"
    | "ring-finger-phalanx-proximal"
    | "ring-finger-phalanx-intermediate"
    | "ring-finger-phalanx-distal"
    | "ring-finger-tip"
    | "pinky-finger-metacarpal"
    | "pinky-finger-phalanx-proximal"
    | "pinky-finger-phalanx-intermediate"
    | "pinky-finger-phalanx-distal"
    | "pinky-finger-tip";
interface XRJointSpace extends XRSpace {
    readonly jointName: XRHandJoint;
}
abstract class XRJointSpace implements XRJointSpace {}
// (declaration continues on the following source line: XRJointPose)
interface
XRJointPose extends XRPose { readonly radius: number | undefined; } abstract class XRJointPose implements XRJointPose {} interface XRHand extends Map<XRHandJoint, XRJointSpace> { readonly WRIST: number; readonly THUMB_METACARPAL: number; readonly THUMB_PHALANX_PROXIMAL: number; readonly THUMB_PHALANX_DISTAL: number; readonly THUMB_PHALANX_TIP: number; readonly INDEX_METACARPAL: number; readonly INDEX_PHALANX_PROXIMAL: number; readonly INDEX_PHALANX_INTERMEDIATE: number; readonly INDEX_PHALANX_DISTAL: number; readonly INDEX_PHALANX_TIP: number; readonly MIDDLE_METACARPAL: number; readonly MIDDLE_PHALANX_PROXIMAL: number; readonly MIDDLE_PHALANX_INTERMEDIATE: number; readonly MIDDLE_PHALANX_DISTAL: number; readonly MIDDLE_PHALANX_TIP: number;