/**
 * three — JavaScript 3D library
 * (scraped file metadata: 1,919 lines (1,279 loc) • 103 kB, JavaScript)
 */
import {
REVISION,
BackSide,
FrontSide,
DoubleSide,
HalfFloatType,
UnsignedByteType,
NoToneMapping,
LinearMipmapLinearFilter,
SRGBColorSpace,
LinearSRGBColorSpace,
RGBAIntegerFormat,
RGIntegerFormat,
RedIntegerFormat,
UnsignedIntType,
UnsignedShortType,
UnsignedInt248Type,
UnsignedShort4444Type,
UnsignedShort5551Type,
WebGLCoordinateSystem
} from '../constants.js';
import { Color } from '../math/Color.js';
import { Frustum } from '../math/Frustum.js';
import { Matrix4 } from '../math/Matrix4.js';
import { Vector3 } from '../math/Vector3.js';
import { Vector4 } from '../math/Vector4.js';
import { WebGLAnimation } from './webgl/WebGLAnimation.js';
import { WebGLAttributes } from './webgl/WebGLAttributes.js';
import { WebGLBackground } from './webgl/WebGLBackground.js';
import { WebGLBindingStates } from './webgl/WebGLBindingStates.js';
import { WebGLBufferRenderer } from './webgl/WebGLBufferRenderer.js';
import { WebGLCapabilities } from './webgl/WebGLCapabilities.js';
import { WebGLClipping } from './webgl/WebGLClipping.js';
import { WebGLCubeMaps } from './webgl/WebGLCubeMaps.js';
import { WebGLCubeUVMaps } from './webgl/WebGLCubeUVMaps.js';
import { WebGLExtensions } from './webgl/WebGLExtensions.js';
import { WebGLGeometries } from './webgl/WebGLGeometries.js';
import { WebGLIndexedBufferRenderer } from './webgl/WebGLIndexedBufferRenderer.js';
import { WebGLInfo } from './webgl/WebGLInfo.js';
import { WebGLMorphtargets } from './webgl/WebGLMorphtargets.js';
import { WebGLObjects } from './webgl/WebGLObjects.js';
import { WebGLPrograms } from './webgl/WebGLPrograms.js';
import { WebGLProperties } from './webgl/WebGLProperties.js';
import { WebGLRenderLists } from './webgl/WebGLRenderLists.js';
import { WebGLRenderStates } from './webgl/WebGLRenderStates.js';
import { WebGLRenderTarget } from './WebGLRenderTarget.js';
import { WebGLShadowMap } from './webgl/WebGLShadowMap.js';
import { WebGLState } from './webgl/WebGLState.js';
import { WebGLTextures } from './webgl/WebGLTextures.js';
import { WebGLUniforms } from './webgl/WebGLUniforms.js';
import { WebGLUtils } from './webgl/WebGLUtils.js';
import { WebXRManager } from './webxr/WebXRManager.js';
import { WebGLMaterials } from './webgl/WebGLMaterials.js';
import { WebGLUniformsGroups } from './webgl/WebGLUniformsGroups.js';
import { createCanvasElement, probeAsync, toNormalizedProjectionMatrix, toReversedProjectionMatrix, warnOnce } from '../utils.js';
import { ColorManagement } from '../math/ColorManagement.js';
/**
* This renderer uses WebGL 2 to display scenes.
*
* WebGL 1 is not supported since `r163`.
*/
class WebGLRenderer {
/**
* Constructs a new WebGL renderer.
*
* @param {WebGLRenderer~Options} [parameters] - The configuration parameter.
*/
constructor( parameters = {} ) {
const {
canvas = createCanvasElement(),
context = null,
depth = true,
stencil = false,
alpha = false,
antialias = false,
premultipliedAlpha = true,
preserveDrawingBuffer = false,
powerPreference = 'default',
failIfMajorPerformanceCaveat = false,
reverseDepthBuffer = false,
} = parameters;
/**
* This flag can be used for type testing.
*
* @type {boolean}
* @readonly
* @default true
*/
this.isWebGLRenderer = true;
// Resolve the effective alpha flag: when the caller supplies a context, its
// creation-time attributes are authoritative; otherwise use the `alpha` parameter.
let _alpha;
if ( context !== null ) {
// Only WebGL 2 is supported; reject plain WebGL 1 contexts up front.
if ( typeof WebGLRenderingContext !== 'undefined' && context instanceof WebGLRenderingContext ) {
throw new Error( 'THREE.WebGLRenderer: WebGL 1 is not supported since r163.' );
}
_alpha = context.getContextAttributes().alpha;
} else {
_alpha = alpha;
}
const uintClearColor = new Uint32Array( 4 );
const intClearColor = new Int32Array( 4 );
let currentRenderList = null;
let currentRenderState = null;
// render() can be called from within a callback triggered by another render.
// We track this so that the nested render call gets its list and state isolated from the parent render call.
const renderListStack = [];
const renderStateStack = [];
// public properties
/**
* A canvas where the renderer draws its output.This is automatically created by the renderer
* in the constructor (if not provided already); you just need to add it to your page like so:
* ```js
* document.body.appendChild( renderer.domElement );
* ```
*
* @type {DOMElement}
*/
this.domElement = canvas;
/**
* A object with debug configuration settings.
*
* - `checkShaderErrors`: If it is `true`, defines whether material shader programs are
* checked for errors during compilation and linkage process. It may be useful to disable
* this check in production for performance gain. It is strongly recommended to keep these
* checks enabled during development. If the shader does not compile and link - it will not
* work and associated material will not render.
* - `onShaderError(gl, program, glVertexShader,glFragmentShader)`: A callback function that
* can be used for custom error reporting. The callback receives the WebGL context, an instance
* of WebGLProgram as well two instances of WebGLShader representing the vertex and fragment shader.
* Assigning a custom function disables the default error reporting.
*
* @type {Object}
*/
this.debug = {
/**
* Enables error checking and reporting when shader programs are being compiled.
* @type {boolean}
*/
checkShaderErrors: true,
/**
* Callback for custom error reporting.
* @type {?Function}
*/
onShaderError: null
};
// clearing
/**
* Whether the renderer should automatically clear its output before rendering a frame or not.
*
* @type {boolean}
* @default true
*/
this.autoClear = true;
/**
* If {@link WebGLRenderer#autoClear} set to `true`, whether the renderer should clear
* the color buffer or not.
*
* @type {boolean}
* @default true
*/
this.autoClearColor = true;
/**
* If {@link WebGLRenderer#autoClear} set to `true`, whether the renderer should clear
* the depth buffer or not.
*
* @type {boolean}
* @default true
*/
this.autoClearDepth = true;
/**
* If {@link WebGLRenderer#autoClear} set to `true`, whether the renderer should clear
* the stencil buffer or not.
*
* @type {boolean}
* @default true
*/
this.autoClearStencil = true;
// scene graph
/**
* Whether the renderer should sort objects or not.
*
* Note: Sorting is used to attempt to properly render objects that have some
* degree of transparency. By definition, sorting objects may not work in all
* cases. Depending on the needs of application, it may be necessary to turn
* off sorting and use other methods to deal with transparency rendering e.g.
* manually determining each object's rendering order.
*
* @type {boolean}
* @default true
*/
this.sortObjects = true;
// user-defined clipping
/**
* User-defined clipping planes specified in world space. These planes apply globally.
* Points in space whose dot product with the plane is negative are cut away.
*
* @type {Array<Plane>}
*/
this.clippingPlanes = [];
/**
* Whether the renderer respects object-level clipping planes or not.
*
* @type {boolean}
* @default false
*/
this.localClippingEnabled = false;
// tone mapping
/**
* The tone mapping technique of the renderer.
*
* @type {(NoToneMapping|LinearToneMapping|ReinhardToneMapping|CineonToneMapping|ACESFilmicToneMapping|CustomToneMapping|AgXToneMapping|NeutralToneMapping)}
* @default NoToneMapping
*/
this.toneMapping = NoToneMapping;
/**
* Exposure level of tone mapping.
*
* @type {number}
* @default 1
*/
this.toneMappingExposure = 1.0;
// transmission
/**
* The normalized resolution scale for the transmission render target, measured in percentage
* of viewport dimensions. Lowering this value can result in significant performance improvements
* when using {@link MeshPhysicalMaterial#transmission}.
*
* @type {number}
* @default 1
*/
this.transmissionResolutionScale = 1.0;
// internal properties
const _this = this;
let _isContextLost = false;
// internal state cache
this._outputColorSpace = SRGBColorSpace;
let _currentActiveCubeFace = 0;
let _currentActiveMipmapLevel = 0;
let _currentRenderTarget = null;
let _currentMaterialId = - 1;
let _currentCamera = null;
const _currentViewport = new Vector4();
const _currentScissor = new Vector4();
let _currentScissorTest = null;
const _currentClearColor = new Color( 0x000000 );
let _currentClearAlpha = 0;
//
let _width = canvas.width;
let _height = canvas.height;
let _pixelRatio = 1;
let _opaqueSort = null;
let _transparentSort = null;
const _viewport = new Vector4( 0, 0, _width, _height );
const _scissor = new Vector4( 0, 0, _width, _height );
let _scissorTest = false;
// frustum
const _frustum = new Frustum();
// clipping
let _clippingEnabled = false;
let _localClippingEnabled = false;
// camera matrices cache
const _currentProjectionMatrix = new Matrix4();
const _projScreenMatrix = new Matrix4();
const _vector3 = new Vector3();
const _vector4 = new Vector4();
const _emptyScene = { background: null, fog: null, environment: null, overrideMaterial: null, isScene: true };
let _renderBackground = false;
// Render targets are drawn at a pixel ratio of 1; only the default framebuffer
// (the canvas) honors the configured pixel ratio.
function getTargetPixelRatio() {
return _currentRenderTarget === null ? _pixelRatio : 1;
}
// initialize
let _gl = context;
// Thin wrapper so context creation can be attempted with and without attributes.
function getContext( contextName, contextAttributes ) {
return canvas.getContext( contextName, contextAttributes );
}
try {
// NOTE(review): `alpha` is always requested as `true` here; the user-facing
// `alpha` option appears to be applied via `_alpha`/background clearing instead — confirm.
const contextAttributes = {
alpha: true,
depth,
stencil,
antialias,
premultipliedAlpha,
preserveDrawingBuffer,
powerPreference,
failIfMajorPerformanceCaveat,
};
// OffscreenCanvas does not have setAttribute, see #22811
if ( 'setAttribute' in canvas ) canvas.setAttribute( 'data-engine', `three.js r${REVISION}` );
// event listeners must be registered before WebGL context is created, see #12753
canvas.addEventListener( 'webglcontextlost', onContextLost, false );
canvas.addEventListener( 'webglcontextrestored', onContextRestore, false );
canvas.addEventListener( 'webglcontextcreationerror', onContextCreationError, false );
if ( _gl === null ) {
const contextName = 'webgl2';
_gl = getContext( contextName, contextAttributes );
if ( _gl === null ) {
// Retry without attributes to distinguish "bad attributes" from "no WebGL 2 at all".
if ( getContext( contextName ) ) {
throw new Error( 'Error creating WebGL context with your selected attributes.' );
} else {
throw new Error( 'Error creating WebGL context.' );
}
}
}
} catch ( error ) {
console.error( 'THREE.WebGLRenderer: ' + error.message );
throw error;
}
let extensions, capabilities, state, info;
let properties, textures, cubemaps, cubeuvmaps, attributes, geometries, objects;
let programCache, materials, renderLists, renderStates, clipping, shadowMap;
let background, morphtargets, bufferRenderer, indexedBufferRenderer;
let utils, bindingStates, uniformsGroups;
// Creates all internal WebGL sub-modules. Called once from the constructor and
// again from onContextRestore(). Construction order matters: later modules
// receive earlier ones as constructor arguments.
function initGLContext() {
extensions = new WebGLExtensions( _gl );
extensions.init();
utils = new WebGLUtils( _gl, extensions );
capabilities = new WebGLCapabilities( _gl, extensions, parameters, utils );
state = new WebGLState( _gl, extensions );
// Reversed depth requires both hardware capability and the user opting in.
if ( capabilities.reverseDepthBuffer && reverseDepthBuffer ) {
state.buffers.depth.setReversed( true );
}
info = new WebGLInfo( _gl );
properties = new WebGLProperties();
textures = new WebGLTextures( _gl, extensions, state, properties, capabilities, utils, info );
cubemaps = new WebGLCubeMaps( _this );
cubeuvmaps = new WebGLCubeUVMaps( _this );
attributes = new WebGLAttributes( _gl );
bindingStates = new WebGLBindingStates( _gl, attributes );
geometries = new WebGLGeometries( _gl, attributes, info, bindingStates );
objects = new WebGLObjects( _gl, geometries, attributes, info );
morphtargets = new WebGLMorphtargets( _gl, capabilities, textures );
clipping = new WebGLClipping( properties );
programCache = new WebGLPrograms( _this, cubemaps, cubeuvmaps, extensions, capabilities, bindingStates, clipping );
materials = new WebGLMaterials( _this, properties );
renderLists = new WebGLRenderLists();
renderStates = new WebGLRenderStates( extensions );
background = new WebGLBackground( _this, cubemaps, cubeuvmaps, state, objects, _alpha, premultipliedAlpha );
shadowMap = new WebGLShadowMap( _this, objects, capabilities );
uniformsGroups = new WebGLUniformsGroups( _gl, info, capabilities, state );
bufferRenderer = new WebGLBufferRenderer( _gl, extensions, info );
indexedBufferRenderer = new WebGLIndexedBufferRenderer( _gl, extensions, info );
info.programs = programCache.programs;
/**
 * Holds details about the capabilities of the current rendering context.
 *
 * @name WebGLRenderer#capabilities
 * @type {WebGLRenderer~Capabilities}
 */
_this.capabilities = capabilities;
/**
 * Provides methods for retrieving and testing WebGL extensions.
 *
 * - `get(extensionName:string)`: Used to check whether a WebGL extension is supported
 * and return the extension object if available.
 * - `has(extensionName:string)`: returns `true` if the extension is supported.
 *
 * @name WebGLRenderer#extensions
 * @type {Object}
 */
_this.extensions = extensions;
/**
 * Used to track properties of other objects like native WebGL objects.
 *
 * @name WebGLRenderer#properties
 * @type {Object}
 */
_this.properties = properties;
/**
 * Manages the render lists of the renderer.
 *
 * @name WebGLRenderer#renderLists
 * @type {Object}
 */
_this.renderLists = renderLists;
/**
 * Interface for managing shadows.
 *
 * @name WebGLRenderer#shadowMap
 * @type {WebGLRenderer~ShadowMap}
 */
_this.shadowMap = shadowMap;
/**
 * Interface for managing the WebGL state.
 *
 * @name WebGLRenderer#state
 * @type {Object}
 */
_this.state = state;
/**
 * Holds a series of statistical information about the GPU memory
 * and the rendering process. Useful for debugging and monitoring.
 *
 * By default these data are reset at each render call but when having
 * multiple render passes per frame (e.g. when using post processing) it can
 * be preferred to reset with a custom pattern. First, set `autoReset` to
 * `false`.
 * ```js
 * renderer.info.autoReset = false;
 * ```
 * Call `reset()` whenever you have finished to render a single frame.
 * ```js
 * renderer.info.reset();
 * ```
 *
 * @name WebGLRenderer#info
 * @type {WebGLRenderer~Info}
 */
_this.info = info;
}
initGLContext();
// xr
const xr = new WebXRManager( _this, _gl );
/**
* A reference to the XR manager.
*
* @type {WebXRManager}
*/
this.xr = xr;
/**
 * Returns the underlying rendering context.
 *
 * @return {WebGL2RenderingContext} The rendering context.
 */
this.getContext = function () {

	return _gl;

};

/**
 * Returns the attributes the rendering context was created with.
 *
 * @return {WebGLContextAttributes} The rendering context attributes.
 */
this.getContextAttributes = function () {

	return _gl.getContextAttributes();

};

/**
 * Simulates a loss of the WebGL context. Requires the `WEBGL_lose_context`
 * extension; does nothing when the extension is unavailable.
 */
this.forceContextLoss = function () {

	const loseContext = extensions.get( 'WEBGL_lose_context' );

	if ( loseContext ) {

		loseContext.loseContext();

	}

};

/**
 * Simulates a restore of the WebGL context. Requires the `WEBGL_lose_context`
 * extension; does nothing when the extension is unavailable.
 */
this.forceContextRestore = function () {

	const loseContext = extensions.get( 'WEBGL_lose_context' );

	if ( loseContext ) {

		loseContext.restoreContext();

	}

};
/**
 * Returns the current pixel ratio.
 *
 * @return {number} The pixel ratio.
 */
this.getPixelRatio = function () {

	return _pixelRatio;

};

/**
 * Sets the pixel ratio and resizes the canvas accordingly.
 * Calling this without an argument is a no-op.
 *
 * @param {number} value - The pixel ratio.
 */
this.setPixelRatio = function ( value ) {

	if ( value !== undefined ) {

		_pixelRatio = value;

		this.setSize( _width, _height, false );

	}

};

/**
 * Returns the renderer's size in logical pixels (the pixel ratio is not applied).
 *
 * @param {Vector2} target - The method writes the result in this target object.
 * @return {Vector2} The renderer's size in logical pixels.
 */
this.getSize = function ( target ) {

	target.set( _width, _height );

	return target;

};
/**
 * Resizes the output canvas to (width, height), taking the device pixel ratio
 * into account, and sets the viewport to cover that size starting at (0, 0).
 * Pass `updateStyle = false` to leave the canvas CSS size untouched.
 *
 * @param {number} width - The width in logical pixels.
 * @param {number} height - The height in logical pixels.
 * @param {boolean} [updateStyle=true] - Whether to update the `style` attribute of the canvas or not.
 */
this.setSize = function ( width, height, updateStyle = true ) {

	// Resizing is not possible while an XR session is presenting.
	if ( xr.isPresenting ) {

		console.warn( 'THREE.WebGLRenderer: Can\'t change size while VR device is presenting.' );
		return;

	}

	_width = width;
	_height = height;

	canvas.width = Math.floor( _pixelRatio * width );
	canvas.height = Math.floor( _pixelRatio * height );

	if ( updateStyle === true ) {

		canvas.style.width = width + 'px';
		canvas.style.height = height + 'px';

	}

	this.setViewport( 0, 0, width, height );

};
/**
 * Returns the drawing buffer size in physical pixels (pixel ratio applied).
 *
 * @param {Vector2} target - The method writes the result in this target object.
 * @return {Vector2} The drawing buffer size.
 */
this.getDrawingBufferSize = function ( target ) {

	target.set( _width * _pixelRatio, _height * _pixelRatio ).floor();

	return target;

};

/**
 * Defines the drawing buffer size by specifying width, height and pixel ratio
 * all at once. The resulting buffer size is:
 * ```js
 * size.x = width * pixelRatio;
 * size.y = height * pixelRatio;
 * ```
 *
 * @param {number} width - The width in logical pixels.
 * @param {number} height - The height in logical pixels.
 * @param {number} pixelRatio - The pixel ratio.
 */
this.setDrawingBufferSize = function ( width, height, pixelRatio ) {

	_width = width;
	_height = height;
	_pixelRatio = pixelRatio;

	canvas.width = Math.floor( pixelRatio * width );
	canvas.height = Math.floor( pixelRatio * height );

	this.setViewport( 0, 0, width, height );

};
/**
 * Returns the currently active viewport definition (in device pixels).
 *
 * @param {Vector2} target - The method writes the result in this target object.
 * @return {Vector2} The current viewport definition.
 */
this.getCurrentViewport = function ( target ) {

	target.copy( _currentViewport );

	return target;

};

/**
 * Returns the viewport definition (in logical pixels).
 *
 * @param {Vector4} target - The method writes the result in this target object.
 * @return {Vector4} The viewport definition.
 */
this.getViewport = function ( target ) {

	target.copy( _viewport );

	return target;

};

/**
 * Sets the viewport to render from `(x, y)` to `(x + width, y + height)`.
 *
 * @param {number | Vector4} x - The horizontal coordinate for the lower left corner of the viewport origin in logical pixel unit.
 * Or alternatively a four-component vector specifying all the parameters of the viewport.
 * @param {number} y - The vertical coordinate for the lower left corner of the viewport origin in logical pixel unit.
 * @param {number} width - The width of the viewport in logical pixel unit.
 * @param {number} height - The height of the viewport in logical pixel unit.
 */
this.setViewport = function ( x, y, width, height ) {

	if ( x.isVector4 ) {

		_viewport.set( x.x, x.y, x.z, x.w );

	} else {

		_viewport.set( x, y, width, height );

	}

	// The GL viewport works in device pixels, so scale by the pixel ratio.
	_currentViewport.copy( _viewport ).multiplyScalar( _pixelRatio ).round();

	state.viewport( _currentViewport );

};
/**
 * Returns the scissor region (in logical pixels).
 *
 * @param {Vector4} target - The method writes the result in this target object.
 * @return {Vector4} The scissor region.
 */
this.getScissor = function ( target ) {

	target.copy( _scissor );

	return target;

};

/**
 * Sets the scissor region to render from `(x, y)` to `(x + width, y + height)`.
 *
 * @param {number | Vector4} x - The horizontal coordinate for the lower left corner of the scissor region origin in logical pixel unit.
 * Or alternatively a four-component vector specifying all the parameters of the scissor region.
 * @param {number} y - The vertical coordinate for the lower left corner of the scissor region origin in logical pixel unit.
 * @param {number} width - The width of the scissor region in logical pixel unit.
 * @param {number} height - The height of the scissor region in logical pixel unit.
 */
this.setScissor = function ( x, y, width, height ) {

	if ( x.isVector4 ) {

		_scissor.set( x.x, x.y, x.z, x.w );

	} else {

		_scissor.set( x, y, width, height );

	}

	// The GL scissor rectangle works in device pixels, so scale by the pixel ratio.
	_currentScissor.copy( _scissor ).multiplyScalar( _pixelRatio ).round();

	state.scissor( _currentScissor );

};

/**
 * Returns `true` if the scissor test is enabled.
 *
 * @return {boolean} Whether the scissor test is enabled or not.
 */
this.getScissorTest = function () {

	return _scissorTest;

};

/**
 * Enables or disables the scissor test. When enabled, only pixels within the
 * defined scissor region are affected by further renderer actions.
 *
 * @param {boolean} boolean - Whether the scissor test is enabled or not.
 */
this.setScissorTest = function ( boolean ) {

	_scissorTest = boolean;

	state.setScissorTest( boolean );

};
/**
 * Sets a custom opaque sort function for the render lists. Pass `null`
 * to use the default `painterSortStable` function.
 *
 * @param {?Function} method - The opaque sort function.
 */
this.setOpaqueSort = function ( method ) {
_opaqueSort = method;
};
/**
 * Sets a custom transparent sort function for the render lists. Pass `null`
 * to use the default `reversePainterSortStable` function.
 *
 * @param {?Function} method - The transparent sort function.
 */
this.setTransparentSort = function ( method ) {
_transparentSort = method;
};
// Clearing

/**
 * Returns the clear color.
 *
 * @param {Color} target - The method writes the result in this target object.
 * @return {Color} The clear color.
 */
this.getClearColor = function ( target ) {

	target.copy( background.getClearColor() );

	return target;

};

/**
 * Sets the clear color and alpha.
 *
 * @param {Color} color - The clear color.
 * @param {number} [alpha=1] - The clear alpha.
 */
this.setClearColor = function ( ...args ) {

	background.setClearColor( ...args );

};

/**
 * Returns the clear alpha. Ranges within `[0,1]`.
 *
 * @return {number} The clear alpha.
 */
this.getClearAlpha = function () {

	return background.getClearAlpha();

};

/**
 * Sets the clear alpha.
 *
 * @param {number} alpha - The clear alpha.
 */
this.setClearAlpha = function ( ...args ) {

	background.setClearAlpha( ...args );

};
/**
 * Tells the renderer to clear its color, depth or stencil drawing buffer(s).
 * This method initializes the buffers to the current clear color values.
 *
 * @param {boolean} [color=true] - Whether the color buffer should be cleared or not.
 * @param {boolean} [depth=true] - Whether the depth buffer should be cleared or not.
 * @param {boolean} [stencil=true] - Whether the stencil buffer should be cleared or not.
 */
this.clear = function ( color = true, depth = true, stencil = true ) {
let bits = 0;
if ( color ) {
// check if we're trying to clear an integer target
let isIntegerFormat = false;
if ( _currentRenderTarget !== null ) {
const targetFormat = _currentRenderTarget.texture.format;
isIntegerFormat = targetFormat === RGBAIntegerFormat ||
targetFormat === RGIntegerFormat ||
targetFormat === RedIntegerFormat;
}
// use the appropriate clear functions to clear the target if it's a signed
// or unsigned integer target
if ( isIntegerFormat ) {
const targetType = _currentRenderTarget.texture.type;
const isUnsignedType = targetType === UnsignedByteType ||
targetType === UnsignedIntType ||
targetType === UnsignedShortType ||
targetType === UnsignedInt248Type ||
targetType === UnsignedShort4444Type ||
targetType === UnsignedShort5551Type;
const clearColor = background.getClearColor();
const a = background.getClearAlpha();
const r = clearColor.r;
const g = clearColor.g;
const b = clearColor.b;
// Integer targets cannot be cleared via gl.clear(); WebGL 2 requires the
// typed clearBuffer*v entry points with a matching integer clear color.
if ( isUnsignedType ) {
uintClearColor[ 0 ] = r;
uintClearColor[ 1 ] = g;
uintClearColor[ 2 ] = b;
uintClearColor[ 3 ] = a;
_gl.clearBufferuiv( _gl.COLOR, 0, uintClearColor );
} else {
intClearColor[ 0 ] = r;
intClearColor[ 1 ] = g;
intClearColor[ 2 ] = b;
intClearColor[ 3 ] = a;
_gl.clearBufferiv( _gl.COLOR, 0, intClearColor );
}
} else {
bits |= _gl.COLOR_BUFFER_BIT;
}
}
if ( depth ) {
bits |= _gl.DEPTH_BUFFER_BIT;
}
if ( stencil ) {
bits |= _gl.STENCIL_BUFFER_BIT;
// Open the stencil write mask fully, otherwise the clear itself is masked.
this.state.buffers.stencil.setMask( 0xffffffff );
}
_gl.clear( bits );
};
/**
 * Convenience method for clearing only the color buffer.
 * Same as `renderer.clear( true, false, false )`.
 */
this.clearColor = function () {

	this.clear( true, false, false );

};

/**
 * Convenience method for clearing only the depth buffer.
 * Same as `renderer.clear( false, true, false )`.
 */
this.clearDepth = function () {

	this.clear( false, true, false );

};

/**
 * Convenience method for clearing only the stencil buffer.
 * Same as `renderer.clear( false, false, true )`.
 */
this.clearStencil = function () {

	this.clear( false, false, true );

};
/**
 * Frees the GPU-related resources allocated by this instance. Call this
 * method whenever this instance is no longer used in your app.
 */
this.dispose = function () {
// Detach the context listeners that were registered in the constructor.
canvas.removeEventListener( 'webglcontextlost', onContextLost, false );
canvas.removeEventListener( 'webglcontextrestored', onContextRestore, false );
canvas.removeEventListener( 'webglcontextcreationerror', onContextCreationError, false );
// Dispose the internal sub-modules created in initGLContext().
background.dispose();
renderLists.dispose();
renderStates.dispose();
properties.dispose();
cubemaps.dispose();
cubeuvmaps.dispose();
objects.dispose();
bindingStates.dispose();
uniformsGroups.dispose();
programCache.dispose();
xr.dispose();
xr.removeEventListener( 'sessionstart', onXRSessionStart );
xr.removeEventListener( 'sessionend', onXRSessionEnd );
// NOTE(review): `animation`, `onXRSessionStart` and `onXRSessionEnd` are
// defined later in this file, outside this excerpt.
animation.stop();
};
// Events
// Marks the renderer as having lost its context. preventDefault() tells the
// browser the context will be restored, allowing 'webglcontextrestored' to fire.
function onContextLost( event ) {
event.preventDefault();
console.log( 'THREE.WebGLRenderer: Context Lost.' );
_isContextLost = true;
}
// Rebuilds all GL sub-modules after a context restore. User-facing settings
// (info.autoReset and the shadow map configuration) are saved before
// initGLContext() recreates the modules and are re-applied afterwards.
function onContextRestore( /* event */ ) {
console.log( 'THREE.WebGLRenderer: Context Restored.' );
_isContextLost = false;
const infoAutoReset = info.autoReset;
const shadowMapEnabled = shadowMap.enabled;
const shadowMapAutoUpdate = shadowMap.autoUpdate;
const shadowMapNeedsUpdate = shadowMap.needsUpdate;
const shadowMapType = shadowMap.type;
initGLContext();
info.autoReset = infoAutoReset;
shadowMap.enabled = shadowMapEnabled;
shadowMap.autoUpdate = shadowMapAutoUpdate;
shadowMap.needsUpdate = shadowMapNeedsUpdate;
shadowMap.type = shadowMapType;
}
// Reports failures during WebGL context creation.
function onContextCreationError( event ) {
console.error( 'THREE.WebGLRenderer: A WebGL context could not be created. Reason: ', event.statusMessage );
}
// Responds to a material's 'dispose' event: detaches this listener and frees
// the renderer-internal resources held for the material.
function onMaterialDispose( event ) {
const material = event.target;
material.removeEventListener( 'dispose', onMaterialDispose );
deallocateMaterial( material );
}
// Buffer deallocation

/**
 * Frees all renderer-internal resources held for the given material.
 *
 * @param {Material} material - The material to deallocate.
 */
function deallocateMaterial( material ) {

	releaseMaterialProgramReferences( material );

	properties.remove( material );

}

/**
 * Releases the program references tracked for the given material, including
 * the shader cache entry for shader materials.
 *
 * @param {Material} material - The material whose programs should be released.
 */
function releaseMaterialProgramReferences( material ) {

	const programs = properties.get( material ).programs;

	if ( programs === undefined ) return;

	programs.forEach( ( program ) => {

		programCache.releaseProgram( program );

	} );

	if ( material.isShaderMaterial ) {

		programCache.releaseShaderCache( material );

	}

}
// Buffer rendering
/**
 * Renders the given geometry with the given material directly, bypassing
 * render-list processing. Selects the draw mode from the object type and
 * dispatches to the plain, indexed, instanced or multi-draw code path.
 *
 * @param {Camera} camera - The camera.
 * @param {?Scene} scene - The scene, or `null` (an internal empty scene is substituted).
 * @param {BufferGeometry} geometry - The geometry to draw.
 * @param {Material} material - The material to draw with.
 * @param {Object3D} object - The 3D object the geometry belongs to.
 * @param {?Object} group - The geometry group to draw, or `null` for the whole geometry.
 */
this.renderBufferDirect = function ( camera, scene, geometry, material, object, group ) {
if ( scene === null ) scene = _emptyScene; // renderBufferDirect second parameter used to be fog (could be null)
// A negative world-matrix determinant means the object is mirrored, which
// flips the winding order of its front faces.
const frontFaceCW = ( object.isMesh && object.matrixWorld.determinant() < 0 );
const program = setProgram( camera, scene, geometry, material, object );
state.setMaterial( material, frontFaceCW );
//
let index = geometry.index;
let rangeFactor = 1;
if ( material.wireframe === true ) {
// Wireframe rendering draws line segments: two indices per triangle edge.
index = geometries.getWireframeAttribute( geometry );
if ( index === undefined ) return;
rangeFactor = 2;
}
// Clamp the draw range against the geometry's drawRange, the group range
// (multi-material geometries) and the available index/position count.
const drawRange = geometry.drawRange;
const position = geometry.attributes.position;
let drawStart = drawRange.start * rangeFactor;
let drawEnd = ( drawRange.start + drawRange.count ) * rangeFactor;
if ( group !== null ) {
drawStart = Math.max( drawStart, group.start * rangeFactor );
drawEnd = Math.min( drawEnd, ( group.start + group.count ) * rangeFactor );
}
if ( index !== null ) {
drawStart = Math.max( drawStart, 0 );
drawEnd = Math.min( drawEnd, index.count );
} else if ( position !== undefined && position !== null ) {
drawStart = Math.max( drawStart, 0 );
drawEnd = Math.min( drawEnd, position.count );
}
const drawCount = drawEnd - drawStart;
// An empty or unbounded range (Infinity presumably from a default drawRange
// with no index/position to clamp against — TODO confirm) means nothing to draw.
if ( drawCount < 0 || drawCount === Infinity ) return;
//
bindingStates.setup( object, material, program, geometry, index );
let attribute;
let renderer = bufferRenderer;
if ( index !== null ) {
attribute = attributes.get( index );
renderer = indexedBufferRenderer;
renderer.setIndex( attribute );
}
// Select the GL primitive mode based on the object type.
if ( object.isMesh ) {
if ( material.wireframe === true ) {
state.setLineWidth( material.wireframeLinewidth * getTargetPixelRatio() );
renderer.setMode( _gl.LINES );
} else {
renderer.setMode( _gl.TRIANGLES );
}
} else if ( object.isLine ) {
let lineWidth = material.linewidth;
if ( lineWidth === undefined ) lineWidth = 1; // Not using Line*Material
state.setLineWidth( lineWidth * getTargetPixelRatio() );
if ( object.isLineSegments ) {
renderer.setMode( _gl.LINES );
} else if ( object.isLineLoop ) {
renderer.setMode( _gl.LINE_LOOP );
} else {
renderer.setMode( _gl.LINE_STRIP );
}
} else if ( object.isPoints ) {
renderer.setMode( _gl.POINTS );
} else if ( object.isSprite ) {
renderer.setMode( _gl.TRIANGLES );
}
// Dispatch to the draw-call variant matching the object type.
if ( object.isBatchedMesh ) {
if ( object._multiDrawInstances !== null ) {
// @deprecated, r174
warnOnce( 'THREE.WebGLRenderer: renderMultiDrawInstances has been deprecated and will be removed in r184. Append to renderMultiDraw arguments and use indirection.' );
renderer.renderMultiDrawInstances( object._multiDrawStarts, object._multiDrawCounts, object._multiDrawCount, object._multiDrawInstances );
} else {
if ( ! extensions.get( 'WEBGL_multi_draw' ) ) {
// Fallback without WEBGL_multi_draw: emulate multi-draw with one draw call
// per range, passing the draw index through the `_gl_DrawID` uniform.
const starts = object._multiDrawStarts;
const counts = object._multiDrawCounts;
const drawCount = object._multiDrawCount;
const bytesPerElement = index ? attributes.get( index ).bytesPerElement : 1;
const uniforms = properties.get( material ).currentProgram.getUniforms();
for ( let i = 0; i < drawCount; i ++ ) {
uniforms.setValue( _gl, '_gl_DrawID', i );
renderer.render( starts[ i ] / bytesPerElement, counts[ i ] );
}
} else {
renderer.renderMultiDraw( object._multiDrawStarts, object._multiDrawCounts, object._multiDrawCount );
}
}
} else if ( object.isInstancedMesh ) {
renderer.renderInstances( drawStart, drawCount, object.count );
} else if ( geometry.isInstancedBufferGeometry ) {
const maxInstanceCount = geometry._maxInstanceCount !== undefined ? geometry._maxInstanceCount : Infinity;
const instanceCount = Math.min( geometry.instanceCount, maxInstanceCount );
renderer.renderInstances( drawStart, drawCount, instanceCount );
} else {
renderer.render( drawStart, drawCount );
}
};
// Compile
// Precompiles the program(s) for the given material. Transparent, double-sided,
// multi-pass materials get both of their program variants compiled by
// temporarily flipping `material.side` to BackSide and then FrontSide
// (presumably matching the two render passes used for such materials —
// confirm against the render path). `material.side` is restored afterwards.
function prepareMaterial( material, scene, object ) {
if ( material.transparent === true && material.side === DoubleSide && material.forceSinglePass === false ) {
material.side = BackSide;
material.needsUpdate = true;
getProgram( material, scene, object );
material.side = FrontSide;
material.needsUpdate = true;
getProgram( material, scene, object );
// Restore the original double-sided setting.
material.side = DoubleSide;
} else {
getProgram( material, scene, object );
}
}
/**
 * Compiles all materials in the scene with the camera. This is useful to precompile shaders
 * before the first rendering. If you want to add a 3D object to an existing scene, use the third
 * optional parameter for applying the target scene.
 *
 * Note that the (target) scene's lighting and environment must be configured before calling this method.
 *
 * @param {Object3D} scene - The scene or another type of 3D object to precompile.
 * @param {Camera} camera - The camera.
 * @param {?Scene} [targetScene=null] - The target scene.
 * @return {?Set} The precompiled materials.
 */
this.compile = function ( scene, camera, targetScene = null ) {
if ( targetScene === null ) targetScene = scene;
// Push a dedicated render state so compiling does not clobber the state of an
// enclosing render; it is popped (and the previous state restored) at the end.
currentRenderState = renderStates.get( targetScene );
currentRenderState.init( camera );
renderStateStack.push( currentRenderState );
// gather lights from both the target scene and the new object that will be added to the scene.
targetScene.traverseVisible( function ( object ) {
if ( object.isLight && object.layers.test( camera.layers ) ) {
currentRenderState.pushLight( object );
if ( object.castShadow ) {
currentRenderState.pushShadow( object );
}
}
} );
if ( scene !== targetScene ) {
scene.traverseVisible( function ( object ) {
if ( object.isLight && object.layers.test( camera.layers ) ) {
currentRenderState.pushLight( object );
if ( object.castShadow ) {
currentRenderState.pushShadow( object );
}
}
} );
}
currentRenderState.setupLights();
// Only initialize materials in the new scene, not the targetScene.
const materials = new Set();
scene.traverse( function ( object ) {
// Only renderable object types carry materials worth compiling.
if ( ! ( object.isMesh || object.isPoints || object.isLine || object.isSprite ) ) {
return;
}
const material = object.material;
if ( material ) {
if ( Array.isArray( material ) ) {
for ( let i = 0; i < material.length; i ++ ) {
const material2 = material[ i ];
prepareMaterial( material2, targetScene, object );
materials.add( material2 );
}
} else {
prepareMaterial( material, targetScene, object );
materials.add( material );
}
}
} );
// BUGFIX: the previous code did `currentRenderState = renderStateStack.pop()`,
// which re-assigned the state that was just pushed above instead of restoring
// the enclosing state. Pop our state and restore the previous stack top
// (or null when compile() was not nested inside a render).
renderStateStack.pop();
currentRenderState = renderStateStack.length > 0 ? renderStateStack[ renderStateStack.length - 1 ] : null;
return materials;
};
// compileAsync
/**
* Asynchronous version of {@link WebGLRenderer#compile}.
*
* This method makes use of the `KHR_parallel_shader_compile` WebGL extension. Hence,
* it is recommended to use this version of `compile()` whenever possible.
*
* @async
* @param {Object3D} scene - The scene or another type of 3D object to precompile.
* @param {Camera} camera - The camera.
* @param {?Scene} [targetScene=null] - The target scene.
* @return {Promise} A Promise that resolves when the given scene can be rendered without unnecessary stalling due to shader compilation.
*/
this.compileAsync = function ( scene, camera, targetScene = null ) {

	const materials = this.compile( scene, camera, targetScene );

	// Resolve once every material compiled above reports a ready program.
	return new Promise( ( resolve ) => {

		const poll = function checkMaterialsReady() {

			// Drop every material whose program has finished compiling.
			for ( const material of [ ...materials ] ) {

				const materialProperties = properties.get( material );
				const program = materialProperties.currentProgram;

				if ( program.isReady() ) materials.delete( material );

			}

			if ( materials.size === 0 ) {

				resolve( scene );
				return;

			}

			// Some programs are still compiling — check again shortly.
			setTimeout( checkMaterialsReady, 10 );

		};

		if ( extensions.get( 'KHR_parallel_shader_compile' ) !== null ) {

			// The extension lets us query compile status without blocking,
			// so the first check can happen immediately.
			poll();

		} else {

			// Otherwise give the freshly created programs a moment to finish
			// before the first (potentially blocking) status check.
			setTimeout( poll, 10 );

		}

	} );

};
// Animation Loop
// User-supplied per-frame callback; `null` means no animation loop is active.
let onAnimationFrameCallback = null;
// Internal loop entry point: forwards the frame time to the user callback.
function onAnimationFrame( time ) {
if ( onAnimationFrameCallback ) onAnimationFrameCallback( time );
}
// Halt the internal animation loop while an XR session is active
// (frames are presumably driven by the XR session instead — see xr.setAnimationLoop).
function onXRSessionStart() {
animation.stop();
}
// Resume the internal animation loop when the XR session ends.
function onXRSessionEnd() {
animation.start();
}
const animation = new WebGLAnimation();
animation.setAnimationLoop( onAnimationFrame );

// `self` is the global in both window and worker contexts.
if ( typeof self !== 'undefined' ) animation.setContext( self );

/**
 * Registers the callback executed on every animation frame. The callback is
 * also forwarded to the XR manager. Passing `null` stops the internal loop;
 * any other value (re)starts it.
 *
 * @param {?Function} callback - The animation loop callback, or `null` to stop.
 */
this.setAnimationLoop = function ( callback ) {

	onAnimationFrameCallback = callback;
	xr.setAnimationLoop( callback );

	if ( callback === null ) {

		animation.stop();

	} else {

		animation.start();

	}

};

xr.addEventListener( 'sessionstart', onXRSessionStart );
xr.addEventListener( 'sessionend', onXRSessionEnd );
// Rendering
/**
* Renders the given scene (or other type of 3D object) using the given camera.
*
* The render is done to a previously specified render target set by calling {@link WebGLRenderer#setRenderTarget}
* or to the canvas as usual.
*
* By default render buffers are cleared before rendering but you can prevent
* this by setting the property `autoClear` to `false`. If you want to prevent
* only certain buffers being cleared you can set `autoClearColor`, `autoClearDepth`
* or `autoClearStencil` to `false`. To force a clear, use {@link WebGLRenderer#clear}.
*
* @param {Object3D} scene - The scene to render.
* @param {Camera} camera - The camera.
*/
this.render = function ( scene, camera ) {
if ( camera !== undefined && camera.isCamera !== true ) {
console.error( 'THREE.WebGLRenderer.render: camera is not an instance of THREE.Camera.' );
return;
}
// Bail out while the WebGL context is lost; rendering would be a no-op.
if ( _isContextLost === true ) return;
// update scene graph
if ( scene.matrixWorldAutoUpdate === true ) scene.updateMatrixWorld();
// update camera matrices and frustum
if ( camera.parent === null && camera.matrixWorldAutoUpdate === true ) camera.updateMatrixWorld();
if ( xr.enabled === true && xr.isPresenting === true ) {
if ( xr.cameraAutoUpdate === true ) xr.updateCamera( camera );
camera = xr.getCamera(); // use XR camera for rendering
}
//
if ( scene.isScene === true ) scene.onBeforeRender( _this, scene, camera, _currentRenderTarget );
// Render state/list are pushed onto stacks so nested render() calls
// (e.g. from onBeforeRender callbacks) can restore the outer state below.
currentRenderState = renderStates.get( scene, renderStateStack.length );
currentRenderState.init( camera );
renderStateStack.push( currentRenderState );
// Frustum for culling is derived from the combined projection * view matrix.
_projScreenMatrix.multiplyMatrices( camera.projectionMatrix, camera.matrixWorldInverse );
_frustum.setFromProjectionMatrix( _projScreenMatrix );
_localClippingEnabled = this.localClippingEnabled;
_clippingEnabled = clipping.init( this.clippingPlanes, _localClippingEnabled );
currentRenderList = renderLists.get( scene, renderListStack.length );
currentRenderList.init();
renderListStack.push( currentRenderList );
if ( xr.enabled === true && xr.isPresenting === true ) {
// The depth sensing mesh gets a -Infinity group order — presumably so it
// sorts ahead of all scene content; confirm against render list sorting.
const depthSensingMesh = _this.xr.getDepthSensingMesh();
if ( depthSensingMesh !== null ) {
projectObject( depthSensingMesh, camera, - Infinity, _this.sortObjects );
}
}
// Cull and collect all renderable objects into the current render list.
projectObject( scene, camera, 0, _this.sortObjects );
currentRenderList.finish();
if ( _this.sortObjects === true ) {
currentRenderList.sort( _opaqueSort, _transparentSort );
}
// Skip background rendering when XR depth sensing supplies its own.
_renderBackground = xr.enabled === false || xr.isPresenting === false || xr.hasDepthSensing() === false;
if ( _renderBackground ) {
background.addToRenderList( currentRenderList, scene );
}
//
this.info.render.frame ++;
// Shadow map pass runs before the main pass.
if ( _clippingEnabled === true ) clipping.beginShadows();
const shadowsArray = currentRenderState.state.shadowsArray;
shadowMap.render( shadowsArray, scene, camera );
if ( _clippingEnabled === true ) clipping.endShadows();
//
if ( this.info.autoReset === true ) this.info.reset();
// render scene
const opaqueObjects = currentRenderList.opaque;
const transmissiveObjects = currentRenderList.transmissive;
currentRenderState.setupLights();
if ( camera.isArrayCamera ) {
// Multi-view (e.g. XR): render the transmission pass and the scene once
// per sub-camera, each with its own viewport.
const cameras = camera.cameras;
if ( transmissiveObjects.length > 0 ) {
for ( let i = 0, l = cameras.length; i < l; i ++ ) {
const camera2 = cameras[ i ];
renderTransmissionPass( opaqueObjects, transmissiveObjects, scene, camera2 );
}
}
if ( _renderBackground ) background.render( scene );
for ( let i = 0, l = cameras.length; i < l; i ++ ) {
const camera2 = cameras[ i ];
renderScene( currentRenderList, scene, camera2, camera2.viewport );
}
} else {
if ( transmissiveObjects.length > 0 ) renderTransmissionPass( opaqueObjects, transmissiveObjects, scene, camera );
if ( _renderBackground ) background.render( scene );
renderScene( currentRenderList, scene, camera );
}
//
if ( _currentRenderTarget !== null && _currentActiveMipmapLevel === 0 ) {
// resolve multisample renderbuffers to a single-sample texture if necessary
textures.updateMultisampleRenderTarget( _currentRenderTarget );
// Generate mipmap if we're using any kind of mipmap filtering
textures.updateRenderTargetMipmap( _currentRenderTarget );
}
//
if ( scene.isScene === true ) scene.onAfterRender( _this, scene, camera );
// _gl.finish();
// Reset cached GL binding/material/camera state so the next frame starts clean.
bindingStates.resetDefaultState();
_currentMaterialId = - 1;
_currentCamera = null;
// Restore the enclosing render state/list (non-null only for nested renders).
renderStateStack.pop();
if ( renderStateStack.length > 0 ) {
currentRenderState = renderStateStack[ renderStateStack.length - 1 ];
if ( _clippingEnabled === true ) clipping.setGlobalState( _this.clippingPlanes, currentRenderState.state.camera );
} else {
currentRenderState = null;
}
renderListStack.pop();
if ( renderListStack.length > 0 ) {
currentRenderList = renderListStack[ renderListStack.length - 1 ];
} else {
currentRenderList = null;
}
};
// Recursively walks the scene graph, routing lights into the current render
// state and renderable objects (sprites, meshes, lines, points) into the
// current render list. `groupOrder` is inherited from the nearest enclosing
// Group's renderOrder; when `sortObjects` is true, each renderable's
// clip-space z is computed (into the shared temp `_vector4`) for depth sorting.
function projectObject( object, camera, groupOrder, sortObjects ) {
// Invisible objects prune their whole subtree; layer-test failures only
// skip the object itself (children are still traversed below).
if ( object.visible === false ) return;
const visible = object.layers.test( camera.layers );
if ( visible ) {
if ( object.isGroup ) {
// Groups contribute their renderOrder to all descendants.
groupOrder = object.renderOrder;
} else if ( object.isLOD ) {
if ( object.autoUpdate === true ) object.update( camera );
} else if ( object.isLight ) {
currentRenderState.pushLight( object );
if ( object.castShadow ) {
currentRenderState.pushShadow( object );
}
} else if ( object.isSprite ) {
if ( ! object.frustumCulled || _frustum.intersectsSprite( object ) ) {
if ( sortObjects ) {
// Sprite depth key: world position projected to clip space.
_vector4.setFromMatrixPosition( object.matrixWorld )
.applyMatrix4( _projScreenMatrix );
}
const geometry = objects.update( object );
const material = object.material;
if ( material.visible ) {
currentRenderList.push( object, geometry, material, groupOrder, _vector4.z, null );
}
}
} else if ( object.isMesh || object.isLine || object.isPoints ) {
if ( ! object.frustumCulled || _frustum.intersectsObject( object ) ) {
const geometry = objects.update( object );
const material = object.material;
if ( sortObjects ) {
// Depth key: bounding-sphere center projected to clip space. Some
// objects carry their own bounding sphere; otherwise use the geometry's.
if ( object.boundingSphere !== undefined ) {
if ( object.boundingSphere === null ) object.computeBoundingSphere();
_vector4.copy( object.boundingSphere.center );
} else {
if ( geometry.boundingSphere === null ) geometry.computeBoundingSphere();
_vector4.copy( geometry.boundingSphere.center );
}
_vector4
.applyMatrix4( object.matrixWorld )
.applyMatrix4( _projScreenMatrix );
}
if ( Array.isArray( material ) ) {
// Multi-material: push one render item per geometry group.
const groups = geometry.groups;
for ( let i = 0, l = groups.length; i < l; i ++ ) {
const group = groups[ i ];
const groupMaterial = material[ group.materialIndex ];
if ( groupMaterial && groupMaterial.visible ) {
currentRenderList.push( object, geometry, groupMaterial, groupOrder, _vector4.z, group );
}
}
} else if ( material.visible ) {
currentRenderList.push( object, geometry, material, groupOrder, _vector4.z, null );
}
}
}
}
// Children are always traversed, even when this object failed the layer test.
const children = object.children;
for ( let i = 0, l = children.length; i < l; i ++ ) {
projectObject( children[ i ], camera, groupOrder, sortObjects );
}
}
/**
 * Renders one render list with one camera: sets up per-view lights, global
 * clipping state and (optionally) the viewport, then draws the opaque,
 * transmissive and transparent passes in that order.
 */
function renderScene( currentRenderList, scene, camera, viewport ) {

	currentRenderState.setupLightsView( camera );

	if ( _clippingEnabled === true ) clipping.setGlobalState( _this.clippingPlanes, camera );

	if ( viewport ) state.viewport( _currentViewport.copy( viewport ) );

	const { opaque, transmissive, transparent } = currentRenderList;

	if ( opaque.length > 0 ) renderObjects( opaque, scene, camera );
	if ( transmissive.length > 0 ) renderObjects( transmissive, scene, camera );
	if ( transparent.length > 0 ) renderObjects( transparent, scene, camera );

	// Ensure depth buffer writing is enabled so it can be cleared on next render

	state.buffers.depth.setTest( true );
	state.buffers.depth.setMask( true );
	state.buffers.color.setMask( true );

	state.setPolygonOffset( false );

}
function renderTransmissionPass( opaqueObjects, transmissiveObjects, scene, camera ) {
const overrideMaterial = scene.isScene === true ? scene.overrideMaterial : null;
if ( overrideMaterial !== null ) {
return;
}
if ( currentRenderState.state.transmissionRenderTarget[ camera.id ] === undefined ) {
currentRenderState.state.transmissionRenderTarget[ camera.id ] = new WebGLRenderTarget( 1, 1, {
generateMipmaps: true,
type: ( extensions.has( 'EXT_color_buffer_half_float' ) || extensions.has( 'EXT_color_buffer_float' ) ) ? HalfFloatType : UnsignedByteType,
minFilter: LinearMipmapLinearFilter,
samples: 4,
stencilBuffer: stencil,
resolveDepthBuffer: false,
resolveStencilBuffer: false,
colorSpace: ColorManagement.workingColorSpace,
} );
// debug
/*
const geometry = new PlaneGeometry();
const material = new MeshBasicMaterial( { map: _transmissionRenderTarget.texture } );
const mesh = new Mesh( geometry, material );
scene.add( mesh );
*/
}
const transmissionRenderTarget = currentRenderState.state.transmissionRenderTarget[ camera.id ];
const activeViewport = camera.viewport || _currentViewport;
transmissionRenderTarget.setSize( activeViewport.z * _this.transmissionResolutionScale, activeViewport.w * _this.transmissionResolutionScale );
//
const currentRenderTarget = _this.getRenderTarget();
_this.setRenderTarget( transmissionRenderTarget );
_this.getClearColor( _currentClearColor );
_currentClearAlpha = _this.getClearAlpha();
if ( _currentClearAlpha < 1 ) _this.setClearColor( 0xffffff, 0.5 );
_this.clear();
if ( _renderBackground ) background.render( scene );
// Turn off the features which can affect the frag color for opaque objects pass.
// Otherwise they are applied twice in opaque objects pass and transmission objects pass.
const currentToneMapping = _this.toneMapping;
_this.toneMapping = NoToneMapping;
// Remove viewport from camera to avoid nested render calls resetting viewport to it (e.g Reflector).
// Transmission render pass requires viewport to match the transmissionRenderTarget.
const currentCameraViewport = camera.viewport;
if ( camera.viewport !== undefined ) camera.viewport = undefined;
currentRenderState.setupLightsView( camera );
if ( _clippingEnabled === true ) clipping.setGlobalState( _this.clippingPlanes, camera );
renderObjects( opaqueObjects, scene, camera );
textu