@babylonjs/viewer
The Babylon Viewer aims to simplify a specific but common Babylon.js use case: loading, viewing, and interacting with a 3D model.
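A minimal usage sketch (assuming the Viewer class defined below is exported from the package entry point and that a Babylon.js Engine has already been created for a canvas; exact exports and helper entry points may differ):

import { Engine } from '@babylonjs/core/Engines/engine.js';
import { Viewer } from '@babylonjs/viewer';

const engine = new Engine(document.querySelector('canvas'), true);
const viewer = new Viewer(engine, {
    environmentConfig: { intensity: 1.2 },
    shadowConfig: { quality: "normal" },
});
await viewer.loadModel('path/to/model.glb');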
import { ArcRotateCamera, ComputeAlpha, ComputeBeta } from '@babylonjs/core/Cameras/arcRotateCamera.js';
import { Constants } from '@babylonjs/core/Engines/constants.js';
import { PointerEventTypes } from '@babylonjs/core/Events/pointerEvents.js';
import { HemisphericLight } from '@babylonjs/core/Lights/hemisphericLight.js';
import { DirectionalLight } from '@babylonjs/core/Lights/directionalLight.js';
import { LoadAssetContainerAsync } from '@babylonjs/core/Loading/sceneLoader.js';
import { BackgroundMaterial } from '@babylonjs/core/Materials/Background/backgroundMaterial.js';
import { ImageProcessingConfiguration } from '@babylonjs/core/Materials/imageProcessingConfiguration.js';
import { PBRMaterial } from '@babylonjs/core/Materials/PBR/pbrMaterial.js';
import { Texture } from '@babylonjs/core/Materials/Textures/texture.js';
import { Color3, Color4 } from '@babylonjs/core/Maths/math.color.js';
import { Clamp, Lerp } from '@babylonjs/core/Maths/math.scalar.functions.js';
import { Vector3, Matrix, Vector2 } from '@babylonjs/core/Maths/math.vector.js';
import { Viewport } from '@babylonjs/core/Maths/math.viewport.js';
import { GetHotSpotToRef } from '@babylonjs/core/Meshes/abstractMesh.hotSpot.js';
import { CreateBox } from '@babylonjs/core/Meshes/Builders/boxBuilder.js';
import { Mesh } from '@babylonjs/core/Meshes/mesh.js';
import { RemoveUnreferencedVerticesData, computeMaxExtents } from '@babylonjs/core/Meshes/meshUtils.js';
import { BuildTuple } from '@babylonjs/core/Misc/arrayTools.js';
import { AsyncLock } from '@babylonjs/core/Misc/asyncLock.js';
import { deepMerge } from '@babylonjs/core/Misc/deepMerger.js';
import { AbortError } from '@babylonjs/core/Misc/error.js';
import { Logger } from '@babylonjs/core/Misc/logger.js';
import { Observable } from '@babylonjs/core/Misc/observable.js';
import { SceneOptimizerOptions, HardwareScalingOptimization, SceneOptimizer } from '@babylonjs/core/Misc/sceneOptimizer.js';
import { SnapshotRenderingHelper } from '@babylonjs/core/Misc/snapshotRenderingHelper.js';
import { GetExtensionFromUrl } from '@babylonjs/core/Misc/urlTools.js';
import { Scene } from '@babylonjs/core/scene.js';
import { registerBuiltInLoaders } from '@babylonjs/loaders/dynamic.js';
import { _RetryWithInterval } from '@babylonjs/core/Misc/timingTools.js';
import { Lazy } from '@babylonjs/core/Misc/lazy.js';
import { __decorate } from '@babylonjs/core/tslib.es6.js';
import { Deferred } from '@babylonjs/core/Misc/deferred.js';
// eslint-disable-next-line @typescript-eslint/promise-function-async
const LazySSAODependenciesPromise = new Lazy(() => Promise.all([
import('@babylonjs/core/PostProcesses/RenderPipeline/Pipelines/ssao2RenderingPipeline.js'),
import('@babylonjs/core/Rendering/prePassRendererSceneComponent.js'),
import('@babylonjs/core/Rendering/geometryBufferRendererSceneComponent.js'),
import('@babylonjs/core/Engines/Extensions/engine.multiRender.js'),
import('@babylonjs/core/Engines/WebGPU/Extensions/engine.multiRender.js'),
]));
const shadowQualityOptions = ["none", "normal", "high"];
const toneMappingOptions = ["none", "standard", "aces", "neutral"];
/**
* Checks if the given value is a valid tone mapping option.
* @param value The value to check.
* @returns True if the value is a valid tone mapping option, otherwise false.
*/
function IsToneMapping(value) {
return toneMappingOptions.includes(value);
}
/**
* Checks if the given value is a valid shadow quality option.
* @param value The value to check.
* @returns True if the value is a valid shadow quality option, otherwise false.
*/
function IsShadowQuality(value) {
return shadowQualityOptions.includes(value);
}
function throwIfAborted(...abortSignals) {
for (const signal of abortSignals) {
signal?.throwIfAborted();
}
}
async function createCubeTexture(url, scene, extension) {
extension = extension ?? GetExtensionFromUrl(url);
const instantiateTexture = await (async () => {
if (extension === ".hdr") {
const { HDRCubeTexture } = await import('@babylonjs/core/Materials/Textures/hdrCubeTexture.js');
return () => new HDRCubeTexture(url, scene, 256, false, true, false, true, undefined, undefined, undefined, true, true);
}
else {
const { CubeTexture } = await import('@babylonjs/core/Materials/Textures/cubeTexture.js');
return () => new CubeTexture(url, scene, null, false, null, null, null, undefined, true, extension, true);
}
})();
const originalUseDelayedTextureLoading = scene.useDelayedTextureLoading;
try {
scene.useDelayedTextureLoading = false;
return instantiateTexture();
}
finally {
scene.useDelayedTextureLoading = originalUseDelayedTextureLoading;
}
}
function createSkybox(scene, camera, reflectionTexture, blur) {
const originalBlockMaterialDirtyMechanism = scene.blockMaterialDirtyMechanism;
scene.blockMaterialDirtyMechanism = true;
try {
const hdrSkybox = CreateBox("hdrSkyBox", { sideOrientation: Mesh.BACKSIDE }, scene);
const hdrSkyboxMaterial = new BackgroundMaterial("skyBox", scene);
// Use the default image processing configuration on the skybox (e.g. don't apply tone mapping, contrast, or exposure).
hdrSkyboxMaterial.imageProcessingConfiguration = new ImageProcessingConfiguration();
hdrSkyboxMaterial.reflectionTexture = reflectionTexture;
reflectionTexture.coordinatesMode = Texture.SKYBOX_MODE;
hdrSkyboxMaterial.reflectionBlur = blur;
hdrSkybox.material = hdrSkyboxMaterial;
hdrSkybox.isPickable = false;
hdrSkybox.infiniteDistance = true;
hdrSkybox.applyFog = false;
updateSkybox(hdrSkybox, camera);
return hdrSkybox;
}
finally {
scene.blockMaterialDirtyMechanism = originalBlockMaterialDirtyMechanism;
}
}
function updateSkybox(skybox, camera) {
skybox?.scaling.setAll((camera.maxZ - camera.minZ) / 2);
}
function computeModelsMaxExtents(models) {
return models.flatMap((model) => {
return computeMaxExtents(model.assetContainer.meshes, model.assetContainer.animationGroups[model.selectedAnimation]);
});
}
function reduceMeshesExtendsToBoundingInfo(maxExtents) {
if (maxExtents.length === 0) {
return null;
}
const min = new Vector3(Math.min(...maxExtents.map((e) => e.minimum.x)), Math.min(...maxExtents.map((e) => e.minimum.y)), Math.min(...maxExtents.map((e) => e.minimum.z)));
const max = new Vector3(Math.max(...maxExtents.map((e) => e.maximum.x)), Math.max(...maxExtents.map((e) => e.maximum.y)), Math.max(...maxExtents.map((e) => e.maximum.z)));
const size = max.subtract(min);
const center = min.add(size.scale(0.5));
return {
extents: {
min: min.asArray(),
max: max.asArray(),
},
size: size.asArray(),
center: center.asArray(),
};
}
/**
* Adjusts the light's target direction to ensure it's not too flat and points downwards.
* @param targetDirection The target direction of the light.
* @returns The adjusted target direction of the light.
*/
function adjustLightTargetDirection(targetDirection) {
const lightSteepnessThreshold = -0.01; // threshold to trigger steepness adjustment
const lightSteepnessFactor = 10; // the factor to multiply Y by if it's too flat
const minLightDirectionY = -0.05; // the minimum steepness for light direction Y
const adjustedDirection = targetDirection.clone();
// ensure light points downwards
if (adjustedDirection.y > 0) {
adjustedDirection.y *= -1;
}
// if light is too flat (pointing almost horizontally or very slightly down), make it steeper
if (adjustedDirection.y > lightSteepnessThreshold) {
adjustedDirection.y = Math.min(adjustedDirection.y * lightSteepnessFactor, minLightDirectionY);
}
return adjustedDirection;
}
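// Worked example (illustrative): a direction of (0, 0.2, 1) is flipped to (0, -0.2, 1) and returned
// unchanged, since -0.2 is already steeper than the -0.01 threshold. A nearly flat (0, -0.008, 1) is
// scaled to (0, -0.08, 1), while (0, -0.002, 1) only reaches -0.02 after scaling and is therefore
// pushed down to the -0.05 minimum, yielding (0, -0.05, 1).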
/**
* Compute the bounding info for the models by computing their maximum extents, size, and center, taking animations, skeletons, and morph targets into account.
* @param models The models to consider when computing the bounding info
* @returns The computed bounding info for the models or null
*/
function computeModelsBoundingInfos(models) {
const maxExtents = computeModelsMaxExtents(models);
return reduceMeshesExtendsToBoundingInfo(maxExtents);
}
// This helper is used in functions that are naturally void-returning but need to call an async, Promise-returning function.
// If the awaited promise rejects with any error other than AbortError, the error is logged.
function observePromise(promise) {
// eslint-disable-next-line @typescript-eslint/no-floating-promises
(async () => {
try {
await promise;
}
catch (error) {
if (!(error instanceof AbortError)) {
Logger.Error(error);
}
}
})();
}
/**
* Generates a HotSpot from a camera by computing its spherical coordinates (alpha, beta, radius) relative to a target point.
*
* The target point is determined using the camera's forward ray:
* - If the ray intersects with a mesh in the model, the intersection point is used as the target.
* - If no intersection is found, a fallback target is calculated by projecting the distance
* between the camera and the model's center along the camera's forward direction.
*
* @param model The reference model used to determine the target point.
* @param camera The camera from which the HotSpot is generated.
* @returns A HotSpot object.
*/
async function CreateHotSpotFromCamera(model, camera) {
await import('@babylonjs/core/Culling/ray.js');
const scene = model.assetContainer.scene;
const ray = camera.getForwardRay(100, camera.getWorldMatrix(), camera.globalPosition); // Set starting point to camera global position
const camGlobalPos = camera.globalPosition.clone();
// Target
let radius = 0.0001; // Just to avoid division by zero
const targetPoint = Vector3.Zero();
const pickingInfo = scene.pickWithRay(ray, (mesh) => model.assetContainer.meshes.includes(mesh));
if (pickingInfo && pickingInfo.hit) {
targetPoint.copyFrom(pickingInfo.pickedPoint); // Use intersection point as target
}
else {
const worldBounds = model.getWorldBounds();
const centerArray = worldBounds ? worldBounds.center : [0, 0, 0];
const distancePoint = Vector3.FromArray(centerArray);
const direction = ray.direction.clone();
targetPoint.copyFrom(camGlobalPos);
radius = Vector3.Distance(camGlobalPos, distancePoint);
direction.scaleAndAddToRef(radius, targetPoint); // Compute fallback target
}
const computationVector = Vector3.Zero();
camGlobalPos.subtractToRef(targetPoint, computationVector);
// Radius
if (pickingInfo && pickingInfo.hit) {
radius = computationVector.length();
}
// Alpha and Beta
const alpha = ComputeAlpha(computationVector);
const beta = ComputeBeta(computationVector.y, radius);
const targetArray = targetPoint.asArray();
return { type: "world", position: targetArray, normal: targetArray, cameraOrbit: [alpha, beta, radius] };
}
/**
* The default options for the Viewer.
*/
const DefaultViewerOptions = {
clearColor: [0, 0, 0, 0],
autoSuspendRendering: true,
environmentConfig: {
intensity: 1,
blur: 0.3,
rotation: 0,
},
environmentLighting: "auto",
environmentSkybox: "none",
cameraAutoOrbit: {
enabled: false,
delay: 2000,
speed: 0.05,
},
animationAutoPlay: false,
animationSpeed: 1,
shadowConfig: {
quality: "none",
},
postProcessing: {
toneMapping: "neutral",
contrast: 1,
exposure: 1,
ssao: "auto",
},
useRightHandedSystem: false,
};
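// Example sketch (assuming an existing `engine`): constructor options are read field by field with
// these defaults as fallbacks, so a partial options object is enough:
//
//     const viewer = new Viewer(engine, {
//         environmentConfig: { blur: 0.5 }, // intensity and rotation fall back to 1 and 0
//         shadowConfig: { quality: "normal" },
//     });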
const defaultLoadEnvironmentOptions = {
lighting: true,
skybox: true,
};
/**
* Provides the result of a hot spot query.
*/
class ViewerHotSpotResult {
constructor() {
/**
* 2D canvas position in pixels
*/
this.screenPosition = [NaN, NaN];
/**
* 3D world coordinates
*/
this.worldPosition = [NaN, NaN, NaN];
/**
* The visibility range is [-1..1]. A value of 0 means the camera eye is on the plane.
*/
this.visibility = NaN;
}
}
/**
* @experimental
* Provides an experience for viewing a single 3D model.
* @remarks
* The Viewer is not tied to a specific UI framework and can be used with Babylon.js in a browser or with Babylon Native.
*/
class Viewer {
constructor(_engine, _options) {
this._engine = _engine;
this._options = _options;
/**
* When enabled, the Viewer will emit additional diagnostic logs to the console.
*/
this.showDebugLogs = false;
/**
* Fired when the environment has changed.
*/
this.onEnvironmentChanged = new Observable();
/**
* Fired when the environment configuration has changed.
*/
this.onEnvironmentConfigurationChanged = new Observable();
/**
* Fired when an error occurs while loading the environment.
*/
this.onEnvironmentError = new Observable();
/**
* Fired when the shadows configuration changes.
*/
this.onShadowsConfigurationChanged = new Observable();
/**
* Fired when the post processing state changes.
*/
this.onPostProcessingChanged = new Observable();
/**
* Fired when a model is loaded into the viewer (or unloaded from the viewer).
* @remarks
* The event argument is the source that was loaded, or null if no model is loaded.
*/
this.onModelChanged = new Observable();
/**
* Fired when an error occurs while loading a model.
*/
this.onModelError = new Observable();
/**
* Fired when progress changes on loading activity.
*/
this.onLoadingProgressChanged = new Observable();
/**
* Fired when the camera auto orbit state changes.
*/
this.onCameraAutoOrbitChanged = new Observable();
/**
* Fired when the selected animation changes.
*/
this.onSelectedAnimationChanged = new Observable();
/**
* Fired when the animation speed changes.
*/
this.onAnimationSpeedChanged = new Observable();
/**
* Fired when the selected animation is playing or paused.
*/
this.onIsAnimationPlayingChanged = new Observable();
/**
* Fired when the current point on the selected animation timeline changes.
*/
this.onAnimationProgressChanged = new Observable();
/**
* Fired when the selected material variant changes.
*/
this.onSelectedMaterialVariantChanged = new Observable();
/**
* Fired when the hot spots object changes to a completely new object instance.
*/
this.onHotSpotsChanged = new Observable();
/**
* Fired when the camerasAsHotSpots property changes.
*/
this.onCamerasAsHotSpotsChanged = new Observable();
this._renderedLastFrame = null;
this._sceneOptimizer = null;
this._tempVectors = BuildTuple(4, Vector3.Zero);
this._meshDataCache = new Map();
this._renderLoopController = null;
this._loadedModelsBacking = [];
this._activeModelBacking = null;
this._environmentSkyboxMode = "none";
this._environmentLightingMode = "none";
this._skybox = null;
this._skyboxBlur = this._options?.environmentConfig?.blur ?? DefaultViewerOptions.environmentConfig.blur;
this._skyboxTexture = null;
this._reflectionTexture = null;
this._reflectionsIntensity = this._options?.environmentConfig?.intensity ?? DefaultViewerOptions.environmentConfig.intensity;
this._reflectionsRotation = this._options?.environmentConfig?.rotation ?? DefaultViewerOptions.environmentConfig.rotation;
this._light = null;
this._ssaoOption = this._options?.postProcessing?.ssao ?? DefaultViewerOptions.postProcessing.ssao;
this._ssaoPipeline = null;
this._autoSuspendRendering = this._options?.autoSuspendRendering ?? DefaultViewerOptions.autoSuspendRendering;
this._sceneMutated = false;
this._suspendRenderCount = 0;
this._isDisposed = false;
this._loadModelLock = new AsyncLock();
this._loadModelAbortController = null;
this._loadEnvironmentLock = new AsyncLock();
this._loadEnvironmentAbortController = null;
this._camerasAsHotSpotsAbortController = null;
this._updateShadowsLock = new AsyncLock();
this._shadowsAbortController = null;
this._loadOperations = new Set();
this._activeAnimationObservers = [];
this._animationSpeed = this._options?.animationSpeed ?? DefaultViewerOptions.animationSpeed;
this._camerasAsHotSpots = false;
this._hotSpots = this._options?.hotSpots ?? {};
this._shadowQuality = this._options?.shadowConfig?.quality ?? DefaultViewerOptions.shadowConfig.quality;
this._shadowState = {};
this._defaultHardwareScalingLevel = this._lastHardwareScalingLevel = this._engine.getHardwareScalingLevel();
{
const scene = new Scene(this._engine);
scene.useRightHandedSystem = this._options?.useRightHandedSystem ?? DefaultViewerOptions.useRightHandedSystem;
const defaultMaterial = new PBRMaterial("default Material", scene);
defaultMaterial.albedoColor = new Color3(0.4, 0.4, 0.4);
defaultMaterial.metallic = 0;
defaultMaterial.roughness = 1;
defaultMaterial.baseDiffuseRoughness = 1;
defaultMaterial.microSurface = 0;
scene.defaultMaterial = defaultMaterial;
// Deduce tone mapping, contrast, and exposure from the scene (so the viewer stays in sync if anything mutates these values directly on the scene).
this._toneMappingEnabled = scene.imageProcessingConfiguration.toneMappingEnabled;
this._toneMappingType = scene.imageProcessingConfiguration.toneMappingType;
this._contrast = scene.imageProcessingConfiguration.contrast;
this._exposure = scene.imageProcessingConfiguration.exposure;
this._imageProcessingConfigurationObserver = scene.imageProcessingConfiguration.onUpdateParameters.add(() => {
let hasChanged = false;
if (this._toneMappingEnabled !== scene.imageProcessingConfiguration.toneMappingEnabled) {
this._toneMappingEnabled = scene.imageProcessingConfiguration.toneMappingEnabled;
hasChanged = true;
}
if (this._toneMappingType !== scene.imageProcessingConfiguration.toneMappingType) {
this._toneMappingType = scene.imageProcessingConfiguration.toneMappingType;
hasChanged = true;
}
if (this._contrast !== scene.imageProcessingConfiguration.contrast) {
this._contrast = scene.imageProcessingConfiguration.contrast;
hasChanged = true;
}
if (this._exposure !== scene.imageProcessingConfiguration.exposure) {
this._exposure = scene.imageProcessingConfiguration.exposure;
hasChanged = true;
}
if (hasChanged) {
this.onPostProcessingChanged.notifyObservers();
}
});
const camera = new ArcRotateCamera("Viewer Default Camera", 0, 0, 1, Vector3.Zero(), scene);
camera.useInputToRestoreState = false;
camera.useAutoRotationBehavior = true;
camera.onViewMatrixChangedObservable.add(() => {
this._markSceneMutated();
});
scene.onClearColorChangedObservable.add(() => {
this._markSceneMutated();
});
scene.onPointerObservable.add(async (pointerInfo) => {
const pickingInfo = await this._pick(pointerInfo.event.offsetX, pointerInfo.event.offsetY);
if (pickingInfo?.pickedPoint) {
const distance = pickingInfo.pickedPoint.subtract(camera.position).dot(camera.getForwardRay().direction);
// Immediately reset the target and the radius based on the distance to the picked point.
// This eliminates unnecessary camera movement on the local z-axis when interpolating.
camera.target = camera.position.add(camera.getForwardRay().direction.scale(distance));
camera.radius = distance;
camera.interpolateTo(undefined, undefined, undefined, pickingInfo.pickedPoint);
}
else {
this.resetCamera(true);
}
}, PointerEventTypes.POINTERDOUBLETAP);
scene.onNewCameraAddedObservable.add((camera) => {
if (this.camerasAsHotSpots) {
observePromise(this._addCameraHotSpot(camera, this._camerasAsHotSpotsAbortController?.signal));
}
});
scene.onCameraRemovedObservable.add((camera) => {
this._removeCameraHotSpot(camera);
});
this._scene = scene;
this._camera = camera;
}
this._scene.skipFrustumClipping = true;
this._scene.skipPointerDownPicking = true;
this._scene.skipPointerUpPicking = true;
this._scene.skipPointerMovePicking = true;
this._snapshotHelper = new SnapshotRenderingHelper(this._scene, { morphTargetsNumMaxInfluences: 30 });
// this._snapshotHelper.showDebugLogs = true;
this._beforeRenderObserver = this._scene.onBeforeRenderObservable.add(() => {
this._snapshotHelper.updateMesh(this._scene.meshes);
});
this._camera.attachControl();
this._autoRotationBehavior = this._camera.getBehaviorByName("AutoRotation");
this._reset(false, "camera");
// Load a default light, but ignore errors as the user might be immediately loading their own environment.
observePromise(this.resetEnvironment());
this._beginRendering();
// eslint-disable-next-line @typescript-eslint/no-this-alias
const viewer = this;
this._options?.onInitialized?.({
scene: viewer._scene,
camera: viewer._camera,
get model() {
return viewer._activeModel ?? null;
},
suspendRendering: () => this._suspendRendering(),
markSceneMutated: () => this._markSceneMutated(),
pick: async (screenX, screenY) => await this._pick(screenX, screenY),
});
this._reset(false, "source", "environment", "post-processing");
}
/**
* The camera auto orbit configuration.
*/
get cameraAutoOrbit() {
return {
enabled: this._camera.behaviors.includes(this._autoRotationBehavior),
speed: this._autoRotationBehavior.idleRotationSpeed,
delay: this._autoRotationBehavior.idleRotationWaitTime,
};
}
set cameraAutoOrbit(value) {
if (value.enabled !== undefined && value.enabled !== this.cameraAutoOrbit.enabled) {
if (value.enabled) {
this._camera.addBehavior(this._autoRotationBehavior);
}
else {
this._camera.removeBehavior(this._autoRotationBehavior);
}
}
if (value.delay !== undefined) {
this._autoRotationBehavior.idleRotationWaitTime = value.delay;
}
if (value.speed !== undefined) {
this._autoRotationBehavior.idleRotationSpeed = value.speed;
}
this.onCameraAutoOrbitChanged.notifyObservers();
}
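// Example sketch (assuming an existing `viewer`): partial updates are applied field by field:
//
//     viewer.cameraAutoOrbit = { enabled: true, speed: 0.1 };
//     viewer.cameraAutoOrbit = { delay: 5000 }; // enabled and speed keep their current values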
/**
* Get the current environment configuration.
*/
get environmentConfig() {
return {
intensity: this._reflectionsIntensity,
blur: this._skyboxBlur,
rotation: this._reflectionsRotation,
};
}
set environmentConfig(value) {
if (value.blur !== undefined) {
this._changeSkyboxBlur(value.blur);
}
if (value.intensity !== undefined) {
this._changeEnvironmentIntensity(value.intensity);
this._changeShadowLightIntensity();
}
if (value.rotation !== undefined) {
this._changeEnvironmentRotation(value.rotation);
this._rotateShadowLightWithEnvironment();
}
this.onEnvironmentConfigurationChanged.notifyObservers();
}
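// Example sketch (assuming an existing `viewer`): partial updates are applied field by field:
//
//     viewer.environmentConfig = { intensity: 1.5 };                 // brighten the environment lighting (and skybox)
//     viewer.environmentConfig = { rotation: Math.PI / 2, blur: 0 }; // rotate the environment, sharpen the skybox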
/**
* Get the current shadow configuration.
*/
get shadowConfig() {
return {
quality: this._shadowQuality,
};
}
/**
* Update the shadow configuration.
* @param value The new shadow configuration.
*/
async updateShadows(value) {
if (value.quality && this._shadowQuality !== value.quality) {
this._shadowQuality = value.quality;
await this._updateShadows();
this.onShadowsConfigurationChanged.notifyObservers();
}
}
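// Example sketch (assuming an existing `viewer`):
//
//     await viewer.updateShadows({ quality: "high" }); // "none" | "normal" | "high"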
_changeSkyboxBlur(value) {
if (value !== this._skyboxBlur) {
this._skyboxBlur = value;
if (this._skybox) {
const material = this._skybox.material;
if (material instanceof BackgroundMaterial) {
this._snapshotHelper.disableSnapshotRendering();
material.reflectionBlur = this._skyboxBlur;
this._snapshotHelper.enableSnapshotRendering();
this._markSceneMutated();
}
}
}
}
/**
* Change the environment rotation.
* @param value the rotation in radians
*/
_changeEnvironmentRotation(value) {
if (value !== this._reflectionsRotation) {
this._reflectionsRotation = value;
this._snapshotHelper.disableSnapshotRendering();
if (this._skyboxTexture) {
this._skyboxTexture.rotationY = this._reflectionsRotation;
}
if (this._reflectionTexture) {
this._reflectionTexture.rotationY = this._reflectionsRotation;
}
this._snapshotHelper.enableSnapshotRendering();
this._markSceneMutated();
}
}
_changeEnvironmentIntensity(value) {
if (value !== this._reflectionsIntensity) {
this._reflectionsIntensity = value;
this._snapshotHelper.disableSnapshotRendering();
if (this._skyboxTexture) {
this._skyboxTexture.level = this._reflectionsIntensity;
}
if (this._reflectionTexture) {
this._reflectionTexture.level = this._reflectionsIntensity;
}
this._snapshotHelper.enableSnapshotRendering();
this._markSceneMutated();
}
}
_updateAutoClear() {
// NOTE: Not clearing (even when every pixel is rendered with an opaque color) results in rendering
// artifacts in Chromium browsers on Intel-based Macs (see https://issues.chromium.org/issues/396612322).
// The performance impact of clearing when not necessary is very small, so for now just always auto clear.
//this._scene.autoClear = !this._skybox || !this._skybox.isEnabled() || !this._skyboxVisible;
this._scene.autoClear = true;
this._markSceneMutated();
}
/**
* The post processing configuration.
*/
get postProcessing() {
let toneMapping = "none";
if (this._toneMappingEnabled) {
switch (this._toneMappingType) {
case ImageProcessingConfiguration.TONEMAPPING_STANDARD:
toneMapping = "standard";
break;
case ImageProcessingConfiguration.TONEMAPPING_ACES:
toneMapping = "aces";
break;
case ImageProcessingConfiguration.TONEMAPPING_KHR_PBR_NEUTRAL:
toneMapping = "neutral";
break;
}
}
return {
toneMapping,
contrast: this._contrast,
exposure: this._exposure,
ssao: this._ssaoOption,
};
}
set postProcessing(value) {
this._snapshotHelper.disableSnapshotRendering();
if (value.toneMapping !== undefined) {
if (value.toneMapping === "none") {
this._scene.imageProcessingConfiguration.toneMappingEnabled = false;
}
else {
switch (value.toneMapping) {
case "standard":
this._scene.imageProcessingConfiguration.toneMappingType = ImageProcessingConfiguration.TONEMAPPING_STANDARD;
break;
case "aces":
this._scene.imageProcessingConfiguration.toneMappingType = ImageProcessingConfiguration.TONEMAPPING_ACES;
break;
case "neutral":
this._scene.imageProcessingConfiguration.toneMappingType = ImageProcessingConfiguration.TONEMAPPING_KHR_PBR_NEUTRAL;
break;
}
this._scene.imageProcessingConfiguration.toneMappingEnabled = true;
}
}
if (value.contrast !== undefined) {
this._scene.imageProcessingConfiguration.contrast = value.contrast;
}
if (value.exposure !== undefined) {
this._scene.imageProcessingConfiguration.exposure = value.exposure;
}
if (value.ssao && this._ssaoOption !== value.ssao) {
this._ssaoOption = value.ssao;
this._updateSSAOPipeline();
}
this._scene.imageProcessingConfiguration.isEnabled = this._toneMappingEnabled || this._contrast !== 1 || this._exposure !== 1 || this._ssaoPipeline !== null;
this._snapshotHelper.enableSnapshotRendering();
this._markSceneMutated();
}
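// Example sketch (assuming an existing `viewer`): only the provided fields are changed:
//
//     viewer.postProcessing = { toneMapping: "aces", exposure: 1.2 };
//     viewer.postProcessing = { ssao: "enabled" }; // "auto" | "enabled" | "disabled"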
/**
* Gets information about loading activity.
* @remarks
* false indicates no loading activity.
* true indicates loading activity with no progress information.
* A number between 0 and 1 indicates loading activity with progress information.
*/
get loadingProgress() {
if (this._loadOperations.size > 0) {
let totalProgress = 0;
for (const operation of this._loadOperations) {
if (operation.progress == null) {
return true;
}
totalProgress += operation.progress;
}
return totalProgress / this._loadOperations.size;
}
return false;
}
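// Example sketch (assuming an existing `viewer`): combine with onLoadingProgressChanged to drive a progress UI:
//
//     viewer.onLoadingProgressChanged.add(() => {
//         const progress = viewer.loadingProgress; // false, true, or a number in [0, 1]
//         console.log(progress === false ? "idle" : progress === true ? "loading" : `${Math.round(progress * 100)}%`);
//     });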
get _loadedModels() {
return this._loadedModelsBacking;
}
get _activeModel() {
return this._activeModelBacking;
}
_setActiveModel(...args) {
const [model, options] = args;
if (model !== this._activeModelBacking) {
this._activeModelBacking = model;
this._updateLight();
observePromise(this._updateShadows());
this._updateSSAOPipeline();
this._applyAnimationSpeed();
this._selectAnimation(0, false);
this.onSelectedMaterialVariantChanged.notifyObservers();
this._reframeCamera(true, model ? [model] : undefined);
this.onModelChanged.notifyObservers(options?.source ?? null);
}
}
async _enableSSAOPipeline(mode) {
const hasModels = this._loadedModels.length > 0;
const hasMaterials = this._loadedModels.some((model) => model.assetContainer.materials.length > 0);
if (mode === "enabled" || (mode === "auto" && hasModels && !hasMaterials)) {
const [{ SSAO2RenderingPipeline }] = await LazySSAODependenciesPromise.value;
if (!this._ssaoPipeline) {
this._scene.postProcessRenderPipelineManager.onNewPipelineAddedObservable.add((pipeline) => {
if (pipeline.name === "ssao") {
this.onPostProcessingChanged.notifyObservers();
}
});
this._scene.postProcessRenderPipelineManager.onPipelineRemovedObservable.add((pipeline) => {
if (pipeline.name === "ssao") {
this.onPostProcessingChanged.notifyObservers();
}
});
}
const ssaoRatio = {
ssaoRatio: 1,
blurRatio: 1,
};
this._ssaoPipeline = new SSAO2RenderingPipeline("ssao", this._scene, ssaoRatio);
const worldBounds = this._getWorldBounds(this._loadedModels);
if (this._ssaoPipeline && worldBounds) {
const size = Vector3.FromArray(worldBounds.size).length();
this._ssaoPipeline.expensiveBlur = true;
this._ssaoPipeline.maxZ = size * 2;
// arbitrary max size to cap SSAO settings
const maxSceneSize = 50;
this._ssaoPipeline.radius = Clamp(Lerp(1, 5, Clamp((size - 1) / maxSceneSize, 0, 1)), 1, 5);
this._ssaoPipeline.totalStrength = Clamp(Lerp(0.3, 1.0, Clamp((size - 1) / maxSceneSize, 0, 1)), 0.3, 1.0);
this._ssaoPipeline.samples = Math.round(Clamp(Lerp(8, 32, Clamp((size - 1) / maxSceneSize, 0, 1)), 8, 32));
}
this._scene.postProcessRenderPipelineManager.attachCamerasToRenderPipeline("ssao", this._camera);
}
}
_disableSSAOPipeline() {
if (this._ssaoPipeline) {
this._scene.postProcessRenderPipelineManager.detachCamerasFromRenderPipeline("ssao", this._camera);
this._scene.postProcessRenderPipelineManager.removePipeline("ssao");
this._ssaoPipeline?.dispose();
this._ssaoPipeline = null;
}
}
_updateSSAOPipeline() {
if (!this._ssaoPipeline && (this._ssaoOption === "auto" || this._ssaoOption === "enabled")) {
observePromise(this._enableSSAOPipeline(this._ssaoOption));
}
else if (this._ssaoOption === "disabled") {
this._disableSSAOPipeline();
}
}
/**
* The list of animation names for the currently loaded model.
*/
get animations() {
return this._activeModel?.assetContainer.animationGroups.map((group) => group.name) ?? [];
}
/**
* The currently selected animation index.
*/
get selectedAnimation() {
return this._activeModel?.selectedAnimation ?? -1;
}
set selectedAnimation(value) {
this._selectAnimation(value, this._loadOperations.size === 0);
}
_selectAnimation(index, interpolateCamera = true) {
index = Math.round(Clamp(index, -1, this.animations.length - 1));
if (this._activeModel && index !== this._activeModel.selectedAnimation) {
this._activeAnimationObservers.forEach((observer) => observer.remove());
this._activeAnimationObservers = [];
this._activeModel.selectedAnimation = index;
if (this._activeAnimation) {
this._activeAnimationObservers = [
this._activeAnimation.onAnimationGroupPlayObservable.add(() => {
this.onIsAnimationPlayingChanged.notifyObservers();
}),
this._activeAnimation.onAnimationGroupPauseObservable.add(() => {
this.onIsAnimationPlayingChanged.notifyObservers();
}),
this._activeAnimation.onAnimationGroupEndObservable.add(() => {
this.onIsAnimationPlayingChanged.notifyObservers();
this.onAnimationProgressChanged.notifyObservers();
}),
];
this._reframeCamera(interpolateCamera);
}
this.onSelectedAnimationChanged.notifyObservers();
this.onAnimationProgressChanged.notifyObservers();
}
}
/**
* True if an animation is currently playing.
*/
get isAnimationPlaying() {
return this._activeModelBacking?._animationPlaying() ?? false;
}
/**
* The speed scale at which animations are played.
*/
get animationSpeed() {
return this._animationSpeed;
}
set animationSpeed(value) {
this._animationSpeed = value;
this._applyAnimationSpeed();
this.onAnimationSpeedChanged.notifyObservers();
}
/**
* The current point on the selected animation timeline, normalized between 0 and 1.
*/
get animationProgress() {
if (this._activeAnimation) {
return this._activeAnimation.getCurrentFrame() / (this._activeAnimation.to - this._activeAnimation.from);
}
return 0;
}
set animationProgress(value) {
if (this._activeAnimation) {
this._activeAnimation.goToFrame(value * (this._activeAnimation.to - this._activeAnimation.from));
this.onAnimationProgressChanged.notifyObservers();
this._autoRotationBehavior.resetLastInteractionTime();
this._markSceneMutated();
}
}
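// Example sketch (assuming an existing `viewer` and a loaded model with animations):
//
//     viewer.selectedAnimation = 0;   // select the first animation (names are listed in `animations`)
//     viewer.animationSpeed = 2;      // play at double speed
//     viewer.animationProgress = 0.5; // jump to the middle of the timeline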
get _activeAnimation() {
return this._activeModel?.assetContainer.animationGroups[this._activeModel?.selectedAnimation] ?? null;
}
/**
* The list of material variant names for the currently loaded model.
*/
get materialVariants() {
return this._activeModel?.materialVariantsController?.variants ?? [];
}
/**
* The currently selected material variant.
*/
get selectedMaterialVariant() {
return this._activeModel?.materialVariantsController?.selectedVariant ?? null;
}
set selectedMaterialVariant(value) {
if (this._activeModel?.materialVariantsController) {
if (!value) {
value = this._activeModel.materialVariantsController.variants[0];
}
if (value !== this.selectedMaterialVariant && this._activeModel.materialVariantsController.variants.includes(value)) {
this._snapshotHelper.disableSnapshotRendering();
this._activeModel.materialVariantsController.selectedVariant = value;
this._snapshotHelper.enableSnapshotRendering();
this._markSceneMutated();
this.onSelectedMaterialVariantChanged.notifyObservers();
}
}
}
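// Example sketch (assuming an existing `viewer` and a model that defines KHR_materials_variants):
//
//     if (viewer.materialVariants.length > 1) {
//         viewer.selectedMaterialVariant = viewer.materialVariants[1];
//     }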
/**
* The set of defined hotspots.
*/
get hotSpots() {
return this._hotSpots;
}
set hotSpots(value) {
this._hotSpots = value;
this.onHotSpotsChanged.notifyObservers();
}
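// Example sketch (assuming an existing `viewer`): a named set of hot spots, here using the "world"
// form produced by CreateHotSpotFromCamera (other hot spot shapes may also be supported):
//
//     viewer.hotSpots = {
//         front: { type: "world", position: [0, 1, 2], normal: [0, 0, 1], cameraOrbit: [Math.PI / 2, 1.2, 3] },
//     };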
/**
* True if scene cameras should be used as hotspots.
*/
get camerasAsHotSpots() {
return this._camerasAsHotSpots;
}
set camerasAsHotSpots(value) {
if (this._camerasAsHotSpots !== value) {
this._camerasAsHotSpots = value;
this._toggleCamerasAsHotSpots();
this.onCamerasAsHotSpotsChanged.notifyObservers();
}
}
_beginLoadOperation() {
// eslint-disable-next-line @typescript-eslint/no-this-alias
const viewer = this;
let progress = null;
const loadOperation = {
get progress() {
return progress;
},
set progress(value) {
progress = value;
viewer.onLoadingProgressChanged.notifyObservers();
},
dispose: () => {
viewer._loadOperations.delete(loadOperation);
viewer.onLoadingProgressChanged.notifyObservers();
},
};
this._loadOperations.add(loadOperation);
this.onLoadingProgressChanged.notifyObservers();
return loadOperation;
}
/**
* Loads a 3D model from the specified URL.
* @remarks
* If a model is already loaded, it will be unloaded before loading the new model.
* @param source A URL, File, or ArrayBufferView that points to the model to load.
* @param options The options to use when loading the model.
* @param abortSignal An optional signal that can be used to abort the loading process.
*/
async loadModel(source, options, abortSignal) {
await this._updateModel(source, options, abortSignal);
}
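// Example sketch (assuming an existing `viewer`; the URL is a placeholder):
//
//     const abortController = new AbortController();
//     await viewer.loadModel("https://example.com/model.glb", {
//         onProgress: (event) => console.log(event.lengthComputable ? event.loaded / event.total : "indeterminate"),
//     }, abortController.signal);
//     // abortController.abort() cancels an in-flight load.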
/**
* Unloads the current 3D model if one is loaded.
* @param abortSignal An optional signal that can be used to abort the reset.
*/
async resetModel(abortSignal) {
await this._updateModel(undefined, undefined, abortSignal);
}
async _loadModel(source, options, abortSignal) {
this._throwIfDisposedOrAborted(abortSignal);
const loadOperation = this._beginLoadOperation();
const originalOnProgress = options?.onProgress;
const onProgress = (event) => {
originalOnProgress?.(event);
loadOperation.progress = event.lengthComputable ? event.loaded / event.total : null;
};
delete options?.onProgress;
let materialVariantsController = null;
const originalOnMaterialVariantsLoaded = options?.pluginOptions?.gltf?.extensionOptions?.KHR_materials_variants?.onLoaded;
const onMaterialVariantsLoaded = (controller) => {
originalOnMaterialVariantsLoaded?.(controller);
materialVariantsController = controller;
};
delete options?.pluginOptions?.gltf?.extensionOptions?.KHR_materials_variants?.onLoaded;
const defaultOptions = {
// Pass a progress callback to update the loading progress.
onProgress,
pluginOptions: {
gltf: {
// Enable transparency as coverage by default to be 3D Commerce compliant by default.
// https://doc.babylonjs.com/setup/support/3D_commerce_certif
transparencyAsCoverage: true,
extensionOptions: {
// eslint-disable-next-line @typescript-eslint/naming-convention
KHR_materials_variants: {
// Capture the material variants controller when it is loaded.
onLoaded: onMaterialVariantsLoaded,
},
},
},
},
};
options = deepMerge(defaultOptions, options ?? {});
this._snapshotHelper.disableSnapshotRendering();
try {
const assetContainer = await LoadAssetContainerAsync(source, this._scene, options);
RemoveUnreferencedVerticesData(assetContainer.meshes.filter((mesh) => mesh instanceof Mesh));
assetContainer.animationGroups.forEach((group) => {
group.start(true, this.animationSpeed);
group.pause();
});
assetContainer.addAllToScene();
this._snapshotHelper.fixMeshes(assetContainer.meshes);
let selectedAnimation = -1;
const cachedWorldBounds = [];
// eslint-disable-next-line @typescript-eslint/no-this-alias
const viewer = this;
const model = {
assetContainer,
materialVariantsController,
_animationPlaying: () => {
const activeAnimation = assetContainer.animationGroups[selectedAnimation];
return activeAnimation?.isPlaying ?? false;
},
_shouldRender: () => {
const stillTransitioning = model?.assetContainer.animationGroups.some((group) => group.animatables.some((animatable) => animatable.animationStarted));
// Should render if:
// 1. An animation is playing.
// 2. Animation is paused, but any individual animatable hasn't transitioned to a paused state yet.
return model._animationPlaying() || stillTransitioning;
},
getHotSpotToRef: (query, result) => {
return this._getHotSpotToRef(assetContainer, query, result);
},
dispose: () => {
this._snapshotHelper.disableSnapshotRendering();
assetContainer.meshes.forEach((mesh) => this._meshDataCache.delete(mesh));
assetContainer.dispose();
const index = this._loadedModelsBacking.indexOf(model);
if (index !== -1) {
this._loadedModelsBacking.splice(index, 1);
if (model === this._activeModel) {
this._setActiveModel(null);
}
}
this._snapshotHelper.enableSnapshotRendering();
this._markSceneMutated();
},
getWorldBounds: (animationIndex = selectedAnimation) => {
let worldBounds = cachedWorldBounds[animationIndex];
if (!worldBounds) {
worldBounds = computeModelsBoundingInfos([model]);
if (worldBounds) {
cachedWorldBounds[animationIndex] = worldBounds;
}
}
return worldBounds;
},
resetWorldBounds: () => {
cachedWorldBounds.length = 0;
},
get selectedAnimation() {
return selectedAnimation;
},
set selectedAnimation(index) {
let activeAnimation = assetContainer.animationGroups[selectedAnimation];
const startAnimation = activeAnimation?.isPlaying ?? false;
if (activeAnimation) {
activeAnimation.pause();
activeAnimation.goToFrame(0);
}
selectedAnimation = index;
activeAnimation = assetContainer.animationGroups[selectedAnimation];
observePromise(viewer._updateShadows());
if (activeAnimation) {
activeAnimation.goToFrame(0);
activeAnimation.play(true);
if (!startAnimation) {
activeAnimation.pause();
}
}
},
makeActive: (options) => {
this._setActiveModel(model, options);
},
};
this._loadedModelsBacking.push(model);
return model;
}
catch (e) {
this.onModelError.notifyObservers(e);
throw e;
}
finally {
loadOperation.dispose();
this._snapshotHelper.enableSnapshotRendering();
this._markSceneMutated();
}
}
async _updateModel(source, options, abortSignal) {
this._throwIfDisposedOrAborted(abortSignal);
this._loadModelAbortController?.abort(new AbortError("New model is being loaded before previous model finished loading."));
const abortController = (this._loadModelAbortController = new AbortController());
await this._loadModelLock.lockAsync(async () => {
throwIfAborted(abortSignal, abortController.signal);
this._activeModel?.dispose();
this._activeModelBacking = null;
this.selectedAnimation = -1;
if (source) {
const model = await this._loadModel(source, options, abortController.signal);
model.makeActive(Object.assign({ source, interpolateCamera: false }, options));
this._reset(false, "camera", "animation", "material-variant");
}
});
const hasPBRMaterials = this._loadedModels.some((model) => model.assetContainer.materials.some((material) => material instanceof PBRMaterial));
const usesDefaultMaterial = this._loadedModels.some((model) => model.assetContainer.meshes.some((mesh) => !mesh.material));
// If PBR is used (either explicitly, or implicitly by a mesh not having a material and therefore using the default PBRMaterial)
// and an environment texture is not already loaded, then load the default environment.
if (!this._scene.environmentTexture && (hasPBRMaterials || usesDefaultMaterial)) {
await this.resetEnvironment({ lighting: true }, abortSignal);
}
this._startSceneOptimizer(true);
}
async _updateShadows() {
this._shadowsAbortController?.abort(new AbortError("Shadow quality is being changed before the previous shadows finished initializing."));
const abortController = (this._shadowsAbortController = new AbortController());
await this._updateShadowsLock.lockAsync(async () => {
if (this._shadowQuality === "none") {
this._disposeShadows();
}
else {
// make sure there is an env light before creating shadows
if (!this._reflectionTexture) {
await this.loadEnvironment("auto", { lighting: true, skybox: false });
}
if (this._shadowQuality === "normal") {