@babylonjs/core
Version: (not captured in this extract — see the package registry entry for @babylonjs/core)
Getting started? Play directly with the Babylon.js API using our [playground](https://playground.babylonjs.com/). It also contains a lot of samples to learn how to use it.
378 lines (377 loc) • 13.9 kB
TypeScript
import type { Nullable } from "../../../types.js";
import type { IAnimatable } from "../../../Animations/animatable.interface.js";
import type { Camera } from "../../../Cameras/camera.js";
import { Texture } from "../../../Materials/Textures/texture.js";
import { PostProcess } from "../../../PostProcesses/postProcess.js";
import { PostProcessRenderPipeline } from "../../../PostProcesses/RenderPipeline/postProcessRenderPipeline.js";
import { BlurPostProcess } from "../../../PostProcesses/blurPostProcess.js";
import { FxaaPostProcess } from "../../../PostProcesses/fxaaPostProcess.js";
import type { IDisposable, Scene } from "../../../scene.js";
import type { SpotLight } from "../../../Lights/spotLight.js";
import type { DirectionalLight } from "../../../Lights/directionalLight.js";
import { ScreenSpaceReflectionPostProcess } from "../../screenSpaceReflectionPostProcess.js";
import type { Animation } from "../../../Animations/animation.js";
import "../../../PostProcesses/RenderPipeline/postProcessRenderPipelineManagerSceneComponent.js";
import "../../../Shaders/standard.fragment.js";
/**
* Standard rendering pipeline
* Default pipeline should be used going forward but the standard pipeline will be kept for backwards compatibility.
* @see https://doc.babylonjs.com/features/featuresDeepDive/postProcesses/standardRenderingPipeline
*/
export declare class StandardRenderingPipeline extends PostProcessRenderPipeline implements IDisposable, IAnimatable {
    /**
     * Public members
     */
    /**
     * Post-process which contains the original scene color before the pipeline applies all the effects
     */
    originalPostProcess: Nullable<PostProcess>;
    /**
     * Post-process used to down scale an image x4
     */
    downSampleX4PostProcess: Nullable<PostProcess>;
    /**
     * Post-process used to calculate the illuminated surfaces controlled by a threshold
     */
    brightPassPostProcess: Nullable<PostProcess>;
    /**
     * Post-process array storing all the horizontal blur post-processes used by the pipeline
     */
    blurHPostProcesses: PostProcess[];
    /**
     * Post-process array storing all the vertical blur post-processes used by the pipeline
     */
    blurVPostProcesses: PostProcess[];
    /**
     * Post-process used to add colors of 2 textures (typically brightness + real scene color)
     */
    textureAdderPostProcess: Nullable<PostProcess>;
    /**
     * Post-process used to create volumetric lighting effect
     */
    volumetricLightPostProcess: Nullable<PostProcess>;
    /**
     * Post-process used to smooth the previous volumetric light post-process on the X axis
     */
    volumetricLightSmoothXPostProcess: Nullable<BlurPostProcess>;
    /**
     * Post-process used to smooth the previous volumetric light post-process on the Y axis
     */
    volumetricLightSmoothYPostProcess: Nullable<BlurPostProcess>;
    /**
     * Post-process used to merge the volumetric light effect and the real scene color
     * (NOTE: the property name contains a historical typo — "PostProces" — kept for backwards compatibility)
     */
    volumetricLightMergePostProces: Nullable<PostProcess>;
    /**
     * Post-process used to store the final volumetric light post-process (attach/detach for debug purpose)
     */
    volumetricLightFinalPostProcess: Nullable<PostProcess>;
    /**
     * Base post-process used to calculate the average luminance of the final image for HDR
     */
    luminancePostProcess: Nullable<PostProcess>;
    /**
     * Post-processes used to create down sample post-processes in order to get
     * the average luminance of the final image for HDR
     * Array of length "StandardRenderingPipeline.LuminanceSteps"
     */
    luminanceDownSamplePostProcesses: PostProcess[];
    /**
     * Post-process used to create a HDR effect (light adaptation)
     */
    hdrPostProcess: Nullable<PostProcess>;
    /**
     * Post-process used to store the final texture adder post-process (attach/detach for debug purpose)
     */
    textureAdderFinalPostProcess: Nullable<PostProcess>;
    /**
     * Post-process used to store the final lens flare post-process (attach/detach for debug purpose)
     */
    lensFlareFinalPostProcess: Nullable<PostProcess>;
    /**
     * Post-process used to merge the final HDR post-process and the real scene color
     */
    hdrFinalPostProcess: Nullable<PostProcess>;
    /**
     * Post-process used to create a lens flare effect
     */
    lensFlarePostProcess: Nullable<PostProcess>;
    /**
     * Post-process that merges the result of the lens flare post-process and the real scene color
     */
    lensFlareComposePostProcess: Nullable<PostProcess>;
    /**
     * Post-process used to create a motion blur effect
     */
    motionBlurPostProcess: Nullable<PostProcess>;
    /**
     * Post-process used to create a depth of field effect
     */
    depthOfFieldPostProcess: Nullable<PostProcess>;
    /**
     * The Fast Approximate Anti-Aliasing post process which attempts to remove aliasing from an image.
     */
    fxaaPostProcess: Nullable<FxaaPostProcess>;
    /**
     * Post-process used to simulate realtime reflections using the screen space and geometry renderer.
     */
    screenSpaceReflectionPostProcess: Nullable<ScreenSpaceReflectionPostProcess>;
    /**
     * Represents the brightness threshold in order to configure the illuminated surfaces
     */
    brightThreshold: number;
    /**
     * Configures the blur intensity used for overexposed (highlighted) surfaces — i.e. the light halo
     */
    blurWidth: number;
    /**
     * Sets if the blur for highlighted surfaces must be only horizontal
     */
    horizontalBlur: boolean;
    /**
     * Gets the overall exposure used by the pipeline
     */
    get exposure(): number;
    /**
     * Sets the overall exposure used by the pipeline
     */
    set exposure(value: number);
    /**
     * Texture used typically to simulate "dirt" on the camera lens
     */
    lensTexture: Nullable<Texture>;
    /**
     * Represents the offset coefficient based on Rayleigh principle. Typically in interval [-0.2, 0.2]
     */
    volumetricLightCoefficient: number;
    /**
     * The overall power of volumetric lights, typically in interval [0, 10] maximum
     */
    volumetricLightPower: number;
    /**
     * Used to set the blur intensity applied to smooth the volumetric lights
     */
    volumetricLightBlurScale: number;
    /**
     * Light (spot or directional) used to generate the volumetric lights rays
     * The source light must have a shadow generator so the pipeline can get its
     * depth map
     */
    sourceLight: Nullable<SpotLight | DirectionalLight>;
    /**
     * For eye adaptation, represents the minimum luminance the eye can see
     */
    hdrMinimumLuminance: number;
    /**
     * For eye adaptation, represents the decrease luminance speed
     */
    hdrDecreaseRate: number;
    /**
     * For eye adaptation, represents the increase luminance speed
     */
    hdrIncreaseRate: number;
    /**
     * Gets whether or not the exposure of the overall pipeline should be automatically adjusted by the HDR post-process
     */
    get hdrAutoExposure(): boolean;
    /**
     * Sets whether or not the exposure of the overall pipeline should be automatically adjusted by the HDR post-process
     */
    set hdrAutoExposure(value: boolean);
    /**
     * Lens color texture used by the lens flare effect. Mandatory if lens flare effect enabled
     */
    lensColorTexture: Nullable<Texture>;
    /**
     * The overall strength for the lens flare effect
     */
    lensFlareStrength: number;
    /**
     * Dispersion coefficient for lens flare ghosts
     */
    lensFlareGhostDispersal: number;
    /**
     * Main lens flare halo width
     */
    lensFlareHaloWidth: number;
    /**
     * Based on the lens distortion effect, defines how much the lens flare result
     * is distorted
     */
    lensFlareDistortionStrength: number;
    /**
     * Configures the blur intensity used for the lens flare (halo)
     */
    lensFlareBlurWidth: number;
    /**
     * Lens star texture must be used to simulate rays on the flares and is available
     * in the documentation
     */
    lensStarTexture: Nullable<Texture>;
    /**
     * As the "lensTexture" (can be the same texture or different), it is used to apply the lens
     * flare effect by taking account of the dirt texture
     */
    lensFlareDirtTexture: Nullable<Texture>;
    /**
     * Represents the focal length for the depth of field effect
     */
    depthOfFieldDistance: number;
    /**
     * Represents the blur intensity for the blurred part of the depth of field effect
     */
    depthOfFieldBlurWidth: number;
    /**
     * Gets how much the image is blurred by the movement while using the motion blur post-process
     */
    get motionStrength(): number;
    /**
     * Sets how much the image is blurred by the movement while using the motion blur post-process
     */
    set motionStrength(strength: number);
    /**
     * Gets whether or not the motion blur post-process is object based or screen based.
     */
    get objectBasedMotionBlur(): boolean;
    /**
     * Sets whether or not the motion blur post-process should be object based or screen based
     */
    set objectBasedMotionBlur(value: boolean);
    /**
     * List of animations for the pipeline (IAnimatable implementation)
     */
    animations: Animation[];
    /**
     * Private members
     */
    private _scene;
    private _currentDepthOfFieldSource;
    private _basePostProcess;
    private _fixedExposure;
    private _currentExposure;
    private _hdrAutoExposure;
    private _hdrCurrentLuminance;
    private _motionStrength;
    private _isObjectBasedMotionBlur;
    private _floatTextureType;
    private _camerasToBeAttached;
    private _ratio;
    private _bloomEnabled;
    private _depthOfFieldEnabled;
    private _vlsEnabled;
    private _lensFlareEnabled;
    private _hdrEnabled;
    private _motionBlurEnabled;
    private _fxaaEnabled;
    private _screenSpaceReflectionsEnabled;
    private _motionBlurSamples;
    private _volumetricLightStepsCount;
    private _samples;
    /**
     * @ignore
     * Specifies if the bloom pipeline is enabled
     */
    get BloomEnabled(): boolean;
    set BloomEnabled(enabled: boolean);
    /**
     * @ignore
     * Specifies if the depth of field pipeline is enabled
     */
    get DepthOfFieldEnabled(): boolean;
    set DepthOfFieldEnabled(enabled: boolean);
    /**
     * @ignore
     * Specifies if the lens flare pipeline is enabled
     */
    get LensFlareEnabled(): boolean;
    set LensFlareEnabled(enabled: boolean);
    /**
     * @ignore
     * Specifies if the HDR pipeline is enabled
     */
    get HDREnabled(): boolean;
    set HDREnabled(enabled: boolean);
    /**
     * @ignore
     * Specifies if the volumetric lights scattering effect is enabled
     */
    get VLSEnabled(): boolean;
    set VLSEnabled(enabled: boolean);
    /**
     * @ignore
     * Specifies if the motion blur effect is enabled
     */
    get MotionBlurEnabled(): boolean;
    set MotionBlurEnabled(enabled: boolean);
    /**
     * Specifies if anti-aliasing is enabled
     */
    get fxaaEnabled(): boolean;
    set fxaaEnabled(enabled: boolean);
    /**
     * Specifies if screen space reflections are enabled.
     */
    get screenSpaceReflectionsEnabled(): boolean;
    set screenSpaceReflectionsEnabled(enabled: boolean);
    /**
     * Specifies the number of steps used to calculate the volumetric lights
     * Typically in interval [50, 200]
     */
    get volumetricLightStepsCount(): number;
    set volumetricLightStepsCount(count: number);
    /**
     * Specifies the number of samples used for the motion blur effect
     * Typically in interval [16, 64]
     */
    get motionBlurSamples(): number;
    set motionBlurSamples(samples: number);
    /**
     * Specifies MSAA sample count, setting this to 4 will provide 4x anti aliasing. (default: 1)
     */
    get samples(): number;
    set samples(sampleCount: number);
    /**
     * Default pipeline should be used going forward but the standard pipeline will be kept for backwards compatibility.
     * @constructor
     * @param name The rendering pipeline name
     * @param scene The scene linked to this pipeline
     * @param ratio The size of the postprocesses (0.5 means that your postprocess will have a width = canvas.width * 0.5 and a height = canvas.height * 0.5)
     * @param originalPostProcess the custom original color post-process. Must be "reusable". Can be null.
     * @param cameras The array of cameras that the rendering pipeline will be attached to
     */
    constructor(name: string, scene: Scene, ratio: number, originalPostProcess?: Nullable<PostProcess>, cameras?: Camera[]);
    private _buildPipeline;
    private _createDownSampleX4PostProcess;
    private _createBrightPassPostProcess;
    private _createBlurPostProcesses;
    private _createTextureAdderPostProcess;
    private _createVolumetricLightPostProcess;
    private _createLuminancePostProcesses;
    private _createHdrPostProcess;
    private _createLensFlarePostProcess;
    private _createDepthOfFieldPostProcess;
    private _createMotionBlurPostProcess;
    private _getDepthTexture;
    private _disposePostProcesses;
    /**
     * Dispose of the pipeline and stop all post processes
     */
    dispose(): void;
    /**
     * Serialize the rendering pipeline (Used when exporting)
     * @returns the serialized object
     */
    serialize(): any;
    /**
     * Parse the serialized pipeline
     * @param source Source pipeline.
     * @param scene The scene to load the pipeline to.
     * @param rootUrl The URL of the serialized pipeline.
     * @returns An instantiated pipeline from the serialized object.
     */
    static Parse(source: any, scene: Scene, rootUrl: string): StandardRenderingPipeline;
    /**
     * Luminance steps
     */
    static LuminanceSteps: number;
}