@threepipe/webgi-plugins

WebGi - Realistic Rendering Plugins for ThreePipe.
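The file shown below is the package's version-3 source map for the bundled index.mjs; its sourcesContent embeds the GLSL shaders and TypeScript sources for plugins such as TemporalAAPlugin, VelocityBufferPlugin, BloomPlugin, SSReflectionPlugin, DepthOfFieldPlugin, SSGIPlugin, AnisotropyPlugin and AdvancedGroundPlugin. As a hypothetical usage sketch (not part of this file), these plugins are registered on a threepipe viewer roughly as follows; the constructor defaults and the asset path are assumptions:

// Hypothetical sketch: registering the WebGi plugins with a ThreePipe viewer.
import {GBufferPlugin, ProgressivePlugin, ThreeViewer} from 'threepipe'
import {BloomPlugin, SSReflectionPlugin, TemporalAAPlugin, VelocityBufferPlugin} from '@threepipe/webgi-plugins'

const viewer = new ThreeViewer({canvas: document.getElementById('canvas') as HTMLCanvasElement})

// TemporalAAPlugin lists GBufferPlugin and ProgressivePlugin as dependencies
// (see its `dependencies` field in the source below), so they are added first here.
viewer.addPluginSync(new GBufferPlugin())
viewer.addPluginSync(new ProgressivePlugin())
viewer.addPluginSync(new TemporalAAPlugin())
// VelocityBufferPlugin registers its velocity-buffer unpack extension with the TAA pass when both are present.
viewer.addPluginSync(new VelocityBufferPlugin())
viewer.addPluginSync(new BloomPlugin())
viewer.addPluginSync(new SSReflectionPlugin())

viewer.load('path/to/model.glb') // hypothetical asset path; load() returns a Promise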

{"version":3,"file":"index.mjs","sources":["../src/plugins/buffer/shaders/VelocityBufferPlugin.unpack.glsl","../src/plugins/buffer/shaders/VelocityBufferPlugin.mat.vert.glsl","../src/plugins/buffer/shaders/VelocityBufferPlugin.mat.frag.glsl","../src/utils/shaders/computeScreenSpaceVelocity.glsl","../src/utils/shaders/getWorldPositionFromViewZ.glsl","../src/utils/shaders/temporalaa.glsl","../src/utils/shaders/ssreflection.glsl","../src/utils/shaders/ssrtao.glsl","../src/utils/shaders.ts","../src/plugins/postprocessing/TemporalAAPlugin.ts","../src/plugins/buffer/VelocityBufferPlugin.ts","../src/plugins/postprocessing/shaders/hdrBloom.glsl","../src/plugins/postprocessing/BloomPlugin.ts","../src/plugins/postprocessing/shaders/dofCombine.glsl","../src/plugins/postprocessing/shaders/dofPoissonBox.glsl","../src/plugins/postprocessing/shaders/poissonDiskSamples.glsl","../src/plugins/postprocessing/shaders/dofComputeCoC.glsl","../src/plugins/postprocessing/shaders/dofExpandCoC.glsl","../src/plugins/postprocessing/DepthOfFieldPlugin.ts","../src/plugins/postprocessing/shaders/ssrt.glsl","../src/plugins/postprocessing/SSContactShadowsPlugin.ts","../src/plugins/postprocessing/shaders/samplePointHelpers.glsl","../src/plugins/postprocessing/shaders/ssrPatch.glsl","../src/plugins/postprocessing/shaders/ssreflectionMain.glsl","../src/plugins/postprocessing/SSReflectionPlugin.ts","../src/plugins/postprocessing/shaders/outline.glsl","../src/plugins/postprocessing/shaders/outlineDepthVert.glsl","../src/plugins/postprocessing/shaders/outlineDepthFrag.glsl","../src/plugins/postprocessing/OutlinePlugin.ts","../src/plugins/postprocessing/shaders/giPatch.glsl","../src/passes/shaders/ssaoBilateral.glsl","../src/passes/BilateralFilterPass.ts","../src/plugins/postprocessing/SSGIPlugin.ts","../src/plugins/extras/shaders/anisotropyBsdf.glsl","../src/plugins/extras/shaders/anisotropyTBN.glsl","../src/plugins/extras/AnisotropyPlugin.ts","../src/utils/shaders/poissonDiskSamples.glsl","../src/utils/shaders/reflectorSample.glsl","../src/utils/Reflector2.ts","../node_modules/rand-seed/dist/es/index.js","../src/utils/RandomizedDirectionalLight.ts","../src/utils/FSShadowMaterial.ts","../src/utils/shaders/seperableShadowBlur.glsl","../src/utils/ShadowMapBaker.ts","../src/plugins/extras/AdvancedGroundPlugin.ts","../src/plugins/extras/WatchHandsPlugin.ts","../src/plugins/index.ts"],"sourcesContent":["#if defined(HAS_VELOCITY_BUFFER)\nuniform sampler2D tVelocity;\nvec2 getVelocity(const in vec2 uv) {\n vec2 screenSpaceVelocity = texture2D(tVelocity, uv).xy * 2.0 - 1.0;\n return sign(screenSpaceVelocity) * pow(abs(screenSpaceVelocity), vec2(4.));\n}\n#endif\n\n","#ifdef USE_ALPHAMAP\n#define USE_UV\n#endif\n#include <uv_pars_vertex>\n#include <morphtarget_pars_vertex>\n#include <skinning_pars_vertex>\n#include <logdepthbuf_pars_vertex>\n#include <clipping_planes_pars_vertex>\n\n//varying vec3 vViewPosition;\n\nvarying vec3 vWorldPosition;\nvarying vec3 vWorldPositionPrevious;\n\nuniform mat4 modelMatrixPrevious;\n\nvoid main() {\n\n #include <uv_vertex>\n #include <skinbase_vertex>\n\n #include <begin_vertex>\n #include <morphtarget_vertex>\n #include <skinning_vertex>\n #include <displacementmap_vertex>\n\n // project_vertex\n\n vec4 mvPosition = vec4( transformed, 1.0 );\n\n #ifdef USE_INSTANCING\n\n mvPosition = instanceMatrix * mvPosition;\n\n #endif\n\n vWorldPosition = (modelMatrix * mvPosition).xyz;\n vWorldPositionPrevious = (modelMatrixPrevious * mvPosition).xyz;\n\n mvPosition = modelViewMatrix * mvPosition;\n\n 
gl_Position = projectionMatrix * mvPosition;\n\n #include <logdepthbuf_vertex>\n #include <clipping_planes_vertex>\n\n // vViewPosition = - mvPosition.xyz;\n\n}\n","varying vec3 vWorldPosition;\nvarying vec3 vWorldPositionPrevious;\nuniform mat4 currentProjectionViewMatrix;\nuniform mat4 lastProjectionViewMatrix;\n\nvec2 computeScreenSpaceVelocity2() {\n vec4 currentPositionClip = currentProjectionViewMatrix * vec4(vWorldPosition, 1.0);\n vec4 prevPositionClip = lastProjectionViewMatrix * vec4(vWorldPositionPrevious, 1.0);\n\n vec2 currentPositionNDC = currentPositionClip.xy / currentPositionClip.w;\n vec2 prevPositionNDC = prevPositionClip.xy / prevPositionClip.w;\n\n if(prevPositionNDC.x >= 1.0 || prevPositionNDC.x <= -1.0 || prevPositionNDC.x >= 1.0 || prevPositionNDC.y <= -1.0) {\n return vec2(0.0);\n }\n return 0.5 * (currentPositionNDC - prevPositionNDC);\n}\n\nvoid main() {\n vec2 velocity = clamp(computeScreenSpaceVelocity2(), -1.0, 1.0);\n velocity = sign(velocity) * pow(abs(velocity), vec2(1./4.));\n velocity = velocity * 0.5 + 0.5;\n gl_FragColor = vec4(velocity.x, velocity.y, 1., 1.);\n\n // float speed = length(computeScreenSpaceVelocity2());\n // gl_FragColor = vec4(speed, speed, speed, 1.);\n}\n","\nuniform mat4 lastProjectionViewMatrix;\nuniform mat4 currentProjectionViewMatrix;\n\nvec2 computeScreenSpaceVelocity(const in vec3 worldPosition) {\n vec4 currentPositionClip = currentProjectionViewMatrix * vec4(worldPosition, 1.0);\n vec4 prevPositionClip = lastProjectionViewMatrix * vec4(worldPosition, 1.0);\n\n vec2 currentPositionNDC = currentPositionClip.xy / currentPositionClip.w;\n vec2 prevPositionNDC = prevPositionClip.xy / prevPositionClip.w;\n\n if(prevPositionNDC.x >= 1.0 || prevPositionNDC.x <= -1.0 || prevPositionNDC.x >= 1.0 || prevPositionNDC.y <= -1.0) {\n return vec2(0.0);\n }\n return 0.5 * (currentPositionNDC - prevPositionNDC);\n}\n","\nuniform mat4 inverseViewMatrix;\n\nvec3 getWorldPositionFromViewZ(const in vec2 uv, const in float viewDepth) {\n vec2 uv_ = 2. 
* uv - 1.;\n float xe = -(uv_.x + projection[2][0]) * viewDepth/projection[0][0];\n float ye = -(uv_.y + projection[2][1]) * viewDepth/projection[1][1];\n return (inverseViewMatrix * vec4(xe, ye, viewDepth, 1.)).xyz;\n}\n","#include <common>\n#include <gbuffer_unpack>\n\n#ifdef HAS_VELOCITY_BUFFER\n#pragma <velocity_unpack>\n#else\n#define HAS_VELOCITY_BUFFER 0\n#endif\n\n#include <cameraHelpers>\n\nvarying vec2 vUv;\nuniform vec2 previousRTSize;\nuniform vec2 jitterSample;\nuniform vec2 feedBack;\nuniform bool firstFrame;\n\n//float getViewZ( const in float depth ) {\n//\t#if PERSPECTIVE_CAMERA == 1\n//\treturn perspectiveDepthToViewZ( depth, cameraNearFar.x, cameraNearFar.y );\n//\t#else\n//\treturn orthoDepthToViewZ( depth, cameraNearFar.x, cameraNearFar.y );\n//\t#endif\n//}\n\nvec3 find_closest_fragment_3x3(const in vec2 uv) {\n const vec3 offset = vec3(-1.0, 1.0, 0.0);\n vec2 texelSize = 1.0/previousRTSize;\n\n vec3 dtr = vec3( 1, 1, getDepth( uv + offset.yy * texelSize) );\n vec3 dtc = vec3( 0, 1, getDepth( uv + offset.zy * texelSize) );\n vec3 dtl = vec3( -1, 1, getDepth( uv + offset.xy * texelSize) );\n\n vec3 dml = vec3(-1, 0, getDepth( uv + offset.xz * texelSize) );\n vec3 dmc = vec3( 0, 0, getDepth( uv ) );\n vec3 dmr = vec3( 1, 0, getDepth( uv + offset.yz * texelSize) );\n\n vec3 dbl = vec3(-1, -1, getDepth( uv + offset.xx * texelSize) );\n vec3 dbc = vec3( 0, -1, getDepth( uv + offset.zx * texelSize) );\n vec3 dbr = vec3( 1, -1, getDepth( uv + offset.yx * texelSize) );\n\n vec3 dmin = dtl;\n if ( dmin.z > dtc.z ) dmin = dtc;\n if ( dmin.z > dtr.z ) dmin = dtr;\n\n if ( dmin.z > dml.z ) dmin = dml;\n if ( dmin.z > dmc.z ) dmin = dmc;\n if ( dmin.z > dmr.z ) dmin = dmr;\n\n if ( dmin.z > dbl.z ) dmin = dbl;\n if ( dmin.z > dbc.z ) dmin = dbc;\n if ( dmin.z > dbr.z ) dmin = dbr;\n\n return vec3(uv + texelSize.xy * dmin.xy, dmin.z);\n}\n\nvec3 find_closest_fragment_5tap(const in vec2 uv)\n{\n vec2 texelSize = 1.0/previousRTSize;\n vec2 offset = vec2(1.0, -1.0);\n\n vec3 dtl = vec3(-1, 1, getDepth( uv + offset.yx * texelSize) );\n vec3 dtr = vec3( 1, 1, getDepth( uv + offset.xx * texelSize) );\n\n vec3 dmc = vec3( 0, 0, getDepth( uv) );\n\n vec3 dbl = vec3(-1, -1, getDepth( uv + offset.yy * texelSize) );\n vec3 dbr = vec3( 1, -1, getDepth( uv + offset.xy * texelSize) );\n\n vec3 dmin = dtl;\n if ( dmin.z > dtr.z ) dmin = dtr;\n if ( dmin.z > dmc.z ) dmin = dmc;\n\n if ( dmin.z > dbl.z ) dmin = dbl;\n if ( dmin.z > dbr.z ) dmin = dbr;\n\n return vec3(uv + dmin.xy * texelSize, dmin.z);\n}\n\nvec4 clip_aabb(const in vec4 aabb_min, const in vec4 aabb_max, vec4 p )\n{\n const float FLT_EPS = 1e-8;\n vec4 p_clip = 0.5 * (aabb_max + aabb_min);\n vec4 e_clip = 0.5 * (aabb_max - aabb_min) + FLT_EPS;\n\n vec4 v_clip = p - p_clip;\n vec4 v_unit = abs(v_clip / e_clip);\n float ma_unit = max(v_unit.x, max(v_unit.y, v_unit.z));\n\n if (ma_unit > 1.0)\n return p_clip + v_clip / ma_unit;\n else return p;\n}\n\n#if HAS_VELOCITY_BUFFER == 0 || defined(DEBUG_VELOCITY)\n#include <computeScreenSpaceVelocity>\n#include <getWorldPositionFromViewZ>\n#endif\n\nvec4 currentRTTexelToLinear1(vec4 a){\n if(isinf(a.x) || isinf(a.y) || isinf(a.z) || isinf(a.w)){\n return vec4(1.);\n }\n return currentRTTexelToLinear(a);\n}\nvec4 computeTAA(const in vec2 uv, const in vec2 screenSpaceVelocity, const in float feedbackScale) {\n// vec2 jitterOffset = jitterSample/previousRTSize; // todo\n vec2 uvUnJitter = uv;\n\n vec4 currentColor = currentRTTexelToLinear(texture2D(currentRT, uvUnJitter));\n vec4 previousColor = 
previousRTTexelToLinear(texture2D(previousRT, uv - screenSpaceVelocity));\n const vec3 offset = vec3(1., -1., 0.);\n vec2 texelSize = 1./previousRTSize;\n\n float texelSpeed = length( screenSpaceVelocity );\n\n // todo pick only the neighbors which are not background?\n vec4 tl = currentRTTexelToLinear1(texture2D(currentRT, uvUnJitter + offset.yx * texelSize));\n vec4 tc = currentRTTexelToLinear1(texture2D(currentRT, uvUnJitter + offset.zx * texelSize));\n vec4 tr = currentRTTexelToLinear1(texture2D(currentRT, uvUnJitter + offset.xx * texelSize));\n vec4 ml = currentRTTexelToLinear1(texture2D(currentRT, uvUnJitter + offset.yz * texelSize));\n vec4 mc = currentColor;\n vec4 mr = currentRTTexelToLinear1(texture2D(currentRT, uvUnJitter + offset.xz * texelSize));\n vec4 bl = currentRTTexelToLinear1(texture2D(currentRT, uvUnJitter + offset.yy * texelSize));\n vec4 bc = currentRTTexelToLinear1(texture2D(currentRT, uvUnJitter + offset.zy * texelSize));\n vec4 br = currentRTTexelToLinear1(texture2D(currentRT, uvUnJitter + offset.xy * texelSize));\n\n vec4 corners = 2.0 * (tr + bl + br + tl) - 2.0 * mc;\n mc += (mc - (corners * 0.166667)) * 2.718282 * 0.3;\n mc = max(vec4(0.0), mc);\n\n vec4 min5 = min(tc, min(ml, min(mc, min(mr, bc))));\n vec4 max5 = max(tc, max(ml, max(mc, max(mr, bc))));\n\n vec4 cmin = min(min5, min(tl, min(tr, min(bl, br))));\n vec4 cmax = max(min5, max(tl, max(tr, max(bl, br))));;\n\n cmin = 0.5 * (cmin + min5);\n cmax = 0.5 * (cmax + max5);\n previousColor = clip_aabb(cmin, cmax, previousColor);\n\n float lum0 = luminance(currentColor.rgb);\n float lum1 = luminance(previousColor.rgb);\n float unbiased_diff = abs(lum0 - lum1) / max(lum0, max(lum1, 0.2));\n float unbiased_weight = 1.0 - unbiased_diff;\n float unbiased_weight_sqr = unbiased_weight * unbiased_weight;\n float k_feedback = mix(feedBack.x, feedBack.y, unbiased_weight_sqr);\n\n return mix(currentColor, previousColor, clamp(k_feedback * feedbackScale, 0., 1.));\n\n}\n\nvoid main() {\n\n // gl_FragColor.rgb = vec3(getDepth( vUv ));\n // gl_FragColor.a = 1.;\n // return;\n // vec2 jitterOffset = jitterSample/previousRTSize;\n\n #if HAS_VELOCITY_BUFFER == 0 // todo why not using closest fragment in velocity buffer?\n #if QUALITY == 1\n vec3 c_frag = find_closest_fragment_3x3(vUv);\n #else\n vec3 c_frag = find_closest_fragment_5tap(vUv);\n #endif\n #else\n vec3 c_frag = vec3(vUv, 0.);\n #endif\n\n bool bg = firstFrame;\n\n #if BACKGROUND_TAA // this is required for edge artifacts in msaa\n\n// float d = getDepth(vUv);\n float d = c_frag.z;\n float edgef = min(1., max(0., 1.- (d*100. - 99.)));\n\n #else\n\n bg = bg || c_frag.z > 0.999;\n\n #endif\n\n if( bg ) {\n\n gl_FragColor = currentRTTexelToLinear1(texture2D(currentRT, vUv));\n\n } else {\n #if HAS_VELOCITY_BUFFER == 0\n // #if LINEAR_DEPTH == 0\n // float sampleViewZ = getViewZ( c_frag.z );\n // #else\n float sampleViewZ = mix(-cameraNearFar.x, -cameraNearFar.y, c_frag.z);\n // #endif\n vec3 worldPosition = getWorldPositionFromViewZ(c_frag.xy, sampleViewZ);\n vec2 screenSpaceVelocity = computeScreenSpaceVelocity(worldPosition);\n #else\n vec2 screenSpaceVelocity = getVelocity(c_frag.xy);\n #endif\n\n// float previousDepth = getDepth(vUv - screenSpaceVelocity);\n\n// screenSpaceVelocity *= min(1., edgef);\n// screenSpaceVelocity *= (d >= 0.99) ? 0. : 1.;\n// screenSpaceVelocity *= abs(d-previousDepth) > 0.01 ? 0. 
: 1.;\n// screenSpaceVelocity *= 0.;\n\n // todo add velocity scale also\n #if BACKGROUND_TAA\n gl_FragColor = computeTAA(vUv, screenSpaceVelocity * edgef, edgef);\n #else\n gl_FragColor = computeTAA(vUv, screenSpaceVelocity, 1.);\n #endif\n\n// gl_FragColor = firstFrame /* || previousDepth > 0.999*/ ? currentRTTexelToLinear1(texture2D(currentRT, vUv)) : computeTAA(vUv, screenSpaceVelocity, edgef);\n// gl_FragColor = vec4(1. - max(0., (d - 0.9) * 10.0), 0., 0., 1.0);\n// gl_FragColor = vec4(10. * length(screenSpaceVelocity));\n// gl_FragColor = vec4(abs(d-previousDepth) > 0.1, 0., 0., 1.);\n// gl_FragColor = vec4(abs(d-previousDepth) > 0.1, 0., 0., 1.);\n\n }\n\n #include <colorspace_fragment>\n\n #ifdef DEBUG_VELOCITY\n float sampleViewZ = mix(-cameraNearFar.x, -cameraNearFar.y, c_frag.z);\n vec3 worldPosition = getWorldPositionFromViewZ(c_frag.xy, sampleViewZ);\n vec2 screenSpaceVelocity = computeScreenSpaceVelocity(worldPosition);\n// screenSpaceVelocity *= min(1., edgef);\n gl_FragColor = vec4(10. * length(screenSpaceVelocity), 0., 0., 1.);\n #endif\n\n}\n","\n//https://web.archive.org/web/20170808071110/http://graphics.cs.williams.edu/papers/AlchemyHPG11/AlchemyHPG2011-present.pdf\n\n//uniform float opacity;\n//uniform sampler2D tDiffuse;\n//uniform sampler2D tLastFrame;\nuniform float currentFrameCount;\n#ifndef D_frameCount\n#define D_frameCount\nuniform float frameCount;\n#endif\nuniform float objectRadius;\nuniform float radius;\n//uniform float power;\n//uniform float bias;\n//uniform float falloff;\nuniform float tolerance;\nuniform float ssrRoughnessFactor;\nuniform bool autoRadius;\n//uniform bool giEnabled;\n\n#ifndef D_sceneBoundingRadius\n#define D_sceneBoundingRadius\nuniform float sceneBoundingRadius;\n#endif\n\n#if SSREFL_ENABLED == 2 // split mode\nuniform float ssrSplitX;\n#endif\n\n#ifdef HAS_VELOCITY_BUFFER\n#pragma <velocity_unpack>\n#else\n#define HAS_VELOCITY_BUFFER 0\n#endif\n#if HAS_VELOCITY_BUFFER == 0\n#include <computeScreenSpaceVelocity>\n#include <getWorldPositionFromViewZ>\n#endif\n\nvec3 ComputeReflectionL(vec3 N, vec2 E, vec3 V, float rough){\n// vec3 L;\n// L = reflect(normalize(V), N);\n// return L;\n\n float rough4 = rough *rough *rough *rough;\n // importance sampling\n float phi = 2.0 * PI * E.x;\n // float cos_theta = sqrt((1.0 - E.y) / (1.0 + (rough4 - 1.0) * E.y)); // ggx, NOISY\n float cos_theta = pow(max(E.y, 0.000001), rough4 / (2.0 - rough4));// blinn\n float sin_theta = sqrt(max(0., 1.0 - cos_theta * cos_theta));\n vec3 half_vec = vec3(sin_theta * cos(phi), sin_theta * sin(phi), cos_theta);\n vec3 tangentX = normalize(cross(abs(N.z) < 0.999 ? vec3(0.0, 0.0, 1.0) : vec3(1.0, 0.0, 0.0), N));\n vec3 tangentY = cross(N, tangentX);\n half_vec = half_vec.x * tangentX + half_vec.y * tangentY + half_vec.z * N;\n\n // in view space\n vec3 ray_dir = (2.0 * dot(V, half_vec)) * half_vec - V;\n// ray_dir = normalize(ray_dir);\n\n return ray_dir;\n\n}\n\nvec2 GetRandomE(float seed){\n\n vec2 rand_e;// random\n rand_e.x = interleavedGradientNoise(gl_FragCoord.xy, frameCount*34. 
+ seed);\n rand_e.y = fract(rand_e.x * 38.65435);\n // https://www.slideshare.net/DICEStudio/stochastic-screenspace-reflections#p=67\n rand_e.y = mix(rand_e.y, 1.0, 0.7);\n return rand_e;\n\n}\n\nvec4 calculateSSR(in float seed, in vec3 screenPos, in vec3 normal, in float radiusFactor, in float roughness){\n\n// if(roughness > 0.9) return vec4(0.);\n\n vec3 viewPos = screenToView3(screenPos.xy, screenPos.z);\n normal = normalize(normal);\n vec2 E = GetRandomE(seed);\n vec3 L = ComputeReflectionL(normal, E, -normalize(viewPos), roughness * ssrRoughnessFactor);\n\n L = normalize(L);\n// L *= sign(dot(L, normal));\n\n float cameraDist = length(cameraPositionWorld);\n\n float rayLen = objectRadius*sceneBoundingRadius;\n rayLen = autoRadius ?\n // length(viewPos - screenToView3(screenPos.xy + objectRadius/10., screenPos.z)):\n // mix((cameraNearFar.y) + viewPos.z, -viewPos.z - cameraNearFar.x, L.z * 0.5 + 0.5)*objectRadius:\n min(max(mix(\n max(0.0, (cameraDist + rayLen) + viewPos.z),\n max(0.0, -viewPos.z - max(0.0, cameraDist - rayLen)),\n L.z * 0.5 + 0.5), rayLen *0.1), rayLen*5.) :\n rayLen;\n// rayLen = mix((cameraDist + objectRadius) + viewPos.z, -viewPos.z - (cameraDist - objectRadius), L.z * 0.5 + 0.5);\n\n rayLen *= radiusFactor;\n// rayLen = 10.;\n\n float r = interleavedGradientNoise(gl_FragCoord.xy, frameCount + seed);\n\n // jitter rayLen\n // rayLen = rayLen * (.75 + 0.5 * (random(r)));//mix(rayLen*(1.-.5), rayLen*(1.+.5), random(r));\n\n rayLen = max(rayLen, 0.001);\n\n int steps = SSR_STEP_COUNT / (currentFrameCount < float(SSR_LOW_QUALITY_FRAMES) ? 2 : 1);\n vec3 state = vec3(0.,(r+0.5)/float(steps),2.);\n viewPos += normal * max(-0.0001*viewPos.z, 0.001);\n\n vec3 screenHitP = traceRay(viewPos, L * rayLen, tolerance * rayLen, state, steps);\n// return vec4(screenHitP.x);\n// return vec4(-viewPos.z,0,0,1);\n if(state.z < 0.99){\n\n if(currentFrameCount < 1.){\n #if HAS_VELOCITY_BUFFER == 0\n vec3 worldPosition = getWorldPositionFromViewZ(screenHitP.xy, screenHitP.z);\n vec2 screenSpaceVelocity = computeScreenSpaceVelocity(worldPosition);\n #else\n vec2 screenSpaceVelocity = getVelocity(screenHitP.xy);\n #endif\n screenHitP.xy -= screenSpaceVelocity;\n }\n\n vec3 hitColor = (tLastFrameTexelToLinear(texture2D( tLastFrame , screenHitP.xy))).rgb;\n// vec3 hitColor = tDiffuseTexelToLinear(texture2D( tDiffuse, screenHitP.xy )).rgb;\n// vec3 hitNormal = getViewNormal(screenHitP.xy);\n\n float ssrWeight = 1.;\n\n return vec4(hitColor*ssrWeight, 1.);\n }\n\n return vec4(0.);\n}\n\n","\n//https://web.archive.org/web/20170808071110/http://graphics.cs.williams.edu/papers/AlchemyHPG11/AlchemyHPG2011-present.pdf\n\n//uniform float opacity;\n//varying vec2 vUv;\n//#ifndef D_frameCount\n//#define D_frameCount\n//uniform float frameCount;\n//#endif\nuniform float currentFrameCount;\nuniform float intensity;\nuniform float objectRadius;\n//uniform float radius;\nuniform float rayCount;\nuniform float power;\nuniform float bias;\nuniform float falloff;\nuniform float tolerance;\nuniform bool autoRadius;\n\nuniform vec2 screenSize;\n\n#ifndef D_sceneBoundingRadius\n#define D_sceneBoundingRadius\nuniform float sceneBoundingRadius;\n#endif\n\n#ifdef HAS_VELOCITY_BUFFER\n#pragma <velocity_unpack>\n#else\n#define HAS_VELOCITY_BUFFER 0\n#endif\n#if HAS_VELOCITY_BUFFER == 0\n#include <computeScreenSpaceVelocity>\n#include <getWorldPositionFromViewZ>\n#endif\n\nvec3 ComputeUniformL(vec3 N, vec2 E){\n vec3 L;\n L.xy = E;\n L.z = interleavedGradientNoise(gl_FragCoord.xy, currentFrameCount*5.);\n L = L * 2. 
- 1.;\n return L;\n}\n\nvec2 GetRandomE(float seed){\n vec2 rand_e;\n rand_e.x = random3(vec3(gl_FragCoord.xy, currentFrameCount + seed));\n rand_e.y = random3(vec3(gl_FragCoord.yx, rand_e.x + (currentFrameCount)*7.));\n return rand_e;\n}\n\nfloat saturate2(float v, float mx){\n return max(0., min(mx, v));\n}\n\nvec4 calculateGI(in float seed, in vec3 screenPos, in vec3 normal, in float radiusFactor){\n\n vec3 viewPos = screenToView3(screenPos.xy, screenPos.z);\n\n normal = normalize(normal);\n vec2 E = GetRandomE(seed);\n vec3 L = ComputeUniformL(normal, E);\n\n L = normalize(L);\n L *= sign(dot(L, normal));\n\n float cameraDist = length(cameraPositionWorld);\n\n// float rayLen = autoRadius ?\n// length(viewPos - screenToView3(screenPos.xy + objectRadius/10., screenPos.z)):\n// mix((cameraNearFar.y) + viewPos.z, -viewPos.z - cameraNearFar.x, L.z * 0.5 + 0.5)*objectRadius;\n\n float rayLen = objectRadius*sceneBoundingRadius;\n rayLen = autoRadius ?\n // length(viewPos - screenToView3(screenPos.xy + objectRadius/10., screenPos.z)):\n // mix((cameraNearFar.y) + viewPos.z, -viewPos.z - cameraNearFar.x, L.z * 0.5 + 0.5)*objectRadius:\n min(max(mix(\n max(0.0, (cameraDist + rayLen) + viewPos.z),\n max(0.0, -viewPos.z - max(0.0, cameraDist - rayLen)),\n L.z * 0.5 + 0.5), rayLen *0.1), rayLen*5.) :\n rayLen;\n\n// rayLen = min(-viewPos.z, rayLen);\n// rayLen = mix((cameraDist + objectRadius) + viewPos.z, -viewPos.z - (cameraDist - objectRadius), L.z * 0.5 + 0.5);\n\n rayLen *= radiusFactor;\n\n// float r = interleavedGradientNoise(gl_FragCoord.xy, currentFrameCount*14. + seed) + 0.05;\n float r = interleavedGradientNoise(gl_FragCoord.xy, currentFrameCount*14. + seed) + 0.05;\n\n // jitter rayLen\n// rayLen = rayLen * (.75 + 0.5 * (random(r)));//mix(rayLen*(1.-.5), rayLen*(1.+.5), random(r));\n\n rayLen = max(rayLen, 0.001);\n\n vec3 state = vec3(1.,(r+0.5)/float(RTAO_STEP_COUNT),2.);\n viewPos += normal * max(-0.01*viewPos.z, 0.001);\n vec3 screenHitP = traceRay(viewPos, L * rayLen, tolerance * rayLen, state, RTAO_STEP_COUNT);\n\n vec3 viewHitP = screenToView3(screenHitP.xy, screenHitP.z);\n vec3 LRes = viewHitP - viewPos;\n if(state.z > 1.) 
LRes = vec3(9999999.);\n float dist = length(LRes) * falloff;\n\n float EPS = 0.01;\n float zBias = (viewPos.z) * bias;\n float ao = (max(dot(normal, L) + zBias, 0.)) / (dist*dist + EPS);\n\n #if defined(SSGI_ENABLED) && SSGI_ENABLED > 0\n\n if(currentFrameCount < 1.){\n #if HAS_VELOCITY_BUFFER == 0\n vec3 worldPosition = getWorldPositionFromViewZ(screenHitP.xy, screenHitP.z);\n vec2 screenSpaceVelocity = computeScreenSpaceVelocity(worldPosition);\n #else\n vec2 screenSpaceVelocity = getVelocity(screenHitP.xy);\n #endif\n screenHitP.xy -= screenSpaceVelocity;\n }\n\n vec3 hitColor = tLastFrameTexelToLinear(texture2D(tLastFrame, screenHitP.xy)).rgb;\n // vec3 hitColor = tDiffuseTexelToLinear(texture2D( tDiffuse, screenHitP.xy )).rgb;\n vec3 hitNormal = getViewNormal(screenHitP.xy);\n float giWeight = 1.;\n giWeight = saturate2(giWeight / (dist+EPS), 1.);\n giWeight *= saturate2((dot(normal, L)), 1.0);\n giWeight *= saturate2((dot(hitNormal, -L)), 1.0);\n // giWeight *= saturate2((1.-dot(hitNormal, normal) ), 1.1);\n\n return vec4(hitColor*giWeight, ao);\n #endif\n\n return vec4(0,0,0,ao);\n}\n\n\nfloat normpdf(in float x, in float sigma)\n{\n return exp(-0.5*x*x/(sigma*sigma));\n}\n//float normpdf(in float x, in float sigma)\n//{\n// return 0.39894*exp(-0.5*x*x/(sigma*sigma))/sigma;\n//}\n//\n\n\n//uniform bool smoothEnabled; // last frame bilateral denoise\n//uniform vec4 smoothSigma; // color, depth, pixel, normal\n//uniform vec4 smoothScale; // color, depth, pixel, normal\n//#define BILATERAL_KERNEL 2\n\n//uniform vec4 smoothModes; // depthScale, 0, 0,\nvec4 getLastThis(sampler2D tex, float depth, vec3 normal){\n// vec4 smoothSigma = vec4(5,10,2,2);\n vec2 direction = vec2(1,1);\n\n vec4 color = clamp(tLastThisTexelToLinear(texture2D(tex, vUv.xy)), 0., 5.);\n\n// float depth;\n// vec3 normal;\n// getDepthNormal(vUv, depth, normal);\n\n// if(!smoothEnabled || frameCount < 60. 
|| vUv.x > 0.5)\n return color;\n\n// direction *= vec2(int(frameCount)%2, int(frameCount+1.)%2);\n\n// float Z = 1.0;\n// vec4 final_colour = Z * color;\n// float factor;\n// vec2 nuv;\n// vec4 cc, np; float dp; vec3 nor; // cc is color, np is position, dp is depth, nor is normal\n// direction /= screenSize.xy;\n//\n// // -1, 1, -2, 2, -3, 3\n// for (int i = 0; i < BILATERAL_KERNEL; ++i)\n// {\n//\n// direction *= -1.;\n// nuv = vUv + direction * float( i/2 + 1 ); // clamp to screen border\n// getDepthNormal(nuv, dp, nor);\n// if(dp > 0.99) continue;\n//\n// cc = clamp(texture2D(tex, nuv), 0., 5.); // clamp(texsample(tDiffuse, float(i), float(j)), 0., 1.);\n//\n// factor = 1.;\n//\n// factor *= normpdf( length(cc-color) * smoothScale.x, smoothSigma.x); //color\n// factor *= normpdf( sqrt(abs(dp-depth)) * smoothScale.y, smoothSigma.y); //depth\n// factor *= normpdf( float( i/2 + 1 ) * smoothScale.z, smoothSigma.z); //pixel distance\n// factor *= normpdf( (1.-dot(normal, nor)) * smoothScale.w, smoothSigma.w); //normal\n//// factor *= normpdf(sqrt(length(np-pos))*smoothScale.y, smoothSigma.y); // position.\n//\n// Z += factor;\n// final_colour += factor*cc;\n//\n// }\n//\n// final_colour /= Z;\n// return final_colour;\n}\n\nvoid main() {\n\n// vec4 texel = texture2D( tDiffuse, vUv );\n float depth;\n vec3 normal;\n// float alpha = opacity;\n getDepthNormal(vUv, depth, normal);\n\n if (depth > 0.99) {\n discard;\n\n gl_FragColor = getLastThis(tLastThis, depth, normal);\n\n return;\n }\n\n float viewZ = depthToViewZ(depth);\n vec3 screenPos = vec3(vUv.x, vUv.y, viewZ);\n\n vec4 gi = vec4(0.);\n// screenPos.z += 0.001;\n gi += calculateGI(8., screenPos, normal, 1.);\n if(rayCount > 1.5)\n gi = max(gi, calculateGI(2., screenPos, normal, 0.4));\n if(rayCount > 2.5)\n gi = max(gi, calculateGI(3., screenPos, normal, 1.5));\n if(rayCount > 3.5)\n gi = max(gi, calculateGI(1., screenPos, normal, 0.6));\n if(rayCount > 4.5)\n gi = max(gi, calculateGI(3., screenPos, normal, 1.));\n\n// gi += calculateGI(3., screenPos, normal, 1.);\n// gi += calculateGI(4., screenPos, normal, 1.);\n// gi += calculateGI(5., screenPos, normal, 1.);\n// gi += calculateGI(6., screenPos, normal, 1.);\n// gi += calculateGI(7., screenPos, normal, 1.);\n// gi = gi / 6.;\n// gi = gi / 3.;\n\n// gl_FragColor = vec4(texel) * (1.-ao);\n\n gi.a = min(1., gi.a);\n gi.a = max(0., gi.a);\n// gi = ao;\n// gi *= intensity/1.;\n\n gi.rgb = min(vec3(3.), gi.rgb);\n gi.rgb = max(vec3(0.), gi.rgb);\n\n if(currentFrameCount < 3.){\n gl_FragColor = gi;\n return;\n }\n\n gl_FragColor = (texture2D( tLastThis, vUv ));\n// gl_FragColor = getLastThis(tLastThis, depth, normal);\n\n // if(gi.a < 0.001){\n // gl_FragColor.rgb = gl_FragColor.rgb;\n // gl_FragColor.a = (((gl_FragColor.a) * frameCount)/(frameCount+1.));\n // }else {\n // gl_FragColor = ((gi + (gl_FragColor) * frameCount)/(frameCount+1.));\n // }\n\n gl_FragColor = ((gi + (gl_FragColor) * currentFrameCount)/(currentFrameCount+1.));\n\n // todo: encodings??\n\n// #include <colorspace_fragment>\n\n}\n","import computeScreenSpaceVelocity from './shaders/computeScreenSpaceVelocity.glsl'\nimport getWorldPositionFromViewZ from './shaders/getWorldPositionFromViewZ.glsl'\nimport temporalaa from './shaders/temporalaa.glsl'\nimport ssreflection from './shaders/ssreflection.glsl'\nimport ssrtao from './shaders/ssrtao.glsl'\nimport {ShaderChunk} from \"threepipe\";\n\nconst shadersUtils2 = {\n computeScreenSpaceVelocity: computeScreenSpaceVelocity,\n getWorldPositionFromViewZ: 
getWorldPositionFromViewZ,\n temporalAA: temporalaa,\n ssReflection: ssreflection,\n calculateGI: ssrtao,\n\n ['__inited']: false,\n}\n\nexport function getShaders(){\n if(!shadersUtils2.__inited){\n shadersUtils2.__inited = true\n Object.assign(ShaderChunk, shadersUtils2)\n }\n return shadersUtils2\n}\n","import {\n Camera,\n CopyShader,\n ExtendedShaderPass,\n GBufferPlugin,\n generateUiConfig,\n getOrCall,\n ICamera,\n IPassID,\n IPipelinePass,\n IRenderManager,\n IScene,\n IWebGLRenderer,\n matDefineBool,\n MaterialExtension,\n Matrix4,\n PipelinePassPlugin,\n ProgressivePlugin,\n serialize,\n ThreeViewer,\n uiConfig,\n UiObjectConfig,\n uiToggle,\n uiVector,\n uniform,\n ValOrFunc,\n Vector2,\n WebGLMultipleRenderTargets,\n WebGLRenderTarget,\n} from 'threepipe'\nimport {getShaders} from \"../../utils/shaders\";\n\nconst passId = 'taa'\ntype TemporalAAPassId = typeof passId\n\n/**\n * Temporal Anti-Aliasing Plugin\n *\n * This plugin uses a temporal anti-aliasing pass to smooth out the final image when the camera or some mesh is moving\n * @category Plugins\n */\nexport class TemporalAAPlugin\n extends PipelinePassPlugin<TemporalAAPluginPass<TemporalAAPassId>, TemporalAAPassId> {\n static readonly PluginType = 'TAA'\n static readonly OldPluginType = 'TemporalAAPlugin' // todo swap\n readonly passId = passId\n\n // readonly materialExtension: MaterialExtension = uiConfigMaterialExtension(this._getUiConfig.bind(this), TemporalAAPlugin.PluginType)\n\n constructor(enabled = true) {\n super()\n this.enabled = enabled\n this.setDirty = this.setDirty.bind(this)\n }\n\n\n private _stableNoise = true\n /**\n * Same as BaseRenderer.stableNoise Use total frame count, if this is set to true, then frameCount won't be reset when the viewer is set to dirty.\n * Which will generate different random numbers for each frame during postprocessing steps. With TAA set properly, this will give a smoother result.\n */\n @uiToggle('Stable Noise (Total frame count)')\n get stableNoise(): boolean {\n return this._viewer?.renderManager.stableNoise ?? this._stableNoise\n }\n set stableNoise(v: boolean) {\n if (this._viewer) this._viewer.renderManager.stableNoise = v\n this._stableNoise = v\n }\n\n @uiConfig(undefined, {unwrapContents: true}) declare protected _pass?: TemporalAAPluginPass<TemporalAAPassId>\n\n private _gbufferUnpackExtension = undefined as MaterialExtension|undefined\n private _gbufferUnpackExtensionChanged = ()=>{\n if (!this._pass || !this._viewer) throw new Error('TemporalAAPlugin: pass/viewer not created yet')\n const newExtension = this._viewer.renderManager.gbufferUnpackExtension\n if (this._gbufferUnpackExtension === newExtension) return\n if (this._gbufferUnpackExtension) this._pass.material.unregisterMaterialExtensions([this._gbufferUnpackExtension])\n this._gbufferUnpackExtension = newExtension\n if (this._gbufferUnpackExtension) this._pass.material.registerMaterialExtensions([this._gbufferUnpackExtension])\n else this._viewer.console.warn('TemporalAAPlugin: GBuffer unpack extension removed')\n }\n\n protected _createPass() {\n if (!this._viewer) throw new Error('TemporalAAPlugin: viewer not set')\n if (!this._viewer.renderManager.gbufferTarget || !this._viewer.renderManager.gbufferUnpackExtension)\n throw new Error('TemporalAAPlugin: GBuffer target not created. 
GBufferPlugin is required.')\n const applyOnBackground = !!this._viewer.renderManager.msaa\n const t = new TemporalAAPluginPass(this.passId, ()=>this._viewer?.getPlugin(ProgressivePlugin)?.target, applyOnBackground)\n return t\n }\n\n dependencies = [GBufferPlugin, ProgressivePlugin] // todo use gbufferUnpackExtension from render manager to support depth buffer plugin as well.\n\n onAdded(viewer: ThreeViewer) {\n super.onAdded(viewer)\n this._gbufferUnpackExtensionChanged()\n viewer.renderManager.addEventListener('gbufferUnpackExtensionChanged', this._gbufferUnpackExtensionChanged)\n viewer.renderManager.addEventListener('resize', this._pass!.onSizeUpdate)\n // viewer.materialManager.registerMaterialExtension(this.materialExtension)\n }\n\n onRemove(viewer: ThreeViewer) {\n viewer.renderManager.removeEventListener('gbufferUnpackExtensionChanged', this._gbufferUnpackExtensionChanged)\n viewer.renderManager.removeEventListener('resize', this._pass!.onSizeUpdate)\n super.onRemove(viewer)\n // viewer.materialManager.unregisterMaterialExtension(this.materialExtension)\n }\n\n uiConfig: UiObjectConfig = {\n type: 'folder',\n label: 'TemporalAA Plugin',\n onChange: this.setDirty.bind(this),\n children: [\n ...generateUiConfig(this) || [],\n ],\n }\n\n protected _beforeRender(scene: IScene, camera: ICamera, renderManager: IRenderManager): boolean {\n if (!super._beforeRender(scene, camera, renderManager)) return false\n const pass = this.pass\n const v = this._viewer\n if (!pass || !v) return false\n\n const frame = renderManager.frameCount\n pass.taaEnabled = frame <= 1 && scene.renderCamera === scene.mainCamera\n if (!pass.taaEnabled) return false\n\n const cam = camera\n if (!cam) return false\n\n cam.updateMatrixWorld(true)\n\n cam.updateShaderProperties(pass.material) // for cameraNearFar\n\n pass.updateCameraProperties(cam)\n\n pass.target = v.getPlugin(ProgressivePlugin)!.target as any\n return true\n }\n\n // region to be done or removed\n\n // static AddTemporalAAData(material: IMaterial, params?: IMaterialUserData['TemporalAA'], setDirty = true): IMaterialUserData['TemporalAA']|null {\n // const ud = material?.userData\n // if (!ud) return null\n // if (!ud[TemporalAAPlugin.PluginType]) {\n // ud[TemporalAAPlugin.PluginType] = {}\n // }\n // const data = ud[TemporalAAPlugin.PluginType]!\n // data.enable = true\n // params && Object.assign(data, params)\n // if (setDirty && material.setDirty) material.setDirty()\n // return data\n // }\n\n /**\n * This uiConfig is added to each material by extension\n * @param material\n * @private\n */\n // private _getUiConfig(material: IMaterial) {\n // const config: UiObjectConfig = {\n // type: 'folder',\n // label: 'TemporalAA',\n // children: [\n // {\n // type: 'checkbox',\n // label: 'Enabled',\n // get value() {\n // return material.userData[TemporalAAPlugin.PluginType]?.enable ?? 
true\n // },\n // set value(v) {\n // let data = material.userData[TemporalAAPlugin.PluginType]\n // if (v === data?.enable) return\n // if (!data) data = TemporalAAPlugin.AddTemporalAAData(material, undefined, false)!\n // data.enable = v\n // material.setDirty()\n // config.uiRefresh?.(true, 'postFrame')\n // },\n // onChange: this.setDirty,\n // },\n // ],\n // }\n // return config\n // }\n\n // endregion\n}\n\nexport class TemporalAAPluginPass<Tid extends IPassID> extends ExtendedShaderPass implements IPipelinePass<Tid> {\n before = ['progressive']\n after = [] // leave empty so that this is placed right before progressive\n required = ['render', 'progressive']\n\n public target: ValOrFunc<WebGLRenderTarget|undefined>\n public readonly passId: Tid\n\n constructor(pid: Tid, target: ValOrFunc<WebGLRenderTarget|undefined>, applyOnBackground = false) {\n super({\n vertexShader: CopyShader.vertexShader,\n fragmentShader: getShaders().temporalAA,\n uniforms: {\n currentRT: {value: null},\n previousRT: {value: null},\n previousRTSize: {value: new Vector2()},\n cameraNearFar: {value: new Vector2()},\n lastProjectionViewMatrix: {value: new Matrix4()},\n currentProjectionViewMatrix: {value: new Matrix4()},\n projection: {value: new Matrix4()},\n inverseViewMatrix: {value: new Matrix4()},\n jitterSample: {value: new Vector2()},\n firstFrame: {value: true},\n },\n defines: {\n ['QUALITY']: 1,\n ['UNJITTER']: 0,\n ['BACKGROUND_TAA']: applyOnBackground ? 1 : 0,\n },\n\n }, 'currentRT', 'previousRT')\n this.passId = pid\n this.onSizeUpdate = this.onSizeUpdate.bind(this)\n this.target = target\n this.clear = false\n this.needsSwap = true\n\n }\n\n /**\n * to switch with ssaa, for internal use only, dont set from outside\n */\n taaEnabled = true\n\n render(renderer: IWebGLRenderer, writeBuffer?: WebGLMultipleRenderTargets | WebGLRenderTarget | null, readBuffer?: WebGLMultipleRenderTargets | WebGLRenderTarget, deltaTime?: number, maskActive?: boolean) {\n if (!this.taaEnabled || !this.enabled) {\n this.needsSwap = false\n return\n }\n this.needsSwap = true\n\n this.uniforms.previousRT.value = getOrCall(this.target)?.texture ?? 
null\n\n super.render(renderer, writeBuffer, readBuffer, deltaTime, maskActive)\n\n // ;(renderer as any).baseRenderer.blit(writeBuffer.texture, this.target, {clear: false}) // this is done in the progressive plugin\n\n this.uniforms.lastProjectionViewMatrix.value.copy(this.uniforms.currentProjectionViewMatrix.value)\n\n this.uniforms.firstFrame.value = false\n\n }\n\n\n updateCameraProperties(camera?: Camera): void {\n if (!camera) return\n this.uniforms.currentProjectionViewMatrix.value.multiplyMatrices(camera.projectionMatrix, camera.matrixWorldInverse)\n // this.uniforms.lastProjectionViewMatrix.value.copy(this.lastProjectionViewMatrix_)\n // this.uniforms.ProjectionMatrix.value.copy(this.projectionMatrix_)\n this.uniforms.inverseViewMatrix.value.copy(camera.matrixWorld)\n }\n onSizeUpdate() {\n this.uniforms.firstFrame.value = true\n this.setDirty()\n }\n setSize(width: number, height: number) {\n super.setSize(width, height)\n this.onSizeUpdate()\n }\n\n @serialize() @uniform()\n @uiVector('Feedback', undefined, 0.0001)\n feedBack: Vector2 = new Vector2(0.88, 0.97)\n\n @uiToggle()\n @matDefineBool('DEBUG_VELOCITY', undefined, false, undefined, true)\n debugVelocity = false\n\n uiConfig: UiObjectConfig = {\n type: 'folder',\n label: 'Temporal AA Pass',\n onChange: this.setDirty,\n children: [\n {\n type: 'checkbox',\n label: 'Enabled',\n property: [this, 'enabled'],\n onChange: ()=>this.onSizeUpdate(),\n },\n ...generateUiConfig(this)?.filter(c=>c && (c as any).label !== 'Enabled') || [],\n ],\n }\n\n}\n\ndeclare module 'threepipe' {\n // interface IMaterialUserData {\n // [TemporalAAPlugin.PluginType]?: {\n // enable?: boolean\n // }\n // }\n}\n","import {\n BufferGeometry,\n Camera,\n Color,\n DoubleSide,\n GBufferRenderPass,\n ICamera,\n IObject3D,\n IRenderManager,\n IScene,\n IUniform,\n IWebGLRenderer,\n LinearSRGBColorSpace,\n MaterialExtension,\n Matrix3,\n Matrix4,\n NoBlending,\n PipelinePassPlugin,\n Scene,\n ShaderMaterial,\n shaderReplaceString,\n Texture,\n TextureDataType,\n ThreeViewer,\n uiFolderContainer,\n uiImage,\n UnsignedByteType,\n Vector2,\n WebGLMultipleRenderTargets,\n WebGLRenderer,\n WebGLRenderTarget,\n} from 'threepipe'\nimport VelocityBufferUnpack from './shaders/VelocityBufferPlugin.unpack.glsl'\nimport ssVelocityVert from './shaders/VelocityBufferPlugin.mat.vert.glsl'\nimport ssVelocityFrag from './shaders/VelocityBufferPlugin.mat.frag.glsl'\nimport {TemporalAAPlugin} from '../postprocessing/TemporalAAPlugin'\n\n// type VelocityBufferPluginTarget = WebGLMultipleRenderTargets | WebGLRenderTarget\nexport type VelocityBufferPluginTarget = WebGLRenderTarget\nexport type VelocityBufferPluginPass = GBufferRenderPass<'velocityBuffer', VelocityBufferPluginTarget>\n/**\n * Velocity Buffer Plugin\n *\n * Adds a pre-render pass to render the normal buffer to a render target that can be used for postprocessing.\n * @category Plugins\n */\n@uiFolderContainer('Velocity Buffer Plugin (for TAA)')\nexport class VelocityBufferPlugin\n extends PipelinePassPlugin<VelocityBufferPluginPass, 'velocityBuffer'> {\n\n readonly passId = 'velocityBuffer'\n public static readonly PluginType = 'VelocityBuffer'\n public static readonly OldPluginType = 'VelocityBufferPlugin' // todo swap\n\n target?: VelocityBufferPluginTarget\n @uiImage('Velocity Buffer', {readOnly: true}) texture?: Texture\n readonly material: SSVelocityMaterial = new SSVelocityMaterial()\n\n // @onChange2(VelocityBufferPlugin.prototype._createTarget)\n // @uiDropdown('Buffer Type', 
threeConstMappings.TextureDataType.uiConfig)\n readonly bufferType: TextureDataType // cannot be changed after creation (for now)\n\n protected _createTarget(recreate = true) {\n if (!this._viewer) return\n if (recreate) this._disposeTarget()\n\n if (!this.target) this.target = this._viewer.renderManager.createTarget<VelocityBufferPluginTarget>(\n {\n depthBuffer: true,\n // samples: v.renderManager.composerTarget.samples || 0,\n samples: 0,\n type: this.bufferType,\n // magFilter: NearestFilter,\n // minFilter: NearestFilter,\n generateMipmaps: false,\n colorSpace: LinearSRGBColorSpace,\n })\n this.texture = this.target.texture\n this.texture.name = 'velocityBuffer'\n\n if (this._pass) this._pass.target = this.target\n }\n protected _disposeTarget() {\n if (!this._viewer) return\n if (this.target) {\n this._viewer.renderManager.disposeTarget(this.target)\n this.target = undefined\n }\n this.texture = undefined\n }\n\n protected _createPass() {\n this._createTarget(true)\n if (!this.target) throw new Error('VelocityBufferPlugin: target not created')\n this.material.userData.isGBufferMaterial = true\n const v = this._viewer!\n const pass = new class extends GBufferRenderPass {\n private _firstCall = true\n render(renderer: IWebGLRenderer, writeBuffer?: WebGLRenderTarget | WebGLMultipleRenderTargets | null, readBuffer?: WebGLRenderTarget | WebGLMultipleRenderTargets, deltaTime?: number, maskActive?: boolean) {\n if (v.renderManager.frameCount > 0) return\n if (!this.enabled || !this.camera) return\n const mat = this.overrideMaterial as ShaderMaterial\n mat.uniforms.currentProjectionViewMatrix.value.copy(this.camera.projectionMatrix).multiply(this.camera.matrixWorldInverse)\n if (this._firstCall) {\n mat.uniforms.lastProjectionViewMatrix.value.copy(mat.uniforms.currentProjectionViewMatrix.value)\n this._firstCall = false\n }\n super.render(renderer, writeBuffer, readBuffer, deltaTime, maskActive)\n mat.uniforms.lastProjectionViewMatrix.value.copy(mat.uniforms.currentProjectionViewMatrix.value)\n }\n }(this.passId, this.target, this.material, new Color(0.5, 0.5, 0.5), 1) // clear color 0.5 means 0 velocity\n const preprocessMaterial = pass.preprocessMaterial\n pass.preprocessMaterial = (m) => preprocessMaterial(m, m.userData[VelocityBufferPlugin.PluginType]?.disabled)\n pass.before = ['render']\n pass.after = []\n pass.required = ['render']\n return pass as any\n }\n\n // automatically register the unpack extension with TAA plugin\n protected readonly _attachToTaa: boolean\n\n constructor(\n bufferType: TextureDataType = UnsignedByteType,\n enabled = true,\n _attachToTaa = true\n ) {\n super()\n this.enabled = enabled\n this.bufferType = bufferType\n this._attachToTaa = _attachToTaa\n }\n\n onAdded(viewer: ThreeViewer) {\n super.onAdded(viewer)\n viewer.forPlugin(TemporalAAPlugin, (taa) => {\n this._attachToTaa && taa.pass?.material.registerMaterialExtensions([this.unpackExtension])\n }, (taa)=>{\n taa.pass?.material?.unregisterMaterialExtensions([this.unpackExtension])\n })\n }\n\n onRemove(viewer: ThreeViewer): void {\n this._disposeTarget()\n return super.onRemove(viewer)\n }\n\n unpackExtension: MaterialExtension = {\n shaderExtender: (shader)=>{\n if(this.isDisabled()) return\n shader.fragmentShader = shaderReplaceString(shader.fragmentShader,\n '#pragma <velocity_unpack>',\n '\\n' + VelocityBufferUnpack + '\\n')\n },\n computeCacheKey: ()=>this.isDisabled() ? '' : 'vb',\n extraUniforms: {\n tVelocity: ()=>({value: !this.isDisabled() ? 
this.target?.texture:null}),\n },\n extraDefines: {\n ['HAS_VELOCITY_BUFFER']: ()=>!this.isDisabled() && this.target?.texture ? 1 : undefined,\n },\n priority: 100,\n isCompatible: () => true,\n }\n\n setDirty() {\n super.setDirty();\n this.unpackExtension.setDirty?.()\n }\n\n protected _beforeRender(scene: IScene, camera: ICamera, renderManager: IRenderManager): boolean {\n if (!super._beforeRender(scene, camera, renderManager)) return false\n const pass = this.pass\n if (!pass) return false\n if (renderManager.frameCount > 0) return false\n pass.scene = scene\n pass.camera = camera\n camera.updateShaderProperties(pass.overrideMaterial as ShaderMaterial)\n return true\n }\n\n}\n\n\ndeclare module 'threepipe' {\n interface IMaterialUserData {\n [VelocityBufferPlugin.PluginType]?: {\n /**\n * Disables rendering to the velocity buffer.\n */\n disabled?: boolean\n }\n }\n}\nexport class SSVelocityMaterial extends ShaderMaterial {\n\n constructor() {\n super({\n vertexShader: ssVelocityVert,\n fragmentShader: ssVelocityFrag,\n uniforms: {\n cameraNearFar: {value: new Vector2(0.1, 1000)},\n alphaMap: {value: null},\n alphaTest: {value: null},\n alphaMapTransform: {value: /* @__PURE__*/ new Matrix3()},\n currentProjectionViewMatrix: {value: new Matrix4()},\n lastProjectionViewMatrix: {value: new Matrix4()},\n },\n blending: NoBlending, // todo?\n })\n }\n\n extraUniformsToUpload: Record<string, IUniform> = {\n modelMatrixPrevious: {value: new Matrix4().identity()},\n }\n\n private _previousWorldMatrices: Record<string, Matrix4> = {}\n\n // this gets called for each object.\n onBeforeRender(_r: WebGLRenderer, _s: Scene, _c: Camera, _geometry: BufferGeometry, object: IObject3D) {\n const prevMatrix = this._previousWorldMatrices[object.uuid]\n this.extraUniformsToUpload.modelMatrixPrevious.value.copy(prevMatrix ?? object.matrixWorld)\n\n // todo: make sure all objects are only rendered once.\n if (prevMatrix) {\n prevMatrix.copy(object.matrixWorld)\n } else {\n this._previousWorldMatrices[object.uuid] = object.matrixWorld.clone()\n }\n\n // todo: add support for all this in the shaders.\n let mat = object.material\n\n if (Array.isArray(mat)) { // todo: add support for multi materials.\n mat = mat[0]\n }\n this.uniforms.alphaMap.value = mat?.alphaMap ?? null\n this.uniforms.alphaTest.value = !mat || !mat.alphaTest || mat.alphaTest < 0.0000001 ? 0.001 : mat.alphaTest\n\n let x = this.uniforms.alphaMap.value ? 1 : undefined\n if (x !== this.defines.USE_ALPHAMAP) {\n if (x === undefined) delete this.defines.USE_ALPHAMAP\n else this.defines.USE_ALPHAMAP = x\n this.needsUpdate = true\n }\n x = mat?.userData.ALPHA_I_RGBA_PACKING ? 1 : undefined\n if (x !== this.defines.ALPHA_I_RGBA_PACKING) {\n if (x === undefined) delete this.defines.ALPHA_I_RGBA_PACKING\n else this.defines.ALPHA_I_RGBA_PA