@kitware/vtk.js

Visualization Toolkit for the Web

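The fragment shader in this mapper implements a Cook-Torrance style physically based lighting model (Trowbridge-Reitz GGX distribution, Schlick-GGX/Smith geometry, Schlick Fresnel). The short standalone sketch below, which is not part of the module source that follows, re-implements those terms in plain JavaScript so the formulas can be sanity-checked outside WGSL. The helper names (dot3, normalize3) are invented for the sketch, and the shader's smoothed dot remapping (cdot) is simplified here to a plain clamped dot.

// Standalone sketch of the Cook-Torrance terms used by the WGSL fragment shader below.
const dot3 = (a, b) => a[0] * b[0] + a[1] * b[1] + a[2] * b[2];
const normalize3 = (v) => {
  const len = Math.sqrt(dot3(v, v)) || 1;
  return [v[0] / len, v[1] / len, v[2] / len];
};
// Clamped dot product, matching the shader's mdot(); the shader's cdot() remapping is omitted for brevity.
const mdot = (a, b) => Math.max(0, dot3(a, b));

// Trowbridge-Reitz GGX normal distribution term (a = roughness^2), as in trGGX().
function trGGX(N, H, a) {
  const a2 = a * a;
  const NdotH2 = mdot(N, H) ** 2;
  const denom = NdotH2 * (a2 - 1) + 1;
  return a2 / Math.max(Math.PI * denom * denom, 1e-6);
}

// Schlick-GGX geometry term and its Smith combination, as in schlickGGX()/smithSurfaceRoughness().
const schlickGGX = (N, X, k) => mdot(N, X) / Math.max(mdot(N, X) * (1 - k) + k, 1e-6);
const smithG = (N, V, L, k) => Math.min(1, schlickGGX(N, V, k)) * Math.min(1, schlickGGX(N, L, k));

// Schlick Fresnel from an index of refraction, as in schlickFresnelIOR().
function fresnelIOR(V, N, ior, k) {
  const F0 = ((ior - 1) ** 2 + k * k) / ((ior + 1) ** 2 + k * k);
  return F0 + (1 - F0) * (1 - mdot(V, N)) ** 5;
}

// Cook-Torrance combination: D * F * G / (4 (V.N)(L.N)), as in cookTorrance().
function cookTorrance(D, F, G, N, V, L) {
  return (D * F * G) / Math.max(4 * mdot(V, N) * mdot(L, N), 1e-6);
}

// Example: specular response for a single light with moderate roughness.
const N = [0, 0, 1];
const V = normalize3([0, 0.2, 1]);
const L = normalize3([0.3, 0.3, 1]);
const H = normalize3([V[0] + L[0], V[1] + L[1], V[2] + L[2]]);
const roughness = 0.4;
const alpha = roughness * roughness;
const k = (alpha * alpha) / 2;
const D = trGGX(N, H, alpha);
const F = fresnelIOR(V, N, 1.45, k);
const G = smithG(N, V, L, k);
console.log('specular BRDF value:', cookTorrance(D, F, G, N, V, L));
// --- end of sketch; the original @kitware/vtk.js module source follows ---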
import { mat3, mat4 } from 'gl-matrix'; import { newInstance as newInstance$1, setGet } from '../../macros.js'; import vtkMapper from '../Core/Mapper.js'; import vtkProp from '../Core/Prop.js'; import vtkProperty from '../Core/Property.js'; import vtkProperty2D from '../Core/Property2D.js'; import vtkTexture from '../Core/Texture.js'; import vtkWebGPUBufferManager from './BufferManager.js'; import vtkWebGPUShaderCache from './ShaderCache.js'; import vtkWebGPUUniformBuffer from './UniformBuffer.js'; import vtkWebGPUSimpleMapper from './SimpleMapper.js'; import vtkWebGPUTypes from './Types.js'; var BufferUsage = vtkWebGPUBufferManager.BufferUsage, PrimitiveTypes = vtkWebGPUBufferManager.PrimitiveTypes; var Representation = vtkProperty.Representation; var ScalarMode = vtkMapper.ScalarMode; var CoordinateSystem = vtkProp.CoordinateSystem; var DisplayLocation = vtkProperty2D.DisplayLocation; var vtkWebGPUPolyDataVS = "\n//VTK::Renderer::Dec\n\n//VTK::Color::Dec\n\n//VTK::Normal::Dec\n\n//VTK::TCoord::Dec\n\n//VTK::Select::Dec\n\n//VTK::Mapper::Dec\n\n//VTK::IOStructs::Dec\n\n@vertex\nfn main(\n//VTK::IOStructs::Input\n)\n//VTK::IOStructs::Output\n{\n var output : vertexOutput;\n\n var vertex: vec4<f32> = vertexBC;\n\n //VTK::Color::Impl\n\n //VTK::Normal::Impl\n\n //VTK::TCoord::Impl\n\n //VTK::Select::Impl\n\n //VTK::Position::Impl\n\n return output;\n}\n"; var vtkWebGPUPolyDataFS = "\nstruct PBRData {\n diffuse: vec3<f32>,\n specular: vec3<f32>,\n}\n\n// Dot product with the max already in it\nfn mdot(a: vec3<f32>, b: vec3<f32>) -> f32 {\n return max(0.0, dot(a, b));\n}\n// Dot product with a max in it that does not allow for negative values\n// Physically based rendering is accurate as long as normals are accurate,\n// however this is pretty often not the case. 
In order to prevent negative\n// values from ruining light calculations and creating zones of zero light,\n// this remapping is used, which smoothly clamps the dot product between\n// zero and one while still maintaining a good amount of accuracy.\nfn cdot(a: vec3<f32>, b: vec3<f32>) -> f32 {\n var d: f32 = max(0.0, dot(a, b));\n d = pow((d + 1) / 2.0, 2.6);\n return d;\n}\n\n// Lambertian diffuse model\nfn lambertDiffuse(base: vec3<f32>, N: vec3<f32>, L: vec3<f32>) -> vec3<f32> {\n var pi: f32 = 3.14159265359; \n var NdotL: f32 = mdot(N, L);\n NdotL = pow(NdotL, 1.5);\n return (base/pi)*NdotL;\n}\n\n// Yasuhiro Fujii improvement on the Oren-Nayar model\n// https://mimosa-pudica.net/improved-oren-nayar.html\n// p is surface color, o is roughness\nfn fujiiOrenNayar(p: vec3<f32>, o: f32, N: vec3<f32>, L: vec3<f32>, V: vec3<f32>) -> vec3<f32> {\n var invpi: f32 = 0.31830988618; // 1/pi\n\n var o2 = o*o;\n var NdotL: f32 = mdot(N, L);\n NdotL = pow(NdotL, 1.5); // Less physically accurate, but hides the \"seams\" between lights better\n\n var NdotV: f32 = mdot(N, V);\n var LdotV: f32 = mdot(L, V);\n\n var s: f32 = LdotV - NdotL*NdotV;\n var t: f32 = mix(1, max(NdotL, NdotV), step(0, s)); // Mix with step is the equivalent of an if statement\n var A: vec3<f32> = 0.5*(o2 / (o2 + 0.33)) + 0.17*p*(o2 / (o2 + 0.13));\n A = invpi*(1 - A);\n var B: f32 = 0.45*(o2 / (o2 + 0.09));\n B = invpi*B;\n\n return p*NdotL*(A + B*(s/t));\n}\n\n// Fresnel portion of BRDF (IOR only, simplified)\nfn schlickFresnelIOR(V: vec3<f32>, N: vec3<f32>, ior: f32, k: f32) -> f32 {\n var NdotV: f32 = mdot(V, N);\n var F0: f32 = (pow((ior - 1.0), 2) + k*k) / (pow((ior + 1.0), 2) + k*k); // This takes into account the roughness, which the other one does not\n return F0 + (1 - F0) * pow((1-NdotV), 5); \n}\n\n// Fresnel portion of BRDF (Color ior, better)\nfn schlickFresnelRGB(V: vec3<f32>, N: vec3<f32>, F0: vec3<f32>) -> vec3<f32> {\n var NdotV: f32 = mdot(V, N);\n return F0 + (1 - F0) * pow((1-NdotV), 5); \n}\n\n// Normal portion of BRDF\n// https://learnopengl.com/PBR/Theory\n// Trowbridge-Reitz GGX functions: normal, halfway, roughness^2\nfn trGGX(N: vec3<f32>, H: vec3<f32>, a: f32) -> f32 {\n var pi: f32 = 3.14159265359; \n\n var a2: f32 = a*a;\n var NdotH = mdot(N, H);\n var NdotH2 = NdotH*NdotH;\n \n var denom: f32 = NdotH2 * (a2 - 1.0) + 1.0;\n\n return a2 / max((pi*denom*denom), 0.000001);\n}\n\n// A VERY bad approximation of anisotropy. 
Real anisotropic calculations require tangent and bitangent\nfn anisotrophicTrGGX(N: vec3<f32>, H: vec3<f32>, O: vec3<f32>, s: f32, a: f32) -> f32 {\n var Op: vec3<f32> = (rendererUBO.WCVCNormals * vec4<f32>(normalize(O) * s, 0.)).xyz;\n\n var ggx1: f32 = trGGX(N + Op*s, H, a);\n var ggx2: f32 = trGGX(N - Op*s, H, a);\n return (0.5 * ggx1 + 0.5 * ggx2);\n}\n\n// Geometry portion of BRDF\nfn schlickGGX(N: vec3<f32>, X: vec3<f32>, k: f32) -> f32 {\n var NdotX = cdot(N, X);\n return NdotX / max(0.000001, (NdotX*(1-k) + k));\n}\n\nfn smithSurfaceRoughness(N: vec3<f32>, V: vec3<f32>, L: vec3<f32>, k: f32) -> f32 {\n var ggx1: f32 = min(1, schlickGGX(N, V, k));\n var ggx2: f32 = min(1, schlickGGX(N, L, k));\n return ggx1*ggx2;\n}\n\n// BRDF Combination\nfn cookTorrance(D: f32, F: f32, G: f32, N: vec3<f32>, V: vec3<f32>, L: vec3<f32>) -> f32 {\n var num: f32 = D*F*G;\n var denom: f32 = 4*cdot(V, N)*cdot(L, N);\n\n return num / max(denom, 0.000001);\n}\n\n// Different lighting calculations for different light sources\nfn calcDirectionalLight(N: vec3<f32>, V: vec3<f32>, ior: f32, roughness: f32, metallic: f32, direction: vec3<f32>, color: vec3<f32>, base: vec3<f32>) -> PBRData { \n var L: vec3<f32> = normalize(direction); // Light Vector\n var H: vec3<f32> = normalize(L + V); // Halfway Vector\n\n var alpha = roughness*roughness;\n var k: f32 = alpha*alpha / 2;\n\n var D: f32 = trGGX(N, H, alpha); // Distribution\n // var F: f32 = schlickFresnelIOR(V, N, ior, k); // Fresnel\n var G: f32 = smithSurfaceRoughness(N, V, L, k); // Geometry\n\n var brdf: f32 = cookTorrance(D, 1, G, N, V, L); // Fresnel term is replaced with 1 because it is added later\n var incoming: vec3<f32> = color;\n var angle: f32 = mdot(L, N);\n angle = pow(angle, 1.5);\n\n var specular: vec3<f32> = brdf*incoming*angle;\n // Oren-Nayar gives a clay-like effect when fully rough which some people may not want, so it might be better to give a separate\n // control property for the diffuse vs specular roughness\n var diffuse: vec3<f32> = incoming*fujiiOrenNayar(base, roughness, N, L, V); \n // Stores the specular and diffuse separately to allow for finer post processing\n var out = PBRData(diffuse, specular);\n \n return out; // Returns angle along with color of light so the final color can be multiplied by angle as well (creates black areas)\n}\n\n// TODO: find some way to reduce the number of arguments going in here\nfn calcPointLight(N: vec3<f32>, V: vec3<f32>, fragPos: vec3<f32>, ior: f32, roughness: f32, metallic: f32, position: vec3<f32>, color: vec3<f32>, base: vec3<f32>) -> PBRData {\n var L: vec3<f32> = normalize(position - fragPos); // Light Vector\n var H: vec3<f32> = normalize(L + V); // Halfway Vector\n var dist = distance(position, fragPos);\n\n var alpha = roughness*roughness;\n var k: f32 = alpha*alpha / 2; // could also be pow(alpha + 1.0, 2) / 8\n\n var D: f32 = trGGX(N, H, alpha); // Distribution\n // var F: f32 = schlickFresnelIOR(V, N, ior, k); // Fresnel\n var G: f32 = smithSurfaceRoughness(N, V, L, k); // Geometry\n\n var brdf: f32 = cookTorrance(D, 1, G, N, V, L); \n var incoming: vec3<f32> = color * (1. 
/ (dist*dist));\n var angle: f32 = mdot(L, N);\n angle = pow(angle, 1.5); // Smoothing factor makes it less accurate, but reduces ugly \"seams\" bewteen light sources\n\n var specular: vec3<f32> = brdf*incoming*angle;\n var diffuse: vec3<f32> = incoming*fujiiOrenNayar(base, roughness, N, L, V);\n\n // Stores the specular and diffuse separately to allow for finer post processing\n // Could also be done (propably more properly) with a struct\n var out = PBRData(diffuse, specular);\n \n return out; // Returns angle along with color of light so the final color can be multiplied by angle as well (creates black areas)\n}\n\n// For a reason unknown to me, spheres dont seem to behave propperly with head-on spot lights\nfn calcSpotLight(N: vec3<f32>, V: vec3<f32>, fragPos: vec3<f32>, ior: f32, roughness: f32, metallic: f32, position: vec3<f32>, direction: vec3<f32>, cones: vec2<f32>, color: vec3<f32>, base: vec3<f32>) -> PBRData {\n var L: vec3<f32> = normalize(position - fragPos);\n var H: vec3<f32> = normalize(L + V); // Halfway Vector\n var dist = distance(position, fragPos);\n\n var alpha = roughness*roughness;\n var k: f32 = alpha*alpha / 2; // could also be pow(alpha + 1.0, 2) / 8\n\n var D: f32 = trGGX(N, H, alpha); // Distribution\n // var F: f32 = schlickFresnelIOR(V, N, ior, k); // Fresnel\n var G: f32 = smithSurfaceRoughness(N, V, L, k); // Geometry\n\n var brdf: f32 = cookTorrance(D, 1, G, N, V, L); \n \n // Cones.x is the inner phi and cones.y is the outer phi\n var theta: f32 = mdot(normalize(direction), L);\n var epsilon: f32 = cones.x - cones.y;\n var intensity: f32 = (theta - cones.y) / epsilon;\n intensity = clamp(intensity, 0.0, 1.0);\n intensity /= dist*dist;\n\n var incoming: vec3<f32> = color * intensity;\n\n var angle: f32 = mdot(L, N);\n angle = pow(angle, 1.5); // Smoothing factor makes it less accurate, but reduces ugly \"seams\" bewteen light sources\n\n var specular: vec3<f32> = brdf*incoming*angle;\n var diffuse: vec3<f32> = incoming*fujiiOrenNayar(base, roughness, N, L, V);\n\n // Stores the specular and diffuse separately to allow for finer post processing\n // Could also be done (propably more properly) with a struct\n var out = PBRData(diffuse, specular);\n \n return out; // Returns angle along with color of light so the final color can be multiplied by angle as well (creates black areas)\n}\n\n// Environment mapping stuff\n// Takes in a vector and converts it to an equivalent coordinate in a rectilinear texture. 
Should be replaced with cubemaps at some point\nfn vecToRectCoord(dir: vec3<f32>) -> vec2<f32> {\n var tau: f32 = 6.28318530718;\n var pi: f32 = 3.14159265359;\n var out: vec2<f32> = vec2<f32>(0.0);\n\n out.x = atan2(dir.z, dir.x) / tau;\n out.x += 0.5;\n\n var phix: f32 = length(vec2(dir.x, dir.z));\n out.y = atan2(dir.y, phix) / pi + 0.5;\n\n return out;\n}\n\n//VTK::Renderer::Dec\n\n//VTK::Color::Dec\n\n//VTK::TCoord::Dec\n\n// optional surface normal declaration\n//VTK::Normal::Dec\n\n//VTK::Select::Dec\n\n//VTK::RenderEncoder::Dec\n\n//VTK::Mapper::Dec\n\n//VTK::IOStructs::Dec\n\n@fragment\nfn main(\n//VTK::IOStructs::Input\n)\n//VTK::IOStructs::Output\n{\n var output : fragmentOutput;\n\n // Temporary ambient, diffuse, and opacity\n var ambientColor: vec4<f32> = mapperUBO.AmbientColor;\n var diffuseColor: vec4<f32> = mapperUBO.DiffuseColor;\n var opacity: f32 = mapperUBO.Opacity;\n\n // This should be declared somewhere else\n var _diffuseMap: vec4<f32> = vec4<f32>(1);\n var _roughnessMap: vec4<f32> = vec4<f32>(1);\n var _metallicMap: vec4<f32> = vec4<f32>(1);\n var _normalMap: vec4<f32> = vec4<f32>(0, 0, 1, 0); // normal map was setting off the normal vector detection in fragment\n var _ambientOcclusionMap: vec4<f32> = vec4<f32>(1);\n var _emissionMap: vec4<f32> = vec4<f32>(0);\n\n //VTK::Color::Impl\n\n //VTK::TCoord::Impl\n\n //VTK::Normal::Impl\n\n var computedColor: vec4<f32> = vec4<f32>(diffuseColor.rgb, 1.0);\n\n //VTK::Light::Impl\n\n //VTK::Select::Impl\n\n if (computedColor.a == 0.0) { discard; };\n\n //VTK::Position::Impl\n\n //VTK::RenderEncoder::Impl\n\n return output;\n}\n"; function isEdges(hash) { // edge pipelines have "edge" in them return hash.indexOf('edge') >= 0; } // ---------------------------------------------------------------------------- // vtkWebGPUCellArrayMapper methods // ---------------------------------------------------------------------------- function vtkWebGPUCellArrayMapper(publicAPI, model) { // Set our className model.classHierarchy.push('vtkWebGPUCellArrayMapper'); publicAPI.buildPass = function (prepass) { if (prepass) { if (model.is2D) { model.WebGPUActor = publicAPI.getFirstAncestorOfType('vtkWebGPUActor2D'); model.forceZValue = true; } else { model.WebGPUActor = publicAPI.getFirstAncestorOfType('vtkWebGPUActor'); model.forceZValue = false; } model.coordinateSystem = model.WebGPUActor.getRenderable().getCoordinateSystem(); model.useRendererMatrix = model.coordinateSystem !== CoordinateSystem.DISPLAY; model.WebGPURenderer = model.WebGPUActor.getFirstAncestorOfType('vtkWebGPURenderer'); model.WebGPURenderWindow = model.WebGPURenderer.getParent(); model.device = model.WebGPURenderWindow.getDevice(); } }; // Renders myself publicAPI.translucentPass = function (prepass) { if (prepass) { publicAPI.prepareToDraw(model.WebGPURenderer.getRenderEncoder()); model.renderEncoder.registerDrawCallback(model.pipeline, publicAPI.draw); } }; publicAPI.opaquePass = function (prepass) { if (prepass) { publicAPI.prepareToDraw(model.WebGPURenderer.getRenderEncoder()); model.renderEncoder.registerDrawCallback(model.pipeline, publicAPI.draw); } }; publicAPI.updateUBO = function () { // make sure the data is up to date var actor = model.WebGPUActor.getRenderable(); var ppty = actor.getProperty(); var utime = model.UBO.getSendTime(); if (publicAPI.getMTime() > utime || ppty.getMTime() > utime || model.renderable.getMTime() > utime) { var _ppty$getEdgeColorByR; // Matricies var keyMats = model.WebGPUActor.getKeyMatrices(model.WebGPURenderer); 
model.UBO.setArray('BCWCMatrix', keyMats.bcwc); model.UBO.setArray('BCSCMatrix', keyMats.bcsc); model.UBO.setArray('MCWCNormals', keyMats.normalMatrix); if (model.is2D) { model.UBO.setValue('ZValue', model.WebGPUActor.getRenderable().getProperty().getDisplayLocation() === DisplayLocation.FOREGROUND ? 1.0 : 0.0); var _aColor = ppty.getColorByReference(); model.UBO.setValue('AmbientIntensity', 1.0); model.UBO.setArray('DiffuseColor', [_aColor[0], _aColor[1], _aColor[2], 1.0]); model.UBO.setValue('DiffuseIntensity', 0.0); model.UBO.setValue('SpecularIntensity', 0.0); } else { // Base Colors var _aColor2 = ppty.getAmbientColorByReference(); model.UBO.setValue('AmbientIntensity', ppty.getAmbient()); model.UBO.setArray('AmbientColor', [_aColor2[0], _aColor2[1], _aColor2[2], 1.0]); model.UBO.setValue('DiffuseIntensity', ppty.getDiffuse()); _aColor2 = ppty.getDiffuseColorByReference(); model.UBO.setArray('DiffuseColor', [_aColor2[0], _aColor2[1], _aColor2[2], 1.0]); // Roughness model.UBO.setValue('Roughness', ppty.getRoughness()); model.UBO.setValue('BaseIOR', ppty.getBaseIOR()); // Metallic model.UBO.setValue('Metallic', ppty.getMetallic()); // Normal model.UBO.setValue('NormalStrength', ppty.getNormalStrength()); // Emission model.UBO.setValue('Emission', ppty.getEmission()); // Specular model.UBO.setValue('SpecularIntensity', ppty.getSpecular()); _aColor2 = ppty.getSpecularColorByReference(); model.UBO.setArray('SpecularColor', [_aColor2[0], _aColor2[1], _aColor2[2], 1.0]); } // Edge and line rendering var aColor = (_ppty$getEdgeColorByR = ppty.getEdgeColorByReference) === null || _ppty$getEdgeColorByR === void 0 ? void 0 : _ppty$getEdgeColorByR.call(ppty); if (aColor) { model.UBO.setArray('EdgeColor', [aColor[0], aColor[1], aColor[2], 1.0]); } model.UBO.setValue('LineWidth', ppty.getLineWidth()); model.UBO.setValue('Opacity', ppty.getOpacity()); model.UBO.setValue('PropID', model.WebGPUActor.getPropID()); var device = model.WebGPURenderWindow.getDevice(); model.UBO.sendIfNeeded(device); } }; publicAPI.haveWideLines = function () { var actor = model.WebGPUActor.getRenderable(); var representation = actor.getProperty().getRepresentation(); if (actor.getProperty().getLineWidth() <= 1.0) { return false; } if (model.primitiveType === PrimitiveTypes.Verts) { return false; } if (model.primitiveType === PrimitiveTypes.Triangles || model.primitiveType === PrimitiveTypes.TriangleStrips) { return representation === Representation.WIREFRAME; } return true; }; publicAPI.replaceShaderPosition = function (hash, pipeline, vertexInput) { var vDesc = pipeline.getShaderDescription('vertex'); vDesc.addBuiltinOutput('vec4<f32>', '@builtin(position) Position'); if (!vDesc.hasOutput('vertexVC')) vDesc.addOutput('vec4<f32>', 'vertexVC'); var code = vDesc.getCode(); if (model.useRendererMatrix) { code = vtkWebGPUShaderCache.substitute(code, '//VTK::Position::Impl', [' var pCoord: vec4<f32> = rendererUBO.SCPCMatrix*mapperUBO.BCSCMatrix*vertexBC;', ' output.vertexVC = rendererUBO.SCVCMatrix * mapperUBO.BCSCMatrix * vec4<f32>(vertexBC.xyz, 1.0);', '//VTK::Position::Impl']).result; if (model.forceZValue) { code = vtkWebGPUShaderCache.substitute(code, '//VTK::Position::Impl', ['pCoord = vec4<f32>(pCoord.xyz/pCoord.w, 1.0);', 'pCoord.z = mapperUBO.ZValue;', '//VTK::Position::Impl']).result; } } else { code = vtkWebGPUShaderCache.substitute(code, '//VTK::Position::Impl', [' var pCoord: vec4<f32> = mapperUBO.BCSCMatrix*vertexBC;', ' pCoord.x = 2.0* pCoord.x / rendererUBO.viewportSize.x - 1.0;', ' pCoord.y = 2.0* pCoord.y / 
rendererUBO.viewportSize.y - 1.0;', ' pCoord.z = 0.5 - 0.5 * pCoord.z;', '//VTK::Position::Impl']).result; if (model.forceZValue) { code = vtkWebGPUShaderCache.substitute(code, '//VTK::Position::Impl', [' pCoord.z = mapperUBO.ZValue;', '//VTK::Position::Impl']).result; } } if (publicAPI.haveWideLines()) { vDesc.addBuiltinInput('u32', '@builtin(instance_index) instanceIndex'); // widen the edge code = vtkWebGPUShaderCache.substitute(code, '//VTK::Position::Impl', [' var tmpPos: vec4<f32> = pCoord;', ' var numSteps: f32 = ceil(mapperUBO.LineWidth - 1.0);', ' var offset: f32 = (mapperUBO.LineWidth - 1.0) * (f32(input.instanceIndex / 2u) - numSteps/2.0) / numSteps;', ' var tmpPos2: vec3<f32> = tmpPos.xyz / tmpPos.w;', ' tmpPos2.x = tmpPos2.x + 2.0 * (f32(input.instanceIndex) % 2.0) * offset / rendererUBO.viewportSize.x;', ' tmpPos2.y = tmpPos2.y + 2.0 * (f32(input.instanceIndex + 1u) % 2.0) * offset / rendererUBO.viewportSize.y;', ' tmpPos2.z = min(1.0, tmpPos2.z + 0.00001);', // could become a setting ' pCoord = vec4<f32>(tmpPos2.xyz * tmpPos.w, tmpPos.w);', '//VTK::Position::Impl']).result; } code = vtkWebGPUShaderCache.substitute(code, '//VTK::Position::Impl', [' output.Position = pCoord;']).result; vDesc.setCode(code); }; model.shaderReplacements.set('replaceShaderPosition', publicAPI.replaceShaderPosition); publicAPI.replaceShaderNormal = function (hash, pipeline, vertexInput) { var normalBuffer = vertexInput.getBuffer('normalMC'); var actor = model.WebGPUActor.getRenderable(); if (normalBuffer) { var vDesc = pipeline.getShaderDescription('vertex'); if (!vDesc.hasOutput('normalVC')) { vDesc.addOutput('vec3<f32>', 'normalVC', normalBuffer.getArrayInformation()[0].interpolation); } if (!vDesc.hasOutput('tangentVC')) { vDesc.addOutput('vec3<f32>', 'tangentVC', normalBuffer.getArrayInformation()[0].interpolation); } if (!vDesc.hasOutput('bitangentVC')) { vDesc.addOutput('vec3<f32>', 'bitangentVC', normalBuffer.getArrayInformation()[0].interpolation); } var code = vDesc.getCode(); code = vtkWebGPUShaderCache.substitute(code, '//VTK::Normal::Impl', [' output.normalVC = normalize((rendererUBO.WCVCNormals * mapperUBO.MCWCNormals * normalMC).xyz);', // This is just an approximation, but it happens to work extremely well // It only works well for normals that are head on and not super angled though // Definitely needs to be replaced ' var c1: vec3<f32> = cross(output.normalVC, vec3<f32>(0, 0, 1));', ' var c2: vec3<f32> = cross(output.normalVC, vec3<f32>(0, 1, 0));', ' var tangent: vec3<f32> = mix(c1, c2, distance(c1, c2));', ' output.tangentVC = normalize(tangent);', ' output.bitangentVC = normalize(cross(output.normalVC, tangent));']).result; vDesc.setCode(code); var fDesc = pipeline.getShaderDescription('fragment'); code = fDesc.getCode(); if (actor.getProperty().getNormalTexture()) { code = vtkWebGPUShaderCache.substitute(code, '//VTK::Normal::Impl', [' var normal: vec3<f32> = input.normalVC;', ' if (!input.frontFacing) { normal = -normal; }', ' var tangent: vec3<f32> = input.tangentVC;', ' var bitangent: vec3<f32> = input.bitangentVC;', ' var TCVCMatrix: mat3x3<f32> = mat3x3<f32>(', ' tangent.x, bitangent.x, normal.x,', ' tangent.y, bitangent.y, normal.y,', ' tangent.z, bitangent.z, normal.z,', ' );', ' var mappedNormal: vec3<f32> = TCVCMatrix * (_normalMap.xyz * 2 - 1);', ' normal = mix(normal, mappedNormal, mapperUBO.NormalStrength);', ' normal = normalize(normal);']).result; } else { code = vtkWebGPUShaderCache.substitute(code, '//VTK::Normal::Impl', [' var normal: vec3<f32> = 
input.normalVC;', ' if (!input.frontFacing) { normal = -normal; }', ' normal = normalize(normal);']).result; } fDesc.setCode(code); } }; model.shaderReplacements.set('replaceShaderNormal', publicAPI.replaceShaderNormal); // we only apply lighting when there is a "var normal" declaration in the // fragment shader code. That is the lighting trigger. publicAPI.replaceShaderLight = function (hash, pipeline, vertexInput) { if (hash.includes('sel')) return; var vDesc = pipeline.getShaderDescription('vertex'); if (!vDesc.hasOutput('vertexVC')) vDesc.addOutput('vec4<f32>', 'vertexVC'); var renderer = model.WebGPURenderer.getRenderable(); var fDesc = pipeline.getShaderDescription('fragment'); var code = fDesc.getCode(); // Code that runs if the fragment shader includes normals if (code.includes('var normal:') && model.useRendererMatrix && !isEdges(hash) && !model.is2D && !hash.includes('sel')) { var _renderer$getEnvironm; var lightingCode = [// Constants ' var pi: f32 = 3.14159265359;', // Vectors needed for light calculations ' var fragPos: vec3<f32> = vec3<f32>(input.vertexVC.xyz);', ' var V: vec3<f32> = mix(normalize(-fragPos), vec3<f32>(0, 0, 1), f32(rendererUBO.cameraParallel)); // View Vector', // Values needed for light calculations ' var baseColor: vec3<f32> = _diffuseMap.rgb * diffuseColor.rgb;', ' var roughness: f32 = max(0.000001, mapperUBO.Roughness * _roughnessMap.r);', // Need to have a different way of sampling greyscale values aside from .r ' var metallic: f32 = mapperUBO.Metallic * _metallicMap.r;', ' var alpha: f32 = roughness*roughness;', ' var ior: f32 = mapperUBO.BaseIOR;', ' var k: f32 = alpha*alpha / 2;', // Split diffuse and specular components ' var diffuse: vec3<f32> = vec3<f32>(0.);', ' var specular: vec3<f32> = vec3<f32>(0.);', ' var emission: vec3<f32> = _emissionMap.rgb * mapperUBO.Emission;', // Summing diffuse and specular components of directional lights ' {', ' var i: i32 = 0;', ' loop {', ' if !(i < rendererUBO.LightCount) { break; }', ' switch (i32(rendererLightSSBO.values[i].LightData.x)) {', ' // Point Light', ' case 0 {', ' var color: vec3<f32> = rendererLightSSBO.values[i].LightColor.rgb * rendererLightSSBO.values[i].LightColor.w;', ' var pos: vec3<f32> = (rendererLightSSBO.values[i].LightPos).xyz;', ' var calculated: PBRData = calcPointLight(normal, V, fragPos, ior, roughness, metallic, pos, color, baseColor);', ' diffuse += max(vec3<f32>(0), calculated.diffuse);', ' specular += max(vec3<f32>(0), calculated.specular);', ' }', ' // Directional light', ' case 1 {', ' var dir: vec3<f32> = (rendererUBO.WCVCNormals * vec4<f32>(normalize(rendererLightSSBO.values[i].LightDir.xyz), 0.)).xyz;', ' dir = normalize(dir);', ' var color: vec3<f32> = rendererLightSSBO.values[i].LightColor.rgb * rendererLightSSBO.values[i].LightColor.w;', ' var calculated: PBRData = calcDirectionalLight(normal, V, ior, roughness, metallic, dir, color, baseColor); // diffuseColor.rgb needs to be fixed with a more dynamic diffuse color', ' diffuse += max(vec3<f32>(0), calculated.diffuse);', ' specular += max(vec3<f32>(0), calculated.specular);', ' }', ' // Spot Light', ' case 2 {', ' var color: vec3<f32> = rendererLightSSBO.values[i].LightColor.rgb * rendererLightSSBO.values[i].LightColor.w;', ' var pos: vec3<f32> = (rendererLightSSBO.values[i].LightPos).xyz;', ' var dir: vec3<f32> = (rendererUBO.WCVCNormals * vec4<f32>(normalize(rendererLightSSBO.values[i].LightDir.xyz), 0.)).xyz;', ' dir = normalize(dir);', ' var cones: vec2<f32> = vec2<f32>(rendererLightSSBO.values[i].LightData.y, 
rendererLightSSBO.values[i].LightData.z);', ' var calculated: PBRData = calcSpotLight(normal, V, fragPos, ior, roughness, metallic, pos, dir, cones, color, baseColor);', ' diffuse += max(vec3<f32>(0), calculated.diffuse);', ' specular += max(vec3<f32>(0), calculated.specular);', ' }', ' default { continue; }', ' }', ' continuing { i++; }', ' }', ' }', // Final variables for combining specular and diffuse ' var fresnel: f32 = schlickFresnelIOR(V, normal, ior, k); // Fresnel', ' fresnel = min(1, fresnel);', ' // This could be controlled with its own variable (that isnt base color) for better artistic control', ' var fresnelMetallic: vec3<f32> = schlickFresnelRGB(V, normal, baseColor); // Fresnel for metal, takes color into account', ' var kS: vec3<f32> = mix(vec3<f32>(fresnel), fresnelMetallic, metallic);', ' kS = min(vec3<f32>(1), kS);', ' var kD: vec3<f32> = (1.0 - kS) * (1.0 - metallic);', ' var PBR: vec3<f32> = mapperUBO.DiffuseIntensity*kD*diffuse + kS*specular;', ' PBR += emission;', ' computedColor = vec4<f32>(PBR, mapperUBO.Opacity);']; if ((_renderer$getEnvironm = renderer.getEnvironmentTexture()) !== null && _renderer$getEnvironm !== void 0 && _renderer$getEnvironm.getImageLoaded()) { lightingCode.push(' // To get diffuse IBL, the texture is sampled with normals in worldspace', ' var diffuseIBLCoords: vec3<f32> = (transpose(rendererUBO.WCVCNormals) * vec4<f32>(normal, 1.)).xyz;', ' var diffuseCoords: vec2<f32> = vecToRectCoord(diffuseIBLCoords);', ' // To get specular IBL, the texture is sampled as the worldspace reflection between the normal and view vectors', ' // Reflections are first calculated in viewspace, then converted to worldspace to sample the environment', ' var VreflN: vec3<f32> = normalize(reflect(-V, normal));', ' var reflectionIBLCoords = (transpose(rendererUBO.WCVCNormals) * vec4<f32>(VreflN, 1.)).xyz;', ' var specularCoords: vec2<f32> = vecToRectCoord(reflectionIBLCoords);', ' var diffuseIBL = textureSampleLevel(EnvironmentTexture, EnvironmentTextureSampler, diffuseCoords, rendererUBO.MaxEnvironmentMipLevel);', // Level multiplier should be set by UBO ' var level = roughness * rendererUBO.MaxEnvironmentMipLevel;', ' var specularIBL = textureSampleLevel(EnvironmentTexture, EnvironmentTextureSampler, specularCoords, level);', // Manual mip smoothing since not all formats support smooth level sampling ' var specularIBLContribution: vec3<f32> = specularIBL.rgb*rendererUBO.BackgroundSpecularStrength;', ' computedColor += vec4<f32>(specularIBLContribution*kS, 0);', ' var diffuseIBLContribution: vec3<f32> = diffuseIBL.rgb*rendererUBO.BackgroundDiffuseStrength;', ' diffuseIBLContribution *= baseColor * _ambientOcclusionMap.rgb;', // Multipy by baseColor may be changed ' computedColor += vec4<f32>(diffuseIBLContribution*kD, 0);'); } code = vtkWebGPUShaderCache.substitute(code, '//VTK::Light::Impl', lightingCode).result; fDesc.setCode(code); // If theres no normals, just set the specular color to be flat } else { code = vtkWebGPUShaderCache.substitute(code, '//VTK::Light::Impl', [' var diffuse: vec3<f32> = diffuseColor.rgb;', ' var specular: vec3<f32> = mapperUBO.SpecularColor.rgb * mapperUBO.SpecularColor.a;', ' computedColor = vec4<f32>(diffuse * _diffuseMap.rgb, mapperUBO.Opacity);']).result; fDesc.setCode(code); } }; model.shaderReplacements.set('replaceShaderLight', publicAPI.replaceShaderLight); publicAPI.replaceShaderColor = function (hash, pipeline, vertexInput) { // By default, set the colors to be flat if (isEdges(hash)) { var _fDesc = 
pipeline.getShaderDescription('fragment'); var _code = _fDesc.getCode(); _code = vtkWebGPUShaderCache.substitute(_code, '//VTK::Color::Impl', ['ambientColor = mapperUBO.EdgeColor;', 'diffuseColor = mapperUBO.EdgeColor;']).result; _fDesc.setCode(_code); return; } // If there's no vertex color buffer return the shader as is var colorBuffer = vertexInput.getBuffer('colorVI'); if (!colorBuffer) return; // Modifies the vertex shader to include the vertex colors and interpolation in the outputs var vDesc = pipeline.getShaderDescription('vertex'); vDesc.addOutput('vec4<f32>', 'color', colorBuffer.getArrayInformation()[0].interpolation); var code = vDesc.getCode(); code = vtkWebGPUShaderCache.substitute(code, '//VTK::Color::Impl', [' output.color = colorVI;']).result; vDesc.setCode(code); // Sets the fragment shader to accept the color inputs from the vertex shader var fDesc = pipeline.getShaderDescription('fragment'); code = fDesc.getCode(); code = vtkWebGPUShaderCache.substitute(code, '//VTK::Color::Impl', ['ambientColor = input.color;', 'diffuseColor = input.color;', 'opacity = mapperUBO.Opacity * input.color.a;']).result; fDesc.setCode(code); }; model.shaderReplacements.set('replaceShaderColor', publicAPI.replaceShaderColor); publicAPI.replaceShaderTCoord = function (hash, pipeline, vertexInput) { var _actor$getProperty$ge, _actor$getProperty, _actor$getProperty$ge2, _actor$getProperty$ge4, _actor$getProperty3, _actor$getProperty$ge5, _actor$getProperty$ge6, _actor$getProperty4, _actor$getProperty$ge7, _actor$getProperty$ge8, _actor$getProperty5, _actor$getProperty$ge9, _actor$getProperty$ge10, _actor$getProperty6, _actor$getProperty$ge11, _actor$getProperty$ge12, _actor$getProperty7, _actor$getProperty$ge13; if (!vertexInput.hasAttribute('tcoord')) return; var vDesc = pipeline.getShaderDescription('vertex'); var tcoords = vertexInput.getBuffer('tcoord'); var numComp = vtkWebGPUTypes.getNumberOfComponentsFromBufferFormat(tcoords.getArrayInformation()[0].format); var code = vDesc.getCode(); vDesc.addOutput("vec".concat(numComp, "<f32>"), 'tcoordVS'); code = vtkWebGPUShaderCache.substitute(code, '//VTK::TCoord::Impl', [' output.tcoordVS = tcoord;']).result; vDesc.setCode(code); var fDesc = pipeline.getShaderDescription('fragment'); code = fDesc.getCode(); var actor = model.WebGPUActor.getRenderable(); var checkDims = function checkDims(texture) { if (!texture) return false; var dims = texture.getDimensionality(); return dims === numComp; }; var usedTextures = []; if ((_actor$getProperty$ge = (_actor$getProperty = actor.getProperty()).getDiffuseTexture) !== null && _actor$getProperty$ge !== void 0 && (_actor$getProperty$ge2 = _actor$getProperty$ge.call(_actor$getProperty)) !== null && _actor$getProperty$ge2 !== void 0 && _actor$getProperty$ge2.getImageLoaded() || actor.getTextures()[0] || model.colorTexture) { var _actor$getProperty$ge3, _actor$getProperty2; if ( // Chained or statements here are questionable checkDims((_actor$getProperty$ge3 = (_actor$getProperty2 = actor.getProperty()).getDiffuseTexture) === null || _actor$getProperty$ge3 === void 0 ? 
void 0 : _actor$getProperty$ge3.call(_actor$getProperty2)) || checkDims(actor.getTextures()[0]) || checkDims(model.colorTexture)) { usedTextures.push('_diffuseMap = textureSample(DiffuseTexture, DiffuseTextureSampler, input.tcoordVS);'); } } if ((_actor$getProperty$ge4 = (_actor$getProperty3 = actor.getProperty()).getRoughnessTexture) !== null && _actor$getProperty$ge4 !== void 0 && (_actor$getProperty$ge5 = _actor$getProperty$ge4.call(_actor$getProperty3)) !== null && _actor$getProperty$ge5 !== void 0 && _actor$getProperty$ge5.getImageLoaded()) { if (checkDims(actor.getProperty().getRoughnessTexture())) { usedTextures.push('_roughnessMap = textureSample(RoughnessTexture, RoughnessTextureSampler, input.tcoordVS);'); } } if ((_actor$getProperty$ge6 = (_actor$getProperty4 = actor.getProperty()).getMetallicTexture) !== null && _actor$getProperty$ge6 !== void 0 && (_actor$getProperty$ge7 = _actor$getProperty$ge6.call(_actor$getProperty4)) !== null && _actor$getProperty$ge7 !== void 0 && _actor$getProperty$ge7.getImageLoaded()) { if (checkDims(actor.getProperty().getMetallicTexture())) { usedTextures.push('_metallicMap = textureSample(MetallicTexture, MetallicTextureSampler, input.tcoordVS);'); } } if ((_actor$getProperty$ge8 = (_actor$getProperty5 = actor.getProperty()).getNormalTexture) !== null && _actor$getProperty$ge8 !== void 0 && (_actor$getProperty$ge9 = _actor$getProperty$ge8.call(_actor$getProperty5)) !== null && _actor$getProperty$ge9 !== void 0 && _actor$getProperty$ge9.getImageLoaded()) { if (checkDims(actor.getProperty().getNormalTexture())) { usedTextures.push('_normalMap = textureSample(NormalTexture, NormalTextureSampler, input.tcoordVS);'); } } if ((_actor$getProperty$ge10 = (_actor$getProperty6 = actor.getProperty()).getAmbientOcclusionTexture) !== null && _actor$getProperty$ge10 !== void 0 && (_actor$getProperty$ge11 = _actor$getProperty$ge10.call(_actor$getProperty6)) !== null && _actor$getProperty$ge11 !== void 0 && _actor$getProperty$ge11.getImageLoaded()) { if (checkDims(actor.getProperty().getAmbientOcclusionTexture())) { usedTextures.push('_ambientOcclusionMap = textureSample(AmbientOcclusionTexture, AmbientOcclusionTextureSampler, input.tcoordVS);'); } } if ((_actor$getProperty$ge12 = (_actor$getProperty7 = actor.getProperty()).getEmissionTexture) !== null && _actor$getProperty$ge12 !== void 0 && (_actor$getProperty$ge13 = _actor$getProperty$ge12.call(_actor$getProperty7)) !== null && _actor$getProperty$ge13 !== void 0 && _actor$getProperty$ge13.getImageLoaded()) { if (checkDims(actor.getProperty().getEmissionTexture())) { usedTextures.push('_emissionMap = textureSample(EmissionTexture, EmissionTextureSampler, input.tcoordVS);'); } } code = vtkWebGPUShaderCache.substitute(code, '//VTK::TCoord::Impl', usedTextures).result; fDesc.setCode(code); }; model.shaderReplacements.set('replaceShaderTCoord', publicAPI.replaceShaderTCoord); publicAPI.replaceShaderSelect = function (hash, pipeline, vertexInput) { if (hash.includes('sel')) { var fDesc = pipeline.getShaderDescription('fragment'); var code = fDesc.getCode(); // by default there are no composites, so just 0 code = vtkWebGPUShaderCache.substitute(code, '//VTK::Select::Impl', [' var compositeID: u32 = 0u;']).result; fDesc.setCode(code); } }; model.shaderReplacements.set('replaceShaderSelect', publicAPI.replaceShaderSelect); publicAPI.getUsage = function (rep, i) { if (rep === Representation.POINTS || i === PrimitiveTypes.Points) { return BufferUsage.Verts; } if (i === PrimitiveTypes.Lines) { return BufferUsage.Lines; } 
if (rep === Representation.WIREFRAME) { if (i === PrimitiveTypes.Triangles) { return BufferUsage.LinesFromTriangles; } return BufferUsage.LinesFromStrips; } if (i === PrimitiveTypes.Triangles) { return BufferUsage.Triangles; } if (i === PrimitiveTypes.TriangleStrips) { return BufferUsage.Strips; } if (i === PrimitiveTypes.TriangleEdges) { return BufferUsage.LinesFromTriangles; } // only strip edges left which are lines return BufferUsage.LinesFromStrips; }; publicAPI.getHashFromUsage = function (usage) { return "pt".concat(usage); }; publicAPI.getTopologyFromUsage = function (usage) { switch (usage) { case BufferUsage.Triangles: return 'triangle-list'; case BufferUsage.Verts: return 'point-list'; case BufferUsage.Lines: default: return 'line-list'; } }; // TODO: calculate tangents publicAPI.buildVertexInput = function () { var _model$renderable$get, _model$renderable; var pd = model.currentInput; var cells = model.cellArray; var primType = model.primitiveType; var actor = model.WebGPUActor.getRenderable(); var representation = actor.getProperty().getRepresentation(); var device = model.WebGPURenderWindow.getDevice(); var edges = false; if (primType === PrimitiveTypes.TriangleEdges) { edges = true; representation = Representation.WIREFRAME; } var vertexInput = model.vertexInput; var points = pd.getPoints(); var indexBuffer; // get the flat mapping indexBuffer for the cells if (cells) { var buffRequest = { hash: "R".concat(representation, "P").concat(primType).concat(cells.getMTime()), usage: BufferUsage.Index, cells: cells, numberOfPoints: points.getNumberOfPoints(), primitiveType: primType, representation: representation }; indexBuffer = device.getBufferManager().getBuffer(buffRequest); vertexInput.setIndexBuffer(indexBuffer); } else { vertexInput.setIndexBuffer(null); } // hash = all things that can change the values on the buffer // since mtimes are unique we can use // - indexBuffer mtime - because cells drive how we pack // - relevant dataArray mtime - the source data // - shift - not currently captured // - scale - not currently captured // - format // - usage // - packExtra - covered by format // points if (points) { var shift = model.WebGPUActor.getBufferShift(model.WebGPURenderer); var _buffRequest = { hash: "".concat(points.getMTime(), "I").concat(indexBuffer.getMTime()).concat(shift.join(), "float32x4"), usage: BufferUsage.PointArray, format: 'float32x4', dataArray: points, indexBuffer: indexBuffer, shift: shift, packExtra: true }; var buff = device.getBufferManager().getBuffer(_buffRequest); vertexInput.addBuffer(buff, ['vertexBC']); } else { vertexInput.removeBufferIfPresent('vertexBC'); } // normals, only used for surface rendering var usage = publicAPI.getUsage(representation, primType); model._usesCellNormals = false; if (!model.is2D && ( // no lighting on Property2D usage === BufferUsage.Triangles || usage === BufferUsage.Strips)) { var normals = pd.getPointData().getNormals(); // https://vtk.org/doc/nightly/html/classvtkPolyDataTangents.html // Need to find some way of using precomputed tangents (or computing new ones) var _buffRequest2 = { format: 'snorm8x4', indexBuffer: indexBuffer, packExtra: true, shift: 0, scale: 127 }; if (normals) { _buffRequest2.hash = "".concat(normals.getMTime(), "I").concat(indexBuffer.getMTime(), "snorm8x4"); _buffRequest2.dataArray = normals; _buffRequest2.usage = BufferUsage.PointArray; var _buff = device.getBufferManager().getBuffer(_buffRequest2); vertexInput.addBuffer(_buff, ['normalMC']); } else if (primType === PrimitiveTypes.Triangles) 
{ model._usesCellNormals = true; _buffRequest2.hash = "PFN".concat(points.getMTime(), "I").concat(indexBuffer.getMTime(), "snorm8x4"); _buffRequest2.dataArray = points; _buffRequest2.cells = cells; _buffRequest2.usage = BufferUsage.NormalsFromPoints; var _buff2 = device.getBufferManager().getBuffer(_buffRequest2); vertexInput.addBuffer(_buff2, ['normalMC']); } else { vertexInput.removeBufferIfPresent('normalMC'); } } else { vertexInput.removeBufferIfPresent('normalMC'); } // deal with colors but only if modified var haveColors = false; if (model.renderable.getScalarVisibility()) { var c = model.renderable.getColorMapColors(); if (c && !edges) { var scalarMode = model.renderable.getScalarMode(); var haveCellScalars = false; // We must figure out how the scalars should be mapped to the polydata. if ((scalarMode === ScalarMode.USE_CELL_DATA || scalarMode === ScalarMode.USE_CELL_FIELD_DATA || scalarMode === ScalarMode.USE_FIELD_DATA || !pd.getPointData().getScalars()) && scalarMode !== ScalarMode.USE_POINT_FIELD_DATA && c) { haveCellScalars = true; } var _buffRequest3 = { usage: BufferUsage.PointArray, format: 'unorm8x4', hash: "".concat(haveCellScalars).concat(c.getMTime(), "I").concat(indexBuffer.getMTime(), "unorm8x4"), dataArray: c, indexBuffer: indexBuffer, cellData: haveCellScalars, cellOffset: 0 }; var _buff3 = device.getBufferManager().getBuffer(_buffRequest3); vertexInput.addBuffer(_buff3, ['colorVI']); haveColors = true; } } if (!haveColors) { vertexInput.removeBufferIfPresent('colorVI'); } var tcoords = null; if ((_model$renderable$get = (_model$renderable = model.renderable).getInterpolateScalarsBeforeMapping) !== null && _model$renderable$get !== void 0 && _model$renderable$get.call(_model$renderable) && model.renderable.getColorCoordinates()) { tcoords = model.renderable.getColorCoordinates(); } else { tcoords = pd.getPointData().getTCoords(); } if (tcoords && !edges) { var _buff4 = device.getBufferManager().getBufferForPointArray(tcoords, vertexInput.getIndexBuffer()); vertexInput.addBuffer(_buff4, ['tcoord']); } else { vertexInput.removeBufferIfPresent('tcoord'); } }; publicAPI.updateTextures = function () { var _model$renderable$get2, _model$renderable2, _actor$getProperty$ge14, _actor$getProperty8, _actor$getProperty$ge15, _actor$getProperty9, _actor$getProperty$ge16, _actor$getProperty10, _actor$getProperty$ge17, _actor$getProperty11, _actor$getProperty$ge18, _actor$getProperty12, _actor$getProperty$ge19, _actor$getProperty13, _renderer$getEnvironm2; // we keep track of new and used textures so // that we can clean up any unused textures so we don't hold onto them var usedTextures = []; var newTextures = []; // do we have a scalar color texture var idata = (_model$renderable$get2 = (_model$renderable2 = model.renderable).getColorTextureMap) === null || _model$renderable$get2 === void 0 ? void 0 : _model$renderable$get2.call(_model$renderable2); if (idata) { if (!model.colorTexture) { model.colorTexture = vtkTexture.newInstance({ label: 'polyDataColor' }); } model.colorTexture.setInputData(idata); newTextures.push(['Diffuse', model.colorTexture]); } // actor textures? 
var actor = model.WebGPUActor.getRenderable(); var renderer = model.WebGPURenderer.getRenderable(); // Reusing the old code for new and old textures, just loading in from properties instead of actor.getTextures() var textures = []; // Feels like there should be a better way than individually adding all if ((_actor$getProperty$ge14 = (_actor$getProperty8 = actor.getProperty()).getDiffuseTexture) !== null && _actor$getProperty$ge14 !== void 0 && _actor$getProperty$ge14.call(_actor$getProperty8)) { var pair = ['Diffuse', actor.getProperty().getDiffuseTexture()]; textures.push(pair); } if (actor.getTextures()[0]) { var _pair = ['Diffuse', actor.getTextures()[0]]; textures.push(_pair); } if (model.colorTexture) { var _pair2 = ['Diffuse', model.colorTexture]; textures.push(_pair2); } if ((_actor$getProperty$ge15 = (_actor$getProperty9 = actor.getProperty()).getRoughnessTexture) !== null && _actor$getProperty$ge15 !== void 0 && _actor$getProperty$ge15.call(_actor$getProperty9)) { var _pair3 = ['Roughness', actor.getProperty().getRoughnessTexture()]; textures.push(_pair3); } if ((_actor$getProperty$ge16 = (_actor$getProperty10 = actor.getProperty()).getMetallicTexture) !== null && _actor$getProperty$ge16 !== void 0 && _actor$getProperty$ge16.call(_actor$getProperty10)) { var _pair4 = ['Metallic', actor.getProperty().getMetallicTexture()]; textures.push(_pair4); } if ((_actor$getProperty$ge17 = (_actor$getProperty11 = actor.getProperty()).getNormalTexture) !== null && _actor$getProperty$ge17 !== void 0 && _actor$getProperty$ge17.call(_actor$getProperty11)) { var _pair5 = ['Normal', actor.getProperty().getNormalTexture()]; textures.push(_pair5); } if ((_actor$getProperty$ge18 = (_actor$getProperty12 = actor.getProperty()).getAmbientOcclusionTexture) !== null && _actor$getProperty$ge18 !== void 0 && _actor$getProperty$ge18.call(_actor$getProperty12)) { var _pair6 = ['AmbientOcclusion', actor.getProperty().getAmbientOcclusionTexture()]; textures.push(_pair6); } if ((_actor$getProperty$ge19 = (_actor$getProperty13 = actor.getProperty()).getEmissionTexture) !== null && _actor$getProperty$ge19 !== void 0 && _actor$getProperty$ge19.call(_actor$getProperty13)) { var _pair7 = ['Emission', actor.getProperty().getEmissionTexture()]; textures.push(_pair7); } if ((_renderer$getEnvironm2 = renderer.getEnvironmentTexture) !== null && _renderer$getEnvironm2 !== void 0 && _renderer$getEnvironm2.call(renderer)) { var _pair8 = ['Environment', renderer.getEnvironmentTexture()]; textures.push(_pair8); } for (var i = 0; i < textures.length; i++) { if (textures[i][1].getInputData() || textures[i][1].getJsImageData() || textures[i][1].getCanvas()) { newTextures.push(textures[i]); } if (textures[i][1].getImage() && textures[i][1].getImageLoaded()) { newTextures.push(textures[i]); } } for (var _i = 0; _i < newTextures.length; _i++) { var srcTexture = newTextures[_i][1]; var textureName = newTextures[_i][0]; var newTex = model.device.getTextureManager().getTextureForVTKTexture(srcTexture); // Generates hash if (newTex.getReady()) { // is this a new texture var found = false; for (var t = 0; t < model.textures.length; t++) { if (model.textures[t] === newTex) { found = true; usedTextures[t] = true; } } if (!found) { usedTextures[model.textures.length] = true; var tview = newTex.createView("".concat(textureName, "Texture")); model.textures.push(newTex); model.textureViews.push(tview); var interpolate = srcTexture.getInterpolate() ? 
'linear' : 'nearest'; var addressMode = null; if (!addressMode && srcTexture.getEdgeClamp() && srcTexture.getRepeat()) addressMode = 'mirror-repeat'; if (!addressMode && srcTexture.getEdgeClamp()) addressMode = 'clamp-to-edge'; if (!addressMode && srcTexture.getRepeat()) addressMode = 'repeat'; if (textureName !== 'Environment') { tview.addSampler(model.device, { addressModeU: addressMode, addressModeV: addressMode, addressModeW: addressMode, minFilter: interpolate, magFilter: interpolate }); } else { tview.addSampler(model.device, { addressModeU: 'repeat', addressModeV: 'clamp-to-edge', addressModeW: 'repeat', minFilter: interpolate, magFilter: interpolate, mipmapFilter: 'linear' }); } } } } // remove unused textures for (var _i2 = model.textures.length - 1; _i2 >= 0; _i2--) { if (!usedTextures[_i2]) { model.textures.splice(_i2, 1); model.textureViews.splice(_i2, 1); } } }; // compute a unique hash for a pipeline, this needs to be unique enough to // capture any pipeline code changes (which includes shader changes) // or vertex input changes/ bind groups/ etc publicAPI.computePipelineHash = function () { var pipelineHash = "pd".concat(model.useRendererMatrix ? 'r' : '').concat(model.forceZValue ? 'z' : ''); if (model.primitiveType === PrimitiveTypes.TriangleEdges || model.primitiveType === PrimitiveTypes.TriangleStripEdges) { pipelineHash += 'edge'; } else { if (model.vertexInput.hasAttribute("normalMC")) { pipelineHash += "n"; } if (model.vertexInput.hasAttribute("colorVI")) { pipelineHash += "c"; } if (model.vertexInput.hasAttribute("tcoord")) { var tcoords = model.vertexInput.getBuffer('tcoord'); var numComp = vtkWebGPUTypes.getNumberOfComponen