UNPKG

@arcgis/core

Version: 4.33 (per the copyright notice in the file header)

ArcGIS Maps SDK for JavaScript: A complete 2D and 3D mapping and data visualization API

93 lines (73 loc) 7.24 kB
/* All material copyright ESRI, All Rights Reserved, unless otherwise specified. See https://js.arcgis.com/4.33/esri/copyright.txt for details. */ import{set as e}from"../core/libs/gl-matrix-2/math/vec2.js";import{create as i}from"../core/libs/gl-matrix-2/factories/vec2f64.js";import{ZEROS as r}from"../core/libs/gl-matrix-2/factories/vec4f64.js";import{RejectBySlice as o}from"../views/3d/webgl-engine/core/shaderLibrary/Slice.glsl.js";import{AlignPixel as t}from"../views/3d/webgl-engine/core/shaderLibrary/hud/AlignPixel.glsl.js";import{HUD as n}from"../views/3d/webgl-engine/core/shaderLibrary/hud/HUD.glsl.js";import{HUDVisibility as a}from"../views/3d/webgl-engine/core/shaderLibrary/hud/HUDVisibility.glsl.js";import{multipassGeometryTest as l}from"../views/3d/webgl-engine/core/shaderLibrary/shading/MultipassGeometryTest.glsl.js";import{addScreenSizePerspectiveAlignment as d}from"../views/3d/webgl-engine/core/shaderLibrary/util/ScreenSizePerspective.glsl.js";import{Float2BindUniform as s}from"../views/3d/webgl-engine/core/shaderModules/Float2BindUniform.js";import{Float2PassUniform as c}from"../views/3d/webgl-engine/core/shaderModules/Float2PassUniform.js";import{Float4BindUniform as p}from"../views/3d/webgl-engine/core/shaderModules/Float4BindUniform.js";import{Float4PassUniform as g}from"../views/3d/webgl-engine/core/shaderModules/Float4PassUniform.js";import{FloatPassUniform as f}from"../views/3d/webgl-engine/core/shaderModules/FloatPassUniform.js";import{If as v,glsl as S}from"../views/3d/webgl-engine/core/shaderModules/glsl.js";import{VertexAttribute as m}from"../views/3d/webgl-engine/lib/VertexAttribute.js";import{ShaderBuilder as w}from"../views/webgl/ShaderBuilder.js";function h(i){const h=new w,{vertex:b,fragment:z}=h,{terrainDepthTest:x}=i;return b.include(t),h.include(n,i),h.vertex.include(o,i),h.attributes.add(m.UV0,"vec2"),b.uniforms.add(new p("viewport",(e=>e.camera.fullViewport)),new 
f("lineSize",((e,i)=>e.size>0?Math.max(1,e.size)*i.camera.pixelRatio:0)),new s("pixelToNDC",(i=>e(u,2/i.camera.fullViewport[2],2/i.camera.fullViewport[3]))),new f("borderSize",((e,i)=>e.borderColor?i.camera.pixelRatio:0)),new c("screenOffset",((i,r)=>e(u,i.horizontalScreenOffset*r.camera.pixelRatio,0)))),h.varyings.add("coverageSampling","vec4"),h.varyings.add("lineSizes","vec2"),x&&h.varyings.add("depth","float"),i.occlusionTestEnabled&&h.include(a),i.hasScreenSizePerspective&&d(b),b.main.add(S` ProjectHUDAux projectAux; vec4 endPoint = projectPositionHUD(projectAux); vec3 vpos = projectAux.posModel; if (rejectBySlice(vpos)) { gl_Position = vec4(1e38, 1e38, 1e38, 1.0); return; } ${v(i.occlusionTestEnabled,S`if (!testHUDVisibility(endPoint)) { gl_Position = vec4(1e38, 1e38, 1e38, 1.0); return; }`)} ${i.hasScreenSizePerspective?S`vec3 perspectiveFactor = screenSizePerspectiveScaleFactor(projectAux.absCosAngle, projectAux.distanceToCamera, screenSizePerspectiveAlignment); vec2 screenOffsetScaled = applyScreenSizePerspectiveScaleFactorVec2(screenOffset, perspectiveFactor);`:"vec2 screenOffsetScaled = screenOffset;"} // Add view dependent polygon offset to get exact same original starting point. 
This is mostly used to get the // correct depth value vec3 posView = (view * vec4(position, 1.0)).xyz; ${v(x,"depth = posView.z;")} applyHUDViewDependentPolygonOffset(centerOffsetAndDistance.w, projectAux.absCosAngle, posView); vec4 startPoint = proj * vec4(posView, 1.0); // Apply screen offset to both start and end point vec2 screenOffsetNorm = screenOffsetScaled * 2.0 / viewport.zw; startPoint.xy += screenOffsetNorm * startPoint.w; endPoint.xy += screenOffsetNorm * endPoint.w; // Align start and end to pixel origin vec4 startAligned = alignToPixelOrigin(startPoint, viewport.zw); vec4 endAligned = alignToPixelOrigin(endPoint, viewport.zw); ${v(i.hudDepth,i.hudDepthAlignStart?"endAligned = vec4(endAligned.xy / endAligned.w * startAligned.w, startAligned.zw);":"startAligned = vec4(startAligned.xy / startAligned.w * endAligned.w, endAligned.zw);")} vec4 projectedPosition = mix(startAligned, endAligned, uv0.y); // The direction of the line in screen space vec2 screenSpaceDirection = normalize(endAligned.xy / endAligned.w - startAligned.xy / startAligned.w); vec2 perpendicularScreenSpaceDirection = vec2(screenSpaceDirection.y, -screenSpaceDirection.x); ${i.hasScreenSizePerspective?S`float lineSizeScaled = applyScreenSizePerspectiveScaleFactorFloat(lineSize, perspectiveFactor); float borderSizeScaled = applyScreenSizePerspectiveScaleFactorFloat(borderSize, perspectiveFactor);`:S`float lineSizeScaled = lineSize; float borderSizeScaled = borderSize;`} float halfPixelSize = lineSizeScaled * 0.5; // Compute full ndc offset, adding 1px padding for doing anti-aliasing and the border size float padding = 1.0 + borderSizeScaled; vec2 ndcOffset = (-halfPixelSize - padding + uv0.x * (lineSizeScaled + padding + padding)) * pixelToNDC; // Offset x/y from the center of the line in screen space projectedPosition.xy += perpendicularScreenSpaceDirection * ndcOffset * projectedPosition.w; // Compute a coverage varying which we can use in the fragment shader to determine // how much a 
pixel is actually covered by the line (i.e. to anti alias the line). // This works by computing two coordinates that can be linearly interpolated and then // subtracted to find out how far away from the line edge we are. float edgeDirection = (uv0.x * 2.0 - 1.0); float halfBorderSize = 0.5 * borderSizeScaled; float halfPixelSizeAndBorder = halfPixelSize + halfBorderSize; float outerEdgeCoverageSampler = edgeDirection * (halfPixelSizeAndBorder + halfBorderSize + 1.0); float isOneSided = float(lineSizeScaled < 2.0 && borderSize < 2.0); coverageSampling = vec4( // Edge coordinate outerEdgeCoverageSampler, // Border edge coordinate outerEdgeCoverageSampler - halfPixelSizeAndBorder * isOneSided, // Line offset halfPixelSize - 0.5, // Border offset halfBorderSize - 0.5 + halfPixelSizeAndBorder * (1.0 - isOneSided) ); lineSizes = vec2(lineSizeScaled, borderSizeScaled); gl_Position = projectedPosition;`),z.uniforms.add(new g("uColor",(e=>e.color??r)),new g("borderColor",(e=>e.borderColor??r))),x&&(z.include(l,i),z.uniforms.add(new s("inverseViewport",(e=>e.inverseViewport)))),z.main.add(S` ${v(x,"if( geometryDepthTest(gl_FragCoord.xy * inverseViewport, depth) ){ discard; }")} vec2 coverage = min(1.0 - clamp(abs(coverageSampling.xy) - coverageSampling.zw, 0.0, 1.0), lineSizes); float borderAlpha = uColor.a * borderColor.a * coverage.y; float colorAlpha = uColor.a * coverage.x; float finalAlpha = mix(borderAlpha, 1.0, colorAlpha); ${v(!i.hudDepth,S`vec3 finalRgb = mix(borderColor.rgb * borderAlpha, uColor.rgb, colorAlpha); fragColor = vec4(finalRgb, finalAlpha);`)}`),h}const u=i(),b=Object.freeze(Object.defineProperty({__proto__:null,build:h},Symbol.toStringTag,{value:"Module"}));export{b as L,h as b};