UNPKG

@arcgis/core

Version: 5.0.8 (per the Esri license URL embedded in the bundled source below)

ArcGIS Maps SDK for JavaScript: A complete 2D and 3D mapping and data visualization API

161 lines (135 loc) • 10.7 kB
/* COPYRIGHT Esri - https://js.arcgis.com/5.0.8/LICENSE.txt */ import{markerTextureSize as e,markerSymbolSize as r,markerTipThicknessFactor as i}from"../views/3d/support/engineContent/marker.js";import{SliceDraw as o}from"../views/3d/webgl-engine/core/shaderLibrary/Slice.glsl.js";import{RibbonVertexPosition as t}from"../views/3d/webgl-engine/core/shaderLibrary/attributes/RibbonVertexPosition.glsl.js";import{OutputHighlight as a}from"../views/3d/webgl-engine/core/shaderLibrary/output/OutputHighlight.glsl.js";import{MarkerSizing as n}from"../views/3d/webgl-engine/core/shaderLibrary/shading/MarkerSizing.glsl.js";import{positionOutsideClipSpace as s}from"../views/3d/webgl-engine/core/shaderLibrary/shading/PositionOutsideClipSpace.js";import{terrainDepthTest as l}from"../views/3d/webgl-engine/core/shaderLibrary/shading/TerrainDepthTest.glsl.js";import{ColorConversion as c}from"../views/3d/webgl-engine/core/shaderLibrary/util/ColorConversion.glsl.js";import{NoPerspectiveWrite as p,NoPerspectiveRead as d}from"../views/3d/webgl-engine/core/shaderLibrary/util/NoPerspective.glsl.js";import{addProjViewLocalOrigin as v,addViewNormal as m,addPixelRatio as h}from"../views/3d/webgl-engine/core/shaderLibrary/util/View.glsl.js";import{Float2BindUniform as f}from"../views/3d/webgl-engine/core/shaderModules/Float2BindUniform.js";import{Float4BindUniform as g}from"../views/3d/webgl-engine/core/shaderModules/Float4BindUniform.js";import{Float4PassUniform as u}from"../views/3d/webgl-engine/core/shaderModules/Float4PassUniform.js";import{FloatBindUniform as w}from"../views/3d/webgl-engine/core/shaderModules/FloatBindUniform.js";import{glsl as y,If as S}from"../views/3d/webgl-engine/core/shaderModules/glsl.js";import{Matrix4BindUniform as x}from"../views/3d/webgl-engine/core/shaderModules/Matrix4BindUniform.js";import{Texture2DPassUniform as b}from"../views/3d/webgl-engine/core/shaderModules/Texture2DPassUniform.js";import{outputColorHighlightOLID as 
P}from"../views/3d/webgl-engine/shaders/OutputColorHighlightOLID.glsl.js";import{ShaderBuilder as z}from"../views/webgl/ShaderBuilder.js";function j(j){const L=new z,{space:D,anchor:k,hasTip:M,hasScreenSizePerspective:C}=j,$=2===D,W=1===D;L.include(t,j),L.include(n,j),L.include(l,j);const{vertex:O,fragment:U,varyings:T}=L;v(O,j),L.attributes.add("position","vec3"),L.attributes.add("previousDelta","vec4"),L.attributes.add("uv0","vec2"),T.add("vColor","vec4"),T.add("vpos","vec3",{invariant:!0}),T.add("vUV","vec2"),T.add("vSize","float"),M&&T.add("vLineWidth","float"),O.uniforms.add(new f("nearFar",({camera:e})=>e.nearFar),new g("viewport",({camera:e})=>e.fullViewport)).code.add(y`vec4 projectAndScale(vec4 pos) { vec4 posNdc = proj * pos; posNdc.xy *= viewport.zw / posNdc.w; return posNdc; }`),O.code.add(y`void clip(vec4 pos, inout vec4 prev) { float vnp = nearFar[0] * 0.99; if (prev.z > -nearFar[0]) { float interpolation = (-vnp - pos.z) / (prev.z - pos.z); prev = mix(pos, prev, interpolation); } }`),$?(L.attributes.add("normal","vec3"),m(O),O.constants.add("tiltThreshold","float",.7),O.code.add(y`vec3 perpendicular(vec3 v) { vec3 n = (viewNormal * vec4(normal.xyz, 1.0)).xyz; vec3 n2 = cross(v, n); vec3 forward = vec3(0.0, 0.0, 1.0); float tiltDot = dot(forward, n); return abs(tiltDot) < tiltThreshold ? n : n2; }`)):O.code.add(y`vec2 perpendicular(vec2 v) { return vec2(v.y, -v.x); }`);const N=$?"vec3":"vec2";return O.code.add(y` ${N} normalizedSegment(${N} pos, ${N} prev) { ${N} segment = pos - prev; float segmentLen = length(segment); // normalize or zero if too short return (segmentLen > 0.001) ? 
segment / segmentLen : ${$?"vec3(0.0, 0.0, 0.0)":"vec2(0.0, 0.0)"}; } ${N} displace(${N} pos, ${N} prev, float displacementLen) { ${N} segment = normalizedSegment(pos, prev); ${N} displacementDirU = perpendicular(segment); ${N} displacementDirV = segment; ${1===k?"pos -= 0.5 * displacementLen * displacementDirV;":""} return pos + displacementLen * (uv0.x * displacementDirU + uv0.y * displacementDirV); } `),W&&(O.uniforms.add(new x("inverseProjectionMatrix",({camera:e})=>e.inverseProjectionMatrix)),O.code.add(y`vec3 inverseProject(vec4 posScreen) { posScreen.xy = (posScreen.xy / viewport.zw) * posScreen.w; return (inverseProjectionMatrix * posScreen).xyz; }`),O.code.add(y`bool rayIntersectPlane(vec3 rayDir, vec3 planeOrigin, vec3 planeNormal, out vec3 intersection) { float cos = dot(rayDir, planeNormal); float t = dot(planeOrigin, planeNormal) / cos; intersection = t * rayDir; return abs(cos) > 0.001 && t > 0.0; }`),O.uniforms.add(new w("perScreenPixelRatio",({camera:e})=>e.perScreenPixelRatio)),O.code.add(y` vec4 toFront(vec4 displacedPosScreen, vec3 posLeft, vec3 posRight, vec3 prev, float lineWidth) { // Project displaced position back to camera space vec3 displacedPos = inverseProject(displacedPosScreen); // Calculate the plane that we want the marker to lie in. Note that this will always be an approximation since ribbon lines are generally // not planar and we do not know the actual position of the displaced prev vertices (they are offset in screen space, too). vec3 planeNormal = normalize(cross(posLeft - posRight, posLeft - prev)); vec3 planeOrigin = posLeft; ${S(j.hasCap,"if(prev.z > posLeft.z) {\n vec2 diff = posLeft.xy - posRight.xy;\n planeOrigin.xy += perpendicular(diff) / 2.0;\n }")}; // Move the plane towards the camera by a margin dependent on the line width (approximated in world space). This tolerance corrects for the // non-planarity in most cases, but sharp joins can place the prev vertices at arbitrary positions so markers can still clip. 
float offset = lineWidth * perScreenPixelRatio; planeOrigin *= (1.0 - offset); // Intersect camera ray with the plane and make sure it is within clip space vec3 rayDir = normalize(displacedPos); vec3 intersection; if (rayIntersectPlane(rayDir, planeOrigin, planeNormal, intersection) && intersection.z < -nearFar[0] && intersection.z > -nearFar[1]) { return vec4(intersection.xyz, 1.0); } // Fallback: use depth of pos or prev, whichever is closer to the camera float minDepth = planeOrigin.z > prev.z ? length(planeOrigin) : length(prev); displacedPos *= minDepth / length(displacedPos); return vec4(displacedPos.xyz, 1.0); } `)),h(O),L.include(p),O.main.add(y` // Check for special value of uv0.y which is used by the Renderer when graphics // are removed before the VBO is recompacted. If this is the case, then we just // project outside of clip space. if (uv0.y == 0.0) { // Project out of clip space gl_Position = ${s}; } else { vec4 pos = view * vec4(position, 1.0); vec4 prev = view * vec4(position + previousDelta.xyz * previousDelta.w, 1.0); float lineWidth = getLineWidth(${S(C,"pos.xyz")}); float screenMarkerSize = getScreenMarkerSize(lineWidth); clip(pos, prev); ${$?y`${S(j.hideOnShortSegments,y` if (areWorldMarkersHidden(pos.xyz, prev.xyz)) { gl_Position = ${s}; return; }`)} pos.xyz = displace(pos.xyz, prev.xyz, getWorldMarkerSize(pos.xyz)); vec4 displacedPosScreen = projectAndScale(pos);`:y` vec4 posScreen = projectAndScale(pos); vec4 prevScreen = projectAndScale(prev); vec4 displacedPosScreen = posScreen; displacedPosScreen.xy = displace(posScreen.xy, prevScreen.xy, screenMarkerSize); ${S(W,y` vec2 displacementDirU = perpendicular(normalizedSegment(posScreen.xy, prevScreen.xy)); // We need three points of the ribbon line in camera space to calculate the plane it lies in // Note that we approximate the third point, since we have no information about the join around prev vec3 lineRight = inverseProject(posScreen + lineWidth * vec4(displacementDirU.xy, 0.0, 0.0)); vec3 
lineLeft = pos.xyz + (pos.xyz - lineRight); pos = toFront(displacedPosScreen, lineLeft, lineRight, prev.xyz, lineWidth); displacedPosScreen = projectAndScale(pos);`)}`} forwardViewPosDepth(pos.xyz); // Convert back into NDC displacedPosScreen.xy = (displacedPosScreen.xy / viewport.zw) * displacedPosScreen.w; // Convert texture coordinate into [0,1] vUV = (uv0 + 1.0) / 2.0; ${S(!$,"vUV = noPerspectiveWrite(vUV, displacedPosScreen.w);")} ${S(M,"vLineWidth = noPerspectiveWrite(lineWidth, displacedPosScreen.w);")} vSize = screenMarkerSize; vColor = getColor(); // Use camera space for slicing vpos = pos.xyz; gl_Position = displacedPosScreen; }`),U.include(o,j),L.include(P,j),U.include(c),U.uniforms.add(new u("intrinsicColor",({color:e})=>e),new b("tex",({markerTexture:e})=>e)).constants.add("texelSize","float",1/e).code.add(y`float markerAlpha(vec2 samplePos) { samplePos += vec2(0.5, -0.5) * texelSize; float sdf = texture(tex, samplePos).r; float pixelDistance = sdf * vSize; pixelDistance -= 0.5; return clamp(0.5 - pixelDistance, 0.0, 1.0); }`),M&&(L.include(d),U.constants.add("relativeMarkerSize","float",r/e).constants.add("relativeTipLineWidth","float",i).code.add(y` float tipAlpha(vec2 samplePos) { // Convert coordinates s.t. 
they are in pixels and relative to the tip of an arrow marker samplePos -= vec2(0.5, 0.5 + 0.5 * relativeMarkerSize); samplePos *= vSize; float halfMarkerSize = 0.5 * relativeMarkerSize * vSize; float halfTipLineWidth = 0.5 * max(1.0, relativeTipLineWidth * noPerspectiveRead(vLineWidth)); ${S($,"halfTipLineWidth *= fwidth(samplePos.y);")} float distance = max(abs(samplePos.x) - halfMarkerSize, abs(samplePos.y) - halfTipLineWidth); return clamp(0.5 - distance, 0.0, 1.0); } `)),L.include(a,j),L.include(d),U.main.add(y` discardBySlice(vpos); discardByTerrainDepth(); vec4 finalColor = intrinsicColor * vColor; // Cancel out perspective correct interpolation if in screen space or draped vec2 samplePos = ${S(!$,"noPerspectiveRead(vUV)","vUV")}; finalColor.a *= ${M?"max(markerAlpha(samplePos), tipAlpha(samplePos))":"markerAlpha(samplePos)"}; outputColorHighlightOLID(applySlice(finalColor, vpos), finalColor.rgb);`),L}const L=Object.freeze(Object.defineProperty({__proto__:null,build:j},Symbol.toStringTag,{value:"Module"}));export{L,j as b};