UNPKG

@kitware/vtk.js

Version:

Visualization Toolkit for the Web

683 lines (533 loc) 23.1 kB
import _toConsumableArray from '@babel/runtime/helpers/toConsumableArray';
import macro from '../../macros.js';
import vtkPolyData from '../../Common/DataModel/PolyData.js';
import vtkProperty from '../Core/Property.js';
import vtkRenderPass from '../SceneGraph/RenderPass.js';
import vtkWebGPUBufferManager from './BufferManager.js';
import vtkWebGPUSimpleMapper from './SimpleMapper.js';
import vtkWebGPURenderEncoder from './RenderEncoder.js';
import vtkWebGPUShaderCache from './ShaderCache.js';
import vtkWebGPUTexture from './Texture.js';
import vtkWebGPUUniformBuffer from './UniformBuffer.js';
import vtkWebGPUFullScreenQuad from './FullScreenQuad.js';
import vtkWebGPUVolumePassFSQ from './VolumePassFSQ.js';
import { e as distance2BetweenPoints } from '../../Common/Core/Math/index.js';

// Local aliases for the enums used throughout this pass.
var Representation = vtkProperty.Representation;
var BufferUsage = vtkWebGPUBufferManager.BufferUsage,
  PrimitiveTypes = vtkWebGPUBufferManager.PrimitiveTypes;

// The volume rendering pass consists of two sub passes. The first
// (depthRange) renders polygonal cubes for the volumes to compute min and
// max bounds in depth for the image. This is then fed into the second pass
// (final) which actually does the raycasting between those bounds, sampling
// the volumes along the way. So the first pass tends to be very fast while
// the second is where most of the work is done.
// Cube corner indexing, given x then y then z ordering:
//
//      2-----3
//     /|    /|
//    6-----7 |
//    | |   | |
//    | 0---|-1
//    |/    |/
//    4-----5
//
// Twelve triangles (two per face) over the eight corners above.
var cubeFaceTriangles = [[0, 4, 6], [0, 6, 2], [1, 3, 7], [1, 7, 5], [0, 5, 4], [0, 1, 5], [2, 6, 7], [2, 7, 3], [0, 3, 1], [0, 2, 3], [4, 5, 7], [4, 7, 6]];

// Fragment template used by the depth-range sub pass; writes the fragment
// depth to two attachments that are blended with max/min respectively so the
// raycaster knows the near/far depth bounds per pixel.
var DepthBoundsFS = "\n//VTK::Renderer::Dec\n\n//VTK::Select::Dec\n\n//VTK::VolumePass::Dec\n\n//VTK::TCoord::Dec\n\n//VTK::RenderEncoder::Dec\n\n//VTK::Mapper::Dec\n\n//VTK::IOStructs::Dec\n\n@fragment\nfn main(\n//VTK::IOStructs::Input\n)\n//VTK::IOStructs::Output\n{\n var output : fragmentOutput;\n\n //VTK::Select::Impl\n\n //VTK::TCoord::Impl\n\n //VTK::VolumePass::Impl\n\n // use the maximum (closest) of the current value and the zbuffer\n // the blend func will then take the min to find the farthest stop value\n var stopval: f32 = max(input.fragPos.z, textureLoad(opaquePassDepthTexture, vec2<i32>(i32(input.fragPos.x), i32(input.fragPos.y)), 0));\n\n //VTK::RenderEncoder::Impl\n return output;\n}\n";

// Fragment template used to composite the (possibly reduced-resolution)
// volume color texture back onto the renderer's color attachment; tscale
// rescales texture coordinates when a small viewport was used.
var volumeCopyFragTemplate = "\n//VTK::Renderer::Dec\n\n//VTK::Mapper::Dec\n\n//VTK::TCoord::Dec\n\n//VTK::RenderEncoder::Dec\n\n//VTK::IOStructs::Dec\n\n@fragment\nfn main(\n//VTK::IOStructs::Input\n)\n//VTK::IOStructs::Output\n{\n var output: fragmentOutput;\n\n var computedColor: vec4<f32> = textureSample(volumePassColorTexture,\n volumePassColorTextureSampler, mapperUBO.tscale*input.tcoordVS);\n\n //VTK::RenderEncoder::Impl\n return output;\n}\n";

/* eslint-disable no-undef */
/* eslint-disable no-bitwise */

// ----------------------------------------------------------------------------
// vtkWebGPUVolumePass methods
// ----------------------------------------------------------------------------

function vtkWebGPUVolumePass(publicAPI, model) {
  // Set our className
  model.classHierarchy.push('vtkWebGPUVolumePass');

  /**
   * Lazily create the encoders, textures, and full-screen quads this pass
   * needs. Safe to call every frame; only missing pieces are built.
   * @param {object} viewNode WebGPU render window node providing the device.
   */
  publicAPI.initialize = function (viewNode) {
    if (!model._clearEncoder) {
      publicAPI.createClearEncoder(viewNode);
    }
    if (!model._mergeEncoder) {
      publicAPI.createMergeEncoder(viewNode);
    }
    if (!model._copyEncoder) {
      publicAPI.createCopyEncoder(viewNode);
    }
    if (!model._depthRangeEncoder) {
      publicAPI.createDepthRangeEncoder(viewNode);
    }

    if (!model.fullScreenQuad) {
      model.fullScreenQuad = vtkWebGPUVolumePassFSQ.newInstance();
      model.fullScreenQuad.setDevice(viewNode.getDevice());
      // the raycast quad samples the min/max depth textures produced by the
      // depth-range encoder
      model.fullScreenQuad.setTextureViews(_toConsumableArray(model._depthRangeEncoder.getColorTextureViews()));
    }

    if (!model._volumeCopyQuad) {
      model._volumeCopyQuad = vtkWebGPUFullScreenQuad.newInstance();
      model._volumeCopyQuad.setPipelineHash('volpassfsq');
      model._volumeCopyQuad.setDevice(viewNode.getDevice());
      model._volumeCopyQuad.setFragmentShaderTemplate(volumeCopyFragTemplate);
      model._copyUBO = vtkWebGPUUniformBuffer.newInstance({
        label: 'mapperUBO'
      });
      model._copyUBO.addEntry('tscale', 'vec2<f32>');
      model._volumeCopyQuad.setUBO(model._copyUBO);
      model._volumeCopyQuad.setTextureViews([model._colorTextureView]);
    }
  };

  /**
   * Render this pass: compute per-pixel depth bounds, raycast the volumes
   * (in chunks if they exceed the device texture limit), then composite the
   * result back onto the caller-provided color attachment.
   */
  publicAPI.traverse = function (renNode, viewNode) {
    if (model.deleted) {
      return;
    }

    // we just render our delegates in order
    model._currentParent = viewNode;

    // create stuff we need
    publicAPI.initialize(viewNode);

    // determine if we are rendering a small size
    publicAPI.computeTiming(viewNode);

    // first render the boxes to generate a min max depth
    // map for all the volumes
    publicAPI.renderDepthBounds(renNode, viewNode);

    // always mark true
    model._firstGroup = true;
    var device = viewNode.getDevice();

    // determine how many volumes we can render at a time. We subtract 4
    // because we know we use textures for min, max, ofun and tfun
    var maxVolumes = device.getHandle().limits.maxSampledTexturesPerShaderStage - 4;

    // if we have to make multiple passes then break the volumes up into
    // groups rendered from farthest to closest
    if (model.volumes.length > maxVolumes) {
      var cameraPos = renNode.getRenderable().getActiveCamera().getPosition();

      // sort from back to front based on volume centroid
      var distances = [];
      for (var v = 0; v < model.volumes.length; v++) {
        var bounds = model.volumes[v].getRenderable().getBounds();
        var centroid = [0.5 * (bounds[1] + bounds[0]), 0.5 * (bounds[3] + bounds[2]), 0.5 * (bounds[5] + bounds[4])];
        distances[v] = distance2BetweenPoints(centroid, cameraPos);
      }

      // sort by distance
      var volumeOrder = _toConsumableArray(Array(model.volumes.length).keys());
      volumeOrder.sort(function (a, b) {
        return distances[b] - distances[a];
      });

      // render in chunks back to front
      var volumesToRender = [];

      // start with smallest chunk so that the last (closest) chunk
      // has a full maxVolumes. When the count divides evenly the remainder
      // is 0; in that case every chunk is a full maxVolumes — without this
      // guard the first "chunk" would be a single volume and the final
      // maxVolumes - 1 volumes would never be rendered.
      var chunkSize = volumeOrder.length % maxVolumes;
      if (chunkSize === 0) {
        chunkSize = maxVolumes;
      }
      for (var _v = 0; _v < volumeOrder.length; _v++) {
        volumesToRender.push(model.volumes[volumeOrder[_v]]);
        if (volumesToRender.length >= chunkSize) {
          publicAPI.rayCastPass(viewNode, renNode, volumesToRender);
          volumesToRender = [];
          chunkSize = maxVolumes;
          model._firstGroup = false;
        }
      }
    } else {
      // if not rendering in chunks then just draw all of them at once
      publicAPI.rayCastPass(viewNode, renNode, model.volumes);
    }

    // copy back to the original color buffer
    // final composite
    model._volumeCopyQuad.setWebGPURenderer(renNode);
    if (model._useSmallViewport) {
      var width = model._colorTextureView.getTexture().getWidth();
      var height = model._colorTextureView.getTexture().getHeight();
      model._copyUBO.setArray('tscale', [model._smallViewportWidth / width, model._smallViewportHeight / height]);
    } else {
      model._copyUBO.setArray('tscale', [1.0, 1.0]);
    }
    model._copyUBO.sendIfNeeded(device);

    // note: the public colorTextureView (the scene's color buffer) is the
    // destination of the composite, not the internal _colorTextureView
    model._copyEncoder.setColorTextureView(0, model.colorTextureView);
    model._copyEncoder.attachTextureViews();
    model._copyEncoder.begin(viewNode.getCommandEncoder());
    renNode.scissorAndViewport(model._copyEncoder);
    model._volumeCopyQuad.prepareAndDraw(model._copyEncoder);
    model._copyEncoder.end();
  };

  // unsubscribe from our listeners
  publicAPI.delete = macro.chain(function () {
    if (model._animationRateSubscription) {
      model._animationRateSubscription.unsubscribe();
      model._animationRateSubscription = null;
    }
  }, publicAPI.delete);

  /**
   * Decide whether to render at reduced resolution during interaction and
   * keep the reduction scale updated from the animation frame rate.
   */
  publicAPI.computeTiming = function (viewNode) {
    model._useSmallViewport = false;
    var rwi = viewNode.getRenderable().getInteractor();

    if (rwi.isAnimating() && model._lastScale > 1.5) {
      if (!model._smallViewportHeight) {
        // _lastScale is an area ratio, so each axis shrinks by sqrt(scale)
        model._smallViewportWidth = Math.ceil(viewNode.getCanvas().width / Math.sqrt(model._lastScale));
        model._smallViewportHeight = Math.ceil(viewNode.getCanvas().height / Math.sqrt(model._lastScale));
      }
      model._useSmallViewport = true;
    }

    model._colorTexture.resize(viewNode.getCanvas().width, viewNode.getCanvas().height);

    if (!model._animationRateSubscription) {
      // when the animation frame rate changes recompute the scale factor
      model._animationRateSubscription = rwi.onAnimationFrameRateUpdate(function () {
        var firstMapper = model.volumes[0].getRenderable().getMapper();
        if (firstMapper.getAutoAdjustSampleDistances()) {
          var frate = rwi.getRecentAnimationFrameRate();
          var targetScale = model._lastScale * rwi.getDesiredUpdateRate() / frate;
          model._lastScale = targetScale;

          // clamp scale to some reasonable values.
          // Below 1.5 we will just be using full resolution as that is
          // close enough. Above 400 seems like a lot so we limit to that
          // (1/20th per axis)
          if (model._lastScale > 400) {
            model._lastScale = 400;
          }
        } else {
          model._lastScale = firstMapper.getImageSampleDistance() * firstMapper.getImageSampleDistance();
        }
        if (model._lastScale < 1.5) {
          model._lastScale = 1.5;
        } else {
          model._smallViewportWidth = Math.ceil(viewNode.getCanvas().width / Math.sqrt(model._lastScale));
          model._smallViewportHeight = Math.ceil(viewNode.getCanvas().height / Math.sqrt(model._lastScale));
        }
      });
    }
  };

  /**
   * Raycast one group of volumes into the internal color texture. The first
   * group clears the texture; later groups blend (merge) over it.
   * @param {Array} volumes subset of model.volumes to draw in this pass.
   */
  publicAPI.rayCastPass = function (viewNode, renNode, volumes) {
    var encoder = model._firstGroup ? model._clearEncoder : model._mergeEncoder;
    encoder.attachTextureViews();
    encoder.begin(viewNode.getCommandEncoder());
    var width = model._colorTextureView.getTexture().getWidth();
    var height = model._colorTextureView.getTexture().getHeight();
    if (model._useSmallViewport) {
      width = model._smallViewportWidth;
      height = model._smallViewportHeight;
    }
    encoder.getHandle().setViewport(0, 0, width, height, 0.0, 1.0);
    // set scissor
    encoder.getHandle().setScissorRect(0, 0, width, height);
    model.fullScreenQuad.setWebGPURenderer(renNode);
    model.fullScreenQuad.setVolumes(volumes);
    model.fullScreenQuad.prepareAndDraw(encoder);
    encoder.end();
  };

  /**
   * Upload the bounding-cube geometry for all volumes and draw it to produce
   * the per-pixel min/max depth textures.
   */
  publicAPI.renderDepthBounds = function (renNode, viewNode) {
    publicAPI.updateDepthPolyData(renNode);

    var pd = model._boundsPoly;
    var points = pd.getPoints();
    var cells = pd.getPolys();

    // index buffer keyed off the cell mtime so it is rebuilt only on change
    var buffRequest = {
      hash: "vp".concat(cells.getMTime()),
      usage: BufferUsage.Index,
      cells: cells,
      numberOfPoints: points.getNumberOfPoints(),
      primitiveType: PrimitiveTypes.Triangles,
      representation: Representation.SURFACE
    };
    var indexBuffer = viewNode.getDevice().getBufferManager().getBuffer(buffRequest);
    model._mapper.getVertexInput().setIndexBuffer(indexBuffer);

    // points
    buffRequest = {
      usage: BufferUsage.PointArray,
      format: 'float32x4',
      hash: "vp".concat(points.getMTime()).concat(cells.getMTime()),
      dataArray: points,
      indexBuffer: indexBuffer,
      packExtra: true
    };
    var buff = viewNode.getDevice().getBufferManager().getBuffer(buffRequest);
    model._mapper.getVertexInput().addBuffer(buff, ['vertexBC']);
    model._mapper.setNumberOfVertices(buff.getSizeInBytes() / buff.getStrideInBytes());

    publicAPI.drawDepthRange(renNode, viewNode);
  };

  /**
   * Rebuild the polydata holding one bounding cube per volume, but only when
   * a volume mtime or the renderer's stabilized time has changed.
   */
  publicAPI.updateDepthPolyData = function (renNode) {
    // check mtimes first
    var update = false;
    for (var i = 0; i < model.volumes.length; i++) {
      var mtime = model.volumes[i].getMTime();
      if (!model._lastMTimes[i] || mtime !== model._lastMTimes[i]) {
        update = true;
        model._lastMTimes[i] = mtime;
      }
    }

    // also check stabilized time; stored one slot past the volume mtimes
    var stime = renNode.getStabilizedTime();
    if (model._lastMTimes.length <= model.volumes.length || stime !== model._lastMTimes[model.volumes.length]) {
      update = true;
      model._lastMTimes[model.volumes.length] = stime;
    }

    // if no need to update then return
    if (!update) {
      return;
    }

    // rebuild: 8 corner points and 12 triangles per volume
    var center = renNode.getStabilizedCenterByReference();
    var numPts = model.volumes.length * 8;
    var points = new Float64Array(numPts * 3);
    var numTris = model.volumes.length * 12;
    // each cell is (vertex count, i0, i1, i2) -> 4 entries
    var polys = new Uint16Array(numTris * 4);

    // add points and cells
    for (var _i = 0; _i < model.volumes.length; _i++) {
      model.volumes[_i].getBoundingCubePoints(points, _i * 24);
      var cellIdx = _i * 12 * 4;
      var offset = _i * 8;
      for (var t = 0; t < 12; t++) {
        polys[cellIdx++] = 3;
        polys[cellIdx++] = offset + cubeFaceTriangles[t][0];
        polys[cellIdx++] = offset + cubeFaceTriangles[t][1];
        polys[cellIdx++] = offset + cubeFaceTriangles[t][2];
      }
    }

    // shift points into the stabilized coordinate system
    for (var p = 0; p < points.length; p += 3) {
      points[p] -= center[0];
      points[p + 1] -= center[1];
      points[p + 2] -= center[2];
    }

    model._boundsPoly.getPoints().setData(points, 3);
    model._boundsPoly.getPoints().modified();
    model._boundsPoly.getPolys().setData(polys, 1);
    model._boundsPoly.getPolys().modified();
    model._boundsPoly.modified();
  };

  /**
   * Run the depth-range sub pass: resize its textures to match the scene's
   * color buffer and draw the bounding cubes through the simple mapper.
   */
  publicAPI.drawDepthRange = function (renNode, viewNode) {
    model._depthRangeTexture.resizeToMatch(model.colorTextureView.getTexture());
    model._depthRangeTexture2.resizeToMatch(model.colorTextureView.getTexture());
    model._depthRangeEncoder.attachTextureViews();
    publicAPI.setCurrentOperation('volumeDepthRangePass');
    renNode.setRenderEncoder(model._depthRangeEncoder);
    renNode.volumeDepthRangePass(true);
    model._mapper.setWebGPURenderer(renNode);
    model._mapper.prepareToDraw(model._depthRangeEncoder);
    model._mapper.registerDrawCallback(model._depthRangeEncoder);
    renNode.volumeDepthRangePass(false);
  };

  /**
   * Build the encoder and the two r16float textures (max depth blended with
   * 'max', min/stop depth blended with 'min') used by the depth-range pass.
   */
  publicAPI.createDepthRangeEncoder = function (viewNode) {
    var device = viewNode.getDevice();
    model._depthRangeEncoder = vtkWebGPURenderEncoder.newInstance({
      label: 'VolumePass DepthRange'
    });
    model._depthRangeEncoder.setPipelineHash('volr');
    model._depthRangeEncoder.setReplaceShaderCodeFunction(function (pipeline) {
      var fDesc = pipeline.getShaderDescription('fragment');
      fDesc.addOutput('vec4<f32>', 'outColor1');
      fDesc.addOutput('vec4<f32>', 'outColor2');
      var code = fDesc.getCode();
      code = vtkWebGPUShaderCache.substitute(code, '//VTK::RenderEncoder::Impl', ['output.outColor1 = vec4<f32>(input.fragPos.z, 0.0, 0.0, 0.0);', 'output.outColor2 = vec4<f32>(stopval, 0.0, 0.0, 0.0);']).result;
      fDesc.setCode(code);
    });
    model._depthRangeEncoder.setDescription({
      colorAttachments: [{
        view: null,
        clearValue: [0.0, 0.0, 0.0, 0.0],
        loadOp: 'clear',
        storeOp: 'store'
      }, {
        view: null,
        clearValue: [1.0, 1.0, 1.0, 1.0],
        loadOp: 'clear',
        storeOp: 'store'
      }]
    });
    // NOTE: the original code spelled the alpha srcFactor key as
    // 'srcfactor', which WebGPU ignores as an unknown dictionary member;
    // corrected to 'srcFactor' here.
    model._depthRangeEncoder.setPipelineSettings({
      primitive: {
        cullMode: 'none'
      },
      fragment: {
        targets: [{
          format: 'r16float',
          blend: {
            color: {
              srcFactor: 'one',
              dstFactor: 'one',
              operation: 'max'
            },
            alpha: {
              srcFactor: 'one',
              dstFactor: 'one',
              operation: 'max'
            }
          }
        }, {
          format: 'r16float',
          blend: {
            color: {
              srcFactor: 'one',
              dstFactor: 'one',
              operation: 'min'
            },
            alpha: {
              srcFactor: 'one',
              dstFactor: 'one',
              operation: 'min'
            }
          }
        }]
      }
    });

    // and the textures it needs
    model._depthRangeTexture = vtkWebGPUTexture.newInstance({
      label: 'volumePassMaxDepth'
    });
    model._depthRangeTexture.create(device, {
      width: viewNode.getCanvas().width,
      height: viewNode.getCanvas().height,
      format: 'r16float',
      usage: GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.TEXTURE_BINDING
    });
    var maxView = model._depthRangeTexture.createView('maxTexture');
    model._depthRangeEncoder.setColorTextureView(0, maxView);

    model._depthRangeTexture2 = vtkWebGPUTexture.newInstance({
      label: 'volumePassDepthMin'
    });
    model._depthRangeTexture2.create(device, {
      width: viewNode.getCanvas().width,
      height: viewNode.getCanvas().height,
      format: 'r16float',
      usage: GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.TEXTURE_BINDING
    });
    var minView = model._depthRangeTexture2.createView('minTexture');
    model._depthRangeEncoder.setColorTextureView(1, minView);

    model._mapper.setDevice(viewNode.getDevice());
    model._mapper.setTextureViews([model.depthTextureView]);
  };

  /**
   * Build the internal color texture plus the encoder used for the first
   * volume group (clears the attachment before drawing).
   */
  publicAPI.createClearEncoder = function (viewNode) {
    model._colorTexture = vtkWebGPUTexture.newInstance({
      label: 'volumePassColor'
    });
    model._colorTexture.create(viewNode.getDevice(), {
      width: viewNode.getCanvas().width,
      height: viewNode.getCanvas().height,
      format: 'bgra8unorm',
      /* eslint-disable no-undef */
      /* eslint-disable no-bitwise */
      usage: GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.TEXTURE_BINDING | GPUTextureUsage.COPY_SRC
    });
    model._colorTextureView = model._colorTexture.createView('volumePassColorTexture');
    model._colorTextureView.addSampler(viewNode.getDevice(), {
      minFilter: 'linear',
      magFilter: 'linear'
    });

    model._clearEncoder = vtkWebGPURenderEncoder.newInstance({
      label: 'VolumePass Clear'
    });
    model._clearEncoder.setColorTextureView(0, model._colorTextureView);
    model._clearEncoder.setDescription({
      colorAttachments: [{
        view: null,
        clearValue: [0.0, 0.0, 0.0, 0.0],
        loadOp: 'clear',
        storeOp: 'store'
      }]
    });
    model._clearEncoder.setPipelineHash('volpf');
    model._clearEncoder.setPipelineSettings({
      primitive: {
        cullMode: 'none'
      },
      fragment: {
        targets: [{
          format: 'bgra8unorm',
          blend: {
            color: {
              srcFactor: 'src-alpha',
              dstFactor: 'one-minus-src-alpha'
            },
            // 'srcFactor' key fixed from misspelled 'srcfactor'
            alpha: {
              srcFactor: 'one',
              dstFactor: 'one-minus-src-alpha'
            }
          }
        }]
      }
    });
  };

  /**
   * Build the encoder that composites the volume color texture back onto the
   * scene's color attachment (loadOp 'load', premultiplied-style blend).
   */
  publicAPI.createCopyEncoder = function (viewNode) {
    model._copyEncoder = vtkWebGPURenderEncoder.newInstance({
      label: 'volumePassCopy'
    });
    model._copyEncoder.setDescription({
      colorAttachments: [{
        view: null,
        loadOp: 'load',
        storeOp: 'store'
      }]
    });
    model._copyEncoder.setPipelineHash('volcopypf');
    model._copyEncoder.setPipelineSettings({
      primitive: {
        cullMode: 'none'
      },
      fragment: {
        targets: [{
          format: 'rgba16float',
          blend: {
            color: {
              srcFactor: 'one',
              dstFactor: 'one-minus-src-alpha'
            },
            // 'srcFactor' key fixed from misspelled 'srcfactor'
            alpha: {
              srcFactor: 'one',
              dstFactor: 'one-minus-src-alpha'
            }
          }
        }]
      }
    });
  };

  /**
   * Build the encoder used for subsequent volume groups: loads the existing
   * internal color texture and alpha-blends the new group over it.
   */
  publicAPI.createMergeEncoder = function (viewNode) {
    model._mergeEncoder = vtkWebGPURenderEncoder.newInstance({
      label: 'volumePassMerge'
    });
    model._mergeEncoder.setColorTextureView(0, model._colorTextureView);
    model._mergeEncoder.setDescription({
      colorAttachments: [{
        view: null,
        loadOp: 'load',
        storeOp: 'store'
      }]
    });
    model._mergeEncoder.setReplaceShaderCodeFunction(function (pipeline) {
      var fDesc = pipeline.getShaderDescription('fragment');
      fDesc.addOutput('vec4<f32>', 'outColor');
      var code = fDesc.getCode();
      code = vtkWebGPUShaderCache.substitute(code, '//VTK::RenderEncoder::Impl', ['output.outColor = vec4<f32>(computedColor.rgb, computedColor.a);']).result;
      fDesc.setCode(code);
    });
    model._mergeEncoder.setPipelineHash('volpf');
    model._mergeEncoder.setPipelineSettings({
      primitive: {
        cullMode: 'none'
      },
      fragment: {
        targets: [{
          format: 'bgra8unorm',
          blend: {
            color: {
              srcFactor: 'src-alpha',
              dstFactor: 'one-minus-src-alpha'
            },
            // 'srcFactor' key fixed from misspelled 'srcfactor'
            alpha: {
              srcFactor: 'one',
              dstFactor: 'one-minus-src-alpha'
            }
          }
        }]
      }
    });
  };

  /**
   * Replace the volume list, copying the input array; marks the pass
   * modified only when length or membership actually changed.
   * @param {Array} val actor/volume nodes to render.
   */
  publicAPI.setVolumes = function (val) {
    if (!model.volumes || model.volumes.length !== val.length) {
      model.volumes = _toConsumableArray(val);
      publicAPI.modified();
      return;
    }
    for (var i = 0; i < val.length; i++) {
      if (val[i] !== model.volumes[i]) {
        model.volumes = _toConsumableArray(val);
        publicAPI.modified();
        return;
      }
    }
  };
}

// ----------------------------------------------------------------------------
// Object factory
// ----------------------------------------------------------------------------

var DEFAULT_VALUES = {
  colorTextureView: null,
  depthTextureView: null,
  volumes: null
};

// ----------------------------------------------------------------------------

function extend(publicAPI, model) {
  var initialValues = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
  Object.assign(model, DEFAULT_VALUES, initialValues);

  // Build VTK API
  vtkRenderPass.extend(publicAPI, model, initialValues);

  // area reduction factor used for interactive small-viewport rendering
  model._lastScale = 2.0;

  // mapper used to draw the volume bounding cubes in the depth-range pass
  model._mapper = vtkWebGPUSimpleMapper.newInstance();
  model._mapper.setFragmentShaderTemplate(DepthBoundsFS);
  model._mapper.getShaderReplacements().set('replaceShaderVolumePass', function (hash, pipeline, vertexInput) {
    var fDesc = pipeline.getShaderDescription('fragment');
    fDesc.addBuiltinInput('vec4<f32>', '@builtin(position) fragPos');
  });

  model._boundsPoly = vtkPolyData.newInstance();
  model._lastMTimes = [];

  macro.setGet(publicAPI, model, ['colorTextureView', 'depthTextureView']);

  // Object methods
  vtkWebGPUVolumePass(publicAPI, model);
}

// ----------------------------------------------------------------------------

var newInstance = macro.newInstance(extend, 'vtkWebGPUVolumePass');

// ----------------------------------------------------------------------------

var vtkWebGPUVolumePass$1 = {
  newInstance: newInstance,
  extend: extend
};

export { vtkWebGPUVolumePass$1 as default, extend, newInstance };