// @kitware/vtk.js — Visualization Toolkit for the Web
// WebGPU RenderWindow (vtkWebGPURenderWindow)
import _asyncToGenerator from '@babel/runtime/helpers/asyncToGenerator';
import _regeneratorRuntime from '@babel/runtime/regenerator';
import macro from '../../macros.js';
import { registerViewConstructor } from '../Core/RenderWindow.js';
import vtkForwardPass from './ForwardPass.js';
import vtkWebGPUBuffer from './Buffer.js';
import vtkWebGPUDevice from './Device.js';
import vtkWebGPUHardwareSelector from './HardwareSelector.js';
import vtkWebGPUViewNodeFactory from './ViewNodeFactory.js';
import vtkRenderPass from '../SceneGraph/RenderPass.js';
import vtkRenderWindowViewNode from '../SceneGraph/RenderWindowViewNode.js';
import HalfFloat from '../../Common/Core/HalfFloat.js';
var vtkErrorMacro = macro.vtkErrorMacro;

// const IS_CHROME = navigator.userAgent.indexOf('Chrome') !== -1;
var SCREENSHOT_PLACEHOLDER = {
position: 'absolute',
top: 0,
left: 0,
width: '100%',
height: '100%'
};

// ----------------------------------------------------------------------------
// vtkWebGPURenderWindow methods
// ----------------------------------------------------------------------------
function vtkWebGPURenderWindow(publicAPI, model) {
// Set our className
model.classHierarchy.push('vtkWebGPURenderWindow');
publicAPI.getViewNodeFactory = function () {
return model.myFactory;
};

// Auto update style
var previousSize = [0, 0];
function updateWindow() {
// Canvas size
if (model.renderable) {
if (model.size[0] !== previousSize[0] || model.size[1] !== previousSize[1]) {
previousSize[0] = model.size[0];
previousSize[1] = model.size[1];
model.canvas.setAttribute('width', model.size[0]);
model.canvas.setAttribute('height', model.size[1]);
publicAPI.recreateSwapChain();
}
}

// ImageStream size
if (model.viewStream) {
// If same size that's a NoOp
model.viewStream.setSize(model.size[0], model.size[1]);
}

// Offscreen ?
model.canvas.style.display = model.useOffScreen ? 'none' : 'block';

// Cursor type
if (model.el) {
model.el.style.cursor = model.cursorVisibility ? model.cursor : 'none';
}

// Invalidate cached DOM container size
model.containerSize = null;
}
publicAPI.onModified(updateWindow);
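// (Re)configure the canvas' WebGPU context for the current size and preferred
// format. Called on resize and whenever the swap chain must be rebuilt; it is
// a no-op until a context exists.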
publicAPI.recreateSwapChain = function () {
if (model.context) {
model.context.unconfigure();
model.presentationFormat = navigator.gpu.getPreferredCanvasFormat(model.adapter);
/* eslint-disable no-undef */
/* eslint-disable no-bitwise */
model.context.configure({
device: model.device.getHandle(),
format: model.presentationFormat,
alphaMode: 'premultiplied',
usage: GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.COPY_DST,
width: model.size[0],
height: model.size[1]
});
model._configured = true;
}
};
publicAPI.getCurrentTexture = function () {
return model.context.getCurrentTexture();
};

// Builds myself.
publicAPI.buildPass = function (prepass) {
if (prepass) {
if (!model.renderable) {
return;
}
publicAPI.prepareNodes();
publicAPI.addMissingNodes(model.renderable.getRenderersByReference());
publicAPI.removeUnusedNodes();
publicAPI.initialize();
} else if (model.initialized) {
if (!model._configured) {
publicAPI.recreateSwapChain();
}
model.commandEncoder = model.device.createCommandEncoder();
}
};

// publicAPI.traverseRenderers = (renPass) => {
// // iterate over renderers
// const numlayers = publicAPI.getRenderable().getNumberOfLayers();
// const renderers = publicAPI.getChildren();
// for (let i = 0; i < numlayers; i++) {
// for (let index = 0; index < renderers.length; index++) {
// const renNode = renderers[index];
// const ren = publicAPI.getRenderable().getRenderers()[index];
// if (ren.getDraw() && ren.getLayer() === i) {
// renNode.traverse(renPass);
// }
// }
// }
// };
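// Kick off asynchronous WebGPU initialization (adapter, device, canvas context).
// The initializing flag guards against re-entry; the 'initialized' event fires
// once create3DContextAsync resolves.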
publicAPI.initialize = function () {
if (!model.initializing) {
model.initializing = true;
if (!navigator.gpu) {
vtkErrorMacro('WebGPU is not enabled.');
return;
}
publicAPI.create3DContextAsync().then(function () {
model.initialized = true;
if (model.deleted) {
return;
}
publicAPI.invokeInitialized();
});
}
};
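// Attach the canvas (and, if enabled, the background image) to a DOM container,
// detaching them from any previous container first.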
publicAPI.setContainer = function (el) {
if (model.el && model.el !== el) {
if (model.canvas.parentNode !== model.el) {
vtkErrorMacro('Error: canvas parent node does not match container');
}

// Remove canvas from previous container
model.el.removeChild(model.canvas);

// If the renderer has previously added
// a background image, remove it from the DOM.
if (model.el.contains(model.bgImage)) {
model.el.removeChild(model.bgImage);
}
}
if (model.el !== el) {
model.el = el;
if (model.el) {
model.el.appendChild(model.canvas);

// If the renderer is set to use a background
// image, attach it to the DOM.
if (model.useBackgroundImage) {
model.el.appendChild(model.bgImage);
}
}

// Trigger modified()
publicAPI.modified();
}
};
publicAPI.getContainer = function () {
return model.el;
};
publicAPI.getContainerSize = function () {
if (!model.containerSize && model.el) {
var _model$el$getBounding = model.el.getBoundingClientRect(),
width = _model$el$getBounding.width,
height = _model$el$getBounding.height;
model.containerSize = [width, height];
}
return model.containerSize || model.size;
};
publicAPI.getFramebufferSize = function () {
return model.size;
};
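// Request a high-performance GPU adapter and a device, then grab the canvas'
// 'webgpu' context. This is the Babel/regenerator transpilation of an async
// function; the early returns handle the render window being deleted while the
// requests are still pending.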
publicAPI.create3DContextAsync = /*#__PURE__*/_asyncToGenerator( /*#__PURE__*/_regeneratorRuntime.mark(function _callee() {
return _regeneratorRuntime.wrap(function _callee$(_context) {
while (1) {
switch (_context.prev = _context.next) {
case 0:
_context.next = 2;
return navigator.gpu.requestAdapter({
powerPreference: 'high-performance'
});
case 2:
model.adapter = _context.sent;
if (!model.deleted) {
_context.next = 5;
break;
}
return _context.abrupt("return");
case 5:
// console.log([...model.adapter.features]);
model.device = vtkWebGPUDevice.newInstance();
_context.t0 = model.device;
_context.next = 9;
return model.adapter.requestDevice();
case 9:
_context.t1 = _context.sent;
_context.t0.initialize.call(_context.t0, _context.t1);
if (!model.deleted) {
_context.next = 14;
break;
}
model.device = null;
return _context.abrupt("return");
case 14:
// model.device.getHandle().lost.then((info) => {
// console.log(`${info.message}`);
// publicAPI.releaseGraphicsResources();
// });
model.context = model.canvas.getContext('webgpu');
case 15:
case "end":
return _context.stop();
}
}
}, _callee);
}));
publicAPI.releaseGraphicsResources = function () {
var rp = vtkRenderPass.newInstance();
rp.setCurrentOperation('Release');
rp.traverse(publicAPI, null);
model.adapter = null;
model.device = null;
model.context = null;
model.initialized = false;
model.initializing = false;
};
publicAPI.setBackgroundImage = function (img) {
model.bgImage.src = img.src;
};
publicAPI.setUseBackgroundImage = function (value) {
model.useBackgroundImage = value; // Add or remove the background image from the
// DOM as specified.
if (model.useBackgroundImage && !model.el.contains(model.bgImage)) {
model.el.appendChild(model.bgImage);
} else if (!model.useBackgroundImage && model.el.contains(model.bgImage)) {
model.el.removeChild(model.bgImage);
}
};
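// Compose a screenshot: read the rendered pixels back from the GPU, draw them
// into a temporary 2D canvas, overlay any per-prop canvases at their on-screen
// offsets, and emit the result as a data URL through the 'imageReady' event.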
function getCanvasDataURL() {
return _getCanvasDataURL.apply(this, arguments);
}
function _getCanvasDataURL() {
_getCanvasDataURL = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime.mark(function _callee3() {
var format,
temporaryCanvas,
temporaryContext,
result,
imageData,
mainBoundingClientRect,
renderWindow,
renderers,
screenshot,
_args3 = arguments;
return _regeneratorRuntime.wrap(function _callee3$(_context3) {
while (1) {
switch (_context3.prev = _context3.next) {
case 0:
format = _args3.length > 0 && _args3[0] !== undefined ? _args3[0] : model.imageFormat;
// Copy current canvas to not modify the original
temporaryCanvas = document.createElement('canvas');
temporaryContext = temporaryCanvas.getContext('2d');
temporaryCanvas.width = model.canvas.width;
temporaryCanvas.height = model.canvas.height;
_context3.next = 7;
return publicAPI.getPixelsAsync();
case 7:
result = _context3.sent;
imageData = new ImageData(result.colorValues, result.width, result.height);
// temporaryCanvas.putImageData(imageData, 0, 0);
temporaryContext.putImageData(imageData, 0, 0);

// Get current client rect to place canvas
mainBoundingClientRect = model.canvas.getBoundingClientRect();
renderWindow = model.renderable;
renderers = renderWindow.getRenderers();
renderers.forEach(function (renderer) {
var viewProps = renderer.getViewProps();
viewProps.forEach(function (viewProp) {
// Check if the prop has a container that may hold canvases
if (viewProp.getContainer) {
var container = viewProp.getContainer();
var canvasList = container.getElementsByTagName('canvas');

// Go through all canvases and copy them into the temporary main canvas
for (var i = 0; i < canvasList.length; i++) {
var currentCanvas = canvasList[i];
var boundingClientRect = currentCanvas.getBoundingClientRect();
var newXPosition = boundingClientRect.x - mainBoundingClientRect.x;
var newYPosition = boundingClientRect.y - mainBoundingClientRect.y;
temporaryContext.drawImage(currentCanvas, newXPosition, newYPosition);
}
}
});
});
screenshot = temporaryCanvas.toDataURL(format);
temporaryCanvas.remove();
publicAPI.invokeImageReady(screenshot);
case 17:
case "end":
return _context3.stop();
}
}
}, _callee3);
}));
return _getCanvasDataURL.apply(this, arguments);
}
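// Request a screenshot of the next rendered frame. Optionally renders at a
// custom size (or scale) by temporarily resizing the canvas behind a
// placeholder <img>, and optionally resetting (then restoring) each renderer's
// camera for the capture.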
publicAPI.captureNextImage = function () {
var format = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 'image/png';
var _ref2 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
_ref2$resetCamera = _ref2.resetCamera,
resetCamera = _ref2$resetCamera === void 0 ? false : _ref2$resetCamera,
_ref2$size = _ref2.size,
size = _ref2$size === void 0 ? null : _ref2$size,
_ref2$scale = _ref2.scale,
scale = _ref2$scale === void 0 ? 1 : _ref2$scale;
if (model.deleted) {
return null;
}
model.imageFormat = format;
var previous = model.notifyStartCaptureImage;
model.notifyStartCaptureImage = true;
model._screenshot = {
size: !!size || scale !== 1 ? size || model.size.map(function (val) {
return val * scale;
}) : null
};
return new Promise(function (resolve, reject) {
var subscription = publicAPI.onImageReady(function (imageURL) {
if (model._screenshot.size === null) {
model.notifyStartCaptureImage = previous;
subscription.unsubscribe();
if (model._screenshot.placeHolder) {
// resize the main canvas back to its original size and show it
model.size = model._screenshot.originalSize;

// process the resize
publicAPI.modified();

// restore the saved camera parameters, if applicable
if (model._screenshot.cameras) {
model._screenshot.cameras.forEach(function (_ref3) {
var restoreParamsFn = _ref3.restoreParamsFn,
arg = _ref3.arg;
return restoreParamsFn(arg);
});
}

// Trigger a render at the original size
publicAPI.traverseAllPasses();

// Remove and clean up the placeholder, revealing the original
model.el.removeChild(model._screenshot.placeHolder);
model._screenshot.placeHolder.remove();
model._screenshot = null;
}
resolve(imageURL);
} else {
// Create a placeholder image overlay while we resize and render
var tmpImg = document.createElement('img');
// apply the placeholder styles (assigning the object directly to .style would be a no-op)
Object.assign(tmpImg.style, SCREENSHOT_PLACEHOLDER);
tmpImg.src = imageURL;
model._screenshot.placeHolder = model.el.appendChild(tmpImg);

// hide the main canvas
model.canvas.style.display = 'none';

// remember the main canvas original size, then resize it
model._screenshot.originalSize = model.size;
model.size = model._screenshot.size;
model._screenshot.size = null;

// process the resize
publicAPI.modified();
if (resetCamera) {
var isUserResetCamera = resetCamera !== true;

// If resetCamera was requested, we first save camera parameters
// from all the renderers, so we can restore them later
model._screenshot.cameras = model.renderable.getRenderers().map(function (renderer) {
var camera = renderer.getActiveCamera();
var params = camera.get('focalPoint', 'position', 'parallelScale');
return {
resetCameraArgs: isUserResetCamera ? {
renderer: renderer
} : undefined,
resetCameraFn: isUserResetCamera ? resetCamera : renderer.resetCamera,
restoreParamsFn: camera.set,
// "clone" the params so we don't keep refs to properties
arg: JSON.parse(JSON.stringify(params))
};
});

// Perform the resetCamera() on each renderer only after capturing
// the params from all active cameras, in case there happen to be
// linked cameras among the renderers.
model._screenshot.cameras.forEach(function (_ref4) {
var resetCameraFn = _ref4.resetCameraFn,
resetCameraArgs = _ref4.resetCameraArgs;
return resetCameraFn(resetCameraArgs);
});
}

// Trigger a render at the custom size
publicAPI.traverseAllPasses();
}
});
});
};
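// Run every registered render pass over the scene graph. If WebGPU is not yet
// initialized, initialization is started and the traversal is re-queued on the
// 'initialized' event; otherwise the recorded command encoder is submitted and,
// when a capture was requested, the screenshot is taken once the submitted work
// completes.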
publicAPI.traverseAllPasses = function () {
if (model.deleted) {
return;
}

// if we are not initialized then we call initialize
// which is async so we will not actually get a render
// so we queue up another traverse for when we are initialized
if (!model.initialized) {
publicAPI.initialize();
var subscription = publicAPI.onInitialized(function () {
subscription.unsubscribe();
publicAPI.traverseAllPasses();
});
} else {
if (model.renderPasses) {
for (var index = 0; index < model.renderPasses.length; ++index) {
model.renderPasses[index].traverse(publicAPI, null);
}
}
if (model.commandEncoder) {
model.device.submitCommandEncoder(model.commandEncoder);
model.commandEncoder = null;
if (model.notifyStartCaptureImage) {
model.device.onSubmittedWorkDone().then(function () {
getCanvasDataURL();
});
}
}
}
};
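// Bind a remote view stream: the first renderer's background is made
// transparent and incoming frames are displayed as the background image.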
publicAPI.setViewStream = function (stream) {
if (model.viewStream === stream) {
return false;
}
if (model.subscription) {
model.subscription.unsubscribe();
model.subscription = null;
}
model.viewStream = stream;
if (model.viewStream) {
// Force background to be transparent + render
var mainRenderer = model.renderable.getRenderers()[0];
mainRenderer.getBackgroundByReference()[3] = 0;

// Enable display of the background image
publicAPI.setUseBackgroundImage(true);

// Bind to remote stream
model.subscription = model.viewStream.onImageReady(function (e) {
return publicAPI.setBackgroundImage(e.image);
});
model.viewStream.setSize(model.size[0], model.size[1]);
model.viewStream.invalidateCache();
model.viewStream.render();
publicAPI.modified();
}
return true;
};
publicAPI.getUniquePropID = function () {
return model.nextPropID++;
};
publicAPI.getPropFromID = function (id) {
for (var i = 0; i < model.children.length; i++) {
var res = model.children[i].getPropFromID(id);
if (res !== null) {
return res;
}
}
return null;
};
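// Read the opaque pass color texture (rgba16float) back to the CPU through a
// mappable staging buffer, then convert the row-padded half-float data into a
// tightly packed 8-bit RGBA Uint8ClampedArray suitable for ImageData.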
publicAPI.getPixelsAsync = /*#__PURE__*/_asyncToGenerator( /*#__PURE__*/_regeneratorRuntime.mark(function _callee2() {
var device, texture, result, colorBuffer, cmdEnc, cLoad, tmparray, y, x, doffset, soffset;
return _regeneratorRuntime.wrap(function _callee2$(_context2) {
while (1) {
switch (_context2.prev = _context2.next) {
case 0:
device = model.device;
texture = model.renderPasses[0].getOpaquePass().getColorTexture();

// as this is async we really don't want to store things in
// the class as multiple calls may start before resolving
// so anything specific to this request gets put into the
// result object (by value in most cases)
result = {
width: texture.getWidth(),
height: texture.getHeight()
};

// bytesPerRow must be a multiple of 256 bytes, i.e. 32 texels of rgba16float (8 bytes each)
result.colorBufferWidth = 32 * Math.floor((result.width + 31) / 32);
result.colorBufferSizeInBytes = result.colorBufferWidth * result.height * 8;
colorBuffer = vtkWebGPUBuffer.newInstance();
colorBuffer.setDevice(device);
/* eslint-disable no-bitwise */
/* eslint-disable no-undef */
colorBuffer.create(result.colorBufferSizeInBytes, GPUBufferUsage.MAP_READ | GPUBufferUsage.COPY_DST);
/* eslint-enable no-bitwise */
/* eslint-enable no-undef */
cmdEnc = model.device.createCommandEncoder();
cmdEnc.copyTextureToBuffer({
texture: texture.getHandle()
}, {
buffer: colorBuffer.getHandle(),
bytesPerRow: 8 * result.colorBufferWidth,
rowsPerImage: result.height
}, {
width: result.width,
height: result.height,
depthOrArrayLayers: 1
});
device.submitCommandEncoder(cmdEnc);
/* eslint-disable no-undef */
cLoad = colorBuffer.mapAsync(GPUMapMode.READ);
_context2.next = 14;
return cLoad;
case 14:
/* eslint-enable no-undef */
result.colorValues = new Uint16Array(colorBuffer.getMappedRange().slice());
colorBuffer.unmap();

// repack the array
tmparray = new Uint8ClampedArray(result.height * result.width * 4);
for (y = 0; y < result.height; y++) {
for (x = 0; x < result.width; x++) {
doffset = (y * result.width + x) * 4;
soffset = (y * result.colorBufferWidth + x) * 4;
tmparray[doffset] = 255.0 * HalfFloat.fromHalf(result.colorValues[soffset]);
tmparray[doffset + 1] = 255.0 * HalfFloat.fromHalf(result.colorValues[soffset + 1]);
tmparray[doffset + 2] = 255.0 * HalfFloat.fromHalf(result.colorValues[soffset + 2]);
tmparray[doffset + 3] = 255.0 * HalfFloat.fromHalf(result.colorValues[soffset + 3]);
}
}
result.colorValues = tmparray;
return _context2.abrupt("return", result);
case 20:
case "end":
return _context2.stop();
}
}
}, _callee2);
}));
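// Create a hardware selector bound to this render window (used for picking).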
publicAPI.createSelector = function () {
var ret = vtkWebGPUHardwareSelector.newInstance();
ret.setWebGPURenderWindow(publicAPI);
return ret;
};
publicAPI.delete = macro.chain(publicAPI.delete, publicAPI.setViewStream);
}

// ----------------------------------------------------------------------------
// Object factory
// ----------------------------------------------------------------------------
var DEFAULT_VALUES = {
initialized: false,
context: null,
adapter: null,
device: null,
canvas: null,
cursorVisibility: true,
cursor: 'pointer',
containerSize: null,
renderPasses: [],
notifyStartCaptureImage: false,
imageFormat: 'image/png',
useOffScreen: false,
useBackgroundImage: false,
nextPropID: 1,
xrSupported: false,
presentationFormat: null
};

// ----------------------------------------------------------------------------
function extend(publicAPI, model) {
var initialValues = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
Object.assign(model, DEFAULT_VALUES, initialValues);

// Create internal instances
model.canvas = document.createElement('canvas');
model.canvas.style.width = '100%';

// Create internal bgImage
model.bgImage = new Image();
model.bgImage.style.position = 'absolute';
model.bgImage.style.left = '0';
model.bgImage.style.top = '0';
model.bgImage.style.width = '100%';
model.bgImage.style.height = '100%';
model.bgImage.style.zIndex = '-1';

// Inheritance
vtkRenderWindowViewNode.extend(publicAPI, model, initialValues);
model.myFactory = vtkWebGPUViewNodeFactory.newInstance();
/* eslint-disable no-use-before-define */
model.myFactory.registerOverride('vtkRenderWindow', newInstance);
/* eslint-enable no-use-before-define */
// setup default forward pass rendering
model.renderPasses[0] = vtkForwardPass.newInstance();
if (!model.selector) {
model.selector = vtkWebGPUHardwareSelector.newInstance();
model.selector.setWebGPURenderWindow(publicAPI);
}
macro.event(publicAPI, model, 'imageReady');
macro.event(publicAPI, model, 'initialized');

// Build VTK API
macro.get(publicAPI, model, ['commandEncoder', 'device', 'presentationFormat', 'useBackgroundImage', 'xrSupported']);
macro.setGet(publicAPI, model, ['initialized', 'context', 'canvas', 'device', 'renderPasses', 'notifyStartCaptureImage', 'cursor', 'useOffScreen']);
macro.setGetArray(publicAPI, model, ['size'], 2);

// Object methods
vtkWebGPURenderWindow(publicAPI, model);
}

// ----------------------------------------------------------------------------
var newInstance = macro.newInstance(extend, 'vtkWebGPURenderWindow');

// ----------------------------------------------------------------------------
// Register API specific RenderWindow implementation
// ----------------------------------------------------------------------------
registerViewConstructor('WebGPU', newInstance);

// ----------------------------------------------------------------------------
var vtkRenderWindow = {
newInstance: newInstance,
extend: extend
};
export { vtkRenderWindow as default, extend, newInstance };
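
// Usage sketch (not part of this module): a minimal way to drive this WebGPU
// view through the public vtk.js API. It assumes the standard Core RenderWindow
// methods (newAPISpecificView, addView, addRenderer) and a page element with
// id 'container'; kept commented out so importing this file has no side effects.
//
// import vtkRenderWindow from '@kitware/vtk.js/Rendering/Core/RenderWindow';
// import vtkRenderer from '@kitware/vtk.js/Rendering/Core/Renderer';
//
// const renderWindow = vtkRenderWindow.newInstance();
// const renderer = vtkRenderer.newInstance({ background: [0.2, 0.2, 0.2] });
// renderWindow.addRenderer(renderer);
//
// // 'WebGPU' resolves to the newInstance registered above via
// // registerViewConstructor('WebGPU', newInstance)
// const view = renderWindow.newAPISpecificView('WebGPU');
// view.setContainer(document.getElementById('container'));
// view.setSize(800, 600);
// renderWindow.addView(view);
//
// // The first traversal triggers the async WebGPU initialization, then renders
// view.traverseAllPasses();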