vitessce
Version:
This package is the main `vitessce` package on NPM. It re-exports the `<Vitessce/>` component from `@vitessce/all` for backwards compatibility.
1,165 lines (1,128 loc) • 147 kB
JavaScript
import { aw as n, ax as we, ay as se, ar as ue, as as ze, az as _e, l as he, aA as ke, aB as Ie, m as re, aC as le, e as C, V as _, ag as Z, ae as De, aq as M, aD as Me, aE as Pe, av as Ae, W as Be, ad as Ne, z as Ue, aF as Le, aG as j, ao as N, aH as Oe, am as Fe } from "./index-CiwyZ_Wn.js";
import { useRef as P, useState as fe, useMemo as de, useEffect as $, Suspense as Ve } from "react";
import { u as ve, a as Ge, O as me, C as qe } from "./OrbitControls-gS0qAcAF.js";
// Brick-cache geometry constants (minified names kept — the rest of the bundle references them).
const S = 32; // brick edge length in voxels (chunks are validated as S*S*S below)
const B = 64; // brick cache width, in bricks
const G = 64; // brick cache height, in bricks
const ce = 4; // brick cache depth, in bricks
const oe = B * G * ce; // total number of brick-cache slots
const ge = B * S; // brick cache texture width, in voxels
const xe = G * S; // brick cache texture height, in voxels
const pe = ce * S; // brick cache texture depth, in voxels
// Lifecycle states for VolumeDataManager initialization.
const V = {
  NOT_STARTED: "not_started",
  IN_PROGRESS: "in_progress",
  COMPLETE: "complete",
  FAILED: "failed"
};
// Emit a styled "DM:" debug banner via console.warn, but only when the
// imported log-level check (`se`) reports DEBUG as enabled.
function z(f) {
  if (se(le.DEBUG)) {
    console.warn(`%cDM: ${f}`, "background: blue; color: white; padding: 2px; border-radius: 3px;");
  }
}
/**
 * Convert per-resolution image stats into TCZYX-ordered dimension tuples.
 * Other spatial-accelerated code assumes TCZYX dimension order.
 * TODO: standardize lowercase/uppercase dim names at store-level.
 * TODO: handle case when dimension(s) are missing.
 * @param {{dims: {t: number, c: number, z: number, y: number, x: number}}[]} f
 * @returns {number[][]} one [t, c, z, y, x] tuple per resolution level
 */
function Ze(f) {
  return f.map(({ dims: e }) => [e.t, e.c, e.z, e.y, e.x]);
}
/**
 * Compute the per-resolution brick layout: how many S-sized bricks are needed
 * to cover each level's [z, y, x] extent (missing/zero extents count as 1).
 * @param {{depth: number, height: number, width: number}[]} f per-level stats
 * @returns {number[][]} one [zBricks, yBricks, xBricks] triple per level
 */
function Xe(f) {
  return f.map((t) => {
    const zyx = [t.depth, t.height, t.width]; // z, y, x
    return zyx.map((extent) => Math.ceil((extent || 1) / S));
  });
}
/**
 * Build the multi-resolution multi-channel page table (MRMCPT) metadata and
 * the two GPU textures backing it: a Uint8 brick-cache texture and a Uint32
 * page-table texture. `ue` is presumably THREE's Data3DTexture and the
 * format/type/filter constants (ze, _e, he, ke, Ie, re) its enums — TODO confirm.
 * @param f brickLayout: per-resolution [z, y, x] brick counts (highest res first)
 * @param t channel count (multiplies the appended level-0 z region)
 * @returns {{PT: object, ptTHREE: object, bcTHREE: object}}
 */
function je(f, t) {
  n.debug("_initMRMCPT", f, t);
  const e = {
    channelOffsets: [
      [0, 0, 1],
      [0, 1, 0],
      [0, 1, 1],
      [1, 0, 0],
      [1, 0, 1],
      [1, 1, 0],
      [1, 1, 1]
    ],
    anchors: [],
    offsets: [],
    xExtent: 0,
    // includes the offset inclusive
    yExtent: 0,
    // includes the offset inclusive
    zExtent: 0,
    // includes the offset inclusive
    z0Extent: 0,
    // l0 z extent
    zTotal: 0
    // original z extent plus the l0 z extent times the channel count
  };
  // Extents start at 1 so the lower levels stack after a one-voxel offset.
  e.xExtent = 1, e.yExtent = 1, e.zExtent = 1;
  // z brick count of the highest-resolution level (level 0).
  const i = f[0][0];
  // NOTE: lowestDataRes is added here even though it is absent from the
  // literal above; callers read it from the returned PT object.
  e.z0Extent = i, e.lowestDataRes = f.length - 1;
  // Walk from the lowest resolution upward, recording each level's anchor
  // (placement origin in the page table) before growing the extents.
  for (let u = f.length - 1; u > 0; u--)
    e.anchors.push([
      e.xExtent,
      e.yExtent,
      e.zExtent
    ]), e.xExtent += f[u][2], e.yExtent += f[u][1], e.zExtent += f[u][0];
  // Level 0 is anchored at z = zExtent (appended region); reverse so anchors
  // are ordered highest-resolution first, matching resolution indices.
  e.anchors.push([0, 0, e.zExtent]), e.anchors.reverse(), e.zTotal = e.zExtent + t * i;
  // Brick-cache voxel data (R8). NOTE: this allocates ge*xe*pe bytes (~512 MB
  // with the constants above).
  const a = new Uint8Array(ge * xe * pe);
  a.fill(0);
  // Page-table entries (R32UI), one per page-table texel.
  const r = new Uint32Array(e.xExtent * e.yExtent * e.zTotal);
  r.fill(0);
  const o = new ue(a, ge, xe, pe);
  o.format = ze, o.type = _e, o.internalFormat = "R8", o.minFilter = he, o.magFilter = he, o.generateMipmaps = !1, o.needsUpdate = !0;
  const l = new ue(r, e.xExtent, e.yExtent, e.zTotal);
  return l.format = ke, l.type = Ie, l.internalFormat = "R32UI", l.minFilter = re, l.magFilter = re, l.generateMipmaps = !1, l.needsUpdate = !0, n.debug("_initMRMCPT", e, l, o), {
    PT: e,
    ptTHREE: l,
    bcTHREE: o
  };
}
function Ye(f, t, e, i, a) {
const r = (o) => Math.max(0, Math.min(127, Math.floor(o / 2)));
return (1 << 31 | 1 << 30 | r(f) << 23 | r(t) << 16 | (e & 63) << 10 | (i & 63) << 4 | a & 15) >>> 0;
}
/**
 * Map a page-table coordinate (f=x, t=y, e=z) back to its logical
 * (channel, resolution, x, y, z) location.
 * Coordinates with z >= PT_zExtent fall in the appended per-channel level-0
 * region; otherwise the anchors identify which lower-resolution octant the
 * coordinate belongs to. Returns all -1s when nothing matches.
 */
function He(f, t, e, i) {
  const { PT_zExtent: zExtent, PT_z0Extent: z0Extent, PT_anchors: anchors } = i;
  if (e >= zExtent) {
    // Appended region: level 0, one z0Extent-deep slab per channel.
    const rel = e - zExtent;
    return {
      channel: Math.floor(rel / z0Extent),
      resolution: 0,
      x: f,
      y: t,
      z: rel % z0Extent
    };
  }
  for (let c = 1; c < anchors.length; c++) {
    const [ax, ay, az] = anchors[c];
    // Skip levels whose anchor box fully contains the coordinate.
    if (f < ax && t < ay && e < az) continue;
    const d = [f >= ax ? 1 : 0, t >= ay ? 1 : 0, e >= az ? 1 : 0];
    const octant = (d[0] << 2) | (d[1] << 1) | d[2];
    return {
      // Octant index (clamped to 1..7) selects one of the 7 channel offsets.
      channel: Math.max(1, Math.min(7, octant)) - 1,
      resolution: c,
      x: f - d[0] * ax,
      y: t - d[1] * ay,
      z: e - d[2] * az
    };
  }
  return { channel: -1, resolution: -1, x: -1, y: -1, z: -1 };
}
// Tuning constant; not referenced in this chunk — presumably consumed by code
// later in the bundle (e.g. a sigma/threshold default) — TODO confirm.
const Ke = 0.25;
/**
 * Decode the shader-produced request buffer (RGBA bytes per pixel) into at
 * most `t` unique brick requests, ranked by occurrence count. When the
 * weighting options are valid, each occurrence is Gaussian-weighted by its
 * distance from the viewport center; otherwise a warning is logged and every
 * occurrence counts as 1.
 * @param {Uint8Array} f request buffer of length width*height*4
 * @param {number} t maximum number of requests to return
 * @param {{width: number, height: number, sigmaNormalized: number}} e
 * @returns {{requests: {x,y,z}[], origRequestCount: number}}
 */
function We(f, t, e) {
  const weights = new Map();
  const { width: a, height: r, sigmaNormalized: o } = e;
  const useWeighting = Number.isInteger(a) && Number.isInteger(r) && a > 0 && r > 0 && typeof o == "number";
  if (!useWeighting) {
    n.warn("_requestBufferToRequestObjects: proceeding without weighting");
  }
  const pixelCount = Math.floor(f.length / 4);
  for (let p = 0; p < pixelCount; p += 1) {
    const base = p * 4;
    const b0 = f[base];
    const b1 = f[base + 1];
    const b2 = f[base + 2];
    const b3 = f[base + 3];
    // All-zero pixels carry no request.
    if ((b0 | b1 | b2 | b3) === 0) continue;
    // Reassemble the 32-bit request key from the four bytes.
    const key = (b0 << 24 | b1 << 16 | b2 << 8 | b3) >>> 0;
    let w = 1;
    if (useWeighting) {
      // Gaussian falloff with normalized distance from the viewport center.
      const px = p % a;
      const py = Math.floor(p / a);
      const dx = px - a / 2;
      const dy = py - r / 2;
      const dist = Math.sqrt(dx * dx / (a * a) + dy * dy / (r * r)) / o;
      w = Math.exp(-0.5 * dist * dist);
    }
    weights.set(key, (weights.get(key) || 0) + w);
  }
  const ranked = [...weights.entries()].sort((p1, p2) => p2[1] - p1[1]);
  const requests = ranked.slice(0, t).map(([key]) => ({
    x: key >> 22 & 1023,
    y: key >> 12 & 1023,
    z: key & 4095
  }));
  return { requests, origRequestCount: weights.size };
}
/**
 * VolumeDataManager (minified as `$e`): streams OME-Zarr volume bricks into a
 * WebGL2 brick-cache 3D texture and a multi-resolution multi-channel page
 * table (MRMCPT), with LRU eviction, manual stop/restart, and WebGL
 * context-loss recovery. Interface is unchanged from the original; see the
 * "Fix:" comments for the two behavioral corrections.
 */
class $e {
  // `t` is either a THREE renderer (with getContext) or a raw WebGL context.
  constructor(t) {
    z("CLASS INITIALIZING"), n.debug("VolumeDataManager constructor", { glParam: t, glParamContext: t.getContext?.() });
    const e = t.getContext?.() || t, i = t;
    // Resolve a usable WebGL context; fall back to a parameter-stubbing mock
    // so construction never throws in headless/test environments.
    e.domElement && e.getContext ? this.gl = e.getContext() : e.isWebGLRenderer ? this.gl = e.getContext() : this.gl = e, this.renderer = i, (!this.gl || typeof this.gl.getParameter != "function") && (n.debug("Unable to get WebGL context, using mock context"), this.gl = {
      getParameter: (a) => ({
        MAX_TEXTURE_SIZE: 4096,
        MAX_3D_TEXTURE_SIZE: 256,
        MAX_RENDERBUFFER_SIZE: 4096,
        MAX_UNIFORM_BUFFER_BINDINGS: 16
      })[a] || 0,
      isContextLost: () => !1,
      MAX_TEXTURE_SIZE: "MAX_TEXTURE_SIZE",
      MAX_3D_TEXTURE_SIZE: "MAX_3D_TEXTURE_SIZE",
      MAX_RENDERBUFFER_SIZE: "MAX_RENDERBUFFER_SIZE",
      MAX_UNIFORM_BUFFER_BINDINGS: "MAX_UNIFORM_BUFFER_BINDINGS"
    // Wire up context-loss listeners and snapshot the device limits.
    }), this._originalGlParam = t, this._isContextLost = !1, this._contextRestoredCallbacks = [], this.gl && this.gl.canvas && (this.gl.canvas.addEventListener("webglcontextlost", this._handleContextLost.bind(this)), this.gl.canvas.addEventListener("webglcontextrestored", this._handleContextRestored.bind(this))), n.debug("GL CONSTANTS"), n.debug(this.gl), n.debug(this.gl.TEXTURE0), n.debug(this.gl.textures), n.debug("RENDERER"), n.debug(this.renderer), this.deviceLimits = {
      maxTextureSize: this.gl.getParameter(this.gl.MAX_TEXTURE_SIZE),
      max3DTextureSize: this.gl.getParameter(this.gl.MAX_3D_TEXTURE_SIZE),
      maxRenderbufferSize: this.gl.getParameter(this.gl.MAX_RENDERBUFFER_SIZE),
      maxUniformBufferBindings: this.gl.getParameter(this.gl.MAX_UNIFORM_BUFFER_BINDINGS)
    // Zarr store metadata, populated by init().
    }, this.zarrStore = {
      resolutions: null,
      // 6 (the number of resolutions aka. pyramid levels in the file)
      chunkSize: [],
      // [32, 32, 32]
      shapes: [],
      // [[795, 1024, 1024], ..., [64, 64, 64], [32, 32, 32]]
      arrays: [],
      // [array0, array1, array2, array3, array4, array5]
      dtype: "",
      // 'uint8'
      physicalSizeTotal: [],
      // [795 x 0.0688, 1024 x 0.03417, 1024 x 0.03417]
      physicalSizeVoxel: [],
      // [0.0688, 0.03417, 0.03417]
      brickLayout: [],
      // [[25, 32, 32],[13, 16, 16], ..., [2,2,2],[1,1,1]]
      // store: '', // ref to this.store
      // group: '', // ref to this.group
      channelCount: 1,
      // MAX 7 TODO: get from zarr metadata
      scales: [],
      // downsample ratios, [x,y,z] per resolution level
      lowestDataRes: 0
      // lowest resolution level with data
    // Channel slot bookkeeping (up to 7 GPU slots mapped onto zarr channels).
    }, this.ptTHREE = null, this.bcTHREE = null, this.channels = {
      maxChannels: 7,
      // lower when dataset has fewer, dictates page table size
      zarrMappings: [],
      // stores the zarr channel index for every one of the up to 7 channels
      colorMappings: [],
      // stores the PT slot for every color
      downsampleMin: [],
      // stores the downsample min for every one of the up to 7 channels
      downsampleMax: []
      // stores the downsample max for every one of the up to 7 channels
    // Page-table geometry, rebuilt by initMRMCPT().
    }, this.PT = {
      channelOffsets: [
        [0, 0, 1],
        [0, 1, 0],
        [0, 1, 1],
        [1, 0, 0],
        [1, 0, 1],
        [1, 1, 0],
        [1, 1, 1]
      ],
      anchors: [],
      offsets: [],
      xExtent: 0,
      // includes the offset inclusive
      yExtent: 0,
      // includes the offset inclusive
      zExtent: 0,
      // includes the offset inclusive
      z0Extent: 0,
      // l0 z extent
      zTotal: 0
      // original z extent plus the l0 z extent times the channel count
    // Brick-cache state: per-slot LRU timestamps, min/max, and reverse map
    // (brick-cache slot -> page-table coord), plus loading-control flags.
    }, this.bricksEverLoaded = /* @__PURE__ */ new Set(), this.isBusy = !1, this.BCTimeStamps = new Array(oe).fill(0), this.BCMinMax = new Array(oe).fill([0, 0]), this.BCFull = !1, this.BCUnusedIndex = 0, this.bc2pt = new Array(oe).fill(null), this.LRUStack = [], this.triggerUsage = !0, this.triggerRequest = !1, this.timeStamp = 0, this.k = 40, this.noNewRequests = !1, this.manuallyStopped = !1, this.needsBailout = !1, this.initStatus = V.NOT_STARTED, this.initError = null, this._lastChannelConfig = null, this.currentRequestCount = 0, this.totalBricksRequested = 0, z("VolumeDataManager constructor complete");
  }
  /**
   * Get loading progress information
   * @returns {Object} Loading progress with bricksLoaded, totalBricks, isLoading, and percentage
   */
  getLoadingProgress() {
    const t = this.bricksEverLoaded.size, e = !this.noNewRequests && this.currentRequestCount > 0, i = this.totalBricksRequested > 0 ? (this.totalBricksRequested - this.currentRequestCount) / this.totalBricksRequested * 100 : 0;
    return {
      bricksLoaded: t,
      currentRequestCount: this.currentRequestCount,
      totalBricksRequested: this.totalBricksRequested,
      isLoading: e,
      percentage: i,
      noNewRequests: this.noNewRequests
    };
  }
  /**
   * Manually stop loading and render at highest resolution
   */
  stopLoading() {
    n.debug("Manually stopping data loading"), this.noNewRequests = !0, this.manuallyStopped = !0, this.needsBailout = !0, this.currentRequestCount = 0, this.totalBricksRequested = 0;
  }
  /**
   * Manually restart loading
   */
  restartLoading() {
    n.debug("Manually restarting data loading"), this.noNewRequests = !1, this.manuallyStopped = !1, this.needsBailout = !1, this.triggerUsage = !0, this.currentRequestCount = 0, this.totalBricksRequested = 0;
  }
  /**
   * Handle WebGL context loss
   */
  _handleContextLost(t) {
    // Snapshot the channel config so it can be restored after the context returns.
    z("CONTEXT LOST"), n.warn("WebGL context lost, preventing default and setting flag"), t.preventDefault(), this._isContextLost = !0, this.channels && this.channels.zarrMappings && (this._lastChannelConfig = {
      zarrMappings: [...this.channels.zarrMappings],
      colorMappings: [...this.channels.colorMappings],
      downsampleMin: [...this.channels.downsampleMin],
      downsampleMax: [...this.channels.downsampleMax]
    });
  }
  /**
   * Handle WebGL context restoration
   */
  _handleContextRestored(t) {
    if (z("CONTEXT RESTORED"), n.warn("WebGL context restored, reinitializing textures"), this._isContextLost = !1, this._originalGlParam && this._originalGlParam.getContext && (this.gl = this._originalGlParam.getContext()), this._lastChannelConfig && (this.channels.zarrMappings = [...this._lastChannelConfig.zarrMappings], this.channels.colorMappings = [...this._lastChannelConfig.colorMappings], this.channels.downsampleMin = [...this._lastChannelConfig.downsampleMin], this.channels.downsampleMax = [...this._lastChannelConfig.downsampleMax], n.debug("Restored channel configuration after context loss")), this.PT && this.zarrStore && this.zarrStore.brickLayout)
      try {
        this.initMRMCPT(), n.debug("Successfully reinitialized MRMCPT after context restoration");
      } catch (e) {
        n.error("Failed to reinitialize MRMCPT after context restoration:", e);
      }
    this._contextRestoredCallbacks.forEach((e) => {
      try {
        e();
      } catch (i) {
        n.error("Error in context restored callback:", i);
      }
    });
  }
  /**
   * Check if WebGL context is lost
   */
  isContextLost() {
    return this._isContextLost ? !0 : this.gl && typeof this.gl.isContextLost == "function" ? this.gl.isContextLost() : !1;
  }
  /**
   * Register a callback to be called when context is restored
   */
  onContextRestored(t) {
    typeof t == "function" && this._contextRestoredCallbacks.push(t);
  }
  // Store the image wrappers and their layer scopes for init() to consume.
  initImages(t, e) {
    z("INIT IMAGES"), this.images = t, this.imageLayerScopes = e;
  }
  /**
   * Initialize the VolumeDataManager with Zarr store details and device limits
   * This should be called ONCE at website initialization
   * TODO(mark): merge this with the constructor?
   * @returns {Promise<Object>} Object with Zarr store details and device limits
   */
  async init(t) {
    // Re-entrant calls report the outcome of the first call instead of re-running.
    if (z("INIT()"), this.initStatus !== V.NOT_STARTED)
      return n.debug("VolumeDataManager init() was called more than once!"), this.initStatus === V.COMPLETE ? {
        success: !0,
        deviceLimits: this.deviceLimits,
        zarrStore: this.zarrStore,
        // physicalScale: this.physicalScale,
        physicalSizeTotal: this.zarrStore.physicalSizeTotal,
        physicalSizeVoxel: this.zarrStore.physicalSizeVoxel,
        error: null
      } : this.initStatus === V.FAILED ? {
        success: !1,
        error: this.initError || "Unknown initialization error"
      } : {
        success: !1,
        pending: !0,
        error: "Initialization in progress"
      };
    this.initStatus = V.IN_PROGRESS, z("INIT() IN PROGRESS");
    try {
      const e = this.images?.[this.imageLayerScopes?.[0]]?.image?.instance;
      // Fix: validate `e` before dereferencing it. Previously
      // `e.vivLoader.metadata` was read first, so a missing image threw a
      // TypeError and the intended Error below was unreachable.
      if (!e || e.getType() !== "ome-zarr")
        throw new Error("Invalid imageWrapper or not an OME-Zarr image");
      this.ngffMetadata = e.vivLoader.metadata, n.debug("ngffMetadata", this.ngffMetadata);
      const i = e.getMultiResolutionStats(), a = Ze(i), r = i.length;
      this.zarrStore.resolutions = r;
      const o = e.getData();
      if (!Array.isArray(o) || o.length < 1)
        throw new Error("Not a multiresolution loader");
      if (!we(o[0].labels, ["t", "c", "z", "y", "x"]))
        throw new Error("Expected OME-Zarr data with dimensions [t, c, z, y, x]");
      n.debug("vivData", o);
      const l = o.map((s) => s._data), u = new Array(r).fill(null);
      if (l.length > 0) {
        const s = l[0];
        // Populate the store from the highest-resolution array, then size the
        // channel-slot arrays (capped at 7 slots).
        if (this.zarrStore = {
          resolutions: r,
          chunkSize: s.chunks,
          shapes: a,
          arrays: l,
          dtype: s.dtype,
          physicalSizeTotal: [],
          // Will be populated if metadata exists
          physicalSizeVoxel: [],
          // Will be populated if metadata exists
          brickLayout: [],
          // Calculate from shapes and chunk sizes
          // store: this.store,
          // group: this.group,
          channelCount: a[0][1],
          scales: u
        }, this.channels.colorMappings = new Array(Math.min(this.zarrStore.channelCount, 7)).fill(-1), this.channels.zarrMappings = new Array(Math.min(this.zarrStore.channelCount, 7)).fill(void 0), this.channels.downsampleMin = new Array(Math.min(this.zarrStore.channelCount, 7)).fill(void 0), this.channels.downsampleMax = new Array(Math.min(this.zarrStore.channelCount, 7)).fill(void 0), s.meta && s.meta.physicalSizes) {
          const { x: d, y: m, z: h } = s.meta.physicalSizes, x = h?.size || 1, b = m?.size || 1, R = d?.size || 1;
          this.zarrStore.physicalSizeVoxel = [x, b, R], s.shape && s.shape.length >= 5 && (this.zarrStore.physicalSizeTotal = [
            (s.shape[2] || 1) * x,
            (s.shape[3] || 1) * b,
            (s.shape[4] || 1) * R
          ]);
        } else
          this.zarrStore.physicalSizeVoxel = [1, 1, 1], this.zarrStore.physicalSizeTotal = [
            s.shape[2] || 1,
            s.shape[3] || 1,
            s.shape[4] || 1
          ];
        const { multiscales: v } = this.ngffMetadata;
        if (!v)
          throw new Error("Expected multiscales metadata in group.attrs");
        // Read per-level downsample scales from NGFF coordinateTransformations,
        // falling back to an assumed factor of 2 per level.
        if (v?.[0]?.datasets?.[0]?.coordinateTransformations) {
          for (let d = 0; d < r; d++)
            if (v?.[0]?.datasets?.[d]?.coordinateTransformations?.[0]?.scale) {
              const { scale: m } = v[0].datasets[d].coordinateTransformations[0];
              u[d] = [m[4], m[3], m[2]];
            }
        } else {
          n.error("no coordinateTransformations available, assuming downsampling ratio of 2 per dimension");
          for (let d = 0; d < r; d++) {
            const m = 2 ** d;
            u[d] = [m, m, m];
          }
        }
        this.zarrStore.scales = u;
        // Top-level coordinateTransformations (if present) override the voxel sizes.
        const { coordinateTransformations: g } = this.ngffMetadata;
        if (g?.[0]?.scale) {
          const { scale: d } = g[0], m = d.length;
          if (m >= 3) {
            const h = d[m - 3], x = d[m - 2], b = d[m - 1];
            this.zarrStore.physicalSizeVoxel = [h, x, b], s.shape && s.shape.length >= 5 && (this.zarrStore.physicalSizeTotal = [
              (s.shape[2] || 1) * h,
              (s.shape[3] || 1) * x,
              (s.shape[4] || 1) * b
            ]);
          }
        }
        this.zarrStore.brickLayout = Xe(e.getMultiResolutionStats()), n.debug("config", t);
        const { omero: c } = this.ngffMetadata || {};
        if (!c)
          throw new Error("Expected omero metadata in ngffMetadata");
        // Map each configured UI channel to a slot, seeding the downsample
        // window from omero channel metadata.
        n.debug("omero", c), Object.keys(t).forEach((d, m) => {
          const h = t[d].spatialTargetC;
          this.channels.zarrMappings[m] = h, this.channels.colorMappings[m] = m, this.channels.downsampleMin[m] = c?.channels?.[h]?.window?.min || 0, this.channels.downsampleMax[m] = c?.channels?.[h]?.window?.max || 65535;
        }), n.debug("zarrMappings after init", this.channels.zarrMappings), n.debug("colorMappings after init", this.channels.colorMappings), n.debug("downsampleMin after init", this.channels.downsampleMin), n.debug("downsampleMax after init", this.channels.downsampleMax), this.initMRMCPT();
      }
      return this.initStatus = V.COMPLETE, z("INIT() COMPLETE"), {
        success: !0,
        deviceLimits: this.deviceLimits,
        zarrStore: this.zarrStore,
        // physicalScale: this.physicalScale,
        physicalSizeTotal: this.zarrStore.physicalSizeTotal,
        physicalSizeVoxel: this.zarrStore.physicalSizeVoxel,
        error: null
      };
    } catch (e) {
      return z("INIT() FAILED"), n.error("Error initializing VolumeDataManager:", e), this.initStatus = V.FAILED, this.initError = e.message || "Unknown error", {
        success: !1,
        error: this.initError
      };
    }
  }
  /**
   * Initialize the BrickCache and PageTable
   * MRMCPT: multi-resolution multi-channel page table
   *
   * Depends on:
   * - zarrStore.brickLayout
   * - zarrMappings.length (zarrMappings: the zarr channel index for every one of the up to 7 channels)
   * -
   */
  initMRMCPT() {
    z("initMRMCPT");
    const { PT: t, ptTHREE: e, bcTHREE: i } = je(this.zarrStore.brickLayout, this.channels.zarrMappings.length);
    this.PT = t, this.ptTHREE = e, this.bcTHREE = i, z("initMRMCPT() COMPLETE");
  }
  // Seed the cache by loading a single first brick.
  async initTexture() {
    const t = [
      { x: 0, y: 0, z: 1 }
    ];
    z("initTexture - loading first brick"), await this.handleBrickRequests(t);
  }
  // Reconcile the requested UI channels with the (up to 7) GPU channel slots,
  // reusing free or no-longer-needed slots and purging reused ones.
  updateChannels(t) {
    if (z("updateChannels"), n.debug("channelProps", t), n.debug("this.channels.zarrMappings", this.channels.zarrMappings), n.debug("this.channels.colorMappings", this.channels.colorMappings), n.debug("this.channels.downsampleMin", this.channels.downsampleMin), n.debug("this.channels.downsampleMax", this.channels.downsampleMax), this.channels.zarrMappings.length === 0) {
      n.debug("channels not initialized yet");
      return;
    }
    const e = Object.values(t).map((l) => l.spatialTargetC).filter((l) => l !== void 0), i = this.channels.zarrMappings.filter((l) => l !== void 0), a = [...new Set(e)].sort((l, u) => l - u), r = [...new Set(i)].sort((l, u) => l - u);
    // NOTE(review): this logs "skipping update" but does NOT return — the
    // update proceeds regardless. Confirm whether an early return was intended.
    a.length === r.length && a.every((l, u) => l === r[u]) && n.debug("Channel mappings unchanged, skipping update"), n.debug("Channel mappings changed:", {
      current: r,
      requested: a
    }), Object.entries(t).forEach(([l, u]) => {
      const s = u.spatialTargetC;
      n.debug(`UI channel "${l}" wants zarr channel ${s}`);
      const v = this.channels.zarrMappings.indexOf(s);
      if (v === -1) {
        const g = this.channels.zarrMappings.findIndex((c) => c === void 0);
        if (g !== -1)
          this.channels.zarrMappings[g] = s, n.debug("channelData", u), n.debug("this.ngffMetadata?.omero?.channels", this.ngffMetadata?.omero?.channels), n.debug("targetZarrChannel", s), this.channels.downsampleMin[g] = this.ngffMetadata?.omero?.channels?.[s]?.window?.min || 0, this.channels.downsampleMax[g] = this.ngffMetadata?.omero?.channels?.[s]?.window?.max || 65535, n.debug(`Mapped zarr channel ${s} to slot ${g}`), n.debug("channels", this.channels);
        else {
          n.debug("No free slots found, looking for unused mapped channels");
          const c = this.channels.zarrMappings.filter((h) => h !== void 0), d = e, m = c.filter((h) => !d.includes(h));
          if (n.debug("Currently mapped:", c), n.debug("Still requested:", d), n.debug("Unused mapped channels:", m), m.length > 0) {
            const h = this.channels.zarrMappings.findIndex((x) => m.includes(x));
            if (h !== -1) {
              const x = this.channels.zarrMappings[h];
              this.channels.zarrMappings[h] = s, this.channels.downsampleMin[h] = this.ngffMetadata?.omero?.channels?.[s]?.window?.min || 0, this.channels.downsampleMax[h] = this.ngffMetadata?.omero?.channels?.[s]?.window?.max || 65535, n.debug(`Reused slot ${h}: ${x} -> ${s}`), this._purgeChannel(h);
            } else
              n.error("Could not find slot to reuse - this should not happen");
          } else
            n.error("All slots are full and all mapped channels are still in use");
        }
      } else
        n.debug(`Zarr channel ${s} already mapped to slot ${v}`);
    });
    // Rebuild colorMappings (always length 7, -1 padded) from the final slots.
    const o = Object.values(t).map((l) => {
      const u = this.channels.zarrMappings.indexOf(l.spatialTargetC);
      return u !== -1 ? u : -1;
    });
    for (; o.length < 7; )
      o.push(-1);
    n.debug("newColorMappings", o), this.channels.colorMappings = o, n.debug("updatedChannels", this.channels), this._lastChannelConfig = {
      zarrMappings: [...this.channels.zarrMappings],
      colorMappings: [...this.channels.colorMappings],
      downsampleMin: [...this.channels.downsampleMin],
      downsampleMax: [...this.channels.downsampleMax]
    };
  }
  /**
   * Try to load a resolution level
   * @param {number} resolutionIndex - The resolution level to load
   * @param {Array} arrays - Array to store the loaded arrays
   * @returns {Promise} Promise resolving when the resolution is loaded or rejected
   */
  /*
  async tryLoadResolution(resolutionIndex, arrays) {
    logWithColor('tryLoadResolution');
    log.debug(resolutionIndex, arrays);
    try {
      const array = await zarrita.open(this.group.resolve(String(resolutionIndex)));
      // Create new arrays to avoid modifying parameters directly
      const newArrays = [...arrays];
      newArrays[resolutionIndex] = array;
      // Update the original arrays
      Object.assign(arrays, newArrays);
      logWithColor('tryLoadResolution() COMPLETE');
      return { success: true, level: resolutionIndex };
    } catch (err) {
      log.error(`Failed to load resolution ${resolutionIndex}:`, err);
      return { success: false, level: resolutionIndex, error: err.message };
    }
  }
  */
  /**
   * Get physical dimensions
   * @returns {Array} Physical dimensions [X, Y, Z]
   */
  getPhysicalDimensionsXYZ() {
    // physicalSizeTotal is stored [z, y, x]; reverse to [x, y, z].
    n.debug("getPhysicalDimensionsXYZ"), n.debug("this.zarrStore.physicalSizeTotal", this.zarrStore.physicalSizeTotal);
    const t = [
      this.zarrStore.physicalSizeTotal[2],
      this.zarrStore.physicalSizeTotal[1],
      this.zarrStore.physicalSizeTotal[0]
    ];
    return n.debug("out", t), t;
  }
  /**
   * Get the maximum resolution
   * @returns {number} Maximum resolution
   */
  getMaxResolutionXYZ() {
    // shapes[0] is [t, c, z, y, x] for the highest-resolution level.
    n.debug("getMaxResolutionXYZ"), n.debug("this.zarrStore.shapes", this.zarrStore.shapes);
    const t = [
      this.zarrStore.shapes[0][4],
      this.zarrStore.shapes[0][3],
      this.zarrStore.shapes[0][2]
    ];
    return n.debug("out", t), t;
  }
  // Voxel physical sizes in [x, y, z] order (stored internally as [z, y, x]).
  getOriginalScaleXYZ() {
    z("getOriginalScaleXYZ"), n.debug("this.zarrStore.physicalSizeVoxel", this.zarrStore.physicalSizeVoxel);
    const t = [
      this.zarrStore.physicalSizeVoxel[2],
      this.zarrStore.physicalSizeVoxel[1],
      this.zarrStore.physicalSizeVoxel[0]
    ];
    return n.debug("out", t), t;
  }
  // Voxel sizes normalized by the x voxel size: [1, y/x, z/x].
  getNormalizedScaleXYZ() {
    n.debug("getNormalizedScaleXYZ");
    // Fix: the z component previously divided physicalSizeVoxel[0] by itself
    // (always 1). physicalSizeVoxel is [z, y, x], so — mirroring the y term
    // and getBoxDimensionsXYZ — it must be divided by the x size (index 2).
    const t = [
      1,
      this.zarrStore.physicalSizeVoxel[1] / this.zarrStore.physicalSizeVoxel[2],
      this.zarrStore.physicalSizeVoxel[0] / this.zarrStore.physicalSizeVoxel[2]
    ];
    return n.debug("out", t), t;
  }
  // Voxel-count aspect ratio of the volume, normalized by x: [1, y/x, z/x].
  getBoxDimensionsXYZ() {
    n.debug("getBoxDimensionsXYZ"), n.debug("this.zarrStore.shapes", this.zarrStore.shapes);
    const t = [
      1,
      this.zarrStore.shapes[0][3] / this.zarrStore.shapes[0][4],
      this.zarrStore.shapes[0][2] / this.zarrStore.shapes[0][4]
    ];
    return n.debug("out", t), t;
  }
  /**
   * Load a specific Zarr chunk based on [t,c,z,y,x] coordinates
   * @param {number} t - Time point (default 0)
   * @param {number} c - Channel (default 0)
   * @param {number} z - Z coordinate
   * @param {number} y - Y coordinate
   * @param {number} x - X coordinate
   * @param {number} resolution - Resolution level
   * @returns {Promise<Uint8Array>} 32x32x32 chunk data
   */
  async loadZarrChunk(t = 0, e = 0, i, a, r, o) {
    if (!this.zarrStore || !this.zarrStore.arrays[o])
      throw new Error("Zarr store or resolution not initialized");
    n.debug("loadZarrChunk", { t, c: e, z: i, y: a, x: r, resolution: o });
    const u = await this.zarrStore.arrays[o].getChunk([t, e, i, a, r]);
    if (!u)
      throw new Error(`No chunk found at coordinates [${t},${e},${i},${a},${r}]`);
    if (u.data.length !== S * S * S)
      throw new Error(`Unexpected chunk size: ${u.data.length}`);
    return u.data;
  }
  /**
   * Process the buffer of brick requests from the shader, turning them into
   * actual Promises for Zarr chunks on the JS side.
   * @param {Uint8Array} buffer The bufRequest (of length width*height*4)
   * containing the brick requests from the shader.
   * @param {object} optsForWeighting
   * @param {number} optsForWeighting.width The width of the render target.
   * @param {number} optsForWeighting.height The height of the render target.
   * @param {number} optsForWeighting.sigmaNormalized The normalized sigma value
   * to use for weighting the brick requests based on their distance from
   * the center of the render target.
   */
  async processRequestData(t, e) {
    if (this.isBusy) {
      n.debug("processRequestData: already busy, skipping");
      return;
    }
    if (this.noNewRequests) {
      n.debug("processRequestData: loading stopped by user, skipping");
      return;
    }
    if (this.isContextLost()) {
      n.debug("processRequestData: WebGL context is lost, skipping");
      return;
    }
    this.isBusy = !0, this.triggerRequest = !1;
    const { requests: i, origRequestCount: a } = We(t, this.k, e);
    // An empty decoded request set means everything visible is resident.
    i.length === 0 && (this.noNewRequests = !0), n.debug(`processRequestData: handling ${i.length} requests of ${a}`), await this.handleBrickRequests(i), this.triggerUsage = !0, this.isBusy = !1;
  }
  // Fold the shader's usage buffer into per-slot LRU timestamps, then (when
  // the cache is full) rebuild the LRU eviction stack.
  async processUsageData(t) {
    if (this.isBusy) {
      n.debug("processUsageData: already busy, skipping"), this.needsBailout = !0;
      return;
    }
    if (this.isContextLost()) {
      n.debug("processUsageData: WebGL context is lost, skipping");
      return;
    }
    this.isBusy = !0, this.triggerUsage = !1;
    const e = ++this.timeStamp, i = /* @__PURE__ */ new Set();
    for (let a = 0; a < t.length; a += 4) {
      const r = t[a], o = t[a + 1], l = t[a + 2];
      if ((r | o | l) === 0)
        continue;
      // Linearize the brick-cache [x, y, z] coordinate into a slot index.
      const u = l * B * G + o * B + r;
      u < this.BCTimeStamps.length && i.add(u);
    }
    Array.from(i).forEach((a) => {
      this.BCTimeStamps[a] = e;
    }), this.BCFull && this._buildLRU(), this.triggerRequest = !0, this.isBusy = !1;
  }
  // Helper method to update PT entries for evicted bricks
  _evictBrick(t) {
    const e = this.bc2pt[t];
    if (!e)
      return;
    // Rewrite the PT entry as "not resident" while keeping the halved min/max.
    const [i, a] = this.BCMinMax[t] || [0, 0], r = (0 | 1 << 30 | Math.min(127, i >> 1) << 23 | Math.min(127, a >> 1) << 16) >>> 0;
    this._updatePTEntry(e.x, e.y, e.z, r), this.bc2pt[t] = null;
  }
  // Clear a channel slot: drop its mappings and zero its page-table region.
  _purgeChannel(t) {
    if (n.debug("purging channel", t), n.debug("corresponding zarr channel", this.channels.zarrMappings[t]), !this.ptTHREE) {
      n.error("pagetable texture not initialized");
      return;
    }
    if (this.isContextLost()) {
      n.warn("WebGL context is lost, skipping channel purge");
      return;
    }
    this.channels.downsampleMin[t] = void 0, this.channels.downsampleMax[t] = void 0, this.channels.zarrMappings[t] = void 0;
    const e = this.PT.channelOffsets[t];
    n.debug("channelMask", e), n.error("TODO: not tested yet");
    const { gl: i } = this, a = this.renderer.properties.get(this.ptTHREE).__webglTexture;
    i.activeTexture(i.TEXTURE0), i.bindTexture(i.TEXTURE_3D, a);
    for (let r = 0; r < this.zarrStore.resolutions; r++) {
      const o = [
        this.PT.anchors[r][2] * e[2],
        this.PT.anchors[r][1] * e[1],
        this.PT.anchors[r][0] * e[0]
      ];
      n.debug("anchor", o);
      // Upload a zeroed region covering this resolution level's bricks.
      const l = this.zarrStore.brickLayout[r], u = l[0] * l[1] * l[2];
      n.debug("extents", l), n.debug("size", u), i.texSubImage3D(i.TEXTURE_3D, 0, o[0], o[1], o[2], l[0], l[1], l[2], i.RED_INTEGER, i.UNSIGNED_INT, new Uint32Array(u));
    }
    i.bindTexture(i.TEXTURE_3D, null);
  }
  // Update a PT entry
  _updatePTEntry(t, e, i, a) {
    if (!this.ptTHREE)
      return;
    if (this.isContextLost()) {
      n.warn("WebGL context is lost, skipping PT entry update");
      return;
    }
    const { gl: r } = this, o = this.renderer.properties.get(this.ptTHREE).__webglTexture;
    r.activeTexture(r.TEXTURE0), r.bindTexture(r.TEXTURE_3D, o), r.texSubImage3D(r.TEXTURE_3D, 0, t, e, i, 1, 1, 1, r.RED_INTEGER, r.UNSIGNED_INT, new Uint32Array([a])), r.bindTexture(r.TEXTURE_3D, null);
  }
  /* ------------------------------------------------------------- *
   *  2. Allocate the next n free bricks in the brick cache        *
   * ------------------------------------------------------------- */
  /**
   *
   * @param {number} n The number of slots to allocate
   * @returns {{ bcIndex, x, y, z }[]} Array of brick cache coordinates for the allocated slots.
   */
  _allocateBCSlots(t) {
    let e = [];
    const i = B * G * ce;
    if (!this.BCFull && this.BCUnusedIndex + t > i && (this.BCFull = !0, n.debug("BRICK CACHE FULL")), this.BCFull)
      // Cache full: evict the least-recently-used slots and reuse them.
      this.LRUStack.length < t && this._buildLRU(), e = this.LRUStack.splice(0, t).map((a) => {
        this._evictBrick(a);
        const r = Math.floor(a / (B * G)), o = a - r * B * G, l = Math.floor(o / B), u = o % B;
        return { bcIndex: a, x: u, y: l, z: r };
      });
    else {
      // Cache not full: hand out the next contiguous unused slots.
      for (let a = 0; a < t; ++a) {
        const r = (this.BCUnusedIndex + a) % i, o = Math.floor(r / (B * G)), l = r - o * B * G, u = Math.floor(l / B), s = l % B;
        e.push({ bcIndex: r, x: s, y: u, z: o });
      }
      this.BCUnusedIndex += t;
    }
    return e;
  }
  /* ------------------------------------------------------------- *
   *  4. Upload one brick + PT entry                               *
   * ------------------------------------------------------------- */
  async _uploadBrick(t, e) {
    if (n.debug("uploading brick", t, e), this.isContextLost()) {
      n.warn("WebGL context is lost, skipping brick upload");
      return;
    }
    if (t.x >= this.PT.xExtent || t.y >= this.PT.yExtent || t.z >= this.PT.zTotal || t.x < 0 || t.y < 0 || t.z < 0) {
      n.error("this.PT", this.PT), n.error("ptCoord out of bounds", t);
      return;
    }
    // Resolve the PT coordinate into (channel slot, resolution, brick x/y/z).
    const { channel: i, resolution: a, x: r, y: o, z: l } = He(t.x, t.y, t.z, { PT_zExtent: this.PT.zExtent, PT_z0Extent: this.PT.z0Extent, PT_anchors: this.PT.anchors });
    if (!this.channels || !this.channels.zarrMappings || this.channels.zarrMappings.length === 0) {
      n.error("Channel mappings not initialized, skipping brick upload");
      return;
    }
    if (i < 0 || i >= this.channels.zarrMappings.length) {
      n.error("Channel index out of bounds", { channel: i, mappingsLength: this.channels.zarrMappings.length });
      return;
    }
    const u = this.channels.zarrMappings[i];
    // Missing mapping: try to fall back to the last known-good channel config
    // (saved on context loss / updateChannels) before giving up.
    if (u === void 0 || u === -1)
      if (n.warn("zarrChannel is undefined or -1", {
        zarrChannel: u,
        channel: i,
        ptCoord: t,
        channelMappings: this.channels.zarrMappings,
        contextLost: this.isContextLost()
      }), this._lastChannelConfig && this._lastChannelConfig.zarrMappings[i] !== void 0) {
        n.warn("Attempting to use last known channel config"), this.channels.zarrMappings = [...this._lastChannelConfig.zarrMappings], this.channels.colorMappings = [...this._lastChannelConfig.colorMappings], this.channels.downsampleMin = [...this._lastChannelConfig.downsampleMin], this.channels.downsampleMax = [...this._lastChannelConfig.downsampleMax];
        const R = this.channels.zarrMappings[i];
        if (R !== void 0 && R !== -1)
          n.debug("Successfully restored channel mapping, continuing with upload");
        else {
          n.error("Could not restore valid channel mapping, aborting brick upload");
          return;
        }
      } else {
        n.error("No fallback channel config available, aborting brick upload");
        return;
      }
    n.debug("starting to load zarr chunk", { resolution: a, z: l, y: o, x: r, zarrChannel: u });
    let s = await this.loadZarrChunk(0, u, l, o, r, a);
    // uint16 data is window-scaled down to uint8 using the channel's
    // downsample min/max (seeded lazily from omero metadata if needed).
    if (n.debug("chunk", s), s instanceof Uint16Array) {
      if (n.debug("chunk is Uint16Array, converting to Uint8Array"), this.channels.downsampleMin[i] === void 0) {
        const T = this.channels.zarrMappings[i];
        n.debug("channelId was not found in this.channels.downsampleMin[channel]", T), this.channels.downsampleMin[i] = this.ngffMetadata?.omero?.channels?.[T]?.window?.min || 0, this.channels.downsampleMax[i] = this.ngffMetadata?.omero?.channels?.[T]?.window?.max || 65535, n.debug("this.channels.downsampleMin[channel]", this.channels.downsampleMin[i]), n.debug("this.channels.downsampleMax[channel]", this.channels.downsampleMax[i]);
      }
      const R = new Uint16Array(s.buffer), E = new Uint8Array(s.length), k = this.channels.downsampleMin[i], O = this.channels.downsampleMax[i], X = O - k;
      for (let T = 0; T < R.length; T++) {
        const L = R[T], U = Math.max(k, Math.min(O, L));
        E[T] = (U - k) * 255 / X | 0;
      }
      s = E;
    }
    if (!(s instanceof Uint8Array))
      throw new Error(`Unsupported chunk type: ${s.constructor.name}. Expected Uint8Array.`);
    // Track the brick's value range for the packed PT entry.
    let v = 255, g = 0;
    for (let R = 0; R < s.length; ++R) {
      const E = s[R];
      E < v && (v = E), E > g && (g = E);
    }
    // Upload the voxel data into the brick-cache slot.
    const { gl: c } = this, d = this.renderer.properties.get(this.bcTHREE).__webglTexture;
    c.activeTexture(c.TEXTURE2), c.bindTexture(c.TEXTURE_3D, d), c.pixelStorei(c.UNPACK_ALIGNMENT, 1), c.texSubImage3D(c.TEXTURE_3D, 0, e.x * S, e.y * S, e.z * S, S, S, S, c.RED, c.UNSIGNED_BYTE, s), c.pixelStorei(c.UNPACK_ALIGNMENT, 4), c.bindTexture(c.TEXTURE_3D, null);
    const m = c.getError();
    // NOTE(review): this out-of-bounds branch is unreachable — the same
    // condition already caused an early return above.
    m !== c.NO_ERROR && n.error("WebGL error during brick upload:", m, s), i >= this.channels.zarrMappings.length && (n.debug("channel is out of bounds", i), v = 255, g = 255);
    // Write the packed (min/max + brick-cache coord) entry into the PT texture.
    const h = Ye(v, g, e.x, e.y, e.z), x = this.renderer.properties.get(this.ptTHREE).__webglTexture;
    c.activeTexture(c.TEXTURE0), c.bindTexture(c.TEXTURE_3D, x), c.texSubImage3D(c.TEXTURE_3D, 0, t.x, t.y, t.z, 1, 1, 1, c.RED_INTEGER, c.UNSIGNED_INT, new Uint32Array([h])), c.bindTexture(c.TEXTURE_3D, null);
    const b = c.getError();
    b !== c.NO_ERROR && n.error("WebGL error during pagetable upload:", b, s), this.BCTimeStamps[e.bcIndex] = this.timeStamp, this.BCMinMax[e.bcIndex] = [v, g], this.bc2pt[e.bcIndex] = t;
  }
  /* ------------------------------------------------------------- *
   *  5. Public: handle a batch of PT requests (array of {x,y,z})  *
   * ------------------------------------------------------------- */
  async handleBrickRequests(t) {
    if (t.length === 0)
      return;
    this.totalBricksRequested = t.length, this.currentRequestCount = t.length;
    const e = this._allocateBCSlots(t.length);
    n.debug("Handling brick requests:", { requestCount: t.length, slotCount: e.length }), n.debug("handleBrickRequests: starting for loop");
    for (let i = 0; i < t.length; ++i) {
      await this._uploadBrick(t[i], e[i]), this.currentRequestCount = t.length - i - 1;
      const a = this.bricksEverLoaded.size;
      // Unchanged set size after add() means this brick was loaded before.
      if (this.bricksEverLoaded.add(`${t[i].x},${t[i].y},${t[i].z}`), a === this.bricksEverLoaded.size && n.debug("DUPLICATE BRICK LOADED", t[i]), this.needsBailout) {
        n.debug("Bailing out of handleBrickRequests early due to needsBailout flag"), this.needsBailout = !1, this.currentRequestCount = 0;
        break;
      }
    }
    this.currentRequestCount = 0, n.debug("this.bricksEverLoaded", this.bricksEverLoaded);
  }
  /* --------------------------------------------------------- *
   * Rebuild the LRUStack with the k least-recently-used bricks *
   * --------------------------------------------------------- */
  _buildLRU() {
    const t = this.BCTimeStamps.map((e, i) => ({ index: i, time: e }));
    this.LRUStack = t.sort((e, i) => e.time - i.time).slice(0, this.k).map((e) => e.index);
  }
}
const Qe = `//
// Output: Unnormalized ray direction from camera to each vertex
// Used by fragment shader for ray marching through the volume
out vec3 rayDirUnnorm;
// Output: Camera position transformed into volume's local coordinate system
// Used to calculate ray origins in the fragment shader
out vec3 cameraCorrected;
// Volume scale uniform (likely for anisotropic voxels)
uniform vec3 u_vol_scale;
// Volume size uniform
uniform vec3 u_size;
// Output: Vertex positions normalized to [0,1] range within volume bounds
// Standard coordinate system for volume sampling
varying vec3 worldSpaceCoords;
// Output: Texture coordinates for sampling volume data
varying vec2 vUv;
// Output: Final clip-space position (stored for fragment shader access)
varying vec4 glPosition;
// Volume bounding box size uniform
uniform highp vec3 boxSize;
void main()
{
// Transform vertex positions from [-0.5, 0.5] range to [0, 1] range
// This is the standard coordinate system for volume sampling
//
// Mathematical transformation:
// worldSpaceCoords = (position / boxSize) + 0.5
//
// Example:
// position = (-0.5, -0.5, -0.5) → worldSpaceCoords = (0, 0, 0)
// position = ( 0.0, 0.0, 0.0) → worldSpaceCoords = (0.5, 0.5, 0.5)
// position = ( 0.5, 0.5, 0.5) → worldSpaceCoords = (1, 1, 1)
worldSpaceCoords = position / boxSize + vec3(0.5, 0.5, 0.5); //move it from [-0.5;0.5] to [0,1]
// Transform camera position into volume's local coordinate system
// This gives us the ray origin in volume space
cameraCorrected = (inverse(modelMatrix) * vec4(cameraPosition, 1.)).xyz;
// Calculate unnormalized ray direction from camera to each vertex
// Used by fragment shader for ray marching through the volume
rayDirUnnorm = position - cameraCorrected;
// Apply standard MVP transformation to get clip-space coordinates
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
// Store clip-space position for fragment shader access
glPosition = gl_Position;
// Pass through texture coordinates for volume sampling
vUv = uv;
}
`, Je = `//
// #include <packing>
precision highp float;
precision highp int;
precision highp sampler3D;
precision highp usampler3D;
// ========================================
// INPUT VARIABLES (from vertex shader)
// ========================================
// Unnormalized ray direction from camera
in vec3 rayDirUnnorm;
// Camera position in world space
in vec3 cameraCorrected;
// ========================================
// TEXTURE SAMPLERS
// ========================================
// 3D texture containing cached brick data (2048x2048x128)
// (2048*2048*128)/(32*32*32) = 16,384 bricks can be stored?
uniform sampler3D brickCacheTex;
// 3D texture containing page table entries (brick metadata)
uniform usampler3D pageTableTex;
// ========================================
// RENDERING PARAMETERS/CONSTANTS
// ========================================
// Rendering style: 0=MIP, 1=MinIP, 2=standard volume rendering, 3=DEBUG
uniform int u_renderstyle;
// Global opacity multiplier for volume rendering
uniform float opacity;
// ========================================
// CONTRAST LIMITS (per channel)
// per channel min/max values for value normalization
// ========================================
uniform vec2 clim0;
uniform vec2 clim1;
uniform vec2 clim2;
uniform vec2 clim3;
uniform vec2 clim4;
uniform vec2 clim5;
uniform vec2 clim6;
// ========================================
// CLIPPING PLANES
// e.g., for X-axis clipping: (min_x, max_x) or (-1, -1) if disabled
// ========================================
uniform vec2 xClip;
uniform vec2 yClip;
uniform vec2 zClip;
// ========================================
// CHANNEL COLORS AND OPACITIES
// rgb -- color values, a -- visibility (boolean)
// ========================================
uniform vec4 color0;
uniform vec4 color1;
uniform vec4 color2;
uniform vec4 color3;
uniform vec4 color4;
uniform vec4 color5;
uniform vec4 color6;
// maps colors to physical spaces
uniform int channelMapping[7];
// ========================================
// VOLUME AND RESOLUTION PARAMETERS
// ========================================
// Volume bounding box size in world space
uniform highp vec3 boxSize;
// Rendering resolution level (affects step size)
// stepsize, correlates with resolution
uniform int renderRes;
// Volume dimensions in voxels (x, y, z)
// resolution 0 voxel extents
uniform uvec3 voxelExtents;
// Global resolution range: (min_res, max_res)
// global range of requested resolutions
uniform ivec2 resGlobal;
// Maximum number of active channels
// max number of channels (relevant for the cache statistics)
// between 1 and 7
uniform int maxChannels;
// ========================================
// PER-CHANNEL RESOLUTION RANGES
// per color channel resolution range
// Each channel can have different available resolution levels
// e.g., for Channel 0: (min_res, max_res)
// ========================================
uniform ivec2 res0;
uniform ivec2 res1;
uniform ivec2 res2;
uniform ivec2 res3;
uniform ivec2 res4;
uniform ivec2 res5;
uniform ivec2 res6;
// Channel 7: unused
uniform ivec2 res7;
// ========================================
// LEVEL-OF-DETAIL PARAMETERS
// controls how fast we decrease the resolution
// ========================================
// LOD factor for distance-based resolution selection
uniform float lodFactor;
// ========================================
// ANCHOR POINTS (per resolution level)
// per resolution anchor point for pagetable
// ========================================
// Anchor points define the origin of page table for each resolution level
// Resolution 0 anchor point (highest detail)
uniform uvec3 anchor0;
uniform uvec3 anchor1;
uniform uvec3 anchor2;
uniform uvec3 anchor3;
uniform uvec3 anchor4;
uniform uvec3 anchor5;
uniform uvec3 anchor6;
uniform uvec3 anchor7;
uniform uvec3 anchor8;
uniform uvec3 anchor9;
// Resolution 9 anchor point (lowest detail)
// ========================================
// SCALE FACTORS (per resolution level)
// per resolution downsample factor
// ========================================
// Scale factors determine voxel size at each resolution level
// Resolution 0 scale factors (should be 1,1,1)
uniform vec3 scale0;
uniform vec3 scale1;
uniform vec3 scale2;
uniform vec3 scale3;
uniform vec3 scale4;
uniform vec3 scale5;
uniform vec3 scale6;
uniform vec3 scale7;
uniform vec3 scale8;
uniform vec3 scale9;
// Resolution 9 scale factors
// ========================================
// VARYING VARIABLES (unused but required)
// ========================================
// Fragment position (unused)
varying vec4 glPosition;
// World space coordinates (used for depth only)
varying vec3 worldSpaceCoords;
// ========================================
// OUTPUT VARIABLES (multiple render targets)
// output buffers
// ========================================
// Final rendered color (sRGB)
layout(location = 0) out vec4 gColor;
// Brick loading requests (packed coordinates)
layout(location = 1) out vec4 gRequest;
// Brick usage tracking (for cache management)
layout(location = 2) out vec4 gUsage;
// ========================================
// CONSTANTS
// ========================================
// Size of each brick in voxels (32x32x32)
const float BRICK_SIZE = 32.0;
// Brick cache texture width
const float BRICK_CACHE_SIZE_X = 2048.0;
// Brick cache texture height
const float BRICK_CACHE_SIZE_Y = 2048.0;
// Brick cache texture depth
const float BRICK_CACHE_SIZE_Z = 128.0;
// Number of bricks in X (64)
const float BRICK_CACHE_BRICKS_X = BRICK_CACHE_SIZE_X / BRICK_SIZE;
// Number of bricks in Y (64)
const float BRICK_CACHE_BRICKS_Y = BRICK_CACHE_SIZE_Y / BRICK_SIZE;
// Number of bricks in Z (4)
const float BRICK_CACHE_BRICKS_Z = BRICK_CACHE_SIZE_Z / BRICK_SIZE;
// ========================================
// RAY-VOLUME INTERSECTION
// calculating the intersection of the ray with the bounding box
// ========================================
// Calculates the intersection of a ray with the volume's bounding box
// Returns (entry_time, exit_time) for the ray-box intersection
// Handles clipping planes by adjusting the bounding box
//
// Parameters:
// orig - vec3: Ray origin point in world space
// dir - vec3: Ray direction vector (should be normalized)
//
// Returns:
// vec2: (entry_time, exit_time) where:
// - entry_time: Distance along ray to enter the volume
// - exit_time: Distance along ray to exit the volume
// - If no intersection: entry_time > exit_time
vec2 intersect_hit(vec3 orig, vec3 dir) {
// Start with full volume bounds
vec3 boxMin = vec3(-0.5) * boxSize;
vec3 boxMax = vec3(0.5) * boxSize;
// Apply clipping planes if they're active (xClip.x > -1.0 means active)
if (xClip.x > -1.0) {
boxMin.x = xClip.x - (boxSize.x / 2.0);
if (xClip.y < boxSize.x)
boxMax.x = xClip.y - (boxSize.x / 2.0);
}
if (yClip.x > -1.0) {
boxMin.y = yClip.x - (boxSize.y / 2.0);
if (yClip.y < boxSize.y)
boxMax.y = yClip.y - (boxSize.y / 2.0);
}
if (zClip.x > -1.0) {
boxMin.z = zClip.x - (boxSize.z / 2.0);
if (zClip.y < boxSize.z)
boxMax.z = zClip.y - (boxSize.z / 2.0);
}
// Standard ray-box intersection algorithm
vec3 invDir = 1.0 / dir;
vec3 tmin0 = (boxMin - orig) * invDir;
vec3 tmax0 = (boxMax - orig) * invDir;
vec3 tmin = min(tmin0, tmax0);
vec3 tmax = max(tmin0, tmax0);
float t0 = max(tmin.x, max(tmin.y, tmin.z)); // Entry time
float t1 = min(tmax.x, min(tmax.y, tmax.z)); // Exit time
return vec2(t0, t1);
}
// ========================================
// UTILITY FUNCTIONS
// ========================================
// Pseudo-random number generator for jittered sampling
// random number generator based on the uv coordinate
// Author @patriciogv - 2015
// http://patriciogonzalezvivo.com
//
// Parameters:
// None (uses gl_FragCoord.xy as input)
//
// Returns:
// float: Random value between 0.0 and 1.0 based on fragment coordinates
float random() {
return fract(sin(dot(gl_FragCoord.xy, vec2(12.9898,78.233)))* 43758.5453123);
}
// Convert from linear RGB to sRGB color space
// Implements the standard sRGB transfer function for gamma correction
//
// Parameters:
// x - float: Linear RGB value between 0.0 and 1.0
//
// Returns:
// float: sRGB value between 0.0 and 1.0
float linear_to_srgb(float x) {
if (x <= 0.0031308f) {
return 12.92f * x;
}
return 1.055f * pow(x, 1.f / 2.4f) - 0.055f;
}
// Convert from linear RGB to sRGB color space (vector version)
// Applies sRGB conversion to each RGB component while preserving alpha
//
// Parameters:
// x - vec4: Linear RGBA color with components between 0.0 and 1.0
//
// Returns:
// vec4: sRGB RGBA color with components between 0.0 and 1.0
vec4 linear_to_srgb(vec4 x) {
return vec4(linear_to_srgb(x.r), linear_to_srgb(x.g), linear_to_srgb(x.b), x.a);
}
// ========================================
// PAGE TABLE COORDINATE PACKING
// transform the pagetable coordinate into a RGBA8 value
// ========================================
// Packs 3D page table coordinates into RGBA8 texture format
// Uses 10 bits for X, 10 bits for Y, 12 bits for Z
//
// Parameters:
// coord - uvec3: 3D coordinates to pack (X, Y, Z components)
// - X coordinate: 10-bit unsigned integer (0-1023)
// - Y coordinate: 10-bit unsigned integer (0-1023)
// - Z coordinate: 12-bit unsigned integer (0-4095)
//
// Returns:
// vec4: RGBA8 encoded coordinates with components between 0.0 and 1.0
// - R: Upper 8 bits of packed 32-bit value
// - G: Middle-upper 8 bits of packed 32-bit value
// - B: Middle-lower 8 bits of packed 32-bit value
// - A: Lower 8 bits of packed 32-bit value
vec4 packPTCoordToRGBA8(uvec3 coord) {
uint x = coord.x & 0x3FFu; // 10 bits for X coordinate
uint y = coord.y & 0x3FFu; // 10 bits for Y coordinate
uint z = coord.z & 0xFFFu; // 12 bi