lucky-money-event
Version:
Lucky Money event
1,453 lines (1,429 loc) • 83.5 kB
JavaScript
import { E as p, U as Ne, T as Z, l as P, d as ce, I as y, t as w, M as T, a4 as X, R as j, w as M, H as he, a1 as G, a2 as fe, c as C, B as v, _ as S, ab as L, S as A, y as D, ac as qe, ad as N, L as I, ae as B, s as q, v as Qe, G as Ke, af as H, n as pe, q as me, a6 as ge, a9 as xe, o as Je, p as Ze, a7 as et, a8 as tt, aa as rt, ag as st, ah as it, ai as at, aj as Y, ak as nt, al as ot, D as _e, m as be, am as z, e as _, an as ut } from "./index-Dzqa3T3s.js";
import { c as k, a as lt, b as dt, B as ye } from "./colorToUniform-C2jGzNe1.js";
/**
 * ResizePlugin: keeps the application's renderer sized to `resizeTo`
 * (a DOM element or the window). Listens for window "resize" events and
 * coalesces resize work through requestAnimationFrame.
 * Installed statically onto the application instance via `init`.
 */
class ve {
  /**
   * Initialize the plugin with scope of application instance
   * @private
   * @param {object} [options] - See application options
   */
  static init(e) {
    Object.defineProperty(
      this,
      "resizeTo",
      {
        // Re-registers the window listener for the new target and resizes immediately.
        set(t) {
          globalThis.removeEventListener("resize", this.queueResize), this._resizeTo = t, t && (globalThis.addEventListener("resize", this.queueResize), this.resize());
        },
        get() {
          return this._resizeTo;
        }
      }
    ), this.queueResize = () => {
      // Debounce: cancel any pending frame before scheduling a new one.
      this._resizeTo && (this._cancelResize(), this._resizeId = requestAnimationFrame(() => this.resize()));
    }, this._cancelResize = () => {
      this._resizeId && (cancelAnimationFrame(this._resizeId), this._resizeId = null);
    }, this.resize = () => {
      if (!this._resizeTo)
        return;
      this._cancelResize();
      let t, r;
      // The window reports size via innerWidth/innerHeight; elements via client size.
      if (this._resizeTo === globalThis.window)
        t = globalThis.innerWidth, r = globalThis.innerHeight;
      else {
        const { clientWidth: s, clientHeight: a } = this._resizeTo;
        t = s, r = a;
      }
      this.renderer.resize(t, r), this.render();
    }, this._resizeId = null, this._resizeTo = null, this.resizeTo = e.resizeTo || null;
  }
  /**
   * Clean up the ticker, scoped to application
   * @private
   */
  static destroy() {
    globalThis.removeEventListener("resize", this.queueResize), this._cancelResize(), this._cancelResize = null, this.queueResize = null, this.resizeTo = null, this.resize = null;
  }
}
// Register as an Application extension.
ve.extension = p.Application;
/**
 * TickerPlugin: wires a Ticker to the application's render loop.
 * Honors the `sharedTicker` and `autoStart` options; reassigning `ticker`
 * moves the render callback from the old ticker to the new one.
 */
class Te {
  /**
   * Initialize the plugin with scope of application instance
   * @private
   * @param {object} [options] - See application options
   */
  static init(e) {
    e = Object.assign({
      autoStart: !0,
      sharedTicker: !1
    }, e), Object.defineProperty(
      this,
      "ticker",
      {
        // Detach render from the previous ticker, attach to the new one at LOW priority.
        set(t) {
          this._ticker && this._ticker.remove(this.render, this), this._ticker = t, t && t.add(this.render, this, Ne.LOW);
        },
        get() {
          return this._ticker;
        }
      }
    ), this.stop = () => {
      this._ticker.stop();
    }, this.start = () => {
      this._ticker.start();
    }, this._ticker = null, this.ticker = e.sharedTicker ? Z.shared : new Z(), e.autoStart && this.start();
  }
  /**
   * Clean up the ticker, scoped to application.
   * @private
   */
  static destroy() {
    if (this._ticker) {
      const e = this._ticker;
      // Null the property first so the setter detaches render, then destroy.
      this.ticker = null, e.destroy();
    }
  }
}
// Register as an Application extension.
Te.extension = p.Application;
/**
 * FilterPipe: render pipe that translates filter push/pop requests into
 * instruction-set entries, and later dispatches those instructions to the
 * renderer's filter system.
 */
class we {
  /** @param renderer - owning renderer; pipes and the filter system are reached through it */
  constructor(renderer) {
    this._renderer = renderer;
  }
  /** Queue a "pushFilter" instruction, breaking the current batch first. */
  push(filterEffect, container, instructionSet) {
    this._renderer.renderPipes.batch.break(instructionSet);
    instructionSet.add({
      renderPipeId: "filter",
      canBundle: !1,
      action: "pushFilter",
      container,
      filterEffect
    });
  }
  /** Queue a "popFilter" instruction, breaking the current batch first. */
  pop(filterEffect, container, instructionSet) {
    this._renderer.renderPipes.batch.break(instructionSet);
    instructionSet.add({
      renderPipeId: "filter",
      action: "popFilter",
      canBundle: !1
    });
  }
  /** Forward a previously queued instruction to the filter system. */
  execute(instruction) {
    if (instruction.action === "pushFilter") {
      this._renderer.filter.push(instruction);
    } else if (instruction.action === "popFilter") {
      this._renderer.filter.pop();
    }
  }
  destroy() {
    this._renderer = null;
  }
}
// Register the filter pipe under the name "filter" for all render-pipe flavours.
we.extension = {
  type: [
    p.WebGLPipes,
    p.WebGPUPipes,
    p.CanvasPipes
  ],
  name: "filter"
};
/**
 * Accumulate the bounds of a list of renderables into a bounds object.
 * Items with globalDisplayStatus below 7 are skipped (7 presumably means all
 * visibility bits set — verify against the container implementation).
 * The bounds' matrix is saved and restored around the accumulation.
 * @returns the same bounds object, for chaining.
 */
function ct(renderables, bounds) {
  bounds.clear();
  const savedMatrix = bounds.matrix;
  for (const renderable of renderables) {
    if (renderable.globalDisplayStatus >= 7) {
      bounds.matrix = renderable.worldTransform;
      bounds.addBounds(renderable.bounds);
    }
  }
  bounds.matrix = savedMatrix;
  return bounds;
}
// Shared full-screen quad geometry (unit quad, two triangles) used to run
// filter shaders over the whole render target.
const ht = new X({
  attributes: {
    aPosition: {
      buffer: new Float32Array([0, 0, 1, 0, 1, 1, 0, 1]),
      format: "float32x2",
      stride: 2 * 4,
      offset: 0
    }
  },
  indexBuffer: new Uint32Array([0, 1, 2, 0, 2, 3])
});
/**
 * Per-level state record for the filter stack. Instances are pooled by stack
 * depth and reset on each push; `skip` marks levels with no filter work.
 */
class ft {
  constructor() {
    // When true this stack entry is ignored by pop/apply.
    this.skip = !1;
    // Offscreen texture the filtered content is first rendered into.
    this.inputTexture = null;
    // Backdrop snapshot; only populated when a filter requires blending.
    this.backTexture = null;
    this.filters = null;
    // Measured bounds of the filtered subtree.
    this.bounds = new he();
    this.container = null;
    this.blendRequired = !1;
    // Surface the final filter pass writes to.
    this.outputRenderSurface = null;
    // Filter area in global (root) space, scaled by resolution.
    this.globalFrame = { x: 0, y: 0, width: 0, height: 0 };
  }
}
/**
 * FilterSystem: manages a stack of filter passes. Each `push` measures the
 * filtered subtree, allocates an input texture and redirects rendering into
 * it; the matching `pop` runs the filter shaders and composites the result
 * back into the previous render surface. Also supports one-shot filtering of
 * a texture via `generateFilteredTexture`.
 */
class Pe {
  // Creates the shared filter uniform group and the global bind group.
  constructor(e) {
    this._filterStackIndex = 0, this._filterStack = [], this._filterGlobalUniforms = new P({
      uInputSize: { value: new Float32Array(4), type: "vec4<f32>" },
      uInputPixel: { value: new Float32Array(4), type: "vec4<f32>" },
      uInputClamp: { value: new Float32Array(4), type: "vec4<f32>" },
      uOutputFrame: { value: new Float32Array(4), type: "vec4<f32>" },
      uGlobalFrame: { value: new Float32Array(4), type: "vec4<f32>" },
      uOutputTexture: { value: new Float32Array(4), type: "vec4<f32>" }
    }), this._globalFilterBindGroup = new ce({}), this.renderer = e;
  }
  /**
   * The back texture of the currently active filter. Requires the filter to have `blendRequired` set to true.
   * @readonly
   */
  get activeBackTexture() {
    return this._activeFilterData?.backTexture;
  }
  /**
   * Pushes a filter instruction onto the filter stack.
   * @param instruction - The instruction containing the filter effect and container.
   * @internal
   */
  push(e) {
    const t = this.renderer, r = e.filterEffect.filters, s = this._pushFilterData();
    s.skip = !1, s.filters = r, s.container = e.container, s.outputRenderSurface = t.renderTarget.renderSurface;
    const a = t.renderTarget.renderTarget.colorTexture.source, n = a.resolution, o = a.antialias;
    // No filters: mark the level as a no-op so pop() can ignore it.
    if (r.length === 0) {
      s.skip = !0;
      return;
    }
    const l = s.bounds;
    if (this._calculateFilterArea(e, l), this._calculateFilterBounds(s, t.renderTarget.rootViewPort, o, n, 1), s.skip)
      return;
    const d = this._getPreviousFilterData(), h = this._findFilterResolution(n);
    let u = 0, c = 0;
    // Offset by the enclosing filter's bounds when nested.
    d && (u = d.bounds.minX, c = d.bounds.minY), this._calculateGlobalFrame(
      s,
      u,
      c,
      h,
      a.width,
      a.height
    ), this._setupFilterTextures(s, l, t, d);
  }
  /**
   * Applies filters to a texture.
   *
   * This method takes a texture and a list of filters, applies the filters to the texture,
   * and returns the resulting texture.
   * @param {object} params - The parameters for applying filters.
   * @param {Texture} params.texture - The texture to apply filters to.
   * @param {Filter[]} params.filters - The filters to apply.
   * @returns {Texture} The resulting texture after all filters have been applied.
   * @example
   *
   * ```ts
   * // Create a texture and a list of filters
   * const texture = new Texture(...);
   * const filters = [new BlurFilter(), new ColorMatrixFilter()];
   *
   * // Apply the filters to the texture
   * const resultTexture = filterSystem.generateFilteredTexture({ texture, filters });
   *
   * // Use the resulting texture
   * sprite.texture = resultTexture;
   * ```
   *
   * Key Points:
   * 1. padding is not currently supported here - so clipping may occur with filters that use padding.
   * 2. If all filters are disabled or skipped, the original texture is returned.
   */
  generateFilteredTexture({ texture: e, filters: t }) {
    const r = this._pushFilterData();
    this._activeFilterData = r, r.skip = !1, r.filters = t;
    const s = e.source, a = s.resolution, n = s.antialias;
    if (t.length === 0)
      return r.skip = !0, e;
    const o = r.bounds;
    if (o.addRect(e.frame), this._calculateFilterBounds(r, o.rectangle, n, a, 0), r.skip)
      return e;
    const l = a;
    this._calculateGlobalFrame(
      r,
      0,
      0,
      l,
      s.width,
      s.height
    ), r.outputRenderSurface = y.getOptimalTexture(
      o.width,
      o.height,
      r.resolution,
      r.antialias
    ), r.backTexture = w.EMPTY, r.inputTexture = e, this.renderer.renderTarget.finishRenderPass(), this._applyFiltersToTexture(r, !0);
    const c = r.outputRenderSurface;
    return c.source.alphaMode = "premultiplied-alpha", c;
  }
  /** @internal */
  pop() {
    const e = this.renderer, t = this._popFilterData();
    // Skipped levels popped without work; otherwise run the chain and return pooled textures.
    t.skip || (e.globalUniforms.pop(), e.renderTarget.finishRenderPass(), this._activeFilterData = t, this._applyFiltersToTexture(t, !1), t.blendRequired && y.returnTexture(t.backTexture), y.returnTexture(t.inputTexture));
  }
  /**
   * Copies the last render surface to a texture.
   * @param lastRenderSurface - The last render surface to copy from.
   * @param bounds - The bounds of the area to copy.
   * @param previousBounds - The previous bounds to use for offsetting the copy.
   */
  getBackTexture(e, t, r) {
    const s = e.colorTexture.source._resolution, a = y.getOptimalTexture(
      t.width,
      t.height,
      s,
      !1
    );
    let n = t.minX, o = t.minY;
    r && (n -= r.minX, o -= r.minY), n = Math.floor(n * s), o = Math.floor(o * s);
    const l = Math.ceil(t.width * s), d = Math.ceil(t.height * s);
    return this.renderer.renderTarget.copyToTexture(
      e,
      a,
      { x: n, y: o },
      { width: l, height: d },
      { x: 0, y: 0 }
    ), a;
  }
  /**
   * Applies a filter to a texture.
   * @param filter - The filter to apply.
   * @param input - The input texture.
   * @param output - The output render surface.
   * @param clear - Whether to clear the output surface before applying the filter.
   */
  applyFilter(e, t, r, s) {
    const a = this.renderer, n = this._activeFilterData, l = n.outputRenderSurface === r, d = a.renderTarget.rootRenderTarget.colorTexture.source._resolution, h = this._findFilterResolution(d);
    let u = 0, c = 0;
    // Final pass into the original surface: re-apply the enclosing filter offset.
    if (l) {
      const f = this._findPreviousFilterOffset();
      u = f.x, c = f.y;
    }
    this._updateFilterUniforms(t, r, n, u, c, h, l, s), this._setupBindGroupsAndRender(e, t, a);
  }
  /**
   * Multiply _input normalized coordinates_ to this matrix to get _sprite texture normalized coordinates_.
   *
   * Use `outputMatrix * vTextureCoord` in the shader.
   * @param outputMatrix - The matrix to output to.
   * @param {Sprite} sprite - The sprite to map to.
   * @returns The mapped matrix.
   */
  calculateSpriteMatrix(e, t) {
    const r = this._activeFilterData, s = e.set(
      r.inputTexture._source.width,
      0,
      0,
      r.inputTexture._source.height,
      r.bounds.minX,
      r.bounds.minY
    ), a = t.worldTransform.copyTo(T.shared), n = t.renderGroup || t.parentRenderGroup;
    return n && n.cacheToLocalTransform && a.prepend(n.cacheToLocalTransform), a.invert(), s.prepend(a), s.scale(
      1 / t.texture.frame.width,
      1 / t.texture.frame.height
    ), s.translate(t.anchor.x, t.anchor.y), s;
  }
  // No-op: the system owns no resources beyond the pooled stack entries.
  destroy() {
  }
  /**
   * Sets up the bind groups and renders the filter.
   * @param filter - The filter to apply
   * @param input - The input texture
   * @param renderer - The renderer instance
   */
  _setupBindGroupsAndRender(e, t, r) {
    if (r.renderPipes.uniformBatch) {
      const s = r.renderPipes.uniformBatch.getUboResource(this._filterGlobalUniforms);
      this._globalFilterBindGroup.setResource(s, 0);
    } else
      this._globalFilterBindGroup.setResource(this._filterGlobalUniforms, 0);
    this._globalFilterBindGroup.setResource(t.source, 1), this._globalFilterBindGroup.setResource(t.source.style, 2), e.groups[0] = this._globalFilterBindGroup, r.encoder.draw({
      geometry: ht,
      shader: e,
      state: e._state,
      topology: "triangle-list"
    }), r.type === j.WEBGL && r.renderTarget.finishRenderPass();
  }
  /**
   * Sets up the filter textures including input texture and back texture if needed.
   * @param filterData - The filter data to update
   * @param bounds - The bounds for the texture
   * @param renderer - The renderer instance
   * @param previousFilterData - The previous filter data for back texture calculation
   */
  _setupFilterTextures(e, t, r, s) {
    if (e.backTexture = w.EMPTY, e.blendRequired) {
      r.renderTarget.finishRenderPass();
      const a = r.renderTarget.getRenderTarget(e.outputRenderSurface);
      e.backTexture = this.getBackTexture(a, t, s?.bounds);
    }
    e.inputTexture = y.getOptimalTexture(
      t.width,
      t.height,
      e.resolution,
      e.antialias
    ), r.renderTarget.bind(e.inputTexture, !0), r.globalUniforms.push({
      offset: t
    });
  }
  /**
   * Calculates and sets the global frame for the filter.
   * @param filterData - The filter data to update
   * @param offsetX - The X offset
   * @param offsetY - The Y offset
   * @param globalResolution - The global resolution
   * @param sourceWidth - The source texture width
   * @param sourceHeight - The source texture height
   */
  _calculateGlobalFrame(e, t, r, s, a, n) {
    const o = e.globalFrame;
    o.x = t * s, o.y = r * s, o.width = a * s, o.height = n * s;
  }
  /**
   * Updates the filter uniforms with the current filter state.
   * @param input - The input texture
   * @param output - The output render surface
   * @param filterData - The current filter data
   * @param offsetX - The X offset for positioning
   * @param offsetY - The Y offset for positioning
   * @param resolution - The current resolution
   * @param isFinalTarget - Whether this is the final render target
   * @param clear - Whether to clear the output surface
   */
  _updateFilterUniforms(e, t, r, s, a, n, o, l) {
    const d = this._filterGlobalUniforms.uniforms, h = d.uOutputFrame, u = d.uInputSize, c = d.uInputPixel, f = d.uInputClamp, x = d.uGlobalFrame, g = d.uOutputTexture;
    o ? (h[0] = r.bounds.minX - s, h[1] = r.bounds.minY - a) : (h[0] = 0, h[1] = 0), h[2] = e.frame.width, h[3] = e.frame.height, u[0] = e.source.width, u[1] = e.source.height, u[2] = 1 / u[0], u[3] = 1 / u[1], c[0] = e.source.pixelWidth, c[1] = e.source.pixelHeight, c[2] = 1 / c[0], c[3] = 1 / c[1], f[0] = 0.5 * c[2], f[1] = 0.5 * c[3], f[2] = e.frame.width * u[2] - 0.5 * c[2], f[3] = e.frame.height * u[3] - 0.5 * c[3];
    const m = this.renderer.renderTarget.rootRenderTarget.colorTexture;
    x[0] = s * n, x[1] = a * n, x[2] = m.source.width * n, x[3] = m.source.height * n, t instanceof w && (t.source.resource = null);
    const b = this.renderer.renderTarget.getRenderTarget(t);
    this.renderer.renderTarget.bind(t, !!l), t instanceof w ? (g[0] = t.frame.width, g[1] = t.frame.height) : (g[0] = b.width, g[1] = b.height), g[2] = b.isRoot ? -1 : 1, this._filterGlobalUniforms.update();
  }
  /**
   * Finds the correct resolution by looking back through the filter stack.
   * @param rootResolution - The fallback root resolution to use
   * @returns The resolution from the previous filter or root resolution
   */
  _findFilterResolution(e) {
    let t = this._filterStackIndex - 1;
    for (; t > 0 && this._filterStack[t].skip; )
      --t;
    return t > 0 && this._filterStack[t].inputTexture ? this._filterStack[t].inputTexture.source._resolution : e;
  }
  /**
   * Finds the offset from the previous non-skipped filter in the stack.
   * @returns The offset coordinates from the previous filter
   */
  _findPreviousFilterOffset() {
    let e = 0, t = 0, r = this._filterStackIndex;
    for (; r > 0; ) {
      r--;
      const s = this._filterStack[r];
      if (!s.skip) {
        e = s.bounds.minX, t = s.bounds.minY;
        break;
      }
    }
    return { x: e, y: t };
  }
  /**
   * Calculates the filter area bounds based on the instruction type.
   * @param instruction - The filter instruction
   * @param bounds - The bounds object to populate
   */
  _calculateFilterArea(e, t) {
    if (e.renderables ? ct(e.renderables, t) : e.filterEffect.filterArea ? (t.clear(), t.addRect(e.filterEffect.filterArea), t.applyMatrix(e.container.worldTransform)) : e.container.getFastGlobalBounds(!0, t), e.container) {
      const s = (e.container.renderGroup || e.container.parentRenderGroup).cacheToLocalTransform;
      s && t.applyMatrix(s);
    }
  }
  // Runs the filter chain: single filter goes straight to the output surface;
  // multiple filters ping-pong between the input and one pooled temp texture.
  _applyFiltersToTexture(e, t) {
    const r = e.inputTexture, s = e.bounds, a = e.filters;
    if (this._globalFilterBindGroup.setResource(r.source.style, 2), this._globalFilterBindGroup.setResource(e.backTexture.source, 3), a.length === 1)
      a[0].apply(this, r, e.outputRenderSurface, t);
    else {
      let n = e.inputTexture;
      const o = y.getOptimalTexture(
        s.width,
        s.height,
        n.source._resolution,
        !1
      );
      let l = o, d = 0;
      for (d = 0; d < a.length - 1; ++d) {
        a[d].apply(this, n, l, !0);
        const u = n;
        n = l, l = u;
      }
      a[d].apply(this, n, e.outputRenderSurface, t), y.returnTexture(o);
    }
  }
  // Combines per-filter settings (resolution, padding, antialias, renderer
  // compatibility, blend requirement) into the stack entry, clamps the bounds
  // to the viewport when allowed, and marks the entry skipped when no filter
  // can run or the padded bounds are empty.
  _calculateFilterBounds(e, t, r, s, a) {
    const n = this.renderer, o = e.bounds, l = e.filters;
    let d = 1 / 0, h = 0, u = !0, c = !1, f = !1, x = !0;
    for (let g = 0; g < l.length; g++) {
      const m = l[g];
      if (d = Math.min(d, m.resolution === "inherit" ? s : m.resolution), h += m.padding, m.antialias === "off" ? u = !1 : m.antialias === "inherit" && u && (u = r), m.clipToViewport || (x = !1), !!!(m.compatibleRenderers & n.type)) {
        f = !1;
        break;
      }
      if (m.blendRequired && !(n.backBuffer?.useBackBuffer ?? !0)) {
        M("Blend filter requires backBuffer on WebGL renderer to be enabled. Set `useBackBuffer: true` in the renderer options."), f = !1;
        break;
      }
      f = m.enabled || f, c || (c = m.blendRequired);
    }
    if (!f) {
      e.skip = !0;
      return;
    }
    if (x && o.fitBounds(0, t.width / s, 0, t.height / s), o.scale(d).ceil().scale(1 / d).pad((h | 0) * a), !o.isPositive) {
      e.skip = !0;
      return;
    }
    e.antialias = u, e.resolution = d, e.blendRequired = c;
  }
  // Pops and returns the current stack entry (entries are pooled, not freed).
  _popFilterData() {
    return this._filterStackIndex--, this._filterStack[this._filterStackIndex];
  }
  // Walks down the stack past skipped entries to find the enclosing filter data.
  _getPreviousFilterData() {
    let e, t = this._filterStackIndex - 1;
    for (; t > 1 && (t--, e = this._filterStack[t], !!e.skip); )
      ;
    return e;
  }
  // Returns the pooled entry for the next stack level, creating it on demand.
  _pushFilterData() {
    let e = this._filterStack[this._filterStackIndex];
    return e || (e = this._filterStack[this._filterStackIndex] = new ft()), this._filterStackIndex++, e;
  }
}
// Register the filter system under the name "filter" for both GPU backends.
Pe.extension = {
  type: [
    p.WebGLSystem,
    p.WebGPUSystem
  ],
  name: "filter"
};
/**
 * MeshGeometry: a geometry with aPosition/aUV vertex attributes and an index
 * buffer. Accepts `{ positions, uvs, indices, topology, shrinkBuffersToFit }`;
 * the legacy positional-arguments constructor form is deprecated.
 */
const Se = class Ce extends X {
  constructor(...e) {
    let t = e[0] ?? {};
    // Deprecated form: (positions, uvs, indices) as positional Float32Arrays.
    t instanceof Float32Array && (G(fe, "use new MeshGeometry({ positions, uvs, indices }) instead"), t = {
      positions: t,
      uvs: e[1],
      indices: e[2]
    }), t = { ...Ce.defaultOptions, ...t };
    const r = t.positions || new Float32Array([0, 0, 1, 0, 1, 1, 0, 1]);
    let s = t.uvs;
    // Default UVs: zeroed to match custom positions, unit quad otherwise.
    s || (t.positions ? s = new Float32Array(r.length) : s = new Float32Array([0, 0, 1, 0, 1, 1, 0, 1]));
    const a = t.indices || new Uint32Array([0, 1, 2, 0, 2, 3]), n = t.shrinkBuffersToFit, o = new C({
      data: r,
      label: "attribute-mesh-positions",
      shrinkToFit: n,
      usage: v.VERTEX | v.COPY_DST
    }), l = new C({
      data: s,
      label: "attribute-mesh-uvs",
      shrinkToFit: n,
      usage: v.VERTEX | v.COPY_DST
    }), d = new C({
      data: a,
      label: "index-mesh-buffer",
      shrinkToFit: n,
      usage: v.INDEX | v.COPY_DST
    });
    super({
      attributes: {
        aPosition: {
          buffer: o,
          format: "float32x2",
          stride: 2 * 4,
          offset: 0
        },
        aUV: {
          buffer: l,
          format: "float32x2",
          stride: 2 * 4,
          offset: 0
        }
      },
      indexBuffer: d,
      topology: t.topology
    }), this.batchMode = "auto";
  }
  /** The positions of the mesh. */
  get positions() {
    return this.attributes.aPosition.buffer.data;
  }
  /**
   * Set the positions of the mesh.
   * When setting the positions, its important that the uvs array is at least as long as the positions array.
   * otherwise the geometry will not be valid.
   * @param {Float32Array} value - The positions of the mesh.
   */
  set positions(e) {
    this.attributes.aPosition.buffer.data = e;
  }
  /** The UVs of the mesh. */
  get uvs() {
    return this.attributes.aUV.buffer.data;
  }
  /**
   * Set the UVs of the mesh.
   * Its important that the uvs array you set is at least as long as the positions array.
   * otherwise the geometry will not be valid.
   * @param {Float32Array} value - The UVs of the mesh.
   */
  set uvs(e) {
    this.attributes.aUV.buffer.data = e;
  }
  /** The indices of the mesh. */
  get indices() {
    return this.indexBuffer.data;
  }
  set indices(e) {
    this.indexBuffer.data = e;
  }
};
// Default construction options for MeshGeometry.
Se.defaultOptions = {
  topology: "triangle-list",
  shrinkBuffersToFit: !1
};
let Q = Se;
/**
 * Serialize a text style object into a CSS string: one `div { ... }` rule
 * holding the core typography properties, followed by per-tag rules appended
 * by `mt`. NOTE(review): assumes `_fill.color` is a plain color value —
 * texture fills are warned against in the HTMLTextStyle fill setter.
 */
function pt(i) {
  const e = i._stroke, t = i._fill, s = [`div { ${[
    `color: ${S.shared.setValue(t.color).toHex()}`,
    `font-size: ${i.fontSize}px`,
    `font-family: ${i.fontFamily}`,
    `font-weight: ${i.fontWeight}`,
    `font-style: ${i.fontStyle}`,
    `font-variant: ${i.fontVariant}`,
    `letter-spacing: ${i.letterSpacing}px`,
    `text-align: ${i.align}`,
    `padding: ${i.padding}px`,
    `white-space: ${i.whiteSpace === "pre" && i.wordWrap ? "pre-wrap" : i.whiteSpace}`,
    ...i.lineHeight ? [`line-height: ${i.lineHeight}px`] : [],
    ...i.wordWrap ? [
      `word-wrap: ${i.breakWords ? "break-all" : "break-word"}`,
      `max-width: ${i.wordWrapWidth}px`
    ] : [],
    ...e ? [Be(e)] : [],
    ...i.dropShadow ? [Ue(i.dropShadow)] : [],
    ...i.cssOverrides
  ].join(";")} }`];
  return mt(i.tagStyles, s), s.join(" ");
}
/**
 * Build a CSS `text-shadow` declaration from a drop-shadow style.
 * The polar angle/distance pair is converted to rounded x/y pixel offsets;
 * the blur component is omitted when it is zero.
 */
function Ue(shadow) {
  const color = S.shared.setValue(shadow.color).setAlpha(shadow.alpha).toHexa();
  const offsetX = Math.round(Math.cos(shadow.angle) * shadow.distance);
  const offsetY = Math.round(Math.sin(shadow.angle) * shadow.distance);
  const position = `${offsetX}px ${offsetY}px`;
  if (shadow.blur > 0) {
    return `text-shadow: ${position} ${shadow.blur}px ${color}`;
  }
  return `text-shadow: ${position} ${color}`;
}
/**
 * Build webkit-prefixed plus standard CSS text-stroke declarations from a
 * stroke style, joined with ";". `paint-order: stroke` keeps the stroke
 * behind the fill.
 */
function Be(stroke) {
  const color = S.shared.setValue(stroke.color).toHex();
  const declarations = [
    `-webkit-text-stroke-width: ${stroke.width}px`,
    `-webkit-text-stroke-color: ${color}`,
    `text-stroke-width: ${stroke.width}px`,
    `text-stroke-color: ${color}`,
    "paint-order: stroke"
  ];
  return declarations.join(";");
}
// `ee`: template table mapping style properties to CSS declaration templates
// ("{{VALUE}}" is replaced with the property value).
// `te`: transform table for properties that need a conversion function.
const ee = {
  fontSize: "font-size: {{VALUE}}px",
  fontFamily: "font-family: {{VALUE}}",
  fontWeight: "font-weight: {{VALUE}}",
  fontStyle: "font-style: {{VALUE}}",
  fontVariant: "font-variant: {{VALUE}}",
  letterSpacing: "letter-spacing: {{VALUE}}px",
  align: "text-align: {{VALUE}}",
  padding: "padding: {{VALUE}}px",
  whiteSpace: "white-space: {{VALUE}}",
  lineHeight: "line-height: {{VALUE}}px",
  wordWrapWidth: "max-width: {{VALUE}}px"
}, te = {
  fill: (i) => `color: ${S.shared.setValue(i).toHex()}`,
  breakWords: (i) => `word-wrap: ${i ? "break-all" : "break-word"}`,
  stroke: Be,
  dropShadow: Ue
};
/**
 * Append one CSS rule per tag (e.g. `header { ... }`) to `out`.
 * Properties are translated through the transform table `te` when available,
 * otherwise through the template table `ee`; unknown properties are ignored.
 */
function mt(tagStyles, out) {
  for (const tag in tagStyles) {
    const style = tagStyles[tag];
    const declarations = [];
    for (const prop in style) {
      const value = style[prop];
      if (te[prop]) {
        declarations.push(te[prop](value));
      } else if (ee[prop]) {
        declarations.push(ee[prop].replace("{{VALUE}}", value));
      }
    }
    out.push(`${tag} { ${declarations.join(";")} }`);
  }
}
class K extends L {
constructor(e = {}) {
super(e), this._cssOverrides = [], this.cssOverrides = e.cssOverrides ?? [], this.tagStyles = e.tagStyles ?? {};
}
/**
* List of CSS style overrides to apply to the HTML text.
* These styles are added after the built-in styles and can override any default styling.
* @advanced
*/
set cssOverrides(e) {
this._cssOverrides = e instanceof Array ? e : [e], this.update();
}
/** @advanced */
get cssOverrides() {
return this._cssOverrides;
}
/**
* Updates the text style and triggers a refresh of the CSS style cache.
* This method is called automatically when style properties are changed.
* @example
* ```ts
* // Update after multiple changes
* const text = new HTMLText({
* text: 'Hello World',
* style
* });
*
* style.fontSize = 32;
* style.fill = '#00ff00';
* style.fontFamily = 'Arial';
* style.update(); // Apply all changes at once
* ```
* @advanced
* @see {@link HTMLTextStyle#cssStyle} For accessing the generated CSS
* @see {@link HTMLTextStyle#cssOverrides} For managing CSS overrides
*/
update() {
this._cssStyle = null, super.update();
}
/**
* Creates a new HTMLTextStyle object with the same values as this one.
* This creates a deep copy of all style properties, including dropShadow and tag styles.
* @example
* ```ts
* // Create original style
* const originalStyle = new HTMLTextStyle({
* fontSize: 24,
* fill: '#ff0000',
* tagStyles: {
* header: { fontSize: 32, fill: '#00ff00' }
* }
* });
*
* // Clone the style
* const clonedStyle = originalStyle.clone();
*
* // Modify cloned style independently
* clonedStyle.fontSize = 36;
* clonedStyle.fill = '#0000ff';
*
* // Original style remains unchanged
* console.log(originalStyle.fontSize); // Still 24
* console.log(originalStyle.fill); // Still '#ff0000'
* ```
*
* Properties that are cloned:
* - Basic text properties (fontSize, fontFamily, etc.)
* - Fill and stroke styles
* - Drop shadow configuration
* - CSS overrides
* - Tag styles (deep copied)
* - Word wrap settings
* - Alignment and spacing
* @returns {HTMLTextStyle} A new HTMLTextStyle instance with the same properties
* @see {@link HTMLTextStyle} For available style properties
* @see {@link HTMLTextStyle#cssOverrides} For CSS override handling
* @see {@link HTMLTextStyle#tagStyles} For tag style configuration
* @standard
*/
clone() {
return new K({
align: this.align,
breakWords: this.breakWords,
dropShadow: this.dropShadow ? { ...this.dropShadow } : null,
fill: this._fill,
fontFamily: this.fontFamily,
fontSize: this.fontSize,
fontStyle: this.fontStyle,
fontVariant: this.fontVariant,
fontWeight: this.fontWeight,
letterSpacing: this.letterSpacing,
lineHeight: this.lineHeight,
padding: this.padding,
stroke: this._stroke,
whiteSpace: this.whiteSpace,
wordWrap: this.wordWrap,
wordWrapWidth: this.wordWrapWidth,
cssOverrides: this.cssOverrides,
tagStyles: { ...this.tagStyles }
});
}
/**
* The CSS style string that will be applied to the HTML text.
* @advanced
*/
get cssStyle() {
return this._cssStyle || (this._cssStyle = pt(this)), this._cssStyle;
}
/**
* Add a style override, this can be any CSS property
* it will override any built-in style. This is the
* property and the value as a string (e.g., `color: red`).
* This will override any other internal style.
* @param {string} value - CSS style(s) to add.
* @example
* style.addOverride('background-color: red');
* @advanced
*/
addOverride(...e) {
const t = e.filter((r) => !this.cssOverrides.includes(r));
t.length > 0 && (this.cssOverrides.push(...t), this.update());
}
/**
* Remove any overrides that match the value.
* @param {string} value - CSS style to remove.
* @example
* style.removeOverride('background-color: red');
* @advanced
*/
removeOverride(...e) {
const t = e.filter((r) => this.cssOverrides.includes(r));
t.length > 0 && (this.cssOverrides = this.cssOverrides.filter((r) => !t.includes(r)), this.update());
}
/**
* Sets the fill style for the text. HTML text only supports color fills (string or number values).
* Texture fills are not supported and will trigger a warning in debug mode.
* @example
* ```ts
* // Using hex colors
* const text = new HTMLText({
* text: 'Colored Text',
* style: {
* fill: 0xff0000 // Red color
* }
* });
*
* // Using CSS color strings
* text.style.fill = '#00ff00'; // Hex string (Green)
* text.style.fill = 'blue'; // Named color
* text.style.fill = 'rgb(255,0,0)' // RGB
* text.style.fill = '#f0f'; // Short hex
*
* // Invalid usage (will trigger warning in debug)
* text.style.fill = {
* type: 'pattern',
* texture: Texture.from('pattern.png')
* }; // Not supported, falls back to default
* ```
* @param value - The fill color to use. Must be a string or number.
* @throws {Warning} In debug mode when attempting to use unsupported fill types
* @see {@link TextStyle#fill} For full fill options in canvas text
* @standard
*/
set fill(e) {
typeof e != "string" && typeof e != "number" && M("[HTMLTextStyle] only color fill is not supported by HTMLText"), super.fill = e;
}
/**
* Sets the stroke style for the text. HTML text only supports color strokes (string or number values).
* Texture strokes are not supported and will trigger a warning in debug mode.
* @example
* ```ts
* // Using hex colors
* const text = new HTMLText({
* text: 'Outlined Text',
* style: {
* stroke: 0xff0000 // Red outline
* }
* });
*
* // Using CSS color strings
* text.style.stroke = '#00ff00'; // Hex string (Green)
* text.style.stroke = 'blue'; // Named color
* text.style.stroke = 'rgb(255,0,0)' // RGB
* text.style.stroke = '#f0f'; // Short hex
*
* // Using stroke width
* text.style = {
* stroke: {
* color: '#ff0000',
* width: 2
* }
* };
*
* // Remove stroke
* text.style.stroke = null;
*
* // Invalid usage (will trigger warning in debug)
* text.style.stroke = {
* type: 'pattern',
* texture: Texture.from('pattern.png')
* }; // Not supported, falls back to default
* ```
* @param value - The stroke style to use. Must be a string, number, or stroke configuration object
* @throws {Warning} In debug mode when attempting to use unsupported stroke types
* @see {@link TextStyle#stroke} For full stroke options in canvas text
* @standard
*/
set stroke(e) {
e && typeof e != "string" && typeof e != "number" && M("[HTMLTextStyle] only color stroke is not supported by HTMLText"), super.stroke = e;
}
}
// XML namespaces for building the SVG <foreignObject> used to host HTML text.
const re = "http://www.w3.org/2000/svg", se = "http://www.w3.org/1999/xhtml";
/**
 * DOM scaffold for measuring/rasterizing HTML text: an <svg> containing a
 * <foreignObject> that hosts a <style> element and a content <div>, plus an
 * Image used for rasterization.
 */
class Fe {
  constructor() {
    this.svgRoot = document.createElementNS(re, "svg"), this.foreignObject = document.createElementNS(re, "foreignObject"), this.domElement = document.createElementNS(se, "div"), this.styleElement = document.createElementNS(se, "style"), this.image = new Image();
    const { foreignObject: e, svgRoot: t, styleElement: r, domElement: s } = this;
    // Oversized foreignObject so content is never clipped during measurement.
    e.setAttribute("width", "10000"), e.setAttribute("height", "10000"), e.style.overflow = "hidden", t.appendChild(e), e.appendChild(r), e.appendChild(s);
  }
}
// Shared measurement scaffold, created lazily on first measurement.
let ie;
/**
 * Measure the rendered size of an HTML string under a given style.
 * Temporarily attaches the scaffold SVG to document.body, reads the content
 * div's bounding rect, removes the SVG, and subtracts the style padding from
 * both dimensions.
 * @returns {{width: number, height: number}} content size excluding padding.
 */
function gt(i, e, t, r) {
  r || (r = ie || (ie = new Fe()));
  const { domElement: s, styleElement: a, svgRoot: n } = r;
  s.innerHTML = `<style>${e.cssStyle};</style><div style='padding:0'>${i}</div>`, s.setAttribute("style", "transform-origin: top left; display: inline-block"), t && (a.textContent = t), document.body.appendChild(n);
  const o = s.getBoundingClientRect();
  n.remove();
  const l = e.padding * 2;
  return {
    width: o.width - l,
    height: o.height - l
  };
}
/**
 * Per-renderer GPU data for a graphics renderable: holds the batchable
 * elements produced from the graphics context. `destroy` returns every
 * element to the pool and empties the list.
 */
class xt {
  constructor() {
    this.batched = !1;
    this.batches = [];
  }
  destroy() {
    for (const batch of this.batches) {
      D.return(batch);
    }
    this.batches.length = 0;
  }
}
/**
 * GraphicsPipe: render pipe for Graphics renderables. Batchable graphics
 * contexts are fed to the batcher; everything else breaks the batch and is
 * drawn via the backend adaptor.
 */
class Re {
  constructor(e, t) {
    this.state = A.for2d(), this.renderer = e, this._adaptor = t, this.renderer.runners.contextChange.add(this);
  }
  // Forward renderer context changes to the backend adaptor.
  contextChange() {
    this._adaptor.contextChange(this.renderer);
  }
  // NOTE(review): returns true when the context is batchable, or when gpu-data
  // presence disagrees with batchability — confirm intended semantics upstream.
  validateRenderable(e) {
    const t = e.context, r = !!e._gpuData, s = this.renderer.graphicsContext.updateGpuContext(t);
    return !!(s.isBatchable || r !== s.isBatchable);
  }
  // Rebuild on view updates, then either batch or break-and-defer to execute().
  addRenderable(e, t) {
    const r = this.renderer.graphicsContext.updateGpuContext(e.context);
    e.didViewUpdate && this._rebuild(e), r.isBatchable ? this._addToBatcher(e, t) : (this.renderer.renderPipes.batch.break(t), t.add(e));
  }
  // Re-sync every batch element with its batcher.
  updateRenderable(e) {
    const r = this._getGpuDataForRenderable(e).batches;
    for (let s = 0; s < r.length; s++) {
      const a = r[s];
      a._batcher.updateElement(a);
    }
  }
  // Non-batched path: set up blend mode and local uniforms, then let the
  // adaptor issue the draw.
  execute(e) {
    if (!e.isRenderable)
      return;
    const t = this.renderer, r = e.context;
    if (!t.graphicsContext.getGpuContext(r).batches.length)
      return;
    const a = r.customShader || this._adaptor.shader;
    this.state.blendMode = e.groupBlendMode;
    const n = a.resources.localUniforms.uniforms;
    n.uTransformMatrix = e.groupTransform, n.uRound = t._roundPixels | e._roundPixels, k(
      e.groupColorAlpha,
      n.uColor,
      0
    ), this._adaptor.execute(this, e);
  }
  // Drop old batches and regenerate them when the context is batchable.
  _rebuild(e) {
    const t = this._getGpuDataForRenderable(e), r = this.renderer.graphicsContext.updateGpuContext(e.context);
    t.destroy(), r.isBatchable && this._updateBatchesForRenderable(e, t);
  }
  // Hand every batch element of this renderable to the batch pipe.
  _addToBatcher(e, t) {
    const r = this.renderer.renderPipes.batch, s = this._getGpuDataForRenderable(e).batches;
    for (let a = 0; a < s.length; a++) {
      const n = s[a];
      r.addToBatch(n, t);
    }
  }
  // Per-renderer gpu data, created lazily.
  _getGpuDataForRenderable(e) {
    return e._gpuData[this.renderer.uid] || this._initGpuDataForRenderable(e);
  }
  _initGpuDataForRenderable(e) {
    const t = new xt();
    return e._gpuData[this.renderer.uid] = t, t;
  }
  // Copy the context's batches into pooled batchable elements bound to this renderable.
  _updateBatchesForRenderable(e, t) {
    const r = e.context, s = this.renderer.graphicsContext.getGpuContext(r), a = this.renderer._roundPixels | e._roundPixels;
    t.batches = s.batches.map((n) => {
      const o = D.get(qe);
      return n.copyTo(o), o.renderable = e, o.roundPixels = a, o;
    });
  }
  destroy() {
    this.renderer = null, this._adaptor.destroy(), this._adaptor = null, this.state = null;
  }
}
// Register the graphics pipe for all render-pipe flavours.
Re.extension = {
  type: [
    p.WebGLPipes,
    p.WebGPUPipes,
    p.CanvasPipes
  ],
  name: "graphics"
};
/**
 * PlaneGeometry: a MeshGeometry subdivided into a verticesX-by-verticesY grid
 * of vertices forming (verticesX-1)*(verticesY-1) quads. The legacy positional
 * constructor form is deprecated.
 */
const Me = class Ge extends Q {
  constructor(...e) {
    super({});
    let t = e[0] ?? {};
    // Deprecated form: (width, height, verticesX, verticesY) as positional numbers.
    typeof t == "number" && (G(fe, "PlaneGeometry constructor changed please use { width, height, verticesX, verticesY } instead"), t = {
      width: t,
      height: e[1],
      verticesX: e[2],
      verticesY: e[3]
    }), this.build(t);
  }
  /**
   * Refreshes plane coordinates
   * @param options - Options to be applied to plane geometry
   */
  build(e) {
    // Existing dimensions win over the options on rebuild (?? keeps prior values).
    e = { ...Ge.defaultOptions, ...e }, this.verticesX = this.verticesX ?? e.verticesX, this.verticesY = this.verticesY ?? e.verticesY, this.width = this.width ?? e.width, this.height = this.height ?? e.height;
    const t = this.verticesX * this.verticesY, r = [], s = [], a = [], n = this.verticesX - 1, o = this.verticesY - 1, l = this.width / n, d = this.height / o;
    // Vertex positions and normalized UVs laid out row-major.
    for (let u = 0; u < t; u++) {
      const c = u % this.verticesX, f = u / this.verticesX | 0;
      r.push(c * l, f * d), s.push(c / n, f / o);
    }
    const h = n * o;
    // Two triangles per grid cell.
    for (let u = 0; u < h; u++) {
      const c = u % n, f = u / n | 0, x = f * this.verticesX + c, g = f * this.verticesX + c + 1, m = (f + 1) * this.verticesX + c, b = (f + 1) * this.verticesX + c + 1;
      a.push(
        x,
        g,
        m,
        g,
        b,
        m
      );
    }
    this.buffers[0].data = new Float32Array(r), this.buffers[1].data = new Float32Array(s), this.indexBuffer.data = new Uint32Array(a), this.buffers[0].update(), this.buffers[1].update(), this.indexBuffer.update();
  }
};
// Default plane dimensions and subdivision.
Me.defaultOptions = {
  width: 100,
  height: 100,
  verticesX: 10,
  verticesY: 10
};
let _t = Me;
// BatchableMesh: adapts a mesh renderable (geometry + texture) to the shape
// the shared batcher expects, caching texture-matrix-transformed UVs.
class J {
  constructor() {
    this.batcherName = "default";
    this.packAsQuad = false;
    this.indexOffset = 0;
    this.attributeOffset = 0;
    this.roundPixels = 0;
    this._batcher = null;
    this._batch = null;
    this._textureMatrixUpdateId = -1;
    this._uvUpdateId = -1;
  }
  get blendMode() {
    return this.renderable.groupBlendMode;
  }
  // Explicit topology overrides the geometry's own.
  get topology() {
    return this._topology || this.geometry.topology;
  }
  set topology(value) {
    this._topology = value;
  }
  reset() {
    this.renderable = null;
    this.texture = null;
    this._batcher = null;
    this._batch = null;
    this.geometry = null;
    this._uvUpdateId = -1;
    this._textureMatrixUpdateId = -1;
  }
  /**
   * Sets the texture for the batchable mesh.
   * As it does so, it resets the texture matrix update ID.
   * this is to ensure that the texture matrix is recalculated when the uvs are referenced
   * @param value - The texture to set.
   */
  setTexture(value) {
    if (this.texture !== value) {
      this.texture = value;
      this._textureMatrixUpdateId = -1;
    }
  }
  // Raw UVs when the texture matrix is trivial; otherwise a lazily
  // (re)computed transformed copy, refreshed when matrix or buffer changes.
  get uvs() {
    const uvBuffer = this.geometry.getBuffer("aUV");
    const rawUvs = uvBuffer.data;
    const textureMatrix = this.texture.textureMatrix;
    if (textureMatrix.isSimple) {
      return rawUvs;
    }
    let transformed = this._transformedUvs;
    const stale = this._textureMatrixUpdateId !== textureMatrix._updateID
      || this._uvUpdateId !== uvBuffer._updateID;
    if (stale) {
      if (!transformed || transformed.length < rawUvs.length) {
        transformed = this._transformedUvs = new Float32Array(rawUvs.length);
      }
      this._textureMatrixUpdateId = textureMatrix._updateID;
      this._uvUpdateId = uvBuffer._updateID;
      textureMatrix.multiplyUvs(rawUvs, transformed);
    }
    return transformed;
  }
  get positions() {
    return this.geometry.positions;
  }
  get indices() {
    return this.geometry.indices;
  }
  get color() {
    return this.renderable.groupColorAlpha;
  }
  get groupTransform() {
    return this.renderable.groupTransform;
  }
  // Positions are (x, y) pairs, so vertex count is half the array length.
  get attributeSize() {
    return this.geometry.positions.length / 2;
  }
  get indexSize() {
    return this.geometry.indices.length;
  }
}
// Empty GPU-data placeholder: owns no resources, so destroy is a no-op.
class ae {
  destroy() {
    // nothing to release
  }
}
// Mesh render pipe: routes each mesh either through the shared batcher (when
// the mesh reports itself as batchable) or to a platform adaptor that draws
// it directly using the local uniforms below.
class Ae {
constructor(e, t) {
// per-mesh uniforms used on the non-batched (direct draw) path
this.localUniforms = new P({
uTransformMatrix: { value: new T(), type: "mat3x3<f32>" },
uColor: { value: new Float32Array([1, 1, 1, 1]), type: "vec4<f32>" },
uRound: { value: 0, type: "f32" }
}), this.localUniformsBindGroup = new ce({
0: this.localUniforms
}), this.renderer = e, this._adaptor = t, this._adaptor.init();
}
// Returns true when the renderable changed in a way that forces a rebuild:
// its batched flag flipped, its geometry grew/shrank, or its texture cannot
// stay in the current batch.
validateRenderable(e) {
const t = this._getMeshData(e), r = t.batched, s = e.batched;
if (t.batched = s, r !== s)
return !0;
if (s) {
const a = e._geometry;
// geometry size changed -> record new sizes and request a rebuild
if (a.indices.length !== t.indexSize || a.positions.length !== t.vertexSize)
return t.indexSize = a.indices.length, t.vertexSize = a.positions.length, !0;
const n = this._getBatchableMesh(e);
// a texture swap invalidates the cached texture-matrix transform
return n.texture.uid !== e._texture.uid && (n._textureMatrixUpdateId = -1), !n._batcher.checkAndUpdateTexture(
n,
e._texture
);
}
return !1;
}
addRenderable(e, t) {
const r = this.renderer.renderPipes.batch, { batched: s } = this._getMeshData(e);
if (s) {
// batchable: hand a BatchableMesh to the shared batch pipe
const a = this._getBatchableMesh(e);
a.setTexture(e._texture), a.geometry = e._geometry, r.addToBatch(a, t);
} else
// not batchable: end the current batch and record a direct draw
r.break(t), t.add(e);
}
updateRenderable(e) {
if (e.batched) {
const t = this._getBatchableMesh(e);
t.setTexture(e._texture), t.geometry = e._geometry, t._batcher.updateElement(t);
}
}
// Direct-draw path: resolve blend mode, refresh local uniforms (transform,
// round-pixels flag, premultiplied color) and delegate to the adaptor.
execute(e) {
if (!e.isRenderable)
return;
e.state.blendMode = N(e.groupBlendMode, e.texture._source);
const t = this.localUniforms;
t.uniforms.uTransformMatrix = e.groupTransform, t.uniforms.uRound = this.renderer._roundPixels | e._roundPixels, t.update(), k(
e.groupColorAlpha,
t.uniforms.uColor,
0
), this._adaptor.execute(this, e);
}
// Per-renderer mesh metadata, created on first access.
_getMeshData(e) {
var t, r;
return (t = e._gpuData)[r = this.renderer.uid] || (t[r] = new ae()), e._gpuData[this.renderer.uid].meshData || this._initMeshData(e);
}
_initMeshData(e) {
return e._gpuData[this.renderer.uid].meshData = {
batched: e.batched,
indexSize: e._geometry.indices?.length,
vertexSize: e._geometry.positions?.length
}, e._gpuData[this.renderer.uid].meshData;
}
// Per-renderer batchable wrapper, created on first access.
_getBatchableMesh(e) {
var t, r;
return (t = e._gpuData)[r = this.renderer.uid] || (t[r] = new ae()), e._gpuData[this.renderer.uid].batchableMesh || this._initBatchableMesh(e);
}
_initBatchableMesh(e) {
const t = new J();
return t.renderable = e, t.setTexture(e._texture), t.transform = e.groupTransform, t.roundPixels = this.renderer._roundPixels | e._roundPixels, e._gpuData[this.renderer.uid].batchableMesh = t, t;
}
destroy() {
this.localUniforms = null, this.localUniformsBindGroup = null, this._adaptor.destroy(), this._adaptor = null, this.renderer = null;
}
}
// Registers the mesh pipe with the WebGL, WebGPU and Canvas pipelines.
Ae.extension = {
type: [
p.WebGLPipes,
p.WebGPUPipes,
p.CanvasPipes
],
name: "mesh"
};
// WebGL adaptor for the particle pipe: binds shader, state and geometry,
// then draws six indices per particle in one call.
class bt {
execute(e, t) {
const r = e.state, s = e.renderer, a = t.shader || e.defaultShader;
a.resources.uTexture = t.texture._source, a.resources.uniforms = e.localUniforms;
const n = s.gl, o = e.getBuffers(t);
s.shader.bind(a), s.state.set(r), s.geometry.bind(o.geometry, a.glProgram);
// choose the GL index type matching the index buffer's element width
const d = o.geometry.indexBuffer.data.BYTES_PER_ELEMENT === 2 ? n.UNSIGNED_SHORT : n.UNSIGNED_INT;
n.drawElements(n.TRIANGLES, t.particleChildren.length * 6, d, 0);
}
}
// WebGPU adaptor for the particle pipe: wires uniform and texture bind
// groups, then issues one draw covering six indices per particle.
class yt {
execute(e, t) {
const r = e.renderer, s = t.shader || e.defaultShader;
// group 0: batched local uniforms; group 1: the container's texture
s.groups[0] = r.renderPipes.uniformBatch.getUniformBindGroup(e.localUniforms, !0), s.groups[1] = r.texture.getTextureBindGroup(t.texture);
const a = e.state, n = e.getBuffers(t);
r.encoder.draw({
geometry: n.geometry,
// NOTE(review): same expression as `s` above, re-evaluated verbatim
shader: t.shader || e.defaultShader,
state: a,
size: t.particleChildren.length * 6
});
}
}
/**
 * Fill (or allocate) an index buffer that draws `i` quads as triangle pairs:
 * every 4 vertices become indices [v, v+1, v+2, v, v+2, v+3].
 * @param i - number of quads
 * @param e - optional output buffer; must hold exactly i * 6 entries
 * @returns the output buffer (Uint16Array, or Uint32Array for large counts)
 */
function ne(i, e = null) {
  const totalIndices = i * 6;
  if (!e) {
    // 16-bit indices unless the index count exceeds the 16-bit range
    e = totalIndices > 65535 ? new Uint32Array(totalIndices) : new Uint16Array(totalIndices);
  }
  if (e.length !== totalIndices) {
    throw new Error(`Out buffer length is incorrect, got ${e.length} and expected ${totalIndices}`);
  }
  for (let write = 0, vertex = 0; write < totalIndices; write += 6, vertex += 4) {
    e[write + 0] = vertex + 0;
    e[write + 1] = vertex + 1;
    e[write + 2] = vertex + 2;
    e[write + 3] = vertex + 0;
    e[write + 4] = vertex + 2;
    e[write + 5] = vertex + 3;
  }
  return e;
}
// Compile both particle upload functions for a property set: one for the
// dynamic (per-frame) properties and one for the static properties.
function vt(i) {
  const dynamicUpdate = oe(i, true);
  const staticUpdate = oe(i, false);
  return { dynamicUpdate, staticUpdate };
}
// Builds an upload function via new Function that copies either the dynamic
// (e === true) or static (e === false) particle properties into typed-array
// views. The generated source is assembled purely from internal property
// snippets — no external input reaches new Function here.
function oe(i, e) {
const t = [];
t.push(`
var index = 0;
for (let i = 0; i < ps.length; ++i)
{
const p = ps[i];
`);
let r = 0;
// emit each matching property's copy snippet and accumulate the float stride
for (const a in i) {
const n = i[a];
if (e !== n.dynamic)
continue;
t.push(`offset = index + ${r}`), t.push(n.code);
const o = I(n.format);
r += o.stride / 4;
}
t.push(`
index += stride * 4;
}
`), t.unshift(`
var stride = ${r};
`);
const s = t.join(`
`);
// f32v / u32v are float32 and uint32 views over the same attribute buffer
return new Function("ps", "f32v", "u32v", s);
}
// Particle buffer set: an interleaved static attribute buffer (uploaded only
// when flagged), an interleaved dynamic buffer (uploaded every update), and
// a shared quad index buffer. Four vertices per particle.
class Tt {
constructor(e) {
this._size = 0, this._generateParticleUpdateCache = {};
const t = this._size = e.size ?? 1e3, r = e.properties;
let s = 0, a = 0;
// split the per-vertex byte stride between static and dynamic properties
for (const h in r) {
const u = r[h], c = I(u.format);
u.dynamic ? a += c.stride : s += c.stride;
}
// strides stored in floats (bytes / 4); buffers sized for 4 verts/particle
this._dynamicStride = a / 4, this._staticStride = s / 4, this.staticAttributeBuffer = new B(t * 4 * s), this.dynamicAttributeBuffer = new B(t * 4 * a), this.indexBuffer = ne(t);
const n = new X();
let o = 0, l = 0;
// GPU buffers start tiny; real data arrives via setDataWithSize in update()
this._staticBuffer = new C({
data: new Float32Array(1),
label: "static-particle-buffer",
shrinkToFit: !1,
usage: v.VERTEX | v.COPY_DST
}), this._dynamicBuffer = new C({
data: new Float32Array(1),
label: "dynamic-particle-buffer",
shrinkToFit: !1,
usage: v.VERTEX | v.COPY_DST
});
// register every attribute against the buffer that carries it
for (const h in r) {
const u = r[h], c = I(u.format);
u.dynamic ? (n.addAttribute(u.attributeName, {
buffer: this._dynamicBuffer,
stride: this._dynamicStride * 4,
offset: o * 4,
format: u.format
}), o += c.size) : (n.addAttribute(u.attributeName, {
buffer: this._staticBuffer,
stride: this._staticStride * 4,
offset: l * 4,
format: u.format
}), l += c.size);
}
n.addIndex(this.indexBuffer);
// compiled upload functions for the two property groups (cached by signature)
const d = this.getParticleUpdate(r);
this._dynamicUpload = d.dynamicUpdate, this._staticUpload = d.staticUpdate, this.geometry = n;
}
// Returns the cached upload-function pair for this property signature,
// generating it on first use.
getParticleUpdate(e) {
const t = wt(e);
return this._generateParticleUpdateCache[t] ? this._generateParticleUpdateCache[t] : (this._generateParticleUpdateCache[t] = this.generateParticleUpdate(e), this._generateParticleUpdateCache[t]);
}
generateParticleUpdate(e) {
return vt(e);
}
// Upload particle data. When the particle count outgrows capacity, buffers
// grow by at least 1.5x and a static re-upload is forced (t set to true).
update(e, t) {
e.length > this._size && (t = !0, this._size = Math.max(e.length, this._size * 1.5 | 0), this.staticAttributeBuffer = new B(this._size * this._staticStride * 4 * 4), this.dynamicAttributeBuffer = new B(this._size * this._dynamicStride * 4 * 4), this.indexBuffer = ne(this._size), this.geometry.indexBuffer.setDataWithSize(
this.indexBuffer,
this.indexBuffer.byteLength,
!0
));
const r = this.dynamicAttributeBuffer;
if (this._dynamicUpload(e, r.float32View, r.uint32View), this._dynamicBuffer.setDataWithSize(
this.dynamicAttributeBuffer.float32View,
e.length * this._dynamicStride * 4,
!0
), t) {
const s = this.staticAttributeBuffer;
this._staticUpload(e, s.float32View, s.uint32View), this._staticBuffer.setDataWithSize(
s.float32View,
e.length * this._staticStride * 4,
!0
);
}
}
destroy() {
this._staticBuffer.destroy(), this._dynamicBuffer.destroy(), this.geometry.destroy();
}
}
// Cache key for a particle-properties object: for every property, join its
// name, code snippet, and a "d"/"s" dynamic/static marker with underscores.
function wt(i) {
  const parts = [];
  for (const key in i) {
    const prop = i[key];
    parts.push(key, prop.code, prop.dynamic ? "d" : "s");
  }
  return parts.join("_");
}
// Shader sources for the default particle shader: Pt is the GLSL fragment
// shader, St the GLSL vertex shader (WebGL), and ue the shared WGSL module
// containing both entry points (WebGPU).
var Pt = `varying vec2 vUV;
varying vec4 vColor;
uniform sampler2D uTexture;
void main(void){
vec4 color = texture2D(uTexture, vUV) * vColor;
gl_FragColor = color;
}`, St = `attribute vec2 aVertex;
attribute vec2 aUV;
attribute vec4 aColor;
attribute vec2 aPosition;
attribute float aRotation;
uniform mat3 uTranslationMatrix;
uniform float uRound;
uniform vec2 uResolution;
uniform vec4 uColor;
varying vec2 vUV;
varying vec4 vColor;
vec2 roundPixels(vec2 position, vec2 targetSize)
{
return (floor(((position * 0.5 + 0.5) * targetSize) + 0.5) / targetSize) * 2.0 - 1.0;
}
void main(void){
float cosRotation = cos(aRotation);
float sinRotation = sin(aRotation);
float x = aVertex.x * cosRotation - aVertex.y * sinRotation;
float y = aVertex.x * sinRotation + aVertex.y * cosRotation;
vec2 v = vec2(x, y);
v = v + aPosition;
gl_Position = vec4((uTranslationMatrix * vec3(v, 1.0)).xy, 0.0, 1.0);
if(uRound == 1.0)
{
gl_Position.xy = roundPixels(gl_Position.xy, uResolution);
}
vUV = aUV;
vColor = vec4(aColor.rgb * aColor.a, aColor.a) * uColor;
}
`, ue = `
struct ParticleUniforms {
uProjectionMatrix:mat3x3<f32>,
uColor:vec4<f32>,
uResolution:vec2<f32>,
uRoundPixels:f32,
};
@group(0) @binding(0) var<uniform> uniforms: ParticleUniforms;
@group(1) @binding(0) var uTexture: texture_2d<f32>;
@group(1) @binding(1) var uSampler : sampler;
struct VSOutput {
@builtin(position) position: vec4<f32>,
@location(0) uv : vec2<f32>,
@location(1) color : vec4<f32>,
};
@vertex
fn mainVertex(
@location(0) aVertex: vec2<f32>,
@location(1) aPosition: vec2<f32>,
@location(2) aUV: vec2<f32>,
@location(3) aColor: vec4<f32>,
@location(4) aRotation: f32,
) -> VSOutput {
let v = vec2(
aVertex.x * cos(aRotation) - aVertex.y * sin(aRotation),
aVertex.x * sin(aRotation) + aVertex.y * cos(aRotation)
) + aPosition;
let position = vec4((uniforms.uProjectionMatrix * vec3(v, 1.0)).xy, 0.0, 1.0);
let vColor = vec4(aColor.rgb * aColor.a, aColor.a) * uniforms.uColor;
return VSOutput(
position,
aUV,
vColor,
);
}
@fragment
fn mainFragment(
@location(0) uv: vec2<f32>,
@location(1) color: vec4<f32>,
@builtin(position) position: vec4<f32>,
) -> @location(0) vec4<f32> {
var sample = textureSample(uTexture, uSampler, uv) * color;
return sample;
}`;
// Default particle shader: pairs the GLSL program (WebGL) and the WGSL
// program (WebGPU) with placeholder resources that the particle pipe swaps
// out per container at draw time.
class Ct extends q {
constructor() {
const e = Qe.from({
vertex: St,
fragment: Pt
}), t = Ke.from({
fragment: {
source: ue,
entryPoint: "mainFragment"
},
vertex: {
source: ue,
entryPoint: "mainVertex"
}
});
super({
glProgram: e,
gpuProgram: t,
resources: {
// this will be replaced with the texture from the particle container
uTexture: w.WHITE.source,
// this will be replaced with the texture style from the particle container
uSampler: new H({}),
// this will be replaced with the local uniforms from the particle container
uniforms: {
uTranslationMatrix: { value: new T(), type: "mat3x3<f32>" },
uColor: { value: new S(16777215), type: "vec4<f32>" },
uRound: { value: 1, type: "f32" },
uResolution: { value: [0, 0], type: "vec2<f32>" }
}
}
});
}
}
class De {
/**
* @param renderer - The renderer this sprite batch works for.
* @param adaptor
*/
constructor(e, t) {
this.state = A.for2d(), this.localUniforms = new P({
uTranslationMatrix: { value: new T(), type: "mat3x3<f32>" },
uColor: { value: new Float32Array(4), type: "vec4<f32>" },
uRound: { value: 1, type: "f32" },
uResolution: { value: [0, 0], type: "vec2<f32>" }
}), this.renderer = e, this.adaptor = t, this.defaultShader = new Ct(), this.state = A.for2d();
}
validateRenderable(e) {
return !1;
}
// Particles are drawn directly: end the current batch, then record the
// container in the instruction set.
addRenderable(e, t) {
this.renderer.renderPipes.batch.break(t), t.add(e);
}
getBuffers(e) {
return e._gpuData[this.renderer.uid] || this._initBuffer(e);
}
// Create the particle buffer set sized to the container's current child
// count and cache it under this renderer's uid.
_initBuffer(e) {
return e._gpuData[this.renderer.uid] = new Tt({
size: e.particleChildren.length,
properties: e._properties
}), e._gpuData[this.renderer.uid];
}
// Intentionally empty: particle data is re-uploaded in execute(), so there
// is no per-renderable update work.
updateRenderable(e) {
}
execute(e) {
const t = e.particleChildren;
if (t.length === 0)
return;
const r = this.renderer, s = this.getBuffers(e);
e.texture || (e.texture = t[0].texture);
const a = this