@banuba/webar
Version:
Banuba WebAR SDK
1,124 lines (1,070 loc) • 518 kB
JavaScript
// Cached reference to Object.defineProperty.
var ch = Object.defineProperty;
// Define `key` on `obj` as an enumerable/configurable/writable data property
// when the key is already present on the object or its prototype chain;
// otherwise fall back to a plain assignment.
var dh = (obj, key, value) => {
  if (key in obj) return ch(obj, key, { enumerable: true, configurable: true, writable: true, value });
  return obj[key] = value;
};
// Class-field initializer helper: non-symbol keys are stringified before the
// property is defined; always returns the assigned value.
var te = (obj, key, value) => {
  dh(obj, typeof key === "symbol" ? key : String(key), value);
  return value;
};
// Monotonically-increasing id used to correlate worker timer requests with
// their callbacks.
let hh = 0;
// vs: issues the next timer id.
// ws: base64-encoded source of a tiny Web Worker that echoes { id } back after
//     `timeout` ms — worker timers are not throttled in background tabs.
// wo: Blob holding the decoded worker script (browser contexts only).
const vs = () => hh++, ws = "KGZ1bmN0aW9uKCl7InVzZSBzdHJpY3QiO2FkZEV2ZW50TGlzdGVuZXIoIm1lc3NhZ2UiLCh7ZGF0YTp0fSk9Pntjb25zdCBzPXtpZDp0LmlkfTtzZXRUaW1lb3V0KHBvc3RNZXNzYWdlLHQudGltZW91dCxzKX0pfSkoKTsK", wo = typeof window < "u" && window.Blob && new Blob([atob(ws)], { type: "text/javascript;charset=utf-8" });
/**
 * Creates the background-timer Worker.
 * Prefers a Blob object URL (revoked once the Worker has been constructed);
 * if that fails (e.g. CSP, missing Blob support) it falls back to spawning
 * the worker from a base64 `data:` URL.
 */
function ph() {
  const urlApi = window.URL || window.webkitURL;
  let url;
  try {
    if (wo) url = urlApi.createObjectURL(wo);
    if (!url) throw new Error("object URL unavailable");
    return new Worker(url);
  } catch {
    return new Worker("data:application/javascript;base64," + ws);
  } finally {
    if (url) urlApi.revokeObjectURL(url);
  }
}
// Lazily-created shared background-timer Worker instance.
let Fn;
// Es(callback, timeoutMs): schedules `callback` via the worker so it still
// fires while the tab is hidden. `Ai` maps pending timer ids to callbacks;
// the worker posts { id } back when the delay elapses. Returns the timer id.
const Ai = /* @__PURE__ */ new Map(), Es = (r, a) => {
  const o = vs(), l = { id: o, timeout: a };
  return Ai.set(l.id, r), Fn || (Fn = new ph(), Fn.onmessage = ({ data: m }) => {
    const w = Ai.get(m.id);
    Ai.delete(m.id), w();
  }), Fn.postMessage(l), o;
  // _h: target frame rate; Eo: frame interval in ms; Rn: callbacks queued for
  // the next synthetic animation frame (see xs below).
}, _h = 60, Eo = 1e3 / _h, Rn = [];
// Timestamp of the most recent synthetic animation-frame tick.
let xo = 0;
// xs(callback): requestAnimationFrame replacement for hidden tabs. Queues the
// callback; the first callback of a batch arms a worker timeout aligned to the
// next 60 fps slot, which then flushes the whole queue with one timestamp.
const xs = (r) => {
  const a = vs();
  if (Rn.length === 0) {
    // Delay to the next multiple of the frame interval since the last tick.
    const o = performance.now(), l = Eo - (o - xo) % Eo;
    Es(() => {
      // Snapshot the queue before invoking: callbacks may re-schedule.
      const m = xo = performance.now(), w = [...Rn];
      Rn.length = 0, w.forEach((v) => v(m));
    }, l);
  }
  return Rn.push(r), a;
}, bh = /* @__PURE__ */ Object.freeze(/* @__PURE__ */ Object.defineProperty({
  __proto__: null,
  requestAnimationFrame: xs,
  setTimeout: Es
}, Symbol.toStringTag, { value: "Module" })), mh = (...r) => window.setTimeout(...r), Mn = /* @__PURE__ */ new Map(), gh = (r) => {
  // Native rAF wrapper that remembers the pending callback in `Mn` so it can
  // be migrated to the worker scheduler if the page is hidden (see below).
  const a = window.requestAnimationFrame((...o) => {
    Mn.delete(a), r(...o);
  });
  return Mn.set(a, r), a;
};
// When the page becomes hidden, move every pending native rAF callback over
// to the worker-backed scheduler so rendering keeps ticking in the background.
typeof document < "u" && document.addEventListener("visibilitychange", () => {
  document.visibilityState !== "visible" && Mn.forEach((r, a) => {
    Mn.delete(a), cancelAnimationFrame(a), xs(r);
  });
});
// yh: native timers module, used while the page is visible.
const yh = /* @__PURE__ */ Object.freeze(/* @__PURE__ */ Object.defineProperty({
  __proto__: null,
  requestAnimationFrame: gh,
  setTimeout: mh
// vh: document stand-in for non-DOM environments (always reports "hidden").
// Ss: picks the native (yh) or worker-backed (bh) timers module based on page
//     visibility. Qr/Ts: visibility-aware rAF/setTimeout. Cs: microtask tick.
}, Symbol.toStringTag, { value: "Module" })), vh = typeof document < "u" ? document : { visibilityState: "hidden" }, Ss = () => vh.visibilityState === "visible" ? yh : bh, Qr = (r) => Ss().requestAnimationFrame(r), Ts = (r, a) => Ss().setTimeout(r, a), Cs = (r) => Promise.resolve().then(r), _a = {
  requestAnimationFrame: Qr,
  setTimeout: Ts
}, _m = /* @__PURE__ */ Object.freeze(/* @__PURE__ */ Object.defineProperty({
  __proto__: null,
  nextTick: Cs,
  requestAnimationFrame: Qr,
  setTimeout: Ts,
  timers: _a
// wh: promise that resolves on the next (visibility-aware) animation frame.
// ba(fps): method decorator factory that throttles an async-generator method
// to `fps` iterations per second; fps = -1 disables throttling.
}, Symbol.toStringTag, { value: "Module" })), wh = () => new Promise((r) => Qr(r)), ba = (r = -1) => function(a, o, l) {
  const m = l, w = m.value;
  return { ...m, value: async function* (...P) {
    const L = w.apply(this, P);
    let N = 0, H = 0;
    for (; ; ) {
      // Target frame interval with a 10% tolerance. With r = -1 `re` is
      // negative, so the wait loop below never spins (unthrottled).
      const re = 1e3 / r, z = 0.1 * re;
      for (; (H = performance.now()) - N < re - z; )
        await wh();
      N = H;
      const { done: G, value: ve } = await L.next();
      if (G)
        return ve;
      // The consumer can adjust the FPS by sending a number into the generator.
      const ce = yield ve;
      typeof ce < "u" && (r = ce);
    }
  } };
// $n(source, attrs): creates a hidden, muted, inline <video> for a
// MediaStream, Blob or URL, appends it to the document, and resolves with the
// element once playback has started. Object URLs created for Blob sources are
// revoked when the element is emptied.
}, $n = async (r, a = {}) => new Promise((o) => {
  const l = document.createElement("video");
  if (l.muted = !0, l.controls = !1, l.playsInline = !0, Object.assign(l, a), r instanceof globalThis.MediaStream)
    l.srcObject = r, l.addEventListener("ended", () => l.srcObject = null, { once: !0 }), r.addEventListener("inactive", () => l.dispatchEvent(new CustomEvent("ended")), {
      once: !0
    });
  else {
    if (typeof r != "string") {
      const w = r = URL.createObjectURL(r);
      l.addEventListener("emptied", () => URL.revokeObjectURL(w), { once: !0 });
    }
    l.crossOrigin = "anonymous", l.src = r, l.addEventListener("ended", () => l.src = "", { once: !0 });
  }
  // Keep the element in the DOM (some browsers pause detached videos) but
  // visually imperceptible; it removes itself once its media is emptied.
  l.style.position = "fixed", l.style.zIndex = "-9999999", l.style.opacity = "0.0000000001", document.body.appendChild(l), l.addEventListener("emptied", () => l.remove(), { once: !0 });
  // NOTE(review): the interval only reads readyState — presumably a workaround
  // to nudge media loading on some browsers; confirm before removing.
  const m = setInterval(() => l.readyState, 300);
  l.addEventListener("play", () => clearInterval(m), { once: !0 }), l.addEventListener("play", () => o(l), { once: !0 }), l.addEventListener("loadedmetadata", () => l.play(), { once: !0 });
// Eh(source): decodes an image from a URL or Blob, resolving with the loaded
// <img> element (CORS-anonymous).
}), Eh = (r) => new Promise((a, o) => {
  const l = document.createElement("img");
  l.onload = () => a(l), l.onerror = o, l.crossOrigin = "anonymous", l.src = typeof r == "string" ? r : URL.createObjectURL(r);
// So: per-label averaged durations. xh: linear interpolation. Ji/Ii: mark-name
// builders. ma(label): starts a performance measurement (unique mark name).
}), So = /* @__PURE__ */ new Map(), xh = (r, a, o) => r * (1 - o) + a * o, Ji = (r) => `webar::${r}:start`, Ii = (r) => `webar::${r}:end`, ma = (r) => {
  let a = { internalName: r + ":" + Math.random() };
  return performance.mark(Ji(a.internalName)), a;
// ga(handle): finishes the measurement started by ma(); clears the marks and
// returns both the instant duration and an exponential moving average (5%
// weight) keyed by the original label.
}, ga = (r) => {
  const a = r.internalName;
  performance.mark(Ii(a));
  let o = performance.measure(a, Ji(a), Ii(a));
  // Some engines return undefined from measure(); fetch the entry explicitly.
  o || (o = performance.getEntriesByName(a)[0]), performance.clearMarks(Ji(a)), performance.clearMarks(Ii(a)), performance.clearMeasures(a);
  const { duration: l } = o, m = a.split(":")[0];
  let { averagedDuration: w = 0 } = So.get(m) || {};
  return w = xh(w, l, 0.05), So.set(m, { averagedDuration: w }), { instantDuration: l, averagedDuration: w };
// Fs(message, log?): method decorator that emits a deprecation warning (via
// console.warn by default) before delegating to the original method.
}, Fs = (r, a = (o) => console.warn(o)) => function(o, l, m) {
  const w = m.value;
  if (typeof w != "function")
    throw new TypeError("Only functions can be marked as deprecated");
  return { ...m, value: function(...P) {
    return a.call(
      this,
      `DEPRECATION: ${o.constructor.name}.${l}() is deprecated. ${r}`
    ), w.call(this, ...P);
  } };
};
/**
 * Minimal event-emitter base class backed by a private EventTarget.
 * All listener management is delegated to the wrapped target;
 * `removeAllEventListeners` drops every subscriber at once by swapping the
 * target for a fresh one.
 */
let en = class {
  constructor() {
    te(this, "_emitter", new EventTarget());
  }
  addEventListener(type, listener, options) {
    this._emitter.addEventListener(type, listener, options);
  }
  removeEventListener(type, listener, options) {
    this._emitter.removeEventListener(type, listener, options);
  }
  dispatchEvent(event) {
    return this._emitter.dispatchEvent(event);
  }
  removeAllEventListeners() {
    this._emitter = new EventTarget();
  }
};
// Sh(url, init, opts): fetch wrapper that reports download progress. The body
// is re-wrapped in a counting ReadableStream; opts.onProgress receives
// { total, transferred } per chunk (total from Content-Length, 0 if absent).
const Sh = (r, a, o) => fetch(r, a).then((l) => {
  if (!l.body)
    return l;
  let m = 0;
  const w = Number(l.headers.get("content-length") || 0), v = l.body.getReader();
  // The original Response is passed as init so status/headers carry over.
  return new Response(
    new ReadableStream({
      async start(b) {
        for (; ; ) {
          const { done: P, value: L } = await v.read();
          if (P ? m = w : m += L.byteLength, o?.onProgress?.({ total: w, transferred: m }), P)
            break;
          b.enqueue(L);
        }
        b.close();
      }
    }),
    l
  );
// Th: supported-browser sniff against navigator.userAgent.
}), Th = () => (
  // The meta.env.SUPPORTED_BROWSERS will be replaced during build with RegExp, see vite.config.js
  /Edge?\/(79|[89]\d|\d{3,})(\.\d+|)(\.\d+|)|Firefox\/(6[5-9]|[7-9]\d|\d{3,})\.\d+(\.\d+|)|Chrom(ium|e)\/(5[7-9]|[6-9]\d|\d{3,})\.\d+(\.\d+|)([\d.]+$|.*Safari\/(?![\d.]+ Edge\/[\d.]+$))|Maci.* Version\/(1[5-9]|[2-9]\d|\d{3,})\.\d+([,.]\d+|)( Mobile\/\w+|) Safari\/|Chrome.+OPR\/(4[4-9]|[5-9]\d|\d{3,})\.\d+\.\d+|(CPU[ +]OS|iPhone[ +]OS|CPU[ +]iPhone|CPU IPhone OS|CPU iPad OS)[ +]+(1[5-9]|[2-9]\d|\d{3,})[._]\d+([._]\d+|)|Mobile Safari.+OPR\/(7[2-9]|[89]\d|\d{3,})\.\d+\.\d+|Android.+Chrom(ium|e)\/(10[7-9]|1[1-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Android.+(UC? ?Browser|UCWEB|U3)[ /]?(1[3-9]|[2-9]\d|\d{3,})\.\d+\.\d+|SamsungBrowser\/([7-9]|\d{2,})\.\d+|Android.+MQ{2}Browser\/(1[3-9]|[2-9]\d|\d{3,})(\.\d+|)(\.\d+|)|baidubrowser[\s/](1[3-9]|[2-9]\d|\d{3,})(\.\d+|)(\.\d+|)/.test(navigator.userAgent)
// Ch: Safari detection. Rs: use OffscreenCanvas when available and not Safari.
// As: context attributes for the shared pixel-readback WebGL2 context.
), Ch = typeof window < "u" && /^((?!chrome|android).)*safari/i.test(window.navigator?.userAgent), Rs = typeof OffscreenCanvas < "u" && !Ch, As = {
  // https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/WebGL_best_practices#avoid_alphafalse_which_can_be_expensive
  alpha: !0,
  antialias: !1,
  depth: !1,
  // since this context is designed to process video, it's better to be synchronized with the browser renderer
  desynchronized: !1,
  // avoid setting `powerPreference` to `"high-performance"` - it highly increases GPU usage
  // powerPreference: "high-performance",
  premultipliedAlpha: !1,
  preserveDrawingBuffer: !1,
  stencil: !1
};
// Shared, lazily-initialized WebGL2 context used for GPU pixel readback.
let ie;
// Fh: feature flag — true when the WebGL2 implementation reads an RGB
// framebuffer back as tightly-packed RGB (IMPLEMENTATION_COLOR_READ_FORMAT
// === RGB), probed once with a 1x1 texture. false when the browser is
// unsupported or WebGL2 is unavailable.
const Fh = (() => {
  if (typeof window > "u" || !Th() || (ie ?? (ie = ya().getContext("webgl2", As)), ie === null))
    return !1;
  const r = ie.createTexture();
  ie.bindTexture(ie.TEXTURE_2D, r), ie.texImage2D(ie.TEXTURE_2D, 0, ie.RGB, 1, 1, 0, ie.RGB, ie.UNSIGNED_BYTE, null);
  const a = ie.createFramebuffer();
  ie.bindFramebuffer(ie.FRAMEBUFFER, a), ie.framebufferTexture2D(ie.FRAMEBUFFER, ie.COLOR_ATTACHMENT0, ie.TEXTURE_2D, r, 0);
  const o = ie.getParameter(ie.IMPLEMENTATION_COLOR_READ_FORMAT);
  return ie.bindFramebuffer(ie.FRAMEBUFFER, null), ie.bindTexture(ie.TEXTURE_2D, null), ie.deleteFramebuffer(a), ie.deleteTexture(r), o === ie.RGB;
  // Rh(source, dest, rect, format): uploads `source` into a texture, attaches
  // it to a framebuffer and asynchronously reads `rect` back into the
  // ArrayBuffer-view `dest` through a PIXEL_PACK buffer + fence sync, so the
  // readback does not stall the CPU.
})(), Rh = async (r, a, o, l = "RGBA") => {
  ie ?? (ie = ya().getContext("webgl2", As)), ie.canvas.width = r.width, ie.canvas.height = r.height, l === "RGB" && ie.pixelStorei(ie.PACK_ALIGNMENT, 1);
  const m = ie.createTexture();
  ie.bindTexture(ie.TEXTURE_2D, m), ie.texParameteri(ie.TEXTURE_2D, ie.TEXTURE_MIN_FILTER, ie.NEAREST), ie.texParameteri(ie.TEXTURE_2D, ie.TEXTURE_MAG_FILTER, ie.LINEAR), ie.texImage2D(ie.TEXTURE_2D, 0, ie[l], ie[l], ie.UNSIGNED_BYTE, r);
  const w = ie.createFramebuffer();
  ie.bindFramebuffer(ie.FRAMEBUFFER, w), ie.framebufferTexture2D(ie.FRAMEBUFFER, ie.COLOR_ATTACHMENT0, ie.TEXTURE_2D, m, 0);
  const v = ie.createBuffer();
  ie.bindBuffer(ie.PIXEL_PACK_BUFFER, v), ie.bufferData(ie.PIXEL_PACK_BUFFER, a.byteLength, ie.STREAM_READ), ie.readPixels(
    o.x,
    o.y,
    o.width,
    o.height,
    ie[l],
    ie.UNSIGNED_BYTE,
    0
  ), ie.bindBuffer(ie.PIXEL_PACK_BUFFER, null), ie.bindFramebuffer(ie.FRAMEBUFFER, null), ie.deleteFramebuffer(w), ie.bindTexture(ie.TEXTURE_2D, null), ie.deleteTexture(m);
  const b = ie.fenceSync(ie.SYNC_GPU_COMMANDS_COMPLETE, 0);
  // Wait for the GPU to finish, then copy the pixels out of the pack buffer.
  // BUGFIX: `a.buffer` is the ArrayBuffer *property* of the destination view;
  // the original invoked it as a function (`a.buffer()`), which throws
  // "a.buffer is not a function" on any standard TypedArray/DataView.
  ie.flush(), await Ah(ie, b).finally(() => ie.deleteSync(b)), ie.bindBuffer(ie.PIXEL_PACK_BUFFER, v), ie.getBufferSubData(
    ie.PIXEL_PACK_BUFFER,
    0,
    new DataView(a.buffer),
    a.byteOffset,
    a.byteLength
  ), ie.bindBuffer(ie.PIXEL_PACK_BUFFER, null), ie.deleteBuffer(v);
  // Ah(gl, sync): polls clientWaitSync every ~2 ms via the visibility-aware
  // timer facade until the fence signals; rejects on WAIT_FAILED.
}, Ah = (r, a) => new Promise(
  (o, l) => function m() {
    const w = r.clientWaitSync(a, 0, 0);
    if (w === r.WAIT_FAILED)
      return l(new Error("GPU operations complete wait failed"));
    if (w === r.CONDITION_SATISFIED || w === r.ALREADY_SIGNALED)
      return o();
    _a.setTimeout(m, 2);
  }()
);
/** Creates a DOM canvas of the given size (fallback path). */
function Ih(width = 256, height = 128) {
  const canvas = document.createElement("canvas");
  canvas.width = width;
  canvas.height = height;
  return canvas;
}
/** Creates an OffscreenCanvas of the given size. */
function kh(width = 256, height = 128) {
  return new OffscreenCanvas(width, height);
}
/** Canvas factory: OffscreenCanvas when supported (and not Safari, per Rs), otherwise a DOM canvas. */
function ya(width = 256, height = 128) {
  return Rs ? kh(width, height) : Ih(width, height);
}
/**
 * Builds frame-option helpers from input options
 * ({ crop?, orientation?, horizontalFlip?, textureOrientation? }).
 * `getFrameOptions` resolves the visible rectangle of a frame, applying the
 * optional crop callback (with axes swapped for 90/270 orientations) and
 * collapsing the display size onto the cropped rect. `getSourceOptions`
 * first derives the display size from a video element or any sized source.
 */
const Wn = (opts = {}) => {
  const getFrameOptions = ({ displayWidth, displayHeight, visibleRect = null }) => {
    let x = visibleRect?.x ?? 0;
    let y = visibleRect?.y ?? 0;
    let width = visibleRect?.width ?? displayWidth;
    let height = visibleRect?.height ?? displayHeight;
    if (opts.crop) {
      const angle = opts?.orientation ?? 0;
      // crop(w, h) -> [offsetX, offsetY, cropWidth, cropHeight]; for rotated
      // inputs (90/270) the axes are swapped both going in and coming out.
      let dx, dy, cw, chh;
      if (angle == 90 || angle == 270) [dy, dx, chh, cw] = opts.crop(height, width);
      else [dx, dy, cw, chh] = opts.crop(width, height);
      x += dx;
      y += dy;
      width = cw;
      height = chh;
    }
    return {
      visibleRect: { x, y, width, height },
      displayWidth: width,
      displayHeight: height,
      horizontalFlip: opts.horizontalFlip,
      orientation: opts.orientation,
      textureOrientation: opts.textureOrientation
    };
  };
  const getSourceOptions = (source) => {
    const isVideo = source instanceof HTMLVideoElement;
    return getFrameOptions({
      displayWidth: isVideo ? source.videoWidth : source.width,
      displayHeight: isVideo ? source.videoHeight : source.height
    });
  };
  return { getSourceOptions, getFrameOptions };
};
// Frame: wraps a texture source (video element, canvas or image) together
// with its visible rectangle and orientation metadata, exposing a
// VideoFrame-like surface (allocationSize/copyTo/close).
class Jr {
  constructor(a, o = {}, l = null) {
    te(this, "_source", null);
    // Region of the source that is actually displayed (defaults to full size).
    te(this, "_visibleRect", { x: 0, y: 0, width: 0, height: 0 });
    // Optional cleanup callback run by close().
    te(this, "_deleter");
    te(this, "horizontalFlip", !1);
    te(this, "orientation", 0);
    te(this, "textureOrientation", this.orientation);
    te(this, "frameTimestamp", performance.now());
    // Video elements report their intrinsic size via videoWidth/videoHeight.
    const m = a instanceof HTMLVideoElement ? a.videoWidth : a.width, w = a instanceof HTMLVideoElement ? a.videoHeight : a.height;
    this._visibleRect.x = o.visibleRect?.x ?? 0, this._visibleRect.y = o.visibleRect?.y ?? 0, this._visibleRect.width = o.visibleRect?.width ?? m, this._visibleRect.height = o.visibleRect?.height ?? w, this.horizontalFlip = o.horizontalFlip ?? this.horizontalFlip, this.orientation = o.orientation ?? this.orientation, this.textureOrientation = o.textureOrientation ?? this.textureOrientation, a.width = m, a.height = w, this._source = a, this._deleter = l;
  }
  /** @internal */
  get texture() {
    // Only usable as a texture while the source still matches the display
    // size (i.e. the visible rect covers the whole source).
    return this._source?.width == this.displayWidth && this._source?.height == this.displayHeight ? this._source : null;
  }
  get displayWidth() {
    return this._visibleRect.width;
  }
  get displayHeight() {
    return this._visibleRect.height;
  }
  /** Pixel format of the Frame */
  get format() {
    // "RGB" when the WebGL2 implementation supports tight RGB readback (Fh),
    // otherwise "RGBA"; null once the Frame has been closed.
    return this._source ? Fh ? "RGB" : "RGBA" : null;
  }
  /** @returns The number of bytes required to hold the Frame pixels */
  allocationSize() {
    if (!this.format)
      throw new Error("Failed to execute 'allocationSize' on 'Frame': Frame is closed.");
    const { width: a, height: o } = { width: this._visibleRect.width, height: this._visibleRect.height };
    // format.length is the bytes-per-pixel: "RGB" -> 3, "RGBA" -> 4.
    return a * o * this.format.length;
  }
  /** Copies the Frame pixels to the destination */
  async copyTo(a) {
    if (!this._source)
      throw new Error("Failed to execute 'copyTo' on 'Frame': Frame is closed.");
    // Asynchronous GPU readback of the visible rect into `a`.
    return await Rh(this._source, a, this._visibleRect, this.format), [];
  }
  /** Releases GPU resources held by the Frame */
  close() {
    this._deleter && this._deleter(), this._source = null;
  }
}
// Lh: bundler-emitted decorator applicator (tsc-style __decorateClass):
// applies decorators `r` (last to first) to member `o` of target `a`;
// `l` distinguishes class vs member decoration. `Is` holds the computed
// async-iterator key so the decorator can be applied to it below.
var Ph = Object.defineProperty, Dh = Object.getOwnPropertyDescriptor, Lh = (r, a, o, l) => {
  for (var m = l > 1 ? void 0 : l ? Dh(a, o) : a, w = r.length - 1, v; w >= 0; w--)
    (v = r[w]) && (m = (l ? v(a, o, m) : v(m)) || m);
  return l && m && Ph(a, o, m), m;
}, Is;
// Image input source: decodes the supplied URL/Blob once and yields a single
// Frame; the Frame's deleter revokes any object URL that was created.
let Bh = class {
  constructor(a) {
    te(this, "_src");
    /** @internal */
    te(this, "kind", "image");
    this._src = a;
  }
  async *[Is = Symbol.asyncIterator](a) {
    const o = await Eh(this._src), l = Wn(a);
    yield new Jr(o, l.getSourceOptions(o), () => {
      URL.revokeObjectURL(o.src), o.src = "";
    });
  }
};
// Throttle iteration of the (single-frame) image source to 30 fps.
Lh([
  ba(30)
], Bh.prototype, Is, 1);
// Nh: bundler-emitted decorator applicator (same shape as Lh above).
// ks: computed async-iterator key; zt: binding for the class expression so
// static members can reference it.
var Mh = Object.defineProperty, Oh = Object.getOwnPropertyDescriptor, Nh = (r, a, o, l) => {
  for (var m = l > 1 ? void 0 : l ? Oh(a, o) : a, w = r.length - 1, v; w >= 0; w--)
    (v = r[w]) && (m = (l ? v(a, o, m) : v(m)) || m);
  return l && m && Mh(a, o, m), m;
}, ks, zt;
// MediaStream input source. Instances are cached per MediaStream in a static
// WeakMap: constructing with an already-seen stream returns the cached
// instance (constructor return-object trick).
const Ps = (zt = class {
  /**
   * Creates MediaStream input from {@link https://developer.mozilla.org/en-US/docs/Web/API/MediaStream/MediaStream | MediaStream}
   * @example
   * ```ts
   * const stream = new MediaStream(
   * await navigator.mediaDevices.getUserMedia({ video: true })
   * )
   * ```
   */
  constructor(a) {
    // @ts-expect-error: Property '_stream' has no initializer and is not definitely assigned in the constructor.
    te(this, "_stream");
    /** @internal */
    te(this, "kind", "stream");
    if (!zt.cache.has(a))
      zt.cache.set(a, this);
    else
      return zt.cache.get(a);
    this._stream = a;
  }
  async *[ks = Symbol.asyncIterator](a) {
    const o = Wn(a);
    // Fast path: MediaStreamTrackProcessor delivers WebCodecs VideoFrames
    // straight from the video track.
    if (typeof MediaStreamTrackProcessor < "u") {
      const l = this._stream.getVideoTracks()[0];
      if (l.readyState === "ended")
        return;
      const w = new MediaStreamTrackProcessor({ track: l }).readable.getReader();
      try {
        for (; ; ) {
          const { done: v, value: b } = await w.read();
          if (v)
            return;
          // Re-wrap the frame with crop/orientation options and close the
          // original promptly to release its backing resources.
          const P = new VideoFrame(b, o.getFrameOptions(b));
          P.horizontalFlip = a?.horizontalFlip ?? !0, P.orientation = a?.orientation ?? 0, P.textureOrientation = a?.textureOrientation ?? P.orientation, P.frameTimestamp = b.timestamp, b.close(), yield P;
        }
      } finally {
        w.releaseLock();
      }
    } else {
      // Fallback: play the stream in a hidden <video> and sample it once per
      // video frame (requestVideoFrameCallback when available, else rAF).
      const l = await $n(this._stream), m = "requestVideoFrameCallback" in l ? l.requestVideoFrameCallback.bind(l) : requestAnimationFrame;
      for (; !l.paused; )
        await new Promise(m), yield new Jr(l, o.getSourceOptions(l));
      URL.revokeObjectURL(l.src), l.src = "", l.srcObject = null;
    }
  }
  /** Stops underlying media stream */
  stop() {
    for (const a of this._stream.getVideoTracks())
      a.stop();
    this._stream && zt.cache.delete(this._stream);
  }
}, te(zt, "cache", /* @__PURE__ */ new WeakMap()), zt);
// Throttle frame iteration to 30 fps.
Nh([
  ba(30)
], Ps.prototype, ks, 1);
// ki: public alias for the MediaStream input class.
// gm: ReadableStream input source — consumes a stream of WebCodecs
// VideoFrames and re-yields them with crop/orientation options applied.
let ki = Ps, gm = class {
  /**
   * Creates ReadableStream input from {@link https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream | ReadableStream}
   */
  constructor(a) {
    te(this, "_readable");
    /** @internal */
    te(this, "kind", "stream");
    this._readable = a;
  }
  /**
   * Yields a sequence of {@link Frame | frames}
   * @internal
   */
  async *[Symbol.asyncIterator](a) {
    const o = Wn(a), l = this._readable.getReader();
    try {
      for (; ; ) {
        const { done: m, value: w } = await l.read();
        if (m)
          return;
        // Same re-wrapping as the MediaStream fast path: new VideoFrame with
        // computed options, metadata copied over, source frame closed.
        const v = new VideoFrame(w, o.getFrameOptions(w));
        v.horizontalFlip = a?.horizontalFlip ?? !0, v.orientation = a?.orientation ?? 0, v.textureOrientation = a?.textureOrientation ?? v.orientation, v.frameTimestamp = w.timestamp, w.close(), yield v;
      }
    } finally {
      l.releaseLock();
    }
  }
  /** Stops underlying readable stream */
  stop() {
    this._readable.cancel();
  }
};
// Wh: bundler-emitted decorator applicator (same shape as Lh above);
// Ds: computed async-iterator key.
var Uh = Object.defineProperty, $h = Object.getOwnPropertyDescriptor, Wh = (r, a, o, l) => {
  for (var m = l > 1 ? void 0 : l ? $h(a, o) : a, w = r.length - 1, v; w >= 0; w--)
    (v = r[w]) && (m = (l ? v(a, o, m) : v(m)) || m);
  return l && m && Uh(a, o, m), m;
}, Ds;
// Default video input options.
const jh = {
  loop: !1
};
// Video input source: plays a video URL/Blob in a hidden <video> element
// (created lazily via $n, promise cached in _video) and yields a Frame per
// rendered video frame while playback is active.
class Gh {
  /** @param options - options to be merged with {@link defaultVideoOptions} */
  constructor(a, o) {
    te(this, "_src");
    te(this, "_options");
    te(this, "_video", null);
    /** @internal */
    te(this, "kind", "video");
    this._src = a, this._options = {
      ...jh,
      ...o
    };
  }
  async *[Ds = Symbol.asyncIterator](a) {
    const o = await (this._video ?? (this._video = $n(this._src, this._options))), l = Wn(a), m = "requestVideoFrameCallback" in o ? o.requestVideoFrameCallback.bind(o) : requestAnimationFrame;
    for (; !o.paused; )
      await new Promise(m), yield new Jr(o, l.getSourceOptions(o));
  }
  /** Stops underlying video */
  stop() {
    // Release the media element's sources (revoking any object URL) and drop
    // the cached promise so the next iteration recreates the element.
    this._video && this._video.then(
      (a) => (URL.revokeObjectURL(a.src), a.src = "", a.srcObject = null)
    ), this._video = null;
  }
}
// Throttle frame iteration to 30 fps.
Wh([
  ba(30)
], Gh.prototype, Ds, 1);
const Vh = `#define GLSLIFY 1
attribute vec2 a_position;
varying vec2 v_tex_uv;
void main() {
v_tex_uv.x = (a_position.x + 1.) * .5;
v_tex_uv.y = 1. - (a_position.y + 1.) * .5;
gl_Position = vec4(a_position, 0., 1.);
}
`, zh = `precision highp float;
#define GLSLIFY 1
varying vec2 v_tex_uv;
uniform sampler2D u_texture;
uniform vec2 u_viewsize;
/**
* u_filters.x - denoising algorithm to use
* 1 - FSR
* 2 - Bilateral
* any other value - none
* u_filters.y - light correction coefficient in [0, 2]
* 1 - no light correction
*/
uniform vec2 u_filters;
// https://github.com/glslify/glslify#importing-a-glsl-module
// https://github.com/glslify/glslify#passing-references-between-modules
// Copyright (c) 2021 Advanced Micro Devices, Inc. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//
// FidelityFX FSR v1.0.2 by AMD
// ported to mpv by agyild - https://gist.github.com/agyild/82219c545228d70c5604f865ce0b0ce5
// ported to WebGL by goingdigital - https://www.shadertoy.com/view/stXSWB
// using colorspace functions from tobspr - https://github.com/tobspr/GLSL-Color-Spaces/blob/master/ColorSpaces.inc.glsl
#define SHARPENING 2.0 // Sharpening intensity: Adjusts sharpening intensity by averaging the original pixels to the sharpened result. 1.0 is the unmodified default. 0.0 to 1.0.
#define CONTRAST 2.0 // Adjusts the range the shader adapts to high contrast (0 is not all the way off). Higher values = more high contrast sharpening. 0.0 to 1.0.
#define PERFORMANCE 1 // Whether to use optimizations for performance with loss of quality
// Used to convert from linear RGB to XYZ space
const mat3 RGB_2_XYZ_2717090884 = (mat3(
0.4124564, 0.2126729, 0.0193339,
0.3575761, 0.7151522, 0.1191920,
0.1804375, 0.0721750, 0.9503041
));
// Used to convert from XYZ to linear RGB space
const mat3 XYZ_2_RGB_2717090884 = (mat3(
3.2404542,-0.9692660, 0.0556434,
-1.5371385, 1.8760108,-0.2040259,
-0.4985314, 0.0415560, 1.0572252
));
// Converts a color from linear RGB to XYZ space
vec3 rgb_to_xyz_2717090884(vec3 rgb) {
return RGB_2_XYZ_2717090884 * rgb;
}
// Converts a color from XYZ to linear RGB space
vec3 xyz_to_rgb_2717090884(vec3 xyz) {
return XYZ_2_RGB_2717090884 * xyz;
}
/* EASU stage
*
* This takes a reduced resolution source, and scales it up while preserving detail.
*
* Updates:
* stretch definition fixed. Thanks nehon for the bug report!
*/
vec3 FsrEasuCF(vec2 p) {
vec2 uv = (p + .5) / u_viewsize;
vec4 color = texture2D(u_texture, uv);
return rgb_to_xyz_2717090884(color.rgb);
}
/**** EASU ****/
void FsrEasuCon(
out vec4 con0,
out vec4 con1,
out vec4 con2,
out vec4 con3,
// This the rendered image resolution being upscaled
vec2 inputViewportInPixels,
// This is the resolution of the resource containing the input image (useful for dynamic resolution)
vec2 inputSizeInPixels,
// This is the display resolution which the input image gets upscaled to
vec2 outputSizeInPixels
)
{
// Output integer position to a pixel position in viewport.
con0 = vec4(
inputViewportInPixels.x/outputSizeInPixels.x,
inputViewportInPixels.y/outputSizeInPixels.y,
.5*inputViewportInPixels.x/outputSizeInPixels.x-.5,
.5*inputViewportInPixels.y/outputSizeInPixels.y-.5
);
// Viewport pixel position to normalized image space.
// This is used to get upper-left of 'F' tap.
con1 = vec4(1.,1.,1.,-1.)/inputSizeInPixels.xyxy;
// Centers of gather4, first offset from upper-left of 'F'.
// +---+---+
// | | |
// +--(0)--+
// | b | c |
// +---F---+---+---+
// | e | f | g | h |
// +--(1)--+--(2)--+
// | i | j | k | l |
// +---+---+---+---+
// | n | o |
// +--(3)--+
// | | |
// +---+---+
// These are from (0) instead of 'F'.
con2 = vec4(-1.,2.,1.,2.)/inputSizeInPixels.xyxy;
con3 = vec4(0.,4.,0.,0.)/inputSizeInPixels.xyxy;
}
// Filtering for a given tap for the scalar.
void FsrEasuTapF(
inout vec3 aC, // Accumulated color, with negative lobe.
inout float aW, // Accumulated weight.
vec2 off_0, // Pixel offset from resolve position to tap.
vec2 dir_0, // Gradient direction.
vec2 len_0, // Length.
float lob_0, // Negative lobe strength.
float clp_0, // Clipping point.
vec3 c_0
)
{
// Tap color.
// Rotate offset by direction.
vec2 v = vec2(dot(off_0, dir_0), dot(off_0,vec2(-dir_0.y,dir_0.x)));
// Anisotropy.
v *= len_0;
// Compute distance^2.
float d2 = min(dot(v,v),clp_0);
// Limit to the window as at corner, 2 taps can easily be outside.
// Approximation of lancos2 without sin() or rcp(), or sqrt() to get x.
// (25/16 * (2/5 * x^2 - 1)^2 - (25/16 - 1)) * (1/4 * x^2 - 1)^2
// |_______________________________________| |_______________|
// base window
// The general form of the 'base' is,
// (a*(b*x^2-1)^2-(a-1))
// Where 'a=1/(2*b-b^2)' and 'b' moves around the negative lobe.
float wB = .4 * d2 - 1.;
float wA = lob_0 * d2 -1.;
wB *= wB;
wA *= wA;
wB = 1.5625*wB-.5625;
float w= wB * wA;
// Do weighted average.
aC += c_0*w;
aW += w;
}
//------------------------------------------------------------------------------------------------------------------------------
// Accumulate direction and length.
void FsrEasuSetF(
inout vec2 dir,
inout float len,
float w,
float lA,float lB,float lC,float lD,float lE
)
{
// Direction is the '+' diff.
// a
// b c d
// e
// Then takes magnitude from abs average of both sides of 'c'.
// Length converts gradient reversal to 0, smoothly to non-reversal at 1, shaped, then adding horz and vert terms.
float lenX = max(abs(lD - lC), abs(lC - lB));
float dirX = lD - lB;
dir.x += dirX * w;
lenX = clamp(abs(dirX)/lenX,0.,1.);
lenX *= lenX;
len += lenX * w;
// Repeat for the y axis.
float lenY = max(abs(lE - lC), abs(lC - lA));
float dirY = lE - lA;
dir.y += dirY * w;
lenY = clamp(abs(dirY) / lenY,0.,1.);
lenY *= lenY;
len += lenY * w;
}
//------------------------------------------------------------------------------------------------------------------------------
void FsrEasuF(
out vec3 pix,
vec2 ip, // Integer pixel position in output.
// Constants generated by FsrEasuCon().
vec4 con0, // xy = output to input scale, zw = first pixel offset correction
vec4 con1_0,
vec4 con2_0,
vec4 con3_0
)
{
//------------------------------------------------------------------------------------------------------------------------------
// Get position of 'f'.
vec2 pp = ip * con0.xy + con0.zw; // Corresponding input pixel/subpixel
vec2 fp = floor(pp);// fp = source nearest pixel
pp -= fp; // pp = source subpixel
//------------------------------------------------------------------------------------------------------------------------------
// 12-tap kernel.
// b c
// e f g h
// i j k l
// n o
// Gather 4 ordering.
// a b
// r g
vec2 p0 = fp * con1_0.xy + con1_0.zw;
// These are from p0 to avoid pulling two constants on pre-Navi hardware.
vec2 p1 = p0 + con2_0.xy;
vec2 p2 = p0 + con2_0.zw;
vec2 p3 = p0 + con3_0.xy;
// TextureGather is not available on WebGL2
vec4 off = vec4(-.5,.5,-.5,.5)*con1_0.xxyy;
// textureGather to texture offsets
// x=west y=east z=north w=south
vec3 bC = FsrEasuCF(p0 + off.xw); float bL = bC.g + 0.5 *(bC.r + bC.b);
vec3 cC = FsrEasuCF(p0 + off.yw); float cL = cC.g + 0.5 *(cC.r + cC.b);
vec3 iC = FsrEasuCF(p1 + off.xw); float iL = iC.g + 0.5 *(iC.r + iC.b);
vec3 jC = FsrEasuCF(p1 + off.yw); float jL = jC.g + 0.5 *(jC.r + jC.b);
vec3 fC = FsrEasuCF(p1 + off.yz); float fL = fC.g + 0.5 *(fC.r + fC.b);
vec3 eC = FsrEasuCF(p1 + off.xz); float eL = eC.g + 0.5 *(eC.r + eC.b);
vec3 kC = FsrEasuCF(p2 + off.xw); float kL = kC.g + 0.5 *(kC.r + kC.b);
vec3 lC = FsrEasuCF(p2 + off.yw); float lL = lC.g + 0.5 *(lC.r + lC.b);
vec3 hC = FsrEasuCF(p2 + off.yz); float hL = hC.g + 0.5 *(hC.r + hC.b);
vec3 gC = FsrEasuCF(p2 + off.xz); float gL = gC.g + 0.5 *(gC.r + gC.b);
vec3 oC = FsrEasuCF(p3 + off.yz); float oL = oC.g + 0.5 *(oC.r + oC.b);
vec3 nC = FsrEasuCF(p3 + off.xz); float nL = nC.g + 0.5 *(nC.r + nC.b);
//------------------------------------------------------------------------------------------------------------------------------
// Simplest multi-channel approximate luma possible (luma times 2, in 2 FMA/MAD).
// Accumulate for bilinear interpolation.
vec2 dir = vec2(0.);
float len = 0.;
FsrEasuSetF(dir, len, (1.-pp.x)*(1.-pp.y), bL, eL, fL, gL, jL);
FsrEasuSetF(dir, len, pp.x *(1.-pp.y), cL, fL, gL, hL, kL);
FsrEasuSetF(dir, len, (1.-pp.x)* pp.y , fL, iL, jL, kL, nL);
FsrEasuSetF(dir, len, pp.x * pp.y , gL, jL, kL, lL, oL);
//------------------------------------------------------------------------------------------------------------------------------
// Normalize with approximation, and cleanup close to zero.
vec2 dir2 = dir * dir;
float dirR = dir2.x + dir2.y;
bool zro = dirR < (1.0/32768.0);
dirR = inversesqrt(dirR);
#if (PERFORMANCE == 1)
if (zro) {
vec4 w = vec4(0.0);
w.x = (1.0 - pp.x) * (1.0 - pp.y);
w.y = pp.x * (1.0 - pp.y);
w.z = (1.0 - pp.x) * pp.y;
w.w = pp.x * pp.y;
pix.r = clamp(dot(w, vec4(fL, gL, jL, kL)), 0.0, 1.0);
return;
}
#elif (PERFORMANCE == 0)
dirR = zro ? 1.0 : dirR;
dir.x = zro ? 1.0 : dir.x;
#endif
dir *= vec2(dirR);
// Transform from {0 to 2} to {0 to 1} range, and shape with square.
len = len * 0.5;
len *= len;
// Stretch kernel {1.0 vert|horz, to sqrt(2.0) on diagonal}.
float stretch = dot(dir,dir) / (max(abs(dir.x), abs(dir.y)));
// Anisotropic length after rotation,
// x := 1.0 lerp to 'stretch' on edges
// y := 1.0 lerp to 2x on edges
vec2 len2 = vec2(1. +(stretch-1.0)*len, 1. -.5 * len);
// Based on the amount of 'edge',
// the window shifts from +/-{sqrt(2.0) to slightly beyond 2.0}.
float lob = .5 - .29 * len;
// Set distance^2 clipping point to the end of the adjustable window.
float clp = 1./lob;
//------------------------------------------------------------------------------------------------------------------------------
// Accumulation mixed with min/max of 4 nearest.
// b c
// e f g h
// i j k l
// n o
// Accumulation.
vec3 aC = vec3(0);
float aW = 0.;
FsrEasuTapF(aC, aW, vec2( 0.,-1.)-pp, dir, len2, lob, clp, bC);
FsrEasuTapF(aC, aW, vec2( 1.,-1.)-pp, dir, len2, lob, clp, cC);
FsrEasuTapF(aC, aW, vec2(-1., 1.)-pp, dir, len2, lob, clp, iC);
FsrEasuTapF(aC, aW, vec2( 0., 1.)-pp, dir, len2, lob, clp, jC);
FsrEasuTapF(aC, aW, vec2( 0., 0.)-pp, dir, len2, lob, clp, fC);
FsrEasuTapF(aC, aW, vec2(-1., 0.)-pp, dir, len2, lob, clp, eC);
FsrEasuTapF(aC, aW, vec2( 1., 1.)-pp, dir, len2, lob, clp, kC);
FsrEasuTapF(aC, aW, vec2( 2., 1.)-pp, dir, len2, lob, clp, lC);
FsrEasuTapF(aC, aW, vec2( 2., 0.)-pp, dir, len2, lob, clp, hC);
FsrEasuTapF(aC, aW, vec2( 1., 0.)-pp, dir, len2, lob, clp, gC);
FsrEasuTapF(aC, aW, vec2( 1., 2.)-pp, dir, len2, lob, clp, oC);
FsrEasuTapF(aC, aW, vec2( 0., 2.)-pp, dir, len2, lob, clp, nC);
//------------------------------------------------------------------------------------------------------------------------------
// Normalize and dering.
#if (PERFORMANCE == 1)
pix = aC/aW;
#elif (PERFORMANCE == 0)
vec3 min4 = min(min(fC,gC),min(jC,kC));
vec3 max4 = max(max(fC,gC),max(jC,kC));
pix=min(max4,max(min4,aC/aW));
#endif
}
void EASU( out vec4 fragColor, in vec2 fragCoord )
{
vec3 c;
vec4 con0,con1,con2,con3;
// "rendersize" refers to size of source image before upscaling.
vec2 rendersize = u_viewsize;
FsrEasuCon(
con0, con1, con2, con3, rendersize, rendersize, rendersize
);
FsrEasuF(c, fragCoord, con0, con1, con2, con3);
fragColor = vec4(xyz_to_rgb_2717090884(c.xyz), 1);
}
vec4 getPixel(vec2 pos) {
vec2 coord = (pos + .5) / u_viewsize;
coord.y = 1.0 - coord.y;
return texture2D(u_texture, coord);
}
vec4 fsr_easu_2717090884(vec2 uv) {
vec4 e = getPixel(gl_FragCoord.xy);
vec4 e_xyz = vec4(rgb_to_xyz_2717090884(e.rgb), 1);
EASU(e_xyz, (gl_FragCoord.xy + 0.5) / u_viewsize);
// fetch a 3x3 neighborhood around the pixel 'e',
// a b c
// d(e)f
// g h i
vec3 a = getPixel(gl_FragCoord.xy + vec2(-1.0,-1.0)).rgb;
vec3 b = getPixel(gl_FragCoord.xy + vec2( 0.0,-1.0)).rgb;
vec3 c = getPixel(gl_FragCoord.xy + vec2( 1.0,-1.0)).rgb;
vec3 f = getPixel(gl_FragCoord.xy + vec2( 1.0, 0.0)).rgb;
vec3 g = getPixel(gl_FragCoord.xy + vec2(-1.0, 1.0)).rgb;
vec3 h = getPixel(gl_FragCoord.xy + vec2( 0.0, 1.0)).rgb;
vec3 d = getPixel(gl_FragCoord.xy + vec2(-1.0, 0.0)).rgb;
vec3 i = getPixel(gl_FragCoord.xy + vec2( 1.0, 1.0)).rgb;;
// Soft min and max.
// a b c b
// d e f * 0.5 + d e f * 0.5
// g h i h
// These are 2.0x bigger (factored out the extra multiply).
vec3 mnRGB = min(min(min(d, e.rgb), min(f, b)), h);
vec3 mnRGB2 = min(mnRGB, min(min(a, c), min(g, i)));
mnRGB += mnRGB2;
vec3 mxRGB = max(max(max(d, e.rgb), max(f, b)), h);
vec3 mxRGB2 = max(mxRGB, max(max(a, c), max(g, i)));
mxRGB += mxRGB2;
// Smooth minimum distance to signal limit divided by smooth max.
vec3 rcpMRGB = 1.0 / mxRGB;
vec3 ampRGB = clamp(min(mnRGB, 2.0 - mxRGB) * rcpMRGB, 0.0, 1.0);
// Shaping amount of sharpening.
ampRGB = inversesqrt(ampRGB);
float peak = -3.0 * clamp(CONTRAST, 0.0, 1.0) + 8.0;
vec3 wRGB = -(1.0 / (ampRGB * peak));
vec3 rcpWeightRGB = 1.0 / (4.0 * wRGB + 1.0);
// 0 w 0
// Filter shape: w 1 w
// 0 w 0
vec3 window = (b + d) + (f + h);
vec3 outColor = clamp((window * wRGB + e.rgb) * rcpWeightRGB, 0.0, 1.0);
return vec4(mix(e.rgb, outColor, SHARPENING), e.a);
}
// https://github.com/glslify/glslify#exporting-a-glsl-module
// DIFF: range-kernel falloff constant (in 0..255 colour units); RADIUS: spatial radius in pixels.
#define DIFF 1.0
#define RADIUS 4.0
// Accumulate one bilateral-filter tap: the sample at uv + random_dir * radius is
// weighted by an exponential falloff of its squared colour distance to the centre
// pixel, so samples across strong edges contribute little (edge preservation).
void bilateral_iter_3977570374(vec2 random_dir, vec2 radius, float diff, vec4 pixel, vec2 uv, inout vec3 result, inout float totalWeight)
{
vec2 dir = random_dir * radius;
vec3 randomPixel = texture2D(u_texture, uv + dir).xyz;
vec3 delta = randomPixel - pixel.rgb;
float weight = exp(-dot(delta, delta) / diff);
result += randomPixel * weight;
totalWeight += weight;
}
// Edge-preserving denoise: weighted average of 16 fixed pseudo-random taps inside
// a RADIUS-pixel disc, normalised by the total weight. Alpha is passed through.
vec4 bilateral(vec2 uv)
{
vec2 radius = (RADIUS / u_viewsize);
float diff = DIFF / 255.0;
vec4 pixel = texture2D(u_texture, uv);
vec3 result = vec3(0.0, 0.0, 0.0);
float totalWeight = 0.0;
// unroll loop and substitute precalculated random vectors for GLSL 1.0 ES:
bilateral_iter_3977570374(vec2(-0.886051297,0.447155535), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(0.270759493,0.537728608), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(-0.896959424,0.440607518), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(-0.804274619,0.125076547), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(0.373693645,0.240383312), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(-0.850325704,-0.192106694), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(-0.453608066,0.889671504), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(-0.280496657,0.206442386), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(0.840040743,-0.36367026), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(-0.151598319,-0.884027064), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(-0.221440807,0.593896627), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(-0.797481239,-0.243254974), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(0.48824361,0.225083455), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(-0.0387817062,0.838459492), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(0.92897892,-0.133588716), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(-0.693672359,-0.706737161), radius, diff, pixel, uv, result, totalWeight);
// totalWeight is always > 0: the centre-distance of a tap can make its weight
// small but never zero (exp(-x) > 0).
result = result / totalWeight;
return vec4(result, pixel.a);
}
// https://github.com/glslify/glslify#exporting-a-glsl-module
// Branchless RGB -> HSV conversion (mix/step selects the max channel without ifs).
vec3 rgb2hsv(vec3 c)
{
vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);
vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));
vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));
float d = q.x - min(q.w, q.y);
// e guards against division by zero for pure greys (d == 0) and black (q.x == 0).
float e = 1.0e-10;
return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);
}
// Branchless HSV -> RGB conversion (inverse of rgb2hsv above).
vec3 hsv2rgb(vec3 c)
{
vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);
vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);
return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);
}
// Exposure correction in HSV space with coefficient s (u_filters.y):
// value channel is raised to the power s, so s < 1 brightens and s > 1 darkens.
vec4 light_correction_1117569599(vec4 c, float s)
{
vec3 hsv = rgb2hsv(c.rgb);
// Saturation gets the inverse-direction exponent s^-0.5 — presumably to
// compensate the perceived saturation shift of the value change; confirm.
hsv.y = pow(hsv.y, pow(s, -0.5));
hsv.z = pow(hsv.z, s);
vec3 rgb = hsv2rgb(hsv);
return vec4(rgb, c.a);
}
// https://github.com/glslify/glslify#exporting-a-glsl-module
// u_filters.x selects the denoiser: 1 = FSR, 2 = bilateral, anything else = passthrough.
// u_filters.y is the exposure coefficient; 1 means "no light correction".
void main() {
vec4 c;
if (u_filters.x == 1.)
c = fsr_easu_2717090884(v_tex_uv);
else if (u_filters.x == 2.)
c = bilateral(v_tex_uv);
else
c = texture2D(u_texture, v_tex_uv);
if (u_filters.y != 1.)
c = light_correction_1117569599(c, u_filters.y);
gl_FragColor = c;
}`, Hh = (r, a, o) => {
const l = r.createProgram();
return r.attachShader(l, a), r.attachShader(l, o), r.linkProgram(l), r.useProgram(l), l;
}, To = (r, a, o) => {
const l = r.createShader(a);
return r.shaderSource(l, o), r.compileShader(l), l;
}, Kh = (r) => {
const a = r.createTexture();
return r.bindTexture(r.TEXTURE_2D, a), r.texImage2D(r.TEXTURE_2D, 0, r.RGB, 1, 1, 0, r.RGB, r.UNSIGNED_BYTE, null), r.texParameteri(r.TEXTURE_2D, r.TEXTURE_WRAP_S, r.CLAMP_TO_EDGE), r.texParameteri(r.TEXTURE_2D, r.TEXTURE_WRAP_T, r.CLAMP_TO_EDGE), r.texParameteri(r.TEXTURE_2D, r.TEXTURE_MIN_FILTER, r.NEAREST), r.texParameteri(r.TEXTURE_2D, r.TEXTURE_MAG_FILTER, r.LINEAR), r.bindTexture(r.TEXTURE_2D, null), a;
}, Co = (r, a) => {
// Builds a WebGL post-processing pipeline over the MediaStream `r`: every video
// frame is drawn through the filter shader (denoise + exposure correction) onto
// an offscreen canvas, and canvas.captureStream(30) is exposed as the enhanced
// output. `a` is an optional { denoise, exposureCompensation } preferences
// object applied through the returned controller methods.
// o = current denoise mode (u_filters.x), l = exposure coefficient (u_filters.y).
let o = 0, l = 1;
const m = document.createElement("canvas"), w = m.captureStream(30), v = m.getContext("webgl"), b = To(v, v.VERTEX_SHADER, Vh), P = To(v, v.FRAGMENT_SHADER, zh), L = Hh(v, b, P), N = Kh(v);
v.bindTexture(v.TEXTURE_2D, N);
const H = v.getAttribLocation(L, "a_position"), re = v.createBuffer();
// Full-screen quad: two triangles covering clip space [-1, 1] x [-1, 1].
v.bindBuffer(v.ARRAY_BUFFER, re), v.bufferData(
v.ARRAY_BUFFER,
// prettier-ignore
new Float32Array([
-1,
-1,
1,
-1,
-1,
1,
-1,
1,
1,
-1,
1,
1
]),
v.STATIC_DRAW
), v.enableVertexAttribArray(H), v.vertexAttribPointer(H, 2, v.FLOAT, !1, 0, 0);
const z = v.getUniformLocation(L, "u_viewsize"), G = v.getUniformLocation(L, "u_filters");
// $n (createVideoElement) resolves to a playing <video> element for the stream;
// the promise is deliberately not awaited — the render loop starts once ready.
v.uniform2fv(G, new Float32Array([o, l])), $n(r).then((ce) => {
// Prefer requestVideoFrameCallback (fires per decoded frame) when available.
const ue = ce.requestVideoFrameCallback?.bind(ce) || _a.requestAnimationFrame;
(function Q() {
// Upload the current frame, resize canvas/viewport when video dimensions
// change, and draw; stop when the video ends or the capture is inactive.
ce.ended || !w.active || (ue(Q), v.texImage2D(v.TEXTURE_2D, 0, v.RGBA, v.RGBA, v.UNSIGNED_BYTE, ce), (m.width !== ce.videoWidth || m.height !== ce.videoHeight) && (v.viewport(0, 0, m.width = ce.videoWidth, m.height = ce.videoHeight), v.uniform2fv(z, new Float32Array([m.width, m.height]))), v.drawArrays(v.TRIANGLES, 0, 6));
})();
// NOTE(review): the program/shaders are flagged for deletion here but the
// linked program stays usable while it remains current.
}), v.deleteProgram(L), v.deleteShader(P), v.deleteShader(b);
const ve = {
/** Enhanced stream */
stream: w,
/**
* @param {number} value - denoise algorithm to use
* - Pass 1 to use FSR algorithm
* - Pass 2 to use Bilateral algorithm
* - Pass any other number to disable denoising
*/
denoise(ce) {
v.uniform2fv(G, new Float32Array([o = ce, l]));
},
/**
* @param {number} value - exposure compensation coefficient in [0, 2] range
* - Pass a value less than 1 to increase exposure
* - Pass a value greater than 1 to reduce exposure
* See the {@link https://fujifilm-dsc.com/en/manual/x-pro2/images/exp_exposure_480.gif | image} for visual example
* Inspired by MediaTrackConstraints {@link https://developer.mozilla.org/en-US/docs/Web/API/MediaTrackConstraints#exposurecompensation | Exposure compensation} parameter.
*/
exposureCompensation(ce) {
v.uniform2fv(G, new Float32Array([o, l = ce]));
}
};
// Apply initial preferences through the controller's own setters.
if (a)
for (const [ce, ue] of Object.entries(a))
ve[ce](ue);
return ve;
}, Xh = typeof screen < "u" && screen.height > screen.width, ea = {
// Xh: portrait-orientation heuristic, evaluated once at module load.
// ea: default getUserMedia video constraints merged with user overrides.
facingMode: "user",
width: { min: 640, ideal: 1280, max: 1920 },
height: { min: 480, ideal: 720, max: 1080 },
resizeMode: { ideal: "crop-and-scale" }
};
// On portrait screens drop the fixed landscape dimensions and let the
// browser choose a suitable resolution.
Xh && (delete ea.width, delete ea.height);
// Webcam source: lazily opens getUserMedia, optionally routes the stream
// through the WebGL enhancer (Co) when denoise/exposure preferences demand it,
// and yields frames via an async iterator.
class vm {
/**
* @param videoConstraints - constraints to be merged with {@link defaultVideoConstraints}
* and to be passed to {@link https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia | navigator.mediaDevices.getUserMedia()}
*/
constructor(a) {
// te(...) is the transpiled class-field initializer (see file prologue).
// _stream holds the pending/fulfilled getUserMedia promise, not the stream itself.
te(this, "_stream", null);
te(this, "_constraints");
te(this, "_preferences", {});
te(this, "_enhancer", null);
/** @internal */
te(this, "kind", "stream");
this._constraints = {
...ea,
...a
};
}
/**
* Specifies if the webcam is currently active.
*
* The webcam is considered active if it has been started and has not been stopped afterwards
*/
get active() {
return !!this._stream;
}
/**
* @param {number} algorithm - denoise algorithm to use
* - Pass false or 0 to disable denoising
* - Pass true or 1 to use FSR algorithm
* - Pass 2 to use Bilateral algorithm
* @internal
*/
denoise(a) {
// Number(a) normalizes booleans; forwarded live if the enhancer exists.
this._preferences.denoise = Number(a), this._enhancer?.denoise(this._preferences.denoise);
}
/**
* @param {number} coefficient - exposure compensation coefficient in [0, 2] range
* - Pass value less than 1 to increase exposure
* - Pass value greater than 1 to reduce exposure
* See the {@link https://fujifilm-dsc.com/en/manual/x-pro2/images/exp_exposure_480.gif | image} for visual example
* @internal
*/
setExposureCompensation(a) {
this._preferences.exposureCompensation = a, this._enhancer?.exposureCompensation(this._preferences.exposureCompensation);
}
/**
* Manually starts webcam
*
* > Ordinary webcam is lazily started during async iteration over it.
* >
* > But sometimes you may want to manually pre-start webcam e.g during parallel creation of a {@link Player} instance:
* > ```ts
* > const [webcam, player] = await Promise.all([
* >   new Webcam().start(),
* >   Player.create({ clientToken: "xxx-xxx-xxx" }),
* > ])
* >
* > player.use(webcam)
* > ```
*/
async start() {
// Reuse an already-started stream promise; otherwise open the device.
return await (this._stream ?? (this._stream = Fo(this._constraints))), this;
}
/**
* Yields a sequence of {@link Frame | frames}
* @internal
*/
async *[Symbol.asyncIterator](a) {
// Pi decides whether current preferences require the WebGL enhancer.
const o = await (this._stream ?? (this._stream = Fo(this._constraints))), l = this._enhancer = Pi(this._preferences) ? Co(o, this._preferences) : null;
let w = new ki(l ? l.stream : o)[Symbol.asyncIterator]({ horizontalFlip: !0, ...a }), v;
for (; ; ) {
// Hot-swap: create the enhancer if preferences now require it...
if (!this._enhancer && Pi(this._preferences)) {
const L = this._enhancer = Co(o, this._preferences);
w = new ki(L.stream)[Symbol.asyncIterator]({ horizontalFlip: !0, ...a });
}
// ...or tear it down (stopping its tracks) if they no longer do.
this._enhancer && !Pi(this._preferences) && (this._enhancer.stream.getTracks().forEach((N) => N.stop()), this._enhancer = null, w = new ki(o)[Symbol.asyncIterator]({ horizontalFlip: !0, ...a }));
// Two-way protocol: the value sent into this generator (v) is forwarded
// to the inner iterator on the next pull.
const { done: b, value: P } = await w.next(v);
if (b)
break;
v = yield P;
}
this.stop();
}
/** Turns off webcam */
stop() {
// _stream is a promise; stop tracks once it resolves. Also stop any
// enhancer-produced capture tracks, then reset state.
this._stream && this._stream.then((a) => a.getTracks().forEach((o) => o.stop())), this._enhancer && this._enhancer.stream.getTracks().forEach((a) => a.stop()), this._stream = null, this._enhancer = null;
}
}
// Fo: opens the webcam via getUserMedia with the given video constraints,
// throwing a descriptive error when mediaDevices is unavailable (non-secure
// context). Pi (below): true when preferences require the WebGL enhancer —
// a non-neutral exposureCompensation (!== 1) or denoise mode 1/2.
// wm: factory overrides used elsewhere in the SDK.
const Fo = async (r) => {
if (typeof navigator.mediaDevices > "u")
throw new Error(
`SecureContext is required to access webcam
It‘s likely you need to set up HTTPS/TLS for your website
See https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia#Encryption_based_security for details `
);
return await navigator.mediaDevices.getUserMedia({ video: r });
}, Pi = (r) => typeof r.exposureCompensation == "number" && r.exposureCompensation !== 1 || r.denoise === 1 || r.denoise === 2, wm = { createVideoElement: $n, createCanvas: ya };
// nanoid-style ID generator: Yh is the 64-character URL-safe alphabet,
// Ls(r) returns a random id of length r (default 21). Not cryptographically
// secure — Math.random() is used.
let Yh = "useandom-26T198340PX75pxJACKVERYMINDBUSHWOLF_GQZbfghjklqvwyzrict", Ls = (r = 21) => {
  let id = "";
  let remaining = r;
  // Append one uniformly-chosen alphabet character per iteration.
  while (remaining--) {
    id += Yh[(Math.random() * 64) | 0];
  }
  return id;
};
const Bs = "KGZ1bmN0aW9uKCl7InVzZSBzdHJpY3QiO3ZhciBzPVVpbnQ4QXJyYXkseD1VaW50MTZBcnJheSxPPVVpbnQzMkFycmF5LEg9bmV3IHMoWzAsMCwwLDAsMCwwLDAsMCwxLDEsMSwxLDIsMiwyLDIsMywzLDMsMyw0LDQsNCw0LDUsNSw1LDUsMCwwLDAsMF0pLEk9bmV3IHMoWzAsMCwwLDAsMSwxLDIsMiwzLDMsNCw0LDUsNSw2LDYsNyw3LDgsOCw5LDksMTAsMTAsMTEsMTEsMTIsMTIsMTMsMTMsMCwwXSksbDE9bmV3IHMoWzE2LDE3LDE4LDAsOCw3LDksNiwxMCw1LDExLDQsMTIsMywxMywyLDE0LDEsMTVdKSxKPWZ1bmN0aW9uKHIsdCl7Zm9yKHZhciBhPW5ldyB4KDMxKSxuPTA7bjwzMTsrK24pYVtuXT10Kz0xPDxyW24tMV07Zm9yKHZhciB2PW5ldyBPKGFbMzBdKSxuPTE7bjwzMDsrK24pZm9yKHZhciBpPWFbbl07aTxhW24rMV07KytpKXZbaV09aS1hW25dPDw1fG47cmV0dXJuW2Esdl19LEs9SihILDIpLFE9S1swXSxjMT1LWzFdO1FbMjhdPTI1OCxjMVsyNThdPTI4O2Zvcih2YXIgczE9SihJLDApLGQxPXMxWzBdLFU9bmV3IHgoMzI3NjgpLHU9MDt1PDMyNzY4OysrdSl7dmFyIEM9KHUmNDM2OTApPj4+MXwodSYyMTg0NSk8PDE7Qz0oQyY1MjQyOCk+Pj4yfChDJjEzMTA3KTw8MixDPShDJjYxNjgwKT4+PjR8KEMmMzg1NSk8PDQsVVt1XT0oKEMmNjUyODApPj4+OHwoQyYyNTUpPDw4KT4+PjF9Zm9yKHZhciB6PWZ1bmN0aW9uKHQsYSxuKXtmb3IodmFyIHY9dC5sZW5ndGgsaT0wLGM9bmV3IHgoYSk7aTx2OysraSl0W2ldJiYrK2NbdFtpXS0xXTt2YXIgZj1uZXcgeChhKTtmb3IoaT0wO2k8YTsrK2kpZltpXT1mW2ktMV0rY1tpLTFdPDwxO3ZhciBvO2lmKG4pe289bmV3IHgoMTw8YSk7dmFyIGU9MTUtYTtmb3IoaT0wO2k8djsrK2kpaWYodFtpXSlmb3IodmFyIGw9aTw8NHx0W2ldLGI9YS10W2ldLGQ9Zlt0W2ldLTFdKys8PGIseT1kfCgxPDxiKS0xO2Q8PXk7KytkKW9bVVtkXT4+PmVdPWx9ZWxzZSBmb3Iobz1uZXcgeCh2KSxpPTA7aTx2OysraSl0W2ldJiYob1tpXT1VW2ZbdFtpXS0xXSsrXT4+PjE1LXRbaV0pO3JldHVybiBvfSxCPW5ldyBzKDI4OCksdT0wO3U8MTQ0OysrdSlCW3VdPTg7Zm9yKHZhciB1PTE0NDt1PDI1NjsrK3UpQlt1XT05O2Zvcih2YXIgdT0yNTY7dTwyODA7Kyt1KUJbdV09Nztmb3IodmFyIHU9MjgwO3U8Mjg4OysrdSlCW3VdPTg7Zm9yKHZhciBWPW5ldyBzKDMyKSx1PTA7dTwzMjsrK3UpVlt1XT01O3ZhciBnMT16KEIsOSwxKSx3MT16KFYsNSwxKSxXPWZ1bmN0aW9uKHIpe2Zvcih2YXIgdD1yWzBdLGE9MTthPHIubGVuZ3RoOysrYSlyW2FdPnQmJih0PXJbYV0pO3JldHVybiB0fSxoPWZ1bmN0aW9uKHIsdCxhKXt2YXIgbj10Lzh8MDtyZXR1cm4ocltuXXxyW24rMV08PDgpPj4odCY3KSZhfSxYPWZ1bmN0aW9uKHIsdCl7dmFyIGE9dC84fDA7cmV0dXJuKHJbYV18clthKzFdPDw4fHJbYSsyXTw8MTYpPj4odCY3KX0saDE9ZnVuY3Rpb24ocil7cmV0dXJuKHIrNykvOHwwfSxqPWZ1bmN0aW9uKHIsdCxh
KXsodD09bnVsbHx8dDwwKSYmKHQ9MCksKGE9PW51bGx8fGE+ci5sZW5ndGgpJiYoYT1yLmxlbmd0aCk7dmFyIG49bmV3KHIuQllURVNfUEVSX0VMRU1FTlQ9PTI/eDpyLkJZVEVTX1BFUl9FTEVNRU5UPT00P086cykoYS10KTtyZXR1cm4gbi5zZXQoci5zdWJhcnJheSh0LGEpKSxufSxtMT1bInVuZXhwZWN0ZWQgRU9GIiwiaW52YWxpZCBibG9jayB0eXBlIiwiaW52YWxpZCBsZW5ndGgvbGl0ZXJhbCIsImludmFsaWQgZGlzdGFuY2UiLCJzdHJlYW0gZmluaXNoZWQiLCJubyBzdHJlYW0gaGFuZGxlciIsLCJubyBjYWxsYmFjayIsImludmFsaWQgVVRGLTggZGF0YSIsImV4dHJhIGZpZWxkIHRvbyBsb25nIiwiZGF0ZSBub3QgaW4gcmFuZ2UgMTk4MC0yMDk5IiwiZmlsZW5hbWUgdG9vIGxvbmciLCJzdHJlYW0gZmluaXNoaW5nIiwiaW52YWxpZCB6aXAgZGF0YSJdLGc9ZnVuY3Rpb24ocix0LGEpe3ZhciBuPW5ldyBFcnJvcih0fHxtMVtyXSk7aWYobi5jb2RlPXIsRXJyb3IuY2FwdHVyZVN0YWNrVHJhY2UmJkVycm9yLmNhcHR1cmVTdGFja1RyYWNlKG4sZyksIWEpdGhyb3cgbjtyZXR1cm4gbn0sYjE9ZnVuY3Rpb24ocix0LGEpe3ZhciBuPXIubGVuZ3RoO2lmKCFufHxhJiZhLmYmJiFhLmwpcmV0dXJuIHR8fG5ldyBzKDApO3ZhciB2PSF0fHxhLGk9IWF8fGEuaTthfHwoYT17fSksdHx8KHQ9bmV3IHMobiozKSk7dmFyIGM9ZnVuY3Rpb24odTEpe3ZhciB2MT10Lmxlbmd0aDtpZih1MT52MSl7dmFyIGYxPW5ldyBzKE1hdGgubWF4KHYxKjIsdTEpKTtmMS5zZXQodCksdD1mMX19LGY9YS5mfHwwLG89YS5wfHwwLGU9YS5ifHwwLGw9YS5sLGI9YS5kLGQ9YS5tLHk9YS5uLFI9bio4O2Rve2lmKCFsKXtmPWgocixvLDEpO3ZhciBZPWgocixvKzEsMyk7aWYobys9MyxZKWlmKFk9PTEpbD1nMSxiPXcxLGQ9OSx5PTU7ZWxzZSBpZihZPT0yKXt2YXIgUz1oKHIsbywzMSkrMjU3LHIxPWgocixvKzEwLDE1KSs0LHQxPVMraChyLG8rNSwzMSkrMTtvKz0xNDtmb3IodmFyIEY9bmV3IHModDEpLEc9bmV3IHMoMTkpLHc9MDt3PHIxOysrdylHW2wxW3ddXT1oKHIsbyt3KjMsNyk7bys9cjEqMztmb3IodmFyIGExPVcoRyksQjE9KDE8PGExKS0xLFIxPXooRyxhMSwxKSx3PTA7dzx0MTspe3ZhciBuMT1SMVtoKHIsbyxCMSldO28rPW4xJjE1O3ZhciBwPW4xPj4+NDtpZihwPDE2KUZbdysrXT1wO2Vsc2V7dmFyIFQ9MCxOPTA7Zm9yKHA9PTE2PyhOPTMraChyLG8sMyksbys9MixUPUZbdy0xXSk6cD09MTc/KE49MytoKHIsbyw3KSxvKz0zKTpwPT0xOCYmKE49MTEraChyLG8sMTI3KSxvKz03KTtOLS07KUZbdysrXT1UfX12YXIgaTE9Ri5zdWJhcnJheSgwLFMpLF89Ri5zdWJhcnJheShTKTtkPVcoaTEpLHk9VyhfKSxsPXooaTEsZCwxKSxiPXooXyx5LDEpfWVsc2UgZygxKTtlbHNle3ZhciBwPWgxKG8pKzQsTD1yW3AtNF18cltwLTNdPDw4LFo9cCtMO2lmKFo+bil7aSYmZygwKTticmVha312JiZjKGUrTCksdC5zZXQoci5zdWJhcnJheShwLFopLGUpLGEuYj1lKz1M
LGEucD1vPVoqOCxhLmY9Zjtjb250aW51ZX1pZihvPlIpe2kmJmcoMCk7YnJlYWt9fXYmJmMoZSsxMzEwNzIpO2Zvcih2YXIgWTE9KDE8PGQpLTEsRjE9KDE8PHkpLTEsJD1vOzskPW8pe3ZhciBUPWxbWChyLG8pJlkxXSxrPVQ+Pj40O2lmKG8rPVQmMTUsbz5SKXtpJiZnKDApO2JyZWFrfWlmKFR8fGcoMiksazwyNTYpdFtlKytdPWs7ZWxzZSBpZihrPT0yNTYpeyQ9byxsPW51bGw7YnJlYWt9ZWxzZXt2YXIgbzE9ay0yNTQ7aWYoaz4yNjQpe3ZhciB3PWstMjU3LE09SFt3XTtvMT1oKHIsbywoMTw8TSktMSkrUVt3XSxvKz1NfXZhciBQPWJbWChyLG8pJkYxXSxEPVA+Pj40O1B8fGcoMyksbys9UCYxNTt2YXIgXz1kMVtEXTtpZihEPjMpe3ZhciBNPUlbRF07Xys9WChyLG8pJigxPDxNKS0xLG8rPU19aWYobz5SKXtpJiZnKDApO2JyZWFrfXYmJmMoZSsxMzEwNzIpO2Zvcih2YXIgZTE9ZStvMTtlPGUxO2UrPTQpdFtlXT10W2UtX10sdFtlKzFdPXRbZSsxLV9dLHRbZSsyXT10W2UrMi1fXSx0W2UrM109dFtlKzMtX107ZT1lMX19YS5sPWwsYS5wPSQsYS5iPWUsYS5mPWYsbCYmKGY9MSxhLm09ZCxhLmQ9YixhLm49eSl9d2hpbGUoIWYpO3JldHVybiBlPT10Lmxlbmd0aD90OmoodCwwLGUpfSx5MT1uZXcgcygwKSxFPWZ1bmN0aW9uKHIsdCl7cmV0dXJuIHJbdF18clt0KzFdPDw4fSxtPWZ1bmN0aW9uKHIsdCl7cmV0dXJuKHJbdF18clt0KzFdPDw4fHJbdCsyXTw8MTZ8clt0KzNdPDwyNCk+Pj4wfSxxPWZ1bmN0aW9uKHIsdCl7cmV0dXJuIG0ocix0KSttKHIsdCs0KSo0Mjk0OTY3Mjk2fTtmdW5jdGlvbiBFMShyLHQpe3JldHVybiBiMShyLHQpfXZhciBBPXR5cGVvZiBUZXh0