@banuba/webar
Version:
Banuba WebAR SDK
1,083 lines (1,029 loc) • 493 kB
JavaScript
// Transpiler helpers for class-field initialization (esbuild-style).
// `uh` aliases Object.defineProperty. `fh` installs a value on `target`:
// via a full writable/enumerable/configurable descriptor when the key is
// already present on the object or its prototype chain, plain assignment
// otherwise. `re` normalizes non-symbol keys to strings and returns the
// assigned value.
var uh = Object.defineProperty;
var fh = (target, key, value) =>
  key in target
    ? uh(target, key, { enumerable: !0, configurable: !0, writable: !0, value })
    : (target[key] = value);
var re = (target, key, value) => {
  fh(target, typeof key != "symbol" ? key + "" : key, value);
  return value;
};
// Monotonically increasing id for timer requests.
let ch = 0;
// `vs` hands out the next request id. `ws` is the base64-encoded source
// of a tiny Web Worker script:
//   (function(){"use strict";addEventListener("message",({data:t})=>{
//     const s={id:t.id};setTimeout(postMessage,t.timeout,s)})})();
// i.e. the worker echoes { id } back after `timeout` ms — worker timers
// keep firing even in hidden/background tabs. `wo` wraps the decoded
// script in a Blob, created only in browser-like environments (guarded
// by `typeof window` and Blob support).
const vs = () => ch++, ws = "KGZ1bmN0aW9uKCl7InVzZSBzdHJpY3QiO2FkZEV2ZW50TGlzdGVuZXIoIm1lc3NhZ2UiLCh7ZGF0YTp0fSk9Pntjb25zdCBzPXtpZDp0LmlkfTtzZXRUaW1lb3V0KHBvc3RNZXNzYWdlLHQudGltZW91dCxzKX0pfSkoKTsK", wo = typeof window < "u" && window.Blob && new Blob([atob(ws)], { type: "text/javascript;charset=utf-8" });
// Builds the timer Worker. Tries a Blob object URL first; if object-URL
// creation yields nothing or Worker construction throws (e.g. CSP
// restrictions), falls back to a base64 data: URL carrying the same
// script. The object URL is revoked in `finally` right after the Worker
// is constructed.
// NOTE(review): immediate revocation assumes the browser has already
// started fetching the worker script by then — confirm on target browsers.
function dh() {
let t;
try {
if (t = wo && (window.URL || window.webkitURL).createObjectURL(wo), !t)
throw "";
return new Worker(t);
} catch {
return new Worker("data:application/javascript;base64," + ws);
} finally {
t && (window.URL || window.webkitURL).revokeObjectURL(t);
}
}
// Lazily-created shared timer Worker instance.
let Tn;
// `Ti` maps pending request id -> callback. `Es` is a Worker-backed
// setTimeout (fires even while the tab is hidden): it registers the
// callback, posts { id, timeout } to the worker, and runs + drops the
// callback when the worker echoes the id back. Returns the request id.
// `hh`/`Eo`: target frame rate (60 fps) and the frame interval in ms.
// `Fn`: callbacks queued for the next emulated animation frame.
const Ti = /* @__PURE__ */ new Map(), Es = (t, i) => {
const o = vs(), s = { id: o, timeout: i };
return Ti.set(s.id, t), Tn || (Tn = new dh(), Tn.onmessage = ({ data: b }) => {
const w = Ti.get(b.id);
Ti.delete(b.id), w();
}), Tn.postMessage(s), o;
}, hh = 60, Eo = 1e3 / hh, Fn = [];
// Timestamp of the last emulated animation-frame tick.
let xo = 0;
// `xs`: requestAnimationFrame emulation driven by the worker timer,
// used while the tab is hidden. Callbacks scheduled within the same
// frame window are batched and fired together, aligned to the 60 fps
// grid relative to the previous tick.
const xs = (t) => {
const i = vs();
if (Fn.length === 0) {
const o = performance.now(), s = Eo - (o - xo) % Eo;
Es(() => {
const b = xo = performance.now(), w = [...Fn];
Fn.length = 0, w.forEach((v) => v(b));
}, s);
}
return Fn.push(t), i;
// `ph`: the "background tab" timers module (worker-driven rAF/timeout).
// `bh`: plain window.setTimeout passthrough.
// `Ln`: native rAF requests still pending, kept so they can be migrated
// to the worker-based emulation when the tab is hidden (listener below).
// `_h`: native requestAnimationFrame that tracks pending requests in `Ln`.
}, ph = /* @__PURE__ */ Object.freeze(/* @__PURE__ */ Object.defineProperty({
__proto__: null,
requestAnimationFrame: xs,
setTimeout: Es
}, Symbol.toStringTag, { value: "Module" })), bh = (...t) => window.setTimeout(...t), Ln = /* @__PURE__ */ new Map(), _h = (t) => {
const i = window.requestAnimationFrame((...o) => {
Ln.delete(i), t(...o);
});
return Ln.set(i, t), i;
};
// When the tab stops being visible, native rAF callbacks stop firing:
// cancel every still-pending native request and re-schedule its callback
// on the worker-driven emulation (`xs`) so processing continues in the
// background.
typeof document < "u" && document.addEventListener("visibilitychange", () => {
document.visibilityState !== "visible" && Ln.forEach((t, i) => {
Ln.delete(i), cancelAnimationFrame(i), xs(t);
});
});
// `mh`: the "visible tab" timers module backed by native window timers.
const mh = /* @__PURE__ */ Object.freeze(/* @__PURE__ */ Object.defineProperty({
__proto__: null,
requestAnimationFrame: _h,
setTimeout: bh
// `gh`: document, or a stub reporting "hidden" outside the browser.
// `Ss`: picks the native (visible) or worker-driven (hidden) timer set.
// `qt`/`Ts`: visibility-aware requestAnimationFrame / setTimeout.
// `Fs`: microtask-based nextTick. `ca`: timers facade object.
}, Symbol.toStringTag, { value: "Module" })), gh = typeof document < "u" ? document : { visibilityState: "hidden" }, Ss = () => gh.visibilityState === "visible" ? mh : ph, qt = (t) => Ss().requestAnimationFrame(t), Ts = (t, i) => Ss().setTimeout(t, i), Fs = (t) => Promise.resolve().then(t), ca = {
requestAnimationFrame: qt,
setTimeout: Ts
// `hm`: frozen module exposing the public timer utilities.
}, hm = /* @__PURE__ */ Object.freeze(/* @__PURE__ */ Object.defineProperty({
__proto__: null,
nextTick: Fs,
requestAnimationFrame: qt,
setTimeout: Ts,
timers: ca
// `yh`: promise resolving on the next (visibility-aware) animation frame.
// `da(fps)`: method decorator factory throttling an async generator to
// `fps` frames/second; a value fed back into the generator via next()
// becomes the new fps.
}, Symbol.toStringTag, { value: "Module" })), yh = () => new Promise((t) => qt(t)), da = (t = -1) => function(i, o, s) {
const b = s, w = b.value;
return { ...b, value: async function* (...D) {
const L = w.apply(this, D);
let N = 0, X = 0;
for (; ; ) {
// `ie`: target frame interval in ms; `K`: 10% tolerance. Wait on
// animation frames until (interval - tolerance) has elapsed since
// the previous yield.
const ie = 1e3 / t, K = 0.1 * ie;
for (; (X = performance.now()) - N < ie - K; )
await yh();
N = X;
const { done: V, value: ve } = await L.next();
if (V)
return ve;
const ue = yield ve;
typeof ue < "u" && (t = ue);
}
} };
// `On`: creates a hidden, muted, autoplaying <video> element for a
// MediaStream, URL string, or Blob; resolves with the element once
// playback starts. Object URLs are revoked and the element removed when
// its media is emptied; `ended` clears the source.
}, On = async (t, i = {}) => new Promise((o) => {
const s = document.createElement("video");
if (s.muted = !0, s.controls = !1, s.playsInline = !0, Object.assign(s, i), t instanceof globalThis.MediaStream)
s.srcObject = t, s.addEventListener("ended", () => s.srcObject = null, { once: !0 }), t.addEventListener("inactive", () => s.dispatchEvent(new CustomEvent("ended")), {
once: !0
});
else {
if (typeof t != "string") {
const w = t = URL.createObjectURL(t);
s.addEventListener("emptied", () => URL.revokeObjectURL(w), { once: !0 });
}
s.crossOrigin = "anonymous", s.src = t, s.addEventListener("ended", () => s.src = "", { once: !0 });
}
s.style.position = "fixed", s.style.zIndex = "-9999999", s.style.opacity = "0.0000000001", document.body.appendChild(s), s.addEventListener("emptied", () => s.remove(), { once: !0 });
// NOTE(review): the interval only reads readyState — apparently a
// keep-alive poll until playback starts; it is cleared on "play".
const b = setInterval(() => s.readyState, 300);
s.addEventListener("play", () => clearInterval(b), { once: !0 }), s.addEventListener("play", () => o(s), { once: !0 }), s.addEventListener("loadedmetadata", () => s.play(), { once: !0 });
// `vh`: loads an image from a URL string or Blob/File (via object URL).
}), vh = (t) => new Promise((i, o) => {
const s = document.createElement("img");
s.onload = () => i(s), s.onerror = o, s.crossOrigin = "anonymous", s.src = typeof t == "string" ? t : URL.createObjectURL(t);
// `So`: per-label averaged durations. `wh`: linear interpolation.
// `Yi`/`Fi`: performance mark name builders. `ha`: starts a measurement
// under a unique internal name.
}), So = /* @__PURE__ */ new Map(), wh = (t, i, o) => t * (1 - o) + i * o, Yi = (t) => `webar::${t}:start`, Fi = (t) => `webar::${t}:end`, ha = (t) => {
let i = { internalName: t + ":" + Math.random() };
return performance.mark(Yi(i.internalName)), i;
// `pa`: ends a measurement started by `ha`, clears its marks/measures,
// and returns the instant duration plus an exponential moving average
// (smoothing factor 0.05) keyed by the label before the first ":".
}, pa = (t) => {
const i = t.internalName;
performance.mark(Fi(i));
let o = performance.measure(i, Yi(i), Fi(i));
o || (o = performance.getEntriesByName(i)[0]), performance.clearMarks(Yi(i)), performance.clearMarks(Fi(i)), performance.clearMeasures(i);
const { duration: s } = o, b = i.split(":")[0];
let { averagedDuration: w = 0 } = So.get(b) || {};
return w = wh(w, s, 0.05), So.set(b, { averagedDuration: w }), { instantDuration: s, averagedDuration: w };
// `Rs(message, log?)`: method decorator that emits a deprecation warning
// (console.warn by default) before delegating to the original method.
}, Rs = (t, i = (o) => console.warn(o)) => function(o, s, b) {
const w = b.value;
if (typeof w != "function")
throw new TypeError("Only functions can be marked as deprecated");
return { ...b, value: function(...D) {
return i.call(
this,
`DEPRECATION: ${o.constructor.name}.${s}() is deprecated. ${t}`
), w.call(this, ...D);
} };
};
/**
 * Minimal event-emitter facade over a private EventTarget.
 * `removeAllEventListeners` drops every subscription at once by swapping
 * in a fresh EventTarget instance.
 */
let Qt = class {
  constructor() {
    re(this, "_emitter", new EventTarget());
  }
  addEventListener(type, listener, options) {
    this._emitter.addEventListener(type, listener, options);
  }
  removeEventListener(type, listener, options) {
    this._emitter.removeEventListener(type, listener, options);
  }
  dispatchEvent(event) {
    return this._emitter.dispatchEvent(event);
  }
  removeAllEventListeners() {
    this._emitter = new EventTarget();
  }
};
const Eh = (t, i, o) => fetch(t, i).then((s) => {
if (!s.body)
return s;
let b = 0;
const w = Number(s.headers.get("content-length") || 0), v = s.body.getReader();
return new Response(
new ReadableStream({
async start(p) {
for (; ; ) {
const { done: D, value: L } = await v.read();
if (D ? b = w : b += L.byteLength, o?.onProgress?.({ total: w, transferred: b }), D)
break;
p.enqueue(L);
}
p.close();
}
}),
s
);
}), xh = () => (
// The meta.env.SUPPORTED_BROWSERS will be replaced during build with RegExp, see vite.config.js
/Edge?\/(79|[89]\d|\d{3,})(\.\d+|)(\.\d+|)|Firefox\/(6[5-9]|[7-9]\d|\d{3,})\.\d+(\.\d+|)|Chrom(ium|e)\/(5[7-9]|[6-9]\d|\d{3,})\.\d+(\.\d+|)([\d.]+$|.*Safari\/(?![\d.]+ Edge\/[\d.]+$))|Maci.* Version\/(1[5-9]|[2-9]\d|\d{3,})\.\d+([,.]\d+|)( Mobile\/\w+|) Safari\/|Chrome.+OPR\/(4[4-9]|[5-9]\d|\d{3,})\.\d+\.\d+|(CPU[ +]OS|iPhone[ +]OS|CPU[ +]iPhone|CPU IPhone OS|CPU iPad OS)[ +]+(1[5-9]|[2-9]\d|\d{3,})[._]\d+([._]\d+|)|Mobile Safari.+OPR\/(7[2-9]|[89]\d|\d{3,})\.\d+\.\d+|Android.+Chrom(ium|e)\/(10[7-9]|1[1-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Android.+(UC? ?Browser|UCWEB|U3)[ /]?(1[3-9]|[2-9]\d|\d{3,})\.\d+\.\d+|SamsungBrowser\/([7-9]|\d{2,})\.\d+|Android.+MQ{2}Browser\/(1[3-9]|[2-9]\d|\d{3,})(\.\d+|)(\.\d+|)|baidubrowser[\s/](1[3-9]|[2-9]\d|\d{3,})(\.\d+|)(\.\d+|)/.test(navigator.userAgent)
), Sh = typeof window < "u" && /^((?!chrome|android).)*safari/i.test(window.navigator?.userAgent), Cs = typeof OffscreenCanvas < "u" && !Sh, As = {
// https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/WebGL_best_practices#avoid_alphafalse_which_can_be_expensive
alpha: !0,
antialias: !1,
depth: !1,
// since this context is designed to process video, it's better to be synchronized with the browser renderer
desynchronized: !1,
// avoid setting `powerPreference` to `"high-performance"` - it highly increases GPU usage
// powerPreference: "high-performance",
premultipliedAlpha: !1,
preserveDrawingBuffer: !1,
stencil: !1
};
// Shared scratch WebGL2 context used for GPU pixel readback (lazy).
let ne;
// `Th`: one-shot probe — true when the WebGL2 implementation can read an
// RGB framebuffer back as tightly-packed RGB (its
// IMPLEMENTATION_COLOR_READ_FORMAT reports RGB); false on unsupported
// browsers or when context creation fails.
const Th = (() => {
if (typeof window > "u" || !xh() || (ne ?? (ne = ba().getContext("webgl2", As)), ne === null))
return !1;
const t = ne.createTexture();
ne.bindTexture(ne.TEXTURE_2D, t), ne.texImage2D(ne.TEXTURE_2D, 0, ne.RGB, 1, 1, 0, ne.RGB, ne.UNSIGNED_BYTE, null);
const i = ne.createFramebuffer();
ne.bindFramebuffer(ne.FRAMEBUFFER, i), ne.framebufferTexture2D(ne.FRAMEBUFFER, ne.COLOR_ATTACHMENT0, ne.TEXTURE_2D, t, 0);
const o = ne.getParameter(ne.IMPLEMENTATION_COLOR_READ_FORMAT);
return ne.bindFramebuffer(ne.FRAMEBUFFER, null), ne.bindTexture(ne.TEXTURE_2D, null), ne.deleteFramebuffer(i), ne.deleteTexture(t), o === ne.RGB;
// `Fh(source, dest, rect, format)`: uploads `source` as a texture,
// attaches it to a framebuffer, and asynchronously reads the `rect`
// region back into the ArrayBufferView `dest` through a PIXEL_PACK
// buffer gated by a GPU fence (avoids a blocking readPixels).
})(), Fh = async (t, i, o, s = "RGBA") => {
ne ?? (ne = ba().getContext("webgl2", As)), ne.canvas.width = t.width, ne.canvas.height = t.height, s === "RGB" && ne.pixelStorei(ne.PACK_ALIGNMENT, 1);
const b = ne.createTexture();
ne.bindTexture(ne.TEXTURE_2D, b), ne.texParameteri(ne.TEXTURE_2D, ne.TEXTURE_MIN_FILTER, ne.NEAREST), ne.texParameteri(ne.TEXTURE_2D, ne.TEXTURE_MAG_FILTER, ne.LINEAR), ne.texImage2D(ne.TEXTURE_2D, 0, ne[s], ne[s], ne.UNSIGNED_BYTE, t);
const w = ne.createFramebuffer();
ne.bindFramebuffer(ne.FRAMEBUFFER, w), ne.framebufferTexture2D(ne.FRAMEBUFFER, ne.COLOR_ATTACHMENT0, ne.TEXTURE_2D, b, 0);
const v = ne.createBuffer();
ne.bindBuffer(ne.PIXEL_PACK_BUFFER, v), ne.bufferData(ne.PIXEL_PACK_BUFFER, i.byteLength, ne.STREAM_READ), ne.readPixels(
o.x,
o.y,
o.width,
o.height,
ne[s],
ne.UNSIGNED_BYTE,
0
), ne.bindBuffer(ne.PIXEL_PACK_BUFFER, null), ne.bindFramebuffer(ne.FRAMEBUFFER, null), ne.deleteFramebuffer(w), ne.bindTexture(ne.TEXTURE_2D, null), ne.deleteTexture(b);
const p = ne.fenceSync(ne.SYNC_GPU_COMMANDS_COMPLETE, 0);
// BUG FIX: `buffer` is an accessor property of ArrayBufferView, not a
// method — `i.buffer()` threw "i.buffer is not a function" before
// getBufferSubData could run. The DataView spans the whole underlying
// buffer; `i.byteOffset`/`i.byteLength` select the view's byte range.
ne.flush(), await Rh(ne, p).finally(() => ne.deleteSync(p)), ne.bindBuffer(ne.PIXEL_PACK_BUFFER, v), ne.getBufferSubData(
ne.PIXEL_PACK_BUFFER,
0,
new DataView(i.buffer),
i.byteOffset,
i.byteLength
), ne.bindBuffer(ne.PIXEL_PACK_BUFFER, null), ne.deleteBuffer(v);
// `Rh`: resolves once the GPU fence signals, polling clientWaitSync
// every 2 ms on the visibility-aware timer; rejects on WAIT_FAILED.
}, Rh = (t, i) => new Promise(
(o, s) => function b() {
const w = t.clientWaitSync(i, 0, 0);
if (w === t.WAIT_FAILED)
return s(new Error("GPU operations complete wait failed"));
if (w === t.CONDITION_SATISFIED || w === t.ALREADY_SIGNALED)
return o();
ca.setTimeout(b, 2);
}()
);
// Creates a DOM <canvas> element with the given size (defaults 256x128).
function Ch(t = 256, i = 128) {
  const canvas = document.createElement("canvas");
  canvas.width = t;
  canvas.height = i;
  return canvas;
}
// Creates an OffscreenCanvas with the given size (defaults 256x128).
function Ah(width = 256, height = 128) {
  return new OffscreenCanvas(width, height);
}
// Canvas factory: OffscreenCanvas when usable (`Cs`), DOM canvas otherwise.
function ba(t = 256, i = 128) {
  if (Cs) {
    return Ah(t, i);
  }
  return Ch(t, i);
}
/**
 * Builds a frame-options helper bound to the given settings.
 * `getFrameOptions` takes { displayWidth, displayHeight, visibleRect? },
 * applies the optional `crop(width, height) -> [dx, dy, w, h]` callback
 * to the visible rectangle, and returns normalized frame options.
 * `getSourceOptions` derives the same from an element's intrinsic size
 * (videoWidth/videoHeight for <video>, width/height otherwise).
 */
const _a = (t = {}) => {
  const buildOptions = ({ displayWidth, displayHeight, visibleRect = null }) => {
    let x = visibleRect?.x ?? 0;
    let y = visibleRect?.y ?? 0;
    let width = visibleRect?.width ?? displayWidth;
    let height = visibleRect?.height ?? displayHeight;
    if (t.crop) {
      const [dx, dy, croppedWidth, croppedHeight] = t.crop(width, height);
      x += dx;
      y += dy;
      width = croppedWidth;
      height = croppedHeight;
    }
    return {
      visibleRect: { x, y, width, height },
      displayWidth: width,
      displayHeight: height,
      horizontalFlip: !!t.horizontalFlip
    };
  };
  return {
    getSourceOptions: (source) => {
      const w = source instanceof HTMLVideoElement ? source.videoWidth : source.width;
      const h = source instanceof HTMLVideoElement ? source.videoHeight : source.height;
      return buildOptions({ displayWidth: w, displayHeight: h });
    },
    getFrameOptions: buildOptions
  };
};
/**
 * Frame: wraps a texture source (HTMLVideoElement, canvas, image, ...)
 * with a visible sub-rectangle (crop) and exposes a
 * WebCodecs-VideoFrame-like API: allocationSize / copyTo / close.
 */
class Un {
// i: texture source; o: { visibleRect?, horizontalFlip? };
// s: optional cleanup callback invoked from close().
constructor(i, o = {}, s = null) {
re(this, "_source", null);
re(this, "_visibleRect", { x: 0, y: 0, width: 0, height: 0 });
re(this, "_deleter");
re(this, "horizontalFlip", !1);
// <video> reports its intrinsic size via videoWidth/videoHeight.
const b = i instanceof HTMLVideoElement ? i.videoWidth : i.width, w = i instanceof HTMLVideoElement ? i.videoHeight : i.height;
this._visibleRect.x = o.visibleRect?.x ?? 0, this._visibleRect.y = o.visibleRect?.y ?? 0, this._visibleRect.width = o.visibleRect?.width ?? b, this._visibleRect.height = o.visibleRect?.height ?? w, this.horizontalFlip = o.horizontalFlip ?? this.horizontalFlip, i.width = b, i.height = w, this._source = i, this._deleter = s;
}
/** @internal */
get texture() {
// Hands out the raw source only while the visible rect matches the
// source's full size (i.e. no crop in effect); loose == presumably
// tolerates string-valued width/height attributes — TODO confirm.
return this._source?.width == this.displayWidth && this._source?.height == this.displayHeight ? this._source : null;
}
get displayWidth() {
return this._visibleRect.width;
}
get displayHeight() {
return this._visibleRect.height;
}
/** Pixel format of the Frame */
get format() {
// "RGB" when the scratch WebGL2 context supports packed RGB readback
// (see the Th probe), otherwise "RGBA"; null once the frame is closed.
return this._source ? Th ? "RGB" : "RGBA" : null;
}
/** @returns The number of bytes required to hold the Frame pixels */
allocationSize() {
if (!this.format)
throw new Error("Failed to execute 'allocationSize' on 'Frame': Frame is closed.");
const { width: i, height: o } = { width: this._visibleRect.width, height: this._visibleRect.height };
// format.length doubles as bytes-per-pixel: "RGB" -> 3, "RGBA" -> 4.
return i * o * this.format.length;
}
/** Copies the Frame pixels to the destination */
async copyTo(i) {
if (!this._source)
throw new Error("Failed to execute 'copyTo' on 'Frame': Frame is closed.");
return await Fh(this._source, i, this._visibleRect, this.format), [];
}
/** Releases GPU resources held by the Frame */
close() {
this._deleter && this._deleter(), this._source = null;
}
}
// TypeScript-style __decorateClass helper (`kh`): applies the decorators
// in `t` in reverse order to a property descriptor (when `s` is truthy)
// or to the target itself, then redefines the decorated member via
// Object.defineProperty. `Is` later holds the computed async-iterator
// method key of the image source class.
var Ih = Object.defineProperty, Ph = Object.getOwnPropertyDescriptor, kh = (t, i, o, s) => {
for (var b = s > 1 ? void 0 : s ? Ph(i, o) : i, w = t.length - 1, v; w >= 0; w--)
(v = t[w]) && (b = (s ? v(i, o, b) : v(b)) || b);
return s && b && Ih(i, o, b), b;
}, Is;
// Image input source: async-iterating it loads the image (URL or Blob)
// and yields a single Un frame whose deleter revokes the object URL.
let Dh = class {
constructor(i) {
re(this, "_src");
/** @internal */
re(this, "kind", "image");
this._src = i;
}
// Throttled to 30 fps by the da(30) decorator applied below.
async *[Is = Symbol.asyncIterator](i) {
const o = await vh(this._src), s = _a(i);
yield new Un(o, s.getSourceOptions(o), () => {
URL.revokeObjectURL(o.src), o.src = "";
});
}
};
// Apply the 30 fps throttle decorator to the async iterator method.
kh([
da(30)
], Dh.prototype, Is, 1);
// Decorator plumbing for the MediaStream source class (same pattern as
// the image source): `Bh` applies decorators, `Ps` holds the computed
// async-iterator key, `zr` is the class binding used for the static
// per-stream instance cache.
var Lh = Object.defineProperty, Mh = Object.getOwnPropertyDescriptor, Bh = (t, i, o, s) => {
for (var b = s > 1 ? void 0 : s ? Mh(i, o) : i, w = t.length - 1, v; w >= 0; w--)
(v = t[w]) && (b = (s ? v(i, o, b) : v(b)) || b);
return s && b && Lh(i, o, b), b;
}, Ps, zr;
const ks = (zr = class {
/**
* Creates MediaStream input from {@link https://developer.mozilla.org/en-US/docs/Web/API/MediaStream/MediaStream | MediaStream}
* @example
* ```ts
* const stream = new MediaStream(
* await navigator.mediaDevices.getUserMedia({ video: true })
* )
* ```
*/
constructor(i) {
// @ts-expect-error: Property '_stream' has no initializer and is not definitely assigned in the constructor.
re(this, "_stream");
/** @internal */
re(this, "kind", "stream");
// One wrapper per MediaStream: wrapping the same stream twice returns
// the cached instance (constructor `return` overrides `this`).
if (!zr.cache.has(i))
zr.cache.set(i, this);
else
return zr.cache.get(i);
this._stream = i;
}
// Throttled to 30 fps via Bh([da(30)]) below. Prefers the
// MediaStreamTrackProcessor path (VideoFrame per track chunk); falls
// back to sampling a hidden <video> element when the API is missing.
async *[Ps = Symbol.asyncIterator](i) {
const o = _a(i);
if ("MediaStreamTrackProcessor" in window) {
const b = new MediaStreamTrackProcessor({ track: this._stream.getVideoTracks()[0] }).readable.getReader();
try {
for (; ; ) {
const { done: w, value: v } = await b.read();
if (w)
return;
const p = new VideoFrame(v, o.getFrameOptions(v));
// horizontalFlip defaults to true for stream frames unless the
// caller overrides it.
p.horizontalFlip = i?.horizontalFlip ?? !0, v.close(), yield p;
}
} finally {
b.releaseLock();
}
} else {
const s = await On(this._stream), b = "requestVideoFrameCallback" in s ? s.requestVideoFrameCallback.bind(s) : requestAnimationFrame;
for (; !s.paused; )
await new Promise(b), yield new Un(s, o.getSourceOptions(s));
URL.revokeObjectURL(s.src), s.src = "", s.srcObject = null;
}
}
/** Stops underlying media stream */
stop() {
for (const i of this._stream.getVideoTracks())
i.stop();
this._stream && zr.cache.delete(this._stream);
}
}, re(zr, "cache", /* @__PURE__ */ new WeakMap()), zr);
// Apply the 30 fps throttle decorator to the async iterator method.
Bh([
da(30)
], ks.prototype, Ps, 1);
let Ri = ks;
// Decorator plumbing for the video source class; `Ds` holds the computed
// async-iterator method key.
var Nh = Object.defineProperty, Oh = Object.getOwnPropertyDescriptor, Uh = (t, i, o, s) => {
for (var b = s > 1 ? void 0 : s ? Oh(i, o) : i, w = t.length - 1, v; w >= 0; w--)
(v = t[w]) && (b = (s ? v(i, o, b) : v(b)) || b);
return s && b && Nh(i, o, b), b;
}, Ds;
// Default <video> options merged under user options (no looping).
const $h = {
loop: !1
};
// Video input source: lazily creates a hidden autoplaying <video> (via
// On) for a URL/Blob and yields Un frames while playback continues.
class Wh {
/** @param options - options to be merged with {@link defaultVideoOptions} */
constructor(i, o) {
re(this, "_src");
re(this, "_options");
re(this, "_video", null);
/** @internal */
re(this, "kind", "video");
this._src = i, this._options = {
...$h,
...o
};
}
// Throttled to 30 fps via Uh([da(30)]) below; one frame per
// requestVideoFrameCallback tick (or rAF fallback) until paused.
async *[Ds = Symbol.asyncIterator](i) {
const o = await (this._video ?? (this._video = On(this._src, this._options))), s = _a(i), b = "requestVideoFrameCallback" in o ? o.requestVideoFrameCallback.bind(o) : requestAnimationFrame;
for (; !o.paused; )
await new Promise(b), yield new Un(o, s.getSourceOptions(o));
}
/** Stops underlying video */
stop() {
// Release the media element and any object URL it was playing.
this._video && this._video.then(
(i) => (URL.revokeObjectURL(i.src), i.src = "", i.srcObject = null)
), this._video = null;
}
}
// Apply the 30 fps throttle decorator to the async iterator method.
Uh([
da(30)
], Wh.prototype, Ds, 1);
const jh = `#define GLSLIFY 1
attribute vec2 a_position;
varying vec2 v_tex_uv;
void main() {
v_tex_uv.x = (a_position.x + 1.) * .5;
v_tex_uv.y = 1. - (a_position.y + 1.) * .5;
gl_Position = vec4(a_position, 0., 1.);
}
`, Gh = `precision highp float;
#define GLSLIFY 1
varying vec2 v_tex_uv;
uniform sampler2D u_texture;
uniform vec2 u_viewsize;
/**
* u_filters.x - denoising algorithm to use
* 1 - FSR
* 2 - Bilateral
* any other value - none
* u_filters.y - light correction coefficient in [0, 2]
* 1 - no light correction
*/
uniform vec2 u_filters;
// https://github.com/glslify/glslify#importing-a-glsl-module
// https://github.com/glslify/glslify#passing-references-between-modules
// Copyright (c) 2021 Advanced Micro Devices, Inc. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//
// FidelityFX FSR v1.0.2 by AMD
// ported to mpv by agyild - https://gist.github.com/agyild/82219c545228d70c5604f865ce0b0ce5
// ported to WebGL by goingdigital - https://www.shadertoy.com/view/stXSWB
// using colorspace functions from tobspr - https://github.com/tobspr/GLSL-Color-Spaces/blob/master/ColorSpaces.inc.glsl
#define SHARPENING 2.0 // Sharpening intensity: Adjusts sharpening intensity by averaging the original pixels to the sharpened result. 1.0 is the unmodified default. 0.0 to 1.0.
#define CONTRAST 2.0 // Adjusts the range the shader adapts to high contrast (0 is not all the way off). Higher values = more high contrast sharpening. 0.0 to 1.0.
#define PERFORMANCE 1 // Whether to use optimizations for performance with loss of quality
// Used to convert from linear RGB to XYZ space
const mat3 RGB_2_XYZ_2717090884 = (mat3(
0.4124564, 0.2126729, 0.0193339,
0.3575761, 0.7151522, 0.1191920,
0.1804375, 0.0721750, 0.9503041
));
// Used to convert from XYZ to linear RGB space
const mat3 XYZ_2_RGB_2717090884 = (mat3(
3.2404542,-0.9692660, 0.0556434,
-1.5371385, 1.8760108,-0.2040259,
-0.4985314, 0.0415560, 1.0572252
));
// Converts a color from linear RGB to XYZ space
vec3 rgb_to_xyz_2717090884(vec3 rgb) {
return RGB_2_XYZ_2717090884 * rgb;
}
// Converts a color from XYZ to linear RGB space
vec3 xyz_to_rgb_2717090884(vec3 xyz) {
return XYZ_2_RGB_2717090884 * xyz;
}
/* EASU stage
*
* This takes a reduced resolution source, and scales it up while preserving detail.
*
* Updates:
* stretch definition fixed. Thanks nehon for the bug report!
*/
vec3 FsrEasuCF(vec2 p) {
vec2 uv = (p + .5) / u_viewsize;
vec4 color = texture2D(u_texture, uv);
return rgb_to_xyz_2717090884(color.rgb);
}
/**** EASU ****/
void FsrEasuCon(
out vec4 con0,
out vec4 con1,
out vec4 con2,
out vec4 con3,
// This the rendered image resolution being upscaled
vec2 inputViewportInPixels,
// This is the resolution of the resource containing the input image (useful for dynamic resolution)
vec2 inputSizeInPixels,
// This is the display resolution which the input image gets upscaled to
vec2 outputSizeInPixels
)
{
// Output integer position to a pixel position in viewport.
con0 = vec4(
inputViewportInPixels.x/outputSizeInPixels.x,
inputViewportInPixels.y/outputSizeInPixels.y,
.5*inputViewportInPixels.x/outputSizeInPixels.x-.5,
.5*inputViewportInPixels.y/outputSizeInPixels.y-.5
);
// Viewport pixel position to normalized image space.
// This is used to get upper-left of 'F' tap.
con1 = vec4(1.,1.,1.,-1.)/inputSizeInPixels.xyxy;
// Centers of gather4, first offset from upper-left of 'F'.
// +---+---+
// | | |
// +--(0)--+
// | b | c |
// +---F---+---+---+
// | e | f | g | h |
// +--(1)--+--(2)--+
// | i | j | k | l |
// +---+---+---+---+
// | n | o |
// +--(3)--+
// | | |
// +---+---+
// These are from (0) instead of 'F'.
con2 = vec4(-1.,2.,1.,2.)/inputSizeInPixels.xyxy;
con3 = vec4(0.,4.,0.,0.)/inputSizeInPixels.xyxy;
}
// Filtering for a given tap for the scalar.
void FsrEasuTapF(
inout vec3 aC, // Accumulated color, with negative lobe.
inout float aW, // Accumulated weight.
vec2 off_0, // Pixel offset from resolve position to tap.
vec2 dir_0, // Gradient direction.
vec2 len_0, // Length.
float lob_0, // Negative lobe strength.
float clp_0, // Clipping point.
vec3 c_0
)
{
// Tap color.
// Rotate offset by direction.
vec2 v = vec2(dot(off_0, dir_0), dot(off_0,vec2(-dir_0.y,dir_0.x)));
// Anisotropy.
v *= len_0;
// Compute distance^2.
float d2 = min(dot(v,v),clp_0);
// Limit to the window as at corner, 2 taps can easily be outside.
// Approximation of lancos2 without sin() or rcp(), or sqrt() to get x.
// (25/16 * (2/5 * x^2 - 1)^2 - (25/16 - 1)) * (1/4 * x^2 - 1)^2
// |_______________________________________| |_______________|
// base window
// The general form of the 'base' is,
// (a*(b*x^2-1)^2-(a-1))
// Where 'a=1/(2*b-b^2)' and 'b' moves around the negative lobe.
float wB = .4 * d2 - 1.;
float wA = lob_0 * d2 -1.;
wB *= wB;
wA *= wA;
wB = 1.5625*wB-.5625;
float w= wB * wA;
// Do weighted average.
aC += c_0*w;
aW += w;
}
//------------------------------------------------------------------------------------------------------------------------------
// Accumulate direction and length.
void FsrEasuSetF(
inout vec2 dir,
inout float len,
float w,
float lA,float lB,float lC,float lD,float lE
)
{
// Direction is the '+' diff.
// a
// b c d
// e
// Then takes magnitude from abs average of both sides of 'c'.
// Length converts gradient reversal to 0, smoothly to non-reversal at 1, shaped, then adding horz and vert terms.
float lenX = max(abs(lD - lC), abs(lC - lB));
float dirX = lD - lB;
dir.x += dirX * w;
lenX = clamp(abs(dirX)/lenX,0.,1.);
lenX *= lenX;
len += lenX * w;
// Repeat for the y axis.
float lenY = max(abs(lE - lC), abs(lC - lA));
float dirY = lE - lA;
dir.y += dirY * w;
lenY = clamp(abs(dirY) / lenY,0.,1.);
lenY *= lenY;
len += lenY * w;
}
//------------------------------------------------------------------------------------------------------------------------------
void FsrEasuF(
out vec3 pix,
vec2 ip, // Integer pixel position in output.
// Constants generated by FsrEasuCon().
vec4 con0, // xy = output to input scale, zw = first pixel offset correction
vec4 con1_0,
vec4 con2_0,
vec4 con3_0
)
{
//------------------------------------------------------------------------------------------------------------------------------
// Get position of 'f'.
vec2 pp = ip * con0.xy + con0.zw; // Corresponding input pixel/subpixel
vec2 fp = floor(pp);// fp = source nearest pixel
pp -= fp; // pp = source subpixel
//------------------------------------------------------------------------------------------------------------------------------
// 12-tap kernel.
// b c
// e f g h
// i j k l
// n o
// Gather 4 ordering.
// a b
// r g
vec2 p0 = fp * con1_0.xy + con1_0.zw;
// These are from p0 to avoid pulling two constants on pre-Navi hardware.
vec2 p1 = p0 + con2_0.xy;
vec2 p2 = p0 + con2_0.zw;
vec2 p3 = p0 + con3_0.xy;
// TextureGather is not available on WebGL2
vec4 off = vec4(-.5,.5,-.5,.5)*con1_0.xxyy;
// textureGather to texture offsets
// x=west y=east z=north w=south
vec3 bC = FsrEasuCF(p0 + off.xw); float bL = bC.g + 0.5 *(bC.r + bC.b);
vec3 cC = FsrEasuCF(p0 + off.yw); float cL = cC.g + 0.5 *(cC.r + cC.b);
vec3 iC = FsrEasuCF(p1 + off.xw); float iL = iC.g + 0.5 *(iC.r + iC.b);
vec3 jC = FsrEasuCF(p1 + off.yw); float jL = jC.g + 0.5 *(jC.r + jC.b);
vec3 fC = FsrEasuCF(p1 + off.yz); float fL = fC.g + 0.5 *(fC.r + fC.b);
vec3 eC = FsrEasuCF(p1 + off.xz); float eL = eC.g + 0.5 *(eC.r + eC.b);
vec3 kC = FsrEasuCF(p2 + off.xw); float kL = kC.g + 0.5 *(kC.r + kC.b);
vec3 lC = FsrEasuCF(p2 + off.yw); float lL = lC.g + 0.5 *(lC.r + lC.b);
vec3 hC = FsrEasuCF(p2 + off.yz); float hL = hC.g + 0.5 *(hC.r + hC.b);
vec3 gC = FsrEasuCF(p2 + off.xz); float gL = gC.g + 0.5 *(gC.r + gC.b);
vec3 oC = FsrEasuCF(p3 + off.yz); float oL = oC.g + 0.5 *(oC.r + oC.b);
vec3 nC = FsrEasuCF(p3 + off.xz); float nL = nC.g + 0.5 *(nC.r + nC.b);
//------------------------------------------------------------------------------------------------------------------------------
// Simplest multi-channel approximate luma possible (luma times 2, in 2 FMA/MAD).
// Accumulate for bilinear interpolation.
vec2 dir = vec2(0.);
float len = 0.;
FsrEasuSetF(dir, len, (1.-pp.x)*(1.-pp.y), bL, eL, fL, gL, jL);
FsrEasuSetF(dir, len, pp.x *(1.-pp.y), cL, fL, gL, hL, kL);
FsrEasuSetF(dir, len, (1.-pp.x)* pp.y , fL, iL, jL, kL, nL);
FsrEasuSetF(dir, len, pp.x * pp.y , gL, jL, kL, lL, oL);
//------------------------------------------------------------------------------------------------------------------------------
// Normalize with approximation, and cleanup close to zero.
vec2 dir2 = dir * dir;
float dirR = dir2.x + dir2.y;
bool zro = dirR < (1.0/32768.0);
dirR = inversesqrt(dirR);
#if (PERFORMANCE == 1)
if (zro) {
vec4 w = vec4(0.0);
w.x = (1.0 - pp.x) * (1.0 - pp.y);
w.y = pp.x * (1.0 - pp.y);
w.z = (1.0 - pp.x) * pp.y;
w.w = pp.x * pp.y;
pix.r = clamp(dot(w, vec4(fL, gL, jL, kL)), 0.0, 1.0);
return;
}
#elif (PERFORMANCE == 0)
dirR = zro ? 1.0 : dirR;
dir.x = zro ? 1.0 : dir.x;
#endif
dir *= vec2(dirR);
// Transform from {0 to 2} to {0 to 1} range, and shape with square.
len = len * 0.5;
len *= len;
// Stretch kernel {1.0 vert|horz, to sqrt(2.0) on diagonal}.
float stretch = dot(dir,dir) / (max(abs(dir.x), abs(dir.y)));
// Anisotropic length after rotation,
// x := 1.0 lerp to 'stretch' on edges
// y := 1.0 lerp to 2x on edges
vec2 len2 = vec2(1. +(stretch-1.0)*len, 1. -.5 * len);
// Based on the amount of 'edge',
// the window shifts from +/-{sqrt(2.0) to slightly beyond 2.0}.
float lob = .5 - .29 * len;
// Set distance^2 clipping point to the end of the adjustable window.
float clp = 1./lob;
//------------------------------------------------------------------------------------------------------------------------------
// Accumulation mixed with min/max of 4 nearest.
// b c
// e f g h
// i j k l
// n o
// Accumulation.
vec3 aC = vec3(0);
float aW = 0.;
FsrEasuTapF(aC, aW, vec2( 0.,-1.)-pp, dir, len2, lob, clp, bC);
FsrEasuTapF(aC, aW, vec2( 1.,-1.)-pp, dir, len2, lob, clp, cC);
FsrEasuTapF(aC, aW, vec2(-1., 1.)-pp, dir, len2, lob, clp, iC);
FsrEasuTapF(aC, aW, vec2( 0., 1.)-pp, dir, len2, lob, clp, jC);
FsrEasuTapF(aC, aW, vec2( 0., 0.)-pp, dir, len2, lob, clp, fC);
FsrEasuTapF(aC, aW, vec2(-1., 0.)-pp, dir, len2, lob, clp, eC);
FsrEasuTapF(aC, aW, vec2( 1., 1.)-pp, dir, len2, lob, clp, kC);
FsrEasuTapF(aC, aW, vec2( 2., 1.)-pp, dir, len2, lob, clp, lC);
FsrEasuTapF(aC, aW, vec2( 2., 0.)-pp, dir, len2, lob, clp, hC);
FsrEasuTapF(aC, aW, vec2( 1., 0.)-pp, dir, len2, lob, clp, gC);
FsrEasuTapF(aC, aW, vec2( 1., 2.)-pp, dir, len2, lob, clp, oC);
FsrEasuTapF(aC, aW, vec2( 0., 2.)-pp, dir, len2, lob, clp, nC);
//------------------------------------------------------------------------------------------------------------------------------
// Normalize and dering.
#if (PERFORMANCE == 1)
pix = aC/aW;
#elif (PERFORMANCE == 0)
vec3 min4 = min(min(fC,gC),min(jC,kC));
vec3 max4 = max(max(fC,gC),max(jC,kC));
pix=min(max4,max(min4,aC/aW));
#endif
}
void EASU( out vec4 fragColor, in vec2 fragCoord )
{
vec3 c;
vec4 con0,con1,con2,con3;
// "rendersize" refers to size of source image before upscaling.
vec2 rendersize = u_viewsize;
FsrEasuCon(
con0, con1, con2, con3, rendersize, rendersize, rendersize
);
FsrEasuF(c, fragCoord, con0, con1, con2, con3);
fragColor = vec4(xyz_to_rgb_2717090884(c.xyz), 1);
}
vec4 getPixel(vec2 pos) {
vec2 coord = (pos + .5) / u_viewsize;
coord.y = 1.0 - coord.y;
return texture2D(u_texture, coord);
}
vec4 fsr_easu_2717090884(vec2 uv) {
vec4 e = getPixel(gl_FragCoord.xy);
vec4 e_xyz = vec4(rgb_to_xyz_2717090884(e.rgb), 1);
EASU(e_xyz, (gl_FragCoord.xy + 0.5) / u_viewsize);
// fetch a 3x3 neighborhood around the pixel 'e',
// a b c
// d(e)f
// g h i
vec3 a = getPixel(gl_FragCoord.xy + vec2(-1.0,-1.0)).rgb;
vec3 b = getPixel(gl_FragCoord.xy + vec2( 0.0,-1.0)).rgb;
vec3 c = getPixel(gl_FragCoord.xy + vec2( 1.0,-1.0)).rgb;
vec3 f = getPixel(gl_FragCoord.xy + vec2( 1.0, 0.0)).rgb;
vec3 g = getPixel(gl_FragCoord.xy + vec2(-1.0, 1.0)).rgb;
vec3 h = getPixel(gl_FragCoord.xy + vec2( 0.0, 1.0)).rgb;
vec3 d = getPixel(gl_FragCoord.xy + vec2(-1.0, 0.0)).rgb;
vec3 i = getPixel(gl_FragCoord.xy + vec2( 1.0, 1.0)).rgb;;
// Soft min and max.
// a b c b
// d e f * 0.5 + d e f * 0.5
// g h i h
// These are 2.0x bigger (factored out the extra multiply).
vec3 mnRGB = min(min(min(d, e.rgb), min(f, b)), h);
vec3 mnRGB2 = min(mnRGB, min(min(a, c), min(g, i)));
mnRGB += mnRGB2;
vec3 mxRGB = max(max(max(d, e.rgb), max(f, b)), h);
vec3 mxRGB2 = max(mxRGB, max(max(a, c), max(g, i)));
mxRGB += mxRGB2;
// Smooth minimum distance to signal limit divided by smooth max.
vec3 rcpMRGB = 1.0 / mxRGB;
vec3 ampRGB = clamp(min(mnRGB, 2.0 - mxRGB) * rcpMRGB, 0.0, 1.0);
// Shaping amount of sharpening.
ampRGB = inversesqrt(ampRGB);
float peak = -3.0 * clamp(CONTRAST, 0.0, 1.0) + 8.0;
vec3 wRGB = -(1.0 / (ampRGB * peak));
vec3 rcpWeightRGB = 1.0 / (4.0 * wRGB + 1.0);
// 0 w 0
// Filter shape: w 1 w
// 0 w 0
vec3 window = (b + d) + (f + h);
vec3 outColor = clamp((window * wRGB + e.rgb) * rcpWeightRGB, 0.0, 1.0);
return vec4(mix(e.rgb, outColor, SHARPENING), e.a);
}
// https://github.com/glslify/glslify#exporting-a-glsl-module
#define DIFF 1.0
#define RADIUS 4.0
void bilateral_iter_3977570374(vec2 random_dir, vec2 radius, float diff, vec4 pixel, vec2 uv, inout vec3 result, inout float totalWeight)
{
vec2 dir = random_dir * radius;
vec3 randomPixel = texture2D(u_texture, uv + dir).xyz;
vec3 delta = randomPixel - pixel.rgb;
float weight = exp(-dot(delta, delta) / diff);
result += randomPixel * weight;
totalWeight += weight;
}
vec4 bilateral(vec2 uv)
{
vec2 radius = (RADIUS / u_viewsize);
float diff = DIFF / 255.0;
vec4 pixel = texture2D(u_texture, uv);
vec3 result = vec3(0.0, 0.0, 0.0);
float totalWeight = 0.0;
// uroll loop and substitute precalculated random vectors for GLSL 1.0 ES:
bilateral_iter_3977570374(vec2(-0.886051297,0.447155535), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(0.270759493,0.537728608), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(-0.896959424,0.440607518), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(-0.804274619,0.125076547), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(0.373693645,0.240383312), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(-0.850325704,-0.192106694), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(-0.453608066,0.889671504), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(-0.280496657,0.206442386), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(0.840040743,-0.36367026), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(-0.151598319,-0.884027064), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(-0.221440807,0.593896627), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(-0.797481239,-0.243254974), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(0.48824361,0.225083455), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(-0.0387817062,0.838459492), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(0.92897892,-0.133588716), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(-0.693672359,-0.706737161), radius, diff, pixel, uv, result, totalWeight);
result = result / totalWeight;
return vec4(result, pixel.a);
}
// https://github.com/glslify/glslify#exporting-a-glsl-module
vec3 rgb2hsv(vec3 c)
{
vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);
vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));
vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));
float d = q.x - min(q.w, q.y);
float e = 1.0e-10;
return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);
}
vec3 hsv2rgb(vec3 c)
{
vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);
vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);
return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);
}
vec4 light_correction_1117569599(vec4 c, float s)
{
vec3 hsv = rgb2hsv(c.rgb);
hsv.y = pow(hsv.y, pow(s, -0.5));
hsv.z = pow(hsv.z, s);
vec3 rgb = hsv2rgb(hsv);
return vec4(rgb, c.a);
}
// https://github.com/glslify/glslify#exporting-a-glsl-module
void main() {
vec4 c;
if (u_filters.x == 1.)
c = fsr_easu_2717090884(v_tex_uv);
else if (u_filters.x == 2.)
c = bilateral(v_tex_uv);
else
c = texture2D(u_texture, v_tex_uv);
if (u_filters.y != 1.)
c = light_correction_1117569599(c, u_filters.y);
gl_FragColor = c;
}`, Vh = (t, i, o) => {
const s = t.createProgram();
return t.attachShader(s, i), t.attachShader(s, o), t.linkProgram(s), t.useProgram(s), s;
}, To = (t, i, o) => {
const s = t.createShader(i);
return t.shaderSource(s, o), t.compileShader(s), s;
}, zh = (t) => {
const i = t.createTexture();
return t.bindTexture(t.TEXTURE_2D, i), t.texImage2D(t.TEXTURE_2D, 0, t.RGB, 1, 1, 0, t.RGB, t.UNSIGNED_BYTE, null), t.texParameteri(t.TEXTURE_2D, t.TEXTURE_WRAP_S, t.CLAMP_TO_EDGE), t.texParameteri(t.TEXTURE_2D, t.TEXTURE_WRAP_T, t.CLAMP_TO_EDGE), t.texParameteri(t.TEXTURE_2D, t.TEXTURE_MIN_FILTER, t.NEAREST), t.texParameteri(t.TEXTURE_2D, t.TEXTURE_MAG_FILTER, t.LINEAR), t.bindTexture(t.TEXTURE_2D, null), i;
}, Fo = (t, i) => {
// Stream enhancer: renders the source stream through an offscreen WebGL canvas
// that applies an optional denoise filter and exposure correction, and
// re-captures the canvas as a new MediaStream.
//   t - source MediaStream
//   i - optional initial preferences, e.g. { denoise, exposureCompensation }
// o: denoise mode (1 = FSR, 2 = bilateral, anything else = off); s: exposure coefficient.
let o = 0, s = 1;
// Offscreen canvas captured at 30 fps; program is built from the vertex (jh)
// and fragment (Gh) shader sources, with a 1x1 placeholder texture (zh).
const b = document.createElement("canvas"), w = b.captureStream(30), v = b.getContext("webgl"), p = To(v, v.VERTEX_SHADER, jh), D = To(v, v.FRAGMENT_SHADER, Gh), L = Vh(v, p, D), N = zh(v);
v.bindTexture(v.TEXTURE_2D, N);
// Full-screen quad (two triangles) bound to the a_position attribute.
const X = v.getAttribLocation(L, "a_position"), ie = v.createBuffer();
v.bindBuffer(v.ARRAY_BUFFER, ie), v.bufferData(
v.ARRAY_BUFFER,
// prettier-ignore
new Float32Array([
-1,
-1,
1,
-1,
-1,
1,
-1,
1,
1,
-1,
1,
1
]),
v.STATIC_DRAW
), v.enableVertexAttribArray(X), v.vertexAttribPointer(X, 2, v.FLOAT, !1, 0, 0);
const K = v.getUniformLocation(L, "u_viewsize"), V = v.getUniformLocation(L, "u_filters");
// Upload the initial filter settings, then start the render loop once the
// stream is attached to a video element (On is exported as createVideoElement).
v.uniform2fv(V, new Float32Array([o, s])), On(t).then((ue) => {
// Prefer per-video-frame callbacks when available; otherwise fall back to the
// requestAnimationFrame-based shim (ca).
const le = ue.requestVideoFrameCallback?.bind(ue) || ca.requestAnimationFrame;
(function Q() {
// Stop when the video ended or the captured stream was torn down; otherwise
// upload the current video frame, resync canvas/viewport/u_viewsize on
// dimension changes, and draw the quad.
ue.ended || !w.active || (le(Q), v.texImage2D(v.TEXTURE_2D, 0, v.RGBA, v.RGBA, v.UNSIGNED_BYTE, ue), (b.width !== ue.videoWidth || b.height !== ue.videoHeight) && (v.viewport(0, 0, b.width = ue.videoWidth, b.height = ue.videoHeight), v.uniform2fv(K, new Float32Array([b.width, b.height]))), v.drawArrays(v.TRIANGLES, 0, 6));
})();
// Program/shaders are only flagged for deletion here; GL keeps the linked
// program usable while it remains the current program.
}), v.deleteProgram(L), v.deleteShader(D), v.deleteShader(p);
const ve = {
/** Enhanced stream */
stream: w,
/**
* @param {number} value - denoise algorithm to use
* - Pass 1 to use FSR algorithm
* - Pass 2 to use Bilateral algorithm
* - Pass any other number to disable denoising
*/
denoise(ue) {
v.uniform2fv(V, new Float32Array([o = ue, s]));
},
/**
* @param {number} value - exposure compensation coefficient in [0, 2] range
* - Pass value less than 1 to increase exposure
* - Pass value greater than 1 to reduce exposure
* See the {@link https://fujifilm-dsc.com/en/manual/x-pro2/images/exp_exposure_480.gif | image} for visual example
* Inspired by MediaTrackConstraints {@link https://developer.mozilla.org/en-US/docs/Web/API/MediaTrackConstraints#exposurecompensation | Exposure compensation} parameter.
*/
exposureCompensation(ue) {
v.uniform2fv(V, new Float32Array([o, s = ue]));
}
};
// Apply any initial preferences via the setters above (keys must match method names).
if (i)
for (const [ue, le] of Object.entries(i))
ve[ue](le);
return ve;
// Hh: portrait-screen heuristic evaluated once at load time (true on most
// mobile devices held upright); Zi: default getUserMedia video constraints.
}, Hh = typeof screen < "u" && screen.height > screen.width, Zi = {
facingMode: "user",
// Landscape HD resolution range; removed below for portrait screens.
width: { min: 640, ideal: 1280, max: 1920 },
height: { min: 480, ideal: 720, max: 1080 },
// Prefer cropping/scaling over changing the camera's capture resolution.
resizeMode: { ideal: "crop-and-scale" }
};
// On portrait (likely mobile) screens, drop the landscape-oriented size
// constraints and let the browser pick the camera's native resolution.
Hh && (delete Zi.width, delete Zi.height);
class _m {
/**
* Webcam source: wraps navigator.mediaDevices.getUserMedia and optionally
* pipes frames through the WebGL stream enhancer (denoise / exposure).
*
* @param videoConstraints - constraints to be merged with {@link defaultVideoConstraints}
* and to be passed to {@link https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia | navigator.mediaDevices.getUserMedia()}
*/
constructor(i) {
// re() is the bundle's class-field helper (Object.defineProperty wrapper).
// _stream holds a Promise of the raw MediaStream once started.
re(this, "_stream", null);
re(this, "_constraints");
re(this, "_preferences", {});
// _enhancer is the Fo() wrapper when denoise/exposure preferences are active.
re(this, "_enhancer", null);
/** @internal */
re(this, "kind", "stream");
this._constraints = {
...Zi,
...i
};
}
/**
* Specifies if the webcam is currently active.
*
* The webcam is considered active if it has been started and has not been stopped afterwards
*/
get active() {
return !!this._stream;
}
/**
* @param {number} algorithm - denoise algorithm to use
* - Pass false or 0 to disable denoising
* - Pass true or 1 to use FSR algorithm
* - Pass 2 to use Bilateral algorithm
* @internal
*/
denoise(i) {
// Store the preference and, if an enhancer is already running, apply it live.
this._preferences.denoise = Number(i), this._enhancer?.denoise(this._preferences.denoise);
}
/**
* @param {number} coefficient - exposure compensation coefficient in [0, 2] range
* - Pass value less than 1 to increase exposure
* - Pass value greater than 1 to reduce exposure
* See the {@link https://fujifilm-dsc.com/en/manual/x-pro2/images/exp_exposure_480.gif | image} for visual example
* @internal
*/
setExposureCompensation(i) {
// Store the preference and, if an enhancer is already running, apply it live.
this._preferences.exposureCompensation = i, this._enhancer?.exposureCompensation(this._preferences.exposureCompensation);
}
/**
* Manually starts webcam
*
* > Ordinary webcam is lazily started during async iteration over it.
* >
* > But sometimes you may want to manually pre-start webcam e.g during parallel creation of a {@link Player} instance:
* > ```ts
* > const [webcam, player] = await Promise.all([
* >   new Webcam().start(),
* >   Player.create({ clientToken: "xxx-xxx-xxx" }),
* > ])
* >
* > player.use(webcam)
* > ```
*/
async start() {
// Idempotent: reuse the pending/resolved getUserMedia Promise if present.
return await (this._stream ?? (this._stream = Ro(this._constraints))), this;
}
/**
* Yields a sequence of {@link Frame | frames}
* @internal
*/
async *[Symbol.asyncIterator](i) {
// Start (or reuse) the raw stream; attach the enhancer up front only when an
// enhancement preference is actually active (Ci).
const o = await (this._stream ?? (this._stream = Ro(this._constraints))), s = this._enhancer = Ci(this._preferences) ? Fo(o, this._preferences) : null;
let w = new Ri(s ? s.stream : o)[Symbol.asyncIterator]({ horizontalFlip: !0, ...i }), v;
for (; ; ) {
// Hot-swap the enhancer IN if preferences became active since the last frame.
if (!this._enhancer && Ci(this._preferences)) {
const L = this._enhancer = Fo(o, this._preferences);
w = new Ri(L.stream)[Symbol.asyncIterator]({ horizontalFlip: !0, ...i });
}
// Hot-swap the enhancer OUT: stop its capture tracks and fall back to the raw stream.
this._enhancer && !Ci(this._preferences) && (this._enhancer.stream.getTracks().forEach((N) => N.stop()), this._enhancer = null, w = new Ri(o)[Symbol.asyncIterator]({ horizontalFlip: !0, ...i }));
const { done: p, value: D } = await w.next(v);
if (p)
break;
// Forward the consumer's feedback value into the inner iterator on the next turn.
v = yield D;
}
this.stop();
}
/** Turns off webcam */
stop() {
// Stop the raw getUserMedia tracks (_stream holds a Promise) and any enhancer
// capture tracks, then clear both references so `active` reports false.
this._stream && this._stream.then((i) => i.getTracks().forEach((o) => o.stop())), this._enhancer && this._enhancer.stream.getTracks().forEach((i) => i.stop()), this._stream = null, this._enhancer = null;
}
}
const Ro = async (t) => {
// Acquire a webcam MediaStream. mediaDevices is only exposed in secure
// contexts (HTTPS / localhost), so its absence almost always means plain HTTP.
if (navigator.mediaDevices === void 0)
throw new Error(
`SecureContext is required to access webcam
It‘s likely you need to set up HTTPS/TLS for your website
See https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia#Encryption_based_security for details `
);
return await navigator.mediaDevices.getUserMedia({ video: t });
// Ci: true when any enhancement preference is active — a non-neutral exposure
// coefficient, or one of the two supported denoise modes (1 = FSR, 2 = bilateral).
}, Ci = (t) => (typeof t.exposureCompensation == "number" && t.exposureCompensation !== 1) || [1, 2].includes(t.denoise), mm = { createVideoElement: On, createCanvas: ba };
// Kh: 64-character URL-safe alphabet; Ls: random ID generator (nanoid's
// non-secure variant — Math.random, not crypto).
let Kh = "useandom-26T198340PX75pxJACKVERYMINDBUSHWOLF_GQZbfghjklqvwyzrict", Ls = (t = 21) => {
// Sample the alphabet uniformly t times and concatenate.
let id = "", remaining = t;
while (remaining--)
id += Kh[Math.floor(Math.random() * 64)];
return id;
};
const Ms = "KGZ1bmN0aW9uKCl7InVzZSBzdHJpY3QiO3ZhciBzPVVpbnQ4QXJyYXkseD1VaW50MTZBcnJheSxPPVVpbnQzMkFycmF5LEg9bmV3IHMoWzAsMCwwLDAsMCwwLDAsMCwxLDEsMSwxLDIsMiwyLDIsMywzLDMsMyw0LDQsNCw0LDUsNSw1LDUsMCwwLDAsMF0pLEk9bmV3IHMoWzAsMCwwLDAsMSwxLDIsMiwzLDMsNCw0LDUsNSw2LDYsNyw3LDgsOCw5LDksMTAsMTAsMTEsMTEsMTIsMTIsMTMsMTMsMCwwXSksbDE9bmV3IHMoWzE2LDE3LDE4LDAsOCw3LDksNiwxMCw1LDExLDQsMTIsMywxMywyLDE0LDEsMTVdKSxKPWZ1bmN0aW9uKHIsdCl7Zm9yKHZhciBhPW5ldyB4KDMxKSxuPTA7bjwzMTsrK24pYVtuXT10Kz0xPDxyW24tMV07Zm9yKHZhciB2PW5ldyBPKGFbMzBdKSxuPTE7bjwzMDsrK24pZm9yKHZhciBpPWFbbl07aTxhW24rMV07KytpKXZbaV09aS1hW25dPDw1fG47cmV0dXJuW2Esdl19LEs9SihILDIpLFE9S1swXSxjMT1LWzFdO1FbMjhdPTI1OCxjMVsyNThdPTI4O2Zvcih2YXIgczE9SihJLDApLGQxPXMxWzBdLFU9bmV3IHgoMzI3NjgpLHU9MDt1PDMyNzY4OysrdSl7dmFyIEM9KHUmNDM2OTApPj4+MXwodSYyMTg0NSk8PDE7Qz0oQyY1MjQyOCk+Pj4yfChDJjEzMTA3KTw8MixDPShDJjYxNjgwKT4+PjR8KEMmMzg1NSk8PDQsVVt1XT0oKEMmNjUyODApPj4+OHwoQyYyNTUpPDw4KT4+PjF9Zm9yKHZhciB6PWZ1bmN0aW9uKHQsYSxuKXtmb3IodmFyIHY9dC5sZW5ndGgsaT0wLGM9bmV3IHgoYSk7aTx2OysraSl0W2ldJiYrK2NbdFtpXS0xXTt2YXIgZj1uZXcgeChhKTtmb3IoaT0wO2k8YTsrK2kpZltpXT1mW2ktMV0rY1tpLTFdPDwxO3ZhciBvO2lmKG4pe289bmV3IHgoMTw8YSk7dmFyIGU9MTUtYTtmb3IoaT0wO2k8djsrK2kpaWYodFtpXSlmb3IodmFyIGw9aTw8NHx0W2ldLGI9YS10W2ldLGQ9Zlt0W2ldLTFdKys8PGIseT1kfCgxPDxiKS0xO2Q8PXk7KytkKW9bVVtkXT4+PmVdPWx9ZWxzZSBmb3Iobz1uZXcgeCh2KSxpPTA7aTx2OysraSl0W2ldJiYob1tpXT1VW2ZbdFtpXS0xXSsrXT4+PjE1LXRbaV0pO3JldHVybiBvfSxCPW5ldyBzKDI4OCksdT0wO3U8MTQ0OysrdSlCW3VdPTg7Zm9yKHZhciB1PTE0NDt1PDI1NjsrK3UpQlt1XT05O2Zvcih2YXIgdT0yNTY7dTwyODA7Kyt1KUJbdV09Nztmb3IodmFyIHU9MjgwO3U8Mjg4OysrdSlCW3VdPTg7Zm9yKHZhciBWPW5ldyBzKDMyKSx1PTA7dTwzMjsrK3UpVlt1XT01O3ZhciBnMT16KEIsOSwxKSx3MT16KFYsNSwxKSxXPWZ1bmN0aW9uKHIpe2Zvcih2YXIgdD1yWzBdLGE9MTthPHIubGVuZ3RoOysrYSlyW2FdPnQmJih0PXJbYV0pO3JldHVybiB0fSxoPWZ1bmN0aW9uKHIsdCxhKXt2YXIgbj10Lzh8MDtyZXR1cm4ocltuXXxyW24rMV08PDgpPj4odCY3KSZhfSxYPWZ1bmN0aW9uKHIsdCl7dmFyIGE9dC84fDA7cmV0dXJuKHJbYV18clthKzFdPDw4fHJbYSsyXTw8MTYpPj4odCY3KX0saDE9ZnVuY3Rpb24ocil7cmV0dXJuKHIrNykvOHwwfSxqPWZ1bmN0aW9uKHIsdCxh
KXsodD09bnVsbHx8dDwwKSYmKHQ9MCksKGE9PW51bGx8fGE+ci5sZW5ndGgpJiYoYT1yLmxlbmd0aCk7dmFyIG49bmV3KHIuQllURVNfUEVSX0VMRU1FTlQ9PTI/eDpyLkJZVEVTX1BFUl9FTEVNRU5UPT00P086cykoYS10KTtyZXR1cm4gbi5zZXQoci5zdWJhcnJheSh0LGEpKSxufSxtMT1bInVuZXhwZWN0ZWQgRU9GIiwiaW52YWxpZCBibG9jayB0eXBlIiwiaW52YWxpZCBsZW5ndGgvbGl0ZXJhbCIsImludmFsaWQgZGlzdGFuY2UiLCJzdHJlYW0gZmluaXNoZWQiLCJubyBzdHJlYW0gaGFuZGxlciIsLCJubyBjYWxsYmFjayIsImludmFsaWQgVVRGLTggZGF0YSIsImV4dHJhIGZpZWxkIHRvbyBsb25nIiwiZGF0ZSBub3QgaW4gcmFuZ2UgMTk4MC0yMDk5IiwiZmlsZW5hbWUgdG9vIGxvbmciLCJzdHJlYW0gZmluaXNoaW5nIiwiaW52YWxpZCB6aXAgZGF0YSJdLGc9ZnVuY3Rpb24ocix0LGEpe3ZhciBuPW5ldyBFcnJvcih0fHxtMVtyXSk7aWYobi5jb2RlPXIsRXJyb3IuY2FwdHVyZVN0YWNrVHJhY2UmJkVycm9yLmNhcHR1cmVTdGFja1RyYWNlKG4sZyksIWEpdGhyb3cgbjtyZXR1cm4gbn0sYjE9ZnVuY3Rpb24ocix0LGEpe3ZhciBuPXIubGVuZ3RoO2lmKCFufHxhJiZhLmYmJiFhLmwpcmV0dXJuIHR8fG5ldyBzKDApO3ZhciB2PSF0fHxhLGk9IWF8fGEuaTthfHwoYT17fSksdHx8KHQ9bmV3IHMobiozKSk7dmFyIGM9ZnVuY3Rpb24odTEpe3ZhciB2MT10Lmxlbmd0aDtpZih1MT52MSl7dmFyIGYxPW5ldyBzKE1hdGgubWF4KHYxKjIsdTEpKTtmMS5zZXQodCksdD1mMX19LGY9YS5mfHwwLG89YS5wfHwwLGU9YS5ifHwwLGw9YS5sLGI9YS5kLGQ9YS5tLHk9YS5uLFI9bio4O2Rve2lmKCFsKXtmPWgocixvLDEpO3ZhciBZPWgocixvKzEsMyk7aWYobys9MyxZKWlmKFk9PTEpbD1nMSxiPXcxLGQ9OSx5PTU7ZWxzZSBpZihZPT0yKXt2YXIgUz1oKHIsbywzMSkrMjU3LHIxPWgocixvKzEwLDE1KSs0LHQxPVMraChyLG8rNSwzMSkrMTtvKz0xNDtmb3IodmFyIEY9bmV3IHModDEpLEc9bmV3IHMoMTkpLHc9MDt3PHIxOysrdylHW2wxW3ddXT1oKHIsbyt3KjMsNyk7bys9cjEqMztmb3IodmFyIGExPVcoRyksQjE9KDE8PGExKS0xLFIxPXooRyxhMSwxKSx3PTA7dzx0MTspe3ZhciBuMT1SMVtoKHIsbyxCMSldO28rPW4xJjE1O3ZhciBwPW4xPj4+NDtpZihwPDE2KUZbdysrXT1wO2Vsc2V7dmFyIFQ9MCxOPTA7Zm9yKHA9PTE2PyhOPTMraChyLG8sMyksbys9MixUPUZbdy0xXSk6cD09MTc/KE49MytoKHIsbyw3KSxvKz0zKTpwPT0xOCYmKE49MTEraChyLG8sMTI3KSxvKz03KTtOLS07KUZbdysrXT1UfX12YXIgaTE9Ri5zdWJhcnJheSgwLFMpLF89Ri5zdWJhcnJheShTKTtkPVcoaTEpLHk9VyhfKSxsPXooaTEsZCwxKSxiPXooXyx5LDEpfWVsc2UgZygxKTtlbHNle3ZhciBwPWgxKG8pKzQsTD1yW3AtNF18cltwLTNdPDw4LFo9cCtMO2lmKFo+bil7aSYmZygwKTticmVha312JiZjKGUrTCksdC5zZXQoci5zdWJhcnJheShwLFopLGUpLGEuYj1lKz1M
LGEucD1vPVoqOCxhLmY9Zjtjb250aW51ZX1pZihvPlIpe2kmJmcoMCk7YnJlYWt9fXYmJmMoZSsxMzEwNzIpO2Zvcih2YXIgWTE9KDE8PGQpLTEsRjE9KDE8PHkpLTEsJD1vOzskPW8pe3ZhciBUPWxbWChyLG8pJlkxXSxrPVQ+Pj40O2lmKG8rPVQmMTUsbz5SKXtpJiZnKDApO2JyZWFrfWlmKFR8fGcoMiksazwyNTYpdFtlKytdPWs7ZWxzZSBpZihrPT0yNTYpeyQ9byxsPW51bGw7YnJlYWt9ZWxzZXt2YXIgbzE9ay0yNTQ7aWYoaz4yNjQpe3ZhciB3PWstMjU3LE09SFt3XTtvMT1oKHIsbywoMTw8TSktMSkrUVt3XSxvKz1NfXZhciBQPWJbWChyLG8pJkYxXSxEPVA+Pj40O1B8fGcoMyksbys9UCYxNTt2YXIgXz1kMVtEXTtpZihEPjMpe3ZhciBNPUlbRF07Xys9WChyLG8pJigxPDxNKS0xLG8rPU19aWYobz5SKXtpJiZnKDApO2JyZWFrfXYmJmMoZSsxMzEwNzIpO2Zvcih2YXIgZTE9ZStvMTtlPGUxO2UrPTQpdFtlXT10W2UtX10sdFtlKzFdPXRbZSsxLV9dLHRbZSsyXT10W2UrMi1fXSx0W2UrM109dFtlKzMtX107ZT1lMX19YS5sPWwsYS5wPSQsYS5iPWUsYS5mPWYsbCYmKGY9MSxhLm09ZCxhLmQ9YixhLm49eSl9d2hpbGUoIWYpO3JldHVybiBlPT10Lmxlbmd0aD90OmoodCwwLGUpfSx5MT1uZXcgcygwKSxFPWZ1bmN0aW9uKHIsdCl7cmV0dXJuIHJbdF18clt0KzFdPDw4fSxtPWZ1bmN0aW9uKHIsdCl7cmV0dXJuKHJbdF18clt0KzFdPDw4fHJbdCsyXTw8MTZ8clt0KzNdPDwyNCk+Pj4wfSxxPWZ1bmN0aW9uKHIsdCl7cmV0dXJuIG0ocix0KSttKHIsdCs0KSo0Mjk0OTY3Mjk2fTtmdW5jdGlvbiBFMShyLHQpe3JldHVybiBiMShyLHQpfXZhciBBPXR5cGVvZiBUZXh0RGVjb2RlcjwidSImJm5ldyBUZXh0RGVjb2RlcixwMT0wO3RyeXtBLmRlY29kZSh5MSx7c3RyZWFtOiEwfSkscDE9MX1jYXRjaHt9dmFyIEMxPWZ1bmN0aW9uKHIpe2Zvcih2YXIgdD0iIixhPTA7Oyl7dmFyIG49clthKytdLHY9KG4+MTI3KSsobj4yMjMpKyhuPjIzOSk7aWYoYSt2PnIubGVuZ3RoKXJldHVyblt0LGoocixhLTEpXTt2P3Y9PTM/KG49KChuJjE1KTw8MTh8KHJbYSsrXSY2Myk8PDEyfChyW2ErK10mNjMpPDw2fHJbYSsrXSY2MyktNjU1MzYsdCs9U3RyaW5nLmZyb21DaGFyQ29kZSg1NTI5NnxuPj4xMCw1NjMyMHxuJjEwMjMpKTp2JjE/dCs9U3RyaW5nLmZyb21DaGFyQ29kZSgobiYzMSk8PDZ8clthKytdJjYzKTp0Kz1TdHJpbmcuZnJvbUNoYXJDb2RlKChuJjE1KTw8MTJ8KHJbYSsrXSY2Myk8PDZ8clthKytdJjYzKTp0Kz1TdHJpbmcuZnJvbUNoYXJDb2RlKG4pfX07ZnVuY3Rpb24gUzEocix0KXtpZih0KXtmb3IodmFyIGE9IiIsbj0wO248ci5sZW5ndGg7bis9MTYzODQpYSs9U3RyaW5nLmZyb21DaGFyQ29kZS5hcHBseShudWxsLHIuc3ViYXJyYXkobixuKzE2Mzg0KSk7cmV0dXJuIGF9ZWxzZXtpZihBKXJldHVybiBBLmRlY29kZShyKTt2YXIgdj1DMShyKSxpPXZbMF0sYz12WzFdO3JldHVybiBjLmxlbmd0aCYmZyg4KSxpfX12YXIgXzE9ZnVuY3Rpb24o
cix0KXtyZXR1cm4gdCszMCtFKHIsdCsyNikrRShyLHQrMjgpfSx4MT1mdW5jdGlvbihyLHQsYSl7dmFyIG49RShyLHQrMjgpLHY9UzEoci5zdWJhcnJheSh0KzQ2LHQrNDYrbiksIShFKHIsdCs4KSYyMDQ4KSksaT10KzQ2K24sYz1tKHIsdCsyMCksZj1hJiZjPT00Mjk0OTY3Mjk1P1QxKHIsaSk6W2MsbShyLHQrMjQpLG0ocix0KzQyKV0sbz1mWzBdLGU9ZlsxXSxsPWZbMl07cmV0dXJuW0Uocix0KzEwKSxvLGUsdixpK0Uocix0KzMwKStFKHIsdCszMiksbF19LFQxPWZ1bmN0aW9uKHIsdCl7Zm9yKDtFKHIsdCkhPTE7dCs9NCtFKHIsdCsyKSk7cmV0dXJuW3Eocix0KzEyKSxxKHIsdCs0KSxxKHIsdCsyMCldfTtmdW5jdGlvbiBrMShyLHQpe2Zvcih2YXIgYT17fSxuPXIubGVuZ3RoLTIyO20ocixuKSE9MTAxMDEwMjU2Oy0tbikoIW58fHIubGVuZ3RoLW4+NjU1NTgpJiZnKDEzKTt2YXIgdj1FKHIsbis4KTtpZighdilyZXR1cm57fTt2YXIgaT1tKHIsbisxNiksYz1pPT00Mjk0OTY3Mjk1O2MmJihuPW0ocixuLTEyKSxtKHIsbikhPTEwMTA3NTc5MiYmZygxMyksdj1tKHIsbiszMiksaT1tKHI