@banuba/webar
Version:
Banuba WebAR SDK
503 lines (449 loc) • 368 kB
JavaScript
var Th=Object.defineProperty,Rh=(r,o,s)=>o in r?Th(r,o,{enumerable:!0,configurable:!0,writable:!0,value:s}):r[o]=s,ee=(r,o,s)=>(Rh(r,typeof o!="symbol"?o+"":o,s),s);let Ch=0;const Sa=()=>Ch++,xa="KGZ1bmN0aW9uKCl7InVzZSBzdHJpY3QiO2FkZEV2ZW50TGlzdGVuZXIoIm1lc3NhZ2UiLCh7ZGF0YTp0fSk9Pntjb25zdCBzPXtpZDp0LmlkfTtzZXRUaW1lb3V0KHBvc3RNZXNzYWdlLHQudGltZW91dCxzKX0pfSkoKTsK",Ta=typeof window<"u"&&window.Blob&&new Blob([atob(xa)],{type:"text/javascript;charset=utf-8"});function kh(){let r;try{if(r=Ta&&(window.URL||window.webkitURL).createObjectURL(Ta),!r)throw"";return new Worker(r)}catch{return new Worker("data:application/javascript;base64,"+xa)}finally{r&&(window.URL||window.webkitURL).revokeObjectURL(r)}}let yn;const Ri=new Map,Ra=(r,o)=>{const s=Sa(),u={id:s,timeout:o};return Ri.set(u.id,r),yn||(yn=new kh,yn.onmessage=({data:g})=>{const v=Ri.get(g.id);Ri.delete(g.id),v()}),yn.postMessage(u),s},Ph=60,Ca=1e3/Ph,_n=[];let ka=0;const Pa=r=>{const o=Sa();if(_n.length===0){const s=performance.now(),u=Ca-(s-ka)%Ca;Ra(()=>{const g=ka=performance.now(),v=[..._n];_n.length=0,v.forEach(E=>E(g))},u)}return _n.push(r),o},Ah=Object.freeze(Object.defineProperty({__proto__:null,requestAnimationFrame:Pa,setTimeout:Ra},Symbol.toStringTag,{value:"Module"})),Lh=(...r)=>window.setTimeout(...r),vn=new Map,Ih=r=>{const o=window.requestAnimationFrame((...s)=>{vn.delete(o),r(...s)});return vn.set(o,r),o};typeof document<"u"&&document.addEventListener("visibilitychange",()=>{document.visibilityState!=="visible"&&vn.forEach((r,o)=>{vn.delete(o),cancelAnimationFrame(o),Pa(r)})});const Fh=Object.freeze(Object.defineProperty({__proto__:null,requestAnimationFrame:Ih,setTimeout:Lh},Symbol.toStringTag,{value:"Module"})),Dh=typeof document<"u"?document:{visibilityState:"hidden"},Aa=()=>Dh.visibilityState==="visible"?Fh:Ah,Or=r=>Aa().requestAnimationFrame(r),La=(r,o)=>Aa().setTimeout(r,o),Ia=r=>Promise.resolve().then(r),Ci={requestAnimationFrame:Or,setTimeout:La},Nh=Object.freeze(Object.defineProperty({__proto__:null,nextTick:Ia,requestAnimationFrame:Or,setTimeout:La,timers:Ci},Symbol.toStringTag,{value:"Module"})),Mh=()=>new Promise(r=>Or(r)),ki=(r=-1)=>function(o,s,u){const g=u,v=g.value;return{...g,value:async function*(...E){const y=v.apply(this,E);let I=0,L=0;for(;;){const U=1e3/r,V=.1*U;for(;(L=performance.now())-I<U-V;)await Mh();I=L;const{done:$,value:Z}=await y.next();if($)return Z;const K=yield Z;typeof K<"u"&&(r=K)}}}},wn=async(r,o={})=>new Promise(s=>{const u=document.createElement("video");if(u.muted=!0,u.controls=!1,u.playsInline=!0,Object.assign(u,o),r instanceof globalThis.MediaStream)u.srcObject=r,u.addEventListener("ended",()=>u.srcObject=null,{once:!0}),r.addEventListener("inactive",()=>u.dispatchEvent(new CustomEvent("ended")),{once:!0});else{if(typeof r!="string"){const v=r=URL.createObjectURL(r);u.addEventListener("emptied",()=>URL.revokeObjectURL(v),{once:!0})}u.crossOrigin="anonymous",u.src=r,u.addEventListener("ended",()=>u.src="",{once:!0})}u.style.position="fixed",u.style.zIndex="-9999999",u.style.opacity="0.0000000001",document.body.appendChild(u),u.addEventListener("emptied",()=>u.remove(),{once:!0});const g=setInterval(()=>u.readyState,300);u.addEventListener("play",()=>clearInterval(g),{once:!0}),u.addEventListener("play",()=>s(u),{once:!0}),u.addEventListener("loadedmetadata",()=>u.play(),{once:!0})}),Oh=r=>new Promise((o,s)=>{const u=document.createElement("img");u.onload=()=>o(u),u.onerror=s,u.crossOrigin="anonymous",u.src=typeof r=="string"?r:URL.createObjectURL(r)}),Fa=new Map,Bh=(r,o,s)=>r*(1-s)+o*s,Pi=r=>`webar::${r}:start`,Ai=r=>`webar::${r}:end`,Li=r=>{let o={internalName:r+":"+Math.random()};return performance.mark(Pi(o.internalName)),o},Ii=r=>{const o=r.internalName;performance.mark(Ai(o));let s=performance.measure(o,Pi(o),Ai(o));s||(s=performance.getEntriesByName(o)[0]),performance.clearMarks(Pi(o)),performance.clearMarks(Ai(o)),performance.clearMeasures(o);const{duration:u}=s,g=o.split(":")[0];let{averagedDuration:v=0}=Fa.get(g)||{};return v=Bh(v,u,.05),Fa.set(g,{averagedDuration:v}),{instantDuration:u,averagedDuration:v}},Da=(r,o=s=>console.warn(s))=>function(s,u,g){const v=g.value;if(typeof v!="function")throw new TypeError("Only functions can be marked as deprecated");return{...g,value:function(...E){return o.call(this,`DEPRECATION: ${s.constructor.name}.${u}() is deprecated. ${r}`),v.call(this,...E)}}};let Br=class{constructor(){ee(this,"_emitter",new EventTarget)}addEventListener(r,o,s){this._emitter.addEventListener(r,o,s)}removeEventListener(r,o,s){this._emitter.removeEventListener(r,o,s)}dispatchEvent(r){return this._emitter.dispatchEvent(r)}removeAllEventListeners(){this._emitter=new EventTarget}};const Uh=(r,o,s)=>fetch(r,o).then(u=>{if(!u.body)return u;let g=0;const v=Number(u.headers.get("content-length")||0),E=u.body.getReader();return new Response(new ReadableStream({async start(y){for(;;){const{done:I,value:L}=await E.read();if(I?g=v:g+=L.byteLength,s?.onProgress?.({total:v,transferred:g}),I)break;y.enqueue(L)}y.close()}}),u)}),Na=()=>/Edge?\/(79|[89]\d|\d{3,})(\.\d+|)(\.\d+|)|Firefox\/(6[5-9]|[7-9]\d|\d{3,})\.\d+(\.\d+|)|Chrom(ium|e)\/(5[7-9]|[6-9]\d|\d{3,})\.\d+(\.\d+|)([\d.]+$|.*Safari\/(?![\d.]+ Edge\/[\d.]+$))|Maci.* Version\/(1[5-9]|[2-9]\d|\d{3,})\.\d+([,.]\d+|)( Mobile\/\w+|) Safari\/|Chrome.+OPR\/(4[4-9]|[5-9]\d|\d{3,})\.\d+\.\d+|(CPU[ +]OS|iPhone[ +]OS|CPU[ +]iPhone|CPU IPhone OS|CPU iPad OS)[ +]+(1[5-9]|[2-9]\d|\d{3,})[._]\d+([._]\d+|)|Mobile Safari.+OPR\/(7[2-9]|[89]\d|\d{3,})\.\d+\.\d+|Android.+Chrom(ium|e)\/(10[7-9]|1[1-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Android.+(UC? ?Browser|UCWEB|U3)[ /]?(1[3-9]|[2-9]\d|\d{3,})\.\d+\.\d+|SamsungBrowser\/([7-9]|\d{2,})\.\d+|Android.+MQ{2}Browser\/(1[3-9]|[2-9]\d|\d{3,})(\.\d+|)(\.\d+|)|baidubrowser[\s/](1[3-9]|[2-9]\d|\d{3,})(\.\d+|)(\.\d+|)/.test(navigator.userAgent),jh=typeof window<"u"&&/^((?!chrome|android).)*safari/i.test(window.navigator?.userAgent),Ma=typeof OffscreenCanvas<"u"&&!jh,Oa={alpha:!0,antialias:!1,depth:!1,desynchronized:!1,premultipliedAlpha:!1,preserveDrawingBuffer:!1,stencil:!1};let ne;const Wh=(()=>{if(typeof window>"u"||!Na()||(ne??(ne=Fi().getContext("webgl2",Oa)),ne===null))return!1;const r=ne.createTexture();ne.bindTexture(ne.TEXTURE_2D,r),ne.texImage2D(ne.TEXTURE_2D,0,ne.RGB,1,1,0,ne.RGB,ne.UNSIGNED_BYTE,null);const o=ne.createFramebuffer();ne.bindFramebuffer(ne.FRAMEBUFFER,o),ne.framebufferTexture2D(ne.FRAMEBUFFER,ne.COLOR_ATTACHMENT0,ne.TEXTURE_2D,r,0);const s=ne.getParameter(ne.IMPLEMENTATION_COLOR_READ_FORMAT);return ne.bindFramebuffer(ne.FRAMEBUFFER,null),ne.bindTexture(ne.TEXTURE_2D,null),ne.deleteFramebuffer(o),ne.deleteTexture(r),s===ne.RGB})(),zh=async(r,o,s,u="RGBA")=>{ne??(ne=Fi().getContext("webgl2",Oa)),ne.canvas.width=r.width,ne.canvas.height=r.height,u==="RGB"&&ne.pixelStorei(ne.PACK_ALIGNMENT,1);const g=ne.createTexture();ne.bindTexture(ne.TEXTURE_2D,g),ne.texParameteri(ne.TEXTURE_2D,ne.TEXTURE_MIN_FILTER,ne.NEAREST),ne.texParameteri(ne.TEXTURE_2D,ne.TEXTURE_MAG_FILTER,ne.LINEAR),ne.texImage2D(ne.TEXTURE_2D,0,ne[u],ne[u],ne.UNSIGNED_BYTE,r);const v=ne.createFramebuffer();ne.bindFramebuffer(ne.FRAMEBUFFER,v),ne.framebufferTexture2D(ne.FRAMEBUFFER,ne.COLOR_ATTACHMENT0,ne.TEXTURE_2D,g,0);const E=ne.createBuffer();ne.bindBuffer(ne.PIXEL_PACK_BUFFER,E),ne.bufferData(ne.PIXEL_PACK_BUFFER,o.byteLength,ne.STREAM_READ),ne.readPixels(s.x,s.y,s.width,s.height,ne[u],ne.UNSIGNED_BYTE,0),ne.bindBuffer(ne.PIXEL_PACK_BUFFER,null),ne.bindFramebuffer(ne.FRAMEBUFFER,null),ne.deleteFramebuffer(v),ne.bindTexture(ne.TEXTURE_2D,null),ne.deleteTexture(g);const y=ne.fenceSync(ne.SYNC_GPU_COMMANDS_COMPLETE,0);ne.flush(),await $h(ne,y).finally(()=>ne.deleteSync(y)),ne.bindBuffer(ne.PIXEL_PACK_BUFFER,E),ne.getBufferSubData(ne.PIXEL_PACK_BUFFER,0,new DataView(o.buffer()),o.byteOffset,o.byteLength),ne.bindBuffer(ne.PIXEL_PACK_BUFFER,null),ne.deleteBuffer(E)},$h=(r,o)=>new Promise((s,u)=>function g(){const v=r.clientWaitSync(o,0,0);if(v===r.WAIT_FAILED)return u(new Error("GPU operations complete wait failed"));if(v===r.CONDITION_SATISFIED||v===r.ALREADY_SIGNALED)return s();Ci.setTimeout(g,2)}());function Gh(r=256,o=128){const s=document.createElement("canvas");return s.width=r,s.height=o,s}function Vh(r=256,o=128){return new OffscreenCanvas(r,o)}function Fi(r=256,o=128){return Ma?Vh(r,o):Gh(r,o)}const En=(r={})=>{const o=({displayWidth:s,displayHeight:u,visibleRect:g=null})=>{let v=g?.x??0,E=g?.y??0,y=g?.width??s,I=g?.height??u;if(r.crop){const L=r?.orientation??0;let[U,V,$,Z]=[0,0,0,0];L==90||L==270?[V,U,Z,$]=r.crop(I,y):[U,V,$,Z]=r.crop(y,I),[v,E,y,I]=[v+U,E+V,$,Z]}return[s,u]=[y,I],{visibleRect:{x:v,y:E,width:y,height:I},displayWidth:s,displayHeight:u,horizontalFlip:r.horizontalFlip,orientation:r.orientation,textureOrientation:r.textureOrientation}};return{getSourceOptions:s=>{let u=s instanceof HTMLVideoElement?s.videoWidth:s.width,g=s instanceof HTMLVideoElement?s.videoHeight:s.height;return o({displayWidth:u,displayHeight:g})},getFrameOptions:o}};class Ur{constructor(o,s={},u=null){ee(this,"_source",null),ee(this,"_visibleRect",{x:0,y:0,width:0,height:0}),ee(this,"_deleter"),ee(this,"horizontalFlip",!1),ee(this,"orientation",0),ee(this,"textureOrientation",this.orientation),ee(this,"frameTimestamp",performance.now());const g=o instanceof HTMLVideoElement?o.videoWidth:o.width,v=o instanceof HTMLVideoElement?o.videoHeight:o.height;this._visibleRect.x=s.visibleRect?.x??0,this._visibleRect.y=s.visibleRect?.y??0,this._visibleRect.width=s.visibleRect?.width??g,this._visibleRect.height=s.visibleRect?.height??v,this.horizontalFlip=s.horizontalFlip??this.horizontalFlip,this.orientation=s.orientation??this.orientation,this.textureOrientation=s.textureOrientation??this.textureOrientation,o.width=g,o.height=v,this._source=o,this._deleter=u}get texture(){return this._source?.width==this.displayWidth&&this._source?.height==this.displayHeight?this._source:null}get displayWidth(){return this._visibleRect.width}get displayHeight(){return this._visibleRect.height}get format(){return this._source?Wh?"RGB":"RGBA":null}allocationSize(){if(!this.format)throw new Error("Failed to execute 'allocationSize' on 'Frame': Frame is closed.");const{width:o,height:s}={width:this._visibleRect.width,height:this._visibleRect.height};return o*s*this.format.length}async copyTo(o){if(!this._source)throw new Error("Failed to execute 'copyTo' on 'Frame': Frame is closed.");return await zh(this._source,o,this._visibleRect,this.format),[]}close(){this._deleter&&this._deleter(),this._source=null}}var Kh=Object.defineProperty,Hh=Object.getOwnPropertyDescriptor,Yh=(r,o,s,u)=>{for(var g=u>1?void 0:u?Hh(o,s):o,v=r.length-1,E;v>=0;v--)(E=r[v])&&(g=(u?E(o,s,g):E(g))||g);return u&&g&&Kh(o,s,g),g},Ba;let Ua=class{constructor(r){ee(this,"_src"),ee(this,"kind","image"),this._src=r}async*[Ba=Symbol.asyncIterator](r){const o=await Oh(this._src),s=En(r);yield new Ur(o,s.getSourceOptions(o),()=>{URL.revokeObjectURL(o.src),o.src=""})}};Yh([ki(30)],Ua.prototype,Ba,1);var Xh=Object.defineProperty,Zh=Object.getOwnPropertyDescriptor,Qh=(r,o,s,u)=>{for(var g=u>1?void 0:u?Zh(o,s):o,v=r.length-1,E;v>=0;v--)(E=r[v])&&(g=(u?E(o,s,g):E(g))||g);return u&&g&&Xh(o,s,g),g},ja,ar;const Wa=(ar=class{constructor(r){if(ee(this,"_stream"),ee(this,"kind","stream"),!ar.cache.has(r))ar.cache.set(r,this);else return ar.cache.get(r);this._stream=r}async*[ja=Symbol.asyncIterator](r){const o=En(r);if(typeof MediaStreamTrackProcessor<"u"){const s=this._stream.getVideoTracks()[0];if(s.readyState==="ended")return;const u=new MediaStreamTrackProcessor({track:s}).readable.getReader();try{for(;;){const{done:g,value:v}=await u.read();if(g)return;const E=new VideoFrame(v,o.getFrameOptions(v));E.horizontalFlip=r?.horizontalFlip??!0,E.orientation=r?.orientation??0,E.textureOrientation=r?.textureOrientation??E.orientation,E.frameTimestamp=v.timestamp,v.close(),yield E}}finally{u.releaseLock()}}else{const s=await wn(this._stream),u="requestVideoFrameCallback"in s?s.requestVideoFrameCallback.bind(s):requestAnimationFrame;for(;!s.paused;)await new Promise(u),yield new Ur(s,o.getSourceOptions(s));URL.revokeObjectURL(s.src),s.src="",s.srcObject=null}}stop(){for(const r of this._stream.getVideoTracks())r.stop();this._stream&&ar.cache.delete(this._stream)}},ee(ar,"cache",new WeakMap),ar);Qh([ki(30)],Wa.prototype,ja,1);let Sn=Wa,qh=class{constructor(r){ee(this,"_readable"),ee(this,"kind","stream"),this._readable=r}async*[Symbol.asyncIterator](r){const o=En(r),s=this._readable.getReader();try{for(;;){const{done:u,value:g}=await s.read();if(u)return;const v=new VideoFrame(g,o.getFrameOptions(g));v.horizontalFlip=r?.horizontalFlip??!0,v.orientation=r?.orientation??0,v.textureOrientation=r?.textureOrientation??v.orientation,v.frameTimestamp=g.timestamp,g.close(),yield v}}finally{s.releaseLock()}}stop(){this._readable.cancel()}};var Jh=Object.defineProperty,ep=Object.getOwnPropertyDescriptor,tp=(r,o,s,u)=>{for(var g=u>1?void 0:u?ep(o,s):o,v=r.length-1,E;v>=0;v--)(E=r[v])&&(g=(u?E(o,s,g):E(g))||g);return u&&g&&Jh(o,s,g),g},za;const $a={loop:!1};class Ga{constructor(o,s){ee(this,"_src"),ee(this,"_options"),ee(this,"_video",null),ee(this,"kind","video"),this._src=o,this._options={...$a,...s}}async*[za=Symbol.asyncIterator](o){const s=await(this._video??(this._video=wn(this._src,this._options))),u=En(o),g="requestVideoFrameCallback"in s?s.requestVideoFrameCallback.bind(s):requestAnimationFrame;for(;!s.paused;)await new Promise(g),yield new Ur(s,u.getSourceOptions(s))}stop(){this._video&&this._video.then(o=>(URL.revokeObjectURL(o.src),o.src="",o.srcObject=null)),this._video=null}}tp([ki(30)],Ga.prototype,za,1);const rp=`#define GLSLIFY 1
attribute vec2 a_position;
varying vec2 v_tex_uv;
void main() {
v_tex_uv.x = (a_position.x + 1.) * .5;
v_tex_uv.y = 1. - (a_position.y + 1.) * .5;
gl_Position = vec4(a_position, 0., 1.);
}
`,np=`precision highp float;
#define GLSLIFY 1
varying vec2 v_tex_uv;
uniform sampler2D u_texture;
uniform vec2 u_viewsize;
/**
* u_filters.x - denoising algorithm to use
* 1 - FSR
* 2 - Bilateral
* any other value - none
* u_filters.y - light correction coefficient in [0, 2]
* 1 - no light correction
*/
uniform vec2 u_filters;
// https://github.com/glslify/glslify#importing-a-glsl-module
// https://github.com/glslify/glslify#passing-references-between-modules
// Copyright (c) 2021 Advanced Micro Devices, Inc. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//
// FidelityFX FSR v1.0.2 by AMD
// ported to mpv by agyild - https://gist.github.com/agyild/82219c545228d70c5604f865ce0b0ce5
// ported to WebGL by goingdigital - https://www.shadertoy.com/view/stXSWB
// using colorspace functions from tobspr - https://github.com/tobspr/GLSL-Color-Spaces/blob/master/ColorSpaces.inc.glsl
#define SHARPENING 2.0 // Sharpening intensity: Adjusts sharpening intensity by averaging the original pixels to the sharpened result. 1.0 is the unmodified default. 0.0 to 1.0.
#define CONTRAST 2.0 // Adjusts the range the shader adapts to high contrast (0 is not all the way off). Higher values = more high contrast sharpening. 0.0 to 1.0.
#define PERFORMANCE 1 // Whether to use optimizations for performance with loss of quality
// Used to convert from linear RGB to XYZ space
const mat3 RGB_2_XYZ_2717090884 = (mat3(
0.4124564, 0.2126729, 0.0193339,
0.3575761, 0.7151522, 0.1191920,
0.1804375, 0.0721750, 0.9503041
));
// Used to convert from XYZ to linear RGB space
const mat3 XYZ_2_RGB_2717090884 = (mat3(
3.2404542,-0.9692660, 0.0556434,
-1.5371385, 1.8760108,-0.2040259,
-0.4985314, 0.0415560, 1.0572252
));
// Converts a color from linear RGB to XYZ space
vec3 rgb_to_xyz_2717090884(vec3 rgb) {
return RGB_2_XYZ_2717090884 * rgb;
}
// Converts a color from XYZ to linear RGB space
vec3 xyz_to_rgb_2717090884(vec3 xyz) {
return XYZ_2_RGB_2717090884 * xyz;
}
/* EASU stage
*
* This takes a reduced resolution source, and scales it up while preserving detail.
*
* Updates:
* stretch definition fixed. Thanks nehon for the bug report!
*/
vec3 FsrEasuCF(vec2 p) {
vec2 uv = (p + .5) / u_viewsize;
vec4 color = texture2D(u_texture, uv);
return rgb_to_xyz_2717090884(color.rgb);
}
/**** EASU ****/
void FsrEasuCon(
out vec4 con0,
out vec4 con1,
out vec4 con2,
out vec4 con3,
// This the rendered image resolution being upscaled
vec2 inputViewportInPixels,
// This is the resolution of the resource containing the input image (useful for dynamic resolution)
vec2 inputSizeInPixels,
// This is the display resolution which the input image gets upscaled to
vec2 outputSizeInPixels
)
{
// Output integer position to a pixel position in viewport.
con0 = vec4(
inputViewportInPixels.x/outputSizeInPixels.x,
inputViewportInPixels.y/outputSizeInPixels.y,
.5*inputViewportInPixels.x/outputSizeInPixels.x-.5,
.5*inputViewportInPixels.y/outputSizeInPixels.y-.5
);
// Viewport pixel position to normalized image space.
// This is used to get upper-left of 'F' tap.
con1 = vec4(1.,1.,1.,-1.)/inputSizeInPixels.xyxy;
// Centers of gather4, first offset from upper-left of 'F'.
// +---+---+
// | | |
// +--(0)--+
// | b | c |
// +---F---+---+---+
// | e | f | g | h |
// +--(1)--+--(2)--+
// | i | j | k | l |
// +---+---+---+---+
// | n | o |
// +--(3)--+
// | | |
// +---+---+
// These are from (0) instead of 'F'.
con2 = vec4(-1.,2.,1.,2.)/inputSizeInPixels.xyxy;
con3 = vec4(0.,4.,0.,0.)/inputSizeInPixels.xyxy;
}
// Filtering for a given tap for the scalar.
void FsrEasuTapF(
inout vec3 aC, // Accumulated color, with negative lobe.
inout float aW, // Accumulated weight.
vec2 off_0, // Pixel offset from resolve position to tap.
vec2 dir_0, // Gradient direction.
vec2 len_0, // Length.
float lob_0, // Negative lobe strength.
float clp_0, // Clipping point.
vec3 c_0
)
{
// Tap color.
// Rotate offset by direction.
vec2 v = vec2(dot(off_0, dir_0), dot(off_0,vec2(-dir_0.y,dir_0.x)));
// Anisotropy.
v *= len_0;
// Compute distance^2.
float d2 = min(dot(v,v),clp_0);
// Limit to the window as at corner, 2 taps can easily be outside.
// Approximation of lancos2 without sin() or rcp(), or sqrt() to get x.
// (25/16 * (2/5 * x^2 - 1)^2 - (25/16 - 1)) * (1/4 * x^2 - 1)^2
// |_______________________________________| |_______________|
// base window
// The general form of the 'base' is,
// (a*(b*x^2-1)^2-(a-1))
// Where 'a=1/(2*b-b^2)' and 'b' moves around the negative lobe.
float wB = .4 * d2 - 1.;
float wA = lob_0 * d2 -1.;
wB *= wB;
wA *= wA;
wB = 1.5625*wB-.5625;
float w= wB * wA;
// Do weighted average.
aC += c_0*w;
aW += w;
}
//------------------------------------------------------------------------------------------------------------------------------
// Accumulate direction and length.
void FsrEasuSetF(
inout vec2 dir,
inout float len,
float w,
float lA,float lB,float lC,float lD,float lE
)
{
// Direction is the '+' diff.
// a
// b c d
// e
// Then takes magnitude from abs average of both sides of 'c'.
// Length converts gradient reversal to 0, smoothly to non-reversal at 1, shaped, then adding horz and vert terms.
float lenX = max(abs(lD - lC), abs(lC - lB));
float dirX = lD - lB;
dir.x += dirX * w;
lenX = clamp(abs(dirX)/lenX,0.,1.);
lenX *= lenX;
len += lenX * w;
// Repeat for the y axis.
float lenY = max(abs(lE - lC), abs(lC - lA));
float dirY = lE - lA;
dir.y += dirY * w;
lenY = clamp(abs(dirY) / lenY,0.,1.);
lenY *= lenY;
len += lenY * w;
}
//------------------------------------------------------------------------------------------------------------------------------
void FsrEasuF(
out vec3 pix,
vec2 ip, // Integer pixel position in output.
// Constants generated by FsrEasuCon().
vec4 con0, // xy = output to input scale, zw = first pixel offset correction
vec4 con1_0,
vec4 con2_0,
vec4 con3_0
)
{
//------------------------------------------------------------------------------------------------------------------------------
// Get position of 'f'.
vec2 pp = ip * con0.xy + con0.zw; // Corresponding input pixel/subpixel
vec2 fp = floor(pp);// fp = source nearest pixel
pp -= fp; // pp = source subpixel
//------------------------------------------------------------------------------------------------------------------------------
// 12-tap kernel.
// b c
// e f g h
// i j k l
// n o
// Gather 4 ordering.
// a b
// r g
vec2 p0 = fp * con1_0.xy + con1_0.zw;
// These are from p0 to avoid pulling two constants on pre-Navi hardware.
vec2 p1 = p0 + con2_0.xy;
vec2 p2 = p0 + con2_0.zw;
vec2 p3 = p0 + con3_0.xy;
// TextureGather is not available on WebGL2
vec4 off = vec4(-.5,.5,-.5,.5)*con1_0.xxyy;
// textureGather to texture offsets
// x=west y=east z=north w=south
vec3 bC = FsrEasuCF(p0 + off.xw); float bL = bC.g + 0.5 *(bC.r + bC.b);
vec3 cC = FsrEasuCF(p0 + off.yw); float cL = cC.g + 0.5 *(cC.r + cC.b);
vec3 iC = FsrEasuCF(p1 + off.xw); float iL = iC.g + 0.5 *(iC.r + iC.b);
vec3 jC = FsrEasuCF(p1 + off.yw); float jL = jC.g + 0.5 *(jC.r + jC.b);
vec3 fC = FsrEasuCF(p1 + off.yz); float fL = fC.g + 0.5 *(fC.r + fC.b);
vec3 eC = FsrEasuCF(p1 + off.xz); float eL = eC.g + 0.5 *(eC.r + eC.b);
vec3 kC = FsrEasuCF(p2 + off.xw); float kL = kC.g + 0.5 *(kC.r + kC.b);
vec3 lC = FsrEasuCF(p2 + off.yw); float lL = lC.g + 0.5 *(lC.r + lC.b);
vec3 hC = FsrEasuCF(p2 + off.yz); float hL = hC.g + 0.5 *(hC.r + hC.b);
vec3 gC = FsrEasuCF(p2 + off.xz); float gL = gC.g + 0.5 *(gC.r + gC.b);
vec3 oC = FsrEasuCF(p3 + off.yz); float oL = oC.g + 0.5 *(oC.r + oC.b);
vec3 nC = FsrEasuCF(p3 + off.xz); float nL = nC.g + 0.5 *(nC.r + nC.b);
//------------------------------------------------------------------------------------------------------------------------------
// Simplest multi-channel approximate luma possible (luma times 2, in 2 FMA/MAD).
// Accumulate for bilinear interpolation.
vec2 dir = vec2(0.);
float len = 0.;
FsrEasuSetF(dir, len, (1.-pp.x)*(1.-pp.y), bL, eL, fL, gL, jL);
FsrEasuSetF(dir, len, pp.x *(1.-pp.y), cL, fL, gL, hL, kL);
FsrEasuSetF(dir, len, (1.-pp.x)* pp.y , fL, iL, jL, kL, nL);
FsrEasuSetF(dir, len, pp.x * pp.y , gL, jL, kL, lL, oL);
//------------------------------------------------------------------------------------------------------------------------------
// Normalize with approximation, and cleanup close to zero.
vec2 dir2 = dir * dir;
float dirR = dir2.x + dir2.y;
bool zro = dirR < (1.0/32768.0);
dirR = inversesqrt(dirR);
#if (PERFORMANCE == 1)
if (zro) {
vec4 w = vec4(0.0);
w.x = (1.0 - pp.x) * (1.0 - pp.y);
w.y = pp.x * (1.0 - pp.y);
w.z = (1.0 - pp.x) * pp.y;
w.w = pp.x * pp.y;
pix.r = clamp(dot(w, vec4(fL, gL, jL, kL)), 0.0, 1.0);
return;
}
#elif (PERFORMANCE == 0)
dirR = zro ? 1.0 : dirR;
dir.x = zro ? 1.0 : dir.x;
#endif
dir *= vec2(dirR);
// Transform from {0 to 2} to {0 to 1} range, and shape with square.
len = len * 0.5;
len *= len;
// Stretch kernel {1.0 vert|horz, to sqrt(2.0) on diagonal}.
float stretch = dot(dir,dir) / (max(abs(dir.x), abs(dir.y)));
// Anisotropic length after rotation,
// x := 1.0 lerp to 'stretch' on edges
// y := 1.0 lerp to 2x on edges
vec2 len2 = vec2(1. +(stretch-1.0)*len, 1. -.5 * len);
// Based on the amount of 'edge',
// the window shifts from +/-{sqrt(2.0) to slightly beyond 2.0}.
float lob = .5 - .29 * len;
// Set distance^2 clipping point to the end of the adjustable window.
float clp = 1./lob;
//------------------------------------------------------------------------------------------------------------------------------
// Accumulation mixed with min/max of 4 nearest.
// b c
// e f g h
// i j k l
// n o
// Accumulation.
vec3 aC = vec3(0);
float aW = 0.;
FsrEasuTapF(aC, aW, vec2( 0.,-1.)-pp, dir, len2, lob, clp, bC);
FsrEasuTapF(aC, aW, vec2( 1.,-1.)-pp, dir, len2, lob, clp, cC);
FsrEasuTapF(aC, aW, vec2(-1., 1.)-pp, dir, len2, lob, clp, iC);
FsrEasuTapF(aC, aW, vec2( 0., 1.)-pp, dir, len2, lob, clp, jC);
FsrEasuTapF(aC, aW, vec2( 0., 0.)-pp, dir, len2, lob, clp, fC);
FsrEasuTapF(aC, aW, vec2(-1., 0.)-pp, dir, len2, lob, clp, eC);
FsrEasuTapF(aC, aW, vec2( 1., 1.)-pp, dir, len2, lob, clp, kC);
FsrEasuTapF(aC, aW, vec2( 2., 1.)-pp, dir, len2, lob, clp, lC);
FsrEasuTapF(aC, aW, vec2( 2., 0.)-pp, dir, len2, lob, clp, hC);
FsrEasuTapF(aC, aW, vec2( 1., 0.)-pp, dir, len2, lob, clp, gC);
FsrEasuTapF(aC, aW, vec2( 1., 2.)-pp, dir, len2, lob, clp, oC);
FsrEasuTapF(aC, aW, vec2( 0., 2.)-pp, dir, len2, lob, clp, nC);
//------------------------------------------------------------------------------------------------------------------------------
// Normalize and dering.
#if (PERFORMANCE == 1)
pix = aC/aW;
#elif (PERFORMANCE == 0)
vec3 min4 = min(min(fC,gC),min(jC,kC));
vec3 max4 = max(max(fC,gC),max(jC,kC));
pix=min(max4,max(min4,aC/aW));
#endif
}
void EASU( out vec4 fragColor, in vec2 fragCoord )
{
vec3 c;
vec4 con0,con1,con2,con3;
// "rendersize" refers to size of source image before upscaling.
vec2 rendersize = u_viewsize;
FsrEasuCon(
con0, con1, con2, con3, rendersize, rendersize, rendersize
);
FsrEasuF(c, fragCoord, con0, con1, con2, con3);
fragColor = vec4(xyz_to_rgb_2717090884(c.xyz), 1);
}
vec4 getPixel(vec2 pos) {
vec2 coord = (pos + .5) / u_viewsize;
coord.y = 1.0 - coord.y;
return texture2D(u_texture, coord);
}
vec4 fsr_easu_2717090884(vec2 uv) {
vec4 e = getPixel(gl_FragCoord.xy);
vec4 e_xyz = vec4(rgb_to_xyz_2717090884(e.rgb), 1);
EASU(e_xyz, (gl_FragCoord.xy + 0.5) / u_viewsize);
// fetch a 3x3 neighborhood around the pixel 'e',
// a b c
// d(e)f
// g h i
vec3 a = getPixel(gl_FragCoord.xy + vec2(-1.0,-1.0)).rgb;
vec3 b = getPixel(gl_FragCoord.xy + vec2( 0.0,-1.0)).rgb;
vec3 c = getPixel(gl_FragCoord.xy + vec2( 1.0,-1.0)).rgb;
vec3 f = getPixel(gl_FragCoord.xy + vec2( 1.0, 0.0)).rgb;
vec3 g = getPixel(gl_FragCoord.xy + vec2(-1.0, 1.0)).rgb;
vec3 h = getPixel(gl_FragCoord.xy + vec2( 0.0, 1.0)).rgb;
vec3 d = getPixel(gl_FragCoord.xy + vec2(-1.0, 0.0)).rgb;
vec3 i = getPixel(gl_FragCoord.xy + vec2( 1.0, 1.0)).rgb;;
// Soft min and max.
// a b c b
// d e f * 0.5 + d e f * 0.5
// g h i h
// These are 2.0x bigger (factored out the extra multiply).
vec3 mnRGB = min(min(min(d, e.rgb), min(f, b)), h);
vec3 mnRGB2 = min(mnRGB, min(min(a, c), min(g, i)));
mnRGB += mnRGB2;
vec3 mxRGB = max(max(max(d, e.rgb), max(f, b)), h);
vec3 mxRGB2 = max(mxRGB, max(max(a, c), max(g, i)));
mxRGB += mxRGB2;
// Smooth minimum distance to signal limit divided by smooth max.
vec3 rcpMRGB = 1.0 / mxRGB;
vec3 ampRGB = clamp(min(mnRGB, 2.0 - mxRGB) * rcpMRGB, 0.0, 1.0);
// Shaping amount of sharpening.
ampRGB = inversesqrt(ampRGB);
float peak = -3.0 * clamp(CONTRAST, 0.0, 1.0) + 8.0;
vec3 wRGB = -(1.0 / (ampRGB * peak));
vec3 rcpWeightRGB = 1.0 / (4.0 * wRGB + 1.0);
// 0 w 0
// Filter shape: w 1 w
// 0 w 0
vec3 window = (b + d) + (f + h);
vec3 outColor = clamp((window * wRGB + e.rgb) * rcpWeightRGB, 0.0, 1.0);
return vec4(mix(e.rgb, outColor, SHARPENING), e.a);
}
// https://github.com/glslify/glslify#exporting-a-glsl-module
#define DIFF 1.0
#define RADIUS 4.0
void bilateral_iter_3977570374(vec2 random_dir, vec2 radius, float diff, vec4 pixel, vec2 uv, inout vec3 result, inout float totalWeight)
{
vec2 dir = random_dir * radius;
vec3 randomPixel = texture2D(u_texture, uv + dir).xyz;
vec3 delta = randomPixel - pixel.rgb;
float weight = exp(-dot(delta, delta) / diff);
result += randomPixel * weight;
totalWeight += weight;
}
vec4 bilateral(vec2 uv)
{
vec2 radius = (RADIUS / u_viewsize);
float diff = DIFF / 255.0;
vec4 pixel = texture2D(u_texture, uv);
vec3 result = vec3(0.0, 0.0, 0.0);
float totalWeight = 0.0;
// uroll loop and substitute precalculated random vectors for GLSL 1.0 ES:
bilateral_iter_3977570374(vec2(-0.886051297,0.447155535), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(0.270759493,0.537728608), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(-0.896959424,0.440607518), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(-0.804274619,0.125076547), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(0.373693645,0.240383312), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(-0.850325704,-0.192106694), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(-0.453608066,0.889671504), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(-0.280496657,0.206442386), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(0.840040743,-0.36367026), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(-0.151598319,-0.884027064), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(-0.221440807,0.593896627), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(-0.797481239,-0.243254974), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(0.48824361,0.225083455), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(-0.0387817062,0.838459492), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(0.92897892,-0.133588716), radius, diff, pixel, uv, result, totalWeight);
bilateral_iter_3977570374(vec2(-0.693672359,-0.706737161), radius, diff, pixel, uv, result, totalWeight);
result = result / totalWeight;
return vec4(result, pixel.a);
}
// https://github.com/glslify/glslify#exporting-a-glsl-module
vec3 rgb2hsv(vec3 c)
{
vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);
vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));
vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));
float d = q.x - min(q.w, q.y);
float e = 1.0e-10;
return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);
}
vec3 hsv2rgb(vec3 c)
{
vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);
vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);
return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);
}
vec4 light_correction_1117569599(vec4 c, float s)
{
vec3 hsv = rgb2hsv(c.rgb);
hsv.y = pow(hsv.y, pow(s, -0.5));
hsv.z = pow(hsv.z, s);
vec3 rgb = hsv2rgb(hsv);
return vec4(rgb, c.a);
}
// https://github.com/glslify/glslify#exporting-a-glsl-module
void main() {
vec4 c;
if (u_filters.x == 1.)
c = fsr_easu_2717090884(v_tex_uv);
else if (u_filters.x == 2.)
c = bilateral(v_tex_uv);
else
c = texture2D(u_texture, v_tex_uv);
if (u_filters.y != 1.)
c = light_correction_1117569599(c, u_filters.y);
gl_FragColor = c;
}`,ip=(r,o,s)=>{const u=r.createProgram();return r.attachShader(u,o),r.attachShader(u,s),r.linkProgram(u),r.useProgram(u),u},Va=(r,o,s)=>{const u=r.createShader(o);return r.shaderSource(u,s),r.compileShader(u),u},op=r=>{const o=r.createTexture();return r.bindTexture(r.TEXTURE_2D,o),r.texImage2D(r.TEXTURE_2D,0,r.RGB,1,1,0,r.RGB,r.UNSIGNED_BYTE,null),r.texParameteri(r.TEXTURE_2D,r.TEXTURE_WRAP_S,r.CLAMP_TO_EDGE),r.texParameteri(r.TEXTURE_2D,r.TEXTURE_WRAP_T,r.CLAMP_TO_EDGE),r.texParameteri(r.TEXTURE_2D,r.TEXTURE_MIN_FILTER,r.NEAREST),r.texParameteri(r.TEXTURE_2D,r.TEXTURE_MAG_FILTER,r.LINEAR),r.bindTexture(r.TEXTURE_2D,null),o},Ka=(r,o)=>{let s=0,u=1;const g=document.createElement("canvas"),v=g.captureStream(30),E=g.getContext("webgl"),y=Va(E,E.VERTEX_SHADER,rp),I=Va(E,E.FRAGMENT_SHADER,np),L=ip(E,y,I),U=op(E);E.bindTexture(E.TEXTURE_2D,U);const V=E.getAttribLocation(L,"a_position"),$=E.createBuffer();E.bindBuffer(E.ARRAY_BUFFER,$),E.bufferData(E.ARRAY_BUFFER,new Float32Array([-1,-1,1,-1,-1,1,-1,1,1,-1,1,1]),E.STATIC_DRAW),E.enableVertexAttribArray(V),E.vertexAttribPointer(V,2,E.FLOAT,!1,0,0);const Z=E.getUniformLocation(L,"u_viewsize"),K=E.getUniformLocation(L,"u_filters");E.uniform2fv(K,new Float32Array([s,u])),wn(r).then(se=>{const ge=se.requestVideoFrameCallback?.bind(se)||Ci.requestAnimationFrame;(function Q(){se.ended||!v.active||(ge(Q),E.texImage2D(E.TEXTURE_2D,0,E.RGBA,E.RGBA,E.UNSIGNED_BYTE,se),(g.width!==se.videoWidth||g.height!==se.videoHeight)&&(E.viewport(0,0,g.width=se.videoWidth,g.height=se.videoHeight),E.uniform2fv(Z,new Float32Array([g.width,g.height]))),E.drawArrays(E.TRIANGLES,0,6))})()}),E.deleteProgram(L),E.deleteShader(I),E.deleteShader(y);const Le={stream:v,denoise(se){E.uniform2fv(K,new Float32Array([s=se,u]))},exposureCompensation(se){E.uniform2fv(K,new Float32Array([s,u=se]))}};if(o)for(const[se,ge]of Object.entries(o))Le[se](ge);return Le},ap=typeof screen<"u"&&screen.height>screen.width,xn={facingMode:"user",width:{min:640,ideal:1280,max:1920},height:{min:480,ideal:720,max:1080},resizeMode:{ideal:"crop-and-scale"}};ap&&(delete xn.width,delete xn.height);class sp{constructor(o){ee(this,"_stream",null),ee(this,"_constraints"),ee(this,"_preferences",{}),ee(this,"_enhancer",null),ee(this,"kind","stream"),this._constraints={...xn,...o}}get active(){return!!this._stream}denoise(o){this._preferences.denoise=Number(o),this._enhancer?.denoise(this._preferences.denoise)}setExposureCompensation(o){this._preferences.exposureCompensation=o,this._enhancer?.exposureCompensation(this._preferences.exposureCompensation)}async start(){return await(this._stream??(this._stream=Ha(this._constraints))),this}async*[Symbol.asyncIterator](o){const s=await(this._stream??(this._stream=Ha(this._constraints))),u=this._enhancer=Di(this._preferences)?Ka(s,this._preferences):null;let g=new Sn(u?u.stream:s)[Symbol.asyncIterator]({horizontalFlip:!0,...o}),v;for(;;){if(!this._enhancer&&Di(this._preferences)){const I=this._enhancer=Ka(s,this._preferences);g=new Sn(I.stream)[Symbol.asyncIterator]({horizontalFlip:!0,...o})}this._enhancer&&!Di(this._preferences)&&(this._enhancer.stream.getTracks().forEach(I=>I.stop()),this._enhancer=null,g=new Sn(s)[Symbol.asyncIterator]({horizontalFlip:!0,...o}));const{done:E,value:y}=await g.next(v);if(E)break;v=yield y}this.stop()}stop(){this._stream&&this._stream.then(o=>o.getTracks().forEach(s=>s.stop())),this._enhancer&&this._enhancer.stream.getTracks().forEach(o=>o.stop()),this._stream=null,this._enhancer=null}}const Ha=async r=>{if(typeof navigator.mediaDevices>"u")throw new Error(`SecureContext is required to access webcam
It\u2018s likely you need to set up HTTPS/TLS for your website
See https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia#Encryption_based_security for details `);return await navigator.mediaDevices.getUserMedia({video:r})},Di=r=>typeof r.exposureCompensation=="number"&&r.exposureCompensation!==1||r.denoise===1||r.denoise===2,lp={createVideoElement:wn,createCanvas:Fi};let up="useandom-26T198340PX75pxJACKVERYMINDBUSHWOLF_GQZbfghjklqvwyzrict",Ya=(r=21)=>{let o="",s=r;for(;s--;)o+=up[Math.random()*64|0];return o};const Xa="KGZ1bmN0aW9uKCl7InVzZSBzdHJpY3QiO3ZhciBzPVVpbnQ4QXJyYXkseD1VaW50MTZBcnJheSxPPVVpbnQzMkFycmF5LEg9bmV3IHMoWzAsMCwwLDAsMCwwLDAsMCwxLDEsMSwxLDIsMiwyLDIsMywzLDMsMyw0LDQsNCw0LDUsNSw1LDUsMCwwLDAsMF0pLEk9bmV3IHMoWzAsMCwwLDAsMSwxLDIsMiwzLDMsNCw0LDUsNSw2LDYsNyw3LDgsOCw5LDksMTAsMTAsMTEsMTEsMTIsMTIsMTMsMTMsMCwwXSksbDE9bmV3IHMoWzE2LDE3LDE4LDAsOCw3LDksNiwxMCw1LDExLDQsMTIsMywxMywyLDE0LDEsMTVdKSxKPWZ1bmN0aW9uKHIsdCl7Zm9yKHZhciBhPW5ldyB4KDMxKSxuPTA7bjwzMTsrK24pYVtuXT10Kz0xPDxyW24tMV07Zm9yKHZhciB2PW5ldyBPKGFbMzBdKSxuPTE7bjwzMDsrK24pZm9yKHZhciBpPWFbbl07aTxhW24rMV07KytpKXZbaV09aS1hW25dPDw1fG47cmV0dXJuW2Esdl19LEs9SihILDIpLFE9S1swXSxjMT1LWzFdO1FbMjhdPTI1OCxjMVsyNThdPTI4O2Zvcih2YXIgczE9SihJLDApLGQxPXMxWzBdLFU9bmV3IHgoMzI3NjgpLHU9MDt1PDMyNzY4OysrdSl7dmFyIEM9KHUmNDM2OTApPj4+MXwodSYyMTg0NSk8PDE7Qz0oQyY1MjQyOCk+Pj4yfChDJjEzMTA3KTw8MixDPShDJjYxNjgwKT4+PjR8KEMmMzg1NSk8PDQsVVt1XT0oKEMmNjUyODApPj4+OHwoQyYyNTUpPDw4KT4+PjF9Zm9yKHZhciB6PWZ1bmN0aW9uKHQsYSxuKXtmb3IodmFyIHY9dC5sZW5ndGgsaT0wLGM9bmV3IHgoYSk7aTx2OysraSl0W2ldJiYrK2NbdFtpXS0xXTt2YXIgZj1uZXcgeChhKTtmb3IoaT0wO2k8YTsrK2kpZltpXT1mW2ktMV0rY1tpLTFdPDwxO3ZhciBvO2lmKG4pe289bmV3IHgoMTw8YSk7dmFyIGU9MTUtYTtmb3IoaT0wO2k8djsrK2kpaWYodFtpXSlmb3IodmFyIGw9aTw8NHx0W2ldLGI9YS10W2ldLGQ9Zlt0W2ldLTFdKys8PGIseT1kfCgxPDxiKS0xO2Q8PXk7KytkKW9bVVtkXT4+PmVdPWx9ZWxzZSBmb3Iobz1uZXcgeCh2KSxpPTA7aTx2OysraSl0W2ldJiYob1tpXT1VW2ZbdFtpXS0xXSsrXT4+PjE1LXRbaV0pO3JldHVybiBvfSxCPW5ldyBzKDI4OCksdT0wO3U8MTQ0OysrdSlCW3VdPTg7Zm9yKHZhciB1PTE0NDt1PDI1NjsrK3UpQlt1XT05O2Zvcih2YXIgdT0yNTY7dTwyODA7Kyt1KUJbdV09Nztmb3IodmFyIHU9MjgwO3U8Mjg4OysrdSlCW3VdPTg7Zm9yKHZhciBWPW5ldyBzKDMyKSx1PTA7dTwzMjsrK3UpVlt1XT01O3ZhciBnMT16KEIsOSwxKSx3MT16KFYsNSwxKSxXPWZ1bmN0aW9uKHIpe2Zvcih2YXIgdD1yWzBdLGE9MTthPHIubGVuZ3RoOysrYSlyW2FdPnQmJih0PXJbYV0pO3JldHVybiB0fSxoPWZ1bmN0aW9uKHIsdCxhKXt2YXIgbj10Lzh8MDtyZXR1cm4ocltuXXxyW24rMV08PDgpPj4odCY3KSZhfSxYPWZ1bmN0aW9uKHIsdCl7dmFyIGE9dC84fDA7cmV0dXJuKHJbYV18clthKzFdPDw4fHJbYSsyXTw8MTYpPj4odCY3KX0saDE9ZnVuY3Rpb24ocil7cmV0dXJuKHIrNykvOHwwfSxqPWZ1bmN0aW9uKHIsdCxhKXsodD09bnVsbHx8dDwwKSYmKHQ9MCksKGE9PW51bGx8fGE+ci5sZW5ndGgpJiYoYT1yLmxlbmd0aCk7dmFyIG49bmV3KHIuQllURVNfUEVSX0VMRU1FTlQ9PTI/eDpyLkJZVEVTX1BFUl9FTEVNRU5UPT00P086cykoYS10KTtyZXR1cm4gbi5zZXQoci5zdWJhcnJheSh0LGEpKSxufSxtMT1bInVuZXhwZWN0ZWQgRU9GIiwiaW52YWxpZCBibG9jayB0eXBlIiwiaW52YWxpZCBsZW5ndGgvbGl0ZXJhbCIsImludmFsaWQgZGlzdGFuY2UiLCJzdHJlYW0gZmluaXNoZWQiLCJubyBzdHJlYW0gaGFuZGxlciIsLCJubyBjYWxsYmFjayIsImludmFsaWQgVVRGLTggZGF0YSIsImV4dHJhIGZpZWxkIHRvbyBsb25nIiwiZGF0ZSBub3QgaW4gcmFuZ2UgMTk4MC0yMDk5IiwiZmlsZW5hbWUgdG9vIGxvbmciLCJzdHJlYW0gZmluaXNoaW5nIiwiaW52YWxpZCB6aXAgZGF0YSJdLGc9ZnVuY3Rpb24ocix0LGEpe3ZhciBuPW5ldyBFcnJvcih0fHxtMVtyXSk7aWYobi5jb2RlPXIsRXJyb3IuY2FwdHVyZVN0YWNrVHJhY2UmJkVycm9yLmNhcHR1cmVTdGFja1RyYWNlKG4sZyksIWEpdGhyb3cgbjtyZXR1cm4gbn0sYjE9ZnVuY3Rpb24ocix0LGEpe3ZhciBuPXIubGVuZ3RoO2lmKCFufHxhJiZhLmYmJiFhLmwpcmV0dXJuIHR8fG5ldyBzKDApO3ZhciB2PSF0fHxhLGk9IWF8fGEuaTthfHwoYT17fSksdHx8KHQ9bmV3IHMobiozKSk7dmFyIGM9ZnVuY3Rpb24odTEpe3ZhciB2MT10Lmxlbmd0aDtpZih1MT52MSl7dmFyIGYxPW5ldyBzKE1hdGgubWF4KHYxKjIsdTEpKTtmMS5zZXQodCksdD1mMX19LGY9YS5mfHwwLG89YS5wfHwwLGU9YS5ifHwwLGw9YS5sLGI9YS5kLGQ9YS5tLHk9YS5uLFI9bio4O2Rve2lmKCFsKXtmPWgocixvLDEpO3ZhciBZPWgocixvKzEsMyk7aWYobys9MyxZKWlmKFk9PTEpbD1nMSxiPXcxLGQ9OSx5PTU7ZWxzZSBpZihZPT0yKXt2YXIgUz1oKHIsbywzMSkrMjU3LHIxPWgocixvKzEwLDE1KSs0LHQxPVMraChyLG8rNSwzMSkrMTtvKz0xNDtmb3IodmFyIEY9bmV3IHModDEpLEc9bmV3IHMoMTkpLHc9MDt3PHIxOysrdylHW2wxW3ddXT1oKHIsbyt3KjMsNyk7bys9cjEqMztmb3IodmFyIGExPVcoRyksQjE9KDE8PGExKS0xLFIxPXooRyxhMSwxKSx3PTA7dzx0MTspe3ZhciBuMT1SMVtoKHIsbyxCMSldO28rPW4xJjE1O3ZhciBwPW4xPj4+NDtpZihwPDE2KUZbdysrXT1wO2Vsc2V7dmFyIFQ9MCxOPTA7Zm9yKHA9PTE2PyhOPTMraChyLG8sMyksbys9MixUPUZbdy0xXSk6cD09MTc/KE49MytoKHIsbyw3KSxvKz0zKTpwPT0xOCYmKE49MTEraChyLG8sMTI3KSxvKz03KTtOLS07KUZbdysrXT1UfX12YXIgaTE9Ri5zdWJhcnJheSgwLFMpLF89Ri5zdWJhcnJheShTKTtkPVcoaTEpLHk9VyhfKSxsPXooaTEsZCwxKSxiPXooXyx5LDEpfWVsc2UgZygxKTtlbHNle3ZhciBwPWgxKG8pKzQsTD1yW3AtNF18cltwLTNdPDw4LFo9cCtMO2lmKFo+bil7aSYmZygwKTticmVha312JiZjKGUrTCksdC5zZXQoci5zdWJhcnJheShwLFopLGUpLGEuYj1lKz1MLGEucD1vPVoqOCxhLmY9Zjtjb250aW51ZX1pZihvPlIpe2kmJmcoMCk7YnJlYWt9fXYmJmMoZSsxMzEwNzIpO2Zvcih2YXIgWTE9KDE8PGQpLTEsRjE9KDE8PHkpLTEsJD1vOzskPW8pe3ZhciBUPWxbWChyLG8pJlkxXSxrPVQ+Pj40O2lmKG8rPVQmMTUsbz5SKXtpJiZnKDApO2JyZWFrfWlmKFR8fGcoMiksazwyNTYpdFtlKytdPWs7ZWxzZSBpZihrPT0yNTYpeyQ9byxsPW51bGw7YnJlYWt9ZWxzZXt2YXIgbzE9ay0yNTQ7aWYoaz4yNjQpe3ZhciB3PWstMjU3LE09SFt3XTtvMT1oKHIsbywoMTw8TSktMSkrUVt3XSxvKz1NfXZhciBQPWJbWChyLG8pJkYxXSxEPVA+Pj40O1B8fGcoMyksbys9UCYxNTt2YXIgXz1kMVtEXTtpZihEPjMpe3ZhciBNPUlbRF07Xys9WChyLG8pJigxPDxNKS0xLG8rPU19aWYobz5SKXtpJiZnKDApO2JyZWFrfXYmJmMoZSsxMzEwNzIpO2Zvcih2YXIgZTE9ZStvMTtlPGUxO2UrPTQpdFtlXT10W2UtX10sdFtlKzFdPXRbZSsxLV9dLHRbZSsyXT10W2UrMi1fXSx0W2UrM109dFtlKzMtX107ZT1lMX19YS5sPWwsYS5wPSQsYS5iPWUsYS5mPWYsbCYmKGY9MSxhLm09ZCxhLmQ9YixhLm49eSl9d2hpbGUoIWYpO3JldHVybiBlPT10Lmxlbmd0aD90OmoodCwwLGUpfSx5MT1uZXcgcygwKSxFPWZ1bmN0aW9uKHIsdCl7cmV0dXJuIHJbdF18clt0KzFdPDw4fSxtPWZ1bmN0aW9uKHIsdCl7cmV0dXJuKHJbdF18clt0KzFdPDw4fHJbdCsyXTw8MTZ8clt0KzNdPDwyNCk+Pj4wfSxxPWZ1bmN0aW9uKHIsdCl7cmV0dXJuIG0ocix0KSttKHIsdCs0KSo0Mjk0OTY3Mjk2fTtmdW5jdGlvbiBFMShyLHQpe3JldHVybiBiMShyLHQpfXZhciBBPXR5cGVvZiBUZXh0RGVjb2RlcjwidSImJm5ldyBUZXh0RGVjb2RlcixwMT0wO3RyeXtBLmRlY29kZSh5MSx7c3RyZWFtOiEwfSkscDE9MX1jYXRjaHt9dmFyIEMxPWZ1bmN0aW9uKHIpe2Zvcih2YXIgdD0iIixhPTA7Oyl7dmFyIG49clthKytdLHY9KG4+MTI3KSsobj4yMjMpKyhuPjIzOSk7aWYoYSt2PnIubGVuZ3RoKXJldHVyblt0LGoocixhLTEpXTt2P3Y9PTM/KG49KChuJjE1KTw8MTh8KHJbYSsrXSY2Myk8PDEyfChyW2ErK10mNjMpPDw2fHJbYSsrXSY2MyktNjU1MzYsdCs9U3RyaW5nLmZyb21DaGFyQ29kZSg1NTI5NnxuPj4xMCw1NjMyMHxuJjEwMjMpKTp2JjE/dCs9U3RyaW5nLmZyb21DaGFyQ29kZSgobiYzMSk8PDZ8clthKytdJjYzKTp0Kz1TdHJpbmcuZnJvbUNoYXJDb2RlKChuJjE1KTw8MTJ8KHJbYSsrXSY2Myk8PDZ8clthKytdJjYzKTp0Kz1TdHJpbmcuZnJvbUNoYXJDb2RlKG4pfX07ZnVuY3Rpb24gUzEocix0KXtpZih0KXtmb3IodmFyIGE9IiIsbj0wO248ci5sZW5ndGg7bis9MTYzODQpYSs9U3RyaW5nLmZyb21DaGFyQ29kZS5hcHBseShudWxsLHIuc3ViYXJyYXkobixuKzE2Mzg0KSk7cmV0dXJuIGF9ZWxzZXtpZihBKXJldHVybiBBLmRlY29kZShyKTt2YXIgdj1DMShyKSxpPXZbMF0sYz12WzFdO3JldHVybiBjLmxlbmd0aCYmZyg4KSxpfX12YXIgXzE9ZnVuY3Rpb24ocix0KXtyZXR1cm4gdCszMCtFKHIsdCsyNikrRShyLHQrMjgpfSx4MT1mdW5jdGlvbihyLHQsYSl7dmFyIG49RShyLHQrMjgpLHY9UzEoci5zdWJhcnJheSh0KzQ2LHQrNDYrbiksIShFKHIsdCs4KSYyMDQ4KSksaT10KzQ2K24sYz1tKHIsdCsyMCksZj1hJiZjPT00Mjk0OTY3Mjk1P1QxKHIsaSk6W2MsbShyLHQrMjQpLG0ocix0KzQyKV0sbz1mWzBdLGU9ZlsxXSxsPWZbMl07cmV0dXJuW0Uocix0KzEwKSxvLGUsdixpK0Uocix0KzMwKStFKHIsdCszMiksbF19LFQxPWZ1bmN0aW9uKHIsdCl7Zm9yKDtFKHIsdCkhPTE7dCs9NCtFKHIsdCsyKSk7cmV0dXJuW3Eocix0KzEyKSxxKHIsdCs0KSxxKHIsdCsyMCldfTtmdW5jdGlvbiBrMShyLHQpe2Zvcih2YXIgYT17fSxuPXIubGVuZ3RoLTIyO20ocixuKSE9MTAxMDEwMjU2Oy0tbikoIW58fHIubGVuZ3RoLW4+NjU1NTgpJiZnKDEzKTt2YXIgdj1FKHIsbis4KTtpZighdilyZXR1cm57fTt2YXIgaT1tKHIsbisxNiksYz1pPT00Mjk0OTY3Mjk1O2MmJihuPW0ocixuLTEyKSxtKHIsbikhPTEwMTA3NTc5MiYmZygxMyksdj1tKHIsbiszMiksaT1tKHIsbis0OCkpO2Zvcih2YXIgZj10JiZ0LmZpbHRlcixvPTA7bzx2Oysrbyl7dmFyIGU9eDEocixpLGMpLGw9ZVswXSxiPWVbMV0sZD1lWzJdLHk9ZVszXSxSPWVbNF0sWT1lWzVdLFM9XzEocixZKTtpPVIsKCFmfHxmKHtuYW1lOnksc2l6ZTpiLG9yaWdpbmFsU2l6ZTpkLGNvbXByZXNzaW9uOmx9KSkmJihsP2w9PTg/YVt5XT1FMShyLnN1YmFycmF5KFMsUytiKSxuZXcgcyhkKSk6ZygxNCwidW5rbm93biBjb21wcmVzc2lvbiB0eXBlICIrbCk6YVt5XT1qKHIsUyxTK2IpKX1yZXR1cm4gYX1jb25zdCB6MT1yPT5rMShyLHtmaWx0ZXI6KHtuYW1lOnR9KT0+ISh0LnN0YXJ0c1dpdGgoIl9fTUFDT1NYLyIpfHx0LmluY2x1ZGVzKCIuRFNfU3RvcmUiKSl9KTthZGRFdmVudExpc3RlbmVyKCJtZXNzYWdlIiwoe2RhdGE6cn0pPT57bGV0IHQ7dHJ5e3Q9e2lkOnIuaWQsZGF0YTp6MShyLmRhdGEpfX1jYXRjaChhKXt0PXtpZDpyLmlkLGVycm9yOmEubWVzc2FnZX19cG9zdE1lc3NhZ2UodCl9KX0pKCk7Cg==",Za=typeof window<"u"&&window.Blob&&new Blob([atob(Xa)],{type:"text/javascript;charset=utf-8"});function cp(){let r;try{if(r=Za&&(window.URL||window.webkitURL).createObjectURL(Za),!r)throw"";return new Worker(r)}catch{return new Worker("data:application/javascript;base64,"+Xa)}finally{r&&(window.URL||window.webkitURL).revokeObjectURL(r)}}let jr;const fp=async r=>new Promise((o,s)=>{const u=Ya(),g=new Uint8Array(r),v={id:u,data:g};jr||(jr=new cp);const E=({data:y})=>{y.id===v.id&&(jr.removeEventListener("message",E),"error"in y&&s(new Error(y.error)),"data"in y&&o(y.data))};jr.addEventListener("message",E),jr.postMessage(v,[r])}),Ni="/";class Qa{constructor(o){ee(this,"_source",null),ee(this,"_fs",null),ee(this,"_mountpoint",Ni),ee(this,"_data",{}),this._source=o}static async preload(o,s){if(Array.isArray(o)){const g=s?.onProgress;return await Promise.all(o.map((v,E)=>{const y=g?{onProgress:(...I)=>g(E,...I)}:{};return this.preload(v,y)}))}const u=new this(o);return await u.load(s),u}async _fetch(o,s){return await Uh(o,{},s).then(u=>{if(u.ok)return u.blob();throw new Error(`Failed to fetch ${o.url} ${u.status} (${u.statusText})`)}).then(u=>{if(u.size>0)return u;throw new Error(`The source must not be empty. Received ${u.size} bytes size source.`)})}async _unzip(o){if(!o.type.includes("zip"))throw new TypeError(`The source type must be "application/zip"-like. Received: "${o.type}".`);return await o.arrayBuffer().then(fp).then(s=>Object.entries(s)).then(s=>Object.fromEntries(s))}async load(o){let s=this._source;return typeof s=="string"&&(s=new Request(s)),s instanceof Request&&(s=await this._fetch(s,o)),s instanceof Blob&&(s=await this._unzip(s)),s instanceof Object&&s.constructor===Object&&await Promise.all(Object.entries(s).map(([u,g])=>this.writeFile(u,g))),this._source=null,this._data}_fsWriteFile(o,s){this._fs&&(o=`${this._mountpoint}${o.startsWith("/")?o.substring(1):o}`,this._fs.writeFile(o,s))}async writeFile(o,s){const u=new Uint8Array(s instanceof Blob?await s.arrayBuffer():s);this._data[o]=u,this._fsWriteFile(o,this._data[o])}mount(o,s=Ni){this._fs=o,this._mountpoint=s.endsWith("/")?s:`${s}/`,Object.entries(this._data).forEach(([u,g])=>this._fsWriteFile(u,g))}unmount(){this._fs=null,this._mountpoint=Ni}}var dp=Object.defineProperty,hp=Object.getOwnPropertyDescriptor,pp=(r,o,s,u)=>{for(var g=u>1?void 0:u?hp(o,s):o,v=r.length-1,E;v>=0;v--)(E=r[v])&&(g=(u?E(o,s,g):E(g))||g);return u&&g&&dp(o,s,g),g};class qa{constructor(o){ee(this,"name",`effects/${Ya()}`),ee(this,"_player",null),ee(this,"_resource"),this._resource=new mp(o)}static async preload(o,s){if(Array.isArray(o)){const g=s?.onProgress;return await Promise.all(o.map((v,E)=>{const y=g?{onProgress:(...I)=>g(E,...I)}:{};return this.preload(v,y)}))}const u=new this(o);return await u._load(s),u}async _load(o){await this._resource.load(o)}async _bind(o){await this._resource.load(),this._player=o,this._resource.mount(this._player.FS,this.name)}_unbind(){this._resource.unmount(),this._player=null}async writeFile(o,s){return this._resource.writeFile(o,s)}callJsMethod(o,s=""){if(!this._player){console.warn("The method won't evaluate: the effect is not applied to a player.");return}return this._player.callJsMethod(o,s)}async evalJs(o){if(!this._player){console.warn("The script won't evaluate: the effect is not applied to a player.");return}return await this._player.evalJs(o)}}pp([Da("Please, use Effect.evalJs() instead.")],qa.prototype,"callJsMethod",1);class mp extends Qa{async _unzip(o){let s=await super._unzip(o);const u=Object.keys(s).map(v=>v.split("/").find(Boolean)),g=u[0];return u.every(v=>v===g)&&(s=Object.fromEntries(Object.entries(s).map(([v,E])=>[v.replace(`${g}/`,""),E]))),s}}let bp=class{constructor(r){ee(this,"_resource"),this._resource=new Qa(r)}static async preload(r,o){if(Array.isArray(r)){const u=o?.onProgress;return await Promise.all(r.map((g,v)=>{const E=u?{onProgress:(...y)=>u(v,...y)}:{};return this.preload(g,E)}))}const s=new this(r);return await s._load(o),s}async _load(r){await this._resource.load(r)}async _bind(r){await this._resource.load(),this._resource.mount(r.FS)}};var gp=(()=>{var r=typeof document<"u"?document.currentScript?.src:void 0;return async function(o={}){var s,u=o,g,v,E=new Promise((e,t)=>{g=e,v=t}),y=!1;u.expectedDataFileDownloads??(u.expectedDataFileDownloads=0),u.expectedDataFileDownloads++,(()=>{var e=typeof ENVIRONMENT_IS_PTHREAD<"u"&&ENVIRONMENT_IS_PTHREAD,t=typeof ENVIRONMENT_IS_WASM_WORKER<"u"&&ENVIRONMENT_IS_WASM_WORKER;if(e||t)return;function n(i){typeof window=="object"?window.encodeURIComponent(window.location.pathname.substring(0,window.location.pathname.lastIndexOf("/"))+"/"):typeof process>"u"&&typeof location<"u"&&encodeURIComponent(location.pathname.substring(0,location.pathname.lastIndexOf("/"))+"/");var a="BanubaSDK.data",f="BanubaSDK.data",m=u.locateFile?u.locateFile(f,""):f,h=i.remote_package_size;function w(N,W,J,ce){u.dataFileDownloads??(u.dataFileDownloads={}),fetch(N).catch(ie=>Promise.reject(new Error(`Network Error: ${N}`,{cause:ie}))).then(ie=>{if(!ie.ok)return Promise.reject(new Error(`${ie.status}: ${ie.url}`));if(!ie.body&&ie.arrayBuffer)return ie.arrayBuffer().then(J);const he=ie.body.getReader(),be=()=>he.read().then(De).catch(Je=>Promise.reject(new Error(`Unexpected error while handling : ${ie.url} ${Je}`,{cause:Je}))),de=[],ye=ie.headers,Ce=Number(ye.get("Content-Length")??W);let oe=0;const De=({done:Je,value:st})=>{if(Je){const Ue=new Uint8Array(de.map(fe=>fe.length).reduce((fe,Se)=>fe+Se,0));let rt=0;for(const fe of de)Ue.set(fe,rt),rt+=fe.length;J(Ue.buffer)}else{de.push(st),oe+=st.length,u.dataFileDownloads[N]={loaded:oe,total:Ce};let Ue=0,rt=0;for(const fe of Object.values(u.dataFileDownloads))Ue+=fe.loaded,rt+=fe.total;return u.setStatus?.(`Downloading data... (${Ue}/${rt})`),be()}};return u.setStatus?.("Downloading data..."),be()})}var C=null,P=u.getPreloadedPackage?u.getPreloadedPackage(m,h):null;P||w(m,h,N=>{C?(C(N),C=null):P=N});function B(N){function W(be,de){if(!be)throw de+new Error().stack}N.FS_createPath("/","bnb_js",!0,!0),N.FS_createPath("/","bnb_prefabs",!0,!0),N.FS_create