UNPKG

@banuba/webar

Version:

Banuba WebAR SDK

503 lines (449 loc) 351 kB
var _m=Object.defineProperty,mm=(Me,ot,He)=>ot in Me?_m(Me,ot,{enumerable:!0,configurable:!0,writable:!0,value:He}):Me[ot]=He,re=(Me,ot,He)=>(mm(Me,typeof ot!="symbol"?ot+"":ot,He),He);(function(Me,ot){typeof exports=="object"&&typeof module<"u"?ot(exports):typeof define=="function"&&define.amd?define(["exports"],ot):(Me=typeof globalThis<"u"?globalThis:Me||self,ot(Me.BanubaSDK={}))})(this,function(Me){var ot,He;let su=0;const Oi=()=>su++,ji="KGZ1bmN0aW9uKCl7InVzZSBzdHJpY3QiO2FkZEV2ZW50TGlzdGVuZXIoIm1lc3NhZ2UiLCh7ZGF0YTp0fSk9Pntjb25zdCBzPXtpZDp0LmlkfTtzZXRUaW1lb3V0KHBvc3RNZXNzYWdlLHQudGltZW91dCxzKX0pfSkoKTsK",Ui=typeof window<"u"&&window.Blob&&new Blob([atob(ji)],{type:"text/javascript;charset=utf-8"});function lu(){let r;try{if(r=Ui&&(window.URL||window.webkitURL).createObjectURL(Ui),!r)throw"";return new Worker(r)}catch{return new Worker("data:application/javascript;base64,"+ji)}finally{r&&(window.URL||window.webkitURL).revokeObjectURL(r)}}let Hr;const Jn=new Map,Wi=(r,o)=>{const a=Oi(),u={id:a,timeout:o};return Jn.set(u.id,r),Hr||(Hr=new lu,Hr.onmessage=({data:b})=>{const _=Jn.get(b.id);Jn.delete(b.id),_()}),Hr.postMessage(u),a},$i=1e3/60,Yr=[];let Gi=0;const zi=r=>{const o=Oi();if(Yr.length===0){const a=performance.now(),u=$i-(a-Gi)%$i;Wi(()=>{const b=Gi=performance.now(),_=[...Yr];Yr.length=0,_.forEach(w=>w(b))},u)}return Yr.push(r),o},uu=Object.freeze(Object.defineProperty({__proto__:null,requestAnimationFrame:zi,setTimeout:Wi},Symbol.toStringTag,{value:"Module"})),cu=(...r)=>window.setTimeout(...r),Zr=new Map,fu=r=>{const o=window.requestAnimationFrame((...a)=>{Zr.delete(o),r(...a)});return Zr.set(o,r),o};typeof document<"u"&&document.addEventListener("visibilitychange",()=>{document.visibilityState!=="visible"&&Zr.forEach((r,o)=>{Zr.delete(o),cancelAnimationFrame(o),zi(r)})});const du=Object.freeze(Object.defineProperty({__proto__:null,requestAnimationFrame:fu,setTimeout:cu},Symbol.toStringTag,{value:"Module"})),hu=typeof 
document<"u"?document:{visibilityState:"hidden"},Vi=()=>hu.visibilityState==="visible"?du:uu,Fr=r=>Vi().requestAnimationFrame(r),Xi=(r,o)=>Vi().setTimeout(r,o),Ki=r=>Promise.resolve().then(r),eo={requestAnimationFrame:Fr,setTimeout:Xi},pu=Object.freeze(Object.defineProperty({__proto__:null,nextTick:Ki,requestAnimationFrame:Fr,setTimeout:Xi,timers:eo},Symbol.toStringTag,{value:"Module"})),bu=()=>new Promise(r=>Fr(r)),to=(r=-1)=>function(o,a,u){const b=u,_=b.value;return{...b,value:async function*(...w){const m=_.apply(this,w);let F=0,D=0;for(;;){const O=1e3/r,H=.1*O;for(;(D=performance.now())-F<O-H;)await bu();F=D;const{done:V,value:q}=await m.next();if(V)return q;const X=yield q;typeof X<"u"&&(r=X)}}}},Qr=async(r,o={})=>new Promise(a=>{const u=document.createElement("video");if(u.muted=!0,u.controls=!1,u.playsInline=!0,Object.assign(u,o),r instanceof globalThis.MediaStream)u.srcObject=r,u.addEventListener("ended",()=>u.srcObject=null,{once:!0}),r.addEventListener("inactive",()=>u.dispatchEvent(new CustomEvent("ended")),{once:!0});else{if(typeof r!="string"){const _=r=URL.createObjectURL(r);u.addEventListener("emptied",()=>URL.revokeObjectURL(_),{once:!0})}u.crossOrigin="anonymous",u.src=r,u.addEventListener("ended",()=>u.src="",{once:!0})}u.style.position="fixed",u.style.zIndex="-9999999",u.style.opacity="0.0000000001",document.body.appendChild(u),u.addEventListener("emptied",()=>u.remove(),{once:!0});const b=setInterval(()=>u.readyState,300);u.addEventListener("play",()=>clearInterval(b),{once:!0}),u.addEventListener("play",()=>a(u),{once:!0}),u.addEventListener("loadedmetadata",()=>u.play(),{once:!0})}),mu=r=>new Promise((o,a)=>{const u=document.createElement("img");u.onload=()=>o(u),u.onerror=a,u.crossOrigin="anonymous",u.src=typeof r=="string"?r:URL.createObjectURL(r)}),Hi=new Map,gu=(r,o,a)=>r*(1-a)+o*a,ro=r=>`webar::${r}:start`,no=r=>`webar::${r}:end`,oo=r=>{let o={internalName:r+":"+Math.random()};return performance.mark(ro(o.internalName)),o},io=r=>{const 
o=r.internalName;performance.mark(no(o));let a=performance.measure(o,ro(o),no(o));a||(a=performance.getEntriesByName(o)[0]),performance.clearMarks(ro(o)),performance.clearMarks(no(o)),performance.clearMeasures(o);const{duration:u}=a,b=o.split(":")[0];let{averagedDuration:_=0}=Hi.get(b)||{};return _=gu(_,u,.05),Hi.set(b,{averagedDuration:_}),{instantDuration:u,averagedDuration:_}},Yi=(r,o=a=>console.warn(a))=>function(a,u,b){const _=b.value;if(typeof _!="function")throw new TypeError("Only functions can be marked as deprecated");return{...b,value:function(...w){return o.call(this,`DEPRECATION: ${a.constructor.name}.${u}() is deprecated. ${r}`),_.call(this,...w)}}};let Dr=class{constructor(){re(this,"_emitter",new EventTarget)}addEventListener(r,o,a){this._emitter.addEventListener(r,o,a)}removeEventListener(r,o,a){this._emitter.removeEventListener(r,o,a)}dispatchEvent(r){return this._emitter.dispatchEvent(r)}removeAllEventListeners(){this._emitter=new EventTarget}};const yu=(r,o,a)=>fetch(r,o).then(u=>{if(!u.body)return u;let b=0;const _=Number(u.headers.get("content-length")||0),w=u.body.getReader();return new Response(new ReadableStream({async start(m){for(;;){const{done:F,value:D}=await w.read();if(F?b=_:b+=D.byteLength,a?.onProgress?.({total:_,transferred:b}),F)break;m.enqueue(D)}m.close()}}),u)}),Zi=()=>/Edge?\/(79|[89]\d|\d{3,})(\.\d+|)(\.\d+|)|Firefox\/(6[5-9]|[7-9]\d|\d{3,})\.\d+(\.\d+|)|Chrom(ium|e)\/(5[7-9]|[6-9]\d|\d{3,})\.\d+(\.\d+|)([\d.]+$|.*Safari\/(?![\d.]+ Edge\/[\d.]+$))|Maci.* Version\/(1[5-9]|[2-9]\d|\d{3,})\.\d+([,.]\d+|)( Mobile\/\w+|) Safari\/|Chrome.+OPR\/(4[4-9]|[5-9]\d|\d{3,})\.\d+\.\d+|(CPU[ +]OS|iPhone[ +]OS|CPU[ +]iPhone|CPU IPhone OS|CPU iPad OS)[ +]+(1[5-9]|[2-9]\d|\d{3,})[._]\d+([._]\d+|)|Mobile Safari.+OPR\/(7[2-9]|[89]\d|\d{3,})\.\d+\.\d+|Android.+Chrom(ium|e)\/(10[7-9]|1[1-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Android.+(UC? 
?Browser|UCWEB|U3)[ /]?(1[3-9]|[2-9]\d|\d{3,})\.\d+\.\d+|SamsungBrowser\/([7-9]|\d{2,})\.\d+|Android.+MQ{2}Browser\/(1[3-9]|[2-9]\d|\d{3,})(\.\d+|)(\.\d+|)|baidubrowser[\s/](1[3-9]|[2-9]\d|\d{3,})(\.\d+|)(\.\d+|)/.test(navigator.userAgent),vu=typeof window<"u"&&/^((?!chrome|android).)*safari/i.test(window.navigator?.userAgent),Qi=typeof OffscreenCanvas<"u"&&!vu,qi={alpha:!0,antialias:!1,depth:!1,desynchronized:!1,premultipliedAlpha:!1,preserveDrawingBuffer:!1,stencil:!1};let ne;const _u=(()=>{if(typeof window>"u"||!Zi()||(ne??(ne=ao().getContext("webgl2",qi)),ne===null))return!1;const r=ne.createTexture();ne.bindTexture(ne.TEXTURE_2D,r),ne.texImage2D(ne.TEXTURE_2D,0,ne.RGB,1,1,0,ne.RGB,ne.UNSIGNED_BYTE,null);const o=ne.createFramebuffer();ne.bindFramebuffer(ne.FRAMEBUFFER,o),ne.framebufferTexture2D(ne.FRAMEBUFFER,ne.COLOR_ATTACHMENT0,ne.TEXTURE_2D,r,0);const a=ne.getParameter(ne.IMPLEMENTATION_COLOR_READ_FORMAT);return ne.bindFramebuffer(ne.FRAMEBUFFER,null),ne.bindTexture(ne.TEXTURE_2D,null),ne.deleteFramebuffer(o),ne.deleteTexture(r),a===ne.RGB})(),wu=async(r,o,a,u="RGBA")=>{ne??(ne=ao().getContext("webgl2",qi)),ne.canvas.width=r.width,ne.canvas.height=r.height,u==="RGB"&&ne.pixelStorei(ne.PACK_ALIGNMENT,1);const b=ne.createTexture();ne.bindTexture(ne.TEXTURE_2D,b),ne.texParameteri(ne.TEXTURE_2D,ne.TEXTURE_MIN_FILTER,ne.NEAREST),ne.texParameteri(ne.TEXTURE_2D,ne.TEXTURE_MAG_FILTER,ne.LINEAR),ne.texImage2D(ne.TEXTURE_2D,0,ne[u],ne[u],ne.UNSIGNED_BYTE,r);const _=ne.createFramebuffer();ne.bindFramebuffer(ne.FRAMEBUFFER,_),ne.framebufferTexture2D(ne.FRAMEBUFFER,ne.COLOR_ATTACHMENT0,ne.TEXTURE_2D,b,0);const w=ne.createBuffer();ne.bindBuffer(ne.PIXEL_PACK_BUFFER,w),ne.bufferData(ne.PIXEL_PACK_BUFFER,o.byteLength,ne.STREAM_READ),ne.readPixels(a.x,a.y,a.width,a.height,ne[u],ne.UNSIGNED_BYTE,0),ne.bindBuffer(ne.PIXEL_PACK_BUFFER,null),ne.bindFramebuffer(ne.FRAMEBUFFER,null),ne.deleteFramebuffer(_),ne.bindTexture(ne.TEXTURE_2D,null),ne.deleteTexture(b);const 
m=ne.fenceSync(ne.SYNC_GPU_COMMANDS_COMPLETE,0);ne.flush(),await Eu(ne,m).finally(()=>ne.deleteSync(m)),ne.bindBuffer(ne.PIXEL_PACK_BUFFER,w),ne.getBufferSubData(ne.PIXEL_PACK_BUFFER,0,new DataView(o.buffer()),o.byteOffset,o.byteLength),ne.bindBuffer(ne.PIXEL_PACK_BUFFER,null),ne.deleteBuffer(w)},Eu=(r,o)=>new Promise((a,u)=>function b(){const _=r.clientWaitSync(o,0,0);if(_===r.WAIT_FAILED)return u(new Error("GPU operations complete wait failed"));if(_===r.CONDITION_SATISFIED||_===r.ALREADY_SIGNALED)return a();eo.setTimeout(b,2)}());function Su(r=256,o=128){const a=document.createElement("canvas");return a.width=r,a.height=o,a}function xu(r=256,o=128){return new OffscreenCanvas(r,o)}function ao(r=256,o=128){return Qi?xu(r,o):Su(r,o)}const so=(r={})=>{const o=({displayWidth:a,displayHeight:u,visibleRect:b=null})=>{let _=b?.x??0,w=b?.y??0,m=b?.width??a,F=b?.height??u;if(r.crop){const[D,O,H,V]=r.crop(m,F);[_,w,m,F]=[_+D,w+O,H,V]}return[a,u]=[m,F],{visibleRect:{x:_,y:w,width:m,height:F},displayWidth:a,displayHeight:u,horizontalFlip:!!r.horizontalFlip}};return{getSourceOptions:a=>{let u=a instanceof HTMLVideoElement?a.videoWidth:a.width,b=a instanceof HTMLVideoElement?a.videoHeight:a.height;return o({displayWidth:u,displayHeight:b})},getFrameOptions:o}};class qr{constructor(o,a={},u=null){re(this,"_source",null),re(this,"_visibleRect",{x:0,y:0,width:0,height:0}),re(this,"_deleter"),re(this,"horizontalFlip",!1);const b=o instanceof HTMLVideoElement?o.videoWidth:o.width,_=o instanceof HTMLVideoElement?o.videoHeight:o.height;this._visibleRect.x=a.visibleRect?.x??0,this._visibleRect.y=a.visibleRect?.y??0,this._visibleRect.width=a.visibleRect?.width??b,this._visibleRect.height=a.visibleRect?.height??_,this.horizontalFlip=a.horizontalFlip??this.horizontalFlip,o.width=b,o.height=_,this._source=o,this._deleter=u}get texture(){return this._source?.width==this.displayWidth&&this._source?.height==this.displayHeight?this._source:null}get displayWidth(){return 
this._visibleRect.width}get displayHeight(){return this._visibleRect.height}get format(){return this._source?_u?"RGB":"RGBA":null}allocationSize(){if(!this.format)throw new Error("Failed to execute 'allocationSize' on 'Frame': Frame is closed.");const{width:o,height:a}={width:this._visibleRect.width,height:this._visibleRect.height};return o*a*this.format.length}async copyTo(o){if(!this._source)throw new Error("Failed to execute 'copyTo' on 'Frame': Frame is closed.");return await wu(this._source,o,this._visibleRect,this.format),[]}close(){this._deleter&&this._deleter(),this._source=null}}var Tu=Object.defineProperty,Cu=Object.getOwnPropertyDescriptor,ku=(r,o,a,u)=>{for(var b=u>1?void 0:u?Cu(o,a):o,_=r.length-1,w;_>=0;_--)(w=r[_])&&(b=(u?w(o,a,b):w(b))||b);return u&&b&&Tu(o,a,b),b},Ji;let ea=class{constructor(r){re(this,"_src"),re(this,"kind","image"),this._src=r}async*[Ji=Symbol.asyncIterator](r){const o=await mu(this._src),a=so(r);yield new qr(o,a.getSourceOptions(o),()=>{URL.revokeObjectURL(o.src),o.src=""})}};ku([to(30)],ea.prototype,Ji,1);var Ru=Object.defineProperty,Pu=Object.getOwnPropertyDescriptor,Au=(r,o,a,u)=>{for(var b=u>1?void 0:u?Pu(o,a):o,_=r.length-1,w;_>=0;_--)(w=r[_])&&(b=(u?w(o,a,b):w(b))||b);return u&&b&&Ru(o,a,b),b},ta;const ra=(ot=class{constructor(r){if(re(this,"_stream"),re(this,"kind","stream"),!ot.cache.has(r))ot.cache.set(r,this);else return ot.cache.get(r);this._stream=r}async*[ta=Symbol.asyncIterator](r){const o=so(r);if("MediaStreamTrackProcessor"in window){const a=new MediaStreamTrackProcessor({track:this._stream.getVideoTracks()[0]}).readable.getReader();try{for(;;){const{done:u,value:b}=await a.read();if(u)return;const _=new VideoFrame(b,o.getFrameOptions(b));_.horizontalFlip=r?.horizontalFlip??!0,b.close(),yield _}}finally{a.releaseLock()}}else{const a=await Qr(this._stream),u="requestVideoFrameCallback"in a?a.requestVideoFrameCallback.bind(a):requestAnimationFrame;for(;!a.paused;)await new Promise(u),yield new 
qr(a,o.getSourceOptions(a));URL.revokeObjectURL(a.src),a.src="",a.srcObject=null}}stop(){for(const r of this._stream.getVideoTracks())r.stop();this._stream&&ot.cache.delete(this._stream)}},re(ot,"cache",new WeakMap),ot);Au([to(30)],ra.prototype,ta,1);let Jr=ra;var Lu=Object.defineProperty,Iu=Object.getOwnPropertyDescriptor,Fu=(r,o,a,u)=>{for(var b=u>1?void 0:u?Iu(o,a):o,_=r.length-1,w;_>=0;_--)(w=r[_])&&(b=(u?w(o,a,b):w(b))||b);return u&&b&&Lu(o,a,b),b},na;const oa={loop:!1};class ia{constructor(o,a){re(this,"_src"),re(this,"_options"),re(this,"_video",null),re(this,"kind","video"),this._src=o,this._options={...oa,...a}}async*[na=Symbol.asyncIterator](o){const a=await(this._video??(this._video=Qr(this._src,this._options))),u=so(o),b="requestVideoFrameCallback"in a?a.requestVideoFrameCallback.bind(a):requestAnimationFrame;for(;!a.paused;)await new Promise(b),yield new qr(a,u.getSourceOptions(a))}stop(){this._video&&this._video.then(o=>(URL.revokeObjectURL(o.src),o.src="",o.srcObject=null)),this._video=null}}Fu([to(30)],ia.prototype,na,1);const Du=`#define GLSLIFY 1 attribute vec2 a_position; varying vec2 v_tex_uv; void main() { v_tex_uv.x = (a_position.x + 1.) * .5; v_tex_uv.y = 1. - (a_position.y + 1.) * .5; gl_Position = vec4(a_position, 0., 1.); } `,Mu=`precision highp float; #define GLSLIFY 1 varying vec2 v_tex_uv; uniform sampler2D u_texture; uniform vec2 u_viewsize; /** * u_filters.x - denoising algorithm to use * 1 - FSR * 2 - Bilateral * any other value - none * u_filters.y - light correction coefficient in [0, 2] * 1 - no light correction */ uniform vec2 u_filters; // https://github.com/glslify/glslify#importing-a-glsl-module // https://github.com/glslify/glslify#passing-references-between-modules // Copyright (c) 2021 Advanced Micro Devices, Inc. All rights reserved. 
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. // // FidelityFX FSR v1.0.2 by AMD // ported to mpv by agyild - https://gist.github.com/agyild/82219c545228d70c5604f865ce0b0ce5 // ported to WebGL by goingdigital - https://www.shadertoy.com/view/stXSWB // using colorspace functions from tobspr - https://github.com/tobspr/GLSL-Color-Spaces/blob/master/ColorSpaces.inc.glsl #define SHARPENING 2.0 // Sharpening intensity: Adjusts sharpening intensity by averaging the original pixels to the sharpened result. 1.0 is the unmodified default. 0.0 to 1.0. #define CONTRAST 2.0 // Adjusts the range the shader adapts to high contrast (0 is not all the way off). Higher values = more high contrast sharpening. 0.0 to 1.0. 
#define PERFORMANCE 1 // Whether to use optimizations for performance with loss of quality // Used to convert from linear RGB to XYZ space const mat3 RGB_2_XYZ_2717090884 = (mat3( 0.4124564, 0.2126729, 0.0193339, 0.3575761, 0.7151522, 0.1191920, 0.1804375, 0.0721750, 0.9503041 )); // Used to convert from XYZ to linear RGB space const mat3 XYZ_2_RGB_2717090884 = (mat3( 3.2404542,-0.9692660, 0.0556434, -1.5371385, 1.8760108,-0.2040259, -0.4985314, 0.0415560, 1.0572252 )); // Converts a color from linear RGB to XYZ space vec3 rgb_to_xyz_2717090884(vec3 rgb) { return RGB_2_XYZ_2717090884 * rgb; } // Converts a color from XYZ to linear RGB space vec3 xyz_to_rgb_2717090884(vec3 xyz) { return XYZ_2_RGB_2717090884 * xyz; } /* EASU stage * * This takes a reduced resolution source, and scales it up while preserving detail. * * Updates: * stretch definition fixed. Thanks nehon for the bug report! */ vec3 FsrEasuCF(vec2 p) { vec2 uv = (p + .5) / u_viewsize; vec4 color = texture2D(u_texture, uv); return rgb_to_xyz_2717090884(color.rgb); } /**** EASU ****/ void FsrEasuCon( out vec4 con0, out vec4 con1, out vec4 con2, out vec4 con3, // This the rendered image resolution being upscaled vec2 inputViewportInPixels, // This is the resolution of the resource containing the input image (useful for dynamic resolution) vec2 inputSizeInPixels, // This is the display resolution which the input image gets upscaled to vec2 outputSizeInPixels ) { // Output integer position to a pixel position in viewport. con0 = vec4( inputViewportInPixels.x/outputSizeInPixels.x, inputViewportInPixels.y/outputSizeInPixels.y, .5*inputViewportInPixels.x/outputSizeInPixels.x-.5, .5*inputViewportInPixels.y/outputSizeInPixels.y-.5 ); // Viewport pixel position to normalized image space. // This is used to get upper-left of 'F' tap. con1 = vec4(1.,1.,1.,-1.)/inputSizeInPixels.xyxy; // Centers of gather4, first offset from upper-left of 'F'. 
// +---+---+ // | | | // +--(0)--+ // | b | c | // +---F---+---+---+ // | e | f | g | h | // +--(1)--+--(2)--+ // | i | j | k | l | // +---+---+---+---+ // | n | o | // +--(3)--+ // | | | // +---+---+ // These are from (0) instead of 'F'. con2 = vec4(-1.,2.,1.,2.)/inputSizeInPixels.xyxy; con3 = vec4(0.,4.,0.,0.)/inputSizeInPixels.xyxy; } // Filtering for a given tap for the scalar. void FsrEasuTapF( inout vec3 aC, // Accumulated color, with negative lobe. inout float aW, // Accumulated weight. vec2 off_0, // Pixel offset from resolve position to tap. vec2 dir_0, // Gradient direction. vec2 len_0, // Length. float lob_0, // Negative lobe strength. float clp_0, // Clipping point. vec3 c_0 ) { // Tap color. // Rotate offset by direction. vec2 v = vec2(dot(off_0, dir_0), dot(off_0,vec2(-dir_0.y,dir_0.x))); // Anisotropy. v *= len_0; // Compute distance^2. float d2 = min(dot(v,v),clp_0); // Limit to the window as at corner, 2 taps can easily be outside. // Approximation of lancos2 without sin() or rcp(), or sqrt() to get x. // (25/16 * (2/5 * x^2 - 1)^2 - (25/16 - 1)) * (1/4 * x^2 - 1)^2 // |_______________________________________| |_______________| // base window // The general form of the 'base' is, // (a*(b*x^2-1)^2-(a-1)) // Where 'a=1/(2*b-b^2)' and 'b' moves around the negative lobe. float wB = .4 * d2 - 1.; float wA = lob_0 * d2 -1.; wB *= wB; wA *= wA; wB = 1.5625*wB-.5625; float w= wB * wA; // Do weighted average. aC += c_0*w; aW += w; } //------------------------------------------------------------------------------------------------------------------------------ // Accumulate direction and length. void FsrEasuSetF( inout vec2 dir, inout float len, float w, float lA,float lB,float lC,float lD,float lE ) { // Direction is the '+' diff. // a // b c d // e // Then takes magnitude from abs average of both sides of 'c'. // Length converts gradient reversal to 0, smoothly to non-reversal at 1, shaped, then adding horz and vert terms. 
float lenX = max(abs(lD - lC), abs(lC - lB)); float dirX = lD - lB; dir.x += dirX * w; lenX = clamp(abs(dirX)/lenX,0.,1.); lenX *= lenX; len += lenX * w; // Repeat for the y axis. float lenY = max(abs(lE - lC), abs(lC - lA)); float dirY = lE - lA; dir.y += dirY * w; lenY = clamp(abs(dirY) / lenY,0.,1.); lenY *= lenY; len += lenY * w; } //------------------------------------------------------------------------------------------------------------------------------ void FsrEasuF( out vec3 pix, vec2 ip, // Integer pixel position in output. // Constants generated by FsrEasuCon(). vec4 con0, // xy = output to input scale, zw = first pixel offset correction vec4 con1_0, vec4 con2_0, vec4 con3_0 ) { //------------------------------------------------------------------------------------------------------------------------------ // Get position of 'f'. vec2 pp = ip * con0.xy + con0.zw; // Corresponding input pixel/subpixel vec2 fp = floor(pp);// fp = source nearest pixel pp -= fp; // pp = source subpixel //------------------------------------------------------------------------------------------------------------------------------ // 12-tap kernel. // b c // e f g h // i j k l // n o // Gather 4 ordering. // a b // r g vec2 p0 = fp * con1_0.xy + con1_0.zw; // These are from p0 to avoid pulling two constants on pre-Navi hardware. 
vec2 p1 = p0 + con2_0.xy; vec2 p2 = p0 + con2_0.zw; vec2 p3 = p0 + con3_0.xy; // TextureGather is not available on WebGL2 vec4 off = vec4(-.5,.5,-.5,.5)*con1_0.xxyy; // textureGather to texture offsets // x=west y=east z=north w=south vec3 bC = FsrEasuCF(p0 + off.xw); float bL = bC.g + 0.5 *(bC.r + bC.b); vec3 cC = FsrEasuCF(p0 + off.yw); float cL = cC.g + 0.5 *(cC.r + cC.b); vec3 iC = FsrEasuCF(p1 + off.xw); float iL = iC.g + 0.5 *(iC.r + iC.b); vec3 jC = FsrEasuCF(p1 + off.yw); float jL = jC.g + 0.5 *(jC.r + jC.b); vec3 fC = FsrEasuCF(p1 + off.yz); float fL = fC.g + 0.5 *(fC.r + fC.b); vec3 eC = FsrEasuCF(p1 + off.xz); float eL = eC.g + 0.5 *(eC.r + eC.b); vec3 kC = FsrEasuCF(p2 + off.xw); float kL = kC.g + 0.5 *(kC.r + kC.b); vec3 lC = FsrEasuCF(p2 + off.yw); float lL = lC.g + 0.5 *(lC.r + lC.b); vec3 hC = FsrEasuCF(p2 + off.yz); float hL = hC.g + 0.5 *(hC.r + hC.b); vec3 gC = FsrEasuCF(p2 + off.xz); float gL = gC.g + 0.5 *(gC.r + gC.b); vec3 oC = FsrEasuCF(p3 + off.yz); float oL = oC.g + 0.5 *(oC.r + oC.b); vec3 nC = FsrEasuCF(p3 + off.xz); float nL = nC.g + 0.5 *(nC.r + nC.b); //------------------------------------------------------------------------------------------------------------------------------ // Simplest multi-channel approximate luma possible (luma times 2, in 2 FMA/MAD). // Accumulate for bilinear interpolation. vec2 dir = vec2(0.); float len = 0.; FsrEasuSetF(dir, len, (1.-pp.x)*(1.-pp.y), bL, eL, fL, gL, jL); FsrEasuSetF(dir, len, pp.x *(1.-pp.y), cL, fL, gL, hL, kL); FsrEasuSetF(dir, len, (1.-pp.x)* pp.y , fL, iL, jL, kL, nL); FsrEasuSetF(dir, len, pp.x * pp.y , gL, jL, kL, lL, oL); //------------------------------------------------------------------------------------------------------------------------------ // Normalize with approximation, and cleanup close to zero. 
vec2 dir2 = dir * dir; float dirR = dir2.x + dir2.y; bool zro = dirR < (1.0/32768.0); dirR = inversesqrt(dirR); #if (PERFORMANCE == 1) if (zro) { vec4 w = vec4(0.0); w.x = (1.0 - pp.x) * (1.0 - pp.y); w.y = pp.x * (1.0 - pp.y); w.z = (1.0 - pp.x) * pp.y; w.w = pp.x * pp.y; pix.r = clamp(dot(w, vec4(fL, gL, jL, kL)), 0.0, 1.0); return; } #elif (PERFORMANCE == 0) dirR = zro ? 1.0 : dirR; dir.x = zro ? 1.0 : dir.x; #endif dir *= vec2(dirR); // Transform from {0 to 2} to {0 to 1} range, and shape with square. len = len * 0.5; len *= len; // Stretch kernel {1.0 vert|horz, to sqrt(2.0) on diagonal}. float stretch = dot(dir,dir) / (max(abs(dir.x), abs(dir.y))); // Anisotropic length after rotation, // x := 1.0 lerp to 'stretch' on edges // y := 1.0 lerp to 2x on edges vec2 len2 = vec2(1. +(stretch-1.0)*len, 1. -.5 * len); // Based on the amount of 'edge', // the window shifts from +/-{sqrt(2.0) to slightly beyond 2.0}. float lob = .5 - .29 * len; // Set distance^2 clipping point to the end of the adjustable window. float clp = 1./lob; //------------------------------------------------------------------------------------------------------------------------------ // Accumulation mixed with min/max of 4 nearest. // b c // e f g h // i j k l // n o // Accumulation. 
vec3 aC = vec3(0); float aW = 0.; FsrEasuTapF(aC, aW, vec2( 0.,-1.)-pp, dir, len2, lob, clp, bC); FsrEasuTapF(aC, aW, vec2( 1.,-1.)-pp, dir, len2, lob, clp, cC); FsrEasuTapF(aC, aW, vec2(-1., 1.)-pp, dir, len2, lob, clp, iC); FsrEasuTapF(aC, aW, vec2( 0., 1.)-pp, dir, len2, lob, clp, jC); FsrEasuTapF(aC, aW, vec2( 0., 0.)-pp, dir, len2, lob, clp, fC); FsrEasuTapF(aC, aW, vec2(-1., 0.)-pp, dir, len2, lob, clp, eC); FsrEasuTapF(aC, aW, vec2( 1., 1.)-pp, dir, len2, lob, clp, kC); FsrEasuTapF(aC, aW, vec2( 2., 1.)-pp, dir, len2, lob, clp, lC); FsrEasuTapF(aC, aW, vec2( 2., 0.)-pp, dir, len2, lob, clp, hC); FsrEasuTapF(aC, aW, vec2( 1., 0.)-pp, dir, len2, lob, clp, gC); FsrEasuTapF(aC, aW, vec2( 1., 2.)-pp, dir, len2, lob, clp, oC); FsrEasuTapF(aC, aW, vec2( 0., 2.)-pp, dir, len2, lob, clp, nC); //------------------------------------------------------------------------------------------------------------------------------ // Normalize and dering. #if (PERFORMANCE == 1) pix = aC/aW; #elif (PERFORMANCE == 0) vec3 min4 = min(min(fC,gC),min(jC,kC)); vec3 max4 = max(max(fC,gC),max(jC,kC)); pix=min(max4,max(min4,aC/aW)); #endif } void EASU( out vec4 fragColor, in vec2 fragCoord ) { vec3 c; vec4 con0,con1,con2,con3; // "rendersize" refers to size of source image before upscaling. 
vec2 rendersize = u_viewsize; FsrEasuCon( con0, con1, con2, con3, rendersize, rendersize, rendersize ); FsrEasuF(c, fragCoord, con0, con1, con2, con3); fragColor = vec4(xyz_to_rgb_2717090884(c.xyz), 1); } vec4 getPixel(vec2 pos) { vec2 coord = (pos + .5) / u_viewsize; coord.y = 1.0 - coord.y; return texture2D(u_texture, coord); } vec4 fsr_easu_2717090884(vec2 uv) { vec4 e = getPixel(gl_FragCoord.xy); vec4 e_xyz = vec4(rgb_to_xyz_2717090884(e.rgb), 1); EASU(e_xyz, (gl_FragCoord.xy + 0.5) / u_viewsize); // fetch a 3x3 neighborhood around the pixel 'e', // a b c // d(e)f // g h i vec3 a = getPixel(gl_FragCoord.xy + vec2(-1.0,-1.0)).rgb; vec3 b = getPixel(gl_FragCoord.xy + vec2( 0.0,-1.0)).rgb; vec3 c = getPixel(gl_FragCoord.xy + vec2( 1.0,-1.0)).rgb; vec3 f = getPixel(gl_FragCoord.xy + vec2( 1.0, 0.0)).rgb; vec3 g = getPixel(gl_FragCoord.xy + vec2(-1.0, 1.0)).rgb; vec3 h = getPixel(gl_FragCoord.xy + vec2( 0.0, 1.0)).rgb; vec3 d = getPixel(gl_FragCoord.xy + vec2(-1.0, 0.0)).rgb; vec3 i = getPixel(gl_FragCoord.xy + vec2( 1.0, 1.0)).rgb;; // Soft min and max. // a b c b // d e f * 0.5 + d e f * 0.5 // g h i h // These are 2.0x bigger (factored out the extra multiply). vec3 mnRGB = min(min(min(d, e.rgb), min(f, b)), h); vec3 mnRGB2 = min(mnRGB, min(min(a, c), min(g, i))); mnRGB += mnRGB2; vec3 mxRGB = max(max(max(d, e.rgb), max(f, b)), h); vec3 mxRGB2 = max(mxRGB, max(max(a, c), max(g, i))); mxRGB += mxRGB2; // Smooth minimum distance to signal limit divided by smooth max. vec3 rcpMRGB = 1.0 / mxRGB; vec3 ampRGB = clamp(min(mnRGB, 2.0 - mxRGB) * rcpMRGB, 0.0, 1.0); // Shaping amount of sharpening. 
ampRGB = inversesqrt(ampRGB); float peak = -3.0 * clamp(CONTRAST, 0.0, 1.0) + 8.0; vec3 wRGB = -(1.0 / (ampRGB * peak)); vec3 rcpWeightRGB = 1.0 / (4.0 * wRGB + 1.0); // 0 w 0 // Filter shape: w 1 w // 0 w 0 vec3 window = (b + d) + (f + h); vec3 outColor = clamp((window * wRGB + e.rgb) * rcpWeightRGB, 0.0, 1.0); return vec4(mix(e.rgb, outColor, SHARPENING), e.a); } // https://github.com/glslify/glslify#exporting-a-glsl-module #define DIFF 1.0 #define RADIUS 4.0 void bilateral_iter_3977570374(vec2 random_dir, vec2 radius, float diff, vec4 pixel, vec2 uv, inout vec3 result, inout float totalWeight) { vec2 dir = random_dir * radius; vec3 randomPixel = texture2D(u_texture, uv + dir).xyz; vec3 delta = randomPixel - pixel.rgb; float weight = exp(-dot(delta, delta) / diff); result += randomPixel * weight; totalWeight += weight; } vec4 bilateral(vec2 uv) { vec2 radius = (RADIUS / u_viewsize); float diff = DIFF / 255.0; vec4 pixel = texture2D(u_texture, uv); vec3 result = vec3(0.0, 0.0, 0.0); float totalWeight = 0.0; // uroll loop and substitute precalculated random vectors for GLSL 1.0 ES: bilateral_iter_3977570374(vec2(-0.886051297,0.447155535), radius, diff, pixel, uv, result, totalWeight); bilateral_iter_3977570374(vec2(0.270759493,0.537728608), radius, diff, pixel, uv, result, totalWeight); bilateral_iter_3977570374(vec2(-0.896959424,0.440607518), radius, diff, pixel, uv, result, totalWeight); bilateral_iter_3977570374(vec2(-0.804274619,0.125076547), radius, diff, pixel, uv, result, totalWeight); bilateral_iter_3977570374(vec2(0.373693645,0.240383312), radius, diff, pixel, uv, result, totalWeight); bilateral_iter_3977570374(vec2(-0.850325704,-0.192106694), radius, diff, pixel, uv, result, totalWeight); bilateral_iter_3977570374(vec2(-0.453608066,0.889671504), radius, diff, pixel, uv, result, totalWeight); bilateral_iter_3977570374(vec2(-0.280496657,0.206442386), radius, diff, pixel, uv, result, totalWeight); bilateral_iter_3977570374(vec2(0.840040743,-0.36367026), 
radius, diff, pixel, uv, result, totalWeight); bilateral_iter_3977570374(vec2(-0.151598319,-0.884027064), radius, diff, pixel, uv, result, totalWeight); bilateral_iter_3977570374(vec2(-0.221440807,0.593896627), radius, diff, pixel, uv, result, totalWeight); bilateral_iter_3977570374(vec2(-0.797481239,-0.243254974), radius, diff, pixel, uv, result, totalWeight); bilateral_iter_3977570374(vec2(0.48824361,0.225083455), radius, diff, pixel, uv, result, totalWeight); bilateral_iter_3977570374(vec2(-0.0387817062,0.838459492), radius, diff, pixel, uv, result, totalWeight); bilateral_iter_3977570374(vec2(0.92897892,-0.133588716), radius, diff, pixel, uv, result, totalWeight); bilateral_iter_3977570374(vec2(-0.693672359,-0.706737161), radius, diff, pixel, uv, result, totalWeight); result = result / totalWeight; return vec4(result, pixel.a); } // https://github.com/glslify/glslify#exporting-a-glsl-module vec3 rgb2hsv(vec3 c) { vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0); vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g)); vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r)); float d = q.x - min(q.w, q.y); float e = 1.0e-10; return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x); } vec3 hsv2rgb(vec3 c) { vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0); vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www); return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y); } vec4 light_correction_1117569599(vec4 c, float s) { vec3 hsv = rgb2hsv(c.rgb); hsv.y = pow(hsv.y, pow(s, -0.5)); hsv.z = pow(hsv.z, s); vec3 rgb = hsv2rgb(hsv); return vec4(rgb, c.a); } // https://github.com/glslify/glslify#exporting-a-glsl-module void main() { vec4 c; if (u_filters.x == 1.) c = fsr_easu_2717090884(v_tex_uv); else if (u_filters.x == 2.) c = bilateral(v_tex_uv); else c = texture2D(u_texture, v_tex_uv); if (u_filters.y != 1.) 
c = light_correction_1117569599(c, u_filters.y); gl_FragColor = c; }`,Nu=(r,o,a)=>{const u=r.createProgram();return r.attachShader(u,o),r.attachShader(u,a),r.linkProgram(u),r.useProgram(u),u},aa=(r,o,a)=>{const u=r.createShader(o);return r.shaderSource(u,a),r.compileShader(u),u},Bu=r=>{const o=r.createTexture();return r.bindTexture(r.TEXTURE_2D,o),r.texImage2D(r.TEXTURE_2D,0,r.RGB,1,1,0,r.RGB,r.UNSIGNED_BYTE,null),r.texParameteri(r.TEXTURE_2D,r.TEXTURE_WRAP_S,r.CLAMP_TO_EDGE),r.texParameteri(r.TEXTURE_2D,r.TEXTURE_WRAP_T,r.CLAMP_TO_EDGE),r.texParameteri(r.TEXTURE_2D,r.TEXTURE_MIN_FILTER,r.NEAREST),r.texParameteri(r.TEXTURE_2D,r.TEXTURE_MAG_FILTER,r.LINEAR),r.bindTexture(r.TEXTURE_2D,null),o},sa=(r,o)=>{let a=0,u=1;const b=document.createElement("canvas"),_=b.captureStream(30),w=b.getContext("webgl"),m=aa(w,w.VERTEX_SHADER,Du),F=aa(w,w.FRAGMENT_SHADER,Mu),D=Nu(w,m,F),O=Bu(w);w.bindTexture(w.TEXTURE_2D,O);const H=w.getAttribLocation(D,"a_position"),V=w.createBuffer();w.bindBuffer(w.ARRAY_BUFFER,V),w.bufferData(w.ARRAY_BUFFER,new Float32Array([-1,-1,1,-1,-1,1,-1,1,1,-1,1,1]),w.STATIC_DRAW),w.enableVertexAttribArray(H),w.vertexAttribPointer(H,2,w.FLOAT,!1,0,0);const q=w.getUniformLocation(D,"u_viewsize"),X=w.getUniformLocation(D,"u_filters");w.uniform2fv(X,new Float32Array([a,u])),Qr(r).then(ae=>{const he=ae.requestVideoFrameCallback?.bind(ae)||eo.requestAnimationFrame;(function Z(){ae.ended||!_.active||(he(Z),w.texImage2D(w.TEXTURE_2D,0,w.RGBA,w.RGBA,w.UNSIGNED_BYTE,ae),(b.width!==ae.videoWidth||b.height!==ae.videoHeight)&&(w.viewport(0,0,b.width=ae.videoWidth,b.height=ae.videoHeight),w.uniform2fv(q,new Float32Array([b.width,b.height]))),w.drawArrays(w.TRIANGLES,0,6))})()}),w.deleteProgram(D),w.deleteShader(F),w.deleteShader(m);const Ce={stream:_,denoise(ae){w.uniform2fv(X,new Float32Array([a=ae,u]))},exposureCompensation(ae){w.uniform2fv(X,new Float32Array([a,u=ae]))}};if(o)for(const[ae,he]of Object.entries(o))Ce[ae](he);return Ce},Ou=typeof 
screen<"u"&&screen.height>screen.width,
// en: default getUserMedia video constraints — user-facing camera, 720p ideal within
// 640x480..1920x1080, crop-and-scale resizing when the browser supports it.
en={facingMode:"user",width:{min:640,ideal:1280,max:1920},height:{min:480,ideal:720,max:1080},resizeMode:{ideal:"crop-and-scale"}};
// Ou is true on portrait screens: drop the fixed landscape dimensions and let the browser choose.
Ou&&(delete en.width,delete en.height);
// ju: "stream" input source backed by getUserMedia. Opens the camera lazily, optionally routes
// frames through the sa() shader enhancer while denoise/exposure preferences are active, and
// yields frames via an async iterator (horizontally flipped by default).
class ju{
// _stream holds the *Promise* returned by la() (see stop(), which .then()s it rather than awaiting).
constructor(o){re(this,"_stream",null),re(this,"_constraints"),re(this,"_preferences",{}),re(this,"_enhancer",null),re(this,"kind","stream"),this._constraints={...en,...o}}
get active(){return!!this._stream}
// Sets the denoise mode (coerced to a number); forwarded live to a running enhancer.
denoise(o){this._preferences.denoise=Number(o),this._enhancer?.denoise(this._preferences.denoise)}
// Sets the exposure factor; forwarded live to a running enhancer.
setExposureCompensation(o){this._preferences.exposureCompensation=o,this._enhancer?.exposureCompensation(this._preferences.exposureCompensation)}
// Opens the camera once (memoized in _stream) and resolves to this source.
async start(){return await(this._stream??(this._stream=la(this._constraints))),this}
// Frame loop. On every iteration the enhancer is (re)attached or torn down so that toggling
// denoise/exposure preferences mid-stream transparently swaps the underlying MediaStream
// fed to the Jr frame iterator.
async*[Symbol.asyncIterator](o){const a=await(this._stream??(this._stream=la(this._constraints))),u=this._enhancer=lo(this._preferences)?sa(a,this._preferences):null;let b=new Jr(u?u.stream:a)[Symbol.asyncIterator]({horizontalFlip:!0,...o}),_;for(;;){if(!this._enhancer&&lo(this._preferences)){const F=this._enhancer=sa(a,this._preferences);b=new Jr(F.stream)[Symbol.asyncIterator]({horizontalFlip:!0,...o})}this._enhancer&&!lo(this._preferences)&&(this._enhancer.stream.getTracks().forEach(F=>F.stop()),this._enhancer=null,b=new Jr(a)[Symbol.asyncIterator]({horizontalFlip:!0,...o}));const{done:w,value:m}=await b.next(_);if(w)break;_=yield m}this.stop()}
// Stops every track of both the raw camera stream and the enhancer's canvas stream.
stop(){this._stream&&this._stream.then(o=>o.getTracks().forEach(a=>a.stop())),this._enhancer&&this._enhancer.stream.getTracks().forEach(o=>o.stop()),this._stream=null,this._enhancer=null}}
// la: opens the webcam; mediaDevices only exists in a secure context, hence the explicit hint.
const la=async r=>{if(typeof navigator.mediaDevices>"u")throw new Error(`SecureContext is required to access webcam It\u2018s likely you need to set up HTTPS/TLS for your website See https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia#Encryption_based_security for details `);return await navigator.mediaDevices.getUserMedia({video:r})},
// lo: true when the current preferences require the shader enhancer
// (any exposureCompensation other than 1, or denoise mode 1/2).
lo=r=>typeof
r.exposureCompensation=="number"&&r.exposureCompensation!==1||r.denoise===1||r.denoise===2,Uu={createVideoElement:Qr,createCanvas:ao};let Wu="useandom-26T198340PX75pxJACKVERYMINDBUSHWOLF_GQZbfghjklqvwyzrict",ua=(r=21)=>{let o="",a=r;for(;a--;)o+=Wu[Math.random()*64|0];return o};const ca="KGZ1bmN0aW9uKCl7InVzZSBzdHJpY3QiO3ZhciBzPVVpbnQ4QXJyYXkseD1VaW50MTZBcnJheSxPPVVpbnQzMkFycmF5LEg9bmV3IHMoWzAsMCwwLDAsMCwwLDAsMCwxLDEsMSwxLDIsMiwyLDIsMywzLDMsMyw0LDQsNCw0LDUsNSw1LDUsMCwwLDAsMF0pLEk9bmV3IHMoWzAsMCwwLDAsMSwxLDIsMiwzLDMsNCw0LDUsNSw2LDYsNyw3LDgsOCw5LDksMTAsMTAsMTEsMTEsMTIsMTIsMTMsMTMsMCwwXSksbDE9bmV3IHMoWzE2LDE3LDE4LDAsOCw3LDksNiwxMCw1LDExLDQsMTIsMywxMywyLDE0LDEsMTVdKSxKPWZ1bmN0aW9uKHIsdCl7Zm9yKHZhciBhPW5ldyB4KDMxKSxuPTA7bjwzMTsrK24pYVtuXT10Kz0xPDxyW24tMV07Zm9yKHZhciB2PW5ldyBPKGFbMzBdKSxuPTE7bjwzMDsrK24pZm9yKHZhciBpPWFbbl07aTxhW24rMV07KytpKXZbaV09aS1hW25dPDw1fG47cmV0dXJuW2Esdl19LEs9SihILDIpLFE9S1swXSxjMT1LWzFdO1FbMjhdPTI1OCxjMVsyNThdPTI4O2Zvcih2YXIgczE9SihJLDApLGQxPXMxWzBdLFU9bmV3IHgoMzI3NjgpLHU9MDt1PDMyNzY4OysrdSl7dmFyIEM9KHUmNDM2OTApPj4+MXwodSYyMTg0NSk8PDE7Qz0oQyY1MjQyOCk+Pj4yfChDJjEzMTA3KTw8MixDPShDJjYxNjgwKT4+PjR8KEMmMzg1NSk8PDQsVVt1XT0oKEMmNjUyODApPj4+OHwoQyYyNTUpPDw4KT4+PjF9Zm9yKHZhciB6PWZ1bmN0aW9uKHQsYSxuKXtmb3IodmFyIHY9dC5sZW5ndGgsaT0wLGM9bmV3IHgoYSk7aTx2OysraSl0W2ldJiYrK2NbdFtpXS0xXTt2YXIgZj1uZXcgeChhKTtmb3IoaT0wO2k8YTsrK2kpZltpXT1mW2ktMV0rY1tpLTFdPDwxO3ZhciBvO2lmKG4pe289bmV3IHgoMTw8YSk7dmFyIGU9MTUtYTtmb3IoaT0wO2k8djsrK2kpaWYodFtpXSlmb3IodmFyIGw9aTw8NHx0W2ldLGI9YS10W2ldLGQ9Zlt0W2ldLTFdKys8PGIseT1kfCgxPDxiKS0xO2Q8PXk7KytkKW9bVVtkXT4+PmVdPWx9ZWxzZSBmb3Iobz1uZXcgeCh2KSxpPTA7aTx2OysraSl0W2ldJiYob1tpXT1VW2ZbdFtpXS0xXSsrXT4+PjE1LXRbaV0pO3JldHVybiBvfSxCPW5ldyBzKDI4OCksdT0wO3U8MTQ0OysrdSlCW3VdPTg7Zm9yKHZhciB1PTE0NDt1PDI1NjsrK3UpQlt1XT05O2Zvcih2YXIgdT0yNTY7dTwyODA7Kyt1KUJbdV09Nztmb3IodmFyIHU9MjgwO3U8Mjg4OysrdSlCW3VdPTg7Zm9yKHZhciBWPW5ldyBzKDMyKSx1PTA7dTwzMjsrK3UpVlt1XT01O3ZhciBnMT16KEIsOSwxKSx3MT16KFYsNSwxKSxXPWZ1bmN0aW9uKHIpe2Zvcih2YXIgdD1yWzBdLGE9MTthPHIubGVuZ3RoOysrYSl
yW2FdPnQmJih0PXJbYV0pO3JldHVybiB0fSxoPWZ1bmN0aW9uKHIsdCxhKXt2YXIgbj10Lzh8MDtyZXR1cm4ocltuXXxyW24rMV08PDgpPj4odCY3KSZhfSxYPWZ1bmN0aW9uKHIsdCl7dmFyIGE9dC84fDA7cmV0dXJuKHJbYV18clthKzFdPDw4fHJbYSsyXTw8MTYpPj4odCY3KX0saDE9ZnVuY3Rpb24ocil7cmV0dXJuKHIrNykvOHwwfSxqPWZ1bmN0aW9uKHIsdCxhKXsodD09bnVsbHx8dDwwKSYmKHQ9MCksKGE9PW51bGx8fGE+ci5sZW5ndGgpJiYoYT1yLmxlbmd0aCk7dmFyIG49bmV3KHIuQllURVNfUEVSX0VMRU1FTlQ9PTI/eDpyLkJZVEVTX1BFUl9FTEVNRU5UPT00P086cykoYS10KTtyZXR1cm4gbi5zZXQoci5zdWJhcnJheSh0LGEpKSxufSxtMT1bInVuZXhwZWN0ZWQgRU9GIiwiaW52YWxpZCBibG9jayB0eXBlIiwiaW52YWxpZCBsZW5ndGgvbGl0ZXJhbCIsImludmFsaWQgZGlzdGFuY2UiLCJzdHJlYW0gZmluaXNoZWQiLCJubyBzdHJlYW0gaGFuZGxlciIsLCJubyBjYWxsYmFjayIsImludmFsaWQgVVRGLTggZGF0YSIsImV4dHJhIGZpZWxkIHRvbyBsb25nIiwiZGF0ZSBub3QgaW4gcmFuZ2UgMTk4MC0yMDk5IiwiZmlsZW5hbWUgdG9vIGxvbmciLCJzdHJlYW0gZmluaXNoaW5nIiwiaW52YWxpZCB6aXAgZGF0YSJdLGc9ZnVuY3Rpb24ocix0LGEpe3ZhciBuPW5ldyBFcnJvcih0fHxtMVtyXSk7aWYobi5jb2RlPXIsRXJyb3IuY2FwdHVyZVN0YWNrVHJhY2UmJkVycm9yLmNhcHR1cmVTdGFja1RyYWNlKG4sZyksIWEpdGhyb3cgbjtyZXR1cm4gbn0sYjE9ZnVuY3Rpb24ocix0LGEpe3ZhciBuPXIubGVuZ3RoO2lmKCFufHxhJiZhLmYmJiFhLmwpcmV0dXJuIHR8fG5ldyBzKDApO3ZhciB2PSF0fHxhLGk9IWF8fGEuaTthfHwoYT17fSksdHx8KHQ9bmV3IHMobiozKSk7dmFyIGM9ZnVuY3Rpb24odTEpe3ZhciB2MT10Lmxlbmd0aDtpZih1MT52MSl7dmFyIGYxPW5ldyBzKE1hdGgubWF4KHYxKjIsdTEpKTtmMS5zZXQodCksdD1mMX19LGY9YS5mfHwwLG89YS5wfHwwLGU9YS5ifHwwLGw9YS5sLGI9YS5kLGQ9YS5tLHk9YS5uLFI9bio4O2Rve2lmKCFsKXtmPWgocixvLDEpO3ZhciBZPWgocixvKzEsMyk7aWYobys9MyxZKWlmKFk9PTEpbD1nMSxiPXcxLGQ9OSx5PTU7ZWxzZSBpZihZPT0yKXt2YXIgUz1oKHIsbywzMSkrMjU3LHIxPWgocixvKzEwLDE1KSs0LHQxPVMraChyLG8rNSwzMSkrMTtvKz0xNDtmb3IodmFyIEY9bmV3IHModDEpLEc9bmV3IHMoMTkpLHc9MDt3PHIxOysrdylHW2wxW3ddXT1oKHIsbyt3KjMsNyk7bys9cjEqMztmb3IodmFyIGExPVcoRyksQjE9KDE8PGExKS0xLFIxPXooRyxhMSwxKSx3PTA7dzx0MTspe3ZhciBuMT1SMVtoKHIsbyxCMSldO28rPW4xJjE1O3ZhciBwPW4xPj4+NDtpZihwPDE2KUZbdysrXT1wO2Vsc2V7dmFyIFQ9MCxOPTA7Zm9yKHA9PTE2PyhOPTMraChyLG8sMyksbys9MixUPUZbdy0xXSk6cD09MTc/KE49MytoKHIsbyw3KSxvKz0zKTpwPT0xOCYmKE49MTEraChyLG8sMTI3KSxvKz03KTtOLS0
7KUZbdysrXT1UfX12YXIgaTE9Ri5zdWJhcnJheSgwLFMpLF89Ri5zdWJhcnJheShTKTtkPVcoaTEpLHk9VyhfKSxsPXooaTEsZCwxKSxiPXooXyx5LDEpfWVsc2UgZygxKTtlbHNle3ZhciBwPWgxKG8pKzQsTD1yW3AtNF18cltwLTNdPDw4LFo9cCtMO2lmKFo+bil7aSYmZygwKTticmVha312JiZjKGUrTCksdC5zZXQoci5zdWJhcnJheShwLFopLGUpLGEuYj1lKz1MLGEucD1vPVoqOCxhLmY9Zjtjb250aW51ZX1pZihvPlIpe2kmJmcoMCk7YnJlYWt9fXYmJmMoZSsxMzEwNzIpO2Zvcih2YXIgWTE9KDE8PGQpLTEsRjE9KDE8PHkpLTEsJD1vOzskPW8pe3ZhciBUPWxbWChyLG8pJlkxXSxrPVQ+Pj40O2lmKG8rPVQmMTUsbz5SKXtpJiZnKDApO2JyZWFrfWlmKFR8fGcoMiksazwyNTYpdFtlKytdPWs7ZWxzZSBpZihrPT0yNTYpeyQ9byxsPW51bGw7YnJlYWt9ZWxzZXt2YXIgbzE9ay0yNTQ7aWYoaz4yNjQpe3ZhciB3PWstMjU3LE09SFt3XTtvMT1oKHIsbywoMTw8TSktMSkrUVt3XSxvKz1NfXZhciBQPWJbWChyLG8pJkYxXSxEPVA+Pj40O1B8fGcoMyksbys9UCYxNTt2YXIgXz1kMVtEXTtpZihEPjMpe3ZhciBNPUlbRF07Xys9WChyLG8pJigxPDxNKS0xLG8rPU19aWYobz5SKXtpJiZnKDApO2JyZWFrfXYmJmMoZSsxMzEwNzIpO2Zvcih2YXIgZTE9ZStvMTtlPGUxO2UrPTQpdFtlXT10W2UtX10sdFtlKzFdPXRbZSsxLV9dLHRbZSsyXT10W2UrMi1fXSx0W2UrM109dFtlKzMtX107ZT1lMX19YS5sPWwsYS5wPSQsYS5iPWUsYS5mPWYsbCYmKGY9MSxhLm09ZCxhLmQ9YixhLm49eSl9d2hpbGUoIWYpO3JldHVybiBlPT10Lmxlbmd0aD90OmoodCwwLGUpfSx5MT1uZXcgcygwKSxFPWZ1bmN0aW9uKHIsdCl7cmV0dXJuIHJbdF18clt0KzFdPDw4fSxtPWZ1bmN0aW9uKHIsdCl7cmV0dXJuKHJbdF18clt0KzFdPDw4fHJbdCsyXTw8MTZ8clt0KzNdPDwyNCk+Pj4wfSxxPWZ1bmN0aW9uKHIsdCl7cmV0dXJuIG0ocix0KSttKHIsdCs0KSo0Mjk0OTY3Mjk2fTtmdW5jdGlvbiBFMShyLHQpe3JldHVybiBiMShyLHQpfXZhciBBPXR5cGVvZiBUZXh0RGVjb2RlcjwidSImJm5ldyBUZXh0RGVjb2RlcixwMT0wO3RyeXtBLmRlY29kZSh5MSx7c3RyZWFtOiEwfSkscDE9MX1jYXRjaHt9dmFyIEMxPWZ1bmN0aW9uKHIpe2Zvcih2YXIgdD0iIixhPTA7Oyl7dmFyIG49clthKytdLHY9KG4+MTI3KSsobj4yMjMpKyhuPjIzOSk7aWYoYSt2PnIubGVuZ3RoKXJldHVyblt0LGoocixhLTEpXTt2P3Y9PTM/KG49KChuJjE1KTw8MTh8KHJbYSsrXSY2Myk8PDEyfChyW2ErK10mNjMpPDw2fHJbYSsrXSY2MyktNjU1MzYsdCs9U3RyaW5nLmZyb21DaGFyQ29kZSg1NTI5NnxuPj4xMCw1NjMyMHxuJjEwMjMpKTp2JjE/dCs9U3RyaW5nLmZyb21DaGFyQ29kZSgobiYzMSk8PDZ8clthKytdJjYzKTp0Kz1TdHJpbmcuZnJvbUNoYXJDb2RlKChuJjE1KTw8MTJ8KHJbYSsrXSY2Myk8PDZ8clthKytdJjYzKTp0Kz1TdHJpbmcuZnJvbUNoYXJDb2RlKG4pfX07ZnVuY3Rpb24gUzEocix
0KXtpZih0KXtmb3IodmFyIGE9IiIsbj0wO248ci5sZW5ndGg7bis9MTYzODQpYSs9U3RyaW5nLmZyb21DaGFyQ29kZS5hcHBseShudWxsLHIuc3ViYXJyYXkobixuKzE2Mzg0KSk7cmV0dXJuIGF9ZWxzZXtpZihBKXJldHVybiBBLmRlY29kZShyKTt2YXIgdj1DMShyKSxpPXZbMF0sYz12WzFdO3JldHVybiBjLmxlbmd0aCYmZyg4KSxpfX12YXIgXzE9ZnVuY3Rpb24ocix0KXtyZXR1cm4gdCszMCtFKHIsdCsyNikrRShyLHQrMjgpfSx4MT1mdW5jdGlvbihyLHQsYSl7dmFyIG49RShyLHQrMjgpLHY9UzEoci5zdWJhcnJheSh0KzQ2LHQrNDYrbiksIShFKHIsdCs4KSYyMDQ4KSksaT10KzQ2K24sYz1tKHIsdCsyMCksZj1hJiZjPT00Mjk0OTY3Mjk1P1QxKHIsaSk6W2MsbShyLHQrMjQpLG0ocix0KzQyKV0sbz1mWzBdLGU9ZlsxXSxsPWZbMl07cmV0dXJuW0Uocix0KzEwKSxvLGUsdixpK0Uocix0KzMwKStFKHIsdCszMiksbF19LFQxPWZ1bmN0aW9uKHIsdCl7Zm9yKDtFKHIsdCkhPTE7dCs9NCtFKHIsdCsyKSk7cmV0dXJuW3Eocix0KzEyKSxxKHIsdCs0KSxxKHIsdCsyMCldfTtmdW5jdGlvbiBrMShyLHQpe2Zvcih2YXIgYT17fSxuPXIubGVuZ3RoLTIyO20ocixuKSE9MTAxMDEwMjU2Oy0tbikoIW58fHIubGVuZ3RoLW4+NjU1NTgpJiZnKDEzKTt2YXIgdj1FKHIsbis4KTtpZighdilyZXR1cm57fTt2YXIgaT1tKHIsbisxNiksYz1pPT00Mjk0OTY3Mjk1O2MmJihuPW0ocixuLTEyKSxtKHIsbikhPTEwMTA3NTc5MiYmZygxMyksdj1tKHIsbiszMiksaT1tKHIsbis0OCkpO2Zvcih2YXIgZj10JiZ0LmZpbHRlcixvPTA7bzx2Oysrbyl7dmFyIGU9eDEocixpLGMpLGw9ZVswXSxiPWVbMV0sZD1lWzJdLHk9ZVszXSxSPWVbNF0sWT1lWzVdLFM9XzEocixZKTtpPVIsKCFmfHxmKHtuYW1lOnksc2l6ZTpiLG9yaWdpbmFsU2l6ZTpkLGNvbXByZXNzaW9uOmx9KSkmJihsP2w9PTg/YVt5XT1FMShyLnN1YmFycmF5KFMsUytiKSxuZXcgcyhkKSk6ZygxNCwidW5rbm93biBjb21wcmVzc2lvbiB0eXBlICIrbCk6YVt5XT1qKHIsUyxTK2IpKX1yZXR1cm4gYX1jb25zdCB6MT1yPT5rMShyLHtmaWx0ZXI6KHtuYW1lOnR9KT0+ISh0LnN0YXJ0c1dpdGgoIl9fTUFDT1NYLyIpfHx0LmluY2x1ZGVzKCIuRFNfU3RvcmUiKSl9KTthZGRFdmVudExpc3RlbmVyKCJtZXNzYWdlIiwoe2RhdGE6cn0pPT57bGV0IHQ7dHJ5e3Q9e2lkOnIuaWQsZGF0YTp6MShyLmRhdGEpfX1jYXRjaChhKXt0PXtpZDpyLmlkLGVycm9yOmEubWVzc2FnZX19cG9zdE1lc3NhZ2UodCl9KX0pKCk7Cg==",fa=typeof window<"u"&&window.Blob&&new Blob([atob(ca)],{type:"text/javascript;charset=utf-8"});function $u(){let r;try{if(r=fa&&(window.URL||window.webkitURL).createObjectURL(fa),!r)throw"";return new Worker(r)}catch{return new 
Worker("data:application/javascript;base64,"+ca)}finally{r&&(window.URL||window.webkitURL).revokeObjectURL(r)}}
// Mr: lazily-created singleton unzip worker shared by every Gu() call.
let Mr;
// Gu: inflates a zip ArrayBuffer in the worker; resolves with a { path: Uint8Array } map or
// rejects with the worker-reported error. The buffer is transferred (postMessage(_,[r])),
// so the caller's ArrayBuffer is detached afterwards.
const Gu=async r=>new Promise((o,a)=>{const u=ua(),b=new Uint8Array(r),_={id:u,data:b};Mr||(Mr=new $u);const w=({data:m})=>{m.id===_.id&&(Mr.removeEventListener("message",w),"error"in m&&a(new Error(m.error)),"data"in m&&o(m.data))};Mr.addEventListener("message",w),Mr.postMessage(_,[r])}),
// uo: default (root) mountpoint for da resources.
uo="/";
// da: downloadable resource container. Accepts a URL string / Request / zip Blob / plain
// { path: bytes } object, loads it into an in-memory { path: Uint8Array } map, and can
// mirror that map into an Emscripten FS under a mountpoint.
class da{constructor(o){re(this,"_source",null),re(this,"_fs",null),re(this,"_mountpoint",uo),re(this,"_data",{}),this._source=o}
// Preloads one source or an array of sources (fanning onProgress out per array index).
static async preload(o,a){if(Array.isArray(o)){const b=a?.onProgress;return await Promise.all(o.map((_,w)=>{const m=b?{onProgress:(...F)=>b(w,...F)}:{};return this.preload(_,m)}))}const u=new this(o);return await u.load(a),u}
// Fetches the Request via yu(); rejects on HTTP errors and on empty response bodies.
async _fetch(o,a){return await yu(o,{},a).then(u=>{if(u.ok)return u.blob();throw new Error(`Failed to fetch ${o.url} ${u.status} (${u.statusText})`)}).then(u=>{if(u.size>0)return u;throw new Error(`The source must not be empty. Received ${u.size} bytes size source.`)})}
// Unzips a Blob through the Gu worker into a plain { path: Uint8Array } object.
async _unzip(o){if(!o.type.includes("zip"))throw new TypeError(`The source type must be "application/zip"-like. 
Received: "${o.type}".`);return await o.arrayBuffer().then(Gu).then(a=>Object.entries(a)).then(a=>Object.fromEntries(a))}
// Normalizes the source step by step (string -> Request -> Blob -> object) and writes every
// entry into the in-memory map; the consumed source reference is dropped afterwards.
async load(o){let a=this._source;return typeof a=="string"&&(a=new Request(a)),a instanceof Request&&(a=await this._fetch(a,o)),a instanceof Blob&&(a=await this._unzip(a)),a instanceof Object&&a.constructor===Object&&await Promise.all(Object.entries(a).map(([u,b])=>this.writeFile(u,b))),this._source=null,this._data}
// Mirrors a single file into the mounted Emscripten FS (no-op while unmounted).
_fsWriteFile(o,a){this._fs&&(o=`${this._mountpoint}${o.startsWith("/")?o.substring(1):o}`,this._fs.writeFile(o,a))}
// Stores a file in the in-memory map and mirrors it to the FS if mounted.
async writeFile(o,a){const u=new Uint8Array(a instanceof Blob?await a.arrayBuffer():a);this._data[o]=u,this._fsWriteFile(o,this._data[o])}
// Attaches an Emscripten FS at `a` (normalized to a trailing slash) and flushes all cached files into it.
mount(o,a=uo){this._fs=o,this._mountpoint=a.endsWith("/")?a:`${a}/`,Object.entries(this._data).forEach(([u,b])=>this._fsWriteFile(u,b))}
unmount(){this._fs=null,this._mountpoint=uo}}
// zu/Vu/Xu: emitted TypeScript __decorate helper (applies decorators right-to-left);
// used below to mark ha.callJsMethod as deprecated.
var zu=Object.defineProperty,Vu=Object.getOwnPropertyDescriptor,Xu=(r,o,a,u)=>{for(var b=u>1?void 0:u?Vu(o,a):o,_=r.length-1,w;_>=0;_--)(w=r[_])&&(b=(u?w(o,a,b):w(b))||b);return u&&b&&zu(o,a,b),b};
// ha: an Effect — a Ku archive resource mounted under "effects/<random id>" in a player's FS,
// with helpers to write files into it and to evaluate JS inside the applied effect.
class ha{constructor(o){re(this,"name",`effects/${ua()}`),re(this,"_player",null),re(this,"_resource"),this._resource=new Ku(o)}
// Preloads one effect or an array of effects (per-index onProgress fan-out, mirroring da.preload).
static async preload(o,a){if(Array.isArray(o)){const b=a?.onProgress;return await Promise.all(o.map((_,w)=>{const m=b?{onProgress:(...F)=>b(w,...F)}:{};return this.preload(_,m)}))}const u=new this(o);return await u._load(a),u}
async _load(o){await this._resource.load(o)}
// Mounts the effect's files into the player FS under this.name when the effect is applied.
async _bind(o){await this._resource.load(),this._player=o,this._resource.mount(this._player.FS,this.name)}
_unbind(){this._resource.unmount(),this._player=null}
async writeFile(o,a){return this._resource.writeFile(o,a)}
// Deprecated (see the Xu/__decorate call after this class): forwards to the player; warns and
// returns undefined when the effect is not applied.
callJsMethod(o,a=""){if(!this._player){console.warn("The method won't evaluate: the effect is not applied to a player.");return}return this._player.callJsMethod(o,a)}
// Evaluates a JS snippet inside the applied effect; warns and resolves undefined when unapplied.
async evalJs(o){if(!this._player){console.warn("The script won't evaluate: the effect is not applied to a player.");return}return await
this._player.evalJs(o)}}
// Deprecate Effect.callJsMethod through the Xu/__decorate helper (Yi produces the deprecation wrapper).
Xu([Yi("Please, use Effect.evalJs() instead.")],ha.prototype,"callJsMethod",1);
// Ku: effect archive resource — like da, but if every unzipped entry shares one top-level
// folder, that folder prefix is stripped so the effect's files sit at the archive root.
class Ku extends da{async _unzip(o){let a=await super._unzip(o);const u=Object.keys(a).map(_=>_.split("/").find(Boolean)),b=u[0];return u.every(_=>_===b)&&(a=Object.fromEntries(Object.entries(a).map(([_,w])=>[_.replace(`${b}/`,""),w]))),a}}
// Hu: auxiliary resource wrapper around da; on _bind it mounts its files at the root of the
// player's Emscripten FS (default mountpoint "/").
let Hu=class{constructor(r){re(this,"_resource"),this._resource=new da(r)}static async preload(r,o){if(Array.isArray(r)){const u=o?.onProgress;return await Promise.all(r.map((b,_)=>{const w=u?{onProgress:(...m)=>u(_,...m)}:{};return this.preload(b,w)}))}const a=new this(r);return await a._load(o),a}async _load(r){await this._resource.load(r)}async _bind(r){await this._resource.load(),this._resource.mount(r.FS)}};
// Yu: generated Emscripten module factory (BanubaSDK.data / wasm loader). Its definition
// continues beyond this chunk; the body below is reproduced unchanged.
var Yu=(()=>{var r=typeof document<"u"?document.currentScript?.src:void 0;return async function(o={}){var a,u=o,b,_,w=new Promise((e,t)=>{b=e,_=t}),m=!1;u.expectedDataFileDownloads??(u.expectedDataFileDownloads=0),u.expectedDataFileDownloads++,(()=>{var e=typeof ENVIRONMENT_IS_PTHREAD<"u"&&ENVIRONMENT_IS_PTHREAD,t=typeof ENVIRONMENT_IS_WASM_WORKER<"u"&&ENVIRONMENT_IS_WASM_WORKER;if(e||t)return;function n(i){typeof window=="object"?window.encodeURIComponent(window.location.pathname.substring(0,window.location.pathname.lastIndexOf("/"))+"/"):typeof process>"u"&&typeof location<"u"&&encodeURIComponent(location.pathname.substring(0,location.pathname.lastIndexOf("/"))+"/");var l="BanubaSDK.data",d="BanubaSDK.data",v=u.locateFile?u.locateFile(d,""):d,y=i.remote_package_size;function S(N,$,ee,ue){u.dataFileDownloads??(u.dataFileDownloads={}),fetch(N).catch(ce=>Promise.reject(new Error(`Network Error: ${N}`,{cause:ce}))).then(ce=>{if(!ce.ok)return Promise.reject(new Error(`${ce.status}: ${ce.url}`));if(!ce.body&&ce.arrayBuffer)return ce.arrayBuffer().then(ee);const ye=ce.body.getReader(),me=()=>ye.read().then(Fe).catch(Je=>Promise.reject(new Error(`Unexpected error while handling : ${ce.url} 
${Je}`,{cause:Je}))),pe=[],ve=ce.headers,Le=Number(ve.get("Content-Length")??$);let le=0;const Fe=({done:Je,value:ft})=>{if(Je){const Ue=new Uint8Array(pe.map(Ve=>Ve.length).reduce((Ve,er)=>Ve+er,0));let dt=0;for(const Ve of pe)Ue.set(Ve,dt),dt+=Ve.length;ee(Ue.buffer)}else{pe.push(ft),le+=ft.length,u.dataFileDownloads[N]={loaded:le,total:Le};let Ue=0,dt=0;for(const Ve of Object.values(u.dataFileDownloads))Ue+=Ve.loaded,dt+=Ve.total;return u.setStatus?.(`Downloading data... (${Ue}/${dt})`),me()}};return u.setStatus?.("Downloading data..."),me()})}var P=null,A=u.getPreloadedPackage?u.getPreloadedPackage(v,y):null;A||S(v,y,N=>{P?(P(N),P=null):A=N});function U(N){function $(me,pe){if(!me)throw pe+new Error().stack}N.FS_createPath("/","bnb_js",!0,!0),N.FS_createPath("/","bnb_prefabs",!0,!0),N.FS_createPath("/bnb_prefabs","audio",!0,!0),N.FS_createPath("/bnb_prefabs","base",!0,!0),N.FS_createPath("/bnb_prefabs","camera",!0,!0),N.FS_createPath("/bnb_prefabs/camera","images",!0,!0),N.FS_createPath("/bnb_prefabs","foreground",!0,!0),N.FS_createPath("/bnb_prefabs","gltf",!0,!0),N.FS_createPath("/bnb_prefabs","gltf_base",!0,!0),N.FS_createPath("/bnb_prefabs/gltf_base","meshes",!0,!0),N.FS_createPath("/bnb_prefabs/gltf_base","shaders",!0,!0),N.FS_createPath("/bnb_prefabs","hint",!0,!0),N.FS_createPath("/bnb_prefabs/hint","font",!0,!0),N.FS_createPath("/bnb_prefabs/hint","meshes",!0,!0),N.FS_createPath("/bnb_prefabs/hint","scripts",!0,!0),N.FS_createPath("/bnb_prefabs/hint","shaders",!0,!0),N.FS_createPath("/bnb_prefabs","lights",!0,!0),N.FS_createPath("/bnb_prefabs","lut",!0,!0),N.FS_createPath