@needle-tools/engine
Version:
Needle Engine is a web-based runtime for 3D apps. It runs on your machine for development with great integrations into editors like Unity or Blender - and can be deployed onto any device! It is flexible, extensible and networking and XR are built-in.
979 lines (972 loc) • 35.2 kB
JavaScript
// TypeScript decorator helper (tslib-style emit): applies `decorators` to a class
// (2 arguments), a plain property (3 arguments) or a method/accessor (4 arguments)
// and returns the decorated result. Reuses an existing global implementation when present.
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
    const argCount = arguments.length;
    // 2 args: the class itself is decorated. 4 args with desc === null: resolve the
    // property descriptor so method decorators can wrap it.
    let result = argCount < 3
        ? target
        : desc === null ? (desc = Object.getOwnPropertyDescriptor(target, key)) : desc;
    if (typeof Reflect === "object" && typeof Reflect.decorate === "function") {
        // prefer the runtime implementation (e.g. provided by reflect-metadata)
        result = Reflect.decorate(decorators, target, key, desc);
    }
    else {
        // apply decorators bottom-up; a truthy return value replaces the current result
        for (let i = decorators.length - 1; i >= 0; i--) {
            const decorate = decorators[i];
            if (decorate) {
                const applied = argCount < 3 ? decorate(result)
                    : argCount > 3 ? decorate(target, key, result)
                        : decorate(target, key);
                result = applied || result;
            }
        }
    }
    // a method/accessor decorator that produced a descriptor is installed on the target
    if (argCount > 3 && result)
        Object.defineProperty(target, key, result);
    return result;
};
import { ShaderMaterial, SRGBColorSpace, Texture, Vector2, Vector4, VideoTexture } from "three";
import { isDevEnvironment } from "../engine/debug/index.js";
import { ObjectUtils, PrimitiveType } from "../engine/engine_create_objects.js";
import { awaitInput } from "../engine/engine_input_utils.js";
import { serializable } from "../engine/engine_serialization_decorator.js";
import { getWorldScale } from "../engine/engine_three_utils.js";
import { getParam } from "../engine/engine_utils.js";
import { Behaviour, GameObject } from "./Component.js";
import { Renderer } from "./Renderer.js";
// truthy when the page was loaded with the `debugvideo` query parameter — enables verbose console logging throughout this file
const debug = getParam("debugvideo");
/** How the VideoPlayer adjusts its target object's scale to the video aspect ratio. */
export var AspectMode;
(function (AspectMode) {
    // build both the forward (name -> value) and reverse (value -> name) lookup,
    // matching the TypeScript numeric-enum emit
    const members = [["None", 0], ["AdjustHeight", 1], ["AdjustWidth", 2]];
    for (const [name, value] of members) {
        AspectMode[name] = value;
        AspectMode[value] = name;
    }
})(AspectMode || (AspectMode = {}));
/** Where the VideoPlayer takes its content from. */
export var VideoSource;
(function (VideoSource) {
    // forward and reverse mapping, matching the TypeScript numeric-enum emit
    /** Use the current clip as the video content source. */
    VideoSource.VideoClip = 0;
    VideoSource[0] = "VideoClip";
    /** Use the current URL as the video content source. */
    VideoSource.Url = 1;
    VideoSource[1] = "Url";
})(VideoSource || (VideoSource = {}));
/** How the video's audio is routed. */
export var VideoAudioOutputMode;
(function (VideoAudioOutputMode) {
    // forward and reverse mapping, matching the TypeScript numeric-enum emit
    const members = [["None", 0], ["AudioSource", 1], ["Direct", 2], ["APIOnly", 3]];
    for (const [name, value] of members) {
        VideoAudioOutputMode[name] = value;
        VideoAudioOutputMode[value] = name;
    }
})(VideoAudioOutputMode || (VideoAudioOutputMode = {}));
/** Where the video output is rendered. */
export var VideoRenderMode;
(function (VideoRenderMode) {
    // forward and reverse mapping, matching the TypeScript numeric-enum emit
    const members = [
        ["CameraFarPlane", 0],
        ["CameraNearPlane", 1],
        ["RenderTexture", 2],
        ["MaterialOverride", 3],
    ];
    for (const [name, value] of members) {
        VideoRenderMode[name] = value;
        VideoRenderMode[value] = name;
    }
})(VideoRenderMode || (VideoRenderMode = {}));
/**
* The VideoPlayer component can be used to playback video clips from urls, streams or m3u8 playlists (livestreams)
* @example Add a video player component to a game object and set the url to a video file. The video will start playing once the object becomes active in your scene
* ```typescript
* // Add a video player component to a game object and set the url to a video file. The video will start playing once the object becomes active in your scene
* const videoPlayer = addComponent(obj, VideoPlayer, {
* url: "https://www.w3schools.com/html/mov_bbb.mp4",
* playOnAwake: true,
* });
* ```
* @category Multimedia
* @group Components
*/
export class VideoPlayer extends Behaviour {
/**
* When true the video will start playing as soon as the component is enabled
*/
playOnAwake = true;
/**
* The aspect mode to use for the video. If
*/
aspectMode = AspectMode.None;
clip = null;
// set a default src, this should not be undefined
source = VideoSource.Url;
/**
* The video clip url to play.
*/
get url() { return this._url; }
/**
* The video clip to play.
*/
set url(val) {
const prev = this._url;
const changed = prev !== val;
if (this.__didAwake) {
if (changed) {
this.setClipURL(val ?? "");
}
}
else
this._url = val;
}
_url = null;
renderMode;
targetMaterialProperty;
targetMaterialRenderer;
targetTexture;
time = 0;
_playbackSpeed = 1;
/**
* Get the video playback speed. Increasing this value will speed up the video, decreasing it will slow it down.
* @default 1
*/
get playbackSpeed() {
return this._videoElement?.playbackRate ?? this._playbackSpeed;
}
/**
* Set the video playback speed. Increasing this value will speed up the video, decreasing it will slow it down.
*/
set playbackSpeed(val) {
this._playbackSpeed = val;
if (this._videoElement)
this._videoElement.playbackRate = val;
}
_isLooping = false;
get isLooping() {
return this._videoElement?.loop ?? this._isLooping;
}
set isLooping(val) {
this._isLooping = val;
if (this._videoElement)
this._videoElement.loop = val;
}
/**
* @returns the current time of the video in seconds
*/
get currentTime() {
return this._videoElement?.currentTime ?? this.time;
}
/**
* set the current time of the video in seconds
*/
set currentTime(val) {
if (this._videoElement) {
this._videoElement.currentTime = val;
}
else
this.time = val;
}
/**
* @returns true if the video is currently playing
*/
get isPlaying() {
const video = this._videoElement;
if (video) {
if (video.currentTime > 0 && !video.paused && !video.ended
&& video.readyState > video.HAVE_CURRENT_DATA)
return true;
else if (video.srcObject) {
const stream = video.srcObject;
if (stream.active)
return true;
}
}
return false;
}
get crossOrigin() {
return this._videoElement?.crossOrigin ?? this._crossOrigin;
}
set crossOrigin(val) {
this._crossOrigin = val;
if (this._videoElement) {
if (val !== null)
this._videoElement.setAttribute("crossorigin", val);
else
this._videoElement.removeAttribute("crossorigin");
}
}
/**
* the material that is used to render the video
*/
get videoMaterial() {
if (!this._videoMaterial)
if (!this.create(false))
return null;
return this._videoMaterial;
}
/**
* the video texture that is used to render the video
*/
get videoTexture() {
if (!this._videoTexture)
if (!this.create(false))
return null;
return this._videoTexture;
}
/**
* the HTMLVideoElement that is used to play the video
*/
get videoElement() {
if (!this._videoElement)
if (!this.create(false))
return null;
return this._videoElement;
}
/**
* Request the browser to enter picture in picture mode
* @link https://developer.mozilla.org/en-US/docs/Web/API/Picture-in-Picture_API
* @returns the promise returned by the browser
*/
requestPictureInPicture() {
if (this._videoElement)
return this._videoElement.requestPictureInPicture();
return null;
}
/**
* @returns true if the video is muted
*/
get muted() {
return this._videoElement?.muted ?? this._muted;
}
/**
* set the video to be muted
*/
set muted(val) {
this._muted = val;
if (this._videoElement)
this._videoElement.muted = val;
}
_muted = false;
/**
* The current video clip that is being played
*/
get currentVideo() {
return this.clip;
}
set audioOutputMode(mode) {
if (mode !== this._audioOutputMode) {
if (mode === VideoAudioOutputMode.AudioSource && isDevEnvironment())
console.warn("VideoAudioOutputMode.AudioSource is not yet implemented");
this._audioOutputMode = mode;
this.updateVideoElementSettings();
}
}
get audioOutputMode() { return this._audioOutputMode; }
_audioOutputMode = VideoAudioOutputMode.Direct;
/** Set this to false to pause video playback while the tab is not active
* @default true
*/
playInBackground = true;
_crossOrigin = "anonymous";
_videoElement = null;
_videoTexture = null;
_videoMaterial = null;
_isPlaying = false;
wasPlaying = false;
/** ensure's the video element has been created and will start loading the clip */
preloadVideo() {
if (debug)
console.log("Video Preload: " + this.name, this.clip);
this.create(false);
}
/** @deprecated use `preloadVideo()` */
preload() { this.preloadVideo(); }
/** Set a new video stream
* starts to play automatically if the videoplayer hasnt been active before and playOnAwake is true */
setVideo(video) {
this.clip = video;
this.source = VideoSource.VideoClip;
if (!this._videoElement)
this.create(this.playOnAwake);
else {
// TODO: how to prevent interruption error when another video is already playing
this._videoElement.srcObject = video;
if (this._isPlaying)
this.play();
this.updateAspect();
}
}
setClipURL(url) {
if (this._url === url)
return;
this._url = url;
this.source = VideoSource.Url;
if (debug)
console.log("set url", url);
if (!this._videoElement)
this.create(this.playOnAwake);
else {
if (url.endsWith(".m3u8") || url.includes(".m3u")) {
this.ensureM3UCanBePlayed();
}
else {
this._videoElement.src = url;
if (this._isPlaying) {
this.stop();
this.play();
}
}
}
}
/** @internal */
onEnable() {
if (debug)
console.log("VideoPlayer.onEnable", VideoSource[this.source], this.clip, this.url, this);
window.addEventListener('visibilitychange', this.visibilityChanged);
if (this.playOnAwake === true) {
this.create(true);
}
else {
this.preloadVideo();
}
if (this.screenspace) {
this._overlay?.start();
}
else
this._overlay?.stop();
}
/** @internal */
onDisable() {
window.removeEventListener('visibilitychange', this.visibilityChanged);
this._overlay?.stop();
this.pause();
}
visibilityChanged = (_) => {
switch (document.visibilityState) {
case "hidden":
if (!this.playInBackground) {
this.wasPlaying = this._isPlaying;
this.pause();
}
break;
case "visible":
if (this.wasPlaying && !this._isPlaying)
this.play();
break;
}
};
/** @internal */
onDestroy() {
if (this._videoElement) {
this.videoElement?.remove();
this._videoElement = null;
}
if (this._videoTexture) {
this._videoTexture.dispose();
this._videoTexture = null;
}
}
_receivedInput = false;
/**
* @internal
*/
constructor() {
super();
awaitInput(() => {
this._receivedInput = true;
this.updateVideoElementSettings();
});
this._targetObjects = [];
if (getParam("videoscreenspace")) {
window.addEventListener("keydown", evt => {
if (evt.key === "f") {
this.screenspace = !this.screenspace;
}
});
}
}
/** start playing the video source */
play() {
if (!this._videoElement)
this.create(false);
if (!this._videoElement) {
if (debug)
console.warn("Can not play: no video element found", this);
return;
}
if (this._isPlaying && !this._videoElement?.ended && !this._videoElement?.paused)
return;
this._isPlaying = true;
if (!this._receivedInput)
this._videoElement.muted = true;
this.handleBeginPlaying(false);
if (this.shouldUseM3U) {
this.ensureM3UCanBePlayed();
return;
}
if (debug)
console.log("Video Play()", this.clip, this._videoElement, this.time);
this._videoElement.currentTime = this.time;
this._videoElement.play().catch(err => {
console.log(err);
// https://developer.chrome.com/blog/play-request-was-interrupted/
if (debug)
console.error("Error playing video", err, "CODE=" + err.code, this.videoElement?.src, this);
setTimeout(() => {
if (this._isPlaying && !this.destroyed && this.activeAndEnabled)
this.play();
}, 1000);
});
if (debug)
console.log("play", this._videoElement, this.time);
}
/**
* Stop the video playback. This will reset the video to the beginning
*/
stop() {
this._isPlaying = false;
this.time = 0;
if (!this._videoElement)
return;
this._videoElement.currentTime = 0;
this._videoElement.pause();
if (debug)
console.log("STOP", this);
}
/**
* Pause the video playback
*/
pause() {
this.time = this._videoElement?.currentTime ?? 0;
this._isPlaying = false;
this._videoElement?.pause();
if (debug)
console.log("PAUSE", this, this.currentTime);
}
/** create the video element and assign the video source url or stream */
create(playAutomatically) {
let src;
switch (this.source) {
case VideoSource.VideoClip:
src = this.clip;
break;
case VideoSource.Url:
src = this.url;
if (!src?.length && typeof this.clip === "string")
src = this.clip;
break;
}
if (!src) {
if (debug)
console.warn("No video source set", this);
return false;
}
if (!this._videoElement) {
if (debug)
console.warn("Create VideoElement", this);
this._videoElement = this.createVideoElement();
this.context.domElement.shadowRoot.prepend(this._videoElement);
// hide it because otherwise it would overlay the website with default css
this.updateVideoElementStyles();
}
if (typeof src === "string") {
if (debug)
console.log("Set Video src", src);
this._videoElement.src = src;
// Nor sure why we did this here, but with this code the video does not restart when being paused / enable toggled
// const str = this._videoElement["captureStream"]?.call(this._videoElement);
// this.clip = str;
}
else {
if (debug)
console.log("Set Video srcObject", src);
this._videoElement.srcObject = src;
}
if (!this._videoTexture)
this._videoTexture = new VideoTexture(this._videoElement);
this._videoTexture.flipY = false;
this._videoTexture.colorSpace = SRGBColorSpace;
if (playAutomatically)
this.handleBeginPlaying(playAutomatically);
if (debug)
console.log("Video: handle playing done...", src, playAutomatically);
return true;
}
updateAspect() {
if (this.aspectMode === AspectMode.None)
return;
this.startCoroutine(this.updateAspectImpl());
}
_overlay = null;
/**
* If true the video will be rendered in screenspace mode and overlayed on top of the scene.
* Alternatively you can also request the video to be played in PictureInPicture mode by calling `requestPictureInPicture()`
*/
get screenspace() {
return this._overlay?.enabled ?? false;
}
set screenspace(val) {
if (val) {
if (!this._videoTexture)
return;
if (!this._overlay)
this._overlay = new VideoOverlay(this.context);
this._overlay.add(this._videoTexture);
}
else
this._overlay?.remove(this._videoTexture);
if (this._overlay)
this._overlay.enabled = val;
}
_targetObjects;
createVideoElement() {
const video = document.createElement("video");
if (this._crossOrigin)
video.setAttribute("crossorigin", this._crossOrigin);
if (debug)
console.log("created video element", video);
return video;
}
handleBeginPlaying(playAutomatically) {
if (!this.activeAndEnabled)
return;
if (!this._videoElement)
return;
this._targetObjects.length = 0;
let target = this.gameObject;
switch (this.renderMode) {
case VideoRenderMode.MaterialOverride:
target = this.targetMaterialRenderer?.gameObject;
if (!target)
target = GameObject.getComponent(this.gameObject, Renderer)?.gameObject;
break;
case VideoRenderMode.RenderTexture:
console.error("VideoPlayer renderTexture not implemented yet. Please use material override instead");
return;
}
if (!target) {
console.error("Missing target for video material renderer", this.name, VideoRenderMode[this.renderMode], this);
return;
}
const mat = target["material"];
if (mat) {
this._targetObjects.push(target);
if (mat !== this._videoMaterial) {
this._videoMaterial = mat.clone();
target["material"] = this._videoMaterial;
}
const fieldName = "map";
const videoMaterial = this._videoMaterial;
if (!this.targetMaterialProperty) {
videoMaterial[fieldName] = this._videoTexture;
}
else {
switch (this.targetMaterialProperty) {
default:
videoMaterial[fieldName] = this._videoTexture;
break;
// doesnt render:
// case "emissiveTexture":
// console.log(this.videoMaterial);
// // (this.videoMaterial as any).map = this.videoTexture;
// (this.videoMaterial as any).emissive?.set(1,1,1);// = this.videoTexture;
// (this.videoMaterial as any).emissiveMap = this.videoTexture;
// break;
}
}
}
else {
console.warn("Can not play video, no material found, this might be a multimaterial case which is not supported yet");
return;
}
this.updateVideoElementSettings();
this.updateVideoElementStyles();
if (playAutomatically) {
if (this.shouldUseM3U) {
this.ensureM3UCanBePlayed();
}
this.play();
}
}
updateVideoElementSettings() {
if (!this._videoElement)
return;
this._videoElement.loop = this._isLooping;
this._videoElement.currentTime = this.currentTime;
this._videoElement.playbackRate = this._playbackSpeed;
// dont open in fullscreen on ios
this._videoElement.playsInline = true;
let muted = !this._receivedInput || this.audioOutputMode === VideoAudioOutputMode.None;
if (!muted && this._muted)
muted = true;
this._videoElement.muted = muted;
if (this.playOnAwake)
this._videoElement.autoplay = true;
}
updateVideoElementStyles() {
if (!this._videoElement)
return;
// set style here so preview frame is rendered
// set display and selectable because otherwise is interfers with input/focus e.g. breaks orbit control
this._videoElement.style.userSelect = "none";
this._videoElement.style.visibility = "hidden";
this._videoElement.style.display = "none";
this.updateAspect();
}
_updateAspectRoutineId = -1;
*updateAspectImpl() {
const id = ++this._updateAspectRoutineId;
const lastAspect = undefined;
const stream = this.clip;
while (id === this._updateAspectRoutineId && this.aspectMode !== AspectMode.None && this.clip && stream === this.clip && this._isPlaying) {
if (!stream || typeof stream === "string") {
return;
}
let aspect = undefined;
for (const track of stream.getVideoTracks()) {
const settings = track.getSettings();
if (settings && settings.width && settings.height) {
aspect = settings.width / settings.height;
break;
}
// on firefox capture canvas stream works but looks like
// the canvas stream track doesnt contain settings?!!?
else {
aspect = this.context.renderer.domElement.clientWidth / this.context.renderer.domElement.clientHeight;
}
}
if (aspect === undefined) {
for (let i = 0; i < 10; i++)
yield;
if (!this.isPlaying)
break;
continue;
}
if (lastAspect === aspect) {
yield;
continue;
}
for (const obj of this._targetObjects) {
let worldAspect = 1;
if (obj.parent) {
const parentScale = getWorldScale(obj.parent);
worldAspect = parentScale.x / parentScale.y;
}
switch (this.aspectMode) {
case AspectMode.AdjustHeight:
obj.scale.y = 1 / aspect * obj.scale.x * worldAspect;
break;
case AspectMode.AdjustWidth:
obj.scale.x = aspect * obj.scale.y * worldAspect;
break;
}
}
for (let i = 0; i < 3; i++)
yield;
}
}
get shouldUseM3U() { return this.url != undefined && (this.url.endsWith(".m3u8") || this.url.endsWith(".m3u")) && this.source === VideoSource.Url; }
ensureM3UCanBePlayed() {
if (!this.shouldUseM3U)
return;
let hls_script = document.head.querySelector("script[data-hls_library]");
if (!hls_script) {
if (debug)
console.log("HLS: load script");
hls_script = document.createElement("script");
hls_script.dataset["hls_library"] = "hls.js";
hls_script.src = "https://cdn.jsdelivr.net/npm/hls.js@1";
hls_script.addEventListener("load", this.onHlsAvailable);
document.head.append(hls_script);
}
else if (globalThis["Hls"]) {
this.onHlsAvailable();
}
else {
hls_script.addEventListener("load", this.onHlsAvailable);
}
}
_hls;
onHlsAvailable = () => {
if (debug)
console.log("HLS: available", this.clip);
if (!this.shouldUseM3U || !this.url)
return;
if (!this._hls)
this._hls = new Hls();
this.videoElement.autoplay = true;
this._hls.loadSource(this.url);
this._hls.attachMedia(this.videoElement);
this._videoElement?.play();
if (debug)
console.log("HLS: loaded", this.clip);
};
}
// Serialization metadata emitted by the TypeScript compiler from @serializable
// decorators: registers which VideoPlayer members (fields and accessors) are
// restored from exported scene data, and with which type where one is given.
__decorate([
    serializable()
], VideoPlayer.prototype, "playOnAwake", void 0);
__decorate([
    serializable()
], VideoPlayer.prototype, "aspectMode", void 0);
__decorate([
    serializable(URL)
], VideoPlayer.prototype, "clip", void 0);
__decorate([
    serializable()
], VideoPlayer.prototype, "source", void 0);
__decorate([
    serializable(URL)
], VideoPlayer.prototype, "url", null);
__decorate([
    serializable()
], VideoPlayer.prototype, "renderMode", void 0);
__decorate([
    serializable()
], VideoPlayer.prototype, "targetMaterialProperty", void 0);
__decorate([
    serializable(Renderer)
], VideoPlayer.prototype, "targetMaterialRenderer", void 0);
__decorate([
    serializable(Texture)
], VideoPlayer.prototype, "targetTexture", void 0);
__decorate([
    serializable()
], VideoPlayer.prototype, "time", void 0);
__decorate([
    serializable()
], VideoPlayer.prototype, "playbackSpeed", null);
__decorate([
    serializable()
], VideoPlayer.prototype, "isLooping", null);
__decorate([
    serializable()
], VideoPlayer.prototype, "audioOutputMode", null);
/**
 * Screenspace presentation helper for VideoPlayer: shows the most recently added
 * video texture on a fullscreen quad and wires up user input (zoom/pan/escape)
 * through a VideoOverlayInput instance while active.
 */
class VideoOverlay {
    context;
    /** @param context the engine context owning the scene and dom element */
    constructor(context) {
        this.context = context;
        this._input = new VideoOverlayInput(this);
    }
    /** true while the overlay quad is active in the scene */
    get enabled() {
        return this._isInScreenspaceMode;
    }
    set enabled(val) {
        if (val)
            this.start();
        else
            this.stop();
    }
    /** register a video texture as a candidate for screenspace display (ignores duplicates) */
    add(video) {
        if (this._videos.indexOf(video) === -1) {
            this._videos.push(video);
        }
    }
    /** unregister a previously added video texture */
    remove(video) {
        if (!video)
            return;
        const index = this._videos.indexOf(video);
        if (index >= 0) {
            this._videos.splice(index, 1);
        }
    }
    /** show the most recently added texture on a fullscreen quad and enable input handling */
    start() {
        if (this._isInScreenspaceMode)
            return;
        // fixed: was `length < 0`, which can never be true — bail out when nothing was added
        if (this._videos.length === 0)
            return;
        const texture = this._videos[this._videos.length - 1];
        if (!texture)
            return;
        this._isInScreenspaceMode = true;
        if (!this._screenspaceModeQuad) {
            this._screenspaceModeQuad = ObjectUtils.createPrimitive(PrimitiveType.Quad, {
                material: new ScreenspaceTexture(texture)
            });
            if (!this._screenspaceModeQuad)
                return;
            // scale the quad so it covers clip space — the vertex shader emits position unmodified
            this._screenspaceModeQuad.geometry.scale(2, 2, 2);
        }
        const quad = this._screenspaceModeQuad;
        this.context.scene.add(quad);
        this.updateScreenspaceMaterialUniforms();
        const mat = quad.material;
        mat?.reset();
        this._input?.enable(mat);
    }
    /** remove the overlay quad from the scene and detach the input handlers */
    stop() {
        this._isInScreenspaceMode = false;
        if (this._screenspaceModeQuad) {
            this._input?.disable();
            this._screenspaceModeQuad.removeFromParent();
        }
    }
    /** push the current element aspect ratio into the overlay material */
    updateScreenspaceMaterialUniforms() {
        const mat = this._screenspaceModeQuad?.material;
        if (!mat)
            return;
        mat.screenAspect = this.context.domElement.clientWidth / this.context.domElement.clientHeight;
    }
    _videos = [];
    _screenspaceModeQuad;
    _isInScreenspaceMode = false;
    _input;
}
/**
 * Wires window-level input (resize, escape, wheel-zoom, drag-pan, pinch-zoom,
 * double-tap-to-close) to the screenspace video overlay while it is enabled.
 *
 * Fix: previously the mousemove/pointermove/touchstart/touchmove/touchend handlers
 * were registered on every enable() call but never removed in disable(), leaking
 * listeners — and a leaked touchstart handler could close the overlay on double-tap
 * even after disable. All listeners are now tracked and removed together.
 */
class VideoOverlayInput {
    _onResizeScreenFn;
    _onKeyUpFn;
    _onMouseWheelFn;
    context;
    overlay;
    constructor(overlay) {
        this.overlay = overlay;
        this.context = overlay.context;
    }
    _material;
    // every window listener registered by enable(), so disable() can detach all of them
    _registeredListeners = [];
    // register a window listener and remember it for removal in disable()
    _addListener(type, handler, options) {
        window.addEventListener(type, handler, options);
        this._registeredListeners.push({ type, handler });
    }
    /** attach all input handlers; `mat` is the ScreenspaceTexture being zoomed/panned */
    enable(mat) {
        this._material = mat;
        this._addListener("resize", this._onResizeScreenFn = () => {
            this.overlay.updateScreenspaceMaterialUniforms();
        });
        this._addListener("keyup", this._onKeyUpFn = (args) => {
            if (args.key === "Escape")
                this.overlay.stop();
        });
        // passive: false so preventDefault() can stop the page from scrolling while zooming
        this._addListener("wheel", this._onMouseWheelFn = (args) => {
            if (this.overlay.enabled) {
                mat.zoom += args.deltaY * .0005;
                args.preventDefault();
            }
        }, { passive: false });
        const delta = new Vector2();
        // drag with the primary pointer pans the video (mouse)
        this._addListener("mousemove", (args) => {
            if (this.overlay.enabled && this.context.input.getPointerPressed(0)) {
                const normalizedMovement = new Vector2(args.movementX, args.movementY);
                normalizedMovement.x /= this.context.domElement.clientWidth;
                normalizedMovement.y /= this.context.domElement.clientHeight;
                delta.set(normalizedMovement.x, normalizedMovement.y);
                delta.multiplyScalar(mat.zoom / -this.context.time.deltaTime * .01);
                mat.offset = mat.offset.add(delta);
            }
        });
        // single-finger drag pans the video (touch/pen)
        this._addListener("pointermove", (args) => {
            if (this.overlay.enabled && this.context.input.getPointerPressed(0)) {
                const count = this.context.input.getTouchesPressedCount();
                if (count === 1) {
                    delta.set(args.movementX, args.movementY);
                    delta.multiplyScalar(mat.zoom * -this.context.time.deltaTime * .05);
                    mat.offset = mat.offset.add(delta);
                }
            }
        });
        let lastTouchStartTime = 0;
        this._addListener("touchstart", e => {
            if (e.touches.length < 2) {
                // double tap closes the overlay
                if (this.context.time.time - lastTouchStartTime < .3) {
                    this.overlay.stop();
                }
                lastTouchStartTime = this.context.time.time;
                return;
            }
            this._isPinching = true;
            this._lastPinch = 0;
        });
        // two-finger pinch adjusts the zoom based on the distance change between touches
        this._addListener("touchmove", e => {
            if (!this._isPinching || !this._material)
                return;
            const touch1 = e.touches[0];
            const touch2 = e.touches[1];
            const dx = touch1.clientX - touch2.clientX;
            const dy = touch1.clientY - touch2.clientY;
            const distance = Math.sqrt(dx * dx + dy * dy);
            if (this._lastPinch !== 0) {
                const pinchDelta = distance - this._lastPinch;
                this._material.zoom -= pinchDelta * .004;
            }
            this._lastPinch = distance;
        });
        this._addListener("touchend", () => {
            this._isPinching = false;
        });
    }
    _isPinching = false;
    _lastPinch = 0;
    /** detach every handler that enable() registered */
    disable() {
        for (const { type, handler } of this._registeredListeners)
            window.removeEventListener(type, handler);
        this._registeredListeners.length = 0;
        this._onResizeScreenFn = undefined;
        this._onKeyUpFn = undefined;
        this._onMouseWheelFn = undefined;
    }
}
/**
 * Fullscreen shader material used by the screenspace video overlay.
 * Pan (x, y) and zoom (z) are packed into a single `offsetScale` vec4 uniform;
 * the vertex shader letterboxes the video into the current screen aspect.
 */
class ScreenspaceTexture extends ShaderMaterial {
    // maxZoom : number = 10
    /** @param tex the video texture sampled by the fullscreen quad */
    constructor(tex) {
        super();
        this.uniforms = {
            map: { value: tex },
            screenAspect: { value: 1 },
            offsetScale: { value: new Vector4(0, 0, 1, 1) }
        };
        this.vertexShader = `
uniform sampler2D map;
uniform float screenAspect;
uniform vec4 offsetScale;
varying vec2 vUv;
void main() {
gl_Position = vec4( position , 1.0 );
vUv = uv;
vUv.y = 1. - vUv.y;
// fit into screen
ivec2 res = textureSize(map, 0);
float videoAspect = float(res.x) / float(res.y);
float aspect = videoAspect / screenAspect;
if(aspect >= 1.0)
{
vUv.y = vUv.y * aspect;
float offset = (1. - aspect) * .5;
vUv.y = vUv.y + offset;
}
else
{
vUv.x = vUv.x / aspect;
float offset = (1. - 1. / aspect) * .5;
vUv.x = vUv.x + offset;
}
vUv.x -= .5;
vUv.y -= .5;
vUv.x *= offsetScale.z;
vUv.y *= offsetScale.z;
vUv.x += offsetScale.x;
vUv.y += offsetScale.y;
vUv.x += .5;
vUv.y += .5;
}
`;
        this.fragmentShader = `
uniform sampler2D map;
varying vec2 vUv;
void main() {
if(vUv.x < 0. || vUv.x > 1. || vUv.y < 0. || vUv.y > 1.)
gl_FragColor = vec4(0., 0., 0., 1.);
else
{
vec4 texcolor = texture2D(map, vUv);
gl_FragColor = texcolor;
}
}
`;
    }
    /** aspect ratio (width / height) of the element the overlay covers */
    set screenAspect(val) {
        this.uniforms["screenAspect"].value = val;
        this.needsUpdate = true;
    }
    // scratch vector returned by the offset getter (avoids per-call allocation)
    _offset = new Vector2();
    /** pan offset, stored in offsetScale.xy */
    get offset() {
        const packed = this.uniforms["offsetScale"].value;
        return this._offset.set(packed.x, packed.y);
    }
    set offset(vec) {
        const packed = this.uniforms["offsetScale"].value;
        packed.x = vec.x;
        packed.y = vec.y;
        this.uniforms["offsetScale"].value = packed;
        this.needsUpdate = true;
    }
    /** zoom factor, stored in offsetScale.z and clamped to a small positive minimum */
    set zoom(val) {
        const packed = this.uniforms["offsetScale"].value;
        packed.z = Math.max(val, .001);
        this.needsUpdate = true;
    }
    get zoom() {
        return this.uniforms["offsetScale"].value.z;
    }
    /** restore default pan (0, 0) and zoom (1) */
    reset() {
        this.offset = this.offset.set(0, 0);
        this.zoom = 1;
        this.needsUpdate = true;
    }
}
//# sourceMappingURL=VideoPlayer.js.map