UNPKG

@needle-tools/engine

Version:

Needle Engine is a web-based runtime for 3D apps. It runs on your machine for development, with great integrations into editors like Unity or Blender — and can be deployed onto any device! It is flexible and extensible, with networking and XR built in.

1,113 lines (1,112 loc) 74.8 kB
import 'three/examples/jsm/renderers/webgl-legacy/nodes/WebGLNodes.js'; import { Color, DepthTexture, NearestFilter, NoToneMapping, Object3D, PCFSoftShadowMap, PerspectiveCamera, RGBAFormat, Scene, SRGBColorSpace, Texture, WebGLRenderer, WebGLRenderTarget } from 'three'; /** @ts-ignore (not yet in types?) */ import { BasicNodeLibrary } from "three"; import * as Stats from 'three/examples/jsm/libs/stats.module.js'; import { nodeFrame } from "three/examples/jsm/renderers/webgl-legacy/nodes/WebGLNodeBuilder.js"; import { isDevEnvironment, LogType, showBalloonError, showBalloonMessage } from './debug/index.js'; import { Addressables } from './engine_addressables.js'; import { AnimationsRegistry } from './engine_animation.js'; import { Application } from './engine_application.js'; import { AssetDatabase } from './engine_assetdatabase.js'; import { VERSION } from './engine_constants.js'; import { ContextEvent, ContextRegistry } from './engine_context_registry.js'; import { WaitForPromise } from './engine_coroutine.js'; import { ObjectUtils } from "./engine_create_objects.js"; import { destroy, foreachComponent } from './engine_gameobject.js'; import { getLoader } from './engine_gltf.js'; import { Input } from './engine_input.js'; import { invokeLifecycleFunctions } from './engine_lifecycle_functions_internal.js'; import { LightDataRegistry } from './engine_lightdata.js'; import { LODsManager } from "./engine_lods.js"; import * as looputils from './engine_mainloop_utils.js'; import { NetworkConnection } from './engine_networking.js'; import { Physics } from './engine_physics.js'; import { PlayerViewManager } from './engine_playerview.js'; import { RendererData as SceneLighting } from './engine_scenelighting.js'; import { logHierarchy } from './engine_three_utils.js'; import { Time } from './engine_time.js'; import { patchTonemapping } from './engine_tonemapping.js'; import { deepClone, delay, DeviceUtilities, getParam } from './engine_utils.js'; import { NeedleMenu } from 
'./webcomponents/needle menu/needle-menu.js'; const debug = getParam("debugcontext"); const stats = getParam("stats"); const debugActive = getParam("debugactive"); const debugframerate = getParam("debugframerate"); const debugCoroutine = getParam("debugcoroutine"); // this is where functions that setup unity scenes will be pushed into // those will be accessed from our custom html element to load them into their context export const build_scene_functions = {}; export class ContextArgs { name; /** for debugging only */ alias; /** the hash is used as a seed when initially loading the scene files */ hash; /** when true the context will not check if it's visible in the viewport and always update and render */ runInBackground; /** the DOM element the context belongs to or is inside of (this does not have to be the canvas. use renderer.domElement if you want to access the dom canvas) */ domElement; /** externally owned renderer */ renderer; /** externally owned camera */ camera; /** externally owned scene */ scene; } export var FrameEvent; (function (FrameEvent) { FrameEvent[FrameEvent["Start"] = -1] = "Start"; FrameEvent[FrameEvent["EarlyUpdate"] = 0] = "EarlyUpdate"; FrameEvent[FrameEvent["Update"] = 1] = "Update"; FrameEvent[FrameEvent["LateUpdate"] = 2] = "LateUpdate"; FrameEvent[FrameEvent["OnBeforeRender"] = 3] = "OnBeforeRender"; FrameEvent[FrameEvent["OnAfterRender"] = 4] = "OnAfterRender"; FrameEvent[FrameEvent["PrePhysicsStep"] = 9] = "PrePhysicsStep"; FrameEvent[FrameEvent["PostPhysicsStep"] = 10] = "PostPhysicsStep"; FrameEvent[FrameEvent["Undefined"] = -1] = "Undefined"; })(FrameEvent || (FrameEvent = {})); export function registerComponent(script, context) { if (!script) return; if (!script.isComponent) { if (isDevEnvironment() || debug) console.error("Registered script is not a Needle Engine component. \nThe script will be ignored. 
Please make sure your component extends \"Behaviour\" imported from \"@needle-tools/engine\"\n", script); return; } if (!context) { context = Context.Current; if (debug) console.warn("> Registering component without context"); } const new_scripts = context?.new_scripts; if (!new_scripts.includes(script)) { new_scripts.push(script); } } /** * The context is the main object that holds all the data and state of the Needle Engine. * It can be used to access the scene, renderer, camera, input, physics, networking, and more. * @example * ```typescript * import { Behaviour } from "@needle-tools/engine"; * import { Mesh, BoxGeometry, MeshBasicMaterial } from "three"; * export class MyScript extends Behaviour { * start() { * console.log("Hello from MyScript"); * this.context.scene.add(new Mesh(new BoxGeometry(), new MeshBasicMaterial())); * } * } * ``` */ export class Context { static _defaultTargetFramerate = { value: 90, toString() { return this.value; } }; /** When a new context is created this is the framerate that will be used by default */ static get DefaultTargetFrameRate() { return Context._defaultTargetFramerate.value; } /** When a new context is created this is the framerate that will be used by default */ static set DefaultTargetFrameRate(val) { Context._defaultTargetFramerate.value = val; } static _defaultWebglRendererParameters = { antialias: true, alpha: false, // Note: this is due to a bug on OSX devices. See NE-5370 powerPreference: (DeviceUtilities.isiOS() || DeviceUtilities.isMacOS()) ? "default" : "high-performance", stencil: true, // logarithmicDepthBuffer: true, // reverseDepthBuffer: true, // https://github.com/mrdoob/three.js/issues/29770 }; /** The default parameters that will be used when creating a new WebGLRenderer. * Modify in global context to change the default parameters for all new contexts. 
* @example * ```typescript * import { Context } from "@needle-tools/engine"; * Context.DefaultWebGLRendererParameters.antialias = false; * ``` */ static get DefaultWebGLRendererParameters() { return Context._defaultWebglRendererParameters; } /** The needle engine version */ get version() { return VERSION; } /** The currently active context. Only set during the update loops */ static get Current() { return ContextRegistry.Current; } /** @internal this property should not be set by user code */ static set Current(context) { ContextRegistry.Current = context; } static get All() { return ContextRegistry.All; } /** The name of the context */ name; /** An alias for the context */ alias; /** When the renderer or camera are managed by an external process (e.g. when running in r3f context). * When this is false you are responsible to call update(timestamp, xframe. * It is also currently assumed that rendering is handled performed by an external process * */ isManagedExternally = false; /** set to true to pause the update loop. You can receive an event for it in your components. * Note that script updates will not be called when paused */ isPaused = false; /** When enabled the application will run while not visible on the page */ runInBackground = false; /** * Set to the target framerate you want your application to run in (you can use ?stats to check the fps) * Set to undefined if you want to run at the maximum framerate */ targetFrameRate; /** Use a higher number for more accurate physics simulation. 
* When undefined physics steps will be 1 for mobile devices and 5 for desktop devices * Set to 0 to disable physics updates * TODO: changing physics steps is currently not supported because then forces that we get from the character controller and rigidbody et al are not correct anymore - this needs to be properly tested before making this configureable */ physicsSteps = 1; /** used to append to loaded assets */ hash; /** The `<needle-engine>` web component */ domElement; appendHTMLElement(element) { if (this.domElement.shadowRoot) return this.domElement.shadowRoot.appendChild(element); else return this.domElement.appendChild(element); } get resolutionScaleFactor() { return this._resolutionScaleFactor; } /** use to scale the resolution up or down of the renderer. default is 1 */ set resolutionScaleFactor(val) { if (val === this._resolutionScaleFactor) return; if (typeof val !== "number") return; if (val <= 0) { console.error("Invalid resolution scale factor", val); return; } this._resolutionScaleFactor = val; this.updateSize(); } _resolutionScaleFactor = 1; // domElement.clientLeft etc doesnt return absolute position _boundingClientRectFrame = -1; _boundingClientRect = null; _domX; _domY; /** update bounding rects + domX, domY */ calculateBoundingClientRect() { // workaround for mozilla webXR viewer if (this.xr) { this._domX = 0; this._domY = 0; return; } // TODO: cache this if (this._boundingClientRectFrame === this.time.frame) return; this._boundingClientRectFrame = this.time.frame; this._boundingClientRect = this.domElement.getBoundingClientRect(); this._domX = this._boundingClientRect.x; this._domY = this._boundingClientRect.y; } /** The width of the `<needle-engine>` element on the website */ get domWidth() { // for mozilla XR if (this.isInAR) return window.innerWidth; return this.domElement.clientWidth; } /** The height of the `<needle-engine>` element on the website */ get domHeight() { // for mozilla XR if (this.isInAR) return window.innerHeight; return 
this.domElement.clientHeight; } /** the X position of the `<needle-engine>` element on the website */ get domX() { this.calculateBoundingClientRect(); return this._domX; } /** the Y position of the `<needle-engine>` element on the website */ get domY() { this.calculateBoundingClientRect(); return this._domY; } /** * Is a XR session currently active and presenting? * @returns true if the xr renderer is currently presenting */ get isInXR() { return this.renderer?.xr?.isPresenting || false; } /** shorthand for `NeedleXRSession.active` * Automatically set by NeedleXRSession when a XR session is active * @returns the active XR session or null if no session is active * */ xr = null; /** * Shorthand for `this.xr?.mode`. AR or VR * @returns the current XR session mode (immersive-vr or immersive-ar) */ get xrSessionMode() { return this.xr?.mode; } /** Shorthand for `this.xrSessionMode === "immersive-vr"` * @returns true if a webxr VR session is currently active. */ get isInVR() { return this.xrSessionMode === "immersive-vr"; } /** * Shorthand for `this.xrSessionMode === "immersive-ar"` * @returns true if a webxr AR session is currently active. */ get isInAR() { return this.xrSessionMode === "immersive-ar"; } /** If a XR session is active and in pass through mode (immersive-ar on e.g. Quest) * @returns true if the XR session is in pass through mode */ get isInPassThrough() { return this.xr ? this.xr.isPassThrough : false; } /** access the raw `XRSession` object (shorthand for `context.renderer.xr.getSession()`). For more control use `NeedleXRSession.active` */ get xrSession() { return this.renderer?.xr?.getSession(); } /** @returns the latest XRFrame (if a XRSession is currently active) * @link https://developer.mozilla.org/en-US/docs/Web/API/XRFrame */ get xrFrame() { return this._xrFrame; } /** @returns the current WebXR camera while the WebXRManager is active (shorthand for `context.renderer.xr.getCamera()`) */ get xrCamera() { return this.renderer.xr.isPresenting ? 
this.renderer?.xr?.getCamera() : undefined; } _xrFrame = null; /** * The AR overlay element is used to display 2D HTML elements while a AR session is active. */ get arOverlayElement() { const el = this.domElement; if (typeof el.getAROverlayContainer === "function") return el.getAROverlayContainer(); return this.domElement; } /** * Current event of the update cycle (e.g. `FrameEvent.EarlyUpdate` or `FrameEvent.OnBeforeRender`) */ get currentFrameEvent() { return this._currentFrameEvent; } _currentFrameEvent = FrameEvent.Undefined; /** * The scene contains all objects in the hierarchy and is automatically rendered by the context every frane. */ scene; /** * The renderer is used to render the scene. It is automatically created when the context is created. */ renderer; /** * The effect composer can be used to render postprocessing effects. If assigned then it will automatically render the scene every frame. */ composer = null; /** * @internal All known components. Don't use directly */ scripts = []; /** * @internal All paused components. Don't use directly */ scripts_pausedChanged = []; /** * @internal All components that have a early update event. Don't use directly */ scripts_earlyUpdate = []; /** * @internal All components that have a update event. Don't use directly */ scripts_update = []; /** * @internal All components that have a late update event. Don't use directly */ scripts_lateUpdate = []; /** * @internal All components that have a onBeforeRender event. Don't use directly */ scripts_onBeforeRender = []; /** * @internal All components that have a onAfterRender event. Don't use directly */ scripts_onAfterRender = []; /** * @internal All components that have coroutines. Don't use directly */ scripts_WithCorroutines = []; /** * @internal Components with immersive-vr event methods. Don't use directly */ scripts_immersive_vr = []; /** * @internal Components with immersive-ar event methods. 
Don't use directly */ scripts_immersive_ar = []; /** * @internal Coroutine data */ coroutines = {}; /** callbacks called once after the context has been created */ post_setup_callbacks = []; /** called every frame at the beginning of the frame (after component start events and before earlyUpdate) */ pre_update_callbacks = []; /** called every frame before rendering (after all component events) */ pre_render_callbacks = []; /** called every frame after rendering (after all component events) */ post_render_callbacks = []; /** called every frame befroe update (this list is emptied every frame) */ pre_update_oneshot_callbacks = []; /** @internal */ new_scripts = []; /** @internal */ new_script_start = []; /** @internal */ new_scripts_pre_setup_callbacks = []; /** @internal */ new_scripts_post_setup_callbacks = []; /** @internal */ new_scripts_xr = []; /** * The **main camera component** of the scene - this camera is used for rendering. * Use `setCurrentCamera` for updating the main camera. */ mainCameraComponent = undefined; /** * The main camera of the scene - this camera is used for rendering * Use `setCurrentCamera` for updating the main camera. */ get mainCamera() { if (this._mainCamera) { return this._mainCamera; } if (this.mainCameraComponent) { const cam = this.mainCameraComponent; if (!cam.threeCamera) cam.buildCamera(); return cam.threeCamera; } if (!this._fallbackCamera) { this._fallbackCamera = new PerspectiveCamera(75, this.domWidth / this.domHeight, 0.1, 1000); } return this._fallbackCamera; } /** Set the main camera of the scene. If set to null the camera of the {@link mainCameraComponent} will be used - this camera is used for rendering */ set mainCamera(cam) { this._mainCamera = cam; } _mainCamera = null; _fallbackCamera = null; /** access application state (e.g. if all audio should be muted) */ application; /** access animation mixer used by components in the scene */ animations; /** access timings (current frame number, deltaTime, timeScale, ...) 
*/ time; /** access input data (e.g. click or touch events) */ input; /** access physics related methods (e.g. raycasting). To access the phyiscs engine use `context.physics.engine` */ physics; /** access networking methods (use it to send or listen to messages or join a networking backend) */ connection; /** * @deprecated AssetDataBase is deprecated */ assets; /** The main light in the scene */ mainLight = null; /** @deprecated Use sceneLighting */ get rendererData() { return this.sceneLighting; } sceneLighting; addressables; lightmaps; players; lodsManager; /** Access the needle menu to add or remove buttons to the menu element */ menu; /** @returns true if the context is fully created and ready */ get isCreated() { return this._isCreated; } _needsUpdateSize = false; _isCreated = false; _isCreating = false; _isVisible = false; _stats = stats ? new Stats.default() : null; constructor(args) { this.name = args?.name || ""; this.alias = args?.alias; this.domElement = args?.domElement || document.body; this.hash = args?.hash; if (args?.renderer) { this.renderer = args.renderer; this.isManagedExternally = true; } if (args?.runInBackground !== undefined) this.runInBackground = args.runInBackground; if (args?.scene) this.scene = args.scene; else this.scene = new Scene(); if (args?.camera) this._mainCamera = args.camera; this.application = new Application(this); this.time = new Time(); this.input = new Input(this); this.physics = new Physics(this); this.connection = new NetworkConnection(this); // eslint-disable-next-line deprecation/deprecation this.assets = new AssetDatabase(); this.sceneLighting = new SceneLighting(this); this.addressables = new Addressables(this); this.lightmaps = new LightDataRegistry(this); this.players = new PlayerViewManager(this); this.menu = new NeedleMenu(this); this.lodsManager = new LODsManager(this); this.animations = new AnimationsRegistry(this); const resizeCallback = () => this._needsUpdateSize = true; window.addEventListener('resize', 
resizeCallback); this._disposeCallbacks.push(() => window.removeEventListener('resize', resizeCallback)); const resizeObserver = new ResizeObserver(_ => this._needsUpdateSize = true); resizeObserver.observe(this.domElement); this._disposeCallbacks.push(() => resizeObserver.disconnect()); this._intersectionObserver = new IntersectionObserver(entries => { this._isVisible = entries[0].isIntersecting; }); this._disposeCallbacks.push(() => this._intersectionObserver?.disconnect()); ContextRegistry.register(this); } /** * Calling this function will dispose the current renderer and create a new one which will then be assigned to the context. It can be used to create a new renderer with custom WebGLRendererParameters. * **Note**: Instead you can also modify the static `Context.DefaultWebGlRendererParameters` before the context is created. * **Note**: This method is recommended because it re-uses an potentially already existing canvas element. This is necessary to keep input event handlers from working (e.g. 
components like OrbitControls subscribe to input events on the canvas) * @returns {WebGLRenderer} the newly created renderer */ createNewRenderer(params) { this.renderer?.dispose(); params = { ...Context.DefaultWebGLRendererParameters, ...params }; if (!params.canvas) { // get canvas already configured in the Needle Engine Web Component const canvas = this.domElement?.shadowRoot?.querySelector("canvas"); if (canvas) { params.canvas = canvas; if (debug) { console.log("Using canvas from shadow root", canvas); } } } if (debug) console.log("Using Renderer Parameters:", params, this.domElement); this.renderer = new WebGLRenderer(params); this.renderer.debug.checkShaderErrors = isDevEnvironment() || getParam("checkshadererrors") === true; // some tonemapping other than "NONE" is required for adjusting exposure with EXR environments this.renderer.toneMappingExposure = 1; // range [0...inf] instead of the usual -15..15 this.renderer.toneMapping = NoToneMapping; // could also set to LinearToneMapping, ACESFilmicToneMapping this.renderer.setClearColor(new Color('lightgrey'), 0); // // @ts-ignore // this.renderer.alpha = false; this.renderer.shadowMap.enabled = true; this.renderer.shadowMap.type = PCFSoftShadowMap; this.renderer.setSize(this.domWidth, this.domHeight); this.renderer.outputColorSpace = SRGBColorSpace; // Injecting the core nodes library here, like WebGPURenderer backends do //@ts-ignore this.renderer.nodes = { library: new BasicNodeLibrary(), modelViewMatrix: null, modelNormalViewMatrix: null, }; // this.renderer.toneMapping = AgXToneMapping; this.lodsManager.setRenderer(this.renderer); this.input.bindEvents(); return this.renderer; } _intersectionObserver = null; internalOnUpdateVisible() { this._intersectionObserver?.disconnect(); this._intersectionObserver?.observe(this.domElement); } _disposeCallbacks = []; /** will request a renderer size update the next render call (will call updateSize the next update) */ requestSizeUpdate() { this._needsUpdateSize = 
true; } /** Clamps the renderer max resolution. If undefined the max resolution is not clamped. Default is undefined */ maxRenderResolution; /** Control the renderer devicePixelRatio. * **Options** * - `auto` - Needle Engine automatically sets the pixel ratio to the current window.devicePixelRatio. * - `manual` - Needle Engine will not change the renderer pixel ratio. You can set it manually. * - `number` - Needle Engine will set the pixel ratio to the given number. The change will be applied to the renderer and the composer (if used) at the end of the current frame. */ get devicePixelRatio() { return this._devicePixelRatio; } set devicePixelRatio(val) { if (val !== this._devicePixelRatio) { this._devicePixelRatio = val; this._needsUpdateSize = true; } } _devicePixelRatio = "auto"; /** * Update the renderer and canvas size. This is also automatically called when a DOM size change is detected. */ updateSize(force = false) { if (force || (!this.isManagedExternally && this.renderer.xr?.isPresenting === false)) { this._needsUpdateSize = false; const scaleFactor = this.resolutionScaleFactor; let width = this.domWidth * scaleFactor; let height = this.domHeight * scaleFactor; if (this.maxRenderResolution) { this.maxRenderResolution.x = Math.max(1, this.maxRenderResolution.x); width = Math.min(this.maxRenderResolution.x, width); this.maxRenderResolution.y = Math.max(1, this.maxRenderResolution.y); height = Math.min(this.maxRenderResolution.y, height); } const camera = this.mainCamera; this.updateAspect(camera); this.renderer.setSize(width, height, true); // avoid setting pixel values here since this can cause pingpong updates // e.g. when system scale is set to 125% // https://github.com/needle-tools/needle-engine-support/issues/69 this.renderer.domElement.style.width = "100%"; this.renderer.domElement.style.height = "100%"; const devicePixelRatio = typeof this.devicePixelRatio === "number" ? this.devicePixelRatio : this.devicePixelRatio === "auto" ? 
window.devicePixelRatio : undefined; if (devicePixelRatio !== undefined) { this.renderer.setPixelRatio(devicePixelRatio); } if (this.composer) { this.composer.setSize?.call(this.composer, width, height); if (devicePixelRatio !== undefined && "setPixelRatio" in this.composer && typeof this.composer.setPixelRatio === "function") this.composer.setPixelRatio?.call(this.composer, window.devicePixelRatio); } } } /** * Update the camera aspect ratio or orthorgraphic camera size. This is automatically called when a DOM size change is detected. */ updateAspect(camera, width, height) { if (!camera) return; if (width === undefined) width = this.domWidth; if (height === undefined) height = this.domHeight; const aspectRatio = width / height; if (camera.isPerspectiveCamera) { const cam = camera; const pa = cam.aspect; cam.aspect = aspectRatio; if (pa !== cam.aspect) camera.updateProjectionMatrix(); } else if (camera.isOrthographicCamera) { const cam = camera; // Maintain the camera's current vertical size (top - bottom) const verticalSize = cam.top - cam.bottom; // Calculate new horizontal size based on aspect ratio const horizontalSize = verticalSize * aspectRatio; // Update camera bounds while maintaining center position const halfWidth = horizontalSize / 2; const halfHeight = verticalSize / 2; if (cam.left != -halfWidth || cam.top != halfHeight) { cam.left = -halfWidth; cam.right = halfWidth; cam.top = halfHeight; cam.bottom = -halfHeight; camera.updateProjectionMatrix(); } } } /** This will recreate the whole needle engine context and dispose the whole scene content * All content will be reloaded (loading times might be faster due to browser caches) * All scripts will be recreated */ recreate() { this.clear(); this.create(this._originalCreationArgs); } _originalCreationArgs; /** @deprecated use create. 
This method will be removed in a future version */ async onCreate(opts) { return this.create(opts); } /** @internal */ async create(opts) { try { this._isCreating = true; if (opts !== this._originalCreationArgs) this._originalCreationArgs = deepClone(opts); window.addEventListener("unhandledrejection", this.onUnhandledRejection); const res = await this.internalOnCreate(opts); this._isCreated = res; return res; } finally { window.removeEventListener("unhandledrejection", this.onUnhandledRejection); this._isCreating = false; } } onUnhandledRejection = (event) => { this.onError(event.reason); }; /** Dispatches an error */ onError(error) { this.domElement.dispatchEvent(new CustomEvent("error", { detail: error })); } /** * Clears the context and destroys all scenes and objects in the scene. * The ContextCleared event is called at the end. * This is automatically called when e.g. the `src` attribute changes on `<needle-engine>` * or when the web component is removed from the DOM */ clear() { ContextRegistry.dispatchCallback(ContextEvent.ContextClearing, this); invokeLifecycleFunctions(this, ContextEvent.ContextClearing); // NOTE: this does dispose the environment/background image too // which is probably not desired if it is set via the background-image attribute destroy(this.scene, true, true); this.scene = new Scene(); this.addressables?.dispose(); this.lightmaps?.clear(); this.physics?.engine?.clearCaches(); this.lodsManager.disable(); this._onBeforeRenderListeners.clear(); this._onAfterRenderListeners.clear(); if (!this.isManagedExternally) { if (this.renderer) { this.renderer.renderLists.dispose(); this.renderer.state.reset(); this.renderer.resetState(); } } // We do not want to clear the renderer here because when switching src we want to keep the last rendered frame in case the loading screen is not visible // if a user wants to see the background they can still call setClearAlpha(0) and clear manually ContextRegistry.dispatchCallback(ContextEvent.ContextCleared, 
this); } /** * Dispose all allocated resources and clears the scene. This is automatically called e.g. when the `<needle-engine>` component is removed from the DOM. */ dispose() { this.internalOnDestroy(); } /**@deprecated use dispose() */ onDestroy() { this.internalOnDestroy(); } internalOnDestroy() { Context.Current = this; ContextRegistry.dispatchCallback(ContextEvent.ContextDestroying, this); invokeLifecycleFunctions(this, ContextEvent.ContextDestroying); this.clear(); this.renderer?.setAnimationLoop(null); if (this.renderer) { this.renderer.setClearAlpha(0); this.renderer.clear(); if (!this.isManagedExternally) { if (debug) console.log("Disposing renderer"); this.renderer.dispose(); } } this.scene = null; this.renderer = null; this.input.dispose(); this.menu.onDestroy(); this.animations.onDestroy(); for (const cb of this._disposeCallbacks) { try { cb(); } catch (e) { console.error("Error in on dispose callback:", e, cb); } } if (this.domElement?.parentElement) { this.domElement.parentElement.removeChild(this.domElement); } this._isCreated = false; ContextRegistry.dispatchCallback(ContextEvent.ContextDestroyed, this); invokeLifecycleFunctions(this, ContextEvent.ContextDestroyed); ContextRegistry.unregister(this); if (Context.Current === this) { //@ts-ignore Context.Current = null; } } /** @internal Automatically called by components when you call `startCoroutine`. Use `startCoroutine` instead */ registerCoroutineUpdate(script, coroutine, evt) { if (typeof coroutine?.next !== "function") { console.error("Registered invalid coroutine function from " + script.name + "\nCoroutine functions must be generators: \"*myCoroutine() {...}\"\nStart a coroutine from a component by calling \"this.startCoroutine(myCoroutine())\""); return coroutine; } if (!this.coroutines[evt]) this.coroutines[evt] = []; this.coroutines[evt].push({ comp: script, main: coroutine }); return coroutine; } /** @internal Automatically called by components. 
*/ unregisterCoroutineUpdate(coroutine, evt) { if (!this.coroutines[evt]) return; const idx = this.coroutines[evt].findIndex(c => c.main === coroutine); if (idx >= 0) this.coroutines[evt].splice(idx, 1); } /** @internal Automatically called */ stopAllCoroutinesFrom(script) { for (const evt in this.coroutines) { const rout = this.coroutines[evt]; for (let i = rout.length - 1; i >= 0; i--) { const r = rout[i]; if (r.comp === script) { rout.splice(i, 1); } } } } _cameraStack = []; /** Change the main camera */ setCurrentCamera(cam) { if (!cam) return; if (!cam.threeCamera) cam.buildCamera(); // < to build camera if (!cam.threeCamera) { console.warn("Camera component is missing camera", cam); return; } const index = this._cameraStack.indexOf(cam); if (index >= 0) this._cameraStack.splice(index, 1); this._cameraStack.push(cam); this.mainCameraComponent = cam; const camera = cam.threeCamera; if (camera.isPerspectiveCamera) this.updateAspect(camera); this.mainCameraComponent?.applyClearFlagsIfIsActiveCamera(); } /** * Remove the camera from the mainCamera stack (if it has been set before with `setCurrentCamera`) */ removeCamera(cam) { if (!cam) return; const index = this._cameraStack.indexOf(cam); if (index >= 0) this._cameraStack.splice(index, 1); if (this.mainCameraComponent === cam) { this.mainCameraComponent = undefined; if (this._cameraStack.length > 0) { const last = this._cameraStack[this._cameraStack.length - 1]; this.setCurrentCamera(last); } } } _onBeforeRenderListeners = new Map(); _onAfterRenderListeners = new Map(); /** Use to subscribe to onBeforeRender events on threejs objects. 
* @link https://threejs.org/docs/#api/en/core/Object3D.onBeforeRender */ addBeforeRenderListener(target, callback) { if (!this._onBeforeRenderListeners.has(target.uuid)) { const arr = []; this._onBeforeRenderListeners.set(target.uuid, arr); target.onBeforeRender = this._createRenderCallbackWrapper(arr); } this._onBeforeRenderListeners.get(target.uuid).push(callback); } /** Remove callback from three `onBeforeRender` event (if it has been added with `addBeforeRenderListener(...)`) * @link https://threejs.org/docs/#api/en/core/Object3D.onBeforeRender */ removeBeforeRenderListener(target, callback) { if (this._onBeforeRenderListeners.has(target.uuid)) { const arr = this._onBeforeRenderListeners.get(target.uuid); const idx = arr.indexOf(callback); if (idx >= 0) arr.splice(idx, 1); } } /** * Subscribe to onAfterRender events on threejs objects * @link https://threejs.org/docs/#api/en/core/Object3D.onAfterRender */ addAfterRenderListener(target, callback) { if (!this._onAfterRenderListeners.has(target.uuid)) { const arr = []; this._onAfterRenderListeners.set(target.uuid, arr); target.onAfterRender = this._createRenderCallbackWrapper(arr); } this._onAfterRenderListeners.get(target.uuid)?.push(callback); } /** * Remove from onAfterRender events on threejs objects * @link https://threejs.org/docs/#api/en/core/Object3D.onAfterRender */ removeAfterRenderListener(target, callback) { if (this._onAfterRenderListeners.has(target.uuid)) { const arr = this._onAfterRenderListeners.get(target.uuid); const idx = arr.indexOf(callback); if (idx >= 0) arr.splice(idx, 1); } } _createRenderCallbackWrapper(array) { return (renderer, scene, camera, geometry, material, group) => { for (let i = 0; i < array.length; i++) { const fn = array[i]; fn(renderer, scene, camera, geometry, material, group); } }; } _requireDepthTexture = false; _requireColorTexture = false; _renderTarget; _isRendering = false; /** @returns true while the WebGL renderer is rendering (between onBeforeRender and 
onAfterRender events) */
get isRendering() { return this._isRendering; }

/** Request (or release) a depth texture on the internal render target. */
setRequireDepth(val) { this._requireDepthTexture = val; }
/** Request (or release) an opaque color texture on the internal render target. */
setRequireColor(val) { this._requireColorTexture = val; }

/** Depth texture of the internal render target, or null when no target exists. */
get depthTexture() { return this._renderTarget?.depthTexture || null; }
/** Color texture of the internal render target, or null when no target exists. */
get opaqueColorTexture() { return this._renderTarget?.texture || null; }

/** @returns true if the `<needle-engine>` DOM element is visible on screen (`context.domElement`) */
get isVisibleToUser() {
    // In XR the DOM element's CSS visibility is irrelevant — we always render.
    if (this.isInXR) return true;
    if (!this._isVisible) return false;
    const style = getComputedStyle(this.domElement);
    return style.visibility !== "hidden" && style.display !== "none" && style.opacity !== "0";
}

// Monotonically increasing id for create calls; used below to detect that a newer
// internalOnCreate call superseded this one while awaiting (re-entrancy guard).
_createId = 0;

/**
 * Creates/initializes this context: waits for dependency imports, (re)creates the
 * renderer if needed, loads the initial content files, wires up new scripts,
 * discovers the main camera, steps physics once and finally starts the render loop.
 * @param opts optional creation arguments (content `files`, `abortSignal`, …) — forwarded
 *             to `internalLoadInitialContent`; when omitted no files are loaded.
 * @returns true on success, false when loading failed, was aborted or was superseded
 *          by a newer create call.
 */
async internalOnCreate(opts) {
    const createId = ++this._createId;
    if (debug) console.log("Creating context", this.name, opts);
    // wait for async imported dependencies to be loaded
    // see https://linear.app/needle/issue/NE-4445
    const dependenciesReady = globalThis["needle:dependencies:ready"];
    if (dependenciesReady instanceof Promise) {
        if (debug) console.log("Waiting for dependencies to be ready");
        await dependenciesReady
            .catch(err => {
                // Dependency import failed — only surface details in dev/debug builds.
                if (debug || isDevEnvironment()) {
                    showBalloonError("Needle Engine dependencies failed to load. Please check the console for more details");
                    // NOTE(review): printedError is never reassigned, so the fallback
                    // console.error below always runs when the branch above doesn't return.
                    const printedError = false;
                    if (err instanceof ReferenceError) {
                        // Try to extract the offending component name from the quoted
                        // identifier in the ReferenceError message to print a targeted hint
                        // about circular imports in @serializable() usage.
                        let offendingComponentName = "YourComponentName";
                        const offendingComponentStartIndex = err.message.indexOf("'");
                        if (offendingComponentStartIndex > 0) {
                            const offendingComponentEndIndex = err.message.indexOf("'", offendingComponentStartIndex + 1);
                            if (offendingComponentEndIndex > 0) {
                                const name = err.message.substring(offendingComponentStartIndex + 1, offendingComponentEndIndex);
                                // Ignore very short matches — likely not a component name.
                                if (name.length > 3) offendingComponentName = name;
                            }
                        }
                        console.error(`Needle Engine dependencies failed to load:\n\n# Make sure you don't have circular imports in your scripts!\n\nPossible solutions: \n→ Replace @serializable(${offendingComponentName}) in your script with @serializable(Behaviour)\n→ If you only need type information try importing the type only, e.g: import { type ${offendingComponentName} }\n\n---`, err);
                        return;
                    }
                    if (!printedError) {
                        console.error("Needle Engine dependencies failed to load", err);
                    }
                }
            })
            .then(() => {
                if (debug) console.log("Needle Engine dependencies are ready");
            });
    }
    this.clear();
    const oldRenderer = this.renderer;
    // We only need to create a new renderer if we don't have one yet
    // We do prevent creating a new renderer here to avoid flickering when the context is created while the content is still being loaded. This can be the case where CSS transformations update the layout (e.g. scale() while loading + old canvas disposed but in the DOM layout.)
    const needsNewRenderer = !oldRenderer || oldRenderer["isDisposed"] === true;
    // stop the animation loop if its running during creation
    // since we do not want to start enabling scripts etc before they are deserialized
    if (this.isManagedExternally === false && (needsNewRenderer)) {
        this.createNewRenderer();
    }
    else {
        // Renderer is reused (or managed externally) — just point the LODs manager at it.
        this.lodsManager.setRenderer(this.renderer);
    }
    this.renderer?.setAnimationLoop(null);
    await delay(1);
    Context.Current = this;
    await ContextRegistry.dispatchCallback(ContextEvent.ContextCreationStart, this);
    // load and create scene
    let prepare_succeeded = true;
    let loadedFiles;
    try {
        Context.Current = this;
        if (opts) {
            loadedFiles = await this.internalLoadInitialContent(createId, opts);
        }
        else loadedFiles = [];
    }
    catch (err) {
        console.error(err);
        prepare_succeeded = false;
    }
    if (!prepare_succeeded) {
        this.onError("Failed to load initial content");
        return false;
    }
    // Bail out if a newer create call started while we were loading, or we were aborted.
    if (createId !== this._createId || opts?.abortSignal?.aborted) {
        return false;
    }
    this.internalOnUpdateVisible();
    if (!this.renderer) {
        if (debug) console.warn("Context has no renderer (perhaps it was disconnected?", this.domElement.isConnected);
        return false;
    }
    // Attach the canvas to the DOM element unless a shadow root (or external management) owns it.
    if (!this.isManagedExternally && !this.domElement.shadowRoot) {
        this.domElement.prepend(this.renderer.domElement);
    }
    Context.Current = this;
    // TODO: we could configure if we need physics
    // await this.physics.engine?.initialize();
    // Setup
    Context.Current = this;
    // Register each newly created script on its gameObject's userData.components list
    // (deduplicated) so components can be discovered from the three.js object.
    for (let i = 0; i < this.new_scripts.length; i++) {
        const script = this.new_scripts[i];
        if (script.gameObject !== undefined && script.gameObject !== null) {
            if (script.gameObject.userData === undefined) script.gameObject.userData = {};
            if (script.gameObject.userData.components === undefined) script.gameObject.userData.components = [];
            const arr = script.gameObject.userData.components;
            if (!arr.includes(script)) arr.push(script);
        }
        // if (script.gameObject && !this.raycastTargets.includes(script.gameObject)) {
        //     this.raycastTargets.push(script.gameObject);
        // }
    }
    // const context = new SerializationContext(this.scene);
    // for (let i = 0; i < this.new_scripts.length; i++) {
    //     const script = this.new_scripts[i];
    //     const ser = script as unknown as ISerializable;
    //     if (ser.$serializedTypes === undefined) continue;
    //     context.context = this;
    //     context.object = script.gameObject;
    //     deserializeObject(ser, script, context);
    // }
    // resolve post setup callbacks (things that rely on threejs objects having references to components)
    if (this.post_setup_callbacks) {
        for (let i = 0; i < this.post_setup_callbacks.length; i++) {
            Context.Current = this;
            await this.post_setup_callbacks[i](this);
        }
    }
    // Discover the main camera if none was assigned: prefer a component tagged
    // "MainCamera" (stops the search), otherwise the last active camera found wins.
    if (!this._mainCamera) {
        Context.Current = this;
        let camera = null;
        foreachComponent(this.scene, comp => {
            const cam = comp;
            if (cam?.isCamera) {
                looputils.updateActiveInHierarchyWithoutEventCall(cam.gameObject);
                if (!cam.activeAndEnabled) return undefined;
                if (cam.tag === "MainCamera") {
                    camera = cam;
                    return true;
                }
                else camera = cam;
            }
            return undefined;
        });
        if (camera) {
            this.setCurrentCamera(camera);
        }
        else {
            // Give listeners a chance to provide a camera before warning.
            const res = ContextRegistry.dispatchCallback(ContextEvent.MissingCamera, this, { files: loadedFiles });
            if (!res && !this.mainCamera && !this.isManagedExternally) console.warn("Missing camera in main scene", this);
        }
    }
    this.input.bindEvents();
    Context.Current = this;
    looputils.processNewScripts(this);
    // We have to step once so that colliders that have been created in onEnable can be raycasted in start
    if (this.physics.engine) {
        this.physics.engine?.step(0);
        this.physics.engine?.postStep();
    }
    // const mainCam = this.mainCameraComponent as Camera;
    // if (mainCam) {
    //     mainCam.applyClearFlagsIfIsActiveCamera();
    // }
    if (!this.isManagedExternally && this.composer && this.mainCamera) {
        // TODO: import postprocessing async
        // const renderPass = new RenderPass(this.scene, this.mainCamera);
        // this.renderer.setSize(this.domWidth, this.domHeight);
        // this.composer.addPass(renderPass);
        // this.composer.setSize(this.domWidth, this.domHeight);
    }
    this._needsUpdateSize = true;
    if (this._stats) {
        this._stats.showPanel(0);
        this._stats.dom.style.position = "absolute"; // (default is fixed)
        this.domElement.shadowRoot?.appendChild(this._stats.dom);
    }
    if (debug) logHierarchy(this.scene, true);
    // If no target framerate was set we use the default
    if (this.targetFrameRate === undefined) {
        if (debug) console.warn("No target framerate set, using default", Context.DefaultTargetFrameRate);
        // the _defaultTargetFramerate is intentionally an object so it can be changed at any time if not explicitly set by the user
        this.targetFrameRate = Context._defaultTargetFramerate;
    }
    else if (debug) console.log("Target framerate set to", this.targetFrameRate);
    this._dispatchReadyAfterFrame = true;
    // Let listeners finish async work (e.g. splash/loading UI) before starting the loop.
    const res = ContextRegistry.dispatchCallback(ContextEvent.ContextCreated, this, { files: loadedFiles });
    if (res) {
        const domElement = this.domElement;
        if ("internalSetLoadingMessage" in domElement && typeof domElement.internalSetLoadingMessage === "function") domElement?.internalSetLoadingMessage("finish loading");
        await res;
    }
    if (opts?.abortSignal?.aborted) {
        return false;
    }
    invokeLifecycleFunctions(this, ContextEvent.ContextCreated);
    if (debug) console.log("Context Created...", this.renderer, this.renderer.domElement);
    this._isCreating = false;
    if (!this.isManagedExternally && !opts?.abortSignal?.aborted) this.restartRenderLoop();
    return true;
}

/**
 * Loads the initial content files passed via the creation arguments.
 * @param createId id of the owning create call (re-entrancy guard)
 * @param args creation arguments containing `files` to load and an optional `abortSignal`
 * @returns array of load results (empty when there is nothing to load)
 */
async internalLoadInitialContent(createId, args) {
    const results = new Array();
    // early out if we dont have any files to load
    if (args.files.length === 0) return results;
    // Copy so later mutation of args.files cannot affect this load pass.
    const files = [...args.files];
    const progressArg = { name: "", progress: null, index: 0, count: files.length };
    const loader = getLoader();
    // this hash should be constant since it is used to initialize the UIDProvider per initially loaded scene
    const loadingHash = 0;
    for (let i = 0; i < files.length; i++) {
        if (args.abortSignal?.aborted) {
            if (debug) console.log("Aborting loading because of abort signal");
            break;
        }