UNPKG

@needle-tools/engine

Version:

Needle Engine is a web-based runtime for 3D apps. It runs on your machine for development with great integrations into editors like Unity or Blender - and can be deployed onto any device! It is flexible and extensible, with networking and XR built in.

687 lines 30.7 kB
// NOTE(review): this is compiled TypeScript output (see the sourceMappingURL at the end of the
// file) — fix issues in the .ts sources, not in this build artifact.
// `__decorate` is the standard TypeScript decorator-emit helper: it applies the decorator list to
// a class or property, preferring `Reflect.decorate` when a Reflect polyfill is present.
// The imports below pull in three.js plus Needle Engine internals; `debug` enables verbose
// timeline logging when the page is loaded with the `?debugtimeline` URL parameter.
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) { var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; return c > 3 && r && Object.defineProperty(target, key, r), r; }; import { AnimationMixer, Object3D } from 'three'; import { isDevEnvironment } from '../../engine/debug/index.js'; import { FrameEvent } from '../../engine/engine_context.js'; import { isLocalNetwork } from '../../engine/engine_networking_utils.js'; import { serializable } from '../../engine/engine_serialization.js'; import { deepClone, delay, getParam } from '../../engine/engine_utils.js'; import { Animator } from '../Animator.js'; import { AudioListener } from '../AudioListener.js'; import { AudioSource } from '../AudioSource.js'; import { Behaviour, GameObject } from '../Component.js'; import { SignalReceiver } from './SignalAsset.js'; import * as Models from "./TimelineModels.js"; import * as Tracks from "./TimelineTracks.js"; const debug = getParam("debugtimeline"); /** * The wrap mode of the {@link PlayableDirector}. 
 * Hold = freeze on the last frame, Loop = wrap time back to zero, None = stop at the end.
 * (TypeScript enum emit: each IIFE builds a two-way name<->value map on the exported object.)
*/ export var DirectorWrapMode; (function (DirectorWrapMode) { /// <summary> /// <para>Hold the last frame when the playable time reaches it's duration.</para> /// </summary> DirectorWrapMode[DirectorWrapMode["Hold"] = 0] = "Hold"; /// <summary> /// <para>Loop back to zero time and continue playing.</para> /// </summary> DirectorWrapMode[DirectorWrapMode["Loop"] = 1] = "Loop"; /// <summary> /// <para>Do not keep playing when the time reaches the duration.</para> /// </summary> DirectorWrapMode[DirectorWrapMode["None"] = 2] = "None"; })(DirectorWrapMode || (DirectorWrapMode = {})); /** How the clip handles time outside its start and end range. */ export var ClipExtrapolation; (function (ClipExtrapolation) { /** No extrapolation is applied. */ ClipExtrapolation[ClipExtrapolation["None"] = 0] = "None"; /** Hold the time at the end value of the clip. */ ClipExtrapolation[ClipExtrapolation["Hold"] = 1] = "Hold"; /** Repeat time values outside the start/end range. */ ClipExtrapolation[ClipExtrapolation["Loop"] = 2] = "Loop"; /** Repeat time values outside the start/end range, reversing direction at each loop */ ClipExtrapolation[ClipExtrapolation["PingPong"] = 3] = "PingPong"; /** Time values are passed in without modification, extending beyond the clips range */ ClipExtrapolation[ClipExtrapolation["Continue"] = 4] = "Continue"; })(ClipExtrapolation || (ClipExtrapolation = {})); ; /** * The PlayableDirector component is the main component to control timelines in needle engine. * It is used to play, pause, stop and evaluate timelines. * Assign a TimelineAsset to the `playableAsset` property to start playing a timeline. 
 * Playback state lives in the private `_isPlaying`/`_isPaused`/`_time`/`_duration` fields below;
 * the `time` setter rejects NaN/non-number values (logging an error on debug/local networks),
 * and `awake()` builds the track graph and warns on dev builds when the asset/tracks are invalid.
 * @category Animation and Sequencing * @group Components */ export class PlayableDirector extends Behaviour { static createTrackFunctions = {}; static registerCreateTrack(type, fn) { this.createTrackFunctions[type] = fn; } playableAsset; /** Set to true to start playing the timeline when the scene starts */ playOnAwake; extrapolationMode = DirectorWrapMode.Loop; /** @returns true if the timeline is currently playing */ get isPlaying() { return this._isPlaying; } /** @returns true if the timeline is currently paused */ get isPaused() { return this._isPaused; } /** the current time of the timeline */ get time() { return this._time; } set time(value) { if (typeof value === "number" && !Number.isNaN(value)) this._time = value; else if (debug || isLocalNetwork()) { console.error("INVALID TIMELINE.TIME VALUE", value, this.name); } ; } /** the duration of the timeline */ get duration() { return this._duration; } set duration(value) { this._duration = value; } /** the weight of the timeline. Set to a value below 1 to blend with other timelines */ get weight() { return this._weight; } ; set weight(value) { this._weight = value; } /** the playback speed of the timeline */ get speed() { return this._speed; } set speed(value) { this._speed = value; } /** When enabled the timeline will wait for audio tracks to load at the current time before starting to play */ waitForAudio = true; _visibilityChangeEvt; _clonedPlayableAsset = false; _speed = 1; /** @internal */ awake() { if (debug) console.log(this, this.playableAsset?.tracks); this.rebuildGraph(); if (!this.isValid() && (debug || isDevEnvironment())) { if (debug) { console.warn("PlayableDirector is not valid", "Asset?", this.playableAsset, "Tracks:", this.playableAsset?.tracks, "IsArray?", Array.isArray(this.playableAsset?.tracks), this); } else if (!this.playableAsset?.tracks?.length) { console.warn("PlayableDirector has no tracks"); } else { console.warn("PlayableDirector is not valid"); } } } /** @internal */ onEnable() { 
// onEnable body: forward enable to audio/custom/animation track handlers, autoplay when
// `playOnAwake` is set, and register a `visibilitychange` listener that mutes/unmutes audio
// tracks when the browser tab is hidden/shown. onDisable stops playback and mirrors the same
// teardown; onDestroy forwards destruction to every handler of every track list.
for (const track of this._audioTracks) { track.onEnable?.(); } for (const track of this._customTracks) { track.onEnable?.(); } for (const track of this._animationTracks) { track.onEnable?.(); } if (this.playOnAwake) { this.play(); } if (!this._visibilityChangeEvt) this._visibilityChangeEvt = () => { switch (document.visibilityState) { case "hidden": this.setAudioTracksAllowPlaying(false); break; case "visible": this.setAudioTracksAllowPlaying(true); break; } }; window.addEventListener('visibilitychange', this._visibilityChangeEvt); } /** @internal */ onDisable() { this.stop(); for (const track of this._audioTracks) { track.onDisable?.(); } for (const track of this._customTracks) { track.onDisable?.(); } for (const track of this._animationTracks) { track.onDisable?.(); } if (this._visibilityChangeEvt) window.removeEventListener('visibilitychange', this._visibilityChangeEvt); } /** @internal */ onDestroy() { for (const tracks of this._allTracks) { for (const track of tracks) track.onDestroy?.(); } } /** @internal */ rebuildGraph() { if (!this.isValid()) return; this.resolveBindings(); this.updateTimelineDuration(); this.setupAndCreateTrackHandlers(); } /** * Play the timeline from the current time. * If the timeline is already playing this method does nothing. 
 * NOTE(review): async — when `waitForAudio` is true it first awaits audio clip loading at the
 * current time, then polls every 200ms until a user gesture has registered with AudioSource
 * (browser autoplay policy), before starting the LateUpdate coroutine that advances `_time`.
 * If `stop()`/`pause()` flips `_isPlaying` while awaiting, play() bails out early.
*/ async play() { if (!this.isValid()) return; const pauseChanged = this._isPaused == true; this._isPaused = false; if (this._isPlaying) return; this._isPlaying = true; if (pauseChanged) this.invokePauseChangedMethodsOnTracks(); if (this.waitForAudio) { // Make sure audio tracks have loaded at the current time const promises = []; for (const track of this._audioTracks) { const promise = track.loadAudio(this._time, 1, 0); if (promise) promises.push(promise); } if (promises.length > 0) { await Promise.all(promises); if (!this._isPlaying) return; } while (this._audioTracks.length > 0 && this._isPlaying && !AudioSource.userInteractionRegistered && this.waitForAudio) await delay(200); } this.invokeStateChangedMethodsOnTracks(); // Update timeline in LateUpdate to give other scripts time to react to the updated state // e.g. if we animate OrbitControls look at target we want those changes to be applied in onBeforeRender // if we use onBeforeRender here it will be called *after* the regular onBeforeRender events // which is too late this._internalUpdateRoutine = this.startCoroutine(this.internalUpdate(), FrameEvent.LateUpdate); } /** * Pause the timeline. */ pause() { if (!this.isValid()) return; this._isPlaying = false; if (this._isPaused) return; this._isPaused = true; this.internalEvaluate(); this.invokePauseChangedMethodsOnTracks(); this.invokeStateChangedMethodsOnTracks(); } /** * Stop the timeline. 
 * Resets `_time` to 0, stops all audio tracks and the internal update coroutine. The
 * `_isStopping` flag set here is checked by internalEvaluate() so the final time-0 evaluation
 * does not re-apply animation on top of a blended Animator. Also declared below: the public
 * iteration/validity helpers and all internal state fields (track handler lists per type).
*/ stop() { this._isStopping = true; for (const track of this._audioTracks) track.stop(); const pauseChanged = this._isPaused == true; const wasPlaying = this._isPlaying; if (this._isPlaying) { this._time = 0; this._isPlaying = false; this._isPaused = false; this.internalEvaluate(); if (pauseChanged) this.invokePauseChangedMethodsOnTracks(); } this._isPlaying = false; this._isPaused = false; if (pauseChanged && !wasPlaying) this.invokePauseChangedMethodsOnTracks(); if (wasPlaying) this.invokeStateChangedMethodsOnTracks(); if (this._internalUpdateRoutine) this.stopCoroutine(this._internalUpdateRoutine); this._internalUpdateRoutine = null; this._isStopping = false; } /** * Evaluate the timeline at the current time. This is useful when you want to manually update the timeline e.g. when the timeline is paused and you set `time` to a new value. */ evaluate() { this.internalEvaluate(true); } /** * @returns true if the timeline is valid and has tracks */ isValid() { return this.playableAsset && this.playableAsset.tracks && Array.isArray(this.playableAsset.tracks); } /** Iterates over all tracks of the timeline * @returns all tracks of the timeline */ *forEachTrack() { for (const tracks of this._allTracks) { for (const track of tracks) yield track; } } /** * @returns all animation tracks of the timeline */ get animationTracks() { return this._animationTracks; } /** * @returns all audio tracks of the timeline */ get audioTracks() { return this._audioTracks; } _guidsMap; /** @internal */ resolveGuids(map) { this._guidsMap = map; } // INTERNALS _isPlaying = false; _internalUpdateRoutine; _isPaused = false; /** internal, true during the time stop() is being processed */ _isStopping = false; _time = 0; _duration = 0; _weight = 1; _animationTracks = []; _audioTracks = []; _signalTracks = []; _controlTracks = []; _customTracks = []; _allTracks = [ this._animationTracks, this._audioTracks, this._signalTracks, this._controlTracks, this._customTracks ]; /** should be called after 
 * NOTE(review): this region holds the evaluation core. internalEvaluate() applies the wrap mode
 * (Hold clamps by speed sign, Loop does `t %= duration`, None calls stop() past the end),
 * toggles Object3D visibility for activation-track bindings, then evaluates animation (skipped
 * while `_isStopping`), audio, signal, control and custom handlers at the clamped time.
 * resolveBindings() deep-clones the shared asset once per director, then resolves string GUID
 * outputs via `_guidsMap`/GameObject.findByGuid and prunes unresolvable bindings.
 * evaluate if the director was playing */ invokePauseChangedMethodsOnTracks() { for (const track of this.forEachTrack()) { track.onPauseChanged?.call(track); } } invokeStateChangedMethodsOnTracks() { for (const track of this.forEachTrack()) { track.onStateChanged?.call(track, this._isPlaying); } } *internalUpdate() { while (this._isPlaying && this.activeAndEnabled) { if (!this._isPaused && this._isPlaying) { this._time += this.context.time.deltaTime * this.speed; this.internalEvaluate(); } // for (let i = 0; i < 5; i++) yield; } } /** * PlayableDirector lifecycle should always call this instead of "evaluate" * @param called_by_user If true the evaluation is called by the user (e.g. via evaluate()) */ internalEvaluate(called_by_user = false) { // when the timeline is called by a user via evaluate() we want to keep updating activation tracks // because "isPlaying" might be false but the director is still active. See NE-3737 if (!this.isValid()) return; let t = this._time; switch (this.extrapolationMode) { case DirectorWrapMode.Hold: if (this._speed > 0) t = Math.min(t, this._duration); else if (this._speed < 0) t = Math.max(t, 0); this._time = t; break; case DirectorWrapMode.Loop: t %= this._duration; this._time = t; break; case DirectorWrapMode.None: if (t > this._duration) { this.stop(); return; } break; } const time = this._time; for (const track of this.playableAsset.tracks) { if (track.muted) continue; switch (track.type) { case Models.TrackType.Activation: // when the timeline is being disabled or stopped // then we want to leave objects active state as they were // see NE-3241 // TODO: support all "post-playback-state" settings an activation track has, this is just "Leave as is" if (!called_by_user && !this._isPlaying) continue; for (let i = 0; i < track.outputs.length; i++) { const binding = track.outputs[i]; if (typeof binding === "object") { let isActive = false; if (track.clips) { for (const clip of track.clips) { if (clip.start <= time && time <= clip.end) 
{ isActive = true; } } } const obj = binding; if (obj.visible !== undefined) { if (obj.visible !== isActive) { obj.visible = isActive; if (debug) console.warn(this.name, "set ActivationTrack-" + i, obj.name, isActive, time); } } } } break; } } // When timeline reaches the end "stop()" is called which is evaluating with time 0 // We don't want to re-evaluate the animation then in case the timeline is blended with the Animator // e.g then the timeline animation at time 0 is 100% applied on top of the animator animation if (!this._isStopping) { for (const handler of this._animationTracks) { handler.evaluate(time); } } for (const handler of this._audioTracks) { handler.evaluate(time); } for (const sig of this._signalTracks) { sig.evaluate(time); } for (const ctrl of this._controlTracks) { ctrl.evaluate(time); } for (const cust of this._customTracks) { cust.evaluate(time); } } resolveBindings() { if (!this._clonedPlayableAsset) { this._clonedPlayableAsset = true; this.playableAsset = deepClone(this.playableAsset); } if (!this.playableAsset || !this.playableAsset.tracks) return; // if the director has a parent we assume it is part of the current scene // if not (e.g. 
when loaded via adressable but not yet added to any scene) // we can only resolve objects that are children const root = this.findRoot(this.gameObject); for (const track of this.playableAsset.tracks) { for (let i = track.outputs.length - 1; i >= 0; i--) { let binding = track.outputs[i]; if (typeof binding === "string") { if (this._guidsMap && this._guidsMap[binding]) binding = this._guidsMap[binding]; const obj = GameObject.findByGuid(binding, root); if (obj === null || typeof obj !== "object") { // if the binding is missing remove it to avoid unnecessary loops track.outputs.splice(i, 1); console.warn("Failed to resolve binding", binding, track.name, track.type); } else { if (debug) console.log("Resolved binding", binding, "to", obj); track.outputs[i] = obj; } } else if (binding === null) { track.outputs.splice(i, 1); if (PlayableDirector.createTrackFunctions[track.type]) { // if a custom track doesnt have a binding its ok continue; } // if the binding is missing remove it to avoid unnecessary loops if (track.type !== Models.TrackType.Audio && track.type !== Models.TrackType.Control && track.type !== Models.TrackType.Marker && track.type !== Models.TrackType.Signal) console.warn("Missing binding", binding, track.name, track.type, this.name, this.playableAsset.name); } } if (track.type === Models.TrackType.Control) { if (track.clips) { for (let i = 0; i < track.clips.length; i++) { const clip = track.clips[i]; let binding = clip.asset.sourceObject; if (typeof binding === "string") { if (this._guidsMap && this._guidsMap[binding]) binding = this._guidsMap[binding]; const obj = GameObject.findByGuid(binding, root); if (obj === null || typeof obj !== "object") { console.warn("Failed to resolve sourceObject binding", binding, track.name, clip); } else { if (debug) console.log("Resolved binding", binding, "to", obj); clip.asset.sourceObject = obj; } } } } } } } findRoot(current) { if (current.parent) return this.findRoot(current.parent); return current; } 
// updateTimelineDuration(): duration = max clip end / marker time over all non-muted tracks
// (marker times get a 1ms epsilon so marker-only tracks still produce a non-zero duration).
// setupAndCreateTrackHandlers(): rebuilds the per-type handler lists from playableAsset.tracks —
// registered custom track factories take precedence; animation tracks get (or add) an Animator
// per Object3D binding and share its AnimationMixer when available, otherwise create their own.
updateTimelineDuration() { this._duration = 0; if (!this.playableAsset || !this.playableAsset.tracks) return; for (const track of this.playableAsset.tracks) { if (track.muted === true) continue; if (track.clips) { for (const clip of track.clips) { if (clip.end > this._duration) this._duration = clip.end; } } if (track.markers) { for (const marker of track.markers) { if (marker.time > this._duration) this._duration = marker.time + .001; } } } // console.log("timeline duration", this._duration, this.playableAsset); } setupAndCreateTrackHandlers() { this._animationTracks.length = 0; this._audioTracks.length = 0; this._signalTracks.length = 0; if (!this.playableAsset) return; let audioListener = GameObject.findObjectOfType(AudioListener, this.context); for (const track of this.playableAsset.tracks) { const type = track.type; const registered = PlayableDirector.createTrackFunctions[type]; if (registered !== null && registered !== undefined) { const res = registered(this, track); if (typeof res.evaluate === "function") { res.director = this; res.track = track; this._customTracks.push(res); continue; } } // only handle animation tracks if (track.type === Models.TrackType.Animation) { if (!track.clips || track.clips.length <= 0) { if (debug) console.warn("Animation track has no clips", track); continue; } // loop outputs / bindings, they should contain animator references for (let i = track.outputs.length - 1; i >= 0; i--) { let binding = track.outputs[i]; if (binding instanceof Object3D) { const anim = GameObject.getOrAddComponent(binding, Animator); if (anim) binding = anim; } const animationClips = binding?.gameObject?.animations; if (animationClips) { const handler = new Tracks.AnimationTrackHandler(); handler.trackOffset = track.trackOffset; handler.director = this; handler.track = track; for (let i = 0; i < track.clips.length; i++) { const clipModel = track.clips[i]; const animModel = clipModel.asset; if (!animModel) { console.error(`Timeline ${this.name}: clip #${i} 
on track \"${track.name}\" has no animation data`); continue; } // console.log(clipModel, track); const targetObjectId = animModel.clip; let clip = targetObjectId; if (typeof clip === "string" || typeof clip === "number") { clip = animationClips.find(c => c.name === targetObjectId); } if (debug) console.log(animModel, targetObjectId, "→", clip); if (!clip) { console.warn("Could not find animationClip for model", clipModel, track.name, this.name, this.playableAsset?.name, animationClips, binding); continue; } // Try to share the mixer with the animator if (binding instanceof Animator && binding.runtimeAnimatorController) { if (!binding.__internalDidAwakeAndStart) binding.initializeRuntimeAnimatorController(); // Call bind once to ensure the animator is setup and has a mixer if (!binding.runtimeAnimatorController.mixer) binding.runtimeAnimatorController.bind(binding); handler.mixer = binding.runtimeAnimatorController.mixer; } // If we can not get the mixer from the animator then create a new one if (!handler.mixer) { handler.mixer = new AnimationMixer(binding.gameObject); this.context.animations.registerAnimationMixer(handler.mixer); } handler.clips.push(clip); // uncache because we want to create a new action // this is needed because if a clip is used multiple times in a track (or even multiple tracks) // we want to avoid setting weights on the same instance for clips/objects that are not active handler.mixer.uncacheAction(clip); handler.createHooks(clipModel.asset, clip); const clipAction = handler.mixer.clipAction(clip); // new AnimationAction(handler.mixer, clip, null, null); handler.actions.push(clipAction); handler.models.push(clipModel); } this._animationTracks.push(handler); } } } else if (track.type === Models.TrackType.Audio) { if (!track.clips || track.clips.length <= 0) continue; const audio = new Tracks.AudioTrackHandler(); audio.director = this; audio.track = track; audio.audioSource = track.outputs.find(o => o instanceof AudioSource); 
// Continuation of audio-track setup: if the scene has no AudioListener, one is added to the
// main camera (NOTE(review): assumes `context.mainCameraComponent` is set here — verify, the
// `audioListener.listener` access below would throw otherwise). Then Marker tracks collect
// Signal markers routed to this object's SignalReceiver, Signal tracks route markers to their
// bound receivers, and Control tracks resolve their source objects.
this._audioTracks.push(audio); if (!audioListener) { // If the scene doesnt have an AudioListener we add one to the main camera audioListener = this.context.mainCameraComponent?.gameObject.addComponent(AudioListener); } audio.listener = audioListener.listener; for (let i = 0; i < track.clips.length; i++) { const clipModel = track.clips[i]; audio.addModel(clipModel); } } else if (track.type === Models.TrackType.Marker) { const signalHandler = new Tracks.SignalTrackHandler(); signalHandler.director = this; signalHandler.track = track; if (track.markers) { for (const marker of track.markers) { switch (marker.type) { case Models.MarkerType.Signal: signalHandler.models.push(marker); signalHandler.didTrigger.push(false); break; } } } if (signalHandler !== null && signalHandler.models.length > 0) { const rec = GameObject.getComponent(this.gameObject, SignalReceiver); if (rec) { signalHandler.receivers.push(rec); this._signalTracks.push(signalHandler); } } } else if (track.type === Models.TrackType.Signal) { const handler = new Tracks.SignalTrackHandler(); handler.director = this; handler.track = track; if (track.markers) { for (const marker of track.markers) { handler.models.push(marker); handler.didTrigger.push(false); } } for (const bound of track.outputs) { handler.receivers.push(bound); } this._signalTracks.push(handler); } else if (track.type === Models.TrackType.Control) { const handler = new Tracks.ControlTrackHandler(); handler.director = this; handler.track = track; if (track.clips) { for (const clip of track.clips) { handler.models.push(clip); } } handler.resolveSourceObjects(this.context); this._controlTracks.push(handler); } } } setAudioTracksAllowPlaying(allow) { for (const track of this._audioTracks) { track.onAllowAudioChanged(allow); } } /** Experimental support for overriding timeline animation data (position or rotation) */ animationCallbackReceivers = []; /** Experimental: Receive callbacks for timeline animation. 
 * Receivers are stored in `animationCallbackReceivers`; unregister is a no-op when the receiver
 * was never registered. The trailing __decorate calls apply the `serializable()` decorator to
 * `playOnAwake` and `extrapolationMode` (the TS-emit equivalent of the @serializable annotation).
 * Allows modification of final value */ registerAnimationCallback(receiver) { this.animationCallbackReceivers.push(receiver); } /** Experimental: Unregister callbacks for timeline animation. Allows modification of final value */ unregisterAnimationCallback(receiver) { const index = this.animationCallbackReceivers.indexOf(receiver); if (index === -1) return; this.animationCallbackReceivers.splice(index, 1); } } __decorate([ serializable() ], PlayableDirector.prototype, "playOnAwake", void 0); __decorate([ serializable() ], PlayableDirector.prototype, "extrapolationMode", void 0); //# sourceMappingURL=PlayableDirector.js.map