@needle-tools/engine
Version:
Needle Engine is a web-based runtime for 3D apps. It runs on your machine for development with great integrations into editors like Unity or Blender — and can be deployed onto any device! It is flexible and extensible, with networking and XR built in.
865 lines (791 loc) • 36.2 kB
text/typescript
import { AnimationMixer, Object3D, Quaternion, Vector3 } from 'three';
import { isDevEnvironment } from '../../engine/debug/index.js';
import { FrameEvent } from '../../engine/engine_context.js';
import { isLocalNetwork } from '../../engine/engine_networking_utils.js';
import { serializable } from '../../engine/engine_serialization.js';
import type { GuidsMap } from '../../engine/engine_types.js';
import { deepClone, delay, getParam } from '../../engine/engine_utils.js';
import { Animator } from '../Animator.js';
import { AudioListener } from '../AudioListener.js';
import { AudioSource } from '../AudioSource.js';
import { Behaviour, GameObject } from '../Component.js';
import { SignalReceiver } from './SignalAsset.js';
import * as Models from "./TimelineModels.js";
import * as Tracks from "./TimelineTracks.js";
// Verbose timeline logging toggle, enabled via the "debugtimeline" parameter
// (presumably a URL query flag — see engine_utils.getParam; TODO confirm).
const debug = getParam("debugtimeline");
/**
* Controls how the {@link PlayableDirector} behaves when playback reaches the end.
* @see {@link PlayableDirector.extrapolationMode}
*/
export enum DirectorWrapMode {
    /** Hold the last frame when playback reaches the end of the timeline. */
    Hold = 0,
    /** Loop back to the start and continue playing indefinitely. */
    Loop = 1,
    /** Stop playback when the end is reached. The timeline will not loop. */
    None = 2,
}
/** How the clip handles time outside its start and end range. */
export enum ClipExtrapolation {
    /** No extrapolation is applied. */
    None = 0,
    /** Hold the time at the end value of the clip. */
    Hold = 1,
    /** Repeat time values outside the start/end range. */
    Loop = 2,
    /** Repeat time values outside the start/end range, reversing direction at each loop. */
    PingPong = 3,
    /** Time values are passed in without modification, extending beyond the clip's range. */
    Continue = 4
};
/**
 * @internal
 * Factory invoked by {@link PlayableDirector} for a track whose type was registered via
 * {@link PlayableDirector.registerCreateTrack}. Returning `null`/`undefined` (or an object
 * without an `evaluate` function) makes the director fall through to its built-in track handling.
 */
export type CreateTrackFunction = (director: PlayableDirector, track: Models.TrackModel) => Tracks.TrackHandler | undefined | null;
/**
* PlayableDirector is the main component for controlling timelines in Needle Engine.
* It orchestrates playback of TimelineAssets containing animation, audio, signal,
* control, and activation tracks.
*
* 
* *Screenshot: Timeline in Unity*
*
* **Supported track types:**
* - Animation tracks - animate objects using AnimationClips
* - Audio tracks - play synchronized audio
* - Activation tracks - show/hide objects at specific times
* - Signal tracks - trigger events at specific points
* - Control tracks - control nested timelines or prefab instances
* - Marker tracks - add metadata and navigation points
*
* [](https://engine.needle.tools/samples/bike-scrollytelling-responsive-3d)
*
* [](https://app.songsofcultures.com/?scene=little-brother)
*
* **Playback control:**
* Use `play()`, `pause()`, `stop()` for basic control.
* Set `time` directly and call `evaluate()` for scrubbing.
* Adjust `speed` for playback rate and `weight` for blending.
*
* @example Basic timeline playback
* ```ts
* const director = myObject.getComponent(PlayableDirector);
* director.play();
* // Jump to specific time
* director.time = 2.5;
* director.evaluate();
* ```
*
* @example Control playback speed
* ```ts
* director.speed = 0.5; // Half speed
* director.speed = -1; // Reverse playback
* ```
*
* - Example: https://engine.needle.tools/samples-uploads/product-flyover/
*
* @summary Controls and plays TimelineAssets
* @category Animation and Sequencing
* @group Components
* @see {@link Animator} for playing individual AnimationClips
* @see {@link AudioSource} for standalone audio playback
* @see {@link SignalReceiver} for handling timeline signals
* @link https://engine.needle.tools/samples/?overlay=samples&tag=animation
* @link https://app.songsofcultures.com/
* @link https://engine.needle.tools/docs/blender/animation.html Blender timeline and animation export
*/
export class PlayableDirector extends Behaviour {
private static createTrackFunctions: { [key: string]: CreateTrackFunction } = {};
/**
* Register a function to create a track handler for a custom track type.
* This allows you to extend the timeline system with your own track types and handlers.
*/
static registerCreateTrack(type: string, fn: CreateTrackFunction) {
this.createTrackFunctions[type] = fn;
}
/**
* The timeline asset containing tracks, clips, and markers that this director will play.
* Assign a timeline asset exported from Unity or Blender to enable playback.
*/
playableAsset?: Models.TimelineAssetModel;
/**
* When true, the timeline starts playing automatically when the component awakens.
* Set to false to control playback manually via `play()`.
* @default false
*/
()
playOnAwake?: boolean;
/**
* Determines how the timeline behaves when it reaches the end of its duration.
* @default DirectorWrapMode.Loop
*/
()
extrapolationMode: DirectorWrapMode = DirectorWrapMode.Loop;
/** Returns true if the timeline is currently playing (not paused or stopped). */
get isPlaying(): boolean { return this._isPlaying; }
/** Returns true if the timeline is currently paused. */
get isPaused(): boolean { return this._isPaused; }
/**
* The current playback time in seconds. Set this and call `evaluate()` to scrub.
* @example Scrub to a specific time
* ```ts
* director.time = 5.0;
* director.evaluate();
* ```
*/
get time(): number { return this._time; }
set time(value: number) {
if (typeof value === "number" && !Number.isNaN(value))
this._time = value;
else if (debug || isLocalNetwork()) {
console.error("INVALID TIMELINE.TIME VALUE", value, this.name)
};
}
/** The total duration of the timeline in seconds (read from the longest track/clip). */
get duration(): number { return this._duration; }
set duration(value: number) { this._duration = value; }
/**
* The blend weight of the timeline (0-1). Use values below 1 to blend
* timeline animations with other animations like those from an Animator.
*/
get weight(): number { return this._weight; };
set weight(value: number) { this._weight = value; }
/**
* The playback speed multiplier. Set to negative values for reverse playback.
* @example Reverse playback at double speed
* ```ts
* director.speed = -2;
* ```
*/
get speed(): number { return this._speed; }
set speed(value: number) { this._speed = value; }
/**
* When true, `play()` will wait for audio tracks to load and for user interaction
* before starting playback. Web browsers require user interaction (click/tap) before
* allowing audio to play - this ensures audio is synchronized with the timeline.
* Set to false if you need immediate visual playback and can tolerate audio delay.
* @default true
*/
waitForAudio: boolean = true;
private _visibilityChangeEvt?: any;
private _clonedPlayableAsset: boolean = false;
private _speed: number = 1;
/** @internal */
awake(): void {
if (debug) console.log(`[Timeline] Awake '${this.name}'`, this);
this.rebuildGraph();
if (!this.isValid() && (debug || isDevEnvironment())) {
if (debug) {
console.warn("PlayableDirector is not valid", "Asset?", this.playableAsset, "Tracks:", this.playableAsset?.tracks, "IsArray?", Array.isArray(this.playableAsset?.tracks), this);
}
else if (!this.playableAsset?.tracks?.length) {
console.warn("PlayableDirector has no tracks");
}
else {
console.warn("PlayableDirector is not valid");
}
}
}
/** @internal */
onEnable() {
if (debug) console.log("[Timeline] OnEnable", this.name, this.playOnAwake);
for (const track of this._audioTracks) {
track.onEnable?.();
}
for (const track of this._customTracks) {
track.onEnable?.();
}
for (const track of this._animationTracks) {
track.onEnable?.();
}
if (this.playOnAwake) {
this.play();
}
if (!this._visibilityChangeEvt) this._visibilityChangeEvt = () => {
switch (document.visibilityState) {
case "hidden":
this.setAudioTracksAllowPlaying(false);
break;
case "visible":
this.setAudioTracksAllowPlaying(true);
break;
}
}
window.addEventListener('visibilitychange', this._visibilityChangeEvt);
}
/** @internal */
onDisable(): void {
if (debug) console.log("[Timeline] OnDisable", this.name);
this.stop();
for (const track of this._audioTracks) {
track.onDisable?.();
}
for (const track of this._customTracks) {
track.onDisable?.();
}
for (const track of this._animationTracks) {
track.onDisable?.();
}
if (this._visibilityChangeEvt)
window.removeEventListener('visibilitychange', this._visibilityChangeEvt);
}
/** @internal */
onDestroy(): void {
for (const tracks of this._allTracks) {
for (const track of tracks)
track.onDestroy?.();
}
}
/** @internal */
rebuildGraph() {
if (!this.isValid()) return;
this.resolveBindings();
this.updateTimelineDuration();
this.setupAndCreateTrackHandlers();
}
/**
* Play the timeline from the current time.
* If the timeline is already playing this method does nothing.
*/
async play() {
if (!this.isValid()) return;
const pauseChanged = this._isPaused == true;
this._isPaused = false;
if (this._isPlaying) return;
this._isPlaying = true;
if (pauseChanged) this.invokePauseChangedMethodsOnTracks();
if (this.waitForAudio) {
// Make sure audio tracks have loaded at the current time
const promises: Array<Promise<any>> = [];
for (const track of this._audioTracks) {
const promise = track.loadAudio(this._time, 1, 0);
if (promise)
promises.push(promise);
}
if (promises.length > 0) {
await Promise.all(promises);
if (!this._isPlaying) return;
}
while (this._audioTracks.length > 0 && this._isPlaying && !AudioSource.userInteractionRegistered && this.waitForAudio)
await delay(200);
}
this.invokeStateChangedMethodsOnTracks();
// Update timeline in LateUpdate to give other scripts time to react to the updated state
// e.g. if we animate OrbitControls look at target we want those changes to be applied in onBeforeRender
// if we use onBeforeRender here it will be called *after* the regular onBeforeRender events
// which is too late
this._internalUpdateRoutine = this.startCoroutine(this.internalUpdate(), FrameEvent.LateUpdate);
}
/**
* Pause the timeline.
*/
pause() {
if (!this.isValid()) return;
this._isPlaying = false;
if (this._isPaused) return;
this._isPaused = true;
this.internalEvaluate();
this.invokePauseChangedMethodsOnTracks();
this.invokeStateChangedMethodsOnTracks();
}
/**
* Stop the timeline.
*/
stop() {
this._isStopping = true;
for (const track of this._audioTracks) track.stop();
const pauseChanged = this._isPaused == true;
const wasPlaying = this._isPlaying;
if (this._isPlaying) {
this._time = 0;
this._isPlaying = false;
this._isPaused = false;
this.internalEvaluate();
if (pauseChanged) this.invokePauseChangedMethodsOnTracks();
}
this._isPlaying = false;
this._isPaused = false;
if (pauseChanged && !wasPlaying) this.invokePauseChangedMethodsOnTracks();
if (wasPlaying) this.invokeStateChangedMethodsOnTracks();
if (this._internalUpdateRoutine)
this.stopCoroutine(this._internalUpdateRoutine);
this._internalUpdateRoutine = null;
this._isStopping = false;
}
/**
* Evaluate the timeline at the current time. This is useful when you want to manually update the timeline e.g. when the timeline is paused and you set `time` to a new value.
*/
evaluate() {
this.internalEvaluate(true);
}
/**
* @returns true if the timeline is valid and has tracks
*/
isValid() {
return this.playableAsset && this.playableAsset.tracks && Array.isArray(this.playableAsset.tracks);
}
/** Iterates over all tracks of the timeline
* @returns all tracks of the timeline
*/
*forEachTrack() {
for (const tracks of this._allTracks) {
for (const track of tracks)
yield track;
}
}
/**
* @returns all animation tracks of the timeline
*/
get animationTracks() {
return this._animationTracks;
}
/**
* @returns all audio tracks of the timeline
*/
get audioTracks(): Tracks.AudioTrackHandler[] {
return this._audioTracks;
}
/**
* @returns all signal tracks of the timeline
*/
get signalTracks(): Tracks.SignalTrackHandler[] {
return this._signalTracks;
}
/**
* @returns all marker tracks of the timeline
*/
get markerTracks(): Tracks.MarkerTrackHandler[] {
return this._markerTracks;
}
/**
* Iterates over all markers of the timeline, optionally filtering by type
*
* @example
* ```ts
* // Iterate over all ScrollMarkers in the timeline
* for (const marker of director.foreachMarker<{selector:string}>("ScrollMarker")) {
* console.log(marker.time, marker.selector);
* }
* ```
*
*/
*foreachMarker<T extends Record<string, any>>(type: string | null = null): Generator<(T & Models.MarkerModel)> {
for (const track of this._markerTracks) {
for (const marker of track.foreachMarker<T>(type)) {
yield marker as T & Models.MarkerModel;
}
}
}
private _guidsMap?: GuidsMap;
/** @internal */
resolveGuids(map: GuidsMap) {
this._guidsMap = map;
}
// INTERNALS
private _isPlaying: boolean = false;
private _internalUpdateRoutine: any;
private _isPaused: boolean = false;
/** internal, true during the time stop() is being processed */
private _isStopping: boolean = false;
private _time: number = 0;
private _duration: number = 0;
private _weight: number = 1;
private readonly _animationTracks: Array<Tracks.AnimationTrackHandler> = [];
private readonly _audioTracks: Array<Tracks.AudioTrackHandler> = [];
private readonly _signalTracks: Array<Tracks.SignalTrackHandler> = [];
private readonly _markerTracks: Array<Tracks.MarkerTrackHandler> = [];
private readonly _controlTracks: Array<Tracks.ControlTrackHandler> = [];
private readonly _customTracks: Array<Tracks.TrackHandler> = [];
private readonly _tracksArray: Array<Array<Tracks.TrackHandler>> = [];
private get _allTracks(): Array<Array<Tracks.TrackHandler>> {
this._tracksArray.length = 0;
this._tracksArray.push(this._animationTracks);
this._tracksArray.push(this._audioTracks);
this._tracksArray.push(this._signalTracks);
this._tracksArray.push(this._markerTracks);
this._tracksArray.push(this._controlTracks);
this._tracksArray.push(this._customTracks);
return this._tracksArray;
}
/** should be called after evaluate if the director was playing */
private invokePauseChangedMethodsOnTracks() {
for (const track of this.forEachTrack()) {
track.onPauseChanged?.call(track);
}
}
private invokeStateChangedMethodsOnTracks() {
for (const track of this.forEachTrack()) {
track.onStateChanged?.call(track, this._isPlaying);
}
}
private * internalUpdate() {
while (this._isPlaying && this.activeAndEnabled) {
if (!this._isPaused && this._isPlaying) {
this._time += this.context.time.deltaTime * this.speed;
this.internalEvaluate();
}
// for (let i = 0; i < 5; i++)
yield;
}
}
/**
* PlayableDirector lifecycle should always call this instead of "evaluate"
* @param called_by_user If true the evaluation is called by the user (e.g. via evaluate())
*/
private internalEvaluate(called_by_user: boolean = false) {
// when the timeline is called by a user via evaluate() we want to keep updating activation tracks
// because "isPlaying" might be false but the director is still active. See NE-3737
if (!this.isValid()) return;
let t = this._time;
switch (this.extrapolationMode) {
case DirectorWrapMode.Hold:
if (this._speed > 0)
t = Math.min(t, this._duration);
else if (this._speed < 0)
t = Math.max(t, 0);
this._time = t;
break;
case DirectorWrapMode.Loop:
t %= this._duration;
this._time = t;
break;
case DirectorWrapMode.None:
if (t > this._duration) {
this.stop();
return;
}
break;
}
const time = this._time;
for (const track of this.playableAsset!.tracks) {
if (track.muted) continue;
switch (track.type) {
case Models.TrackType.Activation:
// when the timeline is being disabled or stopped
// then we want to leave objects active state as they were
// see NE-3241
// TODO: support all "post-playback-state" settings an activation track has, this is just "Leave as is"
if (!called_by_user && !this._isPlaying) continue;
for (let i = 0; i < track.outputs.length; i++) {
const binding = track.outputs[i];
if (typeof binding === "object") {
let isActive: boolean = false;
if (track.clips) {
for (const clip of track.clips) {
if (clip.start <= time && time <= clip.end) {
isActive = true;
}
}
}
const obj = binding as Object3D;
if (obj.visible !== undefined) {
if (obj.visible !== isActive) {
obj.visible = isActive;
if (debug)
console.warn(this.name, "set ActivationTrack-" + i, obj.name, isActive, time);
}
}
}
}
break;
}
}
for (const track of this._allTracks) {
for (const handler of track) {
// When timeline reaches the end "stop()" is called which is evaluating with time 0
// We don't want to re-evaluate the animation then in case the timeline is blended with the Animator
// e.g then the timeline animation at time 0 is 100% applied on top of the animator animation
if (this._isStopping && handler instanceof Tracks.AnimationTrackHandler) {
continue;
}
handler.evaluate(time);
}
}
}
private resolveBindings() {
if (!this._clonedPlayableAsset) {
this._clonedPlayableAsset = true;
this.playableAsset = deepClone(this.playableAsset);
}
if (!this.playableAsset || !this.playableAsset.tracks) return;
// if the director has a parent we assume it is part of the current scene
// if not (e.g. when loaded via adressable but not yet added to any scene)
// we can only resolve objects that are children
const root = this.findRoot(this.gameObject);
for (const track of this.playableAsset.tracks) {
for (let i = track.outputs.length - 1; i >= 0; i--) {
let binding = track.outputs[i];
if (typeof binding === "string") {
if (this._guidsMap && this._guidsMap[binding])
binding = this._guidsMap[binding];
const obj = GameObject.findByGuid(binding, root);
if (obj === null || typeof obj !== "object") {
// if the binding is missing remove it to avoid unnecessary loops
track.outputs.splice(i, 1);
console.warn("Failed to resolve binding", binding, track.name, track.type);
}
else {
if (debug)
console.log("Resolved binding", binding, "to", obj);
track.outputs[i] = obj;
}
}
else if (binding === null) {
track.outputs.splice(i, 1);
if (PlayableDirector.createTrackFunctions[track.type]) {
// if a custom track doesnt have a binding its ok
continue;
}
// if the binding is missing remove it to avoid unnecessary loops
if (track.type !== Models.TrackType.Audio && track.type !== Models.TrackType.Control && track.type !== Models.TrackType.Marker && track.type !== Models.TrackType.Signal)
console.warn("Missing binding", binding, track.name, track.type, this.name, this.playableAsset.name);
}
}
if (track.type === Models.TrackType.Control) {
if (track.clips) {
for (let i = 0; i < track.clips.length; i++) {
const clip = track.clips[i];
let binding = clip.asset.sourceObject;
if (typeof binding === "string") {
if (this._guidsMap && this._guidsMap[binding])
binding = this._guidsMap[binding];
const obj = GameObject.findByGuid(binding, root);
if (obj === null || typeof obj !== "object") {
console.warn("Failed to resolve sourceObject binding", binding, track.name, clip);
}
else {
if (debug)
console.log("Resolved binding", binding, "to", obj);
clip.asset.sourceObject = obj;
}
}
}
}
}
}
}
private findRoot(current: Object3D): Object3D {
if (current.parent)
return this.findRoot(current.parent);
return current;
}
private updateTimelineDuration() {
this._duration = 0;
if (!this.playableAsset || !this.playableAsset.tracks) return;
for (const track of this.playableAsset.tracks) {
if (track.muted === true) continue;
if (track.clips) {
for (const clip of track.clips) {
if (clip.end > this._duration) this._duration = clip.end;
}
}
if (track.markers) {
for (const marker of track.markers) {
if (marker.time > this._duration) this._duration = marker.time + .001;
}
}
}
// console.log("timeline duration", this._duration, this.playableAsset);
}
private setupAndCreateTrackHandlers() {
this._animationTracks.length = 0;
this._audioTracks.length = 0;
this._signalTracks.length = 0;
if (!this.playableAsset) return;
let audioListener: AudioListener | null = GameObject.findObjectOfType(AudioListener, this.context);
for (const track of this.playableAsset!.tracks) {
const type = track.type;
const registered = PlayableDirector.createTrackFunctions[type];
if (registered !== null && registered !== undefined) {
const res = registered(this, track) as Tracks.TrackHandler;
if (typeof res.evaluate === "function") {
res.director = this;
res.track = track;
this._customTracks.push(res);
continue;
}
}
// only handle animation tracks
if (track.type === Models.TrackType.Animation) {
if (!track.clips || track.clips.length <= 0) {
if (debug) console.warn("Animation track has no clips", track);
continue;
}
// loop outputs / bindings, they should contain animator references
for (let i = track.outputs.length - 1; i >= 0; i--) {
let binding = track.outputs[i] as Animator;
if (binding instanceof Object3D) {
const anim = GameObject.getOrAddComponent(binding, Animator);
if (anim) binding = anim;
}
const animationClips = binding?.gameObject?.animations;
if (animationClips) {
const handler = new Tracks.AnimationTrackHandler();
handler.trackOffset = track.trackOffset;
handler.director = this;
handler.track = track;
for (let i = 0; i < track.clips.length; i++) {
const clipModel = track.clips[i];
const animModel = clipModel.asset as Models.AnimationClipModel;
if (!animModel) {
console.error(`Timeline ${this.name}: clip #${i} on track \"${track.name}\" has no animation data`);
continue;
}
// console.log(clipModel, track);
const targetObjectId = animModel.clip;
let clip: any = targetObjectId;
if (typeof clip === "string" || typeof clip === "number") {
clip = animationClips.find(c => c.name === targetObjectId);
}
if (debug) console.log(animModel, targetObjectId, "→", clip)
if (!clip) {
console.warn("Could not find animationClip for model", clipModel, track.name, this.name, this.playableAsset?.name, animationClips, binding);
continue;
}
// Try to share the mixer with the animator
if (binding instanceof Animator && binding.runtimeAnimatorController) {
if (!binding.__internalDidAwakeAndStart) binding.initializeRuntimeAnimatorController();
// Call bind once to ensure the animator is setup and has a mixer
if (!binding.runtimeAnimatorController.mixer) binding.runtimeAnimatorController.bind(binding);
handler.mixer = binding.runtimeAnimatorController.mixer;
}
// If we can not get the mixer from the animator then create a new one
if (!handler.mixer) {
handler.mixer = new AnimationMixer(binding.gameObject);
this.context.animations.registerAnimationMixer(handler.mixer);
}
handler.clips.push(clip);
// uncache because we want to create a new action
// this is needed because if a clip is used multiple times in a track (or even multiple tracks)
// we want to avoid setting weights on the same instance for clips/objects that are not active
handler.mixer.uncacheAction(clip);
handler.createHooks(clipModel.asset as Models.AnimationClipModel, clip);
const clipAction = handler.mixer.clipAction(clip); // new AnimationAction(handler.mixer, clip, null, null);
handler.actions.push(clipAction);
handler.models.push(clipModel);
}
this._animationTracks.push(handler);
}
}
}
else if (track.type === Models.TrackType.Audio) {
if (!track.clips || track.clips.length <= 0) continue;
const audio = new Tracks.AudioTrackHandler();
audio.director = this;
audio.track = track;
audio.audioSource = track.outputs.find(o => o instanceof AudioSource) as AudioSource;
this._audioTracks.push(audio);
if (!audioListener) {
// If the scene doesnt have an AudioListener we add one to the main camera
audioListener = this.context.mainCameraComponent?.gameObject.addComponent(AudioListener)!;
}
audio.listener = audioListener.listener;
for (let i = 0; i < track.clips.length; i++) {
const clipModel = track.clips[i];
audio.addModel(clipModel);
}
}
else if (track.type === Models.TrackType.Marker) {
if (track.markers) {
// For the marker track we create both a signal track handler AND a markertrack handler because a marker track can have signals and markers
const signalHandler: Tracks.SignalTrackHandler = new Tracks.SignalTrackHandler();
signalHandler.director = this;
signalHandler.track = track;
const markerHandler: Tracks.MarkerTrackHandler = new Tracks.MarkerTrackHandler();
markerHandler.director = this;
markerHandler.track = track;
for (const marker of track.markers) {
switch (marker.type) {
case Models.MarkerType.Signal:
signalHandler.models.push(marker as Models.SignalMarkerModel);
signalHandler.didTrigger.push(false);
break;
default:
markerHandler.models.push(marker);
break;
}
}
if (signalHandler !== null && signalHandler.models.length > 0) {
const rec = GameObject.getComponent(this.gameObject, SignalReceiver);
if (rec) {
signalHandler.receivers.push(rec);
this._signalTracks.push(signalHandler);
}
}
if (markerHandler !== null && markerHandler.models.length > 0) {
this._markerTracks.push(markerHandler);
}
}
}
else if (track.type === Models.TrackType.Signal) {
const handler = new Tracks.SignalTrackHandler();
handler.director = this;
handler.track = track;
if (track.markers) {
for (const marker of track.markers) {
handler.models.push(marker as Models.SignalMarkerModel);
handler.didTrigger.push(false);
}
}
for (const bound of track.outputs) {
handler.receivers.push(bound as SignalReceiver);
}
this._signalTracks.push(handler);
}
else if (track.type === Models.TrackType.Control) {
const handler = new Tracks.ControlTrackHandler();
handler.director = this;
handler.track = track;
if (track.clips) {
for (const clip of track.clips) {
handler.models.push(clip);
}
}
handler.resolveSourceObjects(this.context);
this._controlTracks.push(handler);
}
}
}
private setAudioTracksAllowPlaying(allow: boolean) {
for (const track of this._audioTracks) {
track.onAllowAudioChanged(allow);
}
}
/** Experimental support for overriding timeline animation data (position or rotation) */
readonly animationCallbackReceivers: ITimelineAnimationCallbacks[] = [];
/** Experimental: Receive callbacks for timeline animation. Allows modification of final value */
registerAnimationCallback(receiver: ITimelineAnimationCallbacks) { this.animationCallbackReceivers.push(receiver); }
/** Experimental: Unregister callbacks for timeline animation. Allows modification of final value */
unregisterAnimationCallback(receiver: ITimelineAnimationCallbacks) {
const index = this.animationCallbackReceivers.indexOf(receiver);
if (index === -1) return;
this.animationCallbackReceivers.splice(index, 1);
}
}
/**
* Interface for receiving callbacks during timeline animation evaluation.
* Allows modification of position/rotation values before they are applied.
*
* **Registration:**
* ```ts
* director.registerAnimationCallback(this);
* // Later: director.unregisterAnimationCallback(this);
* ```
*
* @experimental This interface may change in future versions
* @see {@link PlayableDirector.registerAnimationCallback}
*/
export interface ITimelineAnimationCallbacks {
    /**
     * Invoked with the evaluated rotation so it can be modified before being applied.
     * @param director The director that is playing the timeline
     * @param target The target object that is being animated
     * @param time The current time of the timeline
     * @param rotation The evaluated rotation of the target object at the current time
     */
    onTimelineRotation?(director: PlayableDirector, target: Object3D, time: number, rotation: Quaternion): void;
    /**
     * Invoked with the evaluated position so it can be modified before being applied.
     * @param director The director that is playing the timeline
     * @param target The target object that is being animated
     * @param time The current time of the timeline
     * @param position The evaluated position of the target object at the current time
     */
    onTimelinePosition?(director: PlayableDirector, target: Object3D, time: number, position: Vector3): void;
}