@needle-tools/engine
Version:
Needle Engine is a web-based runtime for 3D apps. It runs on your machine for development, with great integrations into editors like Unity and Blender, and can be deployed to any device. It is flexible and extensible, and networking and XR are built in.
import { Audio, AudioLoader, Euler, Quaternion, QuaternionKeyframeTrack, Vector3, VectorKeyframeTrack } from "three";
import { isDevEnvironment } from "../../engine/debug/index.js";
import { getParam, resolveUrl } from "../../engine/engine_utils.js";
import { setObjectAnimated } from "../AnimationUtils.js";
import { Animator } from "../Animator.js";
import { AudioSource } from "../AudioSource.js";
import { GameObject } from "../Component.js";
import { PlayableDirector } from "./PlayableDirector.js";
import { SignalReceiver } from "./SignalAsset.js";
import * as Models from "./TimelineModels.js";
const debug = getParam("debugtimeline");
/**
* A TrackHandler is responsible for evaluating a specific type of timeline track.
* A timeline track can be an animation track, audio track, signal track, control track, etc., and is controlled by a {@link PlayableDirector}.
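*
* A minimal usage sketch (illustrative only - the subclass name is hypothetical and it
* assumes the clips yielded by {@link TrackHandler.forEachClip} carry the timing fields
* used by {@link TrackHandler.getClipTime}):
* @example
* class LoggingTrackHandler extends TrackHandler {
*     evaluate(time) {
*         if (this.muted) return;
*         for (const clip of this.forEachClip()) {
*             // clip-local time and normalized progress at this timeline time
*             const clipTime = this.getClipTime(time, clip);
*             const progress = this.getClipTimeNormalized(time, clip);
*             console.log(clipTime, progress);
*         }
*     }
* }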
*/
export class TrackHandler {
director;
track;
get muted() { return this.track.muted; }
set muted(val) {
if (val !== this.track.muted) {
this.track.muted = val;
this.onMuteChanged?.call(this);
}
}
*forEachClip(backwards = false) {
if (!this.track?.clips)
return;
if (backwards) {
for (let i = this.track.clips.length - 1; i >= 0; i--) {
yield this.track.clips[i];
}
}
else {
for (const clip of this.track.clips) {
yield clip;
}
}
}
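/** Converts a timeline time into clip-local time using the clip's start, clipIn offset and time scale. */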
getClipTime(time, model) {
return model.clipIn + (time - model.start) * model.timeScale;
}
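/** Returns the time relative to the clip start, normalized by the clip model's duration. */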
getClipTimeNormalized(time, model) {
return (time - model.start) / model.duration;
}
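/**
* Computes the blend weight of the clip at the given index for the given timeline time.
* Returns 0 if the index is out of range, or if the clip is neither marked active nor within its time range;
* otherwise the weight eases in over easeInDuration after the clip start and eases out over easeOutDuration before the clip end.
*/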
evaluateWeight(time, index, models, isActive = true) {
if (index < 0 || index >= models.length)
return 0;
const model = models[index];
if (isActive || (time >= model.start && time <= model.end)) {
let weight = 1;
const isBlendingWithNext = false;
// this blending with next clips is already baked into easeIn/easeOut
// if (allowBlendWithNext && index + 1 < models.length) {
// const next = models[index + 1];
// const nextWeight = (time - next.start) / (model.end - next.start);
// isBlendingWithNext = nextWeight > 0;
// weight = 1 - nextWeight;
// }
if (model.easeInDuration > 0) {
const easeIn = Math.min((time - model.start) / model.easeInDuration, 1);
weight *= easeIn;
}
if (model.easeOutDuration > 0 && !isBlendingWithNext) {
const easeOut = Math.min((model.end - time) / model.easeOutDuration, 1);
weight *= easeOut;
}
return weight;
}
return 0;
}
}
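/**
* Holds the root position/rotation at the start and end of an AnimationClip and the offsets between them;
* used by {@link AnimationTrackHandler} to accumulate root motion across clip loops.
*/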
class AnimationClipOffsetData {
clip;
rootPositionOffset;
rootQuaternionOffset;
get hasOffsets() { return this.rootPositionOffset !== undefined || this.rootQuaternionOffset !== undefined; }
// not necessary
rootStartPosition;
rootEndPosition;
rootStartQuaternion;
rootEndQuaternion;
constructor(action) {
const clip = action.getClip();
this.clip = clip;
const root = action.getRoot();
const rootPositionTrackName = root.name + ".position";
const rootRotationTrackName = root.name + ".quaternion";
if (debug)
console.log(clip.name, clip.tracks, rootPositionTrackName);
for (const track of clip.tracks) {
if (track.times.length <= 0)
continue;
if (track.name.endsWith(rootPositionTrackName)) {
this.rootStartPosition = new Vector3().fromArray(track.values, 0);
this.rootEndPosition = new Vector3().fromArray(track.values, track.values.length - 3);
this.rootPositionOffset = this.rootEndPosition.clone().sub(this.rootStartPosition);
if (debug)
console.log(this.rootPositionOffset);
// this.rootPositionOffset.set(0, 0, 0);
}
else if (track.name.endsWith(rootRotationTrackName)) {
this.rootStartQuaternion = new Quaternion().fromArray(track.values, 0);
this.rootEndQuaternion = new Quaternion().fromArray(track.values, track.values.length - 4);
this.rootQuaternionOffset = this.rootEndQuaternion.clone().multiply(this.rootStartQuaternion);
if (debug) {
const euler = new Euler().setFromQuaternion(this.rootQuaternionOffset);
console.log("ROT", euler);
}
}
}
}
}
// TODO: add support for clip clamp modes (loop, pingpong, clamp)
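/**
* Evaluates timeline animation clips through a shared three.js AnimationMixer:
* computes per-clip weights (ease in/out, pre/post extrapolation, looping), applies
* clip and track offsets to the root position/rotation via patched interpolants,
* and hands any remaining weight back to a bound Animator's runtime controller.
*/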
export class AnimationTrackHandler extends TrackHandler {
/** @internal */
models = [];
/** @internal */
trackOffset;
/** The object that is being animated. */
target;
/** The AnimationMixer; it should be shared with the animator if an animator is bound */
mixer;
clips = [];
actions = [];
/**
* You can use the weight to blend this timeline animation track with other animation tracks on the same object.
* @default 1
*/
weight = 1;
/** Holds per-clip root position/rotation offset data (see {@link AnimationClipOffsetData}) */
_actionOffsets = [];
_didBind = false;
_animator = null;
onDisable() {
// if this track is disabled we need to stop the currently active actions
this.mixer?.stopAllAction();
}
onDestroy() {
this.director.context.animations.unregisterAnimationMixer(this.mixer);
}
// Using this callback instead of onEnable etc
// because we want to re-enable the animator when the director is at the end and wrap mode is set to none
// in which case the director is stopped (but not disabled)
// which means we want to notify the object that it's not animated anymore
// and the animator can then take over
onStateChanged() {
if (this._animator)
setObjectAnimated(this._animator.gameObject, this, this.director.isPlaying);
}
createHooks(clipModel, clip) {
if (!clip.tracks || clip.tracks.length <= 0) {
console.warn("No tracks in AnimationClip", clip);
return;
}
// we only want to hook into the binding of the root object
// TODO: test with a clip with multiple roots
const parts = clip.tracks[0].name.split(".");
const rootName = parts[parts.length - 2];
const positionTrackName = rootName + ".position";
const rotationTrackName = rootName + ".quaternion";
let foundPositionTrack = false;
let foundRotationTrack = false;
for (const t of clip.tracks) {
if (t.name.endsWith(positionTrackName)) {
foundPositionTrack = true;
this.createPositionInterpolant(clip, clipModel, t);
}
else if (t.name.endsWith(rotationTrackName)) {
foundRotationTrack = true;
this.createRotationInterpolant(clip, clipModel, t);
}
}
// ensure we always have a position and rotation track so we can apply offsets in interpolator
// TODO: this currently assumes that there is only ever one root with offsets, so the interpolant is only created for the first track, which might be incorrect.
// In general it would probably be better not to create additional tracks but to apply the offsets for these objects elsewhere.
if (!foundPositionTrack || !foundRotationTrack) {
const root = this.mixer?.getRoot();
const track = clip.tracks[0];
const indexOfProperty = track.name.lastIndexOf(".");
const baseName = track.name.substring(0, indexOfProperty);
const objName = baseName.substring(baseName.lastIndexOf(".") + 1);
const targetObj = root.getObjectByName(objName);
// TODO can't animate unnamed objects which use GUID as name this way, need scene.getObjectByProperty('uuid', objectName);
// This should be right but needs testing:
// const parsedPath = PropertyBinding.parseTrackName(track.name);
// const targetObj = PropertyBinding.findNode(root, parsedPath.nodeName);
if (targetObj) {
if (!foundPositionTrack) {
const trackName = baseName + ".position";
if (debug)
console.warn("Create position track", objName, targetObj);
// apply the initial local position so it doesn't get flipped or otherwise changed
const pos = targetObj.position;
const track = new VectorKeyframeTrack(trackName, [0, clip.duration], [pos.x, pos.y, pos.z, pos.x, pos.y, pos.z]);
clip.tracks.push(track);
this.createPositionInterpolant(clip, clipModel, track);
}
else if (!foundRotationTrack) {
const trackName = clip.tracks[0].name.substring(0, indexOfProperty) + ".quaternion";
if (debug)
console.warn("Create quaternion track", objName, targetObj);
const rot = targetObj.quaternion;
const track = new QuaternionKeyframeTrack(trackName, [0, clip.duration], [rot.x, rot.y, rot.z, rot.w, rot.x, rot.y, rot.z, rot.w]);
clip.tracks.push(track);
this.createRotationInterpolant(clip, clipModel, track);
}
}
}
}
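/**
* Binds the track: resolves the animated target from the mixer root, collects per-clip root offset data,
* notifies a bound Animator that the object is animated by this track, and converts serialized clip and
* track offsets into Vector3/Quaternion instances.
*/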
bind() {
if (this._didBind)
return;
this._didBind = true;
if (debug)
console.log(this.models);
// the object being animated
if (this.mixer)
this.target = this.mixer.getRoot();
else
console.warn("No mixer was assigned to animation track");
for (const action of this.actions) {
const off = new AnimationClipOffsetData(action);
this._actionOffsets.push(off);
}
if (this.target) {
// We need to disable the animator component in case it also animates
// which overrides the timeline
this._animator = GameObject.getComponent(this.target, Animator) ?? null;
if (this._animator) {
setObjectAnimated(this._animator.gameObject, this, true);
}
}
// Clip Offsets
for (const model of this.models) {
const clipData = model.asset;
const pos = clipData.position;
const rot = clipData.rotation;
if (pos && pos.x !== undefined) {
if (!pos.isVector3) {
clipData.position = new Vector3(pos.x, pos.y, pos.z);
}
if (!rot.isQuaternion) {
clipData.rotation = new Quaternion(rot.x, rot.y, rot.z, rot.w);
}
}
}
this.ensureTrackOffsets();
}
ensureTrackOffsets() {
if (this.trackOffset) {
const pos = this.trackOffset.position;
if (pos) {
if (!pos.isVector3) {
this.trackOffset.position = new Vector3(pos.x, pos.y, pos.z);
}
}
const rot = this.trackOffset.rotation;
if (rot) {
if (!rot.isQuaternion) {
this.trackOffset.rotation = new Quaternion(rot.x, rot.y, rot.z, rot.w);
}
}
}
}
_useclipOffsets = true;
_totalOffsetPosition = new Vector3();
_totalOffsetRotation = new Quaternion();
_totalOffsetPosition2 = new Vector3();
_totalOffsetRotation2 = new Quaternion();
_summedPos = new Vector3();
_tempPos = new Vector3();
_summedRot = new Quaternion();
_tempRot = new Quaternion();
_clipRotQuat = new Quaternion();
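/**
* Evaluates the track at the given timeline time: updates each clip action's time and weight
* (including ease in/out, pre/post extrapolation and looping), accumulates root motion offsets,
* then advances either the bound Animator's runtime controller with the remaining weight or the mixer directly.
*/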
evaluate(time) {
if (this.track.muted)
return;
if (!this.mixer)
return;
this.bind();
// if (this._animator && this.director.isPlaying && this.director.weight > 0) this._animator.enabled = false;
this._totalOffsetPosition.set(0, 0, 0);
this._totalOffsetRotation.set(0, 0, 0, 1);
this._totalOffsetPosition2.set(0, 0, 0);
this._totalOffsetRotation2.set(0, 0, 0, 1);
let activeClips = 0;
let blend = 0;
let didPostExtrapolate = false;
let didPreExtrapolate = false;
// The total weight is used to blend with the animator controller active states
let totalWeight = 0;
for (let i = 0; i < this.clips.length; i++) {
const model = this.models[i];
const action = this.actions[i];
const clipModel = model.asset;
action.weight = 0;
const isInTimeRange = time >= model.start && time <= model.end;
const preExtrapolation = model.preExtrapolationMode;
const postExtrapolation = model.postExtrapolationMode;
const nextClip = i < this.clips.length - 1 ? this.models[i + 1] : null;
let isActive = isInTimeRange;
let doPreExtrapolate = false;
if (!isActive && !didPostExtrapolate && model.end < time && postExtrapolation !== Models.ClipExtrapolation.None) {
// use post-extrapolation if it's the last clip or the next clip has not yet started
if (!nextClip || nextClip.start > time) {
isActive = true;
didPostExtrapolate = true;
}
}
else if (i == 0 && !isActive && !didPreExtrapolate && model.start > time && preExtrapolation !== Models.ClipExtrapolation.None) {
if (!nextClip || nextClip.start < time) {
isActive = true;
doPreExtrapolate = true;
didPreExtrapolate = true;
}
}
if (isActive) {
// const clip = this.clips[i];
let weight = this.weight;
weight *= this.evaluateWeight(time, i, this.models, isActive);
weight *= this.director.weight;
let handleLoop = isInTimeRange;
if (doPreExtrapolate) {
switch (preExtrapolation) {
case Models.ClipExtrapolation.Hold:
// Nothing to do
break;
case Models.ClipExtrapolation.Loop:
// TODO: this is not correct yet
time += model.start;
handleLoop = true;
break;
default:
time += model.start;
handleLoop = true;
break;
}
}
// TODO: handle clipIn again
let t = this.getClipTime(time, model);
let loops = 0;
const duration = clipModel.duration;
// This is the actual duration of the clip in the timeline (with clipping and scale)
// const clipDuration = (model.end - model.start) * model.timeScale;
if (doPreExtrapolate) {
if (preExtrapolation === Models.ClipExtrapolation.Hold) {
t = 0;
}
}
if (handleLoop) {
if (clipModel.loop) {
// const t0 = t - .001;
loops += Math.floor(t / (duration + .000001));
while (t > duration) {
t -= duration;
}
}
}
else if (!isInTimeRange) {
if (didPostExtrapolate) {
switch (postExtrapolation) {
case Models.ClipExtrapolation.Hold:
t = this.getClipTime(model.end, model);
break;
case Models.ClipExtrapolation.Loop:
t %= duration;
break;
case Models.ClipExtrapolation.PingPong:
const loops = Math.floor(t / duration);
const invert = loops % 2 !== 0;
t %= duration;
if (invert)
t = duration - t;
break;
}
}
}
if (model.reversed === true)
action.time = action.getClip().duration - t;
else
action.time = t;
action.timeScale = 0;
const effectiveWeight = Math.max(0, weight);
action.weight = effectiveWeight;
totalWeight += effectiveWeight;
action.clampWhenFinished = false;
if (!action.isRunning())
action.play();
// console.log(action.time, action.weight);
if (this._useclipOffsets) {
const totalPosition = activeClips == 0 ? this._totalOffsetPosition : this._totalOffsetPosition2;
const totalRotation = activeClips == 0 ? this._totalOffsetRotation : this._totalOffsetRotation2;
if (activeClips < 1)
blend = 1 - weight;
activeClips += 1;
const summedPos = this._summedPos.set(0, 0, 0);
const tempPos = this._tempPos.set(0, 0, 0);
const summedRot = this._summedRot.identity();
const tempRot = this._tempRot.identity();
const clipOffsetRot = clipModel.rotation;
if (clipOffsetRot) {
this._clipRotQuat.identity();
this._clipRotQuat.slerp(clipOffsetRot, weight);
}
const offsets = this._actionOffsets[i];
if (offsets.hasOffsets) {
for (let i = 0; i < loops; i++) {
if (offsets.rootPositionOffset)
tempPos.copy(offsets.rootPositionOffset);
else
tempPos.set(0, 0, 0);
tempPos.applyQuaternion(summedRot);
if (this._clipRotQuat)
tempPos.applyQuaternion(this._clipRotQuat);
if (offsets.rootQuaternionOffset) {
// console.log(new Euler().setFromQuaternion(offsets.rootQuaternionOffset).y.toFixed(2));
tempRot.copy(offsets.rootQuaternionOffset);
summedRot.multiply(tempRot);
}
summedPos.add(tempPos);
}
}
if (this._clipRotQuat)
totalRotation.multiply(this._clipRotQuat);
totalRotation.multiply(summedRot);
if (clipModel.position)
summedPos.add(clipModel.position);
totalPosition.add(summedPos);
}
}
}
if (this._useclipOffsets) {
this._totalOffsetPosition.lerp(this._totalOffsetPosition2, blend);
this._totalOffsetRotation.slerp(this._totalOffsetRotation2, blend);
}
if (this["__mixerError"] === undefined && (debug || isDevEnvironment()) && this._animator?.runtimeAnimatorController?.mixer && this.mixer !== this._animator?.runtimeAnimatorController?.mixer) {
this["__mixerError"] = true;
console.error("AnimationTrack mixer is not shared with the animator controller - this might result in the timeline to not animate properly. Please report a bug to the Needle Engine team!", this);
}
if (this._animator?.runtimeAnimatorController) {
// If the timeline is running then the timeline track takes control over the animator controller;
// we calculate the weight that is left for the animator controller actions
const weightLeft = Math.max(0, 1 - totalWeight);
this._animator?.runtimeAnimatorController?.update(weightLeft);
}
else {
this.mixer.update(time);
}
}
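/** Wraps the track's quaternion interpolant so that the accumulated clip/track rotation offsets are applied to every sample and timeline rotation callbacks are invoked. */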
createRotationInterpolant(_clip, _clipModel, track) {
const createInterpolantOriginal = track.createInterpolant.bind(track);
const quat = new Quaternion();
this.ensureTrackOffsets();
const trackOffsetRot = this.trackOffset?.rotation;
track.createInterpolant = () => {
const createdInterpolant = createInterpolantOriginal();
const interpolate = createdInterpolant.evaluate.bind(createdInterpolant);
// console.log(interpolate);
createdInterpolant.evaluate = (time) => {
const res = interpolate(time);
quat.set(res[0], res[1], res[2], res[3]);
quat.premultiply(this._totalOffsetRotation);
// console.log(new Euler().setFromQuaternion(quat).y.toFixed(2));
if (trackOffsetRot)
quat.premultiply(trackOffsetRot);
if (this.director.animationCallbackReceivers) {
for (const rec of this.director.animationCallbackReceivers) {
rec?.onTimelineRotation?.call(rec, this.director, this.target, time, quat);
}
}
res[0] = quat.x;
res[1] = quat.y;
res[2] = quat.z;
res[3] = quat.w;
return res;
};
return createdInterpolant;
};
}
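/** Wraps the track's position interpolant so that start-offset removal, accumulated clip offsets and track offsets are applied to every sample and timeline position callbacks are invoked. */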
createPositionInterpolant(clip, clipModel, track) {
const createInterpolantOriginal = track.createInterpolant.bind(track);
const currentPosition = new Vector3();
this.ensureTrackOffsets();
const trackOffsetRot = this.trackOffset?.rotation;
const trackOffsetPos = this.trackOffset?.position;
let startOffset = undefined;
track.createInterpolant = () => {
const createdInterpolant = createInterpolantOriginal();
const evaluate = createdInterpolant.evaluate.bind(createdInterpolant);
createdInterpolant.evaluate = (time) => {
const res = evaluate(time);
currentPosition.set(res[0], res[1], res[2]);
if (clipModel.removeStartOffset) {
if (startOffset === undefined) {
startOffset = null;
startOffset = this._actionOffsets.find(a => a.clip === clip)?.rootStartPosition?.clone();
}
else if (startOffset?.isVector3) {
currentPosition.sub(startOffset);
}
}
currentPosition.applyQuaternion(this._totalOffsetRotation);
currentPosition.add(this._totalOffsetPosition);
// apply track offset
if (trackOffsetRot)
currentPosition.applyQuaternion(trackOffsetRot);
if (trackOffsetPos) {
// flipped unity X
currentPosition.x -= trackOffsetPos.x;
currentPosition.y += trackOffsetPos.y;
currentPosition.z += trackOffsetPos.z;
}
if (this.director.animationCallbackReceivers) {
for (const rec of this.director.animationCallbackReceivers) {
rec?.onTimelinePosition?.call(rec, this.director, this.target, time, currentPosition);
}
}
res[0] = currentPosition.x;
res[1] = currentPosition.y;
res[2] = currentPosition.z;
return res;
};
return createdInterpolant;
};
}
}
const muteAudioTracks = getParam("mutetimeline");
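/**
* Plays the audio clips of a timeline audio track through three.js Audio objects:
* lazily loads buffers shortly before they are needed, starts/stops playback as the playhead
* enters and leaves clips, and applies volume easing, track volume and mute state.
*/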
export class AudioTrackHandler extends TrackHandler {
models = [];
listener;
audio = [];
audioContextTimeOffset = [];
lastTime = 0;
audioSource;
_audioLoader = null;
getAudioFilePath(path) {
// TODO: this should probably be the timeline asset location, which MIGHT be different
const glbLocation = this.director.sourceId;
return resolveUrl(glbLocation, path);
}
onAllowAudioChanged(allow) {
for (let i = 0; i < this.models.length; i++) {
const model = this.models[i];
const audio = this.audio[i];
audio.setVolume(allow ? model.asset.volume : 0);
}
}
addModel(model) {
const audio = new Audio(this.listener);
this.audio.push(audio);
const audioClipModel = model;
audioClipModel._didTriggerPlay = false;
this.models.push(audioClipModel);
}
onDisable() {
for (const audio of this.audio) {
if (audio.isPlaying)
audio.stop();
}
for (const model of this.models) {
model._didTriggerPlay = false;
}
}
onDestroy() {
for (const audio of this.audio) {
if (audio.source)
audio?.disconnect();
}
this.audio.length = 0;
// TODO: dispose the audio buffers loaded by this track
}
onMuteChanged() {
if (this.muted) {
for (let i = 0; i < this.audio.length; i++) {
const audio = this.audio[i];
if (audio?.isPlaying)
audio.stop();
}
}
}
stop() {
for (let i = 0; i < this.audio.length; i++) {
const audio = this.audio[i];
if (audio?.isPlaying)
audio.stop();
}
for (const model of this.models) {
model._didTriggerPlay = false;
}
}
_playableDirectorResumed = false;
onPauseChanged() {
// if the timeline gets paused we stop all audio clips
// we don't reset the triggerPlay flag here (it will automatically reset when the timeline starts evaluating again)
for (let i = 0; i < this.audio.length; i++) {
const audio = this.audio[i];
if (audio?.isPlaying)
audio.stop();
}
this._playableDirectorResumed = this.director.isPlaying;
}
evaluate(time) {
if (muteAudioTracks)
return;
if (this.track.muted)
return;
if (this.director.speed < 0) {
// Reversed audio playback is currently not supported
return;
}
const isMuted = this.director.context.application.muted;
const resumePlay = this._playableDirectorResumed;
this._playableDirectorResumed = false;
// this is just so that we don't hear the very first beat when the audio starts but is muted
// if we don't add a delay we hear a little bit of the audio before it shuts down
// MAYBE instead of doing it like this we should connect a custom audio node (or disconnect the output node?)
const playTimeOffset = isMuted ? .1 : 0;
for (let i = 0; i < this.models.length; i++) {
const model = this.models[i];
const audio = this.audio[i];
const asset = model.asset;
// only trigger loading for clips that are CLOSE to being played
if ((!audio || !audio.buffer) && this.isInTimeRange(model, time - 1, time + 1)) {
this.handleAudioLoading(model, audio);
}
if (AudioSource.userInteractionRegistered === false)
continue;
if (audio === null || !audio.buffer)
continue;
audio.playbackRate = this.director.context.time.timeScale * this.director.speed;
audio.loop = asset.loop;
if (time >= model.start && time <= model.end && time < this.director.duration) {
if (!audio.isPlaying || !this.director.isPlaying) {
// if the timeline is paused we trigger the audio clip once when the model is entered
// we don't play back the audio clip if we scroll back in time
// this is to support audio clip playback when using the timeline with manual scrolling (scrollytelling)
if (resumePlay || (!model._didTriggerPlay && this.lastTime < time)) {
// we don't want to clip in the audio if it's a very short clip
const clipDuration = model.duration * model.timeScale;
if (clipDuration > .3)
audio.offset = model.clipIn + (time - model.start) * model.timeScale;
else
audio.offset = 0;
if (debug)
console.log("Timeline Audio (" + this.track.name + ") play with offset " + audio.offset + " - " + model.asset.clip);
audio.play(playTimeOffset);
model._didTriggerPlay = true;
}
else {
// do nothing...
}
}
else {
const targetOffset = model.clipIn + (time - model.start) * model.timeScale;
// it seems non-trivial to get the right time from audio sources;
// https://github.com/mrdoob/three.js/blob/master/src/audio/Audio.js#L170
const currentTime = audio.context.currentTime - audio["_startedAt"] + audio.offset;
const diff = Math.abs(targetOffset - currentTime);
if (diff > 0.3) {
audio.offset = targetOffset;
audio.stop();
audio.play(playTimeOffset);
}
}
let vol = asset.volume;
if (this.track.volume !== undefined)
vol *= this.track.volume;
if (isMuted)
vol = 0;
if (model.easeInDuration > 0) {
const easeIn = Math.min((time - model.start) / model.easeInDuration, 1);
vol *= easeIn;
}
if (model.easeOutDuration > 0) {
const easeOut = Math.min((model.end - time) / model.easeOutDuration, 1);
vol *= easeOut;
}
audio.setVolume(vol * this.director.weight);
}
else {
model._didTriggerPlay = false;
if (this.director.isPlaying) {
if (audio.isPlaying) {
audio.stop();
}
}
}
}
this.lastTime = time;
}
/** Call to load audio buffer for a specific time in the track. Can be used to preload the timeline audio */
loadAudio(time, lookAhead = 0, lookBehind = 0) {
let promises = null;
const rangeStart = time - lookBehind;
const rangeEnd = time + lookAhead;
for (const model of this.models) {
if (this.isInTimeRange(model, rangeStart, rangeEnd)) {
const audio = this.audio[this.models.indexOf(model)];
const promise = this.handleAudioLoading(model, audio);
if (promise !== null) {
if (promises === null)
promises = [];
promises.push(promise);
}
}
}
if (promises !== null) {
return Promise.all(promises);
}
return null;
}
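// Usage sketch (hypothetical `audioTrackHandler` variable; obtaining a track handler from a
// PlayableDirector is not shown in this file):
//   await audioTrackHandler.loadAudio(director.time, /* lookAhead */ 10);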
isInTimeRange(model, start, end) {
// Range surrounds clip range
if (start <= model.start && end >= model.end)
return true;
// Range start is in clip range
if (start >= model.start && start <= model.end)
return true;
// Range end is in clip range
if (end >= model.start && end <= model.end)
return true;
return false;
}
static _audioBuffers = new Map();
static dispose() {
AudioTrackHandler._audioBuffers.clear();
}
handleAudioLoading(model, audio) {
if (!this._audioLoader) {
this._audioLoader = new AudioLoader();
}
// TODO: maybe we should cache the loaders / buffers here by path
const path = this.getAudioFilePath(model.asset.clip);
if (AudioTrackHandler._audioBuffers.get(path)) {
const promise = AudioTrackHandler._audioBuffers.get(path);
promise.then((buffer) => {
if (buffer)
audio.setBuffer(buffer);
});
return promise;
}
if (debug)
console.warn("LOAD audio track", path, this.director.sourceId);
const loadingPromise = new Promise((resolve, _reject) => {
this._audioLoader.load(path, buffer => {
audio.setBuffer(buffer);
resolve(buffer);
}, undefined, err => {
console.error("Error loading audio", err);
resolve(null);
});
});
AudioTrackHandler._audioBuffers.set(path, loadingPromise);
return loadingPromise;
}
}
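/**
* Fires timeline signals when the playhead passes a signal marker: signals are sent to the track's
* explicit {@link SignalReceiver}s, or invoked globally when none are assigned.
* Non-retroactive signals only fire within roughly one frame of their marker time.
*/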
export class SignalTrackHandler extends TrackHandler {
models = [];
didTrigger = [];
receivers = [];
// TODO: test when timeline signals are being reset in Unity
// onEnable() {
// for (let i = 0; i < this.didTrigger?.length; i++) {
// this.didTrigger[i] = false;
// }
// }
// private _lastTime: number = -1;
evaluate(time) {
if (this.track.muted)
return;
// let lastTime = this._lastTime;
// if (lastTime === -1) lastTime = time;
// this._lastTime = time;
const estimatedFrameLengthWithPadding = this.director.context.time.deltaTime * 1.5;
for (let i = 0; i < this.models.length; i++) {
const model = this.models[i];
const wasTriggered = this.didTrigger[i];
const td = model.time - time;
let isActive = false;
if (model.retroActive) {
isActive = td <= 0.000001;
}
else {
const abs = Math.abs(td);
// e.g. if the signal is at frame 0 and the timeline duration is also 0 (no tracks, just a signal at frame 0)
if (abs === 0) {
isActive = true;
}
else if (abs >= .00001 && abs < estimatedFrameLengthWithPadding) {
isActive = true;
}
}
// console.log(time, td, isActive);
if (isActive) {
if (!wasTriggered) {
if (debug)
console.log("Trigger signal", time, model.time, model);
this.didTrigger[i] = true;
// If a signal doesn't have any explicit receivers it will invoke the signal globally
if (this.receivers?.length <= 0) {
SignalReceiver.invoke(model.asset);
}
else {
for (const rec of this.receivers) {
if (!rec)
continue;
rec.invoke(model.asset);
}
}
}
}
else {
if (!model.emitOnce)
this.didTrigger[i] = false;
}
}
}
}
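/**
* Drives nested timelines referenced by control clips: while a control clip is active its source object
* is made visible (when controlActivation is set) and the nested {@link PlayableDirector} is paused and
* evaluated at the clip-local time; when the clip is inactive the source object is hidden again.
*/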
export class ControlTrackHandler extends TrackHandler {
models = [];
timelines = [];
resolveSourceObjects(_context) {
for (let i = this.models.length - 1; i >= 0; i--) {
const model = this.models[i];
const asset = model.asset;
if (!asset.sourceObject || typeof asset.sourceObject !== "object") {
console.log("no source object, removing model", i, asset);
this.models.splice(i, 1);
continue;
}
else {
const timeline = GameObject.getComponent(asset.sourceObject, PlayableDirector);
// always add it to keep size of timelines and models in sync (index of model is index of timeline)
this.timelines.push(timeline);
if (timeline) {
if (asset.updateDirector) {
timeline.playOnAwake = false;
}
}
}
}
}
_previousActiveModel = null;
evaluate(time) {
this._previousActiveModel = null;
for (let i = 0; i < this.models.length; i++) {
const model = this.models[i];
const asset = model.asset;
if (time >= model.start && time <= model.end) {
this._previousActiveModel = model;
const clipTime = this.getClipTime(time, model);
if (asset.controlActivation) {
const obj = asset.sourceObject;
obj.visible = true;
}
if (asset.updateDirector) {
const timeline = this.timelines[i];
if (timeline) {
if (timeline.isPlaying) {
timeline.pause();
}
timeline.time = clipTime;
timeline.evaluate();
}
}
// control tracks can not overlap/blend
// break;
}
else {
const previousActiveAsset = this._previousActiveModel?.asset;
if (asset.controlActivation) {
const obj = asset.sourceObject;
if (previousActiveAsset?.sourceObject !== obj)
obj.visible = false;
}
}
}
}
}
//# sourceMappingURL=TimelineTracks.js.map