@needle-tools/engine
Version:
Needle Engine is a web-based runtime for 3D apps. It runs on your machine for development with great integrations into editors like Unity or Blender - and can be deployed onto any device! It is flexible and extensible, with networking and XR support built in.
645 lines (574 loc) • 25 kB
text/typescript
import { AudioLoader, PositionalAudio } from "three";
import { PositionalAudioHelper } from 'three/examples/jsm/helpers/PositionalAudioHelper.js';
import { isDevEnvironment } from "../engine/debug/index.js";
import { Application, ApplicationEvents } from "../engine/engine_application.js";
import { findObjectOfType } from "../engine/engine_components.js";
import { Mathf } from "../engine/engine_math.js";
import { serializable } from "../engine/engine_serialization_decorator.js";
import { DeviceUtilities, getParam } from "../engine/engine_utils.js";
import { AudioListener } from "./AudioListener.js";
import { Behaviour, GameObject } from "./Component.js";
const debug = getParam("debugaudio");
/**
* Defines how audio volume attenuates over distance from the listener.
*/
export enum AudioRolloffMode {
/**
* Logarithmic rolloff provides a natural, real-world attenuation where volume decreases
* exponentially with distance.
*/
Logarithmic = 0,
/**
* Linear rolloff provides a straightforward volume reduction that decreases at a constant
* rate with distance.
*/
Linear = 1,
/**
* Custom rolloff allows for defining specialized distance-based attenuation curves.
* Note: Custom rolloff is not fully implemented in this version.
*/
Custom = 2,
}
/**
* Plays audio clips in the scene, with support for spatial positioning.
*
* The AudioSource component can play audio files or media streams with
* options for spatial blending, volume control, looping, and more.
*
* When a page loses visibility (tab becomes inactive), audio will automatically
* pause unless {@link playInBackground} is set to true. On mobile devices, audio always
* pauses regardless of this setting. When the page becomes visible again,
* previously playing audio will resume.
*
* AudioSource also responds to application mute state changes. When the application
* is muted, the volume is set to 0. When unmuted, the volume
* returns to its previous value.
*
* @category Multimedia
* @group Components
*/
export class AudioSource extends Behaviour {
/**
* Checks if the user has interacted with the page to allow audio playback.
* Audio playback often requires a user gesture first due to browser autoplay policies.
* This is the same as calling {@link Application.userInteractionRegistered}.
*
* @returns Whether user interaction has been registered to allow audio playback
*/
public static get userInteractionRegistered(): boolean {
return Application.userInteractionRegistered;
}
/**
* Registers a callback that will be executed once the user has interacted with the page,
* allowing audio playback to begin.
* This is the same as calling {@link Application.registerWaitForInteraction}.
*
* @param cb - The callback function to execute when user interaction is registered
*/
public static registerWaitForAllowAudio(cb: Function) {
Application.registerWaitForInteraction(cb);
}
/**
* The audio clip to play. Can be a URL string pointing to an audio file or a {@link MediaStream} object.
*/
clip: string | MediaStream = "";
/**
* When true, the audio will automatically start playing when the component is enabled.
* When false, you must call play() manually to start audio playback.
* @default false
*/
playOnAwake: boolean = false;
/**
* When true, the audio clip will be loaded during initialization rather than when play() is called.
* This can reduce playback delay but increases initial loading time.
* @default true
*/
preload: boolean = true;
/**
* When true, audio will continue playing when the browser tab loses focus.
* When false, audio will pause when the tab is minimized or not active.
* @default true
*/
playInBackground: boolean = true;
/**
* Indicates whether the audio is currently playing.
*
* @returns True if the audio is playing, false otherwise
*/
get isPlaying(): boolean { return this.sound?.isPlaying ?? false; }
/**
* The total duration of the current audio clip in seconds.
*
* @returns Duration in seconds or undefined if no clip is loaded
*/
get duration() {
return this.sound?.buffer?.duration;
}
/**
* The current playback position as a normalized value between 0 and 1.
* Can be set to seek to a specific position in the audio.
*/
get time01() {
const duration = this.duration;
if (duration && this.sound) {
return this.sound?.context.currentTime / duration;
}
return 0;
}
set time01(val: number) {
const duration = this.duration;
if (duration && this.sound) {
this.time = val * duration;
}
}
/**
* The current playback position in seconds.
* Can be set to seek to a specific time in the audio.
*/
get time(): number { return this.sound?.source ? (this.sound.source?.context.currentTime - this._lastContextTime + this.sound.offset) : 0; }
set time(val: number) {
if (this.sound) {
if (val === this.sound.offset) return;
const wasPlaying = this.isPlaying;
this.stop();
this.sound.offset = val;
if (wasPlaying)
this.play();
}
}
/**
* When true, the audio will repeat after reaching the end.
* When false, audio will play once and stop.
* @default false
*/
get loop(): boolean {
if (this.sound) this._loop = this.sound.getLoop();
return this._loop;
}
set loop(val: boolean) {
this._loop = val;
if (this.sound) this.sound.setLoop(val);
}
/**
* Controls how the audio is positioned in space.
* Values range from 0 (2D, non-positional) to 1 (fully 3D positioned).
* Note: 2D playback is not fully supported in the current implementation.
*/
get spatialBlend(): number {
return this._spatialBlend;
}
set spatialBlend(val: number) {
if (val === this._spatialBlend) return;
this._spatialBlend = val;
this._needUpdateSpatialDistanceSettings = true;
}
/**
* The minimum distance from the audio source at which the volume starts to attenuate.
* Within this radius, the audio plays at full volume regardless of distance.
*/
get minDistance(): number {
return this._minDistance;
}
set minDistance(val: number) {
if (this._minDistance === val) return;
this._minDistance = val;
this._needUpdateSpatialDistanceSettings = true;
}
/**
* The maximum distance from the audio source beyond which the volume no longer decreases.
* This defines the outer limit of the attenuation curve.
*/
get maxDistance(): number {
return this._maxDistance;
}
set maxDistance(val: number) {
if (this._maxDistance === val) return;
this._maxDistance = val;
this._needUpdateSpatialDistanceSettings = true;
}
private _spatialBlend: number = 0;
private _minDistance: number = 1;
private _maxDistance: number = 100;
/**
* Controls the overall volume/loudness of the audio.
* Values range from 0 (silent) to 1 (full volume).
* @default 1
*/
get volume(): number { return this._volume; }
set volume(val: number) {
this._volume = val;
if (this.sound && !this.context.application.muted) {
if (debug) console.log(this.name, "audio set volume", val);
this.sound.setVolume(val);
}
}
private _volume: number = 1;
/**
* Controls the playback rate (speed) of the audio.
* Values greater than 1 increase speed, values less than 1 decrease it.
* This affects both speed and pitch of the audio.
* @default 1
*/
set pitch(val: number) {
if (this.sound) this.sound.setPlaybackRate(val);
}
get pitch(): number {
return this.sound ? this.sound.getPlaybackRate() : 1;
}
/**
* Determines how audio volume decreases with distance from the listener.
* @default AudioRolloffMode.Logarithmic
* @see {@link AudioRolloffMode}
*/
rollOffMode: AudioRolloffMode = 0;
private _loop: boolean = false;
private sound: PositionalAudio | null = null;
private helper: PositionalAudioHelper | null = null;
private wasPlaying = false;
private audioLoader: AudioLoader | null = null;
private shouldPlay: boolean = false;
// set this from audio context time, used to set clip offset when setting "time" property
// there is maybe a better way to set a audio clip current time?!
private _lastClipStartedLoading: string | MediaStream | null = null;
private _audioElement: HTMLAudioElement | null = null;
/**
* Returns the underlying {@link PositionalAudio} object, creating it if necessary.
* The audio source needs a user interaction to be initialized due to browser autoplay policies.
*
* @returns The three.js PositionalAudio object or null if unavailable
*/
public get Sound(): PositionalAudio | null {
if (!this.sound && AudioSource.userInteractionRegistered) {
// Get or create an audiolistener in the scene
let listener = this.gameObject.getComponent(AudioListener) // AudioListener on AudioSource?
?? this.context.mainCamera.getComponent(AudioListener) // AudioListener on current main camera?
?? findObjectOfType(AudioListener, this.context, false); // Active AudioListener in scene?
if (!listener && this.context.mainCamera) listener = this.context.mainCamera.addComponent(AudioListener);
if (listener?.listener) {
this.sound = new PositionalAudio(listener.listener);
this.gameObject?.add(this.sound);
// this._listener = listener;
// this._originalSoundMatrixWorldFunction = this.sound.updateMatrixWorld;
// this.sound.updateMatrixWorld = this._onSoundMatrixWorld;
}
else if (debug) console.warn("No audio listener found in scene - can not play audio");
}
return this.sound;
}
// This is a hacky workaround to get the PositionalAudio behave like a 2D audio source
// private _listener: AudioListener | null = null;
// private _originalSoundMatrixWorldFunction: Function | null = null;
// private _onSoundMatrixWorld = (force: boolean) => {
// if (this._spatialBlend > .05) {
// if (this._originalSoundMatrixWorldFunction) {
// this._originalSoundMatrixWorldFunction.call(this.sound, force);
// }
// }
// else {
// // we use another object's matrix world function (but bound to the positional audio)
// // this is just a little trick to prevent calling the PositionalAudio's updateMatrixWorld function
// this.gameObject.updateMatrixWorld?.call(this.sound, force);
// if (this.sound && this._listener) {
// this.sound.gain.connect(this._listener.listener.getInput());
// // const pos = getTempVector().setFromMatrixPosition(this._listener.gameObject.matrixWorld);
// // const ctx = this.sound.context;
// // const delay = this._listener.listener.timeDelta;
// // const time = ctx.currentTime ;
// // this.sound.panner.positionX.setValueAtTime(pos.x, time);
// // this.sound.panner.positionY.setValueAtTime(pos.y, time);
// // this.sound.panner.positionZ.setValueAtTime(pos.z, time);
// // this.sound.panner.orientationX.setValueAtTime(0, time);
// // this.sound.panner.orientationY.setValueAtTime(0, time);
// // this.sound.panner.orientationZ.setValueAtTime(-1, time);
// }
// }
// }
/**
* Indicates whether the audio source is queued to play when possible.
* This may be true before user interaction has been registered.
*
* @returns Whether the audio source intends to play
*/
public get ShouldPlay(): boolean { return this.shouldPlay; }
/**
* Returns the Web Audio API context associated with this audio source.
*
* @returns The {@link AudioContext} or null if not available
*/
public get audioContext() {
return this.sound?.context;
}
/** @internal */
awake() {
if (debug) console.log("[AudioSource]", this);
this.audioLoader = new AudioLoader();
if (this.playOnAwake) this.shouldPlay = true;
if (this.preload) {
if (typeof this.clip === "string") {
this.audioLoader.load(this.clip, this.createAudio, () => { }, console.error);
}
}
}
/** @internal */
onEnable(): void {
if (this.sound)
this.gameObject.add(this.sound);
if (!AudioSource.userInteractionRegistered) {
AudioSource.registerWaitForAllowAudio(() => {
if (this.enabled && !this.destroyed && this.shouldPlay)
this.onNewClip(this.clip);
});
}
else if (this.playOnAwake && this.context.application.isVisible) {
this.play();
}
globalThis.addEventListener('visibilitychange', this.onVisibilityChanged);
this.context.application.addEventListener(ApplicationEvents.MuteChanged, this.onApplicationMuteChanged);
}
/** @internal */
onDisable() {
globalThis.removeEventListener('visibilitychange', this.onVisibilityChanged);
this.context.application.removeEventListener(ApplicationEvents.MuteChanged, this.onApplicationMuteChanged);
this.pause();
}
private onVisibilityChanged = () => {
switch (document.visibilityState) {
case "hidden":
if (this.playInBackground === false || DeviceUtilities.isMobileDevice()) {
this.wasPlaying = this.isPlaying;
if (this.isPlaying) {
this.pause();
}
}
break;
case "visible":
if (debug) console.log("visible", this.enabled, this.playOnAwake, !this.isPlaying, AudioSource.userInteractionRegistered, this.wasPlaying);
if (this.enabled && this.playOnAwake && !this.isPlaying && AudioSource.userInteractionRegistered && this.wasPlaying) {
this.play();
}
break;
}
}
private onApplicationMuteChanged = () => {
if (this.context.application.muted)
this.sound?.setVolume(0);
else
this.sound?.setVolume(this.volume);
}
private createAudio = (buffer?: AudioBuffer) => {
if(this.destroyed) {
if(debug) console.warn("AudioSource destroyed, not creating audio", this.name);
return;
}
if (debug) console.log("AudioBuffer finished loading", buffer);
const sound = this.Sound;
if (!sound) {
if (debug) console.warn("Failed getting sound?", this.name);
return;
}
if (sound.isPlaying)
sound.stop();
if (buffer) sound.setBuffer(buffer);
sound.loop = this._loop;
if (this.context.application.muted) sound.setVolume(0);
else sound.setVolume(this.volume);
sound.autoplay = this.shouldPlay && AudioSource.userInteractionRegistered;
this.applySpatialDistanceSettings();
if (sound.isPlaying)
sound.stop();
// const panner = sound.panner;
// panner.coneOuterGain = 1;
// sound.setDirectionalCone(360, 360, 1);
// const src = sound.context.createBufferSource();
// src.buffer = sound.buffer;
// src.connect(sound.panner);
// src.start(this.audioContext?.currentTime);
// const gain = sound.context.createGain();
// gain.gain.value = 1 - this.spatialBlend;
// src.connect(gain);
// make sure we only play the sound if the user has interacted with the page
AudioSource.registerWaitForAllowAudio(this.__onAllowAudioCallback);
}
private __onAllowAudioCallback = () => {
if (this.shouldPlay)
this.play();
}
private applySpatialDistanceSettings() {
const sound = this.sound;
if (!sound) return;
this._needUpdateSpatialDistanceSettings = false;
const dist = Mathf.lerp(10 * this._maxDistance / Math.max(0.0001, this.spatialBlend), this._minDistance, this.spatialBlend);
if (debug) console.log(this.name, this._minDistance, this._maxDistance, this.spatialBlend, "Ref distance=" + dist);
sound.setRefDistance(dist);
sound.setMaxDistance(Math.max(0.01, this._maxDistance));
// sound.setRolloffFactor(this.spatialBlend);
// sound.panner.positionZ.automationRate
// https://developer.mozilla.org/en-US/docs/Web/API/PannerNode/distanceModel
switch (this.rollOffMode) {
case AudioRolloffMode.Logarithmic:
sound.setDistanceModel('exponential');
break;
case AudioRolloffMode.Linear:
sound.setDistanceModel('linear');
break;
case AudioRolloffMode.Custom:
console.warn("Custom rolloff for AudioSource is not supported: " + this.name);
break;
}
if (this.spatialBlend > 0) {
if (debug && !this.helper) {
this.helper = new PositionalAudioHelper(sound, sound.getRefDistance());
sound.add(this.helper);
}
}
else if (this.helper && this.helper.parent) {
this.helper.removeFromParent();
}
}
private async onNewClip(clip?: string | MediaStream) {
if (clip) this.clip = clip;
if (typeof clip === "string") {
if (debug)
console.log(clip);
if (clip.endsWith(".mp3") || clip.endsWith(".wav")) {
if (!this.audioLoader)
this.audioLoader = new AudioLoader();
this.shouldPlay = true;
if (this._lastClipStartedLoading === clip) {
if (debug) console.log("Is currently loading:", this._lastClipStartedLoading, this)
return;
}
this._lastClipStartedLoading = clip;
if (debug)
console.log("load audio", clip);
const buffer = await this.audioLoader.loadAsync(clip).catch(console.error);
if(this.destroyed) return;
if(this._lastClipStartedLoading === clip) this._lastClipStartedLoading = null;
if (buffer) this.createAudio(buffer);
}
else console.warn("Unsupported audio clip type", clip)
}
else {
this.shouldPlay = true;
this.createAudio();
}
}
/**
* Plays the audio clip or media stream.
* If no argument is provided, plays the currently assigned clip.
*
* @param clip - Optional audio clip or {@link MediaStream} to play
*/
play(clip: string | MediaStream | undefined = undefined) {
// use audio source's clip when no clip is passed in
if (!clip && this.clip)
clip = this.clip;
// We only support strings and media stream
// TODO: maybe we should return here if an invalid value is passed in
if (clip !== undefined && typeof clip !== "string" && !(clip instanceof MediaStream)) {
if (isDevEnvironment())
console.warn("Called play on AudioSource with unknown argument type:", clip + "\nUsing the assigned clip instead:", this.clip)
// e.g. when a AudioSource.Play is called from SpatialTrigger onEnter this event is called with the TriggerReceiver... to still make this work we *re-use* our already assigned clip. Because otherwise calling `play` would not play the clip...
clip = this.clip;
}
// Check if we need to call load first
let needsLoading = !this.sound || (clip && clip !== this.clip);
if (typeof clip === "string" && !this.audioLoader) needsLoading = true;
if (clip instanceof MediaStream || typeof clip === "string")
this.clip = clip;
if (needsLoading) {
this.shouldPlay = true;
this.onNewClip(clip);
return;
}
this.shouldPlay = true;
this._hasEnded = false;
if (debug)
console.log("play", this.sound?.getVolume(), this.sound);
if (this.sound && !this.sound.isPlaying) {
const muted = this.context.application.muted;
if (muted) this.sound.setVolume(0);
this.gameObject?.add(this.sound);
if (this.clip instanceof MediaStream) {
// We have to set the audio element source to the mediastream as well
// otherwise it will not play for some reason...
this.sound.setMediaStreamSource(this.clip);
if (!this._audioElement) {
this._audioElement = document.createElement('audio');
this._audioElement.style.display = "none";
}
if (!this._audioElement.parentNode)
this.context.domElement.shadowRoot?.append(this._audioElement);
this._audioElement.srcObject = this.clip;
this._audioElement.autoplay = false;
}
else {
if (this._audioElement) this._audioElement.remove();
this.sound.play(muted ? .1 : 0);
}
}
}
/**
* Pauses audio playback while maintaining the current position.
* Use play() to resume from the paused position.
*/
pause() {
if (debug) console.log("Pause", this);
this._hasEnded = true;
this.shouldPlay = false;
if (this.sound && this.sound.isPlaying && this.sound.source) {
this._lastContextTime = this.sound?.context.currentTime;
this.sound.pause();
}
this._audioElement?.remove();
}
/**
* Stops audio playback completely and resets the playback position to the beginning.
* Unlike pause(), calling play() after stop() will start from the beginning.
*/
stop() {
if (debug) console.log("Pause", this);
this._hasEnded = true;
this.shouldPlay = false;
if (this.sound && this.sound.source) {
this._lastContextTime = this.sound?.context.currentTime;
if (debug)
console.log(this._lastContextTime)
this.sound.stop();
}
this._audioElement?.remove();
}
private _lastContextTime: number = 0;
private _hasEnded: boolean = true;
private _needUpdateSpatialDistanceSettings: boolean = false;
/** @internal */
update() {
if (this.helper) {
if (this.isPlaying)
this.helper.update();
this.helper.visible = this.isPlaying;
}
if (this._needUpdateSpatialDistanceSettings) {
this.applySpatialDistanceSettings();
}
if (this.sound && !this.sound.isPlaying && this.shouldPlay && !this._hasEnded) {
this._hasEnded = true;
if (debug)
console.log("Audio clip ended", this.clip);
this.dispatchEvent(new CustomEvent("ended", { detail: this }));
}
// this.gameObject.position.x = Math.sin(time.time) * 2;
// this.gameObject.position.z = Math.cos(time.time * .5) * 2;
}
}