ez-web-audio

Making the Web Audio API super EZ since 2024.

var A = Object.defineProperty; var S = (o, e, t) => e in o ? A(o, e, { enumerable: !0, configurable: !0, writable: !0, value: t }) : o[e] = t; var r = (o, e, t) => S(o, typeof e != "symbol" ? e + "" : e, t); function m(o) { const e = Math.floor(o); return e < 10 ? `0${e}` : `${e}`; } function f(o, e, t) { return { raw: o, string: `${m(e)}:${m(t)}`, pojo: { minutes: e, seconds: t } }; } function p(o) { let e = [], t = 1; function n() { return o.currentTime * 1e3; } function i() { const s = n(); e.forEach((a) => { a.due <= s && a.fn(); }), e = e.filter((a) => a.due > s), e.length > 0 && window.requestAnimationFrame(i); } return { setTimeout(s, a) { const u = t; return t += 1, e.push({ id: u, due: n() + a, fn: s }), e.length === 1 && window.requestAnimationFrame(i), u; }, clearTimeout(s) { e = e.filter((a) => a.id !== s); } }; } function x(o, e, t) { return o < e ? e : o > t ? t : o; } class w { constructor(e, t, n) { r(this, "startingValues", []); r(this, "valuesAtTime", []); r(this, "exponentialValues", []); r(this, "linearValues", []); this.audioSource = e, this.gainNode = t, this.pannerNode = n; } // TODO: handle all gainNode and pannerNode props get gain() { return this.gainNode.gain.value; } set gain(e) { this.gainNode.gain.value = e; } get pan() { return this.pannerNode.pan.value; } set pan(e) { this.pannerNode.pan.value = e; } updateGainNode(e) { e.gain.value = this.gain, this.gainNode = e; } updatePannerNode(e) { e.pan.value = this.pan, this.pannerNode = e; } _update(e, t) { switch (e) { case "pan": this.pan = t; break; case "gain": this.gain = t; break; case "detune": if (!this.audioSource.detune) throw new Error("Audio source does not support detune"); this.audioSource.detune.value = t; break; default: throw new Error(`Control type '${e}' not supported`); } } // TODO: Consider changing 'from' to be something like 'using' or 'as' update(e) { return { to: (t) => ({ from: (n) => { switch (n) { case "ratio": this._update(e, t); break; case "inverseRatio": this._update(e, 1 - t); break; case "percent": this._update(e, t / 100); break; default: throw new Error(`Control method '${n}' not supported`); } } }) }; } onPlaySet(e) { return { to: (t) => { const n = { type: e, value: t }; return this.startingValues.push(n), { at: (i) => { this.removeStartingValue(n), this.valuesAtTime.push({ ...n, time: i }); }, endingAt: (i, s = "exponential") => { this.removeStartingValue(n), this.addRampValue({ ...n, time: i }, s); } }; } }; } onPlayRamp(e, t) { return { from: (n) => ({ to: (i) => ({ in: (s) => { this.onPlaySet(e).to(n), this.onPlaySet(e).to(i).endingAt(s, t); } }) }) }; } removeStartingValue(e) { this.startingValues = this.startingValues.filter((t) => t !== e); } addRampValue(e, t) { switch (t) { case "exponential": this.exponentialValues.push(e); break; case "linear": this.linearValues.push(e); break; default: throw new Error(`Unsupported ramp type: ${t}`); } } } class g { constructor(e, t) { r(this, "gainNode"); r(this, "pannerNode"); r(this, "bufferSourceNode"); r(this, "controller"); r(this, "_isPlaying", !1); r(this, "_startedPlayingAt", 0); r(this, "startOffset", 0); r(this, "connections", []); this.audioContext = e, this.audioBuffer = t; const n = this.audioContext.createBufferSource(), i = e.createGain(), s = e.createStereoPanner(); this.gainNode = i, this.pannerNode = s, this.bufferSourceNode = n, this.audioBuffer = t, n.buffer = t, this.controller = new T(n, i, s); } setup() { const e = this.audioContext.createBufferSource(); e.buffer = this.audioBuffer, this.bufferSourceNode = e, 
this.wireConnections(), this.controller.setValuesAtTimes(); } wireConnections() { const e = [this.bufferSourceNode], { connections: t, pannerNode: n } = this; for (let i = 0; i < t.length; i++) e.push(t[i].audioNode); e.push(this.gainNode), e.push(n); for (let i = 0; i < e.length - 1; i++) e[i].connect(e[i + 1]); n.connect(this.audioContext.destination); } addConnection(e) { this.connections.push(e), this.wireConnections(); } removeConnection(e) { const t = this.getConnection(e); if (t) { const n = this.connections.indexOf(t); n > -1 && (this.connections.splice(n, 1), this.wireConnections()); } } // Allows you to get any user created connection in the connections array getConnection(e) { return this.connections.find((t) => t.name === e); } // Allows you to get node from any user created connection in the connections array getNodeFrom(e) { var t; return (t = this.getConnection(e)) == null ? void 0 : t.audioNode; } get audioSourceNode() { return this.bufferSourceNode; } update(e) { return this.controller.update(e); } changePanTo(e) { this.controller.update("pan").to(e).from("ratio"); } changeGainTo(e) { return this.controller.update("gain").to(e); } onPlaySet(e) { return this.controller.onPlaySet(e); } onPlayRamp(e, t) { return this.controller.onPlayRamp(e, t); } play() { this.playAt(this.audioContext.currentTime); } playFor(e) { const { setTimeout: t } = p(this.audioContext); this.playAt(this.audioContext.currentTime), t(() => this.stop(), e * 1e3); } playAt(e) { const { audioContext: t } = this, { currentTime: n } = t, { setTimeout: i } = p(t); this.setup(), this.bufferSourceNode.start(e, this.startOffset), this._startedPlayingAt = e, i(() => this._isPlaying = !1, this.duration.pojo.seconds * 1e3), e <= n ? this._isPlaying = !0 : i(() => { this._isPlaying = !0; }, (e - n) * 1e3); } stop() { this.bufferSourceNode.stop(), this._isPlaying = !1; } get isPlaying() { return this._isPlaying; } get duration() { const e = this.bufferSourceNode.buffer; if (e === null) return f(0, 0, 0); const { duration: t } = e, n = Math.floor(t / 60), i = t % 60; return f(t, n, i); } get percentGain() { return this.controller.gain * 100; } /** * Gets the bufferSource and stops the initAudio, * changes it's play position, and restarts the audio. * * returns a pojo with the `from` method that `value` is curried to, allowing * one to specify which type of value is being provided. * * @example * // for a Sound instance with a duration of 100 seconds, these will all * // move the play position to 90 seconds. * soundInstance.seek(0.9).from('ratio'); * soundInstance.seek(0.1).from('inverseRatio') * soundInstance.seek(90).from('percent'); * soundInstance.seek(90).from('seconds'); * * @param {number} amount The new play position value. */ seek(e) { const t = this.duration.raw, n = (i) => { const s = this._isPlaying, a = x(i, 0, t); s ? 
(this.stop(), this.startOffset = a, this.play()) : this.startOffset = a; }; return { from(i) { switch (i) { case "ratio": n(e * t); break; case "percent": n(e * t * 0.01); break; case "inverseRatio": n(t - e * t); break; case "seconds": n(e); break; } } }; } } class T extends w { constructor(e, t, n) { super(e, t, n), this.bufferSourceNode = e, this.gainNode = t, this.pannerNode = n; } updateAudioSource(e) { this.bufferSourceNode = e; } setValuesAtTimes() { const { bufferSourceNode: e } = this, t = e.context.currentTime; this.applyValues(this.startingValues, t), this.applyValues(this.valuesAtTime, t), this.applyRampValues(this.exponentialValues, t, "exponential"), this.applyRampValues(this.linearValues, t, "linear"); } applyValues(e, t) { e.forEach((n) => { switch (n.type) { case "detune": this.bufferSourceNode.detune.setValueAtTime(n.value, t); break; case "gain": this.gainNode.gain.setValueAtTime(n.value, t); break; default: throw new Error(`Unsupported control type: ${n.type}`); } }); } applyRampValues(e, t, n) { e.forEach((i) => { const s = t + i.time; switch (i.type) { case "detune": switch (n) { case "exponential": this.bufferSourceNode.detune.exponentialRampToValueAtTime(i.value, s); break; case "linear": this.bufferSourceNode.detune.linearRampToValueAtTime(i.value, s); break; default: throw new Error(`Unsupported ramp type: ${n}`); } break; case "gain": switch (n) { case "exponential": this.gainNode.gain.exponentialRampToValueAtTime(i.value, s); break; case "linear": this.gainNode.gain.linearRampToValueAtTime(i.value, s); break; default: throw new Error(`Unsupported ramp type: ${n}`); } break; default: throw new Error(`ControlType of ${i.type} not supported`); } }); } } const h = { C0: 16.35, // 'C#0': 17.32, Db0: 17.32, D0: 18.35, // 'D#0': 19.45, Eb0: 19.45, E0: 20.6, F0: 21.83, // 'F#0': 23.12, Gb0: 23.12, G0: 24.5, // 'G#0': 25.96, Ab0: 25.96, A0: 27.5, // 'A#0': 29.14, Bb0: 29.14, B0: 30.87, C1: 32.7, // 'C#1': 34.65, Db1: 34.65, D1: 36.71, // 'D#1': 38.89, Eb1: 38.89, E1: 41.2, F1: 43.65, // 'F#1': 46.25, Gb1: 46.25, G1: 49, // 'G#1': 51.91, Ab1: 51.91, A1: 55, // 'A#1': 58.27, Bb1: 58.27, B1: 61.74, C2: 65.41, // 'C#2': 69.3, Db2: 69.3, D2: 73.42, // 'D#2': 77.78, Eb2: 77.78, E2: 82.41, F2: 87.31, // 'F#2': 92.5, Gb2: 92.5, G2: 98, // 'G#2': 103.83, Ab2: 103.83, A2: 110, // 'A#2': 116.54, Bb2: 116.54, B2: 123.47, C3: 130.81, // 'C#3': 138.59, Db3: 138.59, D3: 146.83, // 'D#3': 155.56, Eb3: 155.56, E3: 164.81, F3: 174.61, // 'F#3': 185, Gb3: 185, G3: 196, // 'G#3': 207.65, Ab3: 207.65, A3: 220, // 'A#3': 233.08, Bb3: 233.08, B3: 246.94, C4: 261.63, // 'C#4': 277.18, Db4: 277.18, D4: 293.66, // 'D#4': 311.13, Eb4: 311.13, E4: 329.63, F4: 349.23, // 'F#4': 369.99, Gb4: 369.99, G4: 392, // 'G#4': 415.3, Ab4: 415.3, A4: 440, // 'A#4': 466.16, Bb4: 466.16, B4: 493.88, C5: 523.25, // 'C#5': 554.37, Db5: 554.37, D5: 587.33, // 'D#5': 622.25, Eb5: 622.25, E5: 659.26, F5: 698.46, // 'F#5': 739.99, Gb5: 739.99, G5: 783.99, // 'G#5': 830.61, Ab5: 830.61, A5: 880, // 'A#5': 932.33, Bb5: 932.33, B5: 987.77, C6: 1046.5, // 'C#6': 1108.73, Db6: 1108.73, D6: 1174.66, // 'D#6': 1244.51, Eb6: 1244.51, E6: 1318.51, F6: 1396.91, // 'F#6': 1479.98, Gb6: 1479.98, G6: 1567.98, // 'G#6': 1661.22, Ab6: 1661.22, A6: 1760, // 'A#6': 1864.66, Bb6: 1864.66, B6: 1975.53, C7: 2093, // 'C#7': 2217.46, Db7: 2217.46, D7: 2349.32, // 'D#7': 2489.02, Eb7: 2489.02, E7: 2637.02, F7: 2793.83, // 'F#7': 2959.96, Gb7: 2959.96, G7: 3135.96, // 'G#7': 3322.44, Ab7: 3322.44, A7: 3520, // 'A#7': 3729.31, Bb7: 3729.31, 
B7: 3951.07, C8: 4186.01, // 'C#8': 4434.92, Db8: 4434.92, D8: 4698.64, // 'D#8': 4978.03, Eb8: 4978.03 }; function d(o, e) { return e.split(".").reduce((t, n) => t && t[n], o); } const { warn: P } = console; function N(o) { return class extends o { constructor(...n) { super(...n); /** * @property letter For note `Ab5`, this would be `A`. */ r(this, "letter", "A"); /** * @property accidental For note `Ab5`, this would be `b`. */ r(this, "accidental", ""); /** * @property octave For note `Ab5`, this would be `5`. */ r(this, "octave", "0"); const i = n[n.length - 1]; if (i) { const { identifier: s, frequency: a, letter: u, accidental: y, octave: b } = i; (s && a || (s || a) && (u || y || b)) && P("ez-audio: upon instantiation, multiple note identifiers were provided which might be a mistake and ez-audio has no way to determine which should be preferred", i, this), s && (this.identifier = s), a && (this.frequency = a), u && (this.letter = u), y && (this.accidental = y), b && (this.octave = b); } } /** * @property name Computed property. Value is `${letter}` or `${letter}${accidental}` if accidental exists. * @todo 'type' letter + accidental */ get name() { const { accidental: n, letter: i } = this; return n ? `${i}${n}` : i; } /** * @property frequency Computed property. The frequency of the note in hertz. Calculated by * comparing western musical standards (a standard piano) and the note * identifier (i.e. `Ab1`). If this property is set directly, all other * properties are updated to reflect the provided frequency. */ get frequency() { const { identifier: n } = this; return n && d(h, n) || 0; } set frequency(n) { let i; for (i in h) n === d(h, i) && (this.identifier = i); } /** * @property identifier Computed property. Value is `${letter}${octave}` or * `${letter}${accidental}${octave}` if accidental exists. If this property * is set directly, all other properties are updated to reflect the provided * identifier. */ get identifier() { const { accidental: n, letter: i, octave: s } = this; let a = "A0"; if (n ? a = `${i}${n}${s}` : a = `${i}${s}`, d(h, a)) return a; throw new Error(`Invalid musical identifier: ${a}`); } set identifier(n) { const [i] = n, s = n[2] || n[1]; let a; n[2] ? a = n[1] : a = "", this.letter = i, this.accidental = a, this.octave = s; } }; } class V extends N(g) { } class C { constructor(e) { this.notes = e; } /** * Gets a note from `notes`, given it's identifier. * * @method getNote * @param {string} identifier The identifier for the note that should be * returned, * @return {SampledNote} The specified Note instance. */ getNote(e) { return this.notes.find((t) => t.identifier === e); } /** * Plays a note from `notes`, given it's `identifier`. * * @method play * @param {string} identifier The identifier for the note that should be * played. 
*/ play(e) { this.getNote(e).play(); } } function v(o) { return new Uint8Array( atob(o).split("").map((e) => e.charCodeAt(0)) ); } function k(o) { const e = o.indexOf("=", o.indexOf("MIDI.Soundfont.")) + 2, t = o.lastIndexOf('"') + 1, n = `${o.slice(e, t)}}`.replace(/data:audio\/mp3;base64,/g, "").replace(/data:audio\/mpeg;base64,/g, "").replace(/data:audio\/ogg;base64,/g, ""); return JSON.parse(n); } function E(o, e) { const t = o.slice(0, e), n = o.slice(e, o.length); return n.push(...t), n; } function _(o) { return [...new Set(o)]; } function O(o) { let e = q(o); e = D(e); let t = R(e); return t = G(t), t = B(t), t.flat(); } function B(o) { const e = o.shift() || [], t = o[0].map((a) => a.name), n = e[e.length - 1].name, i = t.lastIndexOf(n) + 1, s = o.map((a) => E(a, i)); return s.unshift(e), s; } function G(o) { return o.map((e) => e.sort($)); } function q(o) { return [o, o.map((e) => e.octave)]; } function D([o, e]) { return [o, _(e).sort()]; } function R([o, e]) { return e.map((t) => o.filter((n) => n.octave === t)); } function $(o, e) { const t = o.letter, n = e.letter; return t < n || t === n && o.accidental === "b" ? -1 : 1; } function F(o, e) { const t = []; async function n(i, s) { const a = await o.decodeAudioData(s); return [i, a]; } for (const i in e) if (Object.prototype.hasOwnProperty.call(e, i)) { const s = v(e[i]); t.push(n(i, s.buffer)); } return Promise.all(t); } function I(o, e) { const t = e.map((n) => { const [i, s] = n, a = new V(o, s); return a.identifier = i, a; }); return O(t); } class U { constructor(e) { /** * @property gain * Determines the gain applied to each sample. */ r(this, "gain", 1); /** * @property pan * Determines the stereo pan position of each sample. */ r(this, "pan", 0); /** * @property _soundIterator * Temporary storage for the iterable that comes from the sounds Set. * This iterable is meant to be replaced with a new copy every time it reaches * it's end, resulting in an infinite stream of Sound instances. */ r(this, "_soundIterator"); /** * @property sounds * Acts as a register for loaded audio sources. Audio sources can be anything * that uses {{#crossLink "Playable"}}{{/crossLink}}. If not set on * instantiation, automatically set to `new Set()` via `_initSounds`. */ r(this, "sounds"); const t = new Set(e); this.sounds = t, this._soundIterator = e.values(); } /** * Gets the next audio source and plays it immediately. * * @public * @method play */ play() { this._getNextSound().play(); } /** * @method playIn * Gets the next Sound and plays it after the specified offset has elapsed. * * @param {number} _ Number of seconds from "now" that the next Sound * should be played. */ playIn(e) { } /** * Gets the next Sound and plays it at the specified moment in time. A * "moment in time" is measured in seconds from the moment that the * {{#crossLink "AudioContext"}}{{/crossLink}} was instantiated. * * @param {number} time The moment in time (in seconds, relative to the * {{#crossLink "AudioContext"}}AudioContext's{{/crossLink}} "beginning of * time") when the next Sound should be played. * * @public * @method playAt */ playAt(e) { this._getNextSound().playAt(e); } /** * Gets _soundIterator and returns it's next value. If _soundIterator has * reached it's end, replaces _soundIterator with a fresh copy from sounds * and returns the first value from that. 
* * @private * @method _getNextSound * @return {Sound} */ _getNextSound() { let e = this._soundIterator, t; return t = e.next(), t.done && (e = this.sounds.values(), t = e.next()), this._soundIterator = e, this._setGainAndPan(t.value); } /** * Applies the `gain` and `pan` properties from the Sampler instance to a * Sound instance and returns the Sound instance. * * @private * @method _setGainAndPan * @return {Sound} The input sound after having it's gain and pan set */ _setGainAndPan(e) { return e.changePanTo(this.pan), e; } } const M = [ "highpass", "bandpass", "lowpass", "lowshelf", "highshelf", "peaking", "notch", "allpass" ]; class j { constructor(e, t) { r(this, "filters", []); r(this, "type"); r(this, "freq"); r(this, "controller"); r(this, "oscillator"); r(this, "gainNode"); r(this, "pannerNode"); r(this, "connections", []); r(this, "_isPlaying", !1); this.audioContext = e, this.type = (t == null ? void 0 : t.type) || "sine", this.freq = (t == null ? void 0 : t.frequency) || 440, this.oscillator = e.createOscillator(), this.gainNode = e.createGain(), this.pannerNode = e.createStereoPanner(), this.controller = new z(this.oscillator, this.gainNode, this.pannerNode), t && t.gain !== void 0 && this.changeGainTo(t.gain), M.forEach((n) => { const i = d(t, n); if (i) { const s = e.createBiquadFilter(); s.type = n, s.frequency.setValueAtTime(i.frequency || 440, e.currentTime), s.Q.setValueAtTime(i.q || 1, e.currentTime), this.filters.push(s); } }); } onPlaySet(e) { return this.controller.onPlaySet(e); } onPlayRamp(e, t) { return this.controller.onPlayRamp(e, t); } setup() { const e = this.audioContext.createOscillator(); e.type = this.type || "sine", e.frequency.setValueAtTime(this.freq || 440, this.audioContext.currentTime), this.oscillator = e; const t = this.audioContext.createGain(); this.gainNode = t, this.controller.updateAudioSource(e), this.controller.updateGainNode(t), this.wireConnections(), this.controller.setValuesAtTimes(); } wireConnections() { const e = [this.oscillator], { connections: t, filters: n, pannerNode: i } = this; for (let s = 0; s < n.length; s++) e.push(n[s]); for (let s = 0; s < t.length; s++) e.push(t[s].audioNode); e.push(this.gainNode), e.push(i); for (let s = 0; s < e.length - 1; s++) e[s].connect(e[s + 1]); i.connect(this.audioContext.destination); } addConnection(e) { this.connections.push(e), this.wireConnections(); } removeConnection(e) { const t = this.getConnection(e); if (t) { const n = this.connections.indexOf(t); n > -1 && (this.connections.splice(n, 1), this.wireConnections()); } } getConnection(e) { return this.connections.find((t) => t.name === e); } getNodeFrom(e) { var t; return (t = this.getConnection(e)) == null ? 
void 0 : t.audioNode; } get audioSourceNode() { return this.oscillator; } // convenience method, equivalent longer form would be // osc.controller.update(type).to(value).from('ratio') update(e) { return this.controller.update(e); } // convenience method, equivalent longer form would be // osc.update('pan').to(value).from('ratio') changePanTo(e) { this.controller.update("pan").to(e).from("ratio"); } // convenience method, equivalent longer form would be // osc.update('gain').to(value).from('ratio') changeGainTo(e) { this.controller.update("gain").to(e).from("ratio"); } play() { this.playAt(this.audioContext.currentTime); } playFor(e) { const { setTimeout: t } = p(this.audioContext); this.playAt(this.audioContext.currentTime), t(() => this.stop(), e * 1e3); } // playAt is the underlying play method behind all play methods playAt(e) { const { audioContext: t } = this, { currentTime: n } = t, { setTimeout: i } = p(t); this.setup(), this.oscillator.start(e), e <= n ? this._isPlaying = !0 : i(() => { this._isPlaying = !0; }, (e - n) * 1e3); } stop() { this._isPlaying = !1, this.oscillator.stop(); } get isPlaying() { return this._isPlaying; } // TODO: implement duration... can I? I think duration is too dynamic? any way to infer from asdr? or if there is a sheduled stop? get duration() { return f(0, 0, 0); } get percentGain() { return this.gainNode.gain.value * 100; } } class z extends w { constructor(e, t, n) { super(e, t, n), this.oscillator = e, this.gainNode = t, this.pannerNode = n; } updateAudioSource(e) { this.oscillator = e; } _update(e, t) { switch (e) { case "frequency": this.oscillator.frequency.value = t; break; default: super._update(e, t); } } setValuesAtTimes() { const { oscillator: { context: { currentTime: e } } } = this; this.applyValues(this.startingValues, e), this.applyValues(this.valuesAtTime, e), this.applyRampValues(this.exponentialValues, e, "exponential"), this.applyRampValues(this.linearValues, e, "linear"); } applyValues(e, t) { const { oscillator: n, gainNode: i } = this; e.forEach((s) => { switch (s.type) { case "frequency": n.frequency.setValueAtTime(s.value, t); break; case "gain": i.gain.setValueAtTime(s.value, t); break; default: throw new Error(`Unsupported control type: ${s.type}`); } }); } applyRampValues(e, t, n) { const { oscillator: i, gainNode: s } = this; e.forEach((a) => { const u = t + a.time; switch (a.type) { case "frequency": switch (n) { case "exponential": i.frequency.exponentialRampToValueAtTime(a.value, u); break; case "linear": i.frequency.linearRampToValueAtTime(a.value, u); break; default: throw new Error(`Unsupported ramp type: ${n}`); } break; case "gain": switch (n) { case "exponential": s.gain.exponentialRampToValueAtTime(a.value, u); break; case "linear": s.gain.linearRampToValueAtTime(a.value, u); break; default: throw new Error(`Unsupported ramp type: ${n}`); } break; default: throw new Error(`ControlType of ${a.type} not supported`); } }); } } class L extends g { /** * @property position Value is an object containing the current play position * of the audioBuffer in three formats. The three * formats are `raw`, `string`, and `pojo`. * * Play position of 6 minutes would be output as: * * { * raw: 360, // seconds * string: '06:00', * pojo: { * minutes: 6, * seconds: 0 * } * } */ get position() { const e = this.startOffset, t = Math.floor(e / 60), n = e - t * 60; return f(e, t, n); } /** * @property percentPlayed * Value is the current play position of the * audioBuffer, formatted as a percentage. 
*/ get percentPlayed() { return this.startOffset / this.duration.raw * 100; } /** * @method play * Plays the audio source immediately. */ play() { super.play(), this.audioSourceNode.onended = () => this.stop(), this._trackPlayPosition(); } /** * @method pause * Pauses the audio source by stopping without * setting startOffset back to 0. */ pause() { if (this._isPlaying) { const e = this.audioSourceNode; e.onended = function() { }, e.stop(), this._isPlaying = !1; } } /** * @method stop * Stops the audio source and sets * startOffset to 0. */ stop() { this.startOffset = 0, this._isPlaying && (this.audioSourceNode.onended = function() { }, super.stop()); } /** * @method _trackPlayPosition * Sets up a `requestAnimationFrame` based loop that updates the * startOffset as `audioContext.currentTime` grows. * Loop ends when `_isPlaying` is false. */ _trackPlayPosition() { const e = this.audioContext, t = this.startOffset, n = this._startedPlayingAt, i = () => { this._isPlaying && (this.startOffset = t + e.currentTime - n, requestAnimationFrame(i)); }; requestAnimationFrame(i); } } class W extends N(class { }) { constructor(e) { super(), this.letter = (e == null ? void 0 : e.letter) || "A", this.accidental = (e == null ? void 0 : e.accidental) || "", this.octave = (e == null ? void 0 : e.octave) || "0"; } } let c; function l() { if (!c) throw new Error("The audio context does not exist yet! You must call `initAudio()` in response to a user interaction before performing this action."); } async function Y() { c || (c = new AudioContext()), c.state === "suspended" && await c.resume(); } function H() { return l(), c; } function X(o) { const e = []; o || (o = h); for (const t in o) { const n = o[t], i = new W(); i.frequency = n, e.push(i); } return e; } async function J(o) { l(); const t = await (await fetch(o)).arrayBuffer(), n = await c.decodeAudioData(t); return new g(c, n); } async function Z(o) { l(); const t = await (await fetch(o)).arrayBuffer(), n = await c.decodeAudioData(t); return new L(c, n); } async function ee(o) { l(); const e = await Promise.all(o.map(async (t) => J(t))); return new U(e); } function te(o) { return l(), new j(c, o); } async function ne(o) { l(); const t = await (await fetch(o)).text(), n = k(t), i = await F(c, n), s = I(c, i); return new C(s); } function ie() { l(); const o = c.sampleRate, e = c.createBuffer(1, o, o), t = e.getChannelData(0); for (let n = 0; n < o; n++) t[n] = Math.random() * 2 - 1; return new g(c, e); } export { C as Font, N as MusicallyAware, W as Note, j as Oscillator, V as SampledNote, U as Sampler, g as Sound, L as Track, ne as createFont, X as createNotes, te as createOscillator, ee as createSampler, J as createSound, Z as createTrack, ie as createWhiteNoise, h as frequencyMap, H as getAudioContext, Y as initAudio };
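
The export list at the end of the bundle names the public API: initAudio, getAudioContext, the factory helpers (createSound, createTrack, createOscillator, createSampler, createFont, createNotes, createWhiteNoise), and the classes they return. A minimal usage sketch for loading and playing a Sound, assuming a browser ES-module setup and a hypothetical 'kick.wav' asset; as the bundle's own error message notes, initAudio() must be called in response to a user interaction.

import { initAudio, createSound } from 'ez-web-audio';

document.querySelector('#play-button').addEventListener('click', async () => {
  // Creates (or resumes) the shared AudioContext; every factory below throws
  // if it is called before this has run.
  await initAudio();

  // Fetch and decode the file, then wrap it in a Sound instance.
  // 'kick.wav' is a hypothetical asset path.
  const sound = await createSound('kick.wav');

  // The fluent update API: set gain to 0.5, expressed as a 0-1 ratio.
  sound.update('gain').to(0.5).from('ratio');

  sound.play();                       // play immediately
  // sound.playFor(2);                // or: play now and stop after 2 seconds
  // sound.seek(1.5).from('seconds'); // or: move the play position first
});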
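createOscillator takes an options object (type, frequency, gain, plus optional biquad filter settings keyed by the filter types listed in the bundle), and the resulting Oscillator supports scheduled ramps via onPlayRamp. A sketch under the same assumptions:

import { initAudio, createOscillator } from 'ez-web-audio';

async function beep() {
  await initAudio();

  // Options observed in the bundle: type, frequency, gain, and filter keys
  // such as lowpass: { frequency, q }.
  const osc = createOscillator({ type: 'sawtooth', frequency: 220, gain: 0.3 });

  // Each time the oscillator plays, ramp its frequency from 220 Hz to 880 Hz
  // over one second (the ramp type defaults to 'exponential').
  osc.onPlayRamp('frequency').from(220).to(880).in(1);

  osc.playFor(1); // start now, stop after one second
}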
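Track extends Sound with play-position tracking and a pause() that keeps the current offset. A sketch with a hypothetical audio URL:

import { initAudio, createTrack } from 'ez-web-audio';

async function setupPlayer() {
  await initAudio();

  // '/music/song.mp3' is a hypothetical URL.
  const track = await createTrack('/music/song.mp3');
  track.play();

  // position is reported as { raw, string, pojo }, e.g. '06:00' at 360 seconds.
  console.log(track.position.string, track.percentPlayed);

  // pause() stops playback without resetting the offset; play() resumes from it.
  track.pause();
  track.play();
}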
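Sampler cycles through a set of Sounds on successive play() calls, and Font loads a MIDI.js-style soundfont script and plays notes by identifier. A sketch with hypothetical asset URLs:

import { initAudio, createSampler, createFont } from 'ez-web-audio';

async function demo() {
  await initAudio();

  // Each play() takes the next Sound in the set, cycling back to the start
  // when the iterator is exhausted. The URLs are hypothetical.
  const sampler = await createSampler(['hihat1.wav', 'hihat2.wav', 'hihat3.wav']);
  sampler.pan = 0.4; // applied to each sound as it is handed out
  sampler.play();
  sampler.play();    // the next sample in the set

  // createFont expects a script in the MIDI.js soundfont format
  // (MIDI.Soundfont.<name> = { 'A0': 'data:audio/mp3;base64,...', ... }).
  const piano = await createFont('/fonts/piano-mp3.js'); // hypothetical URL
  piano.play('A4');                 // play a note by identifier
  const note = piano.getNote('C5'); // or grab the SampledNote instance
  note.playFor(0.5);
}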
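The remaining helpers: createWhiteNoise wraps one second of noise in a Sound, createNotes builds Note instances from the bundled frequency map (C0 through Eb8), and getAudioContext returns the shared AudioContext (throwing if initAudio has not run). A brief sketch:

import {
  initAudio,
  createWhiteNoise,
  createNotes,
  getAudioContext,
  frequencyMap,
} from 'ez-web-audio';

async function extras() {
  await initAudio();

  // One second of white noise, playable like any other Sound.
  const noise = createWhiteNoise();
  noise.playFor(0.25);

  // Note objects derived from the frequency map.
  const notes = createNotes();
  console.log(notes[0].identifier, notes[0].frequency); // 'C0' 16.35

  // Direct access to the AudioContext and the raw frequency table.
  console.log(getAudioContext().currentTime, frequencyMap.A4); // ... 440
}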