@oiij/use

Some Composable Functions for Vue 3

const require_rolldown_runtime = require('../_virtual/rolldown_runtime.cjs');
const vue = require_rolldown_runtime.__toESM(require("vue"));

//#region src/composables/use-audio-context.ts
// Format a duration given in seconds as "m:ss".
function formatTime(seconds) {
	const minutes = Math.floor(seconds / 60);
	seconds = Math.floor(seconds % 60);
	return `${minutes}:${seconds.toString().padStart(2, "0")}`;
}
function useAudioContext(options) {
	const { analyser = false, volume: defaultVolume = 80 } = options ?? {};
	const audioContext = new AudioContext();
	const audioBuffer = (0, vue.shallowRef)();
	const bufferSource = (0, vue.shallowRef)(null);
	// Audio graph: buffer source -> gain -> analyser -> destination.
	const gainNode = audioContext.createGain();
	const analyserNode = audioContext.createAnalyser();
	analyserNode.fftSize = 2048;
	const bufferLength = analyserNode.frequencyBinCount;
	const uint8Array = new Uint8Array(bufferLength);
	gainNode.connect(analyserNode).connect(audioContext.destination);
	// Mirror the AudioContext state into a reactive ref.
	const status = (0, vue.ref)(audioContext.state);
	audioContext.addEventListener("statechange", () => {
		status.value = audioContext.state;
	});
	const playing = (0, vue.ref)(false);
	const ended = (0, vue.ref)(false);
	const startFlag = (0, vue.ref)(0);
	const pauseFlag = (0, vue.ref)(0);
	const currentTimeRaw = (0, vue.ref)(0);
	const currentTime = (0, vue.computed)(() => formatTime(currentTimeRaw.value));
	const durationRaw = (0, vue.ref)(0);
	const duration = (0, vue.computed)(() => formatTime(durationRaw.value));
	const progressRaw = (0, vue.ref)(0);
	const progress = (0, vue.computed)(() => Number(progressRaw.value.toFixed(0)));
	// Volume is exposed as 0–100 and mapped onto the gain node's 0–1 range.
	const volume = (0, vue.ref)(defaultVolume ?? gainNode.gain.value * 100);
	(0, vue.watch)(volume, (val) => {
		gainNode.gain.value = val / 100;
	});
	const detune = (0, vue.ref)(bufferSource.value?.detune.defaultValue ?? 0);
	(0, vue.watch)(detune, (val) => {
		if (bufferSource.value) bufferSource.value.detune.value = val;
	});
	const playbackRate = (0, vue.ref)(bufferSource.value?.playbackRate.defaultValue ?? 1);
	(0, vue.watch)(playbackRate, (val) => {
		if (bufferSource.value) bufferSource.value.playbackRate.value = val;
	});
	let _onByteTimeDomainDataFn = null;
	// requestAnimationFrame loop that streams time-domain (waveform) data to the registered callback.
	function getByteTimeDomainData() {
		analyserNode.getByteTimeDomainData(uint8Array);
		if (typeof _onByteTimeDomainDataFn === "function") _onByteTimeDomainDataFn(uint8Array);
		requestAnimationFrame(getByteTimeDomainData);
	}
	let _onEndedFn = null;
	let _onProgressFn = null;
	// requestAnimationFrame loop that tracks elapsed time and progress and fires the onEnded callback.
	function updateDuration() {
		const _currentTime = audioContext.currentTime - startFlag.value;
		if (_currentTime >= durationRaw.value) {
			playing.value = false;
			ended.value = true;
			if (typeof _onEndedFn === "function") _onEndedFn();
			return;
		}
		currentTimeRaw.value = _currentTime;
		progressRaw.value = _currentTime / durationRaw.value * 100;
		if (typeof _onProgressFn === "function") _onProgressFn(progressRaw.value);
		requestAnimationFrame(updateDuration);
	}
	function createBufferSource(audioBuffer$1) {
		const bufferSource$1 = audioContext.createBufferSource();
		bufferSource$1.buffer = audioBuffer$1;
		bufferSource$1.connect(gainNode);
		return bufferSource$1;
	}
	// Seek: rebuild the buffer source and start it at the target offset (val is a 0–100 percentage).
	function setProgress(val) {
		if (audioBuffer.value) {
			const targetDuration = val / 100 * durationRaw.value;
			bufferSource.value?.stop();
			bufferSource.value = createBufferSource(audioBuffer.value);
			bufferSource.value.start(0, targetDuration);
			startFlag.value = audioContext.currentTime - targetDuration;
			if (!playing.value) pauseFlag.value = audioContext.currentTime - startFlag.value;
		}
	}
	// Decode raw audio bytes (a typed array) and start playback.
	async function playBuffer(arrayBuffer) {
		audioBuffer.value = await audioContext.decodeAudioData(arrayBuffer.buffer);
		play();
	}
	function play() {
		if (audioBuffer.value) {
			bufferSource.value?.stop();
			bufferSource.value = createBufferSource(audioBuffer.value);
			bufferSource.value.start(0);
			playing.value = true;
			ended.value = false;
			durationRaw.value = audioBuffer.value.duration;
			startFlag.value = audioContext.currentTime;
			updateDuration();
		}
	}
	// Pause/resume are implemented by suspending and resuming the AudioContext.
	function pause() {
		audioContext.suspend();
		pauseFlag.value = audioContext.currentTime - startFlag.value;
		playing.value = false;
	}
	function resume() {
		if (ended.value) {
			play();
			return;
		}
		audioContext.resume();
		startFlag.value = audioContext.currentTime - pauseFlag.value;
		playing.value = true;
	}
	function stop() {
		bufferSource.value?.stop();
		pauseFlag.value = 0;
		startFlag.value = 0;
		currentTimeRaw.value = 0;
		playing.value = false;
		ended.value = true;
	}
	// Stop playback and release the AudioContext.
	function destroy() {
		stop();
		bufferSource.value = null;
		audioContext.close();
	}
	return {
		audioContext,
		bufferSource,
		gainNode,
		analyserNode,
		status,
		playing,
		startFlag,
		pauseFlag,
		currentTimeRaw,
		currentTime,
		durationRaw,
		duration,
		progressRaw,
		progress,
		setProgress,
		onProgress: (fn) => {
			_onProgressFn = fn;
		},
		volume,
		detune,
		playbackRate,
		updateDuration,
		playBuffer,
		play,
		pause,
		resume,
		stop,
		onByteTimeDomainData: (fn) => {
			_onByteTimeDomainDataFn = fn;
			if (analyser) getByteTimeDomainData();
		},
		destroy,
		onEnded: (fn) => {
			_onEndedFn = fn;
		}
	};
}
//#endregion
exports.useAudioContext = useAudioContext;
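
Example usage (a minimal sketch, not part of the published file): the snippet below assumes useAudioContext is imported from @oiij/use inside a Vue 3 component's setup code; the fetch URL and the load() helper are illustrative placeholders.

import { useAudioContext } from '@oiij/use'

const {
	playBuffer,
	pause,
	resume,
	stop,
	volume,
	progress,
	onProgress,
	onEnded,
} = useAudioContext({ analyser: false, volume: 80 })

// playBuffer expects a typed array: it decodes `bytes.buffer` internally.
async function load(url: string) {
	const res = await fetch(url) // placeholder URL, e.g. a local audio asset
	const bytes = new Uint8Array(await res.arrayBuffer())
	await playBuffer(bytes)
}

onProgress(percent => console.log(`progress: ${Math.round(percent)}%`))
onEnded(() => console.log('playback finished'))

volume.value = 50 // reactive: the watcher maps 0–100 onto the gain node's 0–1 range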