@oiij/use
Version:
Some Composable Functions for Vue 3
328 lines (326 loc) • 9.68 kB
JavaScript
import { computed, onUnmounted, ref, shallowRef, watchEffect } from "vue";
import { createEventHook } from "@vueuse/core";
//#region src/composables/use-audio-context-buffer.ts
/**
 * Render a second count as an "m:ss" clock label (e.g. 65 -> "1:05").
 * Minutes are not capped, so a full hour renders as "60:00".
 * @param {number} seconds - Elapsed time in seconds; fractions are floored.
 * @returns {string} The formatted "m:ss" string.
 */
function formatTime(seconds) {
	const wholeSeconds = Math.floor(seconds);
	const minutes = Math.floor(wholeSeconds / 60);
	const remainder = String(wholeSeconds % 60).padStart(2, "0");
	return `${minutes}:${remainder}`;
}
/**
 * Web Audio playback composable built on AudioContext + AudioBufferSourceNode,
 * with volume control, a 10-band peaking EQ, an optional analyser rAF loop,
 * and fade-in/fade-out on pause/resume.
 *
 * @param {object} [options]
 * @param {boolean} [options.analyser=false] - Start the byte-time-domain rAF loop immediately.
 * @param {number} [options.volume=80] - Initial volume, 0-100.
 * @param {number} [options.playbackRate=1] - Initial playback rate.
 * @param {boolean|{fade?: boolean, duration?: number}} [options.fadeOptions] - Default fade behavior for pause()/resume().
 * @returns {object} Reactive state, control methods, and event-hook registrars.
 */
function useAudioContextBuffer(options) {
	const { analyser = false, volume: defaultVolume = 80, playbackRate: defaultPlaybackRate = 1, fadeOptions } = options ?? {};
	// Center frequencies (Hz) of the 10 peaking EQ bands.
	const eqFrequencies = [
		32,
		64,
		125,
		250,
		500,
		1e3,
		2e3,
		4e3,
		8e3,
		16e3
	];
	const defaultFadeOptions = typeof fadeOptions === "boolean" ? {
		fade: true,
		duration: 1
	} : fadeOptions ?? {};
	const audioContext = new AudioContext();
	const audioBuffer = shallowRef();
	const bufferSource = shallowRef(null);
	const gainNode = audioContext.createGain();
	const analyserNode = audioContext.createAnalyser();
	analyserNode.fftSize = 2048;
	const bufferLength = analyserNode.frequencyBinCount;
	const uint8Array = new Uint8Array(bufferLength);
	const filters = eqFrequencies.map((freq) => {
		const filter = audioContext.createBiquadFilter();
		filter.type = "peaking";
		filter.frequency.value = freq;
		filter.Q.value = 1;
		filter.gain.value = 0; // flat (no boost/cut) by default
		return filter;
	});
	// Chain gain -> f0 -> f1 -> ... -> f9 and return the tail filter.
	function createFilterNode() {
		let tail = gainNode;
		filters.forEach((filter) => {
			tail.connect(filter);
			tail = filter;
		});
		return tail;
	}
	const filterNode = createFilterNode();
	// BUGFIX: the original also did `gainNode.connect(filterNode)`, wiring the
	// gain node straight into the LAST filter. That created a parallel path
	// bypassing EQ bands 0-8 and doubled the signal summed at the tail filter.
	// createFilterNode() already connects gainNode into the chain, so only the
	// tail -> analyser -> destination connections are needed here.
	filterNode.connect(analyserNode);
	analyserNode.connect(audioContext.destination);
	const onVolumeUpdateEv = createEventHook();
	const onRateUpdateEv = createEventHook();
	const onPlayingEv = createEventHook();
	const onPausedEv = createEventHook();
	const onEndedEv = createEventHook();
	const onTimeUpdateEv = createEventHook();
	const onDurationUpdateEv = createEventHook();
	const onByteTimeDomainDataEv = createEventHook();
	const status = ref(audioContext.state);
	audioContext.addEventListener("statechange", () => {
		status.value = audioContext.state;
	});
	const playing = ref(false);
	const paused = ref(false);
	const ended = ref(false);
	// AudioContext clock time at which the current source began playing.
	const startFlag = ref(0);
	// Offset (seconds into the buffer) at which playback was paused.
	const pauseFlag = ref(0);
	const currentTimeRaw = ref(0);
	const currentTime = computed(() => formatTime(currentTimeRaw.value));
	const currentTimeText = computed(() => formatTime(currentTimeRaw.value));
	const durationRaw = ref(0);
	const duration = computed(() => formatTime(durationRaw.value));
	const durationText = computed(() => formatTime(durationRaw.value));
	const progressRaw = ref(0);
	const progress = computed(() => Number(progressRaw.value.toFixed(0)));
	const cachedDuration = ref(0);
	const cachedDurationText = computed(() => formatTime(cachedDuration.value));
	const cachedProgress = ref(0);
	const volume = ref(defaultVolume);
	gainNode.gain.value = volume.value / 100;
	// True once destroy() has run; stops the rAF loops from re-scheduling.
	let disposed = false;
	// Pending pause/resume fade timer. Cleared on every pause()/resume() so a
	// quick pause -> resume can no longer fire a stale suspend() later (the
	// original never cleared it).
	let fadeTimer;
	// Handle of the pending updateDuration frame, so play()/resume() never
	// stack a second concurrent rAF loop on top of a running one.
	let durationRafId = 0;
	/**
	 * Clamp to 0-100 and apply as linear gain, cancelling any fade ramps.
	 * @param {number} volume$1 - Volume percentage (0-100).
	 */
	function setVolume(volume$1) {
		gainNode.gain.cancelScheduledValues(audioContext.currentTime);
		gainNode.gain.setValueAtTime(Math.max(0, Math.min(100, volume$1)) / 100, audioContext.currentTime);
		onVolumeUpdateEv.trigger();
	}
	watchEffect(() => {
		setVolume(volume.value);
	});
	const detune = ref(0);
	watchEffect(() => {
		if (bufferSource.value) bufferSource.value.detune.value = detune.value;
	});
	const playbackRate = ref(defaultPlaybackRate);
	/** Apply a playback rate to the current source (if any) and notify. */
	function setPlaybackRate(playbackRate$1) {
		if (bufferSource.value) bufferSource.value.playbackRate.value = playbackRate$1;
		onRateUpdateEv.trigger();
	}
	watchEffect(() => {
		setPlaybackRate(playbackRate.value);
	});
	// rAF loop feeding time-domain samples to onByteTimeDomainData subscribers.
	function getByteTimeDomainData() {
		if (disposed) return; // BUGFIX: loop used to run forever after unmount
		analyserNode.getByteTimeDomainData(uint8Array);
		onByteTimeDomainDataEv.trigger(uint8Array);
		requestAnimationFrame(getByteTimeDomainData);
	}
	// rAF loop deriving currentTime/progress from the AudioContext clock.
	function updateDuration() {
		if (disposed) return;
		const elapsed = audioContext.currentTime - startFlag.value;
		if (elapsed >= durationRaw.value) {
			playing.value = false;
			ended.value = true;
			onEndedEv.trigger();
			return;
		}
		currentTimeRaw.value = elapsed;
		progressRaw.value = elapsed / durationRaw.value * 100;
		onTimeUpdateEv.trigger();
		durationRafId = requestAnimationFrame(updateDuration);
	}
	// Restart the progress loop without leaving a previous frame queued
	// (the original stacked a new loop on every play()/resume()).
	function startDurationLoop() {
		cancelAnimationFrame(durationRafId);
		updateDuration();
	}
	// Stop and release the current source WITHOUT firing its ended handler:
	// stop() dispatches `onended` asynchronously, which used to clobber the
	// playing/ended state of the source created right after (play/seek),
	// reporting a spurious "ended" while audio was actually playing.
	function stopCurrentSource() {
		if (bufferSource.value) {
			bufferSource.value.onended = null;
			bufferSource.value.stop();
			bufferSource.value = null;
		}
	}
	// Build a source for `audioBuffer$1`, pre-wired into the gain node and
	// carrying the current detune/playbackRate settings.
	function createBufferSourceNode(audioBuffer$1) {
		const bufferSource$1 = audioContext.createBufferSource();
		bufferSource$1.buffer = audioBuffer$1;
		bufferSource$1.detune.value = detune.value;
		bufferSource$1.playbackRate.value = playbackRate.value;
		bufferSource$1.connect(gainNode);
		bufferSource$1.onended = () => {
			playing.value = false;
			ended.value = true;
			onEndedEv.trigger();
		};
		return bufferSource$1;
	}
	/**
	 * Seek to an absolute position by restarting the source at that offset.
	 * @param {number} time - Target position in seconds; clamped to [0, duration].
	 */
	function setCurrentTime(time) {
		if (audioBuffer.value) {
			const targetDuration = Math.max(0, Math.min(time, durationRaw.value));
			stopCurrentSource();
			bufferSource.value = createBufferSourceNode(audioBuffer.value);
			bufferSource.value.start(0, targetDuration);
			startFlag.value = audioContext.currentTime - targetDuration;
			currentTimeRaw.value = targetDuration;
			if (!playing.value) {
				// Keep the paused position so resume() restarts from the seek point.
				pauseFlag.value = targetDuration;
				audioContext.suspend();
			}
		}
	}
	/** Seek by percentage (0-100) of the track duration. */
	function setProgress(val) {
		if (audioBuffer.value) setCurrentTime(val / 100 * durationRaw.value);
	}
	/**
	 * Decode raw audio bytes and start playback from the beginning.
	 * @param {ArrayBuffer|ArrayBufferView} arrayBuffer - Encoded audio data.
	 * @throws Re-throws decode failures after logging them.
	 */
	async function playBuffer(arrayBuffer) {
		try {
			// Accept either a plain ArrayBuffer or a typed-array view over one
			// (the original read `.buffer` unconditionally and crashed on
			// plain ArrayBuffers).
			const data = arrayBuffer instanceof ArrayBuffer ? arrayBuffer : arrayBuffer.buffer;
			audioBuffer.value = await audioContext.decodeAudioData(data);
			cachedDuration.value = audioBuffer.value.duration;
			cachedProgress.value = 100;
			onDurationUpdateEv.trigger();
			play();
		} catch (error) {
			console.error("useAudioContextBuffer:playBuffer error:", error);
			throw error;
		}
	}
	/** (Re)start playback of the decoded buffer from position 0. */
	function play() {
		if (audioBuffer.value) {
			stopCurrentSource();
			bufferSource.value = createBufferSourceNode(audioBuffer.value);
			bufferSource.value.start(0);
			playing.value = true;
			paused.value = false;
			ended.value = false;
			durationRaw.value = audioBuffer.value.duration;
			startFlag.value = audioContext.currentTime;
			pauseFlag.value = 0;
			if (audioContext.state === "suspended") audioContext.resume();
			onPlayingEv.trigger();
			startDurationLoop();
		}
	}
	/**
	 * Pause playback, optionally fading the gain to 0 first.
	 * @param {{fade?: boolean, duration?: number}} [options$1] - Overrides the hook-level fade defaults.
	 */
	function pause(options$1) {
		const { fade = true, duration: duration$1 = 1 } = options$1 ?? defaultFadeOptions;
		clearTimeout(fadeTimer); // kill any in-flight fade from a prior call
		if (fade) {
			const currentTime$1 = audioContext.currentTime;
			gainNode.gain.cancelScheduledValues(currentTime$1);
			gainNode.gain.setValueAtTime(gainNode.gain.value, currentTime$1);
			gainNode.gain.linearRampToValueAtTime(0, currentTime$1 + duration$1);
			// Suspend only once the fade-out ramp has finished.
			fadeTimer = setTimeout(() => {
				audioContext.suspend();
				pauseFlag.value = audioContext.currentTime - startFlag.value;
				playing.value = false;
				paused.value = true;
				onPausedEv.trigger();
			}, duration$1 * 1e3);
			return;
		}
		audioContext.suspend();
		pauseFlag.value = audioContext.currentTime - startFlag.value;
		playing.value = false;
		paused.value = true;
		onPausedEv.trigger();
	}
	/**
	 * Resume a paused track (or restart from 0 if it already ended),
	 * optionally fading the gain back in.
	 * @param {{fade?: boolean, duration?: number}} [options$1] - Overrides the hook-level fade defaults.
	 */
	function resume(options$1) {
		if (ended.value) {
			play();
			return;
		}
		const { fade = true, duration: duration$1 = 1 } = options$1 ?? defaultFadeOptions;
		clearTimeout(fadeTimer); // kill a pending fade-out suspend, if any
		if (fade) {
			const currentTime$1 = audioContext.currentTime;
			gainNode.gain.cancelScheduledValues(currentTime$1);
			gainNode.gain.setValueAtTime(0, currentTime$1);
			gainNode.gain.linearRampToValueAtTime(volume.value / 100, currentTime$1 + duration$1);
			// BUGFIX: resume the context immediately so the fade-in is audible.
			// The original resumed only inside a setTimeout, so the context
			// stayed suspended (clock frozen) for the whole ramp and playback
			// restarted a full fade-duration late.
			audioContext.resume();
			startFlag.value = audioContext.currentTime - pauseFlag.value;
			playing.value = true;
			paused.value = false;
			onPlayingEv.trigger();
			startDurationLoop();
			return;
		}
		audioContext.resume();
		startFlag.value = audioContext.currentTime - pauseFlag.value;
		playing.value = true;
		paused.value = false;
		onPlayingEv.trigger();
		startDurationLoop();
	}
	/** Stop playback and reset position state; fires onEnded via the source. */
	function stop() {
		// `onended` is deliberately left attached here: stopping is a real
		// end-of-playback, so subscribers still get their onEnded notification.
		bufferSource.value?.stop();
		pauseFlag.value = 0;
		startFlag.value = 0;
		currentTimeRaw.value = 0;
		playing.value = false;
		paused.value = false;
		ended.value = true;
	}
	/** Pause when playing, otherwise resume. */
	function toggle() {
		if (playing.value) pause();
		else resume();
	}
	/** @returns {Uint8Array} A fresh snapshot of the frequency-domain data. */
	function getFrequencyData() {
		const frequencyData = new Uint8Array(analyserNode.frequencyBinCount);
		analyserNode.getByteFrequencyData(frequencyData);
		return frequencyData;
	}
	/** Set the gain (dB) of EQ band `index`; out-of-range indices are ignored. */
	function setEQFrequency(index, value) {
		if (index >= 0 && index < filters.length) filters[index].gain.value = value;
	}
	/** @returns {number} The gain (dB) of EQ band `index`, or 0 if out of range. */
	function getEQFrequency(index) {
		if (index >= 0 && index < filters.length) return filters[index].gain.value;
		return 0;
	}
	/** @returns {{frequency: number, gain: number}[]} Every band with its current gain. */
	function getEQFrequencies() {
		return eqFrequencies.map((freq, index) => ({
			frequency: freq,
			gain: getEQFrequency(index)
		}));
	}
	/** Tear down: stop playback, halt rAF loops/timers, close the context. */
	function destroy() {
		disposed = true;
		clearTimeout(fadeTimer);
		cancelAnimationFrame(durationRafId);
		stop();
		bufferSource.value = null;
		audioContext.close();
	}
	if (analyser) getByteTimeDomainData();
	onUnmounted(() => {
		destroy();
	});
	return {
		eqFrequencies,
		audioContext,
		audioBuffer,
		bufferSource,
		gainNode,
		analyserNode,
		filters,
		filterNode,
		status,
		volume,
		setVolume,
		playbackRate,
		setPlaybackRate,
		detune,
		playing,
		paused,
		ended,
		startFlag,
		pauseFlag,
		currentTimeRaw,
		currentTime,
		currentTimeText,
		setCurrentTime,
		durationRaw,
		duration,
		durationText,
		progressRaw,
		progress,
		setProgress,
		cachedDuration,
		cachedDurationText,
		cachedProgress,
		playBuffer,
		play,
		pause,
		resume,
		stop,
		toggle,
		getFrequencyData,
		setEQFrequency,
		getEQFrequency,
		getEQFrequencies,
		destroy,
		onVolumeUpdate: onVolumeUpdateEv.on,
		onRateUpdate: onRateUpdateEv.on,
		onTimeUpdate: onTimeUpdateEv.on,
		onDurationUpdate: onDurationUpdateEv.on,
		onPlaying: onPlayingEv.on,
		onPaused: onPausedEv.on,
		onEnded: onEndedEv.on,
		onByteTimeDomainData: onByteTimeDomainDataEv.on
	};
}
//#endregion
export { useAudioContextBuffer };