@remotion/media-utils
Version:
Utilities for working with media files
167 lines (166 loc) • 7.07 kB
JavaScript
;
Object.defineProperty(exports, "__esModule", { value: true });
exports.useWindowedAudioData = void 0;
const react_1 = require("react");
const remotion_1 = require("remotion");
const combine_float32_arrays_1 = require("./combine-float32-arrays");
const get_partial_wave_data_1 = require("./get-partial-wave-data");
const is_remote_asset_1 = require("./is-remote-asset");
const probe_wave_file_1 = require("./probe-wave-file");
/**
 * React hook that loads waveform data for a .wav file in a sliding window
 * around the current frame, keeping at most ~3 windows (previous / current /
 * next) of samples in memory instead of the whole file.
 *
 * @param {object} options
 * @param {string} options.src - URL or path of the .wav file.
 * @param {number} options.frame - Current frame of the composition.
 * @param {number} options.fps - Frames per second, used to map frame -> seconds.
 * @param {number} options.windowInSeconds - Size of one cached window; must not change after mount.
 * @param {number} [options.channelIndex=0] - Which audio channel to extract.
 * @returns {{audioData: object|null, dataOffsetInSeconds: number}}
 *   `audioData` is null until the probe and the needed windows have loaded;
 *   `dataOffsetInSeconds` is the start time of the first loaded window.
 */
const useWindowedAudioData = ({ src, frame, fps, windowInSeconds, channelIndex = 0, }) => {
// Guards against calling setState after unmount from async callbacks.
const isMounted = (0, react_1.useRef)(true);
// Parsed .wav header metadata (sample rate, data offset, file size, ...);
// null until probeWaveFile resolves.
const [waveProbe, setWaveProbe] = (0, react_1.useState)(null);
// windowIndex -> sample data for that window (null once evicted).
const [waveFormMap, setWaveformMap] = (0, react_1.useState)({});
// windowIndex -> in-flight AbortController (null once settled); lets us
// cancel fetches for windows that have scrolled out of range.
const requests = (0, react_1.useRef)({});
// Capture the first-render value so we can detect (and forbid) changes:
// the window size is baked into every cache key and byte-range calculation.
const [initialWindowInSeconds] = (0, react_1.useState)(windowInSeconds);
if (windowInSeconds !== initialWindowInSeconds) {
throw new Error('windowInSeconds cannot be changed dynamically');
}
(0, react_1.useEffect)(() => {
isMounted.current = true;
return () => {
isMounted.current = false;
};
}, []);
// Probes the .wav header. Blocks Remotion's render (delayRender) until the
// metadata is available, the fetch is aborted, or an error cancels the render.
const fetchMetadata = (0, react_1.useCallback)(async (signal) => {
const handle = (0, remotion_1.delayRender)(`Waiting for audio metadata with src="${src}" to be loaded`);
// On abort, release the render handle so an unmounted component
// doesn't block rendering forever.
const cont = () => {
(0, remotion_1.continueRender)(handle);
};
signal.addEventListener('abort', cont, { once: true });
try {
const data = await (0, probe_wave_file_1.probeWaveFile)(src);
if (isMounted.current) {
setWaveProbe(data);
}
// NOTE(review): if the signal aborted before this point, continueRender
// has already been called for this handle via `cont`; presumably a second
// call on the same handle is harmless — TODO confirm against Remotion docs.
(0, remotion_1.continueRender)(handle);
}
catch (err) {
// Any probe failure is fatal for the render.
(0, remotion_1.cancelRender)(err);
}
finally {
signal.removeEventListener('abort', cont);
}
}, [src]);
// Kick off (and re-run on src change) the metadata probe; abort on cleanup.
(0, react_1.useLayoutEffect)(() => {
const controller = new AbortController();
fetchMetadata(controller.signal);
return () => {
controller.abort();
};
}, [fetchMetadata]);
const currentTime = frame / fps;
const currentWindowIndex = Math.floor(currentTime / windowInSeconds);
// The (ordered) window indices to keep loaded: previous, current, next —
// clamped to [0, maxWindowIndex]. Order matters: the arrays are concatenated
// in this order to form a contiguous sample buffer below.
const windowsToFetch = (0, react_1.useMemo)(() => {
if (!waveProbe) {
return [];
}
const maxWindowIndex = Math.floor(
// If an audio is exactly divisible by windowInSeconds, we need to
// subtract 0.000000000001 to avoid fetching an extra window.
waveProbe.durationInSeconds / windowInSeconds - 0.000000000001);
// needs to be in order because we rely on the concatenation below
return [
currentWindowIndex === 0 ? null : currentWindowIndex - 1,
currentWindowIndex,
currentWindowIndex + 1 > maxWindowIndex ? null : currentWindowIndex + 1,
]
.filter((i) => i !== null)
.filter((i) => i >= 0);
}, [currentWindowIndex, waveProbe, windowInSeconds]);
// Fetches one window's worth of samples via a ranged request and stores it
// in the map, evicting (nulling) windows that are no longer needed.
const fetchAndSetWaveformData = (0, react_1.useCallback)(async (windowIndex) => {
if (!waveProbe) {
throw new Error('Wave probe is not loaded yet');
}
// Register the controller so the cleanup effect below can abort this
// request if the window scrolls out of range mid-flight.
const controller = new AbortController();
requests.current[windowIndex] = controller;
const partialWaveData = await (0, get_partial_wave_data_1.getPartialWaveData)({
bitsPerSample: waveProbe.bitsPerSample,
blockAlign: waveProbe.blockAlign,
channelIndex,
dataOffset: waveProbe.dataOffset,
fileSize: waveProbe.fileSize,
fromSeconds: windowIndex * windowInSeconds,
sampleRate: waveProbe.sampleRate,
src,
toSeconds: (windowIndex + 1) * windowInSeconds,
signal: controller.signal,
});
requests.current[windowIndex] = null;
setWaveformMap((prev) => {
const entries = Object.keys(prev);
// Stale windows are set to null (not deleted) so the keys remain but
// the sample buffers can be garbage-collected.
const windowsToClear = entries.filter((entry) => !windowsToFetch.includes(Number(entry)));
return {
...prev,
// Delete windows that are not needed anymore
...windowsToClear.reduce((acc, key) => {
acc[key] = null;
return acc;
}, {}),
// Add the new window
[windowIndex]: partialWaveData,
};
});
}, [channelIndex, src, waveProbe, windowInSeconds, windowsToFetch]);
// Whenever the needed windows change: abort fetches for out-of-range
// windows, then fetch all currently needed windows.
(0, react_1.useEffect)(() => {
if (!waveProbe) {
return;
}
const windowsToClear = Object.keys(requests.current).filter((entry) => !windowsToFetch.includes(Number(entry)));
for (const windowIndex of windowsToClear) {
const controller = requests.current[windowIndex];
if (controller) {
controller.abort();
requests.current[windowIndex] = null;
}
}
Promise.all(windowsToFetch.map((windowIndex) => {
return fetchAndSetWaveformData(windowIndex);
})).catch((err) => {
var _a, _b, _c, _d, _e;
// Cancellation is expected when windows scroll out of range — swallow
// it; everything else is fatal. The three checks cover differing
// abort-error shapes across browsers (stack vs. message).
if ((_a = err.stack) === null || _a === void 0 ? void 0 : _a.includes('Cancelled')) {
return;
}
if ((_c = (_b = err.stack) === null || _b === void 0 ? void 0 : _b.toLowerCase()) === null || _c === void 0 ? void 0 : _c.includes('aborted')) {
return;
}
// firefox
if ((_e = (_d = err.message) === null || _d === void 0 ? void 0 : _d.toLowerCase()) === null || _e === void 0 ? void 0 : _e.includes('aborted')) {
return;
}
(0, remotion_1.cancelRender)(err);
});
}, [fetchAndSetWaveformData, waveProbe, windowsToFetch]);
// Concatenate the loaded windows into a single single-channel buffer, or
// null while the probe or any needed window is still missing.
const currentAudioData = (0, react_1.useMemo)(() => {
if (!waveProbe) {
return null;
}
if (windowsToFetch.some((i) => !waveFormMap[i])) {
return null;
}
const windows = windowsToFetch.map((i) => waveFormMap[i]);
const data = (0, combine_float32_arrays_1.combineFloat32Arrays)(windows);
return {
channelWaveforms: [data],
durationInSeconds: waveProbe.durationInSeconds,
isRemote: (0, is_remote_asset_1.isRemoteAsset)(src),
numberOfChannels: 1,
// Fresh id per recomputation so consumers can detect data changes.
resultId: String(Math.random()),
sampleRate: waveProbe.sampleRate,
};
}, [src, waveFormMap, waveProbe, windowsToFetch]);
// Block Remotion's render while the audio data for the current frame is
// not yet available; release the handle once it is (or on cleanup).
(0, react_1.useLayoutEffect)(() => {
if (currentAudioData) {
return;
}
const handle = (0, remotion_1.delayRender)(`Waiting for audio data with src="${src}" to be loaded`);
return () => {
(0, remotion_1.continueRender)(handle);
};
}, [currentAudioData, src]);
return {
audioData: currentAudioData,
// NOTE(review): while windowsToFetch is empty (probe not yet loaded),
// windowsToFetch[0] is undefined and this evaluates to NaN — verify
// callers only consume it once audioData is non-null.
dataOffsetInSeconds: windowsToFetch[0] * windowInSeconds,
};
};
exports.useWindowedAudioData = useWindowedAudioData;