// remotion — Make videos programmatically
// 132 lines (131 loc) • 6.75 kB — JavaScript
;
Object.defineProperty(exports, "__esModule", { value: true });
exports.AudioForRendering = void 0;
const jsx_runtime_1 = require("react/jsx-runtime");
const react_1 = require("react");
const RenderAssetManager_js_1 = require("../RenderAssetManager.js");
const SequenceContext_js_1 = require("../SequenceContext.js");
const absolute_src_js_1 = require("../absolute-src.js");
const delay_render_js_1 = require("../delay-render.js");
const random_js_1 = require("../random.js");
const timeline_position_state_js_1 = require("../timeline-position-state.js");
const use_current_frame_js_1 = require("../use-current-frame.js");
const volume_prop_js_1 = require("../volume-prop.js");
const use_audio_frame_js_1 = require("./use-audio-frame.js");
/**
 * Render-time implementation of Remotion's <Audio> component.
 *
 * During a render it does not actually play audio; instead it registers the
 * audio as a render asset (src, volume, frame timing, playback rate) so the
 * renderer can mix it in later. A real <audio> tag is only emitted when the
 * caller attached a ref or when the duration must be measured (e.g. for
 * looping), in which case a delayRender handle blocks until metadata loads.
 *
 * @param props Audio props; Remotion-specific ones are stripped before the
 *              remainder is spread onto the native <audio> element.
 * @param ref   Forwarded ref exposing the underlying HTMLAudioElement.
 */
const AudioForRenderingRefForwardingFunction = (props, ref) => {
    // Holds the native <audio> element when one is rendered at all.
    const audioRef = (0, react_1.useRef)(null);
    // Separate Remotion-only props from the ones forwarded to the DOM tag.
    const { volume: volumeProp, playbackRate, allowAmplificationDuringRender, onDuration, toneFrequency, _remotionInternalNeedsDurationCalculation, _remotionInternalNativeLoopPassed, acceptableTimeShiftInSeconds, name, onNativeError, delayRenderRetries, delayRenderTimeoutInMilliseconds, loopVolumeCurveBehavior, pauseWhenBuffering, ...nativeProps } = props;
    const absoluteFrame = (0, timeline_position_state_js_1.useTimelinePosition)();
    const volumePropFrame = (0, use_audio_frame_js_1.useFrameForVolumeProp)(loopVolumeCurveBehavior ?? 'repeat');
    const frame = (0, use_current_frame_js_1.useCurrentFrame)();
    const sequenceContext = (0, react_1.useContext)(SequenceContext_js_1.SequenceContext);
    const { registerRenderAsset, unregisterRenderAsset } = (0, react_1.useContext)(RenderAssetManager_js_1.RenderAssetManager);
    // Deterministic asset ID: derived only from src + sequence position so it
    // is unique per placement yet identical across all render threads.
    const id = (0, react_1.useMemo)(() => `audio-${(0, random_js_1.random)(props.src ?? '')}-${sequenceContext?.relativeFrom}-${sequenceContext?.cumulatedFrom}-${sequenceContext?.durationInFrames}`, [
        props.src,
        sequenceContext?.relativeFrom,
        sequenceContext?.cumulatedFrom,
        sequenceContext?.durationInFrames,
    ]);
    // Resolve the (possibly frame-dependent) volume for the current frame.
    const volume = (0, volume_prop_js_1.evaluateVolume)({
        volume: volumeProp,
        frame: volumePropFrame,
        mediaVolume: 1,
    });
    (0, react_1.useImperativeHandle)(ref, () => audioRef.current, []);
    // Register this frame's slice of audio with the render asset manager.
    (0, react_1.useEffect)(() => {
        if (!props.src) {
            throw new Error('No src passed');
        }
        // Nothing to register when audio is globally disabled, muted, or silent.
        if (!window.remotion_audioEnabled) {
            return;
        }
        if (props.muted) {
            return;
        }
        if (volume <= 0) {
            return;
        }
        registerRenderAsset({
            type: 'audio',
            src: (0, absolute_src_js_1.getAbsoluteSrc)(props.src),
            id,
            frame: absoluteFrame,
            volume,
            mediaFrame: frame,
            playbackRate: props.playbackRate ?? 1,
            toneFrequency: toneFrequency ?? null,
            // Trim offset: a negative relativeFrom means the audio started
            // before the sequence window.
            audioStartFrame: Math.max(0, -(sequenceContext?.relativeFrom ?? 0)),
        });
        return () => unregisterRenderAsset(id);
    }, [
        props.muted,
        props.src,
        registerRenderAsset,
        absoluteFrame,
        id,
        unregisterRenderAsset,
        volume,
        volumePropFrame,
        frame,
        playbackRate,
        props.playbackRate,
        toneFrequency,
        sequenceContext?.relativeFrom,
    ]);
    const { src } = props;
    // The <audio> tag is only rendered if the duration needs to be calculated
    // for the `loop` attribute to work, or if the user assigns a ref to it.
    const needsToRenderAudioTag = ref || _remotionInternalNeedsDurationCalculation;
    // Whenever the audio source switches, take out a fresh delayRender handle
    // that is released once the element's metadata (duration) is available.
    (0, react_1.useLayoutEffect)(() => {
        if (window.process?.env?.NODE_ENV === 'test') {
            return;
        }
        if (!needsToRenderAudioTag) {
            return;
        }
        const newHandle = (0, delay_render_js_1.delayRender)('Loading <Audio> duration with src=' + src, {
            retries: delayRenderRetries ?? undefined,
            timeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined,
        });
        const { current } = audioRef;
        const didLoad = () => {
            if (current?.duration) {
                onDuration(current.src, current.duration);
            }
            (0, delay_render_js_1.continueRender)(newHandle);
        };
        if (current?.duration) {
            // Metadata is already available — report it and release immediately.
            onDuration(current.src, current.duration);
            (0, delay_render_js_1.continueRender)(newHandle);
        }
        else {
            current?.addEventListener('loadedmetadata', didLoad, { once: true });
        }
        // If the tag unmounts, clear the pending handle because the metadata
        // is never going to load.
        return () => {
            current?.removeEventListener('loadedmetadata', didLoad);
            (0, delay_render_js_1.continueRender)(newHandle);
        };
    }, [
        src,
        onDuration,
        needsToRenderAudioTag,
        delayRenderRetries,
        delayRenderTimeoutInMilliseconds,
    ]);
    if (!needsToRenderAudioTag) {
        return null;
    }
    return (0, jsx_runtime_1.jsx)("audio", { ref: audioRef, ...nativeProps, onError: onNativeError });
};
// Public export: the ref-forwarding render-time <Audio> implementation, so
// consumers can obtain a handle to the underlying <audio> element.
exports.AudioForRendering = (0, react_1.forwardRef)(AudioForRenderingRefForwardingFunction);