// remotion — Make videos programmatically
// (page header from extraction; original file: 155 lines (154 loc), 8.16 kB, JavaScript)
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.AudioForPreview = void 0;
const jsx_runtime_1 = require("react/jsx-runtime");
const react_1 = require("react");
const SequenceContext_js_1 = require("../SequenceContext.js");
const SequenceManager_js_1 = require("../SequenceManager.js");
const get_cross_origin_value_js_1 = require("../get-cross-origin-value.js");
const log_level_context_js_1 = require("../log-level-context.js");
const prefetch_js_1 = require("../prefetch.js");
const random_js_1 = require("../random.js");
const use_amplification_js_1 = require("../use-amplification.js");
const use_media_in_timeline_js_1 = require("../use-media-in-timeline.js");
const use_media_playback_js_1 = require("../use-media-playback.js");
const volume_position_state_js_1 = require("../volume-position-state.js");
const volume_prop_js_1 = require("../volume-prop.js");
const shared_audio_tags_js_1 = require("./shared-audio-tags.js");
const use_audio_frame_js_1 = require("./use-audio-frame.js");
// Preview-mode <Audio> render function (wrapped in forwardRef below).
// Registers the media in the Remotion timeline, keeps playback in sync with
// the current frame, applies the evaluated volume, and either renders a real
// <audio> tag or — when audio tags are pre-mounted via SharedAudioContext —
// returns null (presumably the pre-mounted shared tag plays instead; confirm
// against shared-audio-tags.js).
const AudioForDevelopmentForwardRefFunction = (props, ref) => {
var _a;
// Capture the pre-mount flag once on mount; it selects which element tree is
// rendered, so flipping it at runtime is rejected below.
const [initialShouldPreMountAudioElements] = (0, react_1.useState)(props.shouldPreMountAudioTags);
if (props.shouldPreMountAudioTags !== initialShouldPreMountAudioElements) {
throw new Error('Cannot change the behavior for pre-mounting audio tags dynamically.');
}
const logLevel = (0, log_level_context_js_1.useLogLevel)();
// Split Remotion-specific props from the remainder; `nativeProps` is spread
// onto the DOM <audio> element (and into the shared-audio props) as-is.
const { volume, muted, playbackRate, shouldPreMountAudioTags, src, onDuration, acceptableTimeShiftInSeconds, _remotionInternalNeedsDurationCalculation, _remotionInternalNativeLoopPassed, _remotionInternalStack, allowAmplificationDuringRender, name, pauseWhenBuffering, showInTimeline, loopVolumeCurveBehavior, stack, crossOrigin, delayRenderRetries, delayRenderTimeoutInMilliseconds, toneFrequency, useWebAudioApi, onError, onNativeError, ...nativeProps } = props;
// Typecheck that we are not accidentially passing unrecognized props
// to the DOM
// (compile-time artifact from the TypeScript source; the branch is dead at runtime)
const _propsValid = true;
if (!_propsValid) {
throw new Error('typecheck error');
}
const [mediaVolume] = (0, volume_position_state_js_1.useMediaVolumeState)();
const [mediaMuted] = (0, volume_position_state_js_1.useMediaMutedState)();
// Frame used to evaluate a function-valued `volume` prop; defaults to
// 'repeat' looping behavior for the volume curve.
const volumePropFrame = (0, use_audio_frame_js_1.useFrameForVolumeProp)(loopVolumeCurveBehavior !== null && loopVolumeCurveBehavior !== void 0 ? loopVolumeCurveBehavior : 'repeat');
// Map of sequence-ids hidden via the Studio timeline visibility toggle.
const { hidden } = (0, react_1.useContext)(SequenceManager_js_1.SequenceVisibilityToggleContext);
if (!src) {
throw new TypeError("No 'src' was passed to <Audio>.");
}
// If the asset was prefetched, substitute the prefetched (blob) URL.
const preloadedSrc = (0, prefetch_js_1.usePreload)(src);
const sequenceContext = (0, react_1.useContext)(SequenceContext_js_1.SequenceContext);
// Stable random id used as the key for this media's timeline entry.
const [timelineId] = (0, react_1.useState)(() => String(Math.random()));
const isSequenceHidden = (_a = hidden[timelineId]) !== null && _a !== void 0 ? _a : false;
// Resolve the (possibly frame-dependent) volume prop against the global
// media volume for the current volume frame.
const userPreferredVolume = (0, volume_prop_js_1.evaluateVolume)({
frame: volumePropFrame,
volume,
mediaVolume,
});
const crossOriginValue = (0, get_cross_origin_value_js_1.getCrossOriginValue)({
crossOrigin,
requestsVideoFrame: false,
});
// Props handed to the <audio> element / shared audio pool. The element is
// muted whenever any mute source applies or the effective volume is zero.
const propsToPass = (0, react_1.useMemo)(() => {
return {
muted: muted || mediaMuted || isSequenceHidden || userPreferredVolume <= 0,
src: preloadedSrc,
loop: _remotionInternalNativeLoopPassed,
crossOrigin: crossOriginValue,
...nativeProps,
};
}, [
_remotionInternalNativeLoopPassed,
isSequenceHidden,
mediaMuted,
muted,
nativeProps,
preloadedSrc,
userPreferredVolume,
crossOriginValue,
]);
// Generate a string that's as unique as possible for this asset
// but at the same time deterministic. We use it to combat strict mode issues.
const id = (0, react_1.useMemo)(() => `audio-${(0, random_js_1.random)(src !== null && src !== void 0 ? src : '')}-${sequenceContext === null || sequenceContext === void 0 ? void 0 : sequenceContext.relativeFrom}-${sequenceContext === null || sequenceContext === void 0 ? void 0 : sequenceContext.cumulatedFrom}-${sequenceContext === null || sequenceContext === void 0 ? void 0 : sequenceContext.durationInFrames}-muted:${props.muted}-loop:${props.loop}`, [
src,
sequenceContext === null || sequenceContext === void 0 ? void 0 : sequenceContext.relativeFrom,
sequenceContext === null || sequenceContext === void 0 ? void 0 : sequenceContext.cumulatedFrom,
sequenceContext === null || sequenceContext === void 0 ? void 0 : sequenceContext.durationInFrames,
props.muted,
props.loop,
]);
const context = (0, react_1.useContext)(shared_audio_tags_js_1.SharedAudioContext);
if (!context) {
throw new Error('SharedAudioContext not found');
}
// Obtain (or create) the audio element from the shared pool, keyed by `id`.
const { el: audioRef, mediaElementSourceNode } = (0, shared_audio_tags_js_1.useSharedAudio)({
aud: propsToPass,
audioId: id,
premounting: Boolean(sequenceContext === null || sequenceContext === void 0 ? void 0 : sequenceContext.premounting),
});
// Register this media in the Studio timeline (name, volume lane, stack).
(0, use_media_in_timeline_js_1.useMediaInTimeline)({
volume,
mediaVolume,
mediaRef: audioRef,
src,
mediaType: 'audio',
playbackRate: playbackRate !== null && playbackRate !== void 0 ? playbackRate : 1,
displayName: name !== null && name !== void 0 ? name : null,
id: timelineId,
stack: _remotionInternalStack,
showInTimeline,
premountDisplay: null,
onAutoPlayError: null,
isPremounting: Boolean(sequenceContext === null || sequenceContext === void 0 ? void 0 : sequenceContext.premounting),
});
// putting playback before useVolume
// because volume looks at playbackrate
(0, use_media_playback_js_1.useMediaPlayback)({
mediaRef: audioRef,
src,
mediaType: 'audio',
playbackRate: playbackRate !== null && playbackRate !== void 0 ? playbackRate : 1,
onlyWarnForMediaSeekingError: false,
acceptableTimeshift: acceptableTimeShiftInSeconds !== null && acceptableTimeShiftInSeconds !== void 0 ? acceptableTimeShiftInSeconds : null,
isPremounting: Boolean(sequenceContext === null || sequenceContext === void 0 ? void 0 : sequenceContext.premounting),
pauseWhenBuffering,
onAutoPlayError: null,
});
// Apply the evaluated volume, optionally through the Web Audio API
// (amplification above 1.0 — see use-amplification.js); defaults to off.
(0, use_amplification_js_1.useVolume)({
logLevel,
mediaRef: audioRef,
source: mediaElementSourceNode,
volume: userPreferredVolume,
shouldUseWebAudioApi: useWebAudioApi !== null && useWebAudioApi !== void 0 ? useWebAudioApi : false,
});
// Expose the underlying HTMLAudioElement to the caller's ref.
(0, react_1.useImperativeHandle)(ref, () => {
return audioRef.current;
}, [audioRef]);
// Keep the latest onDuration callback in a ref so the effect below does not
// need to re-subscribe when the callback identity changes.
const currentOnDurationCallback = (0, react_1.useRef)(onDuration);
currentOnDurationCallback.current = onDuration;
// Report the media duration: immediately if metadata is already loaded
// (duration is truthy), otherwise once 'loadedmetadata' fires.
(0, react_1.useEffect)(() => {
var _a;
const { current } = audioRef;
if (!current) {
return;
}
if (current.duration) {
(_a = currentOnDurationCallback.current) === null || _a === void 0 ? void 0 : _a.call(currentOnDurationCallback, current.src, current.duration);
return;
}
const onLoadedMetadata = () => {
var _a;
(_a = currentOnDurationCallback.current) === null || _a === void 0 ? void 0 : _a.call(currentOnDurationCallback, current.src, current.duration);
};
current.addEventListener('loadedmetadata', onLoadedMetadata);
return () => {
current.removeEventListener('loadedmetadata', onLoadedMetadata);
};
}, [audioRef, src]);
// With pre-mounted audio tags, nothing is rendered here — the shared tag
// pool owns the element (see useSharedAudio above).
if (initialShouldPreMountAudioElements) {
return null;
}
return ((0, jsx_runtime_1.jsx)("audio", { ref: audioRef, preload: "metadata", crossOrigin: crossOriginValue, ...propsToPass }));
};
exports.AudioForPreview = (0, react_1.forwardRef)(AudioForDevelopmentForwardRefFunction);