// remotion — Make videos programmatically
// Compiled (transpiled) CommonJS output; original file: 206 lines (205 loc), 10.6 kB, JavaScript
"use strict"
;
Object.defineProperty(exports, "__esModule", { value: true });
exports.VideoForPreview = void 0;
const jsx_runtime_1 = require("react/jsx-runtime");
const react_1 = require("react");
const SequenceContext_js_1 = require("../SequenceContext.js");
const SequenceManager_js_1 = require("../SequenceManager.js");
const shared_audio_tags_js_1 = require("../audio/shared-audio-tags.js");
const shared_element_source_node_js_1 = require("../audio/shared-element-source-node.js");
const use_audio_frame_js_1 = require("../audio/use-audio-frame.js");
const get_cross_origin_value_js_1 = require("../get-cross-origin-value.js");
const log_level_context_js_1 = require("../log-level-context.js");
const playback_logging_js_1 = require("../playback-logging.js");
const prefetch_js_1 = require("../prefetch.js");
const use_amplification_js_1 = require("../use-amplification.js");
const use_media_in_timeline_js_1 = require("../use-media-in-timeline.js");
const use_media_playback_js_1 = require("../use-media-playback.js");
const use_video_config_js_1 = require("../use-video-config.js");
const version_js_1 = require("../version.js");
const volume_position_state_js_1 = require("../volume-position-state.js");
const volume_prop_js_1 = require("../volume-prop.js");
const emit_video_frame_js_1 = require("./emit-video-frame.js");
const video_fragment_js_1 = require("./video-fragment.js");
// The <video> implementation used for *preview* playback (Studio / Player),
// as opposed to server-side rendering. It wires a native <video> element into
// Remotion's infrastructure: timeline registration, frame-synchronized
// playback, volume evaluation (optionally via the Web Audio API), prefetching,
// and playback logging.
//
// NOTE(review): this is compiled output — `_a`/`_b`/`_c` are the compiler's
// temporaries for optional chaining / nullish coalescing, and the inline
// `x !== null && x !== void 0 ? x : y` patterns are down-leveled `??`.
const VideoForDevelopmentRefForwardingFunction = (props, ref) => {
  var _a, _b, _c;
  // Shared AudioContext provided higher up the tree; required so that audio
  // from many media tags can be routed through a single WebAudio graph.
  const context = (0, react_1.useContext)(shared_audio_tags_js_1.SharedAudioContext);
  if (!context) {
    throw new Error('SharedAudioContext not found');
  }
  const videoRef = (0, react_1.useRef)(null);
  // Lazily create a shared MediaElementSource node for this <video>, but only
  // if a WebAudio context actually exists (it can be null, e.g. on the server).
  const sharedSource = (0, react_1.useMemo)(() => {
    if (!context.audioContext) {
      return null;
    }
    return (0, shared_element_source_node_js_1.makeSharedElementSourceNode)({
      audioContext: context.audioContext,
      ref: videoRef,
    });
  }, [context.audioContext]);
  // Split Remotion-specific props from the native <video> attributes that are
  // spread onto the DOM element at the bottom (`nativeProps`).
  const { volume, muted, playbackRate, onlyWarnForMediaSeekingError, src, onDuration,
  // @ts-expect-error
  acceptableTimeShift, acceptableTimeShiftInSeconds, toneFrequency, name, _remotionInternalNativeLoopPassed, _remotionInternalStack, style, pauseWhenBuffering, showInTimeline, loopVolumeCurveBehavior, onError, onAutoPlayError, onVideoFrame, crossOrigin, delayRenderRetries, delayRenderTimeoutInMilliseconds, allowAmplificationDuringRender, useWebAudioApi, ...nativeProps } = props;
  // Compile-time type-check artifact from the TypeScript source; the throw
  // below is unreachable at runtime.
  const _propsValid = true;
  if (!_propsValid) {
    throw new Error('typecheck error');
  }
  // Frame number used to evaluate a volume curve; defaults to repeating the
  // curve when the video loops.
  const volumePropFrame = (0, use_audio_frame_js_1.useFrameForVolumeProp)(loopVolumeCurveBehavior !== null && loopVolumeCurveBehavior !== void 0 ? loopVolumeCurveBehavior : 'repeat');
  const { fps, durationInFrames } = (0, use_video_config_js_1.useVideoConfig)();
  const parentSequence = (0, react_1.useContext)(SequenceContext_js_1.SequenceContext);
  const { hidden } = (0, react_1.useContext)(SequenceManager_js_1.SequenceVisibilityToggleContext);
  const logLevel = (0, log_level_context_js_1.useLogLevel)();
  const mountTime = (0, log_level_context_js_1.useMountTime)();
  // Stable random id identifying this media tag in the timeline for the
  // lifetime of the component (useState initializer runs once).
  const [timelineId] = (0, react_1.useState)(() => String(Math.random()));
  const isSequenceHidden = (_a = hidden[timelineId]) !== null && _a !== void 0 ? _a : false;
  // Hard failure for the removed legacy prop, pointing at its replacement.
  if (typeof acceptableTimeShift !== 'undefined') {
    throw new Error('acceptableTimeShift has been removed. Use acceptableTimeShiftInSeconds instead.');
  }
  const [mediaVolume] = (0, volume_position_state_js_1.useMediaVolumeState)();
  const [mediaMuted] = (0, volume_position_state_js_1.useMediaMutedState)();
  // Final volume for the current frame: combines the `volume` prop (number or
  // curve function) with the global media volume.
  const userPreferredVolume = (0, volume_prop_js_1.evaluateVolume)({
    frame: volumePropFrame,
    volume,
    mediaVolume,
  });
  // Register this tag in the timeline UI.
  (0, use_media_in_timeline_js_1.useMediaInTimeline)({
    mediaRef: videoRef,
    volume,
    mediaVolume,
    mediaType: 'video',
    src,
    playbackRate: (_b = props.playbackRate) !== null && _b !== void 0 ? _b : 1,
    displayName: name !== null && name !== void 0 ? name : null,
    id: timelineId,
    stack: _remotionInternalStack,
    showInTimeline,
    premountDisplay: null,
    onAutoPlayError: onAutoPlayError !== null && onAutoPlayError !== void 0 ? onAutoPlayError : null,
    isPremounting: Boolean(parentSequence === null || parentSequence === void 0 ? void 0 : parentSequence.premounting),
  });
  // putting playback before useVolume
  // because volume looks at playbackrate
  (0, use_media_playback_js_1.useMediaPlayback)({
    mediaRef: videoRef,
    src,
    mediaType: 'video',
    playbackRate: (_c = props.playbackRate) !== null && _c !== void 0 ? _c : 1,
    onlyWarnForMediaSeekingError,
    acceptableTimeshift: acceptableTimeShiftInSeconds !== null && acceptableTimeShiftInSeconds !== void 0 ? acceptableTimeShiftInSeconds : null,
    isPremounting: Boolean(parentSequence === null || parentSequence === void 0 ? void 0 : parentSequence.premounting),
    pauseWhenBuffering,
    onAutoPlayError: onAutoPlayError !== null && onAutoPlayError !== void 0 ? onAutoPlayError : null,
  });
  // Apply the evaluated volume, either directly on the element or through the
  // shared WebAudio source node when `useWebAudioApi` is enabled.
  (0, use_amplification_js_1.useVolume)({
    logLevel,
    mediaRef: videoRef,
    volume: userPreferredVolume,
    source: sharedSource,
    shouldUseWebAudioApi: useWebAudioApi !== null && useWebAudioApi !== void 0 ? useWebAudioApi : false,
  });
  // Clamp the playable duration to the enclosing sequence, if any.
  const actualFrom = parentSequence ? parentSequence.relativeFrom : 0;
  const duration = parentSequence
    ? Math.min(parentSequence.durationInFrames, durationInFrames)
    : durationInFrames;
  // Prefer a prefetched blob URL if the src was preloaded, then append a
  // media-fragment (#t=...) hint covering only the needed time range.
  const preloadedSrc = (0, prefetch_js_1.usePreload)(src);
  const actualSrc = (0, video_fragment_js_1.useAppendVideoFragment)({
    actualSrc: preloadedSrc,
    actualFrom,
    duration,
    fps,
  });
  // Expose the raw <video> element to parent refs.
  (0, react_1.useImperativeHandle)(ref, () => {
    return videoRef.current;
  }, []);
  // useState initializer as a run-once-on-mount log (avoids an extra effect).
  (0, react_1.useState)(() => (0, playback_logging_js_1.playbackLogging)({
    logLevel,
    message: `Mounting video with source = ${actualSrc}, v=${version_js_1.VERSION}, user agent=${typeof navigator === 'undefined' ? 'server' : navigator.userAgent}`,
    tag: 'video',
    mountTime,
  }));
  // Surface playback errors: forward to the user's onError handler when given,
  // otherwise throw so the error is not silently lost.
  (0, react_1.useEffect)(() => {
    const { current } = videoRef;
    if (!current) {
      return;
    }
    const errorHandler = () => {
      var _a;
      if (current.error) {
        // eslint-disable-next-line no-console
        console.error('Error occurred in video', current === null || current === void 0 ? void 0 : current.error);
        // If user is handling the error, we don't cause an unhandled exception
        if (onError) {
          const err = new Error(`Code ${current.error.code}: ${current.error.message}`);
          onError(err);
          return;
        }
        throw new Error(`The browser threw an error while playing the video ${src}: Code ${current.error.code} - ${(_a = current === null || current === void 0 ? void 0 : current.error) === null || _a === void 0 ? void 0 : _a.message}. See https://remotion.dev/docs/media-playback-error for help. Pass an onError() prop to handle the error.`);
      }
      else {
        // If user is handling the error, we don't cause an unhandled exception
        if (onError) {
          const err = new Error(`The browser threw an error while playing the video ${src}`);
          onError(err);
          return;
        }
        throw new Error('The browser threw an error while playing the video');
      }
    };
    current.addEventListener('error', errorHandler, { once: true });
    return () => {
      current.removeEventListener('error', errorHandler);
    };
  }, [onError, src]);
  // Keep the latest onDuration callback in a ref so the effect below does not
  // need to re-subscribe when the callback identity changes.
  const currentOnDurationCallback = (0, react_1.useRef)(onDuration);
  currentOnDurationCallback.current = onDuration;
  (0, emit_video_frame_js_1.useEmitVideoFrame)({ ref: videoRef, onVideoFrame });
  // Report the video duration: immediately if metadata is already loaded,
  // otherwise once `loadedmetadata` fires.
  (0, react_1.useEffect)(() => {
    var _a;
    const { current } = videoRef;
    if (!current) {
      return;
    }
    if (current.duration) {
      (_a = currentOnDurationCallback.current) === null || _a === void 0 ? void 0 : _a.call(currentOnDurationCallback, src, current.duration);
      return;
    }
    const onLoadedMetadata = () => {
      var _a;
      (_a = currentOnDurationCallback.current) === null || _a === void 0 ? void 0 : _a.call(currentOnDurationCallback, src, current.duration);
    };
    current.addEventListener('loadedmetadata', onLoadedMetadata);
    return () => {
      current.removeEventListener('loadedmetadata', onLoadedMetadata);
    };
  }, [src]);
  (0, react_1.useEffect)(() => {
    const { current } = videoRef;
    if (!current) {
      return;
    }
    // Without this, on iOS Safari, the video cannot be seeked.
    // if a seek is triggered before `loadedmetadata` is fired,
    // the video will not seek, even if `loadedmetadata` is fired afterwards.
    // Also, this needs to happen in a useEffect, because otherwise
    // the SSR props will be applied.
    if ((0, video_fragment_js_1.isIosSafari)()) {
      current.preload = 'metadata';
    }
    else {
      current.preload = 'auto';
    }
  }, []);
  // Hidden sequences render the element fully transparent (but keep it mounted
  // so playback state is preserved).
  const actualStyle = (0, react_1.useMemo)(() => {
    var _a;
    return {
      ...style,
      opacity: isSequenceHidden ? 0 : ((_a = style === null || style === void 0 ? void 0 : style.opacity) !== null && _a !== void 0 ? _a : 1),
    };
  }, [isSequenceHidden, style]);
  // crossOrigin must be set when frames will be read (onVideoFrame), otherwise
  // the canvas would be tainted; helper resolves the right attribute value.
  const crossOriginValue = (0, get_cross_origin_value_js_1.getCrossOriginValue)({
    crossOrigin,
    requestsVideoFrame: Boolean(onVideoFrame),
  });
  // The element is also muted when the evaluated volume is 0 or the sequence
  // is hidden, so no audio leaks from invisible/inaudible videos.
  return ((0, jsx_runtime_1.jsx)("video", { ref: videoRef, muted: muted || mediaMuted || isSequenceHidden || userPreferredVolume <= 0, playsInline: true, src: actualSrc, loop: _remotionInternalNativeLoopPassed, style: actualStyle, disableRemotePlayback: true, crossOrigin: crossOriginValue, ...nativeProps }));
};
// Public export: wraps the implementation in forwardRef so parents can obtain
// the underlying HTMLVideoElement (see the useImperativeHandle call above).
exports.VideoForPreview = (0, react_1.forwardRef)(VideoForDevelopmentRefForwardingFunction);