// @remotion/studio
// Version: (not captured)
// APIs for interacting with the Remotion Studio
// 306 lines (305 loc) • 12 kB — JavaScript (compiled output)
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.TimelineVideoInfo = void 0;
const jsx_runtime_1 = require("react/jsx-runtime");
const react_1 = require("react");
const remotion_1 = require("remotion");
const extract_frames_1 = require("../../helpers/extract-frames");
const frame_database_1 = require("../../helpers/frame-database");
const resize_video_frame_1 = require("../../helpers/resize-video-frame");
const timeline_layout_1 = require("../../helpers/timeline-layout");
const AudioWaveform_1 = require("../AudioWaveform");
// Height of the thumbnail filmstrip: 2px less than the image timeline layer.
const FILMSTRIP_HEIGHT = timeline_layout_1.TIMELINE_LAYER_HEIGHT_IMAGE - 2;
// Wrapper that stacks the filmstrip above the audio waveform.
const outerStyle = {
width: '100%',
height: '100%',
display: 'flex',
flexDirection: 'column',
};
// Container the thumbnail <canvas> is appended into by the effect below.
const filmstripContainerStyle = {
height: FILMSTRIP_HEIGHT,
width: '100%',
backgroundColor: 'rgba(0, 0, 0, 0.3)',
display: 'flex',
borderTopLeftRadius: 2,
fontSize: 10,
fontFamily: 'Arial, Helvetica',
};
// WebCodecs VideoFrame timestamps are in microseconds (1e6 ticks/second).
const WEBCODECS_TIMESCALE = 1000000;
// A frame may fill a slot only if it is within 50ms of the slot's target time.
const MAX_TIME_DEVIATION = WEBCODECS_TIMESCALE * 0.05;
// How much source-video time (in WebCodecs timescale units, i.e. microseconds)
// one filmstrip thumbnail represents: the segment duration spread evenly over
// the number of thumbnails that fit across the visualization width.
const getDurationOfOneFrame = ({ visualizationWidth, aspectRatio, segmentDuration, }) => {
    // A thumbnail is FILMSTRIP_HEIGHT tall, so its width is height × aspect ratio.
    const thumbnailWidth = FILMSTRIP_HEIGHT * aspectRatio;
    const thumbnailsAcross = visualizationWidth / thumbnailWidth;
    return (segmentDuration / thumbnailsAcross) * WEBCODECS_TIMESCALE;
};
// Round half-up, but treat fractions within ~1e-8 below .5 as exactly .5 to
// absorb floating-point error introduced by the caller's division.
const fixRounding = (value) => {
    const fraction = value % 1;
    return fraction >= 0.49999999 ? Math.ceil(value) : Math.floor(value);
};
// Computes the target timestamps (in WebCodecs timescale units) of every
// filmstrip slot for a segment starting at `fromSeconds`: one slot per
// thumbnail that (partially) fits across `visualizationWidth`, plus one extra,
// each snapped to a whole multiple of the per-thumbnail duration.
const calculateTimestampSlots = ({ visualizationWidth, fromSeconds, segmentDuration, aspectRatio, }) => {
    const thumbnailsAcross = visualizationWidth / (FILMSTRIP_HEIGHT * aspectRatio);
    const slotCount = Math.ceil(thumbnailsAcross) + 1;
    const durationOfOneFrame = getDurationOfOneFrame({
        visualizationWidth,
        aspectRatio,
        segmentDuration,
    });
    const startTime = fromSeconds * WEBCODECS_TIMESCALE;
    const timestampTargets = [];
    for (let i = 0; i < slotCount; i++) {
        // Aim at the middle of slot i, then snap down to a frame boundary.
        // fixRounding already yields an integer, so no further rounding needed.
        const target = startTime + durationOfOneFrame * (i + 0.5);
        const snapped = (fixRounding(target / durationOfOneFrame) - 1) * durationOfOneFrame;
        timestampTargets.push(snapped);
    }
    return timestampTargets;
};
// Guarantees that `filledSlots` has an entry for every slot that should be
// visible between `fromSeconds` and `toSeconds`. New slots are registered as
// `undefined` (unfilled); slots that already exist are left untouched.
const ensureSlots = ({ filledSlots, naturalWidth, fromSeconds, toSeconds, aspectRatio, }) => {
    const targets = calculateTimestampSlots({
        visualizationWidth: naturalWidth,
        fromSeconds,
        segmentDuration: toSeconds - fromSeconds,
        aspectRatio,
    });
    for (const target of targets) {
        if (!filledSlots.has(target)) {
            filledSlots.set(target, undefined);
        }
    }
};
// Draws `frame` into the filmstrip slot identified by `timestamp` and records
// which source-frame timestamp now occupies that slot.
const drawSlot = ({ frame, ctx, filledSlots, visualizationWidth, timestamp, segmentDuration, fromSeconds, }) => {
    const durationOfOneFrame = getDurationOfOneFrame({
        visualizationWidth,
        aspectRatio: frame.displayWidth / frame.displayHeight,
        segmentDuration,
    });
    // Fractional slot index of this timestamp, relative to the segment start.
    const frameIndex = (timestamp - fromSeconds * WEBCODECS_TIMESCALE) / durationOfOneFrame;
    // Cached frames are stored at devicePixelRatio scale; convert to CSS px.
    const thumbnailWidth = frame.displayWidth / window.devicePixelRatio;
    const thumbnailHeight = frame.displayHeight / window.devicePixelRatio;
    // Snap edges to whole pixels so adjacent thumbnails leave no gaps.
    const left = Math.floor(frameIndex * thumbnailWidth);
    const right = Math.ceil((frameIndex + 1) * thumbnailWidth);
    ctx.drawImage(frame, left, 0, right - left, thumbnailHeight);
    filledSlots.set(timestamp, frame.timestamp);
};
// Fills filmstrip slots from the in-memory frame cache: for every slot, the
// cached frame (belonging to `src`) with the nearest timestamp is drawn,
// unless an equally close or closer frame was already drawn there.
// Note: linear scan of cache keys per slot — acceptable for filmstrip sizes.
const fillWithCachedFrames = ({ ctx, naturalWidth, filledSlots, src, segmentDuration, fromSeconds, }) => {
    // Consider only cache entries that belong to this source.
    const prefix = (0, frame_database_1.getFrameDatabaseKeyPrefix)(src);
    const candidateKeys = [];
    for (const key of frame_database_1.frameDatabase.keys()) {
        if (key.startsWith(prefix)) {
            candidateKeys.push(key);
        }
    }
    const slotTimestamps = Array.from(filledSlots.keys());
    for (const timestamp of slotTimestamps) {
        // Find the cached frame nearest to this slot's target time.
        let bestKey;
        let bestDistance = Infinity;
        for (const key of candidateKeys) {
            const distance = Math.abs((0, frame_database_1.getTimestampFromFrameDatabaseKey)(key) - timestamp);
            if (distance < bestDistance) {
                bestDistance = distance;
                bestKey = key;
            }
        }
        if (!bestKey) {
            continue;
        }
        const cached = frame_database_1.frameDatabase.get(bestKey);
        if (!cached) {
            continue;
        }
        const alreadyFilled = filledSlots.get(timestamp);
        // Keep the existing fill when it is at least as close to the slot.
        if (alreadyFilled &&
            Math.abs(alreadyFilled - timestamp) <=
                Math.abs(cached.frame.timestamp - timestamp)) {
            continue;
        }
        cached.lastUsed = Date.now();
        drawSlot({
            ctx,
            frame: cached.frame,
            filledSlots,
            visualizationWidth: naturalWidth,
            timestamp,
            segmentDuration,
            fromSeconds,
        });
    }
};
// Draws a freshly decoded `frame` into every slot it is close enough to
// (within MAX_TIME_DEVIATION), unless that slot already holds a frame that is
// at least as close to the slot's target time.
const fillFrameWhereItFits = ({ frame, filledSlots, ctx, visualizationWidth, segmentDuration, fromSeconds, }) => {
    const slots = Array.from(filledSlots.keys());
    for (let i = 0; i < slots.length; i++) {
        const slot = slots[i];
        // Only consider slots this frame is reasonably close to.
        if (Math.abs(slot - frame.timestamp) > MAX_TIME_DEVIATION) {
            continue;
        }
        const filled = filledSlots.get(slot);
        // Don't fill if a better timestamp was already filled.
        // Bug fixes vs. previous version:
        // - compare BOTH candidates by their distance to the slot (the old
        //   code compared the existing fill against the distance between the
        //   two frames, which could reject a closer incoming frame); this now
        //   matches the rule used by fillWithCachedFrames;
        // - use `!== undefined` so a legitimate fill at timestamp 0 counts
        //   as filled instead of being redrawn unconditionally.
        if (filled !== undefined &&
            Math.abs(filled - slot) <= Math.abs(frame.timestamp - slot)) {
            continue;
        }
        drawSlot({
            ctx,
            frame,
            filledSlots,
            visualizationWidth,
            timestamp: slot,
            segmentDuration,
            fromSeconds,
        });
    }
};
// Timeline entry for a video item in the Remotion Studio: a filmstrip of
// thumbnails extracted from `src` (drawn into a <canvas> appended to the
// filmstrip container) stacked above an <AudioWaveform>.
// `trimBefore` and `durationInFrames` are composition frames; they are
// converted to source-video seconds using `fps` and `playbackRate`.
// `premountWidth`/`postmountWidth` horizontally inset the waveform.
// NOTE(review): slot math uses `naturalWidth` while the canvas is created at
// `visualizationWidth` — presumably these correspond; confirm against callers.
const TimelineVideoInfo = ({ src, visualizationWidth, naturalWidth, trimBefore, durationInFrames, playbackRate, volume, doesVolumeChange, premountWidth, postmountWidth, }) => {
const { fps } = (0, remotion_1.useVideoConfig)();
// Filmstrip container; the effect below appends a <canvas> to it.
const ref = (0, react_1.useRef)(null);
// Set when frame extraction fails; short-circuits the effect on re-run.
const [error, setError] = (0, react_1.useState)(null);
// Aspect ratio of the video track, seeded from a cache so previously cached
// frames can be drawn before the track is inspected; null if unknown.
const aspectRatio = (0, react_1.useRef)((0, frame_database_1.getAspectRatioFromCache)(src));
// Renders the filmstrip thumbnails into a freshly created canvas.
(0, react_1.useEffect)(() => {
if (error) {
return;
}
const { current } = ref;
if (!current) {
return;
}
const controller = new AbortController();
const canvas = document.createElement('canvas');
canvas.width = visualizationWidth;
canvas.height = FILMSTRIP_HEIGHT;
const ctx = canvas.getContext('2d');
if (!ctx) {
return;
}
current.appendChild(canvas);
// desired-timestamp -> filled-timestamp (WebCodecs timescale units);
// a value of `undefined` means the slot has not been drawn yet.
const filledSlots = new Map();
const fromSeconds = trimBefore / fps;
// Trim is applied first, then playbackRate. Each composition frame
// advances the source video by `playbackRate` source frames.
const toSeconds = fromSeconds + (durationInFrames * playbackRate) / fps;
if (aspectRatio.current !== null) {
// Aspect ratio already known: draw whatever is cached right away.
ensureSlots({
filledSlots,
naturalWidth,
fromSeconds,
toSeconds,
aspectRatio: aspectRatio.current,
});
fillWithCachedFrames({
ctx,
naturalWidth,
filledSlots,
src,
segmentDuration: toSeconds - fromSeconds,
fromSeconds,
});
const unfilled = Array.from(filledSlots.keys()).filter((timestamp) => !filledSlots.get(timestamp));
// Don't extract frames if all slots are filled
if (unfilled.length === 0) {
return () => {
current.removeChild(canvas);
};
}
}
(0, extract_frames_1.extractFrames)({
// Called once the video track is known: records the aspect ratio and
// returns the timestamps (in seconds) that should be decoded.
timestampsInSeconds: ({ track, }) => {
aspectRatio.current = track.width / track.height;
frame_database_1.aspectRatioCache.set(src, aspectRatio.current);
ensureSlots({
filledSlots,
fromSeconds,
toSeconds,
naturalWidth,
aspectRatio: aspectRatio.current,
});
return Array.from(filledSlots.keys()).map((timestamp) => timestamp / WEBCODECS_TIMESCALE);
},
src,
// For each decoded sample: convert to a VideoFrame, scale to the
// filmstrip height, cache it, and draw it into any nearby slot.
onVideoSample: (sample) => {
let frame;
try {
frame = sample.toVideoFrame();
const scale = (FILMSTRIP_HEIGHT / frame.displayHeight) * window.devicePixelRatio;
const transformed = (0, resize_video_frame_1.resizeVideoFrame)({
frame,
scale,
});
// resizeVideoFrame may return the input unchanged; only close the
// original when a distinct resized frame was produced.
if (transformed !== frame) {
frame.close();
}
frame = undefined;
const databaseKey = (0, frame_database_1.makeFrameDatabaseKey)(src, transformed.timestamp);
(0, frame_database_1.addFrameToCache)(databaseKey, transformed);
if (aspectRatio.current === null) {
throw new Error('Aspect ratio is not set');
}
ensureSlots({
filledSlots,
fromSeconds,
toSeconds,
naturalWidth,
aspectRatio: aspectRatio.current,
});
fillFrameWhereItFits({
ctx,
filledSlots,
visualizationWidth: naturalWidth,
frame: transformed,
segmentDuration: toSeconds - fromSeconds,
fromSeconds,
});
}
catch (e) {
// Release the decoded frame before propagating the failure.
if (frame) {
frame.close();
}
throw e;
}
finally {
sample.close();
}
},
signal: controller.signal,
})
.then(() => {
if (controller.signal.aborted) {
return;
}
// Final pass: top up any still-unfilled slots from the cache.
fillWithCachedFrames({
ctx,
naturalWidth,
filledSlots,
src,
segmentDuration: toSeconds - fromSeconds,
fromSeconds,
});
})
.catch((e) => {
setError(e);
});
// Cleanup: cancel in-flight extraction and remove this run's canvas.
return () => {
controller.abort();
current.removeChild(canvas);
};
}, [
durationInFrames,
error,
fps,
naturalWidth,
playbackRate,
src,
trimBefore,
visualizationWidth,
]);
// Waveform spans only the mounted portion of the item.
const audioWidth = visualizationWidth - premountWidth - postmountWidth;
const audioStyle = (0, react_1.useMemo)(() => {
return {
height: timeline_layout_1.TIMELINE_LAYER_HEIGHT_AUDIO,
width: audioWidth,
position: 'relative',
marginLeft: premountWidth,
};
}, [audioWidth, premountWidth]);
return (jsx_runtime_1.jsxs("div", { style: outerStyle, children: [
jsx_runtime_1.jsx("div", { ref: ref, style: filmstripContainerStyle }), jsx_runtime_1.jsx("div", { style: audioStyle, children: jsx_runtime_1.jsx(AudioWaveform_1.AudioWaveform, { src: src, visualizationWidth: audioWidth, startFrom: trimBefore, durationInFrames: durationInFrames, volume: volume, doesVolumeChange: doesVolumeChange, playbackRate: playbackRate }) })
] }));
};
exports.TimelineVideoInfo = TimelineVideoInfo;