// @remotion/renderer: Render Remotion videos using Node.js or Bun
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.createCombinedAudio = exports.getClosestAlignedTime = exports.durationOf1Frame = void 0;
const fs_1 = require("fs");
const path_1 = require("path");
const version_1 = require("remotion/version");
const call_ffmpeg_1 = require("./call-ffmpeg");
const logger_1 = require("./logger");
const audio_codec_1 = require("./options/audio-codec");
const parse_ffmpeg_progress_1 = require("./parse-ffmpeg-progress");
const sample_rate_1 = require("./sample-rate");
const truthy_1 = require("./truthy");
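// Duration of one AAC frame in microseconds: an AAC frame always carries 1024
// PCM samples, so at a 48000 Hz sample rate, for example, this comes out to
// ~21333.33µs.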
exports.durationOf1Frame = (1024 / sample_rate_1.DEFAULT_SAMPLE_RATE) * 1000000;
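// Rounds a timestamp (in seconds) to the nearest AAC frame boundary so that
// chunk start and end times land on whole audio frames.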
const getClosestAlignedTime = (targetTime) => {
const decimalFramesToTargetTime = (targetTime * 1000000) / exports.durationOf1Frame;
const nearestFrameIndexForTargetTime = Math.round(decimalFramesToTargetTime);
return (nearestFrameIndexForTargetTime * exports.durationOf1Frame) / 1000000;
};
exports.getClosestAlignedTime = getClosestAlignedTime;
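// Concatenates the per-chunk audio files with ffmpeg's concat demuxer and
// re-encodes the result using the resolved audio codec and bitrate.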
const encodeAudio = async ({ files, resolvedAudioCodec, audioBitrate, filelistDir, output, indent, logLevel, addRemotionMetadata, fps, binariesDirectory, cancelSignal, onProgress, }) => {
var _a;
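// Build the concat demuxer input: one "file '<path>'" line per chunk.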
const fileList = files.map((p) => `file '${p}'`).join('\n');
const fileListTxt = (0, path_1.join)(filelistDir, 'audio-files.txt');
(0, fs_1.writeFileSync)(fileListTxt, fileList);
const startCombining = Date.now();
const command = [
'-hide_banner',
'-f',
'concat',
'-safe',
'0',
'-i',
fileListTxt,
'-c:a',
(0, audio_codec_1.mapAudioCodecToFfmpegAudioCodecName)(resolvedAudioCodec),
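// For AAC, widen the encoder's frequency cutoff to 18 kHz; the native ffmpeg
// AAC encoder otherwise applies a lower, bitrate-dependent low-pass filter.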
resolvedAudioCodec === 'aac' ? '-cutoff' : null,
resolvedAudioCodec === 'aac' ? '18000' : null,
'-b:a',
audioBitrate ? audioBitrate : '320k',
'-vn',
addRemotionMetadata ? `-metadata` : null,
addRemotionMetadata ? `comment=Made with Remotion ${version_1.VERSION}` : null,
'-y',
output,
];
logger_1.Log.verbose({ indent, logLevel }, `Combining audio with re-encoding, command: ${command.join(' ')}`);
try {
const task = (0, call_ffmpeg_1.callFf)({
args: command,
bin: 'ffmpeg',
indent,
logLevel,
binariesDirectory,
cancelSignal,
});
(_a = task.stderr) === null || _a === void 0 ? void 0 : _a.on('data', (data) => {
const utf8 = data.toString('utf8');
const parsed = (0, parse_ffmpeg_progress_1.parseFfmpegProgress)(utf8, fps);
if (parsed === undefined) {
logger_1.Log.verbose({ indent, logLevel }, utf8);
}
else {
onProgress(parsed);
logger_1.Log.verbose({ indent, logLevel }, `Encoded ${parsed} audio frames`);
}
});
await task;
logger_1.Log.verbose({ indent, logLevel }, `Encoded audio in ${Date.now() - startCombining}ms`);
return output;
}
catch (e) {
(0, fs_1.rmSync)(fileListTxt, { recursive: true });
throw e;
}
};
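// Concatenates AAC chunks without re-encoding: each chunk is trimmed via
// inpoint/outpoint so that encoder priming and the requested padding frames
// are dropped, and the remaining frames are stream-copied back to back.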
const combineAudioSeamlessly = async ({ files, filelistDir, indent, logLevel, output, chunkDurationInSeconds, addRemotionMetadata, fps, binariesDirectory, cancelSignal, onProgress, }) => {
var _a;
const startConcatenating = Date.now();
const fileList = files
.map((p, i) => {
const isLast = i === files.length - 1;
const targetStart = i * chunkDurationInSeconds;
const endStart = (i + 1) * chunkDurationInSeconds;
const startTime = (0, exports.getClosestAlignedTime)(targetStart) * 1000000;
const endTime = (0, exports.getClosestAlignedTime)(endStart) * 1000000;
const realDuration = endTime - startTime;
let inpoint = 0;
if (i > 0) {
// Although we only asked for two frames of padding, ffmpeg will add an
// additional 2 frames of silence at the start of the segment. When we slice out
// our real data with inpoint and outpoint, we'll want to remove both the silence
// and the extra frames we asked for.
inpoint = exports.durationOf1Frame * 4;
}
// inpoint is inclusive and outpoint is exclusive. To avoid overlap, we subtract
// the duration of one frame from the outpoint.
// We don't have to subtract a frame if this is the last segment.
const outpoint = (i === 0 ? exports.durationOf1Frame * 2 : inpoint) +
realDuration -
(isLast ? 0 : exports.durationOf1Frame);
return [`file '${p}'`, `inpoint ${inpoint}us`, `outpoint ${outpoint}us`]
.filter(truthy_1.truthy)
.join('\n');
})
.join('\n');
const fileListTxt = (0, path_1.join)(filelistDir, 'audio-files.txt');
(0, fs_1.writeFileSync)(fileListTxt, fileList);
const command = [
'-hide_banner',
'-f',
'concat',
'-safe',
'0',
'-i',
fileListTxt,
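// Stream-copy the audio: no re-encode, so combining is fast and lossless.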
'-c:a',
'copy',
'-vn',
addRemotionMetadata ? `-metadata` : null,
addRemotionMetadata ? `comment=Made with Remotion ${version_1.VERSION}` : null,
'-y',
output,
];
logger_1.Log.verbose({ indent, logLevel }, `Combining AAC audio seamlessly, command: ${command.join(' ')}`);
try {
const task = (0, call_ffmpeg_1.callFf)({
args: command,
bin: 'ffmpeg',
indent,
logLevel,
binariesDirectory,
cancelSignal,
});
(_a = task.stderr) === null || _a === void 0 ? void 0 : _a.on('data', (data) => {
const utf8 = data.toString('utf8');
const parsed = (0, parse_ffmpeg_progress_1.parseFfmpegProgress)(utf8, fps);
if (parsed !== undefined) {
onProgress(parsed);
logger_1.Log.verbose({ indent, logLevel }, `Encoded ${parsed} audio frames`);
}
});
await task;
logger_1.Log.verbose({ indent, logLevel }, `Combined audio seamlessly in ${Date.now() - startConcatenating}ms`);
return output;
}
catch (e) {
(0, fs_1.rmSync)(fileListTxt, { recursive: true });
logger_1.Log.error({ indent, logLevel }, e);
throw e;
}
};
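// Picks the seamless, copy-based path when the caller rendered chunks that
// allow it (AAC on aligned frame boundaries); otherwise falls back to
// concatenating with a full re-encode.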
const createCombinedAudio = ({ seamless, filelistDir, files, indent, logLevel, audioBitrate, resolvedAudioCodec, output, chunkDurationInSeconds, addRemotionMetadata, binariesDirectory, fps, cancelSignal, onProgress, }) => {
if (seamless) {
return combineAudioSeamlessly({
filelistDir,
files,
indent,
logLevel,
output,
chunkDurationInSeconds,
addRemotionMetadata,
binariesDirectory,
fps,
cancelSignal,
onProgress,
});
}
return encodeAudio({
filelistDir,
files,
resolvedAudioCodec,
audioBitrate,
output,
indent,
logLevel,
addRemotionMetadata,
binariesDirectory,
fps,
cancelSignal,
onProgress,
});
};
exports.createCombinedAudio = createCombinedAudio;
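// Usage sketch (not part of this module; the parameter names come from
// createCombinedAudio above, but all values below are illustrative
// assumptions, not Remotion's actual call site):
//
//   await createCombinedAudio({
//     seamless: true,
//     files: chunkAudioFiles,          // per-chunk audio files, in order
//     filelistDir: scratchDir,         // writable dir for the concat list
//     output: combinedAudioPath,
//     chunkDurationInSeconds: 5,
//     resolvedAudioCodec: 'aac',
//     audioBitrate: null,              // falls back to 320k when re-encoding
//     addRemotionMetadata: true,
//     fps: 30,
//     indent: false,
//     logLevel: 'verbose',
//     binariesDirectory: null,
//     cancelSignal: undefined,
//     onProgress: (frames) => console.log(`${frames} audio frames done`),
//   });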