webcodecs-encoder
Version:
A TypeScript library for browser environments to encode video (H.264/AVC, VP9, VP8) and audio (AAC, Opus) using the WebCodecs API and mux them into MP4 or WebM containers with real-time streaming support. New function-first API design.
1,145 lines (1,132 loc) • 37.5 kB
JavaScript
// src/types.ts
// src/types.ts
/**
 * Library-wide error wrapper carrying a machine-readable `type`
 * alongside the human-readable message and the underlying `cause`.
 */
var EncodeError = class _EncodeError extends Error {
  constructor(type, message, cause) {
    super(message);
    // Restore the prototype chain (required when transpiling Error subclasses).
    Object.setPrototypeOf(this, _EncodeError.prototype);
    this.name = "EncodeError";
    this.type = type;
    this.cause = cause;
  }
};
// src/utils/config-parser.ts
/**
 * Build the final encoder config for a source.
 * Pipeline: infer settings from the source, overlay user options,
 * apply the quality preset, then normalize to the internal config shape.
 */
async function inferAndBuildConfig(source, options) {
  const inferred = await inferConfigFromSource(source);
  const merged = mergeWithUserOptions(inferred, options);
  const withPreset = applyQualityPreset(merged, options?.quality);
  return convertToEncoderConfig(withPreset);
}
/**
 * Best-effort inference of encoding settings from the source itself
 * (dimensions from a first frame, metadata from a VideoFile, track
 * settings from a MediaStream). Never throws: on failure it falls back
 * to a safe default resolution.
 */
async function inferConfigFromSource(source) {
  // Defaults applied when nothing better can be detected.
  const config = {
    frameRate: 30,
    container: "mp4"
  };
  try {
    const firstFrame = await getFirstFrame(source);
    if (firstFrame) {
      const { width, height } = getFrameDimensions(firstFrame);
      config.width = width;
      config.height = height;
    }
    if (isVideoFileSource(source)) {
      await enrichConfigFromVideoFile(config, source);
    }
    if (source instanceof MediaStream) {
      // Disable tracks the stream does not provide; otherwise copy the
      // audio track's actual settings.
      if (source.getVideoTracks().length === 0) {
        config.video = false;
      }
      const audioTracks = source.getAudioTracks();
      if (audioTracks.length === 0) {
        config.audio = false;
      } else {
        const settings = audioTracks[0].getSettings();
        config.audio = {
          sampleRate: settings.sampleRate || 48e3,
          channels: settings.channelCount || 2
        };
      }
    }
  } catch (error) {
    // Inference is best-effort: fall back to a safe default resolution.
    config.width = 640;
    config.height = 480;
  }
  return config;
}
/**
 * Merge inferred settings with user-supplied options. Top-level user keys
 * win; the nested `video`/`audio` objects are merged key-by-key, with an
 * explicit `false` (disabled) always taking precedence.
 */
function mergeWithUserOptions(inferredConfig, userOptions) {
  // Shallow-merge one nested (video/audio) option object.
  // `false` means "explicitly disabled" and always wins; an absent user
  // value keeps the inferred one (copied so callers cannot mutate it).
  const mergeNested = (inferred, user) => {
    if (user === false) return false;
    if (user === void 0) {
      if (inferred === false) return false;
      return inferred && typeof inferred === "object" ? { ...inferred } : inferred;
    }
    if (inferred === false || inferred == null) return { ...user };
    return { ...inferred, ...user };
  };
  return {
    // Inferred settings form the base; user-specified keys override them.
    ...inferredConfig,
    ...userOptions,
    // Nested objects are merged individually rather than replaced wholesale.
    video: mergeNested(inferredConfig.video, userOptions?.video),
    audio: mergeNested(inferredConfig.audio, userOptions?.audio)
  };
}
/**
 * Apply a named quality preset ("low" | "medium" | "high" | "lossless") by
 * filling in video/audio bitrates that were not explicitly provided.
 * Returns `config` unchanged when `quality` is absent or unknown.
 */
function applyQualityPreset(config, quality) {
  if (!quality) return config;
  const width = config.width || 640;
  const height = config.height || 480;
  const pixelsPerSecond = width * height * (config.frameRate || 30);
  // [minimum video bitrate, bits-per-pixel-second factor, audio bitrate]
  // (null prototype so arbitrary `quality` strings cannot hit Object.prototype)
  const presets = {
    __proto__: null,
    low: [5e5, 0.1, 64e3],
    medium: [1e6, 0.2, 128e3],
    high: [2e6, 0.4, 192e3],
    lossless: [1e7, 1, 32e4]
  };
  const preset = presets[quality];
  if (!preset) return config;
  const [minVideoBitrate, factor, audioBitrate] = preset;
  const videoBitrate = Math.max(minVideoBitrate, pixelsPerSecond * factor);
  const mergedAudio = config.audio === false ? false : { ...config.audio };
  if (mergedAudio && typeof mergedAudio === "object") {
    const codec = mergedAudio.codec || "aac";
    // PCM and telephony codecs have fixed/derived rates, so the preset
    // bitrate only applies to compressed codecs without an explicit bitrate.
    if (codec !== "pcm" && codec !== "ulaw" && codec !== "alaw" && mergedAudio.bitrate == null) {
      mergedAudio.bitrate = audioBitrate;
    }
  }
  return {
    ...config,
    video: config.video === false ? false : {
      ...config.video,
      bitrate: config.video?.bitrate || videoBitrate
    },
    audio: mergedAudio
  };
}
/**
 * Normalize merged user-facing options into the internal encoder config
 * shape consumed by the worker. When video is disabled, dimensions and the
 * video bitrate are zeroed; when audio is disabled, audio fields stay at
 * their zero/undefined defaults.
 */
function convertToEncoderConfig(options) {
  const noVideo = options.video === false;
  // Only a real object counts as per-track video options.
  const videoOptions = !noVideo && options.video && typeof options.video === "object" ? options.video : null;
  const config = {
    width: noVideo ? 0 : options.width || 640,
    height: noVideo ? 0 : options.height || 480,
    frameRate: options.frameRate || 30,
    videoBitrate: noVideo ? 0 : videoOptions?.bitrate || 1e6,
    audioBitrate: 0,
    sampleRate: 0,
    channels: 0,
    container: options.container || "mp4",
    codec: {
      video: noVideo ? void 0 : videoOptions?.codec || "avc",
      audio: void 0
    },
    latencyMode: noVideo ? "quality" : options.latencyMode || videoOptions?.latencyMode || "quality",
    hardwareAcceleration: noVideo ? "no-preference" : videoOptions?.hardwareAcceleration || "no-preference",
    keyFrameInterval: noVideo ? void 0 : videoOptions?.keyFrameInterval,
    audioBitrateMode: void 0,
    firstTimestampBehavior: options.firstTimestampBehavior || "offset",
    maxVideoQueueSize: options.maxVideoQueueSize || 30,
    maxAudioQueueSize: options.maxAudioQueueSize || 30,
    backpressureStrategy: options.backpressureStrategy || "drop"
  };
  // Explicit codec string override for video.
  if (videoOptions?.codecString) {
    config.codecString = {
      ...config.codecString ?? {},
      video: videoOptions.codecString
    };
  }
  // Per-encoder extras (quantizer, AVC/HEVC bitstream format).
  if (videoOptions) {
    const videoEncoderConfig = {};
    if (typeof videoOptions.quantizer === "number") {
      videoEncoderConfig.quantizer = videoOptions.quantizer;
    }
    if (config.codec?.video === "avc" && videoOptions.avc?.format) {
      videoEncoderConfig.avc = { format: videoOptions.avc.format };
    }
    if (config.codec?.video === "hevc" && videoOptions.hevc?.format) {
      videoEncoderConfig.hevc = { format: videoOptions.hevc.format };
    }
    if (Object.keys(videoEncoderConfig).length > 0) {
      config.videoEncoderConfig = videoEncoderConfig;
    }
  }
  if (options.audio !== false) {
    const audioOptions = options.audio || {};
    const requestedCodec = audioOptions.codec || "aac";
    const isTelephonyCodec = requestedCodec === "ulaw" || requestedCodec === "alaw";
    // Telephony codecs default to 8 kHz mono; everything else 48 kHz stereo.
    const sampleRate = audioOptions.sampleRate || (isTelephonyCodec ? 8e3 : 48e3);
    const channels = audioOptions.channels || (isTelephonyCodec ? 1 : 2);
    let bitrate = audioOptions.bitrate;
    if (bitrate == null) {
      if (requestedCodec === "pcm") {
        bitrate = sampleRate * channels * 16; // raw PCM: 16 bits per sample
      } else if (isTelephonyCodec) {
        bitrate = 64e3; // G.711 fixed rate
      } else if (requestedCodec === "flac") {
        bitrate = 512e3;
      } else {
        bitrate = 128e3; // aac / opus / mp3 / vorbis and anything else
      }
    }
    config.sampleRate = sampleRate;
    config.channels = channels;
    config.audioBitrate = bitrate;
    config.codec = {
      ...config.codec,
      audio: requestedCodec
    };
    config.audioBitrateMode = audioOptions.bitrateMode || (requestedCodec === "aac" ? "variable" : "constant");
    if (audioOptions.codecString) {
      config.codecString = {
        ...config.codecString ?? {},
        audio: audioOptions.codecString
      };
    }
    // Per-encoder extras (only AAC bitstream format is supported today).
    if (requestedCodec === "aac" && audioOptions.aac?.format) {
      config.audioEncoderConfig = { aac: { format: audioOptions.aac.format } };
    }
  }
  return config;
}
/**
 * Obtain something frame-like that reveals the source's dimensions.
 * Arrays yield their first element; MediaStreams yield a duck-typed
 * { displayWidth, displayHeight } from track settings; async iterables
 * cannot be peeked without consuming a frame, so they yield null.
 */
async function getFirstFrame(source) {
  if (Array.isArray(source)) {
    if (source.length === 0) return null;
    return source[0];
  }
  if (source instanceof MediaStream) {
    const [videoTrack] = source.getVideoTracks();
    if (videoTrack) {
      const { width, height } = videoTrack.getSettings();
      if (width && height) {
        return { displayWidth: width, displayHeight: height };
      }
    }
    return null;
  }
  // Async iterables: peeking would consume the first frame, so give up.
  if (source && typeof source[Symbol.asyncIterator] === "function") {
    return null;
  }
  return null;
}
/**
 * Enrich an inferred config with metadata probed from a VideoFile's Blob:
 * intrinsic dimensions via a temporary <video> element, and the container
 * format via the file's MIME type. Browser-only; silently returns in
 * non-DOM environments. Failures are logged and swallowed (best-effort).
 */
async function enrichConfigFromVideoFile(config, videoFile) {
  // Requires DOM APIs; bail out under SSR / workers / Node.
  if (typeof document === "undefined" || typeof URL === "undefined") {
    return;
  }
  const file = videoFile.file;
  if (!(typeof Blob !== "undefined" && file instanceof Blob)) {
    return;
  }
  const video = document.createElement("video");
  video.preload = "metadata";
  let objectUrl = null;
  try {
    objectUrl = URL.createObjectURL(file);
    video.src = objectUrl;
    await new Promise((resolve, reject) => {
      const cleanup = () => {
        video.onloadedmetadata = null;
        video.onerror = null;
      };
      video.onloadedmetadata = () => {
        cleanup();
        resolve();
      };
      video.onerror = () => {
        cleanup();
        reject(new Error("Failed to load video metadata"));
      };
    });
    if (video.videoWidth && video.videoHeight) {
      config.width = video.videoWidth;
      config.height = video.videoHeight;
    }
    // Infer the container from the file's MIME type. The caller seeds
    // `config.container` with a default ("mp4") before calling, so this
    // must override rather than only fill a missing value — the previous
    // `!config.container` guard made this branch unreachable. An explicit
    // user-supplied container still wins because user options are merged
    // on top of the inferred config afterwards.
    if (typeof videoFile.type === "string") {
      if (videoFile.type.includes("webm")) {
        config.container = "webm";
      } else if (videoFile.type.includes("mp4")) {
        config.container = "mp4";
      }
    }
  } catch (error) {
    console.warn("Failed to infer metadata from VideoFile", error);
  } finally {
    if (objectUrl) {
      URL.revokeObjectURL(objectUrl);
    }
    video.src = "";
    video.remove?.();
  }
}
/**
 * Type guard: a VideoFile source is any object whose `file` property
 * is a Blob (or File, which subclasses Blob).
 */
function isVideoFileSource(source) {
  if (!source || typeof source !== "object") {
    return false;
  }
  if (!("file" in source)) {
    return false;
  }
  return typeof Blob !== "undefined" && source.file instanceof Blob;
}
/**
 * Extract { width, height } from any supported frame-like value, falling
 * back to 640x480 when nothing can be determined.
 *
 * Each browser-global `instanceof` check is guarded with `typeof` so this
 * also runs where the constructor is missing (Node, SSR, older browsers) —
 * consistent with the guards already used elsewhere in this bundle
 * (e.g. WorkerCommunicator.collectTransferables).
 */
function getFrameDimensions(frame) {
  if (!frame) {
    return { width: 640, height: 480 };
  }
  if (typeof VideoFrame !== "undefined" && frame instanceof VideoFrame) {
    // Prefer display dimensions; coded dimensions are the fallback.
    return {
      width: frame.displayWidth || frame.codedWidth,
      height: frame.displayHeight || frame.codedHeight
    };
  }
  if (typeof HTMLCanvasElement !== "undefined" && frame instanceof HTMLCanvasElement) {
    return { width: frame.width, height: frame.height };
  }
  if (typeof OffscreenCanvas !== "undefined" && frame instanceof OffscreenCanvas) {
    return { width: frame.width, height: frame.height };
  }
  if (typeof ImageBitmap !== "undefined" && frame instanceof ImageBitmap) {
    return { width: frame.width, height: frame.height };
  }
  if (typeof ImageData !== "undefined" && frame instanceof ImageData) {
    return { width: frame.width, height: frame.height };
  }
  // Duck-typed VideoFrame-like objects (e.g. the stub built from
  // MediaStream track settings in getFirstFrame).
  if ("displayWidth" in frame && "displayHeight" in frame) {
    return {
      width: frame.displayWidth,
      height: frame.displayHeight
    };
  }
  return { width: 640, height: 480 };
}
// src/worker/worker-communicator.ts
// src/worker/worker-communicator.ts
/**
 * Resolve the URL of the external worker script. Precedence:
 * window.__WEBCODECS_WORKER_URL__, then the WEBCODECS_WORKER_URL env var
 * (both trimmed; blank values ignored), then document base, then "/".
 */
function resolveWorkerUrl() {
  const fromWindow = typeof window !== "undefined" ? window.__WEBCODECS_WORKER_URL__ : void 0;
  const fromEnv = typeof process !== "undefined" ? process.env?.WEBCODECS_WORKER_URL : void 0;
  const configured = typeof fromWindow === "string" && fromWindow.trim() || typeof fromEnv === "string" && fromEnv.trim();
  if (configured) {
    return configured;
  }
  // Otherwise resolve relative to the document base, falling back to root.
  if (typeof document !== "undefined" && document.baseURI) {
    return new URL("webcodecs-worker.js", document.baseURI).toString();
  }
  return "/webcodecs-worker.js";
}
/**
 * Spawn the external module worker from the resolved URL; any constructor
 * failure (missing file, CSP, invalid URL) is wrapped in a typed EncodeError.
 */
function createExternalWorker() {
  let worker;
  try {
    worker = new Worker(resolveWorkerUrl(), { type: "module" });
  } catch (error) {
    throw new EncodeError(
      "initialization-failed",
      "Failed to create external worker. Make sure webcodecs-worker.js is available and WEBCODECS_WORKER_URL is configured when needed.",
      error
    );
  }
  return worker;
}
/**
 * Build a worker from an in-memory blob of the (mock) worker source.
 * Returns { worker, blobUrl } so the caller can later revoke the URL.
 */
function createInlineWorker() {
  try {
    const blobUrl = URL.createObjectURL(
      new Blob([getWorkerSource()], { type: "application/javascript" })
    );
    return { worker: new Worker(blobUrl, { type: "module" }), blobUrl };
  } catch (error) {
    throw new EncodeError(
      "initialization-failed",
      "Failed to create inline worker",
      error
    );
  }
}
/**
 * Decide which worker to create. Order of precedence:
 *   1. explicit inline override (blocked in production unless allowed),
 *   2. inline worker in test environments (unless disabled),
 *   3. the external worker, with an inline fallback in dev when it fails.
 */
function createWorker() {
  const testEnv = detectTestEnvironment();
  const prodEnv = detectProductionEnvironment();
  const inlineDisabled = isInlineWorkerDisabled();
  if (hasInlineWorkerOverride()) {
    if (prodEnv && !allowInlineOverrideInProduction()) {
      throw new Error(
        "[WorkerCommunicator] Inline worker override is disabled in production environments."
      );
    }
    console.warn("[WorkerCommunicator] Using inline worker (override).");
    return createInlineWorker();
  }
  if (testEnv && !inlineDisabled) {
    console.warn(
      "[WorkerCommunicator] Using inline worker (test environment)."
    );
    return createInlineWorker();
  }
  try {
    return createExternalWorker();
  } catch (error) {
    // Development convenience: fall back to the inline mock worker.
    if (!inlineDisabled && !prodEnv) {
      console.warn(
        "[WorkerCommunicator] Failed to create external worker. Falling back to inline worker.",
        error
      );
      return createInlineWorker();
    }
    if (!inlineDisabled) {
      console.error(
        "[WorkerCommunicator] Failed to create external worker in a production-like environment.",
        error
      );
    }
    throw error;
  }
}
/**
 * Heuristics for "are we running under a test runner?": Vitest/Jest env
 * vars, NODE_ENV=test, an npm test script, the global `vi` helper, or a
 * jsdom user agent.
 */
function detectTestEnvironment() {
  if (typeof process !== "undefined") {
    const env = process.env ?? {};
    if (env.VITEST === "true") return true;
    if (env.JEST_WORKER_ID !== void 0) return true;
    if (env.NODE_ENV === "test") return true;
    if (env.npm_lifecycle_event?.includes("test")) return true;
  }
  // Vitest exposes a global `vi` helper.
  if (typeof globalThis !== "undefined" && globalThis.vi) return true;
  if (typeof global !== "undefined" && global.process?.env?.NODE_ENV === "test") {
    return true;
  }
  // jsdom-based DOM environments are test environments.
  if (typeof window !== "undefined" && window.navigator?.userAgent?.includes("jsdom")) {
    return true;
  }
  return false;
}
/**
 * Heuristics for "production-like" environments. In Node, NODE_ENV wins;
 * without it, the npm lifecycle script name is inspected. In browsers,
 * HTTPS on a non-local host is treated as production.
 */
function detectProductionEnvironment() {
  if (typeof process !== "undefined") {
    const nodeEnv = process.env?.NODE_ENV;
    if (nodeEnv) {
      return ["production", "prod", "staging", "preview"].includes(nodeEnv);
    }
    // No NODE_ENV: infer from the npm script being executed.
    return /build|start|serve|preview/i.test(process.env?.npm_lifecycle_event ?? "");
  }
  if (typeof window !== "undefined") {
    const hostname = window.location?.hostname ?? "";
    const isLocalHost = hostname === "" || hostname === "localhost" || hostname === "127.0.0.1" || hostname.endsWith(".localhost");
    return window.location?.protocol === "https:" && !isLocalHost;
  }
  return false;
}
/** True when the inline worker has been explicitly opted into via env var or window flag. */
function hasInlineWorkerOverride() {
  if (typeof process !== "undefined" && process.env?.WEBCODECS_USE_INLINE_WORKER === "true") {
    return true;
  }
  return typeof window !== "undefined" && window.__WEBCODECS_USE_INLINE_WORKER__ === true;
}
/** True when the inline override is explicitly permitted in production-like environments. */
function allowInlineOverrideInProduction() {
  if (typeof process !== "undefined" && process.env?.WEBCODECS_ALLOW_INLINE_IN_PROD === "true") {
    return true;
  }
  return typeof window !== "undefined" && window.__WEBCODECS_ALLOW_INLINE_IN_PROD__ === true;
}
/** True when the inline worker fallback has been explicitly disabled. */
function isInlineWorkerDisabled() {
  if (typeof process !== "undefined" && process.env?.WEBCODECS_DISABLE_INLINE_WORKER === "true") {
    return true;
  }
  return typeof window !== "undefined" && window.__WEBCODECS_DISABLE_INLINE_WORKER__ === true;
}
/**
 * Returns the source code (as a string) for the inline fallback worker.
 *
 * The returned script is a MOCK: it acknowledges messages and emits fake
 * progress/result data but performs no real encoding. It exists so tests
 * can run without shipping the external webcodecs-worker.js file; real
 * encoding must go through the external worker.
 */
function getWorkerSource() {
  // NOTE(review): the template literal below is executed from a blob: URL
  // as a standalone module worker — it must stay self-contained (no
  // references to this file's scope). Do not edit its text casually.
  return `
// \u26A0\uFE0F TESTING ONLY - DO NOT USE IN PRODUCTION \u26A0\uFE0F
// WebCodecs Encoder Worker (Inline Mock Implementation)
// This is a minimal mock for testing purposes only.
// Real encoding should use the external webcodecs-worker.js file.
console.warn('\u26A0\uFE0F Using inline mock worker - FOR TESTING ONLY');
let config = null;
let processedFrames = 0;
self.onmessage = async function(event) {
const { type, ...data } = event.data;
try {
switch (type) {
case 'initialize':
config = data.config;
processedFrames = 0;
// Wait a bit before sending success response
setTimeout(() => {
self.postMessage({ type: 'initialized' });
}, 50);
break;
case 'addVideoFrame':
processedFrames++;
// Progress update
self.postMessage({
type: 'progress',
processedFrames,
totalFrames: data.totalFrames
});
break;
case 'addAudioData':
// Audio data processing (placeholder)
break;
case 'finalize':
// Wait a bit before returning result
setTimeout(() => {
const result = new Uint8Array([0x00, 0x00, 0x00, 0x20, 0x66, 0x74, 0x79, 0x70]); // MP4 magic number
self.postMessage({ type: 'finalized', output: result });
}, 100);
break;
case 'cancel':
self.postMessage({ type: 'cancelled' });
break;
default:
console.warn('Unknown message type:', type);
}
} catch (error) {
self.postMessage({
type: 'error',
errorDetail: {
message: error.message,
type: 'encoding-failed',
stack: error.stack
}
});
}
};
`;
}
/**
 * Thin wrapper around the encoder Worker: creates it (external or inline),
 * routes typed messages to registered handlers (one handler per type), and
 * transfers large payloads (ArrayBuffers, MessagePorts, ImageBitmaps)
 * instead of cloning them where that is safe.
 */
var WorkerCommunicator = class {
  constructor() {
    this.messageHandlers = /* @__PURE__ */ new Map();
    // blob: URL backing an inline worker, kept so terminate() can revoke it.
    this.workerBlobUrl = null;
    // Worker error that arrived before any "error" handler was registered;
    // replayed once a handler is attached (see on()).
    this.pendingWorkerError = null;
    const workerResult = createWorker();
    // createInlineWorker returns { worker, blobUrl }; createExternalWorker
    // returns the Worker itself.
    if (typeof workerResult === "object" && "worker" in workerResult) {
      this.worker = workerResult.worker;
      this.workerBlobUrl = workerResult.blobUrl;
    } else {
      this.worker = workerResult;
    }
    this.worker.onmessage = this.handleMessage.bind(this);
    this.worker.onerror = this.handleWorkerError.bind(this);
  }
  // Dispatch an incoming worker message to the handler registered for its
  // `type`; messages without a handler are silently dropped.
  handleMessage(event) {
    const { type, ...data } = event.data;
    const handler = this.messageHandlers.get(type);
    if (handler) {
      handler(data);
    }
  }
  // Convert a raw worker `error` event into the library's error payload and
  // deliver (or buffer) it.
  handleWorkerError(event) {
    if (typeof event.preventDefault === "function") {
      event.preventDefault();
    }
    const payload = {
      errorDetail: {
        message: event.message ? `Worker error: ${event.message}` : "Worker error",
        type: "worker-error",
        stack: event.error?.stack
      }
    };
    const handler = this.messageHandlers.get("error");
    if (handler) {
      handler(payload);
      return;
    }
    // No handler yet: buffer the error so it is not lost, and surface it
    // on the console in the meantime.
    this.pendingWorkerError = payload;
    console.error("Worker error before error handler registration:", event);
  }
  /**
   * Register message handler. Registering an "error" handler immediately
   * replays any worker error buffered before registration.
   */
  on(type, handler) {
    this.messageHandlers.set(type, handler);
    if (type === "error" && this.pendingWorkerError) {
      const pending = this.pendingWorkerError;
      this.pendingWorkerError = null;
      handler(pending);
    }
  }
  /**
   * Unregister message handler
   */
  off(type) {
    this.messageHandlers.delete(type);
  }
  /**
   * Send message to worker. Transferable payloads found in `data` are
   * transferred; if the transfer-enabled postMessage throws, the message is
   * re-sent via plain structured clone as a fallback.
   */
  send(type, data = {}) {
    const transferables = [];
    // Safari gets conservative transferable handling (see collectTransferables).
    const isSafari = typeof navigator !== "undefined" && /^((?!chrome|android).)*safari/i.test(navigator.userAgent);
    if (data.buffer instanceof ArrayBuffer) {
      transferables.push(data.buffer);
    }
    this.collectTransferables(data, transferables, isSafari);
    if (transferables.length > 0) {
      try {
        this.worker.postMessage({ type, ...data }, transferables);
      } catch (error) {
        console.warn(
          "Transferable object transfer failed, falling back to clone:",
          error
        );
        this.worker.postMessage({ type, ...data });
      }
    } else {
      this.worker.postMessage({ type, ...data });
    }
  }
  /**
   * Recursively collect transferable objects while avoiding problematic types.
   * VideoFrame/AudioData are skipped entirely (they are cloned, not
   * transferred, and closed by the sender). On Safari only a directly-passed
   * ArrayBuffer is transferred — no recursive scanning.
   */
  collectTransferables(obj, transferables, isSafari) {
    if (!obj || typeof obj !== "object") return;
    if (typeof VideoFrame !== "undefined" && obj instanceof VideoFrame) return;
    if (typeof AudioData !== "undefined" && obj instanceof AudioData) return;
    if (isSafari) {
      if (obj instanceof ArrayBuffer && !transferables.includes(obj)) {
        transferables.push(obj);
      }
      return;
    }
    if (obj instanceof ArrayBuffer && !transferables.includes(obj)) {
      transferables.push(obj);
    } else if (obj instanceof MessagePort && !transferables.includes(obj)) {
      transferables.push(obj);
    } else if (typeof ImageBitmap !== "undefined" && obj instanceof ImageBitmap && !transferables.includes(obj)) {
      transferables.push(obj);
    }
    // Walk own enumerable properties for nested transferables.
    for (const key in obj) {
      if (Object.prototype.hasOwnProperty.call(obj, key)) {
        this.collectTransferables(obj[key], transferables, isSafari);
      }
    }
  }
  /**
   * Terminate communication: drop all handlers, stop the worker, and revoke
   * the inline worker's blob URL if one was created.
   */
  terminate() {
    this.messageHandlers.clear();
    if (this.worker) {
      this.worker.terminate();
    }
    if (this.workerBlobUrl) {
      URL.revokeObjectURL(this.workerBlobUrl);
      this.workerBlobUrl = null;
    }
  }
};
// src/utils/video-frame-converter.ts
// src/utils/video-frame-converter.ts
/**
 * Normalize any supported frame-like input into a VideoFrame stamped with
 * `timestamp` (microseconds). Throws EncodeError("invalid-input") for
 * unsupported values. The caller owns (and must close) the returned frame.
 */
async function convertToVideoFrame(frame, timestamp) {
  // Native sources the VideoFrame constructor accepts directly.
  if (
    frame instanceof VideoFrame ||
    frame instanceof HTMLCanvasElement ||
    frame instanceof OffscreenCanvas ||
    frame instanceof ImageBitmap
  ) {
    return new VideoFrame(frame, { timestamp });
  }
  // Raw RGBA pixel buffers need explicit format and dimensions.
  if (frame instanceof ImageData) {
    return new VideoFrame(frame.data, {
      format: "RGBA",
      codedWidth: frame.width,
      codedHeight: frame.height,
      timestamp
    });
  }
  if (frame && typeof frame === "object") {
    // Duck-typed ImageData: { width, height, data }.
    if ("width" in frame && "height" in frame && "data" in frame) {
      const imageDataLike = frame;
      return new VideoFrame(imageDataLike.data, {
        format: "RGBA",
        codedWidth: imageDataLike.width,
        codedHeight: imageDataLike.height,
        timestamp
      });
    }
    // Duck-typed canvas (getContext / transferToImageBitmap).
    if ("width" in frame && "height" in frame && ("getContext" in frame || "transferToImageBitmap" in frame)) {
      return new VideoFrame(frame, { timestamp });
    }
    // Duck-typed VideoFrame-like object (has close()).
    if ("width" in frame && "height" in frame && "close" in frame && typeof frame.close === "function") {
      return new VideoFrame(frame, { timestamp });
    }
  }
  throw new EncodeError(
    "invalid-input",
    `Unsupported frame type: ${typeof frame}. Frame must be VideoFrame, HTMLCanvasElement, OffscreenCanvas, ImageBitmap, or ImageData.`
  );
}
// src/stream/encode-stream.ts
// src/stream/encode-stream.ts
/**
 * Encode `source` (frame array, MediaStream, async iterable, or VideoFile)
 * and yield encoded container chunks as the worker produces them. Forces
 * `latencyMode: "realtime"` on the derived config.
 *
 * Progress is reported via `options.onProgress`; failures are wrapped in
 * EncodeError, forwarded to `options.onError` when provided, then thrown.
 * The worker is always terminated when the generator finishes or aborts.
 */
async function* encodeStream(source, options) {
  let communicator = null;
  // Chunks received from the worker that have not been yielded yet.
  const chunks = [];
  let isFinalized = false;
  let streamError = null;
  let processedFrames = 0;
  let totalFrames;
  const startTime = Date.now();
  try {
    const baseConfig = await inferAndBuildConfig(source, options);
    const config = { ...baseConfig, latencyMode: "realtime" };
    // Total frame count is optional; used only for percent/ETA reporting.
    try {
      totalFrames = await calculateTotalFrames(source, config);
    } catch (error) {
      console.warn("Failed to calculate total frames for streaming:", error);
    }
    communicator = new WorkerCommunicator();
    const updateProgress = (stage) => {
      if (options?.onProgress) {
        const elapsed = Date.now() - startTime;
        const fps = processedFrames > 0 ? processedFrames / elapsed * 1e3 : 0;
        // percent/ETA are only meaningful when totalFrames is known.
        const percent = totalFrames ? processedFrames / totalFrames * 100 : 0;
        const estimatedRemainingMs = totalFrames && fps > 0 ? (totalFrames - processedFrames) / fps * 1e3 : void 0;
        const progressInfo = {
          percent,
          processedFrames,
          totalFrames,
          fps,
          stage,
          estimatedRemainingMs
        };
        options.onProgress(progressInfo);
      }
    };
    // Resolves when the worker reports "finalized"; rejects on any worker
    // error or source-processing failure. The yield loop below observes the
    // same conditions through isFinalized/streamError.
    const encodingPromise = new Promise((resolve, reject) => {
      communicator.on("initialized", () => {
        updateProgress("streaming");
        processVideoSource(communicator, source, config).then(() => {
          updateProgress("finalizing");
          communicator.send("finalize");
        }).catch(reject);
      });
      communicator.on(
        "progress",
        (data) => {
          processedFrames = data.processedFrames;
          if (data.totalFrames !== void 0) {
            totalFrames = data.totalFrames;
          }
          updateProgress("streaming");
        }
      );
      communicator.on("dataChunk", (data) => {
        chunks.push(data.chunk);
      });
      communicator.on("finalized", () => {
        isFinalized = true;
        updateProgress("finalizing");
        resolve();
      });
      communicator.on("error", (data) => {
        streamError = new EncodeError(
          data.errorDetail.type || "encoding-failed",
          data.errorDetail.message || "Worker error",
          data.errorDetail
        );
        reject(streamError);
      });
      communicator.send("initialize", { config, totalFrames });
    });
    // Poll-and-yield loop: hand out chunks as they arrive (10 ms poll).
    while (!isFinalized && !streamError) {
      if (chunks.length > 0) {
        const chunk = chunks.shift();
        yield chunk;
      } else {
        await new Promise((resolve) => setTimeout(resolve, 10));
      }
    }
    // Drain anything that arrived between finalization and loop exit.
    while (chunks.length > 0) {
      const chunk = chunks.shift();
      yield chunk;
    }
    if (streamError) {
      throw streamError;
    }
    // Surface any failure from the encoding pipeline itself.
    try {
      await encodingPromise;
    } catch (error) {
      const encodeError = error instanceof EncodeError ? error : new EncodeError(
        "encoding-failed",
        `Streaming failed: ${error instanceof Error ? error.message : String(error)}`,
        error
      );
      if (options?.onError) {
        options.onError(encodeError);
      }
      throw encodeError;
    }
  } catch (error) {
    const encodeError = error instanceof EncodeError ? error : new EncodeError(
      "encoding-failed",
      `Stream encoding failed: ${error instanceof Error ? error.message : String(error)}`,
      error
    );
    if (options?.onError) {
      options.onError(encodeError);
    }
    throw encodeError;
  } finally {
    // Runs on normal completion, error, and early generator return.
    if (communicator) {
      communicator.terminate();
    }
  }
}
/**
 * Dispatch on the source shape: frame array, live MediaStream, async
 * iterable of frames, or (fallback) a VideoFile wrapper.
 */
async function processVideoSource(communicator, source, config) {
  if (Array.isArray(source)) {
    return processFrameArray(communicator, source, config);
  }
  if (source instanceof MediaStream) {
    return processMediaStreamRealtime(communicator, source, config);
  }
  if (Symbol.asyncIterator in source) {
    return processAsyncIterable(communicator, source, config);
  }
  return processVideoFile(communicator, source, config);
}
/**
 * Feed a pre-built frame array to the worker, pacing submission at the
 * configured frame rate so the realtime pipeline is not flooded.
 */
async function processFrameArray(communicator, frames, config) {
  const fps = config?.frameRate || 30;
  const targetFrameMs = 1e3 / fps;
  let previousTick = performance.now();
  for (const [index, frame] of frames.entries()) {
    // Timestamps are in microseconds.
    await addFrameToWorker(communicator, frame, index * 1e6 / fps);
    const elapsed = performance.now() - previousTick;
    const waitMs = Math.max(0, targetFrameMs - elapsed);
    await new Promise((resolve) => setTimeout(resolve, waitMs));
    previousTick = performance.now();
  }
}
/**
 * Feed frames from an async iterable to the worker, assigning timestamps
 * (microseconds) from the configured frame rate.
 */
async function processAsyncIterable(communicator, source, config) {
  const fps = config?.frameRate || 30;
  let index = 0;
  for await (const frame of source) {
    await addFrameToWorker(communicator, frame, index * 1e6 / fps);
    index++;
  }
}
/**
 * Pump a live MediaStream into the worker: one MediaStreamTrackProcessor
 * reader per (first) video/audio track, both consumed concurrently.
 * Readers are cancelled best-effort on the way out.
 */
async function processMediaStreamRealtime(communicator, stream, config) {
  const readers = [];
  const tasks = [];
  try {
    const [videoTrack] = stream.getVideoTracks();
    if (videoTrack) {
      const reader = new MediaStreamTrackProcessor({ track: videoTrack }).readable.getReader();
      readers.push(reader);
      tasks.push(processVideoTrackRealtime(communicator, reader, config));
    }
    const [audioTrack] = stream.getAudioTracks();
    if (audioTrack) {
      const reader = new MediaStreamTrackProcessor({ track: audioTrack }).readable.getReader();
      readers.push(reader);
      tasks.push(processAudioTrackRealtime(communicator, reader));
    }
    await Promise.all(tasks);
  } finally {
    // Best-effort cancellation of any still-open readers.
    for (const reader of readers) {
      try {
        reader.cancel();
      } catch (e) {
      }
    }
  }
}
/**
 * Pump decoded VideoFrames from a track reader into the worker until the
 * track ends. Every frame is closed after forwarding (even on failure) to
 * release its backing resources. Errors are wrapped as EncodeError.
 */
async function processVideoTrackRealtime(communicator, reader, _config) {
  try {
    for (;;) {
      const { value: frame, done } = await reader.read();
      if (done || !frame) return;
      try {
        await addFrameToWorker(communicator, frame, frame.timestamp || 0);
      } finally {
        frame.close();
      }
    }
  } catch (error) {
    throw new EncodeError(
      "video-encoding-error",
      `Real-time video stream processing error: ${error instanceof Error ? error.message : String(error)}`,
      error
    );
  }
}
/**
 * Pump AudioData from a track reader to the worker until the track ends.
 * Each AudioData is closed after sending (even on failure). Errors are
 * wrapped as EncodeError.
 */
async function processAudioTrackRealtime(communicator, reader) {
  try {
    for (;;) {
      const { value: audio, done } = await reader.read();
      if (done || !audio) return;
      try {
        communicator.send("addAudioData", {
          audio,
          timestamp: audio.timestamp || 0,
          format: "f32",
          sampleRate: audio.sampleRate,
          numberOfFrames: audio.numberOfFrames,
          numberOfChannels: audio.numberOfChannels
        });
      } finally {
        audio.close();
      }
    }
  } catch (error) {
    throw new EncodeError(
      "audio-encoding-error",
      `Real-time audio stream processing error: ${error instanceof Error ? error.message : String(error)}`,
      error
    );
  }
}
/**
 * Normalize a frame-like value to a VideoFrame, post it to the worker, and
 * close the local copy (the worker receives its own clone).
 */
async function addFrameToWorker(communicator, frame, timestamp) {
  const videoFrame = await convertToVideoFrame(frame, timestamp);
  try {
    communicator.send("addVideoFrame", { frame: videoFrame, timestamp });
  } finally {
    videoFrame.close();
  }
}
/**
 * Re-encode a VideoFile by seeking a hidden <video> element frame-by-frame,
 * drawing each position onto a canvas, and submitting the canvas to the
 * worker. Audio is decoded separately via WebAudio when the config enables
 * it. Browser-only (uses document/createObjectURL).
 *
 * NOTE(review): seek-based sampling is approximate — frames may repeat or
 * be skipped if the requested rate differs from the file's native rate.
 */
async function processVideoFile(communicator, videoFile, config) {
  const video = document.createElement("video");
  video.muted = true;
  video.preload = "metadata";
  let objectUrl = null;
  let audioContext = null;
  try {
    objectUrl = URL.createObjectURL(videoFile.file);
    video.src = objectUrl;
    // Wait for metadata (duration, intrinsic dimensions).
    await new Promise((resolve, reject) => {
      const handleLoaded = () => {
        cleanup();
        resolve();
      };
      const handleError = () => {
        cleanup();
        reject(new Error("Failed to load video file"));
      };
      const cleanup = () => {
        video.onloadedmetadata = null;
        video.onerror = null;
      };
      video.onloadedmetadata = handleLoaded;
      video.onerror = handleError;
    });
    const { duration, videoWidth, videoHeight } = video;
    const frameRate = config.frameRate && config.frameRate > 0 ? config.frameRate : 30;
    const totalFrames = Math.max(1, Math.floor(duration * frameRate) || 1);
    // Decode and forward the audio track first (best-effort; failures are
    // logged and the video-only path continues).
    if (config.audioBitrate > 0 && typeof AudioContext !== "undefined") {
      try {
        audioContext = new AudioContext();
        const arrayBuffer = await videoFile.file.arrayBuffer();
        const audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
        await processAudioFromFile(
          communicator,
          audioBuffer,
          duration,
          frameRate
        );
      } catch (audioError) {
        console.warn("Failed to process audio from VideoFile:", audioError);
      }
    }
    // Canvas at the target output size; drawImage rescales each frame.
    const targetWidth = config.width && config.width > 0 ? config.width : videoWidth || 640;
    const targetHeight = config.height && config.height > 0 ? config.height : videoHeight || 480;
    const canvas = document.createElement("canvas");
    canvas.width = targetWidth;
    canvas.height = targetHeight;
    const ctx = canvas.getContext("2d");
    if (!ctx) {
      throw new EncodeError(
        "initialization-failed",
        "Failed to get canvas context"
      );
    }
    for (let frameIndex = 0; frameIndex < totalFrames; frameIndex++) {
      // Seek to the frame's time (clamped to the file duration), then wait
      // for the "seeked" event before sampling the frame.
      const timestampSeconds = Math.min(duration || 0, frameIndex / frameRate);
      video.currentTime = Number.isFinite(timestampSeconds) ? timestampSeconds : 0;
      await new Promise((resolve, reject) => {
        const handleSeeked = () => {
          cleanup();
          resolve();
        };
        const handleError = () => {
          cleanup();
          reject(new Error("Video seek failed"));
        };
        const cleanup = () => {
          video.removeEventListener("seeked", handleSeeked);
          video.removeEventListener("error", handleError);
        };
        video.addEventListener("seeked", handleSeeked, { once: true });
        video.addEventListener("error", handleError, { once: true });
      });
      // Scale from the intrinsic size to the target canvas size.
      ctx.drawImage(
        video,
        0,
        0,
        videoWidth || canvas.width,
        videoHeight || canvas.height,
        0,
        0,
        canvas.width,
        canvas.height
      );
      const chunkTimestamp = Math.round(frameIndex * (1e6 / frameRate));
      await addFrameToWorker(communicator, canvas, chunkTimestamp);
      // Yield to the rendering loop between frames.
      await new Promise((resolve) => requestAnimationFrame(resolve));
    }
  } catch (error) {
    throw new EncodeError(
      "invalid-input",
      `VideoFile processing failed: ${error instanceof Error ? error.message : String(error)}`,
      error
    );
  } finally {
    // Release WebAudio, blob URL, and the temporary element.
    if (audioContext) {
      try {
        await audioContext.close();
      } catch (closeError) {
        console.warn("Failed to close AudioContext", closeError);
      }
    }
    if (objectUrl) {
      URL.revokeObjectURL(objectUrl);
    }
    video.src = "";
    video.remove();
  }
}
/**
 * Slice a decoded AudioBuffer into small interleaved f32 AudioData chunks
 * and send them to the worker, yielding to the event loop between chunks.
 *
 * @param communicator worker channel receiving "addAudioData" messages
 * @param audioBuffer  decoded WebAudio buffer (planar channel data)
 * @param duration     total media duration in seconds (unused; kept for interface)
 * @param frameRate    video frame rate — caps the chunk duration
 */
async function processAudioFromFile(communicator, audioBuffer, duration, frameRate) {
  const sampleRate = audioBuffer.sampleRate;
  const numberOfChannels = audioBuffer.numberOfChannels;
  const totalSamples = audioBuffer.length;
  // Chunk size: at most 20 ms, never longer than one video frame.
  const chunkDurationMs = Math.min(20, 1e3 / frameRate);
  // Clamp to >= 1 sample so `offset` always advances (a very high frameRate
  // could otherwise floor this to 0 and loop forever).
  const samplesPerChunk = Math.max(1, Math.floor(sampleRate * chunkDurationMs / 1e3));
  // Fetch each channel's backing Float32Array once, not once per chunk.
  const channelBuffers = [];
  for (let channel = 0; channel < numberOfChannels; channel++) {
    channelBuffers.push(audioBuffer.getChannelData(channel));
  }
  for (let offset = 0; offset < totalSamples; offset += samplesPerChunk) {
    const framesInChunk = Math.min(samplesPerChunk, totalSamples - offset);
    const timestamp = offset / sampleRate * 1e6; // microseconds
    try {
      // Interleave directly from the planar channel buffers (f32 format);
      // no intermediate per-channel copies are needed.
      const interleavedData = new Float32Array(framesInChunk * numberOfChannels);
      for (let frame = 0; frame < framesInChunk; frame++) {
        for (let channel = 0; channel < numberOfChannels; channel++) {
          interleavedData[frame * numberOfChannels + channel] = channelBuffers[channel][offset + frame];
        }
      }
      const audioData = new AudioData({
        format: "f32",
        sampleRate,
        numberOfFrames: framesInChunk,
        numberOfChannels,
        timestamp,
        data: interleavedData
      });
      communicator.send("addAudioData", {
        audio: audioData,
        timestamp,
        format: "f32",
        sampleRate,
        numberOfFrames: framesInChunk,
        numberOfChannels
      });
      audioData.close();
    } catch (error) {
      // Best-effort: skip the bad chunk and keep going.
      console.warn("Failed to create AudioData chunk:", error);
    }
    // Yield so long files do not block the event loop.
    await new Promise((resolve) => setTimeout(resolve, 0));
  }
}
/**
 * Best-effort total frame count for progress reporting. Arrays report
 * their length; MediaStreams and async iterables have no knowable length
 * (undefined); VideoFiles are probed via a temporary <video> element.
 * Returns undefined (never throws) when the count cannot be determined.
 */
async function calculateTotalFrames(source, config) {
  try {
    if (Array.isArray(source)) {
      return source.length;
    }
    if (source instanceof MediaStream) {
      return void 0;
    }
    if (Symbol.asyncIterator in source) {
      return void 0;
    }
    // VideoFile: read the duration from a temporary <video> element.
    const videoFile = source;
    const video = document.createElement("video");
    video.muted = true;
    video.preload = "metadata";
    const objectUrl = URL.createObjectURL(videoFile.file);
    video.src = objectUrl;
    try {
      await new Promise((resolve, reject) => {
        const cleanup = () => {
          video.onloadedmetadata = null;
          video.onerror = null;
        };
        video.onloadedmetadata = () => {
          cleanup();
          resolve();
        };
        video.onerror = () => {
          cleanup();
          reject(new Error("Failed to load video metadata"));
        };
      });
      const frameRate = config.frameRate || 30;
      const totalFrames = Math.floor(video.duration * frameRate);
      // Streams and broken files can report NaN/Infinity duration; a NaN or
      // non-positive count would corrupt progress math downstream.
      return Number.isFinite(totalFrames) && totalFrames > 0 ? totalFrames : void 0;
    } finally {
      // Always release the blob URL and detach the element, on success too
      // (previously only the error path revoked the URL).
      URL.revokeObjectURL(objectUrl);
      video.src = "";
      video.remove?.();
    }
  } catch (error) {
    console.warn("Failed to calculate total frames for streaming:", error);
    return void 0;
  }
}
export {
encodeStream
};
//# sourceMappingURL=encode-stream.js.map