UNPKG

webcodecs-encoder

Version:

A TypeScript library for browser environments to encode video (H.264/AVC, VP9, VP8) and audio (AAC, Opus) using the WebCodecs API and mux them into MP4 or WebM containers with real-time streaming support. Features a new, function-first API design.

1,540 lines (1,524 loc) 59.7 kB
// src/types.ts var EncodeError = class _EncodeError extends Error { constructor(type, message, cause) { super(message); this.name = "EncodeError"; this.type = type; this.cause = cause; Object.setPrototypeOf(this, _EncodeError.prototype); } }; // src/utils/config-parser.ts async function inferAndBuildConfig(source, options) { const inferredConfig = await inferConfigFromSource(source); const mergedOptions = mergeWithUserOptions(inferredConfig, options); const configWithPreset = applyQualityPreset(mergedOptions, options?.quality); return convertToEncoderConfig(configWithPreset); } async function inferConfigFromSource(source) { const config = { frameRate: 30, // デフォルト値 container: "mp4" // デフォルト値 }; try { const firstFrame = await getFirstFrame(source); if (firstFrame) { const dimensions = getFrameDimensions(firstFrame); config.width = dimensions.width; config.height = dimensions.height; } if (source instanceof MediaStream) { const videoTracks = source.getVideoTracks(); const audioTracks = source.getAudioTracks(); if (videoTracks.length === 0) { config.video = false; } if (audioTracks.length === 0) { config.audio = false; } else { const audioTrack = audioTracks[0]; const settings = audioTrack.getSettings(); config.audio = { sampleRate: settings.sampleRate || 48e3, channels: settings.channelCount || 2 }; } } } catch (error) { config.width = 640; config.height = 480; } return config; } function mergeWithUserOptions(inferredConfig, userOptions) { return { // 推定された設定をベースに ...inferredConfig, // ユーザー指定の設定で上書き ...userOptions, // ネストしたオブジェクトは個別にマージ video: { ...inferredConfig.video, ...userOptions?.video }, audio: userOptions?.audio === false ? 
false : { ...inferredConfig.audio, ...userOptions?.audio } }; } function applyQualityPreset(config, quality) { if (!quality) return config; const width = config.width || 640; const height = config.height || 480; const pixels = width * height; const basePixelsPerSecond = pixels * (config.frameRate || 30); let videoBitrate; let audioBitrate; switch (quality) { case "low": videoBitrate = Math.max(5e5, basePixelsPerSecond * 0.1); audioBitrate = 64e3; break; case "medium": videoBitrate = Math.max(1e6, basePixelsPerSecond * 0.2); audioBitrate = 128e3; break; case "high": videoBitrate = Math.max(2e6, basePixelsPerSecond * 0.4); audioBitrate = 192e3; break; case "lossless": videoBitrate = Math.max(1e7, basePixelsPerSecond * 1); audioBitrate = 32e4; break; default: return config; } return { ...config, video: config.video === false ? false : { ...config.video, bitrate: config.video?.bitrate || videoBitrate }, audio: config.audio === false ? false : { ...config.audio, bitrate: config.audio?.bitrate || audioBitrate } }; } function convertToEncoderConfig(options) { const config = { width: options.video === false ? 0 : options.width || 640, height: options.video === false ? 0 : options.height || 480, frameRate: options.frameRate || 30, videoBitrate: options.video === false ? 0 : options.video?.bitrate || 1e6, audioBitrate: options.audio === false ? 0 : options.audio?.bitrate || 128e3, sampleRate: options.audio === false ? 0 : options.audio?.sampleRate || 48e3, channels: options.audio === false ? 0 : options.audio?.channels || 2, container: options.container || "mp4", codec: { video: options.video === false ? void 0 : options.video?.codec || "avc", audio: options.audio === false ? void 0 : options.audio?.codec || "aac" }, latencyMode: options.video === false ? "quality" : options.latencyMode || options.video?.latencyMode || "quality", hardwareAcceleration: options.video === false ? 
"no-preference" : options.video?.hardwareAcceleration || "no-preference", keyFrameInterval: options.video === false ? void 0 : options.video?.keyFrameInterval, audioBitrateMode: options.audio === false ? void 0 : options.audio?.bitrateMode || "variable", firstTimestampBehavior: options.firstTimestampBehavior || "strict", maxVideoQueueSize: options.maxVideoQueueSize || 30, maxAudioQueueSize: options.maxAudioQueueSize || 30, backpressureStrategy: options.backpressureStrategy || "drop" }; return config; } async function getFirstFrame(source) { if (Array.isArray(source)) { return source.length > 0 ? source[0] : null; } if (source instanceof MediaStream) { const videoTracks = source.getVideoTracks(); if (videoTracks.length > 0) { const settings = videoTracks[0].getSettings(); if (settings.width && settings.height) { return { displayWidth: settings.width, displayHeight: settings.height }; } } return null; } if (Symbol.asyncIterator in source) { for await (const frame of source) { return frame; } return null; } return null; } function getFrameDimensions(frame) { if (!frame) { return { width: 640, height: 480 }; } if (frame instanceof VideoFrame) { return { width: frame.displayWidth || frame.codedWidth, height: frame.displayHeight || frame.codedHeight }; } if (frame instanceof HTMLCanvasElement || frame instanceof OffscreenCanvas) { return { width: frame.width, height: frame.height }; } if (frame instanceof ImageBitmap) { return { width: frame.width, height: frame.height }; } if (frame instanceof ImageData) { return { width: frame.width, height: frame.height }; } if ("displayWidth" in frame && "displayHeight" in frame) { return { width: frame.displayWidth, height: frame.displayHeight }; } return { width: 640, height: 480 }; } // src/worker/worker-communicator.ts function createExternalWorker() { try { const worker = new Worker("/webcodecs-worker.js", { type: "module" }); worker.onerror = (event) => { console.error("Worker error:", event); throw new 
EncodeError("worker-error", `Worker error: ${event.message}`); }; return worker; } catch (error) { throw new EncodeError( "initialization-failed", "Failed to create external worker. Make sure webcodecs-worker.js is available in your public directory.", error ); } } function createInlineWorker() { try { const workerSource = getWorkerSource(); const blob = new Blob([workerSource], { type: "application/javascript" }); const blobUrl = URL.createObjectURL(blob); const worker = new Worker(blobUrl, { type: "module" }); worker.onerror = (event) => { console.error("Inline worker error:", event); throw new EncodeError( "worker-error", `Inline worker error: ${event.message}` ); }; return { worker, blobUrl }; } catch (error) { throw new EncodeError( "initialization-failed", "Failed to create inline worker", error ); } } function createWorker() { const isProductionEnvironment = detectProductionEnvironment(); const isTestEnvironment = ( // Vitest environment typeof process !== "undefined" && process.env?.VITEST === "true" || // Jest environment typeof process !== "undefined" && process.env?.JEST_WORKER_ID !== void 0 || // Node.js environment typeof process !== "undefined" && process.env?.NODE_ENV === "test" || // Global test runner exists typeof global !== "undefined" && global.process?.env?.NODE_ENV === "test" || // vitest global function exists typeof globalThis !== "undefined" && "vi" in globalThis || // jsdom environment typeof window !== "undefined" && window.navigator?.userAgent?.includes("jsdom") || // Variables commonly set in test environments typeof process !== "undefined" && process.env?.npm_lifecycle_event?.includes("test") ); const isIntegrationTestEnvironment = typeof window !== "undefined" && (window.location?.hostname === "localhost" || window.location?.hostname === "127.0.0.1") && window.location?.port; const forceDisableInlineWorker = typeof process !== "undefined" && process.env?.WEBCODECS_DISABLE_INLINE_WORKER === "true" || typeof window !== "undefined" && 
window.__WEBCODECS_DISABLE_INLINE_WORKER__ === true; if ((isProductionEnvironment || forceDisableInlineWorker) && (isTestEnvironment || isIntegrationTestEnvironment)) { throw new Error( "[WorkerCommunicator] CRITICAL SECURITY ERROR: Inline worker detected in production environment or explicitly disabled. This is a security risk. Please ensure webcodecs-worker.js is properly deployed." ); } if (isTestEnvironment || isIntegrationTestEnvironment) { console.warn( "[WorkerCommunicator] Using inline worker for test environment" ); return createInlineWorker(); } try { return createExternalWorker(); } catch (error) { if (isProductionEnvironment) { throw new Error( "[WorkerCommunicator] PRODUCTION ERROR: External worker failed to load. Inline worker is disabled for security reasons. Please ensure webcodecs-worker.js is accessible at /webcodecs-worker.js" ); } console.error( "[WorkerCommunicator] External worker creation failed. Inline worker is not used in production.", error ); throw error; } } function detectProductionEnvironment() { if (typeof process !== "undefined") { const nodeEnv = process.env?.NODE_ENV; return nodeEnv === "production" || nodeEnv === "staging" || nodeEnv === "preview" || nodeEnv === "prod"; } if (typeof window !== "undefined") { const isHttps = window.location?.protocol === "https:"; const isNotLocalhost = window.location?.hostname !== "localhost" && window.location?.hostname !== "127.0.0.1" && !window.location?.hostname?.endsWith(".localhost"); const isDevelopmentPort = window.location?.port && ["3000", "3001", "4000", "5000", "5173", "8000", "8080", "9000"].includes( window.location.port ); const hostname = window.location?.hostname || ""; const isProductionDomain = hostname.includes(".com") || hostname.includes(".org") || hostname.includes(".net") || hostname.includes("staging") || hostname.includes("preview") || hostname.includes("prod"); return isHttps && isNotLocalhost && !isDevelopmentPort && isProductionDomain; } return false; } function 
getWorkerSource() { return ` // \u26A0\uFE0F TESTING ONLY - DO NOT USE IN PRODUCTION \u26A0\uFE0F // WebCodecs Encoder Worker (Inline Mock Implementation) // This is a minimal mock for testing purposes only. // Real encoding should use the external webcodecs-worker.js file. console.warn('\u26A0\uFE0F Using inline mock worker - FOR TESTING ONLY'); let config = null; let processedFrames = 0; self.onmessage = async function(event) { const { type, ...data } = event.data; try { switch (type) { case 'initialize': config = data.config; processedFrames = 0; // Wait a bit before sending success response setTimeout(() => { self.postMessage({ type: 'initialized' }); }, 50); break; case 'addVideoFrame': processedFrames++; // Progress update self.postMessage({ type: 'progress', processedFrames, totalFrames: data.totalFrames }); break; case 'addAudioData': // Audio data processing (placeholder) break; case 'finalize': // Wait a bit before returning result setTimeout(() => { const result = new Uint8Array([0x00, 0x00, 0x00, 0x20, 0x66, 0x74, 0x79, 0x70]); // MP4 magic number self.postMessage({ type: 'finalized', output: result }); }, 100); break; case 'cancel': self.postMessage({ type: 'cancelled' }); break; default: console.warn('Unknown message type:', type); } } catch (error) { self.postMessage({ type: 'error', errorDetail: { message: error.message, type: 'encoding-failed', stack: error.stack } }); } }; `; } var WorkerCommunicator = class { constructor() { this.messageHandlers = /* @__PURE__ */ new Map(); this.workerBlobUrl = null; const workerResult = createWorker(); if (typeof workerResult === "object" && "worker" in workerResult) { this.worker = workerResult.worker; this.workerBlobUrl = workerResult.blobUrl; } else { this.worker = workerResult; } this.worker.onmessage = this.handleMessage.bind(this); } handleMessage(event) { const { type, ...data } = event.data; const handler = this.messageHandlers.get(type); if (handler) { handler(data); } } /** * Register message handler 
*/ on(type, handler) { this.messageHandlers.set(type, handler); } /** * Unregister message handler */ off(type) { this.messageHandlers.delete(type); } /** * Send message to worker */ send(type, data = {}) { const transferables = []; const isSafari = typeof navigator !== "undefined" && /^((?!chrome|android).)*safari/i.test(navigator.userAgent); if (data.buffer instanceof ArrayBuffer) { transferables.push(data.buffer); } this.collectTransferables(data, transferables, isSafari); if (transferables.length > 0) { try { this.worker.postMessage({ type, ...data }, transferables); } catch (error) { console.warn( "Transferable object transfer failed, falling back to clone:", error ); this.worker.postMessage({ type, ...data }); } } else { this.worker.postMessage({ type, ...data }); } } /** * Recursively collect transferable objects while avoiding problematic types */ collectTransferables(obj, transferables, isSafari) { if (!obj || typeof obj !== "object") return; if (typeof VideoFrame !== "undefined" && obj instanceof VideoFrame) return; if (typeof AudioData !== "undefined" && obj instanceof AudioData) return; if (isSafari) { if (obj instanceof ArrayBuffer && !transferables.includes(obj)) { transferables.push(obj); } return; } if (obj instanceof ArrayBuffer && !transferables.includes(obj)) { transferables.push(obj); } else if (obj instanceof MessagePort && !transferables.includes(obj)) { transferables.push(obj); } else if (typeof ImageBitmap !== "undefined" && obj instanceof ImageBitmap && !transferables.includes(obj)) { transferables.push(obj); } for (const key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { this.collectTransferables(obj[key], transferables, isSafari); } } } /** * Terminate communication */ terminate() { this.messageHandlers.clear(); if (this.worker) { this.worker.terminate(); } if (this.workerBlobUrl) { URL.revokeObjectURL(this.workerBlobUrl); this.workerBlobUrl = null; } } }; // src/utils/video-frame-converter.ts async function 
convertToVideoFrame(frame, timestamp) { if (frame instanceof VideoFrame) { return new VideoFrame(frame, { timestamp }); } if (frame instanceof HTMLCanvasElement) { return new VideoFrame(frame, { timestamp }); } if (frame instanceof OffscreenCanvas) { return new VideoFrame(frame, { timestamp }); } if (frame instanceof ImageBitmap) { return new VideoFrame(frame, { timestamp }); } if (frame instanceof ImageData) { return new VideoFrame(frame.data, { format: "RGBA", codedWidth: frame.width, codedHeight: frame.height, timestamp }); } if (frame && typeof frame === "object") { if ("width" in frame && "height" in frame && "data" in frame) { const imageDataLike = frame; return new VideoFrame(imageDataLike.data, { format: "RGBA", codedWidth: imageDataLike.width, codedHeight: imageDataLike.height, timestamp }); } if ("width" in frame && "height" in frame && ("getContext" in frame || "transferToImageBitmap" in frame)) { return new VideoFrame(frame, { timestamp }); } if ("width" in frame && "height" in frame && "close" in frame && typeof frame.close === "function") { return new VideoFrame(frame, { timestamp }); } } throw new EncodeError( "invalid-input", `Unsupported frame type: ${typeof frame}. Frame must be VideoFrame, HTMLCanvasElement, OffscreenCanvas, ImageBitmap, or ImageData.` ); } // src/core/encode.ts async function encode(source, options) { let communicator = null; try { const config = await inferAndBuildConfig(source, options); communicator = new WorkerCommunicator(); const result = await performEncoding(communicator, source, config, options); return result; } catch (error) { const encodeError = error instanceof EncodeError ? error : new EncodeError( "encoding-failed", `Encoding failed: ${error instanceof Error ? 
error.message : String(error)}`, error ); if (options?.onError) { options.onError(encodeError); } throw encodeError; } finally { if (communicator) { communicator.terminate(); } } } async function performEncoding(communicator, source, config, options) { return new Promise((resolve, reject) => { let processedFrames = 0; let totalFrames; const startTime = Date.now(); calculateTotalFrames(source, config).then((frames) => { totalFrames = frames; }).catch((error) => { console.warn("Failed to calculate total frames:", error); }); const updateProgress = (stage) => { if (options?.onProgress) { const elapsed = Date.now() - startTime; const fps = processedFrames > 0 ? processedFrames / elapsed * 1e3 : 0; const percent = totalFrames ? processedFrames / totalFrames * 100 : 0; const estimatedRemainingMs = totalFrames && fps > 0 ? (totalFrames - processedFrames) / fps * 1e3 : void 0; const progressInfo = { percent, processedFrames, totalFrames, fps, stage, estimatedRemainingMs }; options.onProgress(progressInfo); } }; communicator.on("initialized", () => { updateProgress("encoding"); processVideoSource(communicator, source, config).then(() => { updateProgress("finalizing"); communicator.send("finalize"); }).catch(reject); }); communicator.on( "progress", (data) => { processedFrames = data.processedFrames; if (data.totalFrames !== void 0) { totalFrames = data.totalFrames; } updateProgress("encoding"); } ); communicator.on("finalized", (data) => { if (data.output) { updateProgress("finalizing"); resolve(data.output); } else { reject(new EncodeError("encoding-failed", "No output produced")); } }); communicator.on("error", (data) => { const error = new EncodeError( data.errorDetail.type || "encoding-failed", data.errorDetail.message || "Worker error", data.errorDetail ); reject(error); }); communicator.send("initialize", { config, totalFrames }); }); } async function processVideoSource(communicator, source, config) { if (Array.isArray(source)) { await processFrameArray(communicator, 
source, config); } else if (source instanceof MediaStream) { await processMediaStream(communicator, source, config); } else if (Symbol.asyncIterator in source) { await processAsyncIterable(communicator, source, config); } else { await processVideoFile(communicator, source, config); } } async function processFrameArray(communicator, frames, config) { const frameRate = config?.frameRate || 30; for (let i = 0; i < frames.length; i++) { const frame = frames[i]; const timestamp = i * 1e6 / frameRate; await addFrameToWorker(communicator, frame, timestamp); } } async function processAsyncIterable(communicator, source, config) { let frameIndex = 0; const frameRate = config?.frameRate || 30; for await (const frame of source) { const timestamp = frameIndex * 1e6 / frameRate; await addFrameToWorker(communicator, frame, timestamp); frameIndex++; } } async function processMediaStream(communicator, stream, _config) { const videoTracks = stream.getVideoTracks(); const audioTracks = stream.getAudioTracks(); const readers = []; const processingPromises = []; try { if (videoTracks.length > 0) { const videoTrack = videoTracks[0]; const processor = new MediaStreamTrackProcessor({ track: videoTrack }); const reader = processor.readable.getReader(); readers.push(reader); processingPromises.push(processVideoReader(communicator, reader)); } if (audioTracks.length > 0) { const audioTrack = audioTracks[0]; const processor = new MediaStreamTrackProcessor({ track: audioTrack }); const reader = processor.readable.getReader(); readers.push(reader); processingPromises.push(processAudioReader(communicator, reader)); } await Promise.all(processingPromises); } finally { for (const reader of readers) { try { reader.cancel(); } catch (e) { } } for (const track of [...videoTracks, ...audioTracks]) { track.stop(); } } } async function processVideoReader(communicator, reader) { try { while (true) { const { value, done } = await reader.read(); if (done || !value) break; try { await 
addFrameToWorker(communicator, value, value.timestamp || 0); } finally { value.close(); } } } catch (error) { throw new EncodeError( "video-encoding-error", `Video stream processing error: ${error instanceof Error ? error.message : String(error)}`, error ); } } async function processAudioReader(communicator, reader) { try { while (true) { const { value, done } = await reader.read(); if (done || !value) break; try { communicator.send("addAudioData", { audio: value, timestamp: value.timestamp || 0, format: "f32", sampleRate: value.sampleRate, numberOfFrames: value.numberOfFrames, numberOfChannels: value.numberOfChannels }); } finally { value.close(); } } } catch (error) { throw new EncodeError( "audio-encoding-error", `Audio stream processing error: ${error instanceof Error ? error.message : String(error)}`, error ); } } async function addFrameToWorker(communicator, frame, timestamp) { const videoFrame = await convertToVideoFrame(frame, timestamp); try { communicator.send("addVideoFrame", { frame: videoFrame, timestamp }); } finally { videoFrame.close(); } } async function processVideoFile(communicator, videoFile, config) { try { const video = document.createElement("video"); video.muted = true; video.preload = "metadata"; const objectUrl = URL.createObjectURL(videoFile.file); video.src = objectUrl; await new Promise((resolve, reject) => { video.onloadedmetadata = () => resolve(); video.onerror = () => reject(new Error("Failed to load video file")); }); const { duration, videoWidth, videoHeight } = video; const frameRate = config.frameRate || 30; const totalFrames = Math.floor(duration * frameRate); let audioContext = null; let audioBuffer = null; if (config.audioBitrate > 0 && typeof AudioContext !== "undefined") { try { audioContext = new AudioContext(); const arrayBuffer = await videoFile.file.arrayBuffer(); audioBuffer = await audioContext.decodeAudioData(arrayBuffer); await processAudioFromFile( communicator, audioBuffer, duration, frameRate ); } catch 
(audioError) { console.warn("Failed to process audio from VideoFile:", audioError); } } const canvas = document.createElement("canvas"); canvas.width = videoWidth; canvas.height = videoHeight; const ctx = canvas.getContext("2d"); if (!ctx) { throw new EncodeError( "initialization-failed", "Failed to get canvas context" ); } for (let frameIndex = 0; frameIndex < totalFrames; frameIndex++) { try { const timestamp = frameIndex / frameRate; video.currentTime = timestamp; await new Promise((resolve, reject) => { const onSeeked = () => { video.removeEventListener("seeked", onSeeked); resolve(); }; video.addEventListener("seeked", onSeeked, { once: true }); video.onerror = () => reject(new Error("Video seek failed")); }); ctx.drawImage(video, 0, 0, videoWidth, videoHeight); const videoFrame = new VideoFrame(canvas, { timestamp: frameIndex * (1e6 / frameRate) // microseconds }); await addFrameToWorker( communicator, videoFrame, frameIndex * (1e6 / frameRate) ); videoFrame.close(); } catch (frameError) { throw new EncodeError( "video-encoding-error", `Failed to process frame ${frameIndex}: ${frameError instanceof Error ? frameError.message : String(frameError)}`, frameError ); } } URL.revokeObjectURL(objectUrl); video.remove(); if (audioContext) { audioContext.close(); } } catch (error) { throw new EncodeError( "invalid-input", `VideoFile processing failed: ${error instanceof Error ? 
error.message : String(error)}`, error ); } } async function processAudioFromFile(communicator, audioBuffer, duration, frameRate) { const sampleRate = audioBuffer.sampleRate; const numberOfChannels = audioBuffer.numberOfChannels; const totalSamples = audioBuffer.length; const chunkDurationMs = Math.min(20, 1e3 / frameRate); const samplesPerChunk = Math.floor(sampleRate * chunkDurationMs / 1e3); for (let offset = 0; offset < totalSamples; offset += samplesPerChunk) { const remainingSamples = Math.min(samplesPerChunk, totalSamples - offset); const timestamp = offset / sampleRate * 1e6; const channelData = []; for (let channel = 0; channel < numberOfChannels; channel++) { const sourceData = audioBuffer.getChannelData(channel); const chunkData = new Float32Array(remainingSamples); chunkData.set(sourceData.subarray(offset, offset + remainingSamples)); channelData.push(chunkData); } try { const interleavedData = new Float32Array( remainingSamples * numberOfChannels ); for (let frame = 0; frame < remainingSamples; frame++) { for (let channel = 0; channel < numberOfChannels; channel++) { interleavedData[frame * numberOfChannels + channel] = channelData[channel][frame]; } } const audioData = new AudioData({ format: "f32", sampleRate, numberOfFrames: remainingSamples, numberOfChannels, timestamp, data: interleavedData }); communicator.send("addAudioData", { audio: audioData, timestamp, format: "f32", sampleRate, numberOfFrames: remainingSamples, numberOfChannels }); audioData.close(); channelData.length = 0; } catch (error) { console.warn("Failed to create AudioData chunk:", error); } } } async function calculateTotalFrames(source, config) { try { if (Array.isArray(source)) { return source.length; } else if (source instanceof MediaStream) { return void 0; } else if (Symbol.asyncIterator in source) { return void 0; } else { const videoFile = source; const video = document.createElement("video"); video.muted = true; video.preload = "metadata"; const objectUrl = 
URL.createObjectURL(videoFile.file); video.src = objectUrl; try { await new Promise((resolve, reject) => { video.onloadedmetadata = () => resolve(); video.onerror = () => reject(new Error("Failed to load video metadata")); }); const frameRate = config.frameRate || 30; const totalFrames = Math.floor(video.duration * frameRate); URL.revokeObjectURL(objectUrl); return totalFrames; } catch (error) { URL.revokeObjectURL(objectUrl); throw error; } } } catch (error) { console.warn("Failed to calculate total frames:", error); return void 0; } } // src/stream/encode-stream.ts async function* encodeStream(source, options) { let communicator = null; const chunks = []; let isFinalized = false; let streamError = null; let processedFrames = 0; let totalFrames; const startTime = Date.now(); try { const baseConfig = await inferAndBuildConfig(source, options); const config = { ...baseConfig, latencyMode: "realtime" }; try { totalFrames = await calculateTotalFrames2(source, config); } catch (error) { console.warn("Failed to calculate total frames for streaming:", error); } communicator = new WorkerCommunicator(); const updateProgress = (stage) => { if (options?.onProgress) { const elapsed = Date.now() - startTime; const fps = processedFrames > 0 ? processedFrames / elapsed * 1e3 : 0; const percent = totalFrames ? processedFrames / totalFrames * 100 : 0; const estimatedRemainingMs = totalFrames && fps > 0 ? 
(totalFrames - processedFrames) / fps * 1e3 : void 0; const progressInfo = { percent, processedFrames, totalFrames, fps, stage, estimatedRemainingMs }; options.onProgress(progressInfo); } }; const encodingPromise = new Promise((resolve, reject) => { communicator.on("initialized", () => { updateProgress("streaming"); processVideoSource2(communicator, source, config).then(() => { updateProgress("finalizing"); communicator.send("finalize"); }).catch(reject); }); communicator.on( "progress", (data) => { processedFrames = data.processedFrames; if (data.totalFrames !== void 0) { totalFrames = data.totalFrames; } updateProgress("streaming"); } ); communicator.on("dataChunk", (data) => { chunks.push(data.chunk); }); communicator.on("finalized", () => { isFinalized = true; updateProgress("finalizing"); resolve(); }); communicator.on("error", (data) => { streamError = new EncodeError( data.errorDetail.type || "encoding-failed", data.errorDetail.message || "Worker error", data.errorDetail ); reject(streamError); }); communicator.send("initialize", { config, totalFrames }); }); while (!isFinalized && !streamError) { if (chunks.length > 0) { const chunk = chunks.shift(); yield chunk; } else { await new Promise((resolve) => setTimeout(resolve, 10)); } } while (chunks.length > 0) { const chunk = chunks.shift(); yield chunk; } if (streamError) { throw streamError; } try { await encodingPromise; } catch (error) { const encodeError = error instanceof EncodeError ? error : new EncodeError( "encoding-failed", `Streaming failed: ${error instanceof Error ? error.message : String(error)}`, error ); if (options?.onError) { options.onError(encodeError); } throw encodeError; } } catch (error) { const encodeError = error instanceof EncodeError ? error : new EncodeError( "encoding-failed", `Stream encoding failed: ${error instanceof Error ? 
error.message : String(error)}`, error ); if (options?.onError) { options.onError(encodeError); } throw encodeError; } finally { if (communicator) { communicator.terminate(); } } } async function processVideoSource2(communicator, source, config) { if (Array.isArray(source)) { await processFrameArray2(communicator, source, config); } else if (source instanceof MediaStream) { await processMediaStreamRealtime(communicator, source, config); } else if (Symbol.asyncIterator in source) { await processAsyncIterable2(communicator, source, config); } else { await processVideoFile2(communicator, source, config); } } async function processFrameArray2(communicator, frames, config) { const frameRate = config?.frameRate || 30; const frameDelay = 1e3 / frameRate; let lastFrameTime = performance.now(); for (let i = 0; i < frames.length; i++) { const frame = frames[i]; const timestamp = i * 1e6 / frameRate; await addFrameToWorker2(communicator, frame, timestamp); const now = performance.now(); const elapsedTime = now - lastFrameTime; const delay = Math.max(0, frameDelay - elapsedTime); await new Promise((resolve) => setTimeout(resolve, delay)); lastFrameTime = performance.now(); } } async function processAsyncIterable2(communicator, source, config) { let frameIndex = 0; const frameRate = config?.frameRate || 30; for await (const frame of source) { const timestamp = frameIndex * 1e6 / frameRate; await addFrameToWorker2(communicator, frame, timestamp); frameIndex++; } } async function processMediaStreamRealtime(communicator, stream, config) { const videoTracks = stream.getVideoTracks(); const audioTracks = stream.getAudioTracks(); const readers = []; const processingPromises = []; try { if (videoTracks.length > 0) { const videoTrack = videoTracks[0]; const processor = new MediaStreamTrackProcessor({ track: videoTrack }); const reader = processor.readable.getReader(); readers.push(reader); processingPromises.push( processVideoTrackRealtime(communicator, reader, config) ); } if 
(audioTracks.length > 0) { const audioTrack = audioTracks[0]; const processor = new MediaStreamTrackProcessor({ track: audioTrack }); const reader = processor.readable.getReader(); readers.push(reader); processingPromises.push(processAudioTrackRealtime(communicator, reader)); } await Promise.all(processingPromises); } finally { for (const reader of readers) { try { reader.cancel(); } catch (e) { } } for (const track of [...videoTracks, ...audioTracks]) { track.stop(); } } } async function processVideoTrackRealtime(communicator, reader, _config) { try { while (true) { const { value, done } = await reader.read(); if (done || !value) break; try { await addFrameToWorker2(communicator, value, value.timestamp || 0); } finally { value.close(); } } } catch (error) { throw new EncodeError( "video-encoding-error", `Real-time video stream processing error: ${error instanceof Error ? error.message : String(error)}`, error ); } } async function processAudioTrackRealtime(communicator, reader) { try { while (true) { const { value, done } = await reader.read(); if (done || !value) break; try { communicator.send("addAudioData", { audio: value, timestamp: value.timestamp || 0, format: "f32", sampleRate: value.sampleRate, numberOfFrames: value.numberOfFrames, numberOfChannels: value.numberOfChannels }); } finally { value.close(); } } } catch (error) { throw new EncodeError( "audio-encoding-error", `Real-time audio stream processing error: ${error instanceof Error ? 
error.message : String(error)}`, error ); } } async function addFrameToWorker2(communicator, frame, timestamp) { const videoFrame = await convertToVideoFrame(frame, timestamp); try { communicator.send("addVideoFrame", { frame: videoFrame, timestamp }); } finally { videoFrame.close(); } } async function processVideoFile2(communicator, videoFile, config) { try { const video = document.createElement("video"); video.muted = true; video.preload = "metadata"; const objectUrl = URL.createObjectURL(videoFile.file); video.src = objectUrl; await new Promise((resolve, reject) => { video.onloadedmetadata = () => resolve(); video.onerror = () => reject(new Error("Failed to load video file")); }); const { duration, videoWidth, videoHeight } = video; const frameRate = config.frameRate || 30; const totalFrames = Math.floor(duration * frameRate); let audioContext = null; let audioBuffer = null; if (config.audioBitrate > 0 && typeof AudioContext !== "undefined") { try { audioContext = new AudioContext(); const arrayBuffer = await videoFile.file.arrayBuffer(); audioBuffer = await audioContext.decodeAudioData(arrayBuffer); await processAudioFromFile2( communicator, audioBuffer, duration, frameRate ); } catch (audioError) { console.warn("Failed to process audio from VideoFile:", audioError); } } const canvas = document.createElement("canvas"); canvas.width = videoWidth; canvas.height = videoHeight; const ctx = canvas.getContext("2d"); if (!ctx) { throw new EncodeError( "initialization-failed", "Failed to get canvas context" ); } for (let frameIndex = 0; frameIndex < totalFrames; frameIndex++) { const timestamp = frameIndex / frameRate; video.currentTime = timestamp; await new Promise((resolve, reject) => { const onSeeked = () => { video.removeEventListener("seeked", onSeeked); resolve(); }; video.addEventListener("seeked", onSeeked, { once: true }); video.onerror = () => reject(new Error("Video seek failed")); }); ctx.drawImage(video, 0, 0, videoWidth, videoHeight); const videoFrame = 
new VideoFrame(canvas, { timestamp: frameIndex * (1e6 / frameRate) }); await addFrameToWorker2( communicator, videoFrame, frameIndex * (1e6 / frameRate) ); videoFrame.close(); await new Promise((resolve) => requestAnimationFrame(resolve)); } URL.revokeObjectURL(objectUrl); video.remove(); if (audioContext) { audioContext.close(); } } catch (error) { throw new EncodeError( "invalid-input", `VideoFile processing failed: ${error instanceof Error ? error.message : String(error)}`, error ); } } async function processAudioFromFile2(communicator, audioBuffer, duration, frameRate) { const sampleRate = audioBuffer.sampleRate; const numberOfChannels = audioBuffer.numberOfChannels; const totalSamples = audioBuffer.length; const chunkDurationMs = Math.min(20, 1e3 / frameRate); const samplesPerChunk = Math.floor(sampleRate * chunkDurationMs / 1e3); for (let offset = 0; offset < totalSamples; offset += samplesPerChunk) { const remainingSamples = Math.min(samplesPerChunk, totalSamples - offset); const timestamp = offset / sampleRate * 1e6; const channelData = []; for (let channel = 0; channel < numberOfChannels; channel++) { const sourceData = audioBuffer.getChannelData(channel); const chunkData = new Float32Array(remainingSamples); chunkData.set(sourceData.subarray(offset, offset + remainingSamples)); channelData.push(chunkData); } try { const interleavedData = new Float32Array( remainingSamples * numberOfChannels ); for (let frame = 0; frame < remainingSamples; frame++) { for (let channel = 0; channel < numberOfChannels; channel++) { interleavedData[frame * numberOfChannels + channel] = channelData[channel][frame]; } } const audioData = new AudioData({ format: "f32", sampleRate, numberOfFrames: remainingSamples, numberOfChannels, timestamp, data: interleavedData }); communicator.send("addAudioData", { audio: audioData, timestamp, format: "f32", sampleRate, numberOfFrames: remainingSamples, numberOfChannels }); audioData.close(); channelData.length = 0; } catch (error) { 
console.warn("Failed to create AudioData chunk:", error); } await new Promise((resolve) => setTimeout(resolve, 0)); } } async function calculateTotalFrames2(source, config) { try { if (Array.isArray(source)) { return source.length; } else if (source instanceof MediaStream) { return void 0; } else if (Symbol.asyncIterator in source) { return void 0; } else { const videoFile = source; const video = document.createElement("video"); video.muted = true; video.preload = "metadata"; const objectUrl = URL.createObjectURL(videoFile.file); video.src = objectUrl; try { await new Promise((resolve, reject) => { video.onloadedmetadata = () => resolve(); video.onerror = () => reject(new Error("Failed to load video metadata")); }); const frameRate = config.frameRate || 30; const totalFrames = Math.floor(video.duration * frameRate); URL.revokeObjectURL(objectUrl); return totalFrames; } catch (error) { URL.revokeObjectURL(objectUrl); throw error; } } } catch (error) { console.warn("Failed to calculate total frames for streaming:", error); return void 0; } } // src/utils/can-encode.ts async function canEncode(options) { try { if (!isWebCodecsSupported()) { return false; } if (!options) { return await testDefaultConfiguration(); } const hasVideoConfig = options.video && typeof options.video === "object"; const hasVideo = hasVideoConfig || !options.audio; if (hasVideo) { const videoCodec = hasVideoConfig ? 
options.video.codec || "avc" : "avc"; const videoSupported = await testVideoCodecSupport(videoCodec, options); if (!videoSupported) { return false; } } const hasAudioConfig = options.audio && typeof options.audio === "object"; if (hasAudioConfig) { const audioCodec = options.audio.codec || "aac"; const audioSupported = await testAudioCodecSupport(audioCodec, options); if (!audioSupported) { return false; } } else if (options.audio === void 0 && !hasVideoConfig) { const audioSupported = await testAudioCodecSupport("aac", options); if (!audioSupported) { return false; } } return true; } catch (error) { console.warn("canEncode error:", error); return false; } } function isWebCodecsSupported() { try { return typeof VideoEncoder !== "undefined" && typeof AudioEncoder !== "undefined" && typeof VideoFrame !== "undefined" && typeof AudioData !== "undefined"; } catch { return false; } } async function testDefaultConfiguration() { try { const defaultWidth = 640; const defaultHeight = 480; const defaultFrameRate = 30; const videoConfig = { codec: generateAvcCodecString( defaultWidth, defaultHeight, defaultFrameRate ), width: defaultWidth, height: defaultHeight, bitrate: 1e6, framerate: defaultFrameRate }; const videoSupport = await VideoEncoder.isConfigSupported(videoConfig); if (!videoSupport.supported) { return false; } const audioConfig = { codec: "mp4a.40.2", // AAC-LC sampleRate: 48e3, numberOfChannels: 2, bitrate: 128e3 }; const audioSupport = await AudioEncoder.isConfigSupported(audioConfig); return audioSupport.supported || false; } catch { return false; } } async function testVideoCodecSupport(codec, options) { try { const videoOptions = options?.video && typeof options.video === "object" ? 
options.video : {}; const codecString = getVideoCodecString( codec, options?.width || 640, options?.height || 480, options?.frameRate || 30 ); const config = { codec: codecString, width: options?.width || 640, height: options?.height || 480, bitrate: videoOptions.bitrate || 1e6, framerate: options?.frameRate || 30 }; if (videoOptions.hardwareAcceleration) { config.hardwareAcceleration = videoOptions.hardwareAcceleration; } if (videoOptions.latencyMode) { config.latencyMode = videoOptions.latencyMode; } const support = await VideoEncoder.isConfigSupported(config); return support.supported || false; } catch { return false; } } async function testAudioCodecSupport(codec, options) { try { const codecString = getAudioCodecString(codec); const audioOptions = typeof options?.audio === "object" ? options.audio : {}; const config = { codec: codecString, sampleRate: audioOptions.sampleRate || 48e3, numberOfChannels: audioOptions.channels || 2, bitrate: audioOptions.bitrate || 128e3 }; if (codec === "aac" && audioOptions.bitrateMode) { config.bitrateMode = audioOptions.bitrateMode; } const support = await AudioEncoder.isConfigSupported(config); return support.supported || false; } catch { return false; } } function getVideoCodecString(codec, width = 640, height = 480, frameRate = 30) { switch (codec) { case "avc": return generateAvcCodecString(width, height, frameRate); case "hevc": return "hev1.1.6.L93.B0"; // H.265 Main Profile case "vp9": return "vp09.00.10.08"; // VP9 Profile 0 case "vp8": return "vp8"; // VP8 case "av1": return "av01.0.04M.08"; // AV1 Main Profile Level 4.0 default: return codec; } } function generateAvcCodecString(width, height, frameRate, profile) { const mbPerSec = Math.ceil(width / 16) * Math.ceil(height / 16) * frameRate; let level; if (mbPerSec <= 108e3) level = 31; else if (mbPerSec <= 216e3) level = 32; else if (mbPerSec <= 245760) level = 40; else if (mbPerSec <= 589824) level = 50; else if (mbPerSec <= 983040) level = 51; else level = 52; const 
chosenProfile = profile ?? (width >= 1280 || height >= 720 ? "high" : "baseline"); const profileHex = chosenProfile === "high" ? "64" : chosenProfile === "main" ? "4d" : "42"; const levelHex = level.toString(16).padStart(2, "0"); return `avc1.${profileHex}00${levelHex}`; } function getAudioCodecString(codec) { switch (codec) { case "aac": return "mp4a.40.2"; // AAC-LC case "opus": return "opus"; // Opus default: return codec; } } // src/factory/encoder.ts function createEncoder(baseOptions = {}) { const factory = { async encode(source, additionalOptions) { const mergedOptions = mergeOptions(baseOptions, additionalOptions); return encode(source, mergedOptions); }, async *encodeStream(source, additionalOptions) { const mergedOptions = mergeOptions(baseOptions, additionalOptions); yield* encodeStream(source, mergedOptions); }, getConfig() { return { ...baseOptions }; }, extend(newOptions) { const extendedOptions = mergeOptions(baseOptions, newOptions); return createEncoder(extendedOptions); } }; return factory; } function mergeOptions(base, additional) { if (!additional) { return { ...base }; } return { ...base, ...additional, // ネストしたオブジェクトは個別にマージ video: { ...base.video, ...additional.video }, audio: additional.audio === false ? 
false : { ...base.audio, ...additional.audio } }; } var encoders = { /** * YouTube向け高品質エンコーダー */ youtube: createEncoder({ quality: "high", frameRate: 60, video: { codec: "avc" }, audio: { codec: "aac", bitrate: 192e3 }, container: "mp4" }), /** * Twitter向け最適化エンコーダー */ twitter: createEncoder({ quality: "medium", width: 1280, height: 720, video: { bitrate: 2e6 }, audio: { bitrate: 128e3 }, container: "mp4" }), /** * Discord向け最適化エンコーダー */ discord: createEncoder({ quality: "medium", video: { bitrate: 2e6 }, audio: { bitrate: 128e3 }, container: "mp4" }), /** * Web再生向けバランス型エンコーダー */ web: createEncoder({ quality: "medium", container: "mp4", video: { codec: "avc" }, audio: { codec: "aac" } }), /** * 軽量・高速エンコーダー */ fast: createEncoder({ quality: "low", video: { codec: "avc", hardwareAcceleration: "prefer-hardware", latencyMode: "realtime" }, audio: { codec: "aac", bitrate: 64e3 } }), /** * 高品質・低圧縮エンコーダー */ lossless: createEncoder({ qual