
@eleven-am/transcoder


High-performance HLS transcoding library with hardware acceleration, intelligent client management, and distributed processing support for Node.js

stream.js (1,149 lines, 44.2 kB)
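
The compiled module below exports a single `Stream` class. As a rough sketch of how its per-stream API could be consumed from Node.js, assuming a `Stream` instance has already been created through `Stream.create(...)` with the package's own source, quality, metadata and job-processor services; the HTTP wiring, URL scheme and the readable-stream assumption are illustrative and not part of the file:

// Sketch only: `stream` is an instance of the Stream class defined below.
// getPlaylist() and getSegmentStream(index, priority) are the methods from
// stream.js; the routing and MIME types are illustrative assumptions.
const http = require('http');

function serveHlsStream(stream, port) {
    return http.createServer((req, res) => {
        if (req.url.endsWith('/playlist.m3u8')) {
            // getPlaylist() synchronously builds the EVENT playlist string
            res.setHeader('Content-Type', 'application/vnd.apple.mpegurl');
            res.end(stream.getPlaylist());
            return;
        }

        const match = /segment-(\d+)\.ts$/.exec(req.url);
        if (!match) {
            res.statusCode = 404;
            res.end();
            return;
        }

        // getSegmentStream returns a TaskEither from @eleven-am/fp; .map() and
        // .toResult() are the same combinators stream.js itself uses to run them.
        stream.getSegmentStream(Number(match[1]), /* priority */ 1)
            .map((segment) => {
                res.setHeader('Content-Type', 'video/mp2t');
                segment.pipe(res); // assumes the resolved value is a readable stream
            })
            .toResult();
    }).listen(port);
}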
"use strict"; /* * @eleven-am/transcoder * Copyright (C) 2025 Roy OSSAI * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <https://www.gnu.org/licenses/>. */ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || (function () { var ownKeys = function(o) { ownKeys = Object.getOwnPropertyNames || function (o) { var ar = []; for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; return ar; }; return ownKeys(o); }; return function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); __setModuleDefault(result, mod); return result; }; })(); var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? 
mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); exports.Stream = void 0; const path = __importStar(require("path")); const fp_1 = require("@eleven-am/fp"); const ffmpeg_1 = __importDefault(require("./ffmpeg")); const types_1 = require("./types"); const utils_1 = require("./utils"); var JobRangeStatus; (function (JobRangeStatus) { JobRangeStatus[JobRangeStatus["PROCESSING"] = 0] = "PROCESSING"; JobRangeStatus[JobRangeStatus["PROCESSED"] = 1] = "PROCESSED"; JobRangeStatus[JobRangeStatus["ERROR"] = 2] = "ERROR"; })(JobRangeStatus || (JobRangeStatus = {})); class Stream extends utils_1.ExtendedEventEmitter { constructor(quality, type, source, streamIndex, metadata, maxSegmentBatchSize, qualityService, hwDetector, optimisedAccel, config, jobProcessor) { super(); this.quality = quality; this.type = type; this.source = source; this.streamIndex = streamIndex; this.metadata = metadata; this.maxSegmentBatchSize = maxSegmentBatchSize; this.qualityService = qualityService; this.hwDetector = hwDetector; this.optimisedAccel = optimisedAccel; this.jobProcessor = jobProcessor; this.config = { ...Stream.DEFAULT_CONFIG, ...config, }; const [aQ, vQ] = this.loadQuality(); this.segments = this.buildSegments(); this.videoQuality = vQ; this.audioQuality = aQ; this.jobRange = []; this.timer = null; this.hasFallenBackToSoftware = false; this.streamCreatedAt = Date.now(); this.lastActivityAt = this.streamCreatedAt; this.metricsTimer = null; this.cachedHwOptions = new Map(); this.segmentRetries = new Map(); this.metrics = { segmentsProcessed: 0, segmentsFailed: 0, averageProcessingTime: 0, hardwareAccelUsed: Boolean(optimisedAccel), fallbacksToSoftware: 0, totalJobsStarted: 0, totalJobsCompleted: 0, }; } /** * Create a new stream * @param quality - The quality of the stream * @param type - The type of the stream (audio or video) * @param streamIndex - The index of the stream * @param source - The media source * @param maxSegmentBatchSize - The maximum number of segments to process at once * @param qualityService - The quality service * @param metadataService - The metadata service * @param hwDetector - The hardware acceleration detector * @param hwAccel - The hardware acceleration configuration * @param config - The stream configuration * @param jobProcessor - The job processor */ static create(quality, type, streamIndex, source, maxSegmentBatchSize, qualityService, metadataService, hwDetector, hwAccel, config, jobProcessor) { return fp_1.TaskEither .fromBind({ metadata: metadataService.getMetadata(source), optimisedAccel: metadataService.detectOptimalCodecConfig(source, hwAccel), }) .map(({ metadata, optimisedAccel }) => new Stream(quality, type, source, streamIndex, metadata, maxSegmentBatchSize, qualityService, hwDetector, optimisedAccel, config, jobProcessor)) .chain((stream) => stream.initialise()); } /** * Get the stream ID * @param fileId - The file ID * @param type - The type of the stream (audio or video) * @param quality - The quality of the stream * @param streamIndex - The index of the stream */ static getStreamId(fileId, type, quality, streamIndex) { return `${fileId}:${type}:${streamIndex}:${quality}`; } /** * Extract subtitle from a media source and convert to WebVTT * @param mediaSource - The media source * @param streamIndex - The index of the subtitle stream */ static getVTTSubtitle(mediaSource, streamIndex) { return this.runFFMPEGCommand([ '-map', `0:s:${streamIndex}`, '-c:s', 'webvtt', '-f', 'webvtt', ], mediaSource.getFilePath()); } /** * Get all 
convertible subtitle streams from media metadata * @param metadata - The media metadata */ static getConvertibleSubtitles(metadata) { return metadata.subtitles.filter((stream) => this.canConvertToVtt(stream)); } /** * Check if a subtitle stream can be converted to VTT * @param subtitleStream - The subtitle stream */ static canConvertToVtt(subtitleStream) { const supportedCodecs = [ 'subrip', 'webvtt', 'mov_text', 'ass', 'ssa', 'text', ]; const supportedExtensions = [ 'srt', 'vtt', 'ass', 'ssa', ]; return supportedCodecs.includes(subtitleStream.codec) || (subtitleStream.extension !== null && supportedExtensions.includes(subtitleStream.extension)); } /** * Run FFMPEG command * @param outputOptions - The output options to feed to the Ffmpeg * @param inputPath - The input path to the file to perform the command on */ static runFFMPEGCommand(outputOptions, inputPath) { return new Promise((resolve, reject) => { const stream = (0, ffmpeg_1.default)(inputPath) .outputOptions(outputOptions) .on('error', reject) .pipe(); resolve(stream); }); } /** * Create a screenshot from a media source at a specific timestamp * @param timestamp - The timestamp of the screenshot to be created */ generateScreenshot(timestamp) { const command = (0, ffmpeg_1.default)(this.source.getFilePath()); const videoProfile = this.buildVideoQuality(this.videoQuality?.value ?? types_1.VideoQualityEnum.ORIGINAL); if (timestamp > 0) { command.inputOptions(['-ss', timestamp.toFixed(6)]); } command.outputOptions([ '-map', `0:v:${this.streamIndex}`, '-vframes', '1', '-f', 'image2pipe', '-vcodec', 'mjpeg', '-an', '-sn', '-pix_fmt', 'yuvj420p', ]); command.videoFilters(`scale=${videoProfile.width}:${videoProfile.height}`); return new Promise((resolve, reject) => { const stream = command .on('error', reject) .pipe(); resolve(stream); }); } /** * Get the file ID */ getFileId() { return this.metadata.id; } /** * Builds the transcode command for the stream * @param segmentIndex - The index of the segment * @param priority - The priority of the segment */ buildTranscodeCommand(segmentIndex, priority) { this.debounceDispose(); // Prefetch upcoming segments when processing current segment this.prefetchSegments(segmentIndex, priority); return fp_1.TaskEither .fromBind({ distance: fp_1.TaskEither.of(this.getMinEncoderDistance(segmentIndex)), exists: this.source.segmentExist(this.type, this.streamIndex, this.quality, segmentIndex), segment: fp_1.TaskEither.fromNullable(this.segments.get(segmentIndex)), isScheduled: fp_1.TaskEither.of(this.isSegmentScheduled(segmentIndex)), }) .matchTask([ { predicate: ({ exists }) => exists, run: () => fp_1.TaskEither.of(undefined), }, { predicate: ({ isScheduled, distance, segment }) => isScheduled && distance <= this.config.maxEncoderDistance && segment.value?.state() !== 'rejected', run: ({ segment }) => fp_1.TaskEither .fromResult(() => segment.value.promise()) .timed(this.config.segmentTimeout, `Timed out waiting for segment ${segment.index}`), }, { predicate: () => this.type === types_1.StreamType.AUDIO, run: ({ segment }) => this.buildAudioTranscodeOptions(segment, priority), }, { predicate: () => this.type === types_1.StreamType.VIDEO, run: ({ segment }) => this.buildVideoTranscodeOptions(segment, priority), }, ]); } /** * Get the segment stream for a specific segment * @param segmentIndex - The index of the segment * @param priority - The priority of the segment */ getSegmentStream(segmentIndex, priority) { this.debounceDispose(); return this.buildTranscodeCommand(segmentIndex, priority) .chain(() => 
this.source.getSegmentStream(this.type, this.streamIndex, this.quality, segmentIndex)); } /** * Get the stream ID */ getStreamId() { return Stream.getStreamId(this.metadata.id, this.type, this.quality, this.streamIndex); } /** * Creates a new playlist for the stream */ getPlaylist() { this.debounceDispose(); const segments = Array.from(this.segments.values()); const sortedSegments = (0, fp_1.sortBy)(segments, 'index', 'asc'); const indices = [ '#EXTM3U', '#EXT-X-VERSION:6', '#EXT-X-PLAYLIST-TYPE:EVENT', '#EXT-X-START:TIME-OFFSET=0', '#EXT-X-TARGETDURATION:4', '#EXT-X-MEDIA-SEQUENCE:0', '#EXT-X-INDEPENDENT-SEGMENTS', ...sortedSegments.map((segment) => [ `#EXTINF:${segment.duration.toFixed(6)}`, `segment-${segment.index}.ts`, ]).flat(), '#EXT-X-ENDLIST', ]; return indices.join('\n'); } /** * Builds the video quality for the stream * @param quality - The video quality to build * @param index - The index of the video stream */ buildVideoQuality(quality, index) { const videoInfo = this.metadata.videos[index ?? this.streamIndex]; const profile = this.qualityService.buildValidVideoQuality(quality, videoInfo); const targetWidth = Math.round((profile.height / videoInfo.height) * videoInfo.width); const targetHeight = profile.height; const width = this.closestMultiple(targetWidth, 2); const height = this.closestMultiple(targetHeight, 2); return { ...profile, width, height, }; } /** * Builds the audio quality for the stream * @param quality - The audio quality to build * @param index - The index of the audio stream */ buildAudioQuality(quality, index) { const audioInfo = this.metadata.audios[index ?? this.streamIndex]; return this.qualityService.buildValidAudioQuality(quality, audioInfo); } /** * Dispose of the stream */ dispose() { this.segments.forEach((segment) => { if (segment.value?.state() === 'pending') { segment.value.reject(new Error('Stream disposed')); } }); this.cachedHwOptions.clear(); this.segmentRetries.clear(); this.jobRange.forEach((range) => { if (range.status === JobRangeStatus.PROCESSING) { range.status = JobRangeStatus.ERROR; } }); if (this.timer) { clearTimeout(this.timer); } if (this.metricsTimer) { clearInterval(this.metricsTimer); } this.emit('dispose', { id: this.getStreamId() }); return this.source.deleteTempFiles() .map(() => { this.segments.clear(); this.jobRange.length = 0; this.timer = null; this.metricsTimer = null; this.removeAllListeners(); }) .toResult(); } /** * Generate and emit comprehensive stream metrics */ emitMetrics() { this.lastActivityAt = Date.now(); const segmentStates = this.calculateSegmentStates(); const avgSegmentDuration = this.metadata.duration / this.segments.size; const remainingSegments = segmentStates.unstarted + segmentStates.pending; const estimatedTimeRemaining = remainingSegments > 0 && this.metrics.averageProcessingTime > 0 ? 
remainingSegments * this.metrics.averageProcessingTime : null; const metricsEvent = { streamId: this.getStreamId(), fileId: this.metadata.id, type: this.type, quality: this.quality, streamIndex: this.streamIndex, metrics: { ...this.metrics }, isUsingHardwareAcceleration: this.isUsingHardwareAcceleration(), currentAccelerationMethod: this.getCurrentAccelerationMethod(), originalAccelerationMethod: this.optimisedAccel?.method || null, hasFallenBackToSoftware: this.hasFallenBackToSoftware, totalSegments: this.segments.size, segmentsCompleted: segmentStates.completed, segmentsPending: segmentStates.pending, segmentsFailed: segmentStates.failed, segmentsUnstarted: segmentStates.unstarted, currentJobsActive: this.jobRange .filter((range) => range.status === JobRangeStatus.PROCESSING) .length, averageSegmentDuration: avgSegmentDuration, estimatedTimeRemaining, streamCreatedAt: this.streamCreatedAt, lastActivityAt: this.lastActivityAt, metricsGeneratedAt: Date.now(), }; this.emit('stream:metrics', metricsEvent); } /** * Calculate current segment states */ calculateSegmentStates() { let completed = 0; let pending = 0; let failed = 0; let unstarted = 0; this.segments.forEach((segment) => { const state = segment.value?.state(); if (state === 'fulfilled') { completed++; } else if (state === 'pending') { pending++; } else if (state === 'rejected') { failed++; } else { unstarted++; } }); return { completed, pending, failed, unstarted }; } /** * Initialize the stream */ initialise() { return this.checkSegmentsStatus() .map(() => { this.startPeriodicMetrics(); return this; }); } /** * Start a timer to periodically emit metrics */ startPeriodicMetrics() { if (this.config.metricsInterval <= 0) { return; } const interval = Math.max(5000, this.config.metricsInterval); this.metricsTimer = setInterval(() => this.emitMetrics(), interval); } /** * Get dynamic batch size based on hardware acceleration */ getDynamicBatchSize() { const baseSize = this.maxSegmentBatchSize; if (this.isUsingHardwareAcceleration()) { return Math.min(baseSize * 2, 200); } return Math.min(baseSize, 50); } /** * Get cached hardware acceleration options * Uses optimisedAccel unless we've fallen back to software * @param width - Video width * @param height - Video height * @param codec - Codec type */ getCachedHardwareOptions(width, height, codec) { const hwConfigToUse = this.hasFallenBackToSoftware ? 
null : this.optimisedAccel; const key = `${width}x${height}-${codec}-${hwConfigToUse?.method || 'software'}`; if (!this.cachedHwOptions.has(key)) { const options = this.hwDetector.applyHardwareConfig(hwConfigToUse, width, height, codec); this.cachedHwOptions.set(key, options); } return this.cachedHwOptions.get(key); } /** * Check if an error is related to hardware acceleration * @param err - The error to check */ isHardwareAccelerationError(err) { const message = err.message.toLowerCase(); return [ 'device creation failed', 'hardware device setup failed', 'nvenc', 'vaapi', 'qsv', 'videotoolbox', 'cuda', 'hardware acceleration', ].some((str) => message.includes(str)); } /** * Fallback to software encoding while preserving the original optimized config */ fallbackToSoftwareEncoding() { if (this.optimisedAccel && this.config.enableHardwareAccelFallback && !this.hasFallenBackToSoftware) { this.hasFallenBackToSoftware = true; this.cachedHwOptions.clear(); this.metrics.fallbacksToSoftware++; this.metrics.hardwareAccelUsed = false; } } /** * Get the segment for this stream (Audio) * @param segment - The segment to process * @param priority - The priority of the segment */ buildAudioTranscodeOptions(segment, priority) { const argsBuilder = () => { if (this.audioQuality?.value === types_1.AudioQualityEnum.ORIGINAL) { return fp_1.Either.of({ videoFilters: undefined, inputOptions: [], outputOptions: [ '-map', `0:a:${this.streamIndex}`, '-c:a', 'copy', '-vn', ], }); } return fp_1.Either.of({ videoFilters: undefined, inputOptions: [], outputOptions: [ '-map', `0:a:${this.streamIndex}`, '-c:a', 'aac', '-ac', '2', '-b:a', '128k', '-vn', ], }); }; return this.buildFFMPEGCommand(segment, argsBuilder, priority); } /** * Build the FFMPEG command for video transcoding * @param segment - The segment to process * @param priority - The priority of the segment */ buildVideoTranscodeOptions(segment, priority) { const video = this.metadata.videos[this.streamIndex]; const videoQuality = this.videoQuality; const argsBuilderFactory = (video, videoQuality) => (segments) => { if (videoQuality.value === types_1.VideoQualityEnum.ORIGINAL) { return fp_1.Either.of({ inputOptions: [], outputOptions: [ '-map', `0:v:${this.streamIndex}`, '-c:v', 'copy', '-force_key_frames', segments, '-strict', '-2', ], videoFilters: undefined, }); } const detailedQuality = this.buildVideoQuality(videoQuality.value); const codec = video.codec.includes('hevc') || video.codec.includes('h265') ? 'h265' : 'h264'; const options = this.getCachedHardwareOptions(detailedQuality.width, detailedQuality.height, codec); const outputOptions = [ '-map', `0:v:${this.streamIndex}`, '-bufsize', `${detailedQuality.maxBitrate * 5}`, '-b:v', `${detailedQuality.averageBitrate}`, '-maxrate', `${detailedQuality.maxBitrate}`, '-forced-idr', '1', ...options.outputOptions, '-force_key_frames', segments, '-strict', '-2', ]; return fp_1.Either.of({ videoFilters: options.videoFilters, inputOptions: options.inputOptions, outputOptions, }); }; return fp_1.TaskEither .fromBind({ video: fp_1.TaskEither.fromNullable(video), quality: fp_1.TaskEither.fromNullable(videoQuality), }) .map(({ video, quality }) => argsBuilderFactory(video, quality)) .chain((argsBuilder) => this.buildFFMPEGCommand(segment, argsBuilder, priority)); } /** * Build the segments for this stream */ buildSegments() { return this.metadata.keyframes.reduce((segments, startTime, index) => { const endTime = index < this.metadata.keyframes.length - 1 ? 
this.metadata.keyframes[index + 1] : this.metadata.duration; segments.set(index, { index, value: null, start: startTime, duration: endTime - startTime, }); return segments; }, new Map()); } /** * Prefetch upcoming segments to improve performance * @param currentIndex - The current segment index being processed * @param priority - The priority of the current segment */ prefetchSegments(currentIndex, priority) { const prefetchCount = 3; for (let i = 1; i <= prefetchCount; i++) { const nextIndex = currentIndex + i; const nextSegment = this.segments.get(nextIndex); if (nextSegment && !nextSegment.value) { const isProcessing = this.jobRange.some(range => range.start <= nextIndex && range.end >= nextIndex && range.status === JobRangeStatus.PROCESSING); if (!isProcessing) { setTimeout(() => { const nextSeg = this.segments.get(nextIndex); if (nextSeg && !nextSeg.value) { const prefetchPriority = priority / (i + 1); // Lower priority for further segments this.buildTranscodeCommand(nextIndex, prefetchPriority).toResult(); } }, 100 * i); } } } } /** * Check the status of the segments */ checkSegmentsStatus() { return fp_1.TaskEither .of(Array.from(this.segments.values())) .chainItems((segment) => this.checkSegmentFileStatus(segment)) .map(() => undefined); } /** * Check if the segment file exists * @param segment - The segment to check */ checkSegmentFileStatus(segment) { return this.source.segmentExist(this.type, this.streamIndex, this.quality, segment.index) .filter((exists) => exists, () => (0, fp_1.createBadRequestError)(`Segment ${segment.index} does not yet exist`)) .map(() => { segment.value = fp_1.Deferred.create().resolve(); }); } /** * Intelligently filter and prepare segments for processing * Only processes segments that actually need work, respects ongoing jobs * @param initialSegment - The segment to start processing from */ filterAndPrepareSegments(initialSegment) { const allSegments = (0, fp_1.sortBy)(Array.from(this.segments.values()), 'index', 'asc'); const startIndex = allSegments.findIndex((s) => s.index === initialSegment.index); if (startIndex === -1) { return []; } const segmentsToProcess = []; const dynamicBatchSize = this.getDynamicBatchSize(); const endIndex = Math.min(startIndex + dynamicBatchSize, allSegments.length); // Collect consecutive segments that need processing for (let i = startIndex; i < endIndex; i++) { const segment = allSegments[i]; const state = segment.value?.state(); // Stop if we hit a segment that's already processing or completed if (state === 'pending' || state === 'fulfilled') { break; } // Add segment if it needs processing if (!segment.value || state === 'rejected') { if (state === 'rejected') { segment.value.reset(); } segment.value = fp_1.Deferred.create(); segmentsToProcess.push(segment); } } // Try to batch segments for more efficient processing return this.optimizeBatchForProcessing(segmentsToProcess); } /** * Optimize batch of segments for efficient FFmpeg processing * Groups consecutive segments to minimize FFmpeg invocations */ optimizeBatchForProcessing(segments) { if (segments.length <= 1) { return segments; } // For hardware acceleration, larger batches are more efficient if (this.isUsingHardwareAcceleration()) { // Process up to 10 segments in one FFmpeg call with hardware acceleration return segments.slice(0, Math.min(10, segments.length)); } // For software encoding, balance between batch size and responsiveness // Process up to 5 segments in one FFmpeg call return segments.slice(0, Math.min(5, segments.length)); } /** * Generate FFMPEG 
transcoding arguments for processing segments */ getTranscodeArgs(builder, segments) { if (segments.length === 0) { return fp_1.Either.error((0, fp_1.createNotFoundError)(`No segments to process for stream ${this.type} ${this.streamIndex} ${this.quality}`)); } const startSegment = segments[0]; const lastSegment = segments[segments.length - 1]; let startTime = 0; let startKeyframeIndex = startSegment.index; let keyframeStartTime = 0; if (startSegment.index !== 0) { startKeyframeIndex = startSegment.index - 1; const prevSegment = this.segments.get(startKeyframeIndex); if (prevSegment) { keyframeStartTime = prevSegment.start; if (this.type === types_1.StreamType.AUDIO) { startTime = prevSegment.start; } else if (startSegment.index === this.segments.size - 1) { startTime = (prevSegment.start + this.metadata.duration) / 2; } else { startTime = (prevSegment.start + startSegment.start) / 2; } } } const nextSegmentIndex = lastSegment.index + 1; const nextSegment = this.segments.get(nextSegmentIndex); const endTime = nextSegment ? nextSegment.start : null; let timestamps = segments .filter((segment) => segment.index > startKeyframeIndex) .map((segment) => segment.start); if (timestamps.length === 0) { timestamps = [9999999]; } const segmentTimestamps = timestamps .map((time) => time.toFixed(6)) .join(','); const relativeTimestamps = timestamps .map((time) => (time - keyframeStartTime).toFixed(6)) .join(','); const options = { inputOptions: ['-nostats', '-hide_banner', '-loglevel', 'warning'], outputOptions: [], videoFilters: undefined, }; if (startTime > 0) { if (this.type === types_1.StreamType.VIDEO) { options.inputOptions.push('-noaccurate_seek'); } options.inputOptions.push('-ss', startTime.toFixed(6)); } if (endTime !== null) { const adjustedEndTime = endTime + (startTime - keyframeStartTime); options.inputOptions.push('-to', adjustedEndTime.toFixed(6)); } options.inputOptions.push('-fflags', '+genpts'); return builder(segmentTimestamps) .map((transcodeArgs) => ({ inputOptions: [ ...options.inputOptions, ...transcodeArgs.inputOptions, ], outputOptions: [ ...options.outputOptions, ...transcodeArgs.outputOptions, '-start_at_zero', '-copyts', '-muxdelay', '0', '-f', 'segment', '-segment_time_delta', '0.05', '-segment_format', 'mpegts', '-segment_times', relativeTimestamps, '-segment_list_type', 'flat', '-segment_list', 'pipe:1', '-segment_start_number', startKeyframeIndex.toString(), ], videoFilters: transcodeArgs.videoFilters, })); } /** * Build the FFMPEG command for transcoding */ buildFFMPEGCommand(initialSegment, builder, priority) { const segmentsToProcess = this.filterAndPrepareSegments(initialSegment); if (segmentsToProcess.length === 0) { return fp_1.TaskEither.fromResult(() => initialSegment.value.promise()); } return this.getTranscodeArgs(builder, segmentsToProcess) .toTaskEither() .chain((options) => this.source.getStreamDirectory(this.type, this.streamIndex, this.quality) .map((outputDir) => ({ options, outputDir, }))) .chain(({ options, outputDir }) => this.executeTranscodeCommand(initialSegment, segmentsToProcess, options, outputDir, priority)) .chain(() => fp_1.TaskEither.fromResult(() => initialSegment.value.promise())); } /** * Execute the actual Ffmpeg transcoding command */ executeTranscodeCommand(initialSegment, segments, options, outputDir, priority) { return fp_1.TaskEither.tryCatch(() => this.setupAndRunCommand(initialSegment, segments, options, outputDir, priority), 'Failed to execute transcode command'); } /** * Setup and run the Ffmpeg command with proper event handling 
*/ setupAndRunCommand(initialSegment, segments, options, outputDir, priority) { return new Promise((resolve, reject) => { const command = this.createFfmpegCommand(options, outputDir); const job = this.createTranscodeJob(initialSegment, priority, command); const jobRange = this.createJobRange(initialSegment, segments); this.setupCommandEventHandlers(command, job, jobRange, segments, resolve, reject); this.emit('transcode:queued', job); this.jobRange.push(jobRange); this.metrics.totalJobsStarted++; // Submit job to processor this.jobProcessor.submitJob(job) .orElse((error) => { reject(error); return fp_1.TaskEither.of(undefined); }) .toResult(); }); } /** * Create the Ffmpeg command with options */ createFfmpegCommand(options, outputDir) { const { inputOptions, outputOptions, videoFilters } = options; const command = (0, ffmpeg_1.default)(this.source.getFilePath()) .inputOptions(inputOptions) .outputOptions(outputOptions) .output(path.join(outputDir, 'segment-%d.ts')); if (videoFilters) { command.videoFilters(videoFilters); } return command; } /** * Create a transcode job */ createTranscodeJob(initialSegment, priority, command) { return { id: `job-${Date.now()}-${Math.floor(Math.random() * 10000)}`, priority, process: command, createdAt: Date.now(), start: initialSegment.start, status: types_1.TranscodeStatus.QUEUED, }; } /** * Create a job range */ createJobRange(initialSegment, segments) { return { start: initialSegment.start, status: JobRangeStatus.PROCESSING, end: segments[segments.length - 1].duration, }; } /** * Set up comprehensive event handlers for the Ffmpeg command * @param command - The Ffmpeg command * @param job - The transcode job * @param jobRange - The job range * @param segments - The segments being processed * @param resolve - Resolve function * @param reject - Reject function */ setupCommandEventHandlers(command, job, jobRange, segments, resolve, reject) { let lastIndex = segments[0].index; const startTime = Date.now(); const disposeHandler = () => { command.kill('SIGINT'); this.off('dispose', disposeHandler); reject(new Error('Stream disposed during transcoding')); }; command.on('start', () => { job.status = types_1.TranscodeStatus.PROCESSING; this.emit('transcode:start', job); this.emitMetrics(); }); command.on('progress', async (progress) => { lastIndex = progress.segment; this.handleSegmentProgress(progress.segment); }); command.on('end', () => { const processingTime = Date.now() - startTime; this.updateMetricsOnSuccess(segments.length, processingTime); job.status = types_1.TranscodeStatus.PROCESSED; jobRange.status = JobRangeStatus.PROCESSED; this.emit('transcode:complete', job); this.emitMetrics(); this.off('dispose', disposeHandler); resolve(); }); command.on('error', (err) => { this.handleTranscodeError(err, job, jobRange, segments, lastIndex, disposeHandler); if (this.shouldRetryWithFallback(err, segments[0].index)) { this.retryWithFallback(segments[0], job, resolve, reject); } else { this.emitMetrics(); reject(err); } }); this.on('dispose', disposeHandler); } /** * Handle segment progress updates * @param segmentIndex - The index of the segment */ handleSegmentProgress(segmentIndex) { const segment = this.segments.get(segmentIndex); const nextSegment = this.segments.get(segmentIndex + 1); if (nextSegment && nextSegment.value?.state() === 'fulfilled') { // Optionally kill command early for efficiency // command.kill('SIGINT'); } if (segment) { segment.value = segment.value?.resolve() ?? 
fp_1.Deferred.create().resolve(); this.metrics.segmentsProcessed++; } } /** * Handle transcoding errors with potential hardware acceleration fallback * @param err - The error that occurred * @param job - The job that was processing * @param jobRange - The job range that was processing * @param segments - The segments that were being processed * @param lastIndex - The last index processed * @param disposeHandler - The disposed handler to call */ handleTranscodeError(err, job, jobRange, segments, lastIndex, disposeHandler) { try { const unprocessedSegments = segments.filter((segment) => segment.index > lastIndex && segment.value?.state() === 'pending'); let rejectedCount = 0; for (const segment of unprocessedSegments) { try { if (segment.value && segment.value.state() === 'pending') { segment.value.reject(new Error(err.message)); rejectedCount++; } } catch (segmentError) { console.warn(`Failed to reject segment ${segment.index}:`, segmentError); } } this.metrics.segmentsFailed += rejectedCount; job.status = types_1.TranscodeStatus.ERROR; jobRange.status = JobRangeStatus.ERROR; this.emit('transcode:error', { job, error: err, }); this.off('dispose', disposeHandler); } catch (handlingError) { console.error('Error in handleTranscodeError:', handlingError); job.status = types_1.TranscodeStatus.ERROR; jobRange.status = JobRangeStatus.ERROR; } } /** * Check if we should retry with fallback * @param err - The error to check * @param segmentIndex - The index of the segment */ shouldRetryWithFallback(err, segmentIndex) { if (!this.config.enableHardwareAccelFallback || !this.optimisedAccel || this.hasFallenBackToSoftware) { return false; } if (!this.isHardwareAccelerationError(err)) { return false; } const retryCount = this.segmentRetries.get(segmentIndex) || 0; return retryCount < this.config.maxRetries; } /** * Retry transcoding with software fallback * @param segment - The segment to retry * @param originalJob - The original job * @param lock - The distributed lock (if any) * @param resolve - Resolve function * @param reject - Reject function */ retryWithFallback(segment, originalJob, resolve, reject) { const previousMethod = this.optimisedAccel?.method || 'unknown'; this.fallbackToSoftwareEncoding(); const retryCount = (this.segmentRetries.get(segment.index) || 0) + 1; this.segmentRetries.set(segment.index, retryCount); this.emit('transcode:fallback', { job: originalJob, from: previousMethod, to: 'software', }); this.emitMetrics(); const retryAction = this.type === types_1.StreamType.AUDIO ? 
this.buildAudioTranscodeOptions(segment, originalJob.priority) : this.buildVideoTranscodeOptions(segment, originalJob.priority); return void retryAction .toResult() .then(resolve) .catch(reject); } /** * Update metrics on successful completion * @param segmentCount - Number of segments processed * @param processingTime - Time taken to process segments */ updateMetricsOnSuccess(segmentCount, processingTime) { this.metrics.totalJobsCompleted++; const totalJobs = this.metrics.totalJobsCompleted; this.metrics.averageProcessingTime = (this.metrics.averageProcessingTime * (totalJobs - 1) + processingTime) / totalJobs; } /** * Calculate the closest multiple of x to n */ closestMultiple(n, x) { if (x > n) { return x; } n += x / 2; n -= (n % x); return n; } /** * Load the quality of the stream */ loadQuality() { if (this.type === types_1.StreamType.AUDIO) { return [this.qualityService.parseAudioQuality(this.quality), null]; } else if (this.type === types_1.StreamType.VIDEO) { return [null, this.qualityService.parseVideoQuality(this.quality)]; } return [null, null]; } /** * Debounce the dispose method */ debounceDispose() { if (this.timer) { clearTimeout(this.timer); } this.timer = setTimeout(() => this.dispose(), this.config.disposeTimeout); } /** * Calculate distance from current encoders to a segment * @param segmentIndex - The index of the segment */ getMinEncoderDistance(segmentIndex) { const segment = this.segments.get(segmentIndex); if (!segment) { return Infinity; } const targetTime = segment.start; const distances = this.jobRange .filter((range) => range.status === JobRangeStatus.PROCESSING && range.start <= targetTime && targetTime <= range.start + range.end) .map((range) => targetTime - range.start); if (distances.length === 0) { return Infinity; } return Math.min(...distances); } /** * Check if a segment is already scheduled for processing * @param segmentIndex - The index of the segment */ isSegmentScheduled(segmentIndex) { const segment = this.segments.get(segmentIndex); if (!segment) { return false; } return this.jobRange.some((range) => range.status === JobRangeStatus.PROCESSING && segment.start >= range.start && segment.start <= (range.start + range.end)); } /** * Check if hardware acceleration is being used */ isUsingHardwareAcceleration() { return Boolean(this.optimisedAccel) && !this.hasFallenBackToSoftware; } /** * Get the current acceleration method */ getCurrentAccelerationMethod() { if (this.hasFallenBackToSoftware) { return 'software'; } return this.optimisedAccel?.method || 'software'; } } exports.Stream = Stream; Stream.DEFAULT_CONFIG = { disposeTimeout: 30 * 60 * 1000, maxEncoderDistance: 60, segmentTimeout: 60_000, enableHardwareAccelFallback: true, retryFailedSegments: true, metricsInterval: 30_000, maxRetries: 3, }; //# sourceMappingURL=stream.js.map
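
Because `Stream` extends `ExtendedEventEmitter`, callers can observe the transcode lifecycle and the periodic metrics emitted above. A minimal sketch, assuming a `Stream` instance obtained elsewhere; the event names and payload fields come from this file, while the logging and the helper itself are illustrative:

// Sketch: attach observers to an existing Stream instance. Event names and
// metrics fields are taken from the emit() calls in stream.js.
function observeStream(stream) {
    stream.on('transcode:start', (job) => {
        console.log(`job ${job.id} started at priority ${job.priority}`);
    });

    stream.on('transcode:complete', (job) => {
        console.log(`job ${job.id} finished`);
    });

    stream.on('transcode:fallback', ({ from, to }) => {
        console.warn(`hardware acceleration error, falling back from ${from} to ${to}`);
    });

    stream.on('transcode:error', ({ job, error }) => {
        console.error(`job ${job.id} failed: ${error.message}`);
    });

    stream.on('stream:metrics', (m) => {
        // estimatedTimeRemaining is derived from millisecond processing times, or null
        const eta = m.estimatedTimeRemaining === null
            ? 'unknown'
            : `${Math.round(m.estimatedTimeRemaining / 1000)}s`;
        console.log(`${m.streamId}: ${m.segmentsCompleted}/${m.totalSegments} segments, ` +
            `accel=${m.currentAccelerationMethod}, eta=${eta}`);
    });

    stream.on('dispose', ({ id }) => {
        console.log(`stream ${id} disposed after inactivity`);
    });
}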