UNPKG

homebridge-plugin-utils

Version:

Opinionated utilities to provide common capabilities and create rich configuration webUI experiences for Homebridge plugins.

808 lines 47.7 kB
/* Copyright(C) 2023-2025, HJD (https://github.com/hjdhjd). All rights reserved.
 *
 * ffmpeg/options.ts: FFmpeg decoder and encoder options with hardware-accelerated codec support where available.
 */
import { HOMEKIT_STREAMING_HEADROOM, RPI_GPU_MINIMUM } from "./settings.js";
/**
 * Provides Homebridge FFmpeg transcoding, decoding, and encoding options, selecting codecs, pixel formats, and hardware acceleration for the host system.
 *
 * This class generates and adapts FFmpeg command-line arguments for livestreaming and event recording, optimizing for system hardware and codec availability.
 *
 * @example
 *
 * ```ts
 * const ffmpegOpts = new FfmpegOptions(optionsConfig);
 *
 * // Generate video encoder arguments for streaming.
 * const encoderOptions: VideoEncoderOptions = {
 *
 *   bitrate: 3000,
 *   fps: 30,
 *   hardwareDecoding: true,
 *   hardwareTranscoding: true,
 *   height: 1080,
 *   idrInterval: 2,
 *   inputFps: 30,
 *   level: H264Level.LEVEL4_0,
 *   profile: H264Profile.HIGH,
 *   smartQuality: true,
 *   width: 1920
 * };
 * const args = ffmpegOpts.streamEncoder(encoderOptions);
 *
 * // Generate crop filter string, if cropping is enabled.
 * const crop = ffmpegOpts.cropFilter;
 * ```
 *
 * @see AudioEncoderOptions
 * @see VideoEncoderOptions
 * @see FfmpegCodecs
 * @see {@link https://ffmpeg.org/ffmpeg.html | FFmpeg Documentation}
 *
 * @category FFmpeg
 */
export class FfmpegOptions {
  /**
   * FFmpeg codec and hardware capabilities for the current host.
   */
  codecSupport;
  /**
   * The configuration options used to initialize this instance.
   */
  config;
  /**
   * Indicates if debug logging is enabled.
   */
  debug;
  /**
   * Logging interface for output and errors.
   */
  log;
  /**
   * Function returning the name for this options instance to be used for logging.
   */
  name;
  // Pixel formats usable by the selected hardware acceleration pipeline. Populated by configureHwAccel() and consumed when building decoder and encoder arguments.
  hwPixelFormat;
  /**
   * Creates an instance of Homebridge FFmpeg encoding and decoding options.
   *
   * @param options - FFmpeg options configuration.
   *
   * @example
   *
   * ```ts
   * const ffmpegOpts = new FfmpegOptions(optionsConfig);
   * ```
   */
  constructor(options) {
    this.codecSupport = options.codecSupport;
    this.config = options;
    this.debug = options.debug ?? false;
    this.hwPixelFormat = [];
    this.log = options.log;
    this.name = options.name;
    // Configure our hardware acceleration support.
    this.configureHwAccel();
  }
  /**
   * Determines and configures hardware-accelerated video decoding and transcoding for the host system.
   *
   * This internal method checks for the availability of hardware codecs and accelerators based on the host platform and updates
   * FFmpeg options to use the best available hardware or falls back to software processing when necessary.
   * It logs warnings or errors if required codecs or hardware acceleration are unavailable.
   *
   * This method is called automatically by the `FfmpegOptions` constructor and is not intended to be called directly.
   *
   * @returns `true` if hardware-accelerated transcoding is enabled after configuration, otherwise `false`.
   *
   * @example
   *
   * ```ts
   * // This method is invoked by the FfmpegOptions constructor:
   * const ffmpegOpts = new FfmpegOptions(optionsConfig);
   *
   * // Hardware acceleration configuration occurs automatically.
   * // Developers typically do not need to call configureHwAccel() directly.
   * ```
   *
   * @see FfmpegCodecs
   * @see FfmpegOptions
   */
  configureHwAccel() {
    let logMessage = "";
    // Utility to return which hardware acceleration features are currently available to us.
    const accelCategories = () => {
      const categories = [];
      if (this.config.hardwareDecoding) {
        categories.push("decoding");
      }
      if (this.config.hardwareTranscoding) {
        categories.push("\u26ED\uFE0E transcoding");
      }
      return categories.join(" and ");
    };
    // Hardware-accelerated decoding is enabled by default, where supported. Let's select the decoder options accordingly where supported.
    if (this.config.hardwareDecoding) {
      // Utility function to check that we have a specific decoder codec available to us.
      // eslint-disable-next-line @typescript-eslint/no-unused-vars
      const validateDecoder = (codec, pixelFormat) => {
        if (!this.config.codecSupport.hasDecoder("h264", codec)) {
          this.log.error("Unable to enable hardware-accelerated decoding. Your video processor does not have support for the " + codec + " decoder. " +
            "Using software decoding instead.");
          this.config.hardwareDecoding = false;
          return false;
        }
        this.hwPixelFormat.push(...pixelFormat);
        return true;
      };
      // Utility function to check that we have a specific hardware accelerator available to us.
      const validateHwAccel = (accel, pixelFormat) => {
        if (!this.config.codecSupport.hasHwAccel(accel)) {
          this.log.error("Unable to enable hardware-accelerated decoding. Your video processor does not have support for the " + accel + " hardware accelerator. " +
            "Using software decoding instead.");
          this.config.hardwareDecoding = false;
          return false;
        }
        this.hwPixelFormat.push(...pixelFormat);
        return true;
      };
      switch (this.codecSupport.hostSystem) {
        case "macOS.Apple":
        case "macOS.Intel":
          // Verify that we have hardware-accelerated decoding available to us.
          validateHwAccel("videotoolbox", ["videotoolbox_vld", "nv12", "yuv420p"]);
          break;
        case "raspbian":
          // If it's less than the minimum hardware GPU memory we need on a Raspberry Pi, we revert back to our default decoder.
          if (this.config.codecSupport.gpuMem < RPI_GPU_MINIMUM) {
            this.log.info("Disabling hardware-accelerated %s. Adjust the GPU memory configuration on your Raspberry Pi to at least %s MB to enable it.",
              accelCategories(), RPI_GPU_MINIMUM);
            this.config.hardwareDecoding = false;
            this.config.hardwareTranscoding = false;
            return false;
          }
          // Verify that we have the hardware decoder available to us. Unfortunately, at the moment, it seems that hardware decoding is flaky, at best, on Raspberry Pi.
          // validateDecoder("h264_mmal", [ "mmal", "yuv420p" ]);
          // validateDecoder("h264_v4l2m2ml", [ "yuv420p" ]);
          this.config.hardwareDecoding = false;
          break;
        default:
          // Back to software decoding unless we're on a known system that always supports hardware decoding.
          this.config.hardwareDecoding = false;
          break;
      }
    }
    // If we've enabled hardware-accelerated transcoding, let's select the encoder options accordingly where supported.
    if (this.config.hardwareTranscoding) {
      // Utility function to check that we have a specific encoder codec available to us.
      const validateEncoder = (codec) => {
        if (!this.config.codecSupport.hasEncoder("h264", codec)) {
          this.log.error("Unable to enable hardware-accelerated transcoding. Your video processor does not have support for the " + codec + " encoder. " +
            "Using software transcoding instead.");
          this.config.hardwareTranscoding = false;
          return false;
        }
        return true;
      };
      switch (this.codecSupport.hostSystem) {
        case "macOS.Apple":
        case "macOS.Intel":
          // Verify that we have the hardware encoder available to us.
          validateEncoder("h264_videotoolbox");
          // Validate that we have access to the AudioToolbox AAC encoder.
          if (!this.config.codecSupport.hasEncoder("aac", "aac_at")) {
            this.log.error("Your video processor does not have support for the native macOS AAC encoder, aac_at. Will attempt to use libfdk_aac instead.");
          }
          break;
        case "raspbian":
          // Verify that we have the hardware encoder available to us.
          validateEncoder("h264_v4l2m2m");
          logMessage = "Raspberry Pi hardware acceleration will be used for livestreaming. " +
            "HomeKit Secure Video recordings are not supported by the hardware encoder and will use software transcoding instead";
          // Ensure we have the pixel format the Raspberry Pi GPU is expecting available to us, if it isn't already.
          if (!this.hwPixelFormat.includes("yuv420p")) {
            this.hwPixelFormat.push("yuv420p");
          }
          break;
        default:
          // Let's see if we have Intel QuickSync hardware decoding available to us.
          if (this.config.codecSupport.hasHwAccel("qsv") &&
            this.config.codecSupport.hasDecoder("h264", "h264_qsv") &&
            this.config.codecSupport.hasEncoder("h264", "h264_qsv") &&
            this.config.codecSupport.hasDecoder("hevc", "hevc_qsv")) {
            this.config.hardwareDecoding = true;
            this.hwPixelFormat.push("qsv", "yuv420p");
            logMessage = "Intel Quick Sync Video";
          } else {
            // Back to software encoding.
            this.config.hardwareDecoding = false;
            this.config.hardwareTranscoding = false;
          }
          break;
      }
    }
    // Inform the user.
    if (this.config.hardwareDecoding || this.config.hardwareTranscoding) {
      this.log.info("\u26A1\uFE0F Hardware-accelerated " + accelCategories() + " enabled" + (logMessage.length ? ": " + logMessage : "") + ".");
    }
    return this.config.hardwareTranscoding;
  }
  /**
   * Returns the audio encoder arguments to use when transcoding.
   *
   * @param options - Optional. The encoder options to use for generating FFmpeg arguments.
   * @returns Array of FFmpeg command-line arguments for audio encoding.
   *
   * @example
   *
   * ```ts
   * const args = ffmpegOpts.audioEncoder();
   * ```
   */
  audioEncoder(options = {}) {
    // Default our codec to AAC_ELD unless specified.
    options = Object.assign({}, { codec: 1 /* AudioRecordingCodecType.AAC_ELD */ }, options);
    // If we don't have libfdk_aac available to us, we're essentially dead in the water.
    let encoderOptions = [];
    // Utility function to return a default audio encoder codec.
    const defaultAudioEncoderOptions = () => {
      const audioOptions = [];
      if (this.config.codecSupport.hasEncoder("aac", "libfdk_aac")) {
        // Default to libfdk_aac since FFmpeg doesn't natively support AAC-ELD. We use the following options by default:
        //
        // -codec:a libfdk_aac   Use the libfdk_aac encoder.
        // -afterburner 1        Increases audio quality at the expense of needing a little bit more computational power in libfdk_aac.
        // -eld_v2 1             Use the enhanced low delay v2 standard for better audio characteristics.
        audioOptions.push("-codec:a", "libfdk_aac", "-afterburner", "1");
        switch (options.codec) {
          case 1 /* AudioRecordingCodecType.AAC_ELD */:
            audioOptions.push("-eld_v2", "1");
            // If we're using Jellyfin's FFmpeg, its libfdk_aac is broken and crashes when using spectral band replication.
            if (!/-Jellyfin$/.test(this.config.codecSupport.ffmpegVersion)) {
              // -eld_sbr 1   Use spectral band replication to further enhance audio.
              audioOptions.push("-eld_sbr", "1");
            }
            break;
          case 0 /* AudioRecordingCodecType.AAC_LC */:
          default:
            audioOptions.push("-vbr", "4");
            break;
        }
      }
      return audioOptions;
    };
    switch (this.codecSupport.hostSystem) {
      case "macOS.Apple":
      case "macOS.Intel":
        // If we don't have audiotoolbox available, let's default back to libfdk_aac.
        if (!this.config.codecSupport.hasEncoder("aac", "aac_at")) {
          encoderOptions = defaultAudioEncoderOptions();
          break;
        }
        // aac_at is the macOS audio encoder API. We use the following options:
        //
        // -codec:a aac_at   Use the aac_at encoder on macOS.
        // -aac_at_mode      Select the bitrate mode: constant bitrate (cbr) for AAC-ELD and variable bitrate (vbr) for AAC-LC, allowing the encoder to
        //                   optimize audio within the requested bitrates.
        encoderOptions = [ "-codec:a", "aac_at" ];
        switch (options.codec) {
          case 1 /* AudioRecordingCodecType.AAC_ELD */:
            encoderOptions.push("-aac_at_mode", "cbr");
            break;
          case 0 /* AudioRecordingCodecType.AAC_LC */:
          default:
            encoderOptions.push("-aac_at_mode", "vbr");
            encoderOptions.push("-q:a", "2");
            break;
        }
        break;
      default:
        encoderOptions = defaultAudioEncoderOptions();
        break;
    }
    return encoderOptions;
  }
  /**
   * Returns the audio decoder to use when decoding.
   *
   * @returns The FFmpeg audio decoder string.
   */
  get audioDecoder() {
    return "libfdk_aac";
  }
  /**
   * Returns the video decoder arguments to use for decoding video.
   *
   * @param codec - Optional. Codec to decode (`"av1"`, `"h264"` (default), or `"hevc"`).
   * @returns Array of FFmpeg command-line arguments for video decoding or an empty array if the codec isn't supported.
   *
   * @example
   *
   * ```ts
   * const args = ffmpegOpts.videoDecoder("h264");
   * ```
   */
  videoDecoder(codec = "h264") {
    // Normalize the requested codec name before selecting decoder options.
    switch (codec.toLowerCase()) {
      case "av1":
        codec = "av1";
        break;
      case "h264":
        codec = "h264";
        break;
      case "h265":
      case "hevc":
        codec = "hevc";
        break;
      default:
        // If it's unknown to us, we bail out.
        return [];
    }
    // Intel QSV decoder to codec mapping.
    const qsvDecoder = { "av1": "av1_qsv", "h264": "h264_qsv", "hevc": "hevc_qsv" };
    // Default to no special decoder options for inbound streams.
    let decoderOptions = [];
    // If we've enabled hardware-accelerated transcoding, let's select decoder options accordingly where supported.
    if (this.config.hardwareDecoding) {
      switch (this.codecSupport.hostSystem) {
        case "macOS.Apple":
        case "macOS.Intel":
          // h264_videotoolbox is the macOS hardware decoder and encoder API. We use the following options for decoding video:
          //
          // -hwaccel videotoolbox   Select Video Toolbox for hardware-accelerated H.264 decoding.
          decoderOptions = [ "-hwaccel", "videotoolbox" ];
          break;
        case "raspbian":
          // h264_mmal is the preferred Raspberry Pi hardware decoder codec, currently disabled (see configureHwAccel):
          //
          // -codec:v h264_mmal   Select the Multimedia Abstraction Layer codec for hardware-accelerated H.264 processing.
          decoderOptions = [
            // "-codec:v", "h264_mmal"
          ];
          break;
        default:
          // h264_qsv is the Intel Quick Sync Video hardware encoder and decoder.
          //
          // -hwaccel qsv     Select Quick Sync Video to enable hardware-accelerated H.264 decoding.
          // -codec:v X_qsv   Select the Quick Sync Video codec for hardware-accelerated AV1, H.264, or HEVC processing. AV1 decoding isn't available
          //                  before 11th generation Intel CPUs.
          decoderOptions = ((codec === "av1") && (this.codecSupport.intelGeneration < 11)) ? [] :
            [ "-hwaccel", "qsv", "-hwaccel_output_format", "qsv", "-codec:v", qsvDecoder[codec] ];
          break;
      }
    }
    return decoderOptions;
  }
  /**
   * Returns the FFmpeg crop filter string, or a default no-op filter if cropping is disabled.
   *
   * @returns The crop filter string for FFmpeg.
   */
  get cropFilter() {
    // If we haven't enabled cropping, tell the crop filter to do nothing.
    // NOTE(review): the no-op multipliers of 100 imply crop configuration values are scaled by a factor of 100 somewhere upstream — verify against the
    // crop configuration's units, since the configured path below multiplies iw/ih by width/height/x/y directly.
    if (!this.config.crop) {
      return "crop=w=iw*100:h=ih*100:x=iw*0:y=ih*0";
    }
    // Generate our crop filter based on what the user has configured.
    return "crop=" + [
      "w=iw*" + this.config.crop.width.toString(),
      "h=ih*" + this.config.crop.height.toString(),
      "x=iw*" + this.config.crop.x.toString(),
      "y=ih*" + this.config.crop.y.toString()
    ].join(":");
  }
  /**
   * Generates the default set of FFmpeg video encoder arguments for software transcoding using libx264.
   *
   * This method builds command-line options for the FFmpeg libx264 encoder based on the provided encoder options, including bitrate, H.264 profile and level, pixel
   * format, frame rate, buffer size, and optional smart quality settings. It is used internally when hardware-accelerated transcoding is not enabled or supported.
   *
   * @param options - The encoder options to use for generating FFmpeg arguments.
   *
   * @returns An array of FFmpeg command-line arguments for software video encoding.
   *
   * @example
   *
   * ```ts
   * const encoderOptions: VideoEncoderOptions = {
   *
   *   bitrate: 2000,
   *   fps: 30,
   *   height: 720,
   *   idrInterval: 2,
   *   inputFps: 30,
   *   level: H264Level.LEVEL3_1,
   *   profile: H264Profile.MAIN,
   *   smartQuality: true,
   *   width: 1280
   * };
   *
   * const args = ffmpegOpts['defaultVideoEncoderOptions'](encoderOptions);
   * ```
   *
   * @see VideoEncoderOptions
   */
  defaultVideoEncoderOptions(options) {
    const videoFilters = [];
    // fps=fps=   Use the fps filter to provide the frame rate requested by HomeKit. We only need to apply this filter if our input and output
    //            frame rates aren't already identical.
    const fpsFilter = ["fps=fps=" + options.fps.toString()];
    // Set our FFmpeg pixel-level filters:
    //
    // scale=-2:min(ih\,height)   Scale the video to the size that's being requested while respecting aspect ratios and ensuring our final dimensions are
    //                            a power of two.
    // format=                    Set the pixel formats we want to target for output.
    const pixelFilters = [
      "scale=-2:min(ih\\," + options.height.toString() + ")",
      "format=" + [...new Set([...this.hwPixelFormat, "yuvj420p"])].join("|")
    ];
    // Let's assemble our filter collection. If we're reducing our framerate, we want to frontload the fps filter so the downstream filters need to do less work.
    // If we're increasing our framerate, we want to do pixel operations on the minimal set of source frames that we need, since we're just going to duplicate them.
    if (options.fps < options.inputFps) {
      videoFilters.push(...fpsFilter, ...pixelFilters);
    } else {
      videoFilters.push(...pixelFilters, ...(options.fps > options.inputFps ? fpsFilter : []));
    }
    // Default to the tried-and-true libx264. We use the following options by default:
    //
    // -codec:v libx264   Use the excellent libx264 H.264 encoder.
    // -preset veryfast   Use the veryfast encoding preset in libx264, which provides a good balance of encoding speed and quality.
    // -profile:v         Use the H.264 profile that HomeKit is requesting when encoding.
    // -level:v           Use the H.264 profile level that HomeKit is requesting when encoding.
    // -noautoscale       Don't attempt to scale the video stream automatically.
    // -bf 0              Disable B-frames when encoding to increase compatibility against occasionally finicky HomeKit clients.
    // -filter:v          Set the pixel format and scale the video to the size we want while respecting aspect ratios and ensuring our final
    //                    dimensions are a power of two.
    // -g:v               Set the group of pictures to the number of frames per second * the interval in between keyframes to ensure a solid
    //                    livestreaming experience.
    // -bufsize size      This is the decoder buffer size, which drives the variability / quality of the output bitrate.
    // -maxrate bitrate   The maximum bitrate tolerance, used with -bufsize. This provides an upper bound on bitrate, with a little bit extra to
    //                    allow encoders some variation in order to maximize quality while honoring bandwidth constraints.
    const encoderOptions = [
      "-codec:v", "libx264",
      "-preset", "veryfast",
      "-profile:v", this.getH264Profile(options.profile),
      "-level:v", this.getH264Level(options.level),
      "-noautoscale",
      "-bf", "0",
      "-filter:v", videoFilters.join(", "),
      "-g:v", (options.fps * options.idrInterval).toString(),
      "-bufsize", (2 * options.bitrate).toString() + "k",
      "-maxrate", (options.bitrate + (options.smartQuality ? HOMEKIT_STREAMING_HEADROOM : 0)).toString() + "k"
    ];
    // Using libx264's constant rate factor mode produces generally better results across the board. We use a capped CRF approach, allowing libx264 to
    // make intelligent choices about how to adjust bitrate to achieve a certain quality level depending on the complexity of the scene being encoded, but
    // constraining it to a maximum bitrate to stay within the bandwidth constraints HomeKit is requesting.
    if (options.smartQuality) {
      // -crf 20   Use a constant rate factor of 20, to allow libx264 the ability to vary bitrates to achieve the visual quality we
      //           want, constrained by our maximum bitrate.
      encoderOptions.push("-crf", "20");
    } else {
      // For recording HKSV, we really want to maintain a tight rein on bitrate and don't want to freelance with perceived quality for two reasons - HKSV
      // is very latency sensitive and it's also very particular about bitrates and the specific format of the stream it receives. The second reason is that
      // HKSV typically requests bitrates of around 2000kbps, which results in a reasonably high quality recording, as opposed to the typical 2-300kbps
      // that livestreaming from the Home app itself generates. Those lower bitrates in livestreaming really benefit from the magic that using a good CRF value
      // can produce in libx264.
      encoderOptions.push("-b:v", options.bitrate.toString() + "k");
    }
    return encoderOptions;
  }
  /**
   * Returns the video encoder options to use for HomeKit Secure Video (HKSV) event recording.
   *
   * @param options - Encoder options to use.
   * @returns Array of FFmpeg command-line arguments for video encoding.
   */
  recordEncoder(options) {
    // We always disable smart quality when recording due to HomeKit's strict requirements here.
    options.smartQuality = false;
    // Generally, we default to using the same encoding options we use to transcode livestreams, unless we have platform-specific quirks we need to address,
    // such as where we can have hardware-accelerated transcoded livestreaming, but not hardware-accelerated HKSV event recording. The other noteworthy
    // aspect here is that HKSV is quite specific in what it wants, and isn't very tolerant of creative license in how you may choose to alter bitrate to
    // address quality. When we call our encoders, we also let them know we don't want any additional quality optimizations when transcoding HKSV events.
    switch (this.codecSupport.hostSystem) {
      case "raspbian":
        // Raspberry Pi struggles with hardware-accelerated HKSV event recording due to issues in the FFmpeg codec driver, currently. We hope this improves
        // over time and can offer it to Pi users, or develop a workaround. For now, we default to libx264.
        return this.defaultVideoEncoderOptions(options);
      default:
        // By default, we use the same options for HKSV and streaming.
        return this.streamEncoder(options);
    }
  }
  /**
   * Returns the video encoder options to use when transcoding for livestreaming.
   *
   * @param options - Encoder options to use.
   * @returns Array of FFmpeg command-line arguments for video encoding.
   *
   * @example
   *
   * ```ts
   * const args = ffmpegOpts.streamEncoder(encoderOptions);
   * ```
   */
  streamEncoder(options) {
    // Default hardware decoding and smart quality to true unless specified.
options = Object.assign({}, { hardwareDecoding: true, hardwareTranscoding: this.config.hardwareTranscoding, smartQuality: true }, options); // In case we don't have a defined pixel format. if (!this.hwPixelFormat.length) { this.hwPixelFormat.push("yuvj420p"); } // If we aren't hardware-accelerated, we default to libx264. if (!this.config.hardwareTranscoding || !options.hardwareTranscoding) { return this.defaultVideoEncoderOptions(options); } // If we've enabled hardware-accelerated transcoding, let's select encoder options accordingly. // // We begin by adjusting the maximum bitrate tolerance used with -bufsize. This provides an upper bound on bitrate, with a little bit extra to allow encoders some // variation in order to maximize quality while honoring bandwidth constraints. const adjustedMaxBitrate = options.bitrate + (options.smartQuality ? HOMEKIT_STREAMING_HEADROOM : 0); // Initialize our options. const encoderOptions = []; let videoFilters = []; // fps=fps= Use the fps filter to provide the frame rate requested by HomeKit. We only need to apply this filter if our input and output // frame rates aren't already identical. const fpsFilter = ["fps=fps=" + options.fps.toString()]; // Set our FFmpeg pixel-level filters: // // crop Crop filter options, if requested. // scale=-2:min(ih\,height) Scale the video to the size that's being requested while respecting aspect ratios and ensuring our final dimensions are // a power of two. // format= Set the pixel formats we want to target for output. let pixelFilters = [ ...(this.config.crop ? this.cropFilter : []), "scale=-2:min(ih\\," + options.height.toString() + ")", "format=" + this.hwPixelFormat.join("|") ]; // Let's assemble our filter collection. If we're reducing our framerate, we want to frontload the fps filter so the downstream filters need to do less work. 
If we're // increasing our framerate, we want to do pixel operations on the minimal set of source frames that we need, since we're just going to duplicate them. if (options.fps < options.inputFps) { videoFilters = [...fpsFilter, ...pixelFilters]; } else { videoFilters = [...pixelFilters, ...(options.fps > options.inputFps ? fpsFilter : [])]; } switch (this.codecSupport.hostSystem) { case "macOS.Apple": // h264_videotoolbox is the macOS hardware encoder API. We use the following options on Apple Silicon: // // -codec:v Specify the macOS hardware encoder, h264_videotoolbox. // -allow_sw 1 Allow the use of the software encoder if the hardware encoder is occupied or unavailable. // This allows us to scale when we get multiple streaming requests simultaneously and consume all the available encode engines. // -realtime 1 We prefer speed over quality - if the encoder has to make a choice, sacrifice one for the other. // -coder cabac Use the cabac encoder for better video quality with the encoding profiles we use for HomeKit. // -profile:v Use the H.264 profile that HomeKit is requesting when encoding. // -level:v 0 We override what HomeKit requests for the H.264 profile level on macOS when we're using hardware-accelerated transcoding because // the hardware encoder is particular about how to use levels. Setting it to 0 allows the encoder to decide for itself. // -bf 0 Disable B-frames when encoding to increase compatibility against occasionally finicky HomeKit clients. // -noautoscale Don't attempt to scale the video stream automatically. // -filter:v Set the pixel format, adjust the frame rate if needed, and scale the video to the size we want while respecting aspect ratios and // ensuring our final dimensions are a power of two. // -g:v Set the group of pictures to the number of frames per second * the interval in between keyframes to ensure a solid // livestreamng exerience. 
// -bufsize size This is the decoder buffer size, which drives the variability / quality of the output bitrate. // -maxrate bitrate The maximum bitrate tolerance used in concert with -bufsize to constrain the maximum bitrate permitted. encoderOptions.push("-codec:v", "h264_videotoolbox", "-allow_sw", "1", "-realtime", "1", "-coder", "cabac", "-profile:v", this.getH264Profile(options.profile), "-level:v", "0", "-bf", "0", "-noautoscale", "-filter:v", videoFilters.join(", "), "-g:v", (options.fps * options.idrInterval).toString(), "-bufsize", (2 * options.bitrate).toString() + "k", "-maxrate", adjustedMaxBitrate.toString() + "k"); if (options.smartQuality) { // -q:v 90 Use a fixed quality scale of 90, to allow videotoolbox the ability to vary bitrates to achieve the visual quality we want, // constrained by our maximum bitrate. This is an Apple Silicon-specific feature. encoderOptions.push("-q:v", "90"); } else { // -b:v Average bitrate that's being requested by HomeKit. encoderOptions.push("-b:v", options.bitrate.toString() + "k"); } return encoderOptions; case "macOS.Intel": // h264_videotoolbox is the macOS hardware encoder API. We use the following options on Intel-based Macs: // // -codec:v Specify the macOS hardware encoder, h264_videotoolbox. // -allow_sw 1 Allow the use of the software encoder if the hardware encoder is occupied or unavailable. // This allows us to scale when we get multiple streaming requests simultaneously that can consume all the available encode engines. // -realtime 1 We prefer speed over quality - if the encoder has to make a choice, sacrifice one for the other. // -coder cabac Use the cabac encoder for better video quality with the encoding profiles we use for HomeKit. // -profile:v Use the H.264 profile that HomeKit is requesting when encoding. 
// -level:v 0 We override what HomeKit requests for the H.264 profile level on macOS when we're using hardware-accelerated transcoding because // the hardware encoder is particular about how to use levels. Setting it to 0 allows the encoder to decide for itself. // -bf 0 Disable B-frames when encoding to increase compatibility against occasionally finicky HomeKit clients. // -noautoscale Don't attempt to scale the video stream automatically. // -filter:v Set the pixel format, adjust the frame rate if needed, and scale the video to the size we want while respecting aspect ratios and // ensuring our final dimensions are a power of two. // -b:v Average bitrate that's being requested by HomeKit. We can't use a quality constraint and allow for more optimization of the // bitrate on Intel-based Macs due to hardware / API limitations. // -g:v Set the group of pictures to the number of frames per second * the interval in between keyframes to ensure a solid // livestreaming exerience. // -bufsize size This is the decoder buffer size, which drives the variability / quality of the output bitrate. // -maxrate bitrate The maximum bitrate tolerance used in concert with -bufsize to constrain the maximum bitrate permitted. return [ "-codec:v", "h264_videotoolbox", "-allow_sw", "1", "-realtime", "1", "-coder", "cabac", "-profile:v", this.getH264Profile(options.profile), "-level:v", "0", "-bf", "0", "-noautoscale", "-filter:v", videoFilters.join(", "), "-b:v", options.bitrate.toString() + "k", "-g:v", (options.fps * options.idrInterval).toString(), "-bufsize", (2 * options.bitrate).toString() + "k", "-maxrate", adjustedMaxBitrate.toString() + "k" ]; case "raspbian": // h264_v4l2m2m is the preferred interface to the Raspberry Pi hardware encoder API. We use the following options: // // -codec:v Specify the Raspberry Pi hardware encoder, h264_v4l2m2m. // -noautoscale Don't attempt to scale the video stream automatically. 
// -filter:v Set the pixel format, adjust the frame rate if needed, and scale the video to the size we want while respecting aspect ratios and // ensuring our final dimensions are a power of two. // -b:v Average bitrate that's being requested by HomeKit. We can't use a quality constraint and allow for more optimization of the // bitrate due to v4l2m2m limitations. // -g:v Set the group of pictures to the number of frames per second * the interval in between keyframes to ensure a solid // livestreamng exerience. // -bufsize size This is the decoder buffer size, which drives the variability / quality of the output bitrate. // -maxrate bitrate The maximum bitrate tolerance used in concert with -bufsize to constrain the maximum bitrate permitted. return [ "-codec:v", "h264_v4l2m2m", "-profile:v", this.getH264Profile(options.profile, true), "-bf", "0", "-noautoscale", "-reset_timestamps", "1", "-filter:v", videoFilters.join(", "), "-b:v", options.bitrate.toString() + "k", "-g:v", (options.fps * options.idrInterval).toString(), "-bufsize", (2 * options.bitrate).toString() + "k", "-maxrate", adjustedMaxBitrate.toString() + "k" ]; default: // Clear out any prior video filters. videoFilters = []; // We execute the following GPU-accelerated operations using the Quick Sync Video post-processing filter: // // crop Crop filter options, if requested. // hwupload If we aren't hardware decoding, we need to upload decoded frames to QSV to process them. // format=same Set the output pixel format to the same as the input, since it's already in the GPU. // w=...:h... Scale the video to the size that's being requested while respecting aspect ratios. pixelFilters = [ ...(this.config.crop ? this.cropFilter : []), (options.hardwareDecoding ? "" : "hwupload,") + "vpp_qsv=" + [ "format=same", "w=min(iw\\, (iw / ih) * " + options.height.toString() + ")", "h=min(ih\\, " + options.height.toString() + ")" ].join(":") ]; // Let's assemble our filter collection. 
If we're reducing our framerate, we want to frontload the fps filter so the downstream filters need to do less work. If // we're increasing our framerate, we want to do pixel operations on the minimal set of source frames that we need, since we're just going to duplicate them. if (options.fps < options.inputFps) { videoFilters.push(...fpsFilter, ...pixelFilters); } else { videoFilters.push(...pixelFilters, ...(options.fps > options.inputFps ? fpsFilter : [])); } // h264_qsv is the Intel Quick Sync Video hardware encoder API. We use the following options: // // -codec:v Specify the Intel Quick Sync Video hardware encoder, h264_qsv. // -profile:v Use the H.264 profile that HomeKit is requesting when encoding. // -level:v 0 We override what HomeKit requests for the H.264 profile level when we're using hardware-accelerated transcoding because // the hardware encoder will determine which levels to use. Setting it to 0 allows the encoder to decide for itself. // -bf 0 Disable B-frames when encoding to increase compatibility against occasionally finicky HomeKit clients. // -noautoscale Don't attempt to scale the video stream automatically. // -init_hw_device Initialize our hardware accelerator and assign it a name to be used in the FFmpeg command line. // -filter_hw_device Specify the hardware accelerator to be used with our video filter pipeline. // -filter:v Set the pixel format, adjust the frame rate if needed, and scale the video to the size we want while respecting aspect ratios and // ensuring our final dimensions are a power of two. // -g:v Set the group of pictures to the number of frames per second * the interval in between keyframes to ensure a solid // livestreamng exerience. // -bufsize size This is the decoder buffer size, which drives the variability / quality of the output bitrate. // -maxrate bitrate The maximum bitrate tolerance used in concert with -bufsize to constrain the maximum bitrate permitted. 
// Assemble the common h264_qsv encoder arguments. The quality strategy (intelligent constant quality versus a fixed average bitrate) is appended below based on
// whether smart quality has been requested.
encoderOptions.push("-codec:v", "h264_qsv", "-profile:v", this.getH264Profile(options.profile), "-level:v", "0", "-bf", "0", "-noautoscale", "-init_hw_device", "qsv=hw", "-filter_hw_device", "hw", "-filter:v", videoFilters.join(", "), "-g:v", (options.fps * options.idrInterval).toString(), "-bufsize", (2 * options.bitrate).toString() + "k", "-maxrate", adjustedMaxBitrate.toString() + "k");
if (options.smartQuality) {
    // -global_quality 20 Use a global quality setting of 20, to allow QSV the ability to vary bitrates to achieve the visual quality we want,
    //                    constrained by our maximum bitrate. This leverages a QSV-specific feature known as intelligent constant quality.
    encoderOptions.push("-global_quality", "20");
}
else {
    // -b:v Average bitrate that's being requested by HomeKit.
    encoderOptions.push("-b:v", options.bitrate.toString() + "k");
}
// Return the fully assembled QSV encoder argument list.
return encoderOptions;
}
}
/**
 * Returns the maximum pixel count supported by a specific hardware encoder on the host system, or `Infinity` if not limited.
 *
 * @returns Maximum supported pixel count.
 */
get hostSystemMaxPixels() {
    // Pixel-count limits only apply when the user has opted into hardware-accelerated transcoding.
    if (this.config.hardwareTranscoding) {
        switch (this.codecSupport.hostSystem) {
            case "raspbian":
                // For constrained environments like Raspberry Pi, when hardware transcoding has been selected for a camera, we limit the available source streams to no more
                // than 1080p. In practice, that means that devices like the G4 Pro can't use their highest quality stream for transcoding due to the limitations of the
                // Raspberry Pi GPU that cannot support higher pixel counts.
                return 1920 * 1080;
            default:
                // Other host systems have no encoder-imposed pixel-count ceiling that we enforce here.
                break;
        }
    }
    // Infinity signals that there is no limit on the source stream pixel count.
    return Infinity;
}
/**
 * Converts a HomeKit H.264 level enum value to the corresponding FFmpeg string or numeric representation.
 *
 * This helper is used to translate between HomeKit's `H264Level` enum and the string or numeric format expected by FFmpeg's `-level:v` argument.
 *
 * @param level - The H.264 level to translate.
 * @param numeric - Optional.
If `true`, returns the numeric representation (e.g., "31"). If `false` or omitted, returns the standard string format (e.g., "3.1"). * * @returns The FFmpeg-compatible H.264 level string or numeric value. * * @example * * ```ts * ffmpegOpts['getH264Level'](H264Level.LEVEL3_1); // "3.1" * * ffmpegOpts['getH264Level'](H264Level.LEVEL4_0, true); // "40" * ``` * * @see H264Level */ getH264Level(level, numeric = false) { switch (level) { case 0 /* H264Level.LEVEL3_1 */: return numeric ? "31" : "3.1"; case 1 /* H264Level.LEVEL3_2 */: return numeric ? "32" : "3.2"; case 2 /* H264Level.LEVEL4_0 */: return numeric ? "40" : "4.0"; default: return numeric ? "31" : "3.1"; } } /** * Converts a HomeKit H.264 profile enum value to the corresponding FFmpeg string or numeric representation. * * This helper is used to translate between HomeKit's `H264Profile` enum and the string or numeric format expected by FFmpeg's `-profile:v` argument. * * @param profile - The H.264 profile to translate. * @param numeric - Optional. If `true`, returns the numeric representation (e.g., "100"). If `false` or omitted, returns the standard string format (e.g., "high"). * * @returns The FFmpeg-compatible H.264 profile string or numeric value. * * @example * * ```ts * ffmpegOpts['getH264Profile'](H264Profile.HIGH); // "high" * * ffmpegOpts['getH264Profile'](H264Profile.BASELINE, true); // "66" * ``` * * @see H264Profile */ getH264Profile(profile, numeric = false) { switch (profile) { case 0 /* H264Profile.BASELINE */: return numeric ? "66" : "baseline"; case 2 /* H264Profile.HIGH */: return numeric ? "100" : "high"; case 1 /* H264Profile.MAIN */: return numeric ? "77" : "main"; default: return numeric ? "77" : "main"; } } } //# sourceMappingURL=options.js.map