UNPKG

@ktt45678/vidstack

Version: (not captured in this extract — see the package page on unpkg for the published version)

UI component library for building high-quality, accessible video and audio experiences on the web.

1,633 lines (1,615 loc) 164 kB
import { DOMEvent, EventsTarget, fscreen, ViewController, listenEvent, onDispose, signal, peek, isString, isNumber, State, tick, Component, functionThrottle, effect, untrack, functionDebounce, isArray, isKeyboardClick, isKeyboardEvent, waitIdlePeriod, deferredPromise, isUndefined, provideContext, setAttribute, animationFrameThrottle, uppercaseFirstChar, camelToKebabCase, setStyle, computed, prop, method, scoped, noop } from './vidstack-fG_Sx3Q9.js'; import { mediaContext, useMediaContext } from './vidstack-DQ4Fz5gz.js'; import { canOrientScreen, IS_IPHONE, isAudioSrc, canPlayAudioType, isVideoSrc, canPlayVideoType, isHLSSupported, isHLSSrc, isDASHSupported, isDASHSrc, IS_CHROME, IS_IOS, canGoogleCastSrc, canChangeVolume } from './vidstack-BpOkecTJ.js'; import { TimeRange, getTimeRangesStart, getTimeRangesEnd, updateTimeIntervals } from './vidstack-CLRUrTzh.js'; import { isTrackCaptionKind, TextTrackSymbol, TextTrack } from './vidstack-DSRs3D8P.js'; import { ListSymbol } from './vidstack-BXMqlVv4.js'; import { QualitySymbol } from './vidstack-BYmCj-36.js'; import { coerceToError } from './vidstack-DbBJlz7I.js'; import { preconnect, getRequestCredentials } from './vidstack-BnCZ4oyK.js'; import { isHTMLElement, isTouchPinchEvent, setAttributeIfEmpty } from './vidstack-DdUZGy1h.js'; import { round, clampNumber } from './vidstack-Dihypf8P.js'; import { FocusVisibleController } from './vidstack-DvBAQUpx.js'; import '../providers/vidstack-dash.js'; var _a$1; const GROUPED_LOG = Symbol("GROUPED_LOG" ); _a$1 = GROUPED_LOG; const _GroupedLog = class _GroupedLog { constructor(logger, level, title, root, parent) { this.logger = logger; this.level = level; this.title = title; this.root = root; this.parent = parent; this[_a$1] = true; this.logs = []; } log(...data) { this.logs.push({ data }); return this; } labelledLog(label, ...data) { this.logs.push({ label, data }); return this; } groupStart(title) { return new _GroupedLog(this.logger, this.level, title, this.root ?? 
this, this); } groupEnd() { this.parent?.logs.push(this); return this.parent ?? this; } dispatch() { return this.logger.dispatch(this.level, this.root ?? this); } }; let GroupedLog = _GroupedLog; function isGroupedLog(data) { return !!data?.[GROUPED_LOG]; } class Logger { constructor() { this._target = null; } error(...data) { return this.dispatch("error", ...data); } warn(...data) { return this.dispatch("warn", ...data); } info(...data) { return this.dispatch("info", ...data); } debug(...data) { return this.dispatch("debug", ...data); } errorGroup(title) { return new GroupedLog(this, "error", title); } warnGroup(title) { return new GroupedLog(this, "warn", title); } infoGroup(title) { return new GroupedLog(this, "info", title); } debugGroup(title) { return new GroupedLog(this, "debug", title); } setTarget(newTarget) { this._target = newTarget; } dispatch(level, ...data) { return this._target?.dispatchEvent( new DOMEvent("vds-log", { bubbles: true, composed: true, detail: { level, data } }) ) || false; } } var _a; class List extends EventsTarget { constructor() { super(...arguments); this._items = []; /** @internal */ this[_a] = false; } get length() { return this._items.length; } get readonly() { return this[ListSymbol._readonly]; } /** * Returns the index of the first occurrence of the given item, or -1 if it is not present. */ indexOf(item) { return this._items.indexOf(item); } /** * Returns an item matching the given `id`, or `null` if not present. */ getById(id) { if (id === "") return null; return this._items.find((item) => item.id === id) ?? null; } /** * Transform list to an array. 
*/ toArray() { return [...this._items]; } [(_a = ListSymbol._readonly, Symbol.iterator)]() { return this._items.values(); } /** @internal */ [ListSymbol._add](item, trigger) { const index = this._items.length; if (!("" + index in this)) { Object.defineProperty(this, index, { get() { return this._items[index]; } }); } if (this._items.includes(item)) return; this._items.push(item); this.dispatchEvent(new DOMEvent("add", { detail: item, trigger })); } /** @internal */ [ListSymbol._remove](item, trigger) { const index = this._items.indexOf(item); if (index >= 0) { this[ListSymbol._onRemove]?.(item, trigger); this._items.splice(index, 1); this.dispatchEvent(new DOMEvent("remove", { detail: item, trigger })); } } /** @internal */ [ListSymbol._reset](trigger) { for (const item of [...this._items]) this[ListSymbol._remove](item, trigger); this._items = []; this[ListSymbol._setReadonly](false, trigger); this[ListSymbol._onReset]?.(); } /** @internal */ [ListSymbol._setReadonly](readonly, trigger) { if (this[ListSymbol._readonly] === readonly) return; this[ListSymbol._readonly] = readonly; this.dispatchEvent(new DOMEvent("readonly-change", { detail: readonly, trigger })); } } const CAN_FULLSCREEN = fscreen.fullscreenEnabled; class FullscreenController extends ViewController { constructor() { super(...arguments); /** * Tracks whether we're the active fullscreen event listener. Fullscreen events can only be * listened to globally on the document so we need to know if they relate to the current host * element or not. 
*/ this._listening = false; this._active = false; } get active() { return this._active; } get supported() { return CAN_FULLSCREEN; } onConnect() { listenEvent(fscreen, "fullscreenchange", this._onChange.bind(this)); listenEvent(fscreen, "fullscreenerror", this._onError.bind(this)); onDispose(this._onDisconnect.bind(this)); } async _onDisconnect() { if (CAN_FULLSCREEN) await this.exit(); } _onChange(event) { const active = isFullscreen(this.el); if (active === this._active) return; if (!active) this._listening = false; this._active = active; this.dispatch("fullscreen-change", { detail: active, trigger: event }); } _onError(event) { if (!this._listening) return; this.dispatch("fullscreen-error", { detail: null, trigger: event }); this._listening = false; } async enter() { try { this._listening = true; if (!this.el || isFullscreen(this.el)) return; assertFullscreenAPI(); return fscreen.requestFullscreen(this.el); } catch (error) { this._listening = false; throw error; } } async exit() { if (!this.el || !isFullscreen(this.el)) return; assertFullscreenAPI(); return fscreen.exitFullscreen(); } } function canFullscreen() { return CAN_FULLSCREEN; } function isFullscreen(host) { if (fscreen.fullscreenElement === host) return true; try { return host.matches( // @ts-expect-error - `fullscreenPseudoClass` is missing from `@types/fscreen`. fscreen.fullscreenPseudoClass ); } catch (error) { return false; } } function assertFullscreenAPI() { if (CAN_FULLSCREEN) return; throw Error( "[vidstack] fullscreen API is not enabled or supported in this environment" ); } class ScreenOrientationController extends ViewController { constructor() { super(...arguments); this._type = signal(this._getScreenOrientation()); this._locked = signal(false); } /** * The current screen orientation type. 
* * @signal * @see {@link https://developer.mozilla.org/en-US/docs/Web/API/ScreenOrientation} * @see https://w3c.github.io/screen-orientation/#screen-orientation-types-and-locks */ get type() { return this._type(); } /** * Whether the screen orientation is currently locked. * * @signal * @see {@link https://developer.mozilla.org/en-US/docs/Web/API/ScreenOrientation} * @see https://w3c.github.io/screen-orientation/#screen-orientation-types-and-locks */ get locked() { return this._locked(); } /** * Whether the viewport is in a portrait orientation. * * @signal */ get portrait() { return this._type().startsWith("portrait"); } /** * Whether the viewport is in a landscape orientation. * * @signal */ get landscape() { return this._type().startsWith("landscape"); } static { /** * Whether the native Screen Orientation API is available. */ this.supported = canOrientScreen(); } /** * Whether the native Screen Orientation API is available. */ get supported() { return ScreenOrientationController.supported; } onConnect() { if (this.supported) { listenEvent(screen.orientation, "change", this._onOrientationChange.bind(this)); } else { const query = window.matchMedia("(orientation: landscape)"); query.onchange = this._onOrientationChange.bind(this); onDispose(() => query.onchange = null); } onDispose(this._onDisconnect.bind(this)); } async _onDisconnect() { if (this.supported && this._locked()) await this.unlock(); } _onOrientationChange(event) { this._type.set(this._getScreenOrientation()); this.dispatch("orientation-change", { detail: { orientation: peek(this._type), lock: this._currentLock }, trigger: event }); } /** * Locks the orientation of the screen to the desired orientation type using the * Screen Orientation API. * * @param lockType - The screen lock orientation type. * @throws Error - If screen orientation API is unavailable. 
* @see {@link https://developer.mozilla.org/en-US/docs/Web/API/Screen/orientation} * @see {@link https://w3c.github.io/screen-orientation} */ async lock(lockType) { if (peek(this._locked) || this._currentLock === lockType) return; this._assertScreenOrientationAPI(); await screen.orientation.lock(lockType); this._locked.set(true); this._currentLock = lockType; } /** * Unlocks the orientation of the screen to it's default state using the Screen Orientation * API. This method will throw an error if the API is unavailable. * * @throws Error - If screen orientation API is unavailable. * @see {@link https://developer.mozilla.org/en-US/docs/Web/API/Screen/orientation} * @see {@link https://w3c.github.io/screen-orientation} */ async unlock() { if (!peek(this._locked)) return; this._assertScreenOrientationAPI(); this._currentLock = void 0; await screen.orientation.unlock(); this._locked.set(false); } _assertScreenOrientationAPI() { if (this.supported) return; throw Error( "[vidstack] screen orientation API is not available" ); } _getScreenOrientation() { if (this.supported) return window.screen.orientation.type; return window.innerWidth >= window.innerHeight ? 
"landscape-primary" : "portrait-primary"; } } function isVideoQualitySrc(src) { return !isString(src) && "width" in src && "height" in src && isNumber(src.width) && isNumber(src.height); } const mediaState = new State({ artist: "", artwork: null, audioTrack: null, audioTracks: [], autoPlay: false, autoPlayError: null, audioGain: null, buffered: new TimeRange(), canLoad: false, canLoadPoster: false, canFullscreen: false, canOrientScreen: canOrientScreen(), canPictureInPicture: false, canPlay: false, clipStartTime: 0, clipEndTime: 0, controls: false, get iOSControls() { return IS_IPHONE && this.mediaType === "video" && (!this.playsInline || !fscreen.fullscreenEnabled && this.fullscreen); }, get nativeControls() { return this.controls || this.iOSControls; }, controlsVisible: false, get controlsHidden() { return !this.controlsVisible; }, crossOrigin: null, ended: false, error: null, fullscreen: false, get loop() { return this.providedLoop || this.userPrefersLoop; }, logLevel: "warn" , mediaType: "unknown", muted: false, paused: true, played: new TimeRange(), playing: false, playsInline: false, pictureInPicture: false, preload: "metadata", playbackRate: 1, qualities: [], quality: null, autoQuality: false, canSetQuality: true, canSetPlaybackRate: true, canSetVolume: false, canSetAudioGain: false, seekable: new TimeRange(), seeking: false, source: { src: "", type: "" }, sources: [], started: false, textTracks: [], textTrack: null, get hasCaptions() { return this.textTracks.filter(isTrackCaptionKind).length > 0; }, volume: 1, waiting: false, realCurrentTime: 0, get currentTime() { return this.ended ? this.duration : this.clipStartTime > 0 ? Math.max(0, Math.min(this.realCurrentTime - this.clipStartTime, this.duration)) : this.realCurrentTime; }, providedDuration: -1, intrinsicDuration: 0, get realDuration() { return this.providedDuration > 0 ? this.providedDuration : this.intrinsicDuration; }, get duration() { return this.clipEndTime > 0 ? 
this.clipEndTime - this.clipStartTime : Math.max(0, this.realDuration - this.clipStartTime); }, get title() { return this.providedTitle || this.inferredTitle; }, get poster() { return this.providedPoster || this.inferredPoster; }, get viewType() { return this.providedViewType !== "unknown" ? this.providedViewType : this.inferredViewType; }, get streamType() { return this.providedStreamType !== "unknown" ? this.providedStreamType : this.inferredStreamType; }, get currentSrc() { return this.source; }, get bufferedStart() { const start = getTimeRangesStart(this.buffered) ?? 0; return Math.max(0, start - this.clipStartTime); }, get bufferedEnd() { const end = getTimeRangesEnd(this.buffered) ?? 0; return Math.min(this.duration, Math.max(0, end - this.clipStartTime)); }, get seekableStart() { const start = getTimeRangesStart(this.seekable) ?? 0; return Math.max(0, start - this.clipStartTime); }, get seekableEnd() { const end = this.canPlay ? getTimeRangesEnd(this.seekable) ?? Infinity : 0; return this.clipEndTime > 0 ? 
Math.max(this.clipEndTime, Math.max(0, end - this.clipStartTime)) : end; }, get seekableWindow() { return Math.max(0, this.seekableEnd - this.seekableStart); }, // ~~ remote playback ~~ canAirPlay: false, canGoogleCast: false, remotePlaybackState: "disconnected", remotePlaybackType: "none", remotePlaybackLoader: null, remotePlaybackInfo: null, get isAirPlayConnected() { return this.remotePlaybackType === "airplay" && this.remotePlaybackState === "connected"; }, get isGoogleCastConnected() { return this.remotePlaybackType === "google-cast" && this.remotePlaybackState === "connected"; }, // ~~ responsive design ~~ pointer: "fine", orientation: "landscape", width: 0, height: 0, mediaWidth: 0, mediaHeight: 0, lastKeyboardAction: null, // ~~ user props ~~ userBehindLiveEdge: false, // ~~ live props ~~ liveEdgeTolerance: 10, minLiveDVRWindow: 60, get canSeek() { return /unknown|on-demand|:dvr/.test(this.streamType) && Number.isFinite(this.seekableWindow) && (!this.live || /:dvr/.test(this.streamType) && this.seekableWindow >= this.minLiveDVRWindow); }, get live() { return this.streamType.includes("live") || !Number.isFinite(this.realDuration); }, get liveEdgeStart() { return this.live && Number.isFinite(this.seekableEnd) ? Math.max(0, (this.liveSyncPosition ?? this.seekableEnd) - this.liveEdgeTolerance) : 0; }, get liveEdge() { return this.live && (!this.canSeek || !this.userBehindLiveEdge && this.currentTime >= this.liveEdgeStart); }, get liveEdgeWindow() { return this.live && Number.isFinite(this.seekableEnd) ? 
this.seekableEnd - this.liveEdgeStart : 0; }, // ~~ internal props ~~ autoPlaying: false, providedTitle: "", inferredTitle: "", providedLoop: false, userPrefersLoop: false, providedPoster: "", inferredPoster: "", inferredViewType: "unknown", providedViewType: "unknown", providedStreamType: "unknown", inferredStreamType: "unknown", liveSyncPosition: null, savedState: null }); const RESET_ON_SRC_QUALITY_CHANGE = /* @__PURE__ */ new Set([ "autoPlayError", "autoPlaying", "buffered", "canPlay", "error", "paused", "played", "playing", "seekable", "seeking", "waiting" ]); const RESET_ON_SRC_CHANGE = /* @__PURE__ */ new Set([ ...RESET_ON_SRC_QUALITY_CHANGE, "ended", "inferredPoster", "inferredStreamType", "inferredTitle", "intrinsicDuration", "liveSyncPosition", "realCurrentTime", "savedState", "started", "userBehindLiveEdge" ]); function softResetMediaState($media, isSourceQualityChange = false) { const filter = isSourceQualityChange ? RESET_ON_SRC_QUALITY_CHANGE : RESET_ON_SRC_CHANGE; mediaState.reset($media, (prop) => filter.has(prop)); tick(); } class MediaRemoteControl { constructor(_logger = new Logger() ) { this._logger = _logger; this._target = null; this._player = null; this._prevTrackIndex = -1; } /** * Set the target from which to dispatch media requests events from. The events should bubble * up from this target to the player element. * * @example * ```ts * const button = document.querySelector('button'); * remote.setTarget(button); * ``` */ setTarget(target) { this._target = target; this._logger?.setTarget(target); } /** * Returns the current player element. This method will attempt to find the player by * searching up from either the given `target` or default target set via `remote.setTarget`. * * @example * ```ts * const player = remote.getPlayer(); * ``` */ getPlayer(target) { if (this._player) return this._player; (target ?? 
this._target)?.dispatchEvent( new DOMEvent("find-media-player", { detail: (player) => void (this._player = player), bubbles: true, composed: true }) ); return this._player; } /** * Set the current player element so the remote can support toggle methods such as * `togglePaused` as they rely on the current media state. */ setPlayer(player) { this._player = player; } /** * Dispatch a request to start the media loading process. This will only work if the media * player has been initialized with a custom loading strategy `load="custom">`. * * @docs {@link https://www.vidstack.io/docs/player/core-concepts/loading#load-strategies} */ startLoading(trigger) { this._dispatchRequest("media-start-loading", trigger); } /** * Dispatch a request to start the poster loading process. This will only work if the media * player has been initialized with a custom poster loading strategy `posterLoad="custom">`. * * @docs {@link https://www.vidstack.io/docs/player/core-concepts/loading#load-strategies} */ startLoadingPoster(trigger) { this._dispatchRequest("media-poster-start-loading", trigger); } /** * Dispatch a request to connect to AirPlay. * * @see {@link https://www.apple.com/au/airplay} */ requestAirPlay(trigger) { this._dispatchRequest("media-airplay-request", trigger); } /** * Dispatch a request to connect to Google Cast. * * @see {@link https://developers.google.com/cast/docs/overview} */ requestGoogleCast(trigger) { this._dispatchRequest("media-google-cast-request", trigger); } /** * Dispatch a request to begin/resume media playback. */ play(trigger) { this._dispatchRequest("media-play-request", trigger); } /** * Dispatch a request to pause media playback. */ pause(trigger) { this._dispatchRequest("media-pause-request", trigger); } /** * Dispatch a request to set the media volume to mute (0). */ mute(trigger) { this._dispatchRequest("media-mute-request", trigger); } /** * Dispatch a request to unmute the media volume and set it back to it's previous state. 
*/ unmute(trigger) { this._dispatchRequest("media-unmute-request", trigger); } /** * Dispatch a request to enter fullscreen. * * @docs {@link https://www.vidstack.io/docs/player/api/fullscreen#remote-control} */ enterFullscreen(target, trigger) { this._dispatchRequest("media-enter-fullscreen-request", trigger, target); } /** * Dispatch a request to exit fullscreen. * * @docs {@link https://www.vidstack.io/docs/player/api/fullscreen#remote-control} */ exitFullscreen(target, trigger) { this._dispatchRequest("media-exit-fullscreen-request", trigger, target); } /** * Dispatch a request to lock the screen orientation. * * @docs {@link https://www.vidstack.io/docs/player/screen-orientation#remote-control} */ lockScreenOrientation(lockType, trigger) { this._dispatchRequest("media-orientation-lock-request", trigger, lockType); } /** * Dispatch a request to unlock the screen orientation. * * @docs {@link https://www.vidstack.io/docs/player/api/screen-orientation#remote-control} */ unlockScreenOrientation(trigger) { this._dispatchRequest("media-orientation-unlock-request", trigger); } /** * Dispatch a request to enter picture-in-picture mode. * * @docs {@link https://www.vidstack.io/docs/player/api/picture-in-picture#remote-control} */ enterPictureInPicture(trigger) { this._dispatchRequest("media-enter-pip-request", trigger); } /** * Dispatch a request to exit picture-in-picture mode. * * @docs {@link https://www.vidstack.io/docs/player/api/picture-in-picture#remote-control} */ exitPictureInPicture(trigger) { this._dispatchRequest("media-exit-pip-request", trigger); } /** * Notify the media player that a seeking process is happening and to seek to the given `time`. */ seeking(time, trigger) { this._dispatchRequest("media-seeking-request", trigger, time); } /** * Notify the media player that a seeking operation has completed and to seek to the given `time`. * This is generally called after a series of `remote.seeking()` calls. 
*/ seek(time, trigger) { this._dispatchRequest("media-seek-request", trigger, time); } seekToLiveEdge(trigger) { this._dispatchRequest("media-live-edge-request", trigger); } /** * Dispatch a request to update the length of the media in seconds. * * @example * ```ts * remote.changeDuration(100); // 100 seconds * ``` */ changeDuration(duration, trigger) { this._dispatchRequest("media-duration-change-request", trigger, duration); } /** * Dispatch a request to update the clip start time. This is the time at which media playback * should start at. * * @example * ```ts * remote.changeClipStart(100); // start at 100 seconds * ``` */ changeClipStart(startTime, trigger) { this._dispatchRequest("media-clip-start-change-request", trigger, startTime); } /** * Dispatch a request to update the clip end time. This is the time at which media playback * should end at. * * @example * ```ts * remote.changeClipEnd(100); // end at 100 seconds * ``` */ changeClipEnd(endTime, trigger) { this._dispatchRequest("media-clip-end-change-request", trigger, endTime); } /** * Dispatch a request to update the media volume to the given `volume` level which is a value * between 0 and 1. * * @docs {@link https://www.vidstack.io/docs/player/api/audio-gain#remote-control} * @example * ```ts * remote.changeVolume(0); // 0% * remote.changeVolume(0.05); // 5% * remote.changeVolume(0.5); // 50% * remote.changeVolume(0.75); // 70% * remote.changeVolume(1); // 100% * ``` */ changeVolume(volume, trigger) { this._dispatchRequest("media-volume-change-request", trigger, Math.max(0, Math.min(1, volume))); } /** * Dispatch a request to change the current audio track. * * @example * ```ts * remote.changeAudioTrack(1); // track at index 1 * ``` */ changeAudioTrack(index, trigger) { this._dispatchRequest("media-audio-track-change-request", trigger, index); } /** * Dispatch a request to change the video quality. The special value `-1` represents auto quality * selection. 
* * @example * ```ts * remote.changeQuality(-1); // auto * remote.changeQuality(1); // quality at index 1 * ``` */ changeQuality(index, trigger) { this._dispatchRequest("media-quality-change-request", trigger, index); } /** * Request auto quality selection. */ requestAutoQuality(trigger) { this.changeQuality(-1, trigger); } /** * Dispatch a request to change the mode of the text track at the given index. * * @example * ```ts * remote.changeTextTrackMode(1, 'showing'); // track at index 1 * ``` */ changeTextTrackMode(index, mode, trigger) { this._dispatchRequest("media-text-track-change-request", trigger, { index, mode }); } /** * Dispatch a request to change the media playback rate. * * @example * ```ts * remote.changePlaybackRate(0.5); // Half the normal speed * remote.changePlaybackRate(1); // Normal speed * remote.changePlaybackRate(1.5); // 50% faster than normal * remote.changePlaybackRate(2); // Double the normal speed * ``` */ changePlaybackRate(rate, trigger) { this._dispatchRequest("media-rate-change-request", trigger, rate); } /** * Dispatch a request to change the media audio gain. * * @example * ```ts * remote.changeAudioGain(1); // Disable audio gain * remote.changeAudioGain(1.5); // 50% louder * remote.changeAudioGain(2); // 100% louder * ``` */ changeAudioGain(gain, trigger) { this._dispatchRequest("media-audio-gain-change-request", trigger, gain); } /** * Dispatch a request to resume idle tracking on controls. */ resumeControls(trigger) { this._dispatchRequest("media-resume-controls-request", trigger); } /** * Dispatch a request to pause controls idle tracking. Pausing tracking will result in the * controls being visible until `remote.resumeControls()` is called. This method * is generally used when building custom controls and you'd like to prevent the UI from * disappearing. * * @example * ```ts * // Prevent controls hiding while menu is being interacted with. 
* function onSettingsOpen() { * remote.pauseControls(); * } * * function onSettingsClose() { * remote.resumeControls(); * } * ``` */ pauseControls(trigger) { this._dispatchRequest("media-pause-controls-request", trigger); } /** * Dispatch a request to toggle the media playback state. */ togglePaused(trigger) { const player = this.getPlayer(trigger?.target); if (!player) { this._noPlayerWarning(this.togglePaused.name); return; } if (player.state.paused) this.play(trigger); else this.pause(trigger); } /** * Dispatch a request to toggle the controls visibility. */ toggleControls(trigger) { const player = this.getPlayer(trigger?.target); if (!player) { this._noPlayerWarning(this.toggleControls.name); return; } if (!player.controls.showing) { player.controls.show(0, trigger); } else { player.controls.hide(0, trigger); } } /** * Dispatch a request to toggle the media muted state. */ toggleMuted(trigger) { const player = this.getPlayer(trigger?.target); if (!player) { this._noPlayerWarning(this.toggleMuted.name); return; } if (player.state.muted) this.unmute(trigger); else this.mute(trigger); } /** * Dispatch a request to toggle the media fullscreen state. * * @docs {@link https://www.vidstack.io/docs/player/api/fullscreen#remote-control} */ toggleFullscreen(target, trigger) { const player = this.getPlayer(trigger?.target); if (!player) { this._noPlayerWarning(this.toggleFullscreen.name); return; } if (player.state.fullscreen) this.exitFullscreen(target, trigger); else this.enterFullscreen(target, trigger); } /** * Dispatch a request to toggle the media picture-in-picture mode. * * @docs {@link https://www.vidstack.io/docs/player/api/picture-in-picture#remote-control} */ togglePictureInPicture(trigger) { const player = this.getPlayer(trigger?.target); if (!player) { this._noPlayerWarning(this.togglePictureInPicture.name); return; } if (player.state.pictureInPicture) this.exitPictureInPicture(trigger); else this.enterPictureInPicture(trigger); } /** * Show captions. 
*/ showCaptions(trigger) { const player = this.getPlayer(trigger?.target); if (!player) { this._noPlayerWarning(this.showCaptions.name); return; } let tracks = player.state.textTracks, index = this._prevTrackIndex; if (!tracks[index] || !isTrackCaptionKind(tracks[index])) { index = -1; } if (index === -1) { index = tracks.findIndex((track) => isTrackCaptionKind(track) && track.default); } if (index === -1) { index = tracks.findIndex((track) => isTrackCaptionKind(track)); } if (index >= 0) this.changeTextTrackMode(index, "showing", trigger); this._prevTrackIndex = -1; } /** * Turn captions off. */ disableCaptions(trigger) { const player = this.getPlayer(trigger?.target); if (!player) { this._noPlayerWarning(this.disableCaptions.name); return; } const tracks = player.state.textTracks, track = player.state.textTrack; if (track) { const index = tracks.indexOf(track); this.changeTextTrackMode(index, "disabled", trigger); this._prevTrackIndex = index; } } /** * Dispatch a request to toggle the current captions mode. */ toggleCaptions(trigger) { const player = this.getPlayer(trigger?.target); if (!player) { this._noPlayerWarning(this.toggleCaptions.name); return; } if (player.state.textTrack) { this.disableCaptions(); } else { this.showCaptions(); } } userPrefersLoopChange(prefersLoop, trigger) { this._dispatchRequest("media-user-loop-change-request", trigger, prefersLoop); } _dispatchRequest(type, trigger, detail) { const request = new DOMEvent(type, { bubbles: true, composed: true, cancelable: true, detail, trigger }); let target = trigger?.target || null; if (target && target instanceof Component) target = target.el; const shouldUsePlayer = !target || target === document || target === window || target === document.body || this._player?.el && target instanceof Node && !this._player.el.contains(target); target = shouldUsePlayer ? this._target ?? this.getPlayer()?.el : target ?? 
this._target; { this._logger?.debugGroup(`\u{1F4E8} dispatching \`${type}\``).labelledLog("Target", target).labelledLog("Player", this._player).labelledLog("Request Event", request).labelledLog("Trigger Event", trigger).dispatch(); } if (this._player) { if (type === "media-play-request" && !this._player.state.canLoad) { target?.dispatchEvent(request); } else { this._player.canPlayQueue._enqueue(type, () => target?.dispatchEvent(request)); } } else { target?.dispatchEvent(request); } } _noPlayerWarning(method) { { console.warn( `[vidstack] attempted to call \`MediaRemoteControl.${method}\`() that requires player but failed because remote could not find a parent player element from target` ); } } } class LocalMediaStorage { constructor() { this.playerId = "vds-player"; this.mediaId = null; this._data = { volume: null, muted: null, audioGain: null, time: null, lang: null, captions: null, rate: null, quality: null }; this.saveTimeThrottled = functionThrottle(this.saveTime.bind(this), 1e3); } async getVolume() { return this._data.volume; } async setVolume(volume) { this._data.volume = volume; this.save(); } async getMuted() { return this._data.muted; } async setMuted(muted) { this._data.muted = muted; this.save(); } async getTime() { return this._data.time; } async setTime(time, ended) { const shouldClear = time < 0; this._data.time = !shouldClear ? 
time : null; if (shouldClear || ended) this.saveTime(); else this.saveTimeThrottled(); } async getLang() { return this._data.lang; } async setLang(lang) { this._data.lang = lang; this.save(); } async getCaptions() { return this._data.captions; } async setCaptions(enabled) { this._data.captions = enabled; this.save(); } async getPlaybackRate() { return this._data.rate; } async setPlaybackRate(rate) { this._data.rate = rate; this.save(); } async getAudioGain() { return this._data.audioGain; } async setAudioGain(gain) { this._data.audioGain = gain; this.save(); } async getVideoQuality() { return this._data.quality; } async setVideoQuality(quality) { this._data.quality = quality; this.save(); } onChange(src, mediaId, playerId = "vds-player") { const savedData = playerId ? localStorage.getItem(playerId) : null, savedTime = mediaId ? localStorage.getItem(mediaId) : null; this.playerId = playerId; this.mediaId = mediaId; this._data = { volume: null, muted: null, audioGain: null, lang: null, captions: null, rate: null, quality: null, ...savedData ? JSON.parse(savedData) : {}, time: savedTime ? +savedTime : null }; } save() { if (!this.playerId) return; const data = JSON.stringify({ ...this._data, time: void 0 }); localStorage.setItem(this.playerId, data); } saveTime() { if (!this.mediaId) return; const data = (this._data.time ?? 
0).toString(); localStorage.setItem(this.mediaId, data); } } class NativeTextRenderer { constructor() { this.priority = 0; this._display = true; this._video = null; this._track = null; this._tracks = /* @__PURE__ */ new Set(); } canRender(_, video) { return !!video; } attach(video) { this._video = video; if (video) video.textTracks.onchange = this._onChange.bind(this); } addTrack(track) { this._tracks.add(track); this._attachTrack(track); } removeTrack(track) { track[TextTrackSymbol._native]?.remove?.(); track[TextTrackSymbol._native] = null; this._tracks.delete(track); } changeTrack(track) { const current = track?.[TextTrackSymbol._native]; if (current && current.track.mode !== "showing") { current.track.mode = "showing"; } this._track = track; } setDisplay(display) { this._display = display; this._onChange(); } detach() { if (this._video) this._video.textTracks.onchange = null; for (const track of this._tracks) this.removeTrack(track); this._tracks.clear(); this._video = null; this._track = null; } _attachTrack(track) { if (!this._video) return; const el = track[TextTrackSymbol._native] ??= this._createTrackElement(track); if (isHTMLElement(el)) { this._video.append(el); el.track.mode = el.default ? "showing" : "disabled"; } } _createTrackElement(track) { const el = document.createElement("track"), isDefault = track.default || track.mode === "showing", isSupported = track.src && track.type === "vtt"; el.id = track.id; el.src = isSupported ? 
track.src : ""; el.label = track.label; el.kind = track.kind; el.default = isDefault; track.language && (el.srclang = track.language); if (isDefault && !isSupported) { this._copyCues(track, el.track); } return el; }
// Copy parsed cues into the native track when they can't be loaded from `src`.
_copyCues(track, native) { if (track.src && track.type === "vtt" || native.cues?.length) return; for (const cue of track.cues) native.addCue(cue); }
// Sync native track mode changes back into the custom track objects.
_onChange(event) { for (const track of this._tracks) { const native = track[TextTrackSymbol._native]; if (!native) continue; if (!this._display) { native.track.mode = native.managed ? "hidden" : "disabled"; continue; } const isShowing = native.track.mode === "showing"; if (isShowing) this._copyCues(track, native.track); track.setMode(isShowing ? "showing" : "disabled", event); } } }
// Coordinates native vs. custom text-track renderers for the current media,
// reacting to track add/remove/mode-change events and control visibility.
class TextRenderers { constructor(_media) { this._media = _media; this._video = null; this._renderers = []; this._nativeDisplay = false; this._nativeRenderer = null; this._customRenderer = null; const textTracks = _media.textTracks; this._textTracks = textTracks; effect(this._watchControls.bind(this)); onDispose(this._detach.bind(this)); listenEvent(textTracks, "add", this._onAddTrack.bind(this)); listenEvent(textTracks, "remove", this._onRemoveTrack.bind(this)); listenEvent(textTracks, "mode-change", this._update.bind(this)); }
// Prefer native rendering whenever native controls are showing.
_watchControls() { const { nativeControls } = this._media.$state; this._nativeDisplay = nativeControls(); this._update(); }
// Register/unregister custom renderers (priority-sorted at selection time).
add(renderer) { this._renderers.push(renderer); untrack(this._update.bind(this)); } remove(renderer) { renderer.detach(); this._renderers.splice(this._renderers.indexOf(renderer), 1); untrack(this._update.bind(this)); } resetCustomRenderer() { if (!this._customRenderer) return; this._customRenderer.changeTrack(null); }
/** @internal */
_attachVideo(video) { requestAnimationFrame(() => { this._video = video; if (video) { this._nativeRenderer = new NativeTextRenderer(); this._nativeRenderer.attach(video); for (const track of this._textTracks) this._addNativeTrack(track); }
this._update(); }); }
// Only caption/subtitle kinds are mirrored to the native renderer.
_addNativeTrack(track) { if (!isTrackCaptionKind(track)) return; this._nativeRenderer?.addTrack(track); } _removeNativeTrack(track) { if (!isTrackCaptionKind(track)) return; this._nativeRenderer?.removeTrack(track); } _onAddTrack(event) { this._addNativeTrack(event.detail); } _onRemoveTrack(event) { this._removeNativeTrack(event.detail); }
// Resolve lazily-loaded subtitle content (subtitleLoader) before applying the
// selected track; otherwise apply it immediately.
_update() { const currentTrack = this._textTracks.selected; if (currentTrack && currentTrack.subtitleLoader && !currentTrack.contentLoaded) { Promise.resolve(currentTrack.subtitleLoader(currentTrack)).then((content) => { if (content) currentTrack.content = content; currentTrack.contentLoaded = true; this._setCurrentTrack(currentTrack); }); return; } this._setCurrentTrack(currentTrack); }
// Route the selected track: native renderer when native display is on or the
// track is native-HLS managed, else the first capable custom renderer.
_setCurrentTrack(currentTrack) { if (this._video && (this._nativeDisplay || currentTrack?.[TextTrackSymbol._nativeHLS])) { this._customRenderer?.changeTrack(null); this._nativeRenderer?.setDisplay(true); this._nativeRenderer?.changeTrack(currentTrack); return; } this._nativeRenderer?.setDisplay(false); this._nativeRenderer?.changeTrack(null); this._customRenderer?.changeTrack(null); if (!currentTrack) { return; }
// NOTE(review): lower `priority` value wins (ascending sort, first match);
// the sort mutates `_renderers` in place.
const customRenderer = this._renderers.sort((a, b) => a.priority - b.priority).find((renderer) => renderer.canRender(currentTrack, this._video)); if (this._customRenderer !== customRenderer) { this._customRenderer?.detach(); if (this._video) customRenderer?.attach(this._video); this._customRenderer = customRenderer ??
null; } if (this._video) customRenderer?.changeTrack(currentTrack, this._video); }
// Tear down both renderers.
_detach() { this._nativeRenderer?.detach(); this._nativeRenderer = null; this._customRenderer?.detach(); this._customRenderer = null; } }
// Observable list of text tracks with default/preferred-language auto-selection
// and persistence of caption choices via the storage backend.
class TextTrackList extends List { constructor() { super(); this._canLoad = false; this._defaults = {}; this._storage = null; this._preferredLang = null;
// Debounced (300ms) so a burst of added tracks resolves to one selection pass.
// For each kind group, keep an already-showing track, else prefer the stored
// language, else the declared default — respecting the stored captions on/off flag.
this._selectTracks = functionDebounce(async () => { if (!this._canLoad) return; if (!this._preferredLang && this._storage) { this._preferredLang = await this._storage.getLang(); } const showCaptions = await this._storage?.getCaptions(), kinds = [ ["captions", "subtitles"], "chapters", "descriptions", "metadata" ]; for (const kind of kinds) { const tracks = this.getByKind(kind); if (tracks.find((t) => t.mode === "showing")) continue; const preferredTrack = this._preferredLang ? tracks.find((track2) => track2.language === this._preferredLang) : null; const defaultTrack = isArray(kind) ? this._defaults[kind.find((kind2) => this._defaults[kind2]) || ""] : this._defaults[kind]; const track = preferredTrack ?? defaultTrack, isCaptionsKind = track && isTrackCaptionKind(track); if (track && (!isCaptionsKind || showCaptions !== false)) { track.mode = "showing"; if (isCaptionsKind) this._saveCaptionsTrack(track); } } }, 300); this._pendingRemoval = null; this._onTrackModeChangeBind = this._onTrackModeChange.bind(this); }
// The currently showing caption/subtitle track (null when none).
get selected() { const track = this._items.find((t) => t.mode === "showing" && isTrackCaptionKind(t)); return track ?? null; } get selectedIndex() { const selected = this.selected; return selected ? this.indexOf(selected) : -1; } get preferredLang() { return this._preferredLang; } set preferredLang(lang) { this._preferredLang = lang; this._saveLang(lang); }
// Add a track (init object or TextTrack); captions and subtitles share one
// default slot under the "captions" key.
add(init, trigger) { const isTrack = init instanceof TextTrack, track = isTrack ? init : new TextTrack(init), kind = init.kind === "captions" || init.kind === "subtitles" ?
"captions" : init.kind; if (this._defaults[kind] && init.default) delete init.default; track.addEventListener("mode-change", this._onTrackModeChangeBind); this[ListSymbol._add](track, trigger); track[TextTrackSymbol._crossOrigin] = this[TextTrackSymbol._crossOrigin]; if (this._canLoad) track[TextTrackSymbol._canLoad](); if (init.default) this._defaults[kind] = track; this._selectTracks(); return this; }
// Remove a track; `_pendingRemoval` stops the forced "disabled" mode change
// below from being persisted as a user captions-off action.
// NOTE(review): when the track isn't in the list we return early with
// `_pendingRemoval` still set — verify this is intended.
remove(track, trigger) { this._pendingRemoval = track; if (!this._items.includes(track)) return; if (track === this._defaults[track.kind]) delete this._defaults[track.kind]; track.mode = "disabled"; track[TextTrackSymbol._onModeChange] = null; track.removeEventListener("mode-change", this._onTrackModeChangeBind); this[ListSymbol._remove](track, trigger); this._pendingRemoval = null; return this; }
clear(trigger) { for (const track of [...this._items]) { this.remove(track, trigger); } return this; }
// Filter tracks by one kind or an array of kinds.
getByKind(kind) { const kinds = Array.isArray(kind) ? kind : [kind]; return this._items.filter((track) => kinds.includes(track.kind)); }
/** @internal */
[(TextTrackSymbol._canLoad)]() { if (this._canLoad) return; for (const track of this._items) track[TextTrackSymbol._canLoad](); this._canLoad = true; this._selectTracks(); }
// Persist user caption changes and enforce one showing track per kind group.
_onTrackModeChange(event) { const track = event.detail; if (this._storage && isTrackCaptionKind(track) && track !== this._pendingRemoval) { this._saveCaptionsTrack(track); } if (track.mode === "showing") { const kinds = isTrackCaptionKind(track) ?
["captions", "subtitles"] : [track.kind]; for (const t of this._items) { if (t.mode === "showing" && t != track && kinds.includes(t.kind)) { t.mode = "disabled"; } } } this.dispatchEvent( new DOMEvent("mode-change", { detail: event.detail, trigger: event }) ); }
// Persist language + captions-enabled whenever a caption track is toggled.
_saveCaptionsTrack(track) { if (track.mode !== "disabled") { this._saveLang(track.language); } this._storage?.setCaptions?.(track.mode === "showing"); } _saveLang(lang) { this._storage?.setLang?.(this._preferredLang = lang); } setStorage(storage) { this._storage = storage; } }
// Private per-item flag used by SelectList to mark the selected item.
const SELECTED = Symbol("SELECTED" );
// List where at most one item is `selected`; selection is exposed as a
// getter/setter pair defined on each item when it is added.
class SelectList extends List { get selected() { return this._items.find((item) => item.selected) ?? null; } get selectedIndex() { return this._items.findIndex((item) => item.selected); }
/** @internal */
[ListSymbol._onRemove](item, trigger) { this[ListSymbol._select](item, false, trigger); }
/** @internal */
[ListSymbol._add](item, trigger) { item[SELECTED] = false; Object.defineProperty(item, "selected", { get() { return this[SELECTED]; }, set: (selected) => { if (this.readonly) return; this[ListSymbol._onUserSelect]?.(); this[ListSymbol._select](item, selected); } }); super[ListSymbol._add](item, trigger); }
/** @internal */
[ListSymbol._select](item, selected, trigger) { if (selected === item?.[SELECTED]) return; const prev = this.selected; if (item) item[SELECTED] = selected; const changed = !selected ? prev === item : prev !== item; if (changed) { if (prev) prev[SELECTED] = false; this.dispatchEvent( new DOMEvent("change", { detail: { prev, current: this.selected }, trigger }) ); } } }
// Audio tracks are a plain single-select list.
class AudioTrackList extends SelectList { }
class VideoQualityList extends SelectList { constructor() { super(...arguments); this._auto = false; /** * Configures quality switching: * * - `current`: Trigger an immediate quality level switch. This will abort the current fragment * request if any, flush the whole buffer, and fetch fragment matching with current position * and requested quality level.
 * * - `next`: Trigger a quality level switch for next fragment. This could eventually flush * already buffered next fragment. * * - `load`: Set quality level for next loaded fragment. * * @see {@link https://www.vidstack.io/docs/player/api/video-quality#switch} * @see {@link https://github.com/video-dev/hls.js/blob/master/docs/API.md#quality-switch-control-api} */ this.switch = "current"; }
/** * Whether automatic quality selection is enabled. */
get auto() { return this._auto || this.readonly; }
/** @internal */
[(ListSymbol._onUserSelect)]() { this[QualitySymbol._setAuto](false); }
/** @internal */
[ListSymbol._onReset](trigger) { this[QualitySymbol._enableAuto] = void 0; this[QualitySymbol._setAuto](false, trigger); }
/** * Request automatic quality selection (if supported). This will be a no-op if the list is * `readonly` as that already implies auto-selection. */
autoSelect(trigger) { if (this.readonly || this._auto || !this[QualitySymbol._enableAuto]) return; this[QualitySymbol._enableAuto]?.(trigger); this[QualitySymbol._setAuto](true, trigger); }
getBySrc(src) { return this._items.find((quality) => quality.src === src); }
/** @internal */
[QualitySymbol._setAuto](auto, trigger) { if (this._auto === auto) return; this._auto = auto; this.dispatchEvent( new DOMEvent("auto-change", { detail: auto, trigger }) ); } }
// Provider type guards — discriminate on the `$$PROVIDER_TYPE` brand.
function isAudioProvider(provider) { return provider?.$$PROVIDER_TYPE === "AUDIO"; } function isVideoProvider(provider) { return provider?.$$PROVIDER_TYPE === "VIDEO"; } function isHLSProvider(provider) { return provider?.$$PROVIDER_TYPE === "HLS"; } function isDASHProvider(provider) { return provider?.$$PROVIDER_TYPE === "DASH"; } function isYouTubeProvider(provider) { return provider?.$$PROVIDER_TYPE === "YOUTUBE"; } function isVimeoProvider(provider) { return provider?.$$PROVIDER_TYPE === "VIMEO"; } function isGoogleCastProvider(provider) { return provider?.$$PROVIDER_TYPE === "GOOGLE_CAST"; }
// DOM media element type guards.
function isHTMLAudioElement(element) { return element
instanceof HTMLAudioElement; } function isHTMLVideoElement(element) { return element instanceof HTMLVideoElement; } function isHTMLMediaElement(element) { return isHTMLAudioElement(element) || isHTMLVideoElement(element); } function isHTMLIFrameElement(element) { return element instanceof HTMLIFrameElement; }
// Base type for player view controllers.
class MediaPlayerController extends ViewController { }
// Default keyboard shortcut map (space-separated key lists per action).
const MEDIA_KEY_SHORTCUTS = { togglePaused: "k Space", toggleMuted: "m", toggleFullscreen: "f", togglePictureInPicture: "i", toggleCaptions: "c", seekBackward: "j J ArrowLeft", seekForward: "l L ArrowRight", volumeUp: "ArrowUp", volumeDown: "ArrowDown", speedUp: ">", slowDown: "<" };
// NOTE(review): "Ctrl" does not match KeyboardEvent.key ("Control") — verify
// against how MODIFIER_KEYS is consumed elsewhere in the bundle.
const MODIFIER_KEYS = /* @__PURE__ */ new Set(["Shift", "Alt", "Meta", "Ctrl"]), BUTTON_SELECTORS = 'button, [role="button"]', IGNORE_SELECTORS = 'input, textarea, select, [contenteditable], [role^="menuitem"], [role="timer"]';
// Wires media keyboard shortcuts to the player element or the document,
// depending on the `keyTarget` prop; disabled entirely by `keyDisabled`.
class MediaKeyboardController extends MediaPlayerController { constructor(_media) { super(); this._media = _media; this._timeSlider = null; } onConnect() { effect(this._onTargetChange.bind(this)); }
// Track whether this player is the "active" shortcut target: focus-within for
// player-scoped keys, or document-level focus tracking otherwise.
_onTargetChange() { const { keyDisabled, keyTarget } = this.$props; if (keyDisabled()) return; const target = keyTarget() === "player" ? this.el : document, $active = signal(false); if (target === this.el) { this.listen("focusin", () => $active.set(true)); this.listen("focusout", (event) => { if (!this.el.contains(event.target)) $active.set(false); }); } else { if (!peek($active)) $active.set(document.querySelector("[data-media-player]") === this.el); listenEvent(document, "focusin", (event) => { const activePlayer = event.composedPath().find((el) => el instanceof Element && el.