@ktt45678/vidstack
Version:
UI component library for building high-quality, accessible video and audio experiences on the web.
1,489 lines (1,465 loc) • 300 kB
TypeScript
import { EventsTarget, DOMEvent, ViewController, InferEventDetail, SetRequired, MaybeStopEffect, Writable, WriteSignal, ReadSignalRecord, Context, Scope, Dispose, DeferredPromise, State, Store, Component, ReadSignal, StateContext } from './vidstack-ffSmmxHH.js';
import { VTTCue as VTTCue$1, VTTRegion, CaptionsFileFormat, CaptionsParserFactory, VTTHeaderMetadata } from 'media-captions';
import * as DASH from 'dashjs';
import * as HLS from 'hls.js';
/** Verbosity levels accepted by the logger, from least (`silent`) to most (`debug`) verbose. */
type LogLevel = 'silent' | 'error' | 'warn' | 'info' | 'debug';
/** Symbol used as a marker property to identify `GroupedLog` objects (see `GroupedLog[GROUPED_LOG]`). */
declare const GROUPED_LOG: unique symbol;
/**
 * Builder for a nested group of log entries. A group collects plain logs, labelled logs, and
 * child groups, then hands the finished group to its owning `Logger` via `dispatch()`.
 */
declare class GroupedLog {
readonly logger: Logger;
readonly level: LogLevel;
readonly title: string;
readonly root?: GroupedLog | undefined;
readonly parent?: GroupedLog | undefined;
// Marker property identifying this object as a `GroupedLog` (keyed by the GROUPED_LOG symbol).
readonly [GROUPED_LOG] = true;
// Collected entries: either raw data (optionally labelled) or nested child groups.
readonly logs: ({
label?: string;
data: any[];
} | GroupedLog)[];
constructor(logger: Logger, level: LogLevel, title: string, root?: GroupedLog | undefined, parent?: GroupedLog | undefined);
/** Append a plain log entry; returns a `GroupedLog` for chaining. */
log(...data: any[]): GroupedLog;
/** Append a log entry with a label; returns a `GroupedLog` for chaining. */
labelledLog(label: string, ...data: any[]): GroupedLog;
/** Open a nested group — presumably returns the new child group; confirm in implementation. */
groupStart(title: string): GroupedLog;
/** Close the current group — presumably returns the parent group; confirm in implementation. */
groupEnd(): GroupedLog;
/** Dispatch the collected group through the owning logger; boolean presumably signals it was handled. */
dispatch(): boolean;
}
/**
 * Dispatches log messages as `vds-log` DOM events (see `LoggerEvents` / `LogEventDetail`) on a
 * configurable target. Each log method returns a `boolean` — presumably whether the event was
 * dispatched successfully; confirm against the implementation.
 */
declare class Logger {
private _target;
error(...data: any[]): boolean;
warn(...data: any[]): boolean;
info(...data: any[]): boolean;
debug(...data: any[]): boolean;
/** Start a grouped log at the corresponding level (see `GroupedLog`). */
errorGroup(title: string): GroupedLog;
warnGroup(title: string): GroupedLog;
infoGroup(title: string): GroupedLog;
debugGroup(title: string): GroupedLog;
/** Set the `EventTarget` log events are dispatched on; `null` detaches the logger. */
setTarget(newTarget: EventTarget | null): void;
/** Dispatch a log at the given level with the supplied data. */
dispatch(level: LogLevel, ...data: any[]): boolean;
}
// Private symbols keying the internal `List` operations. They are grouped and re-exported
// through `ListSymbol` below so list internals stay inaccessible to consumers.
declare const ADD: unique symbol;
declare const REMOVE: unique symbol;
declare const RESET: unique symbol;
declare const SELECT: unique symbol;
declare const READONLY: unique symbol;
declare const SET_READONLY: unique symbol;
declare const ON_RESET: unique symbol;
declare const ON_REMOVE: unique symbol;
declare const ON_USER_SELECT: unique symbol;
/** @internal */
declare const ListSymbol: {
readonly _add: typeof ADD;
readonly _remove: typeof REMOVE;
readonly _reset: typeof RESET;
readonly _select: typeof SELECT;
readonly _readonly: typeof READONLY;
readonly _setReadonly: typeof SET_READONLY;
readonly _onReset: typeof ON_RESET;
readonly _onRemove: typeof ON_REMOVE;
readonly _onUserSelect: typeof ON_USER_SELECT;
};
/** Minimum shape of an item stored in a `List`: a string `id` used for lookups (see `List.getById`). */
interface ListItem {
id: string;
}
/**
 * Event-emitting, index-accessible, iterable collection of `ListItem`s. Mutation happens only
 * through the internal `ListSymbol`-keyed methods; changes are surfaced to consumers via
 * `ListEvents` (`add`, `remove`, `readonly-change`).
 */
declare class List<Item extends ListItem, Events extends ListEvents> extends EventsTarget<Events> implements Iterable<Item> {
[index: number]: Item | undefined;
protected _items: Item[];
/** @internal */
protected [ListSymbol._readonly]: boolean;
/** @internal */
protected [ListSymbol._onReset]?(trigger?: Event): void;
/** @internal */
protected [ListSymbol._onRemove]?(item: Item, trigger?: Event): void;
/** The number of items currently in the list. */
get length(): number;
/** Whether the list is currently readonly (changes fire the `readonly-change` event). */
get readonly(): boolean;
/**
 * Returns the index of the first occurrence of the given item, or -1 if it is not present.
 */
indexOf(item: Item): number;
/**
 * Returns an item matching the given `id`, or `null` if not present.
 */
getById(id: string): Item | null;
/**
 * Transform list to an array.
 */
toArray(): Item[];
[Symbol.iterator](): IterableIterator<Item>;
/** @internal */
[ListSymbol._add](item: Item, trigger?: Event): void;
/** @internal */
[ListSymbol._remove](item: Item, trigger?: Event): void;
/** @internal */
[ListSymbol._reset](trigger?: Event): void;
/** @internal */
[ListSymbol._setReadonly](readonly: boolean, trigger?: Event): void;
}
/** Events emitted by `List` when items are added/removed or the readonly state flips. */
interface ListEvents<Item extends ListItem = ListItem> {
add: ListAddEvent<Item>;
remove: ListRemoveEvent<Item>;
'readonly-change': ListReadonlyChangeEvent;
}
/**
* Fired when an item has been added to the list.
*
* @detail item
*/
interface ListAddEvent<Item extends ListItem> extends DOMEvent<Item> {
}
/**
* Fired when an item has been removed from the list.
*
* @detail item
*/
interface ListRemoveEvent<Item extends ListItem> extends DOMEvent<Item> {
}
/**
* Fired when the readonly state of the list has changed.
*
* @detail isReadonly
*/
interface ListReadonlyChangeEvent extends DOMEvent<boolean> {
}
interface FullscreenEvents {
'fullscreen-change': FullscreenChangeEvent;
'fullscreen-error': FullscreenErrorEvent;
}
/**
* Fired when an element enters/exits fullscreen. The event detail is a `boolean` indicating
* if fullscreen was entered (`true`) or exited (`false`).
*
* @bubbles
* @composed
* @detail isFullscreen
*/
interface FullscreenChangeEvent extends DOMEvent<boolean> {
}
/**
* Fired when an error occurs either entering or exiting fullscreen. This will generally occur
* if the user has not interacted with the page yet.
*
* @bubbles
* @composed
* @detail error
*/
interface FullscreenErrorEvent extends DOMEvent<unknown> {
}
/**
 * Tracks and controls the fullscreen state of the host element. Implements the
 * `FullscreenAdapter` contract and emits `FullscreenEvents` on change/error.
 */
declare class FullscreenController extends ViewController<{}, {}, FullscreenEvents> implements FullscreenAdapter {
/**
 * Tracks whether we're the active fullscreen event listener. Fullscreen events can only be
 * listened to globally on the document so we need to know if they relate to the current host
 * element or not.
 */
private _listening;
private _active;
/** Whether the host element is currently in fullscreen (see `FullscreenAdapter.active`). */
get active(): boolean;
/** Whether fullscreen can be attempted (see `FullscreenAdapter.supported`). */
get supported(): boolean;
protected onConnect(): void;
private _onDisconnect;
private _onChange;
private _onError;
/** Request fullscreen on the host element (see `FullscreenAdapter.enter`). */
enter(): Promise<void>;
/** Exit fullscreen on the host element (see `FullscreenAdapter.exit`). */
exit(): Promise<void>;
}
/** Whether fullscreen appears to be available in the current environment — presumably checks the native Fullscreen API; confirm in implementation. */
declare function canFullscreen(): boolean;
interface FullscreenAdapter {
/**
* Whether the host element is in fullscreen mode.
*/
readonly active: boolean;
/**
* Whether the native browser fullscreen API is available, or the current provider can
* toggle fullscreen mode. This does not mean that the operation is guaranteed to be successful,
* only that it can be attempted.
*
* @see {@link https://developer.mozilla.org/en-US/docs/Web/API/Fullscreen_API}
*/
readonly supported: boolean;
/**
* Request to display the current host element in fullscreen.
*
* @throws Error - if fullscreen API is not available.
* @see {@link https://developer.mozilla.org/en-US/docs/Web/API/Element/requestFullscreen}
*/
enter(): Promise<void>;
/**
* Attempt to exit fullscreen on the current host element.
*
* @throws Error - if fullscreen API is not available.
* @see {@link https://developer.mozilla.org/en-US/docs/Web/API/Document/exitFullscreen}
*/
exit(): Promise<void>;
}
declare global {
// Augment the DOM event map so `addEventListener('vds-log', …)` is typed on all elements.
interface HTMLElementEventMap extends LoggerEvents {
}
}
/** Events dispatched by `Logger` on its target (see `Logger.setTarget`). */
interface LoggerEvents {
'vds-log': LogEvent;
}
interface LogEventDetail {
/**
* The log level.
*/
level: LogLevel;
/**
* Data to be logged.
*/
data?: any[];
}
/**
* @bubbles
* @composed
* @detail log
*/
interface LogEvent extends DOMEvent<LogEventDetail> {
}
type ScreenOrientationType =
/**
* Landscape-primary is an orientation where the screen width is greater than the screen height.
* If the device's natural orientation is landscape, then it is in landscape-primary when held
* in that position. If the device's natural orientation is portrait, the user agent sets
* landscape-primary from the two options as shown in the screen orientation values table.
*/
'landscape-primary'
/**
* Landscape-secondary is an orientation where the screen width is greater than the screen
* height. If the device's natural orientation is landscape, it is in landscape-secondary when
* rotated 180º from its natural orientation. If the device's natural orientation is portrait,
* the user agent sets landscape-secondary from the two options as shown in the screen
* orientation values table.
*/
| 'landscape-secondary'
/**
* Portrait-primary is an orientation where the screen width is less than or equal to the screen
* height. If the device's natural orientation is portrait, then it is in portrait-primary when
* held in that position. If the device's natural orientation is landscape, the user agent sets
* portrait-primary from the two options as shown in the screen orientation values table.
*/
| 'portrait-primary'
/**
* Portrait-secondary is an orientation where the screen width is less than or equal to the
* screen height. If the device's natural orientation is portrait, then it is in
* portrait-secondary when rotated 180º from its natural position. If the device's natural
* orientation is landscape, the user agent sets portrait-secondary from the two options as
* shown in the screen orientation values table.
*/
| 'portrait-secondary';
type ScreenOrientationLockType =
/**
* Any is an orientation that means the screen can be locked to any one of portrait-primary,
* portrait-secondary, landscape-primary and landscape-secondary.
*/
'any'
/**
* Landscape is an orientation where the screen width is greater than the screen height and
* depending on platform convention locking the screen to landscape can represent
* landscape-primary, landscape-secondary or both.
*/
| 'landscape'
/**
* Landscape-primary is an orientation where the screen width is greater than the screen height.
* If the device's natural orientation is landscape, then it is in landscape-primary when held
* in that position. If the device's natural orientation is portrait, the user agent sets
* landscape-primary from the two options as shown in the screen orientation values table.
*/
| 'landscape-primary'
/**
* Landscape-secondary is an orientation where the screen width is greater than the screen
* height. If the device's natural orientation is landscape, it is in landscape-secondary when
* rotated 180º from its natural orientation. If the device's natural orientation is portrait,
* the user agent sets landscape-secondary from the two options as shown in the screen
* orientation values table.
*/
| 'landscape-secondary'
/**
* Natural is an orientation that refers to either portrait-primary or landscape-primary
* depending on the device's usual orientation. This orientation is usually provided by the
* underlying operating system.
*/
| 'natural'
/**
* Portrait is an orientation where the screen width is less than or equal to the screen height
* and depending on platform convention locking the screen to portrait can represent
* portrait-primary, portrait-secondary or both.
*/
| 'portrait'
/**
* Portrait-primary is an orientation where the screen width is less than or equal to the screen
* height. If the device's natural orientation is portrait, then it is in portrait-primary when
* held in that position. If the device's natural orientation is landscape, the user agent sets
* portrait-primary from the two options as shown in the screen orientation values table.
*/
| 'portrait-primary'
/**
* Portrait-secondary is an orientation where the screen width is less than or equal to the
* screen height. If the device's natural orientation is portrait, then it is in
* portrait-secondary when rotated 180º from its natural position. If the device's natural
* orientation is landscape, the user agent sets portrait-secondary from the two options as
* shown in the screen orientation values table.
*/
| 'portrait-secondary';
interface ScreenOrientationEvents {
'orientation-change': ScreenOrientationChangeEvent;
}
interface ScreenOrientationChangeEventDetail {
orientation: ScreenOrientationType;
lock?: ScreenOrientationLockType;
}
/**
* Fired when the current screen orientation changes.
*
* @detail orientation
*/
interface ScreenOrientationChangeEvent extends DOMEvent<ScreenOrientationChangeEventDetail> {
}
/**
 * Tracks the screen orientation and exposes lock/unlock controls backed by the native
 * Screen Orientation API. Emits `ScreenOrientationEvents` when the orientation changes.
 */
declare class ScreenOrientationController extends ViewController<{}, {}, ScreenOrientationEvents> {
// Current orientation type (backing signal for `type`).
private _type;
// Whether a lock is currently held (backing signal for `locked`).
private _locked;
// The lock type most recently requested via `lock()`.
private _currentLock;
/**
 * The current screen orientation type.
 *
 * @signal
 * @see {@link https://developer.mozilla.org/en-US/docs/Web/API/ScreenOrientation}
 * @see https://w3c.github.io/screen-orientation/#screen-orientation-types-and-locks
 */
get type(): ScreenOrientationType | undefined;
/**
 * Whether the screen orientation is currently locked.
 *
 * @signal
 * @see {@link https://developer.mozilla.org/en-US/docs/Web/API/ScreenOrientation}
 * @see https://w3c.github.io/screen-orientation/#screen-orientation-types-and-locks
 */
get locked(): boolean;
/**
 * Whether the viewport is in a portrait orientation.
 *
 * @signal
 */
get portrait(): boolean;
/**
 * Whether the viewport is in a landscape orientation.
 *
 * @signal
 */
get landscape(): boolean;
/**
 * Whether the native Screen Orientation API is available.
 */
static readonly supported: boolean;
/**
 * Whether the native Screen Orientation API is available.
 */
get supported(): boolean;
protected onConnect(): void;
protected _onDisconnect(): Promise<void>;
protected _onOrientationChange(event: Event): void;
/**
 * Locks the orientation of the screen to the desired orientation type using the
 * Screen Orientation API.
 *
 * @param lockType - The screen lock orientation type.
 * @throws Error - If screen orientation API is unavailable.
 * @see {@link https://developer.mozilla.org/en-US/docs/Web/API/Screen/orientation}
 * @see {@link https://w3c.github.io/screen-orientation}
 */
lock(lockType: ScreenOrientationLockType): Promise<void>;
/**
 * Unlocks the orientation of the screen to its default state using the Screen Orientation
 * API. This method will throw an error if the API is unavailable.
 *
 * @throws Error - If screen orientation API is unavailable.
 * @see {@link https://developer.mozilla.org/en-US/docs/Web/API/Screen/orientation}
 * @see {@link https://w3c.github.io/screen-orientation}
 */
unlock(): Promise<void>;
private _assertScreenOrientationAPI;
private _getScreenOrientation;
}
/**
 * Media request events. These bubble (composed) from UI components up to the player element —
 * typically dispatched via `MediaRemoteControl`. See each event interface for details.
 */
interface MediaRequestEvents {
'media-airplay-request': MediaAirPlayRequestEvent;
'media-audio-track-change-request': MediaAudioTrackChangeRequestEvent;
'media-clip-start-change-request': MediaClipStartChangeRequestEvent;
'media-clip-end-change-request': MediaClipEndChangeRequestEvent;
'media-duration-change-request': MediaDurationChangeRequestEvent;
'media-enter-fullscreen-request': MediaEnterFullscreenRequestEvent;
'media-exit-fullscreen-request': MediaExitFullscreenRequestEvent;
'media-enter-pip-request': MediaEnterPIPRequestEvent;
'media-exit-pip-request': MediaExitPIPRequestEvent;
'media-google-cast-request': MediaGoogleCastRequestEvent;
'media-live-edge-request': MediaLiveEdgeRequestEvent;
'media-loop-request': MediaLoopRequestEvent;
'media-user-loop-change-request': MediaUserLoopChangeRequestEvent;
'media-orientation-lock-request': MediaOrientationLockRequestEvent;
'media-orientation-unlock-request': MediaOrientationUnlockRequestEvent;
'media-mute-request': MediaMuteRequestEvent;
'media-pause-request': MediaPauseRequestEvent;
'media-pause-controls-request': MediaPauseControlsRequestEvent;
'media-play-request': MediaPlayRequestEvent;
'media-quality-change-request': MediaQualityChangeRequestEvent;
'media-rate-change-request': MediaRateChangeRequestEvent;
'media-audio-gain-change-request': MediaAudioGainChangeRequestEvent;
'media-resume-controls-request': MediaResumeControlsRequestEvent;
'media-seek-request': MediaSeekRequestEvent;
'media-seeking-request': MediaSeekingRequestEvent;
'media-start-loading': MediaStartLoadingRequestEvent;
'media-poster-start-loading': MediaPosterStartLoadingRequestEvent;
'media-text-track-change-request': MediaTextTrackChangeRequestEvent;
'media-unmute-request': MediaUnmuteRequestEvent;
'media-volume-change-request': MediaVolumeChangeRequestEvent;
}
/**
* Fired when requesting the AirPlay picker to open.
*
* @bubbles
* @composed
*/
interface MediaAirPlayRequestEvent extends DOMEvent<void> {
}
/**
* Fired when requesting the media poster to begin loading. This will only take effect if the
* `posterLoad` strategy on the player is set to `custom`.
*
* @bubbles
* @composed
*/
interface MediaPosterStartLoadingRequestEvent extends DOMEvent<void> {
}
/**
* Fired when requesting to change the `mode` on a text track at the given index in the
* `TextTrackList` on the player.
*
* @bubbles
* @composed
*/
interface MediaTextTrackChangeRequestEvent extends DOMEvent<{
index: number;
mode: TextTrackMode;
}> {
}
/**
* Fired when requesting the media to be muted.
*
* @bubbles
* @composed
*/
interface MediaMuteRequestEvent extends DOMEvent<void> {
}
/**
* Fired when requesting the media to be unmuted.
*
* @bubbles
* @composed
*/
interface MediaUnmuteRequestEvent extends DOMEvent<void> {
}
/**
* Whether to request fullscreen on the media (i.e., `<media-player>`). The `prefer-media` option
* will first see if the native fullscreen API is available, if not it'll try the media provider.
*/
type MediaFullscreenRequestTarget = 'prefer-media' | 'media' | 'provider';
/**
* Fired when requesting to change the current audio track to the given index in the
* `AudioTrackList` on the player.
*
* @bubbles
* @composed
*/
interface MediaAudioTrackChangeRequestEvent extends DOMEvent<number> {
}
/**
* Fired when requesting to change the clip start time. The event `detail` specifies the new start
* time in seconds.
*
* @bubbles
* @composed
*/
interface MediaClipStartChangeRequestEvent extends DOMEvent<number> {
}
/**
* Fired when requesting to change the clip end time. The event `detail` specifies the new end
* time in seconds.
*
* @bubbles
* @composed
*/
interface MediaClipEndChangeRequestEvent extends DOMEvent<number> {
}
/**
* Fired when requesting to change the length of the media. The event `detail` specifies the
* new length in seconds.
*
* @bubbles
* @composed
*/
interface MediaDurationChangeRequestEvent extends DOMEvent<number> {
}
/**
* Fired when requesting media to enter fullscreen. The event `detail` can specify the
* fullscreen target, which can be the media or provider (defaults to `prefer-media`).
*
* @bubbles
* @composed
*/
interface MediaEnterFullscreenRequestEvent extends DOMEvent<MediaFullscreenRequestTarget> {
}
/**
* Fired when requesting media to exit fullscreen. The event `detail` can specify the fullscreen
* target, which can be the media or provider (defaults to `prefer-media`).
*
* @bubbles
* @composed
*/
interface MediaExitFullscreenRequestEvent extends DOMEvent<MediaFullscreenRequestTarget> {
}
/**
* Fired when requesting media to enter picture-in-picture mode.
*
* @bubbles
* @composed
*/
interface MediaEnterPIPRequestEvent extends DOMEvent<void> {
}
/**
* Fired when requesting media to exit picture-in-picture mode.
*
* @bubbles
* @composed
*/
interface MediaExitPIPRequestEvent extends DOMEvent<void> {
}
/**
* Fired when requesting Google Cast.
*
* @bubbles
* @composed
*/
interface MediaGoogleCastRequestEvent extends DOMEvent<void> {
}
/**
* Fired when requesting media to seek to the live edge (i.e., set the current time to the current
* live time).
*/
interface MediaLiveEdgeRequestEvent extends DOMEvent<void> {
}
/**
* Fired when requesting media playback to begin/resume.
*
* @bubbles
* @composed
*/
interface MediaPlayRequestEvent extends DOMEvent<void> {
}
/**
* Fired when requesting to change the current video quality to the given index in the
* `VideoQualityList` on the player.
*
* @bubbles
* @composed
* @detail qualityIndex
*/
interface MediaQualityChangeRequestEvent extends DOMEvent<number> {
}
/**
* Fired when requesting to change the current playback rate.
*
* @bubbles
* @composed
* @detail rate
*/
interface MediaRateChangeRequestEvent extends DOMEvent<number> {
}
/**
* Fired when requesting to change the current audio gain.
*
* @bubbles
* @composed
* @detail gain
*/
interface MediaAudioGainChangeRequestEvent extends DOMEvent<number> {
}
/**
* Fired when requesting media playback to temporarily stop.
*
* @bubbles
* @composed
*/
interface MediaPauseRequestEvent extends DOMEvent<void> {
}
/**
* Fired when requesting a time change. In other words, moving the play head to a new position.
*
* @bubbles
* @composed
* @detail seekTo
*/
interface MediaSeekRequestEvent extends DOMEvent<number> {
}
/**
* Fired when seeking/scrubbing to a new playback position.
*
* @bubbles
* @composed
* @detail time
*/
interface MediaSeekingRequestEvent extends DOMEvent<number> {
}
/**
* Fired when requesting media to begin loading. This will only take effect if the `load`
* strategy on the player is set to `custom`.
*
* @bubbles
* @composed
*/
interface MediaStartLoadingRequestEvent extends DOMEvent<void> {
}
/**
* Fired when requesting the media volume to be set to a new level.
*
* @bubbles
* @composed
* @detail volume
*/
interface MediaVolumeChangeRequestEvent extends DOMEvent<number> {
}
/**
* Fired when controls visibility tracking may resume. This is typically called after requesting
* tracking to pause via `media-pause-controls-request`.
*
* @bubbles
* @composed
*/
interface MediaResumeControlsRequestEvent extends DOMEvent<void> {
}
/**
* Fired when controls visibility tracking should pause. This is typically used when a control
* is being actively interacted with, and we don't want the controls to be hidden before
* the interaction is complete (eg: scrubbing, or settings is open).
*
* @bubbles
* @composed
*/
interface MediaPauseControlsRequestEvent extends DOMEvent<void> {
}
/**
* Fired when requesting the poster _should_ be rendered by the media provider. This should be
* fired if a custom poster is _not_ being used.
*
* @bubbles
* @composed
*/
interface MediaShowPosterRequestEvent extends DOMEvent<void> {
}
/**
* Fired when requesting the poster should _not_ be rendered by the media provider. This
* should be fired if a custom poster element is being used (e.g., `media-poster`).
*
* @bubbles
* @composed
*/
interface MediaHidePosterRequestEvent extends DOMEvent<void> {
}
/**
* Internal event that is fired by a media provider when requesting media playback to restart after
* reaching the end. This event also helps notify the player that media will be looping.
*
* @internal
* @bubbles
* @composed
*/
interface MediaLoopRequestEvent extends DOMEvent<void> {
}
/**
* Fired when the user loop preference changes.
*
* @bubbles
* @composed
*/
interface MediaUserLoopChangeRequestEvent extends DOMEvent<boolean> {
}
/**
* Fired when requesting the screen orientation to be locked to a certain type.
*
* @bubbles
* @composed
*/
interface MediaOrientationLockRequestEvent extends DOMEvent<ScreenOrientationLockType> {
}
/**
* Fired when requesting the screen orientation to be unlocked.
*
* @bubbles
* @composed
*/
interface MediaOrientationUnlockRequestEvent extends DOMEvent<void> {
}
/**
* A simple facade for dispatching media requests to the nearest media player element.
*
* @docs {@link https://www.vidstack.io/docs/player/core-concepts/state-management#updating}
*
*/
declare class MediaRemoteControl {
private _logger;
private _target;
private _player;
private _prevTrackIndex;
constructor(_logger?: Logger | undefined);
/**
* Set the target from which to dispatch media requests events from. The events should bubble
* up from this target to the player element.
*
* @example
* ```ts
* const button = document.querySelector('button');
* remote.setTarget(button);
* ```
*/
setTarget(target: EventTarget | null): void;
/**
* Returns the current player element. This method will attempt to find the player by
* searching up from either the given `target` or default target set via `remote.setTarget`.
*
* @example
* ```ts
* const player = remote.getPlayer();
* ```
*/
getPlayer(target?: EventTarget | null): MediaPlayer | null;
/**
* Set the current player element so the remote can support toggle methods such as
* `togglePaused` as they rely on the current media state.
*/
setPlayer(player: MediaPlayer | null): void;
/**
* Dispatch a request to start the media loading process. This will only work if the media
* player has been initialized with a custom loading strategy `load="custom"`.
*
* @docs {@link https://www.vidstack.io/docs/player/core-concepts/loading#load-strategies}
*/
startLoading(trigger?: Event): void;
/**
* Dispatch a request to start the poster loading process. This will only work if the media
* player has been initialized with a custom poster loading strategy `posterLoad="custom"`.
*
* @docs {@link https://www.vidstack.io/docs/player/core-concepts/loading#load-strategies}
*/
startLoadingPoster(trigger?: Event): void;
/**
* Dispatch a request to connect to AirPlay.
*
* @see {@link https://www.apple.com/au/airplay}
*/
requestAirPlay(trigger?: Event): void;
/**
* Dispatch a request to connect to Google Cast.
*
* @see {@link https://developers.google.com/cast/docs/overview}
*/
requestGoogleCast(trigger?: Event): void;
/**
* Dispatch a request to begin/resume media playback.
*/
play(trigger?: Event): void;
/**
* Dispatch a request to pause media playback.
*/
pause(trigger?: Event): void;
/**
* Dispatch a request to set the media volume to mute (0).
*/
mute(trigger?: Event): void;
/**
* Dispatch a request to unmute the media volume and set it back to it's previous state.
*/
unmute(trigger?: Event): void;
/**
* Dispatch a request to enter fullscreen.
*
* @docs {@link https://www.vidstack.io/docs/player/api/fullscreen#remote-control}
*/
enterFullscreen(target?: MediaFullscreenRequestTarget, trigger?: Event): void;
/**
* Dispatch a request to exit fullscreen.
*
* @docs {@link https://www.vidstack.io/docs/player/api/fullscreen#remote-control}
*/
exitFullscreen(target?: MediaFullscreenRequestTarget, trigger?: Event): void;
/**
* Dispatch a request to lock the screen orientation.
*
* @docs {@link https://www.vidstack.io/docs/player/api/screen-orientation#remote-control}
*/
lockScreenOrientation(lockType: ScreenOrientationLockType, trigger?: Event): void;
/**
* Dispatch a request to unlock the screen orientation.
*
* @docs {@link https://www.vidstack.io/docs/player/api/screen-orientation#remote-control}
*/
unlockScreenOrientation(trigger?: Event): void;
/**
* Dispatch a request to enter picture-in-picture mode.
*
* @docs {@link https://www.vidstack.io/docs/player/api/picture-in-picture#remote-control}
*/
enterPictureInPicture(trigger?: Event): void;
/**
* Dispatch a request to exit picture-in-picture mode.
*
* @docs {@link https://www.vidstack.io/docs/player/api/picture-in-picture#remote-control}
*/
exitPictureInPicture(trigger?: Event): void;
/**
* Notify the media player that a seeking process is happening and to seek to the given `time`.
*/
seeking(time: number, trigger?: Event): void;
/**
* Notify the media player that a seeking operation has completed and to seek to the given `time`.
* This is generally called after a series of `remote.seeking()` calls.
*/
seek(time: number, trigger?: Event): void;
seekToLiveEdge(trigger?: Event): void;
/**
* Dispatch a request to update the length of the media in seconds.
*
* @example
* ```ts
* remote.changeDuration(100); // 100 seconds
* ```
*/
changeDuration(duration: number, trigger?: Event): void;
/**
* Dispatch a request to update the clip start time. This is the time at which media playback
* should start at.
*
* @example
* ```ts
* remote.changeClipStart(100); // start at 100 seconds
* ```
*/
changeClipStart(startTime: number, trigger?: Event): void;
/**
* Dispatch a request to update the clip end time. This is the time at which media playback
* should end at.
*
* @example
* ```ts
* remote.changeClipEnd(100); // end at 100 seconds
* ```
*/
changeClipEnd(endTime: number, trigger?: Event): void;
/**
* Dispatch a request to update the media volume to the given `volume` level which is a value
* between 0 and 1.
*
* @docs {@link https://www.vidstack.io/docs/player/api/audio-gain#remote-control}
* @example
* ```ts
* remote.changeVolume(0); // 0%
* remote.changeVolume(0.05); // 5%
* remote.changeVolume(0.5); // 50%
* remote.changeVolume(0.75); // 75%
* remote.changeVolume(1); // 100%
* ```
*/
changeVolume(volume: number, trigger?: Event): void;
/**
* Dispatch a request to change the current audio track.
*
* @example
* ```ts
* remote.changeAudioTrack(1); // track at index 1
* ```
*/
changeAudioTrack(index: number, trigger?: Event): void;
/**
* Dispatch a request to change the video quality. The special value `-1` represents auto quality
* selection.
*
* @example
* ```ts
* remote.changeQuality(-1); // auto
* remote.changeQuality(1); // quality at index 1
* ```
*/
changeQuality(index: number, trigger?: Event): void;
/**
* Request auto quality selection.
*/
requestAutoQuality(trigger?: Event): void;
/**
* Dispatch a request to change the mode of the text track at the given index.
*
* @example
* ```ts
* remote.changeTextTrackMode(1, 'showing'); // track at index 1
* ```
*/
changeTextTrackMode(index: number, mode: TextTrackMode, trigger?: Event): void;
/**
* Dispatch a request to change the media playback rate.
*
* @example
* ```ts
* remote.changePlaybackRate(0.5); // Half the normal speed
* remote.changePlaybackRate(1); // Normal speed
* remote.changePlaybackRate(1.5); // 50% faster than normal
* remote.changePlaybackRate(2); // Double the normal speed
* ```
*/
changePlaybackRate(rate: number, trigger?: Event): void;
/**
* Dispatch a request to change the media audio gain.
*
* @example
* ```ts
* remote.changeAudioGain(1); // Disable audio gain
* remote.changeAudioGain(1.5); // 50% louder
* remote.changeAudioGain(2); // 100% louder
* ```
*/
changeAudioGain(gain: number, trigger?: Event): void;
/**
* Dispatch a request to resume idle tracking on controls.
*/
resumeControls(trigger?: Event): void;
/**
* Dispatch a request to pause controls idle tracking. Pausing tracking will result in the
* controls being visible until `remote.resumeControls()` is called. This method
* is generally used when building custom controls and you'd like to prevent the UI from
* disappearing.
*
* @example
* ```ts
* // Prevent controls hiding while menu is being interacted with.
* function onSettingsOpen() {
* remote.pauseControls();
* }
*
* function onSettingsClose() {
* remote.resumeControls();
* }
* ```
*/
pauseControls(trigger?: Event): void;
/**
* Dispatch a request to toggle the media playback state.
*/
togglePaused(trigger?: Event): void;
/**
* Dispatch a request to toggle the controls visibility.
*/
toggleControls(trigger?: Event): void;
/**
* Dispatch a request to toggle the media muted state.
*/
toggleMuted(trigger?: Event): void;
/**
* Dispatch a request to toggle the media fullscreen state.
*
* @docs {@link https://www.vidstack.io/docs/player/api/fullscreen#remote-control}
*/
toggleFullscreen(target?: MediaFullscreenRequestTarget, trigger?: Event): void;
/**
* Dispatch a request to toggle the media picture-in-picture mode.
*
* @docs {@link https://www.vidstack.io/docs/player/api/picture-in-picture#remote-control}
*/
togglePictureInPicture(trigger?: Event): void;
/**
* Show captions.
*/
showCaptions(trigger?: Event): void;
/**
* Turn captions off.
*/
disableCaptions(trigger?: Event): void;
/**
* Dispatch a request to toggle the current captions mode.
*/
toggleCaptions(trigger?: Event): void;
userPrefersLoopChange(prefersLoop: boolean, trigger?: Event): void;
private _dispatchRequest;
private _noPlayerWarning;
}
/** Where keyboard shortcuts are listened for: the whole `document`, or only the `player` element. */
type MediaKeyTarget = 'document' | 'player';
/**
 * Maps media actions to keyboard shortcuts. The named properties below are the built-in
 * actions; the index signature additionally permits custom action names.
 */
interface MediaKeyShortcuts {
[keys: string]: MediaKeyShortcut | undefined;
togglePaused?: MediaKeyShortcut;
toggleMuted?: MediaKeyShortcut;
toggleFullscreen?: MediaKeyShortcut;
togglePictureInPicture?: MediaKeyShortcut;
toggleCaptions?: MediaKeyShortcut;
seekBackward?: MediaKeyShortcut;
seekForward?: MediaKeyShortcut;
speedUp?: MediaKeyShortcut;
slowDown?: MediaKeyShortcut;
volumeUp?: MediaKeyShortcut;
volumeDown?: MediaKeyShortcut;
}
/** A shortcut definition: a callback descriptor, one or more key strings, or `null` to disable. */
type MediaKeyShortcut = MediaKeysCallback | string | string[] | null;
/** Callback form of a keyboard shortcut — handle key up/down with access to the player and remote. */
interface MediaKeysCallback {
keys: string | string[];
/** @deprecated - use `onKeyUp` or `onKeyDown` */
callback?(event: KeyboardEvent, remote: MediaRemoteControl): void;
onKeyUp?(context: {
event: KeyboardEvent;
player: MediaPlayer;
remote: MediaRemoteControl;
}): void;
onKeyDown?(context: {
event: KeyboardEvent;
player: MediaPlayer;
remote: MediaRemoteControl;
}): void;
}
/** A `ListItem` that additionally tracks whether it is currently selected. */
interface SelectListItem extends ListItem {
selected: boolean;
}
/**
 * A `List` that tracks item selection. `selected`/`selectedIndex` expose the current
 * selection; changes are surfaced via the `change` event (see `SelectListEvents`).
 */
declare class SelectList<Item extends SelectListItem, Events extends SelectListEvents<Item>> extends List<Item, Events> {
/** The currently selected item, or `null` when none is selected. */
get selected(): Item | null;
/** Index of the currently selected item — presumably -1 when none is selected; confirm in implementation. */
get selectedIndex(): number;
/** @internal */
protected [ListSymbol._onRemove](item: Item, trigger?: Event): void;
/** @internal */
protected [ListSymbol._onUserSelect]?(): void;
/** @internal */
[ListSymbol._add](item: Omit<Item, 'selected'>, trigger?: Event): void;
/** @internal */
[ListSymbol._select](item: Item | undefined, selected: boolean, trigger?: Event): void;
}
interface SelectListEvents<Item extends SelectListItem = SelectListItem> extends ListEvents<Item> {
change: SelectListChangeEvent<Item>;
}
/**
* @detail change
*/
interface SelectListChangeEvent<Item extends SelectListItem> extends DOMEvent<SelectListChangeEventDetail<Item>> {
}
interface SelectListChangeEventDetail<Item extends SelectListItem> {
prev: Item | null;
current: Item | null;
}
declare const SET_AUTO: unique symbol;
declare const ENABLE_AUTO: unique symbol;
/** @internal - symbol keys used by {@link VideoQualityList} for auto-quality control. */
declare const QualitySymbol: {
readonly _setAuto: typeof SET_AUTO;
readonly _enableAuto: typeof ENABLE_AUTO;
};
/**
* A select list of the available video qualities (renditions) for the current media.
*
* @see {@link https://vidstack.io/docs/player/core-concepts/video-quality#quality-list}
*/
declare class VideoQualityList extends SelectList<VideoQuality, VideoQualityListEvents> {
// Backing state for the `auto` getter (implementation not visible in this declaration file).
private _auto;
/**
* Configures quality switching:
*
* - `current`: Trigger an immediate quality level switch. This will abort the current fragment
* request if any, flush the whole buffer, and fetch fragment matching with current position
* and requested quality level.
*
* - `next`: Trigger a quality level switch for next fragment. This could eventually flush
* already buffered next fragment.
*
* - `load`: Set quality level for next loaded fragment.
*
* @see {@link https://www.vidstack.io/docs/player/api/video-quality#switch}
* @see {@link https://github.com/video-dev/hls.js/blob/master/docs/API.md#quality-switch-control-api}
*/
switch: 'current' | 'next' | 'load';
/**
* Whether automatic quality selection is enabled.
*/
get auto(): boolean;
/** @internal */
[QualitySymbol._enableAuto]?: (trigger?: Event) => void;
/** @internal */
protected [ListSymbol._onUserSelect](): void;
/** @internal */
protected [ListSymbol._onReset](trigger?: Event): void;
/**
* Request automatic quality selection (if supported). This will be a no-op if the list is
* `readonly` as that already implies auto-selection.
*/
autoSelect(trigger?: Event): void;
/** Looks up a quality by its `src` value, returning `undefined` when no match is found. */
getBySrc(src: unknown): VideoQuality | undefined;
/** @internal */
[QualitySymbol._setAuto](auto: boolean, trigger?: Event): void;
}
/**
* Describes a single selectable video rendition.
*/
interface VideoQuality extends SelectListItem {
/** A string which uniquely identifies this quality within the list. */
readonly id: string;
/** The underlying source this quality maps to, if any. */
readonly src?: unknown;
/** Video width in pixels. */
readonly width: number;
/** Video height in pixels. */
readonly height: number;
/** Bitrate of this rendition, or `null` when unknown. */
readonly bitrate: number | null;
/** Codec string for this rendition, or `null` when unknown. */
readonly codec: string | null;
}
/** Events fired by a {@link VideoQualityList}. */
interface VideoQualityListEvents {
add: VideoQualityAddEvent;
remove: VideoQualityRemoveEvent;
change: VideoQualityChangeEvent;
'auto-change': VideoQualityAutoChangeEvent;
'readonly-change': ListReadonlyChangeEvent;
}
/** Base event type for all {@link VideoQualityList} events. */
interface VideoQualityListEvent<T> extends DOMEvent<T> {
target: VideoQualityList;
}
/**
* Fired when a video quality has been added to the list.
*
* @detail newQuality
*/
interface VideoQualityAddEvent extends VideoQualityListEvent<VideoQuality> {
}
/**
* Fired when a video quality has been removed from the list.
*
* @detail removedQuality
*/
interface VideoQualityRemoveEvent extends VideoQualityListEvent<VideoQuality> {
}
/**
* Fired when the selected video quality has changed.
*
* @detail change
*/
interface VideoQualityChangeEvent extends VideoQualityListEvent<VideoQualityChangeEventDetail> {
}
/** Detail of a {@link VideoQualityChangeEvent}: previous and current selection. */
interface VideoQualityChangeEventDetail {
prev: VideoQuality | null;
current: VideoQuality;
}
/**
* Fired when auto quality selection is enabled or disabled.
*/
interface VideoQualityAutoChangeEvent extends VideoQualityListEvent<boolean> {
}
/**
* Internal delegate that forwards media events into the player. Constructed with an event
* handler and the media context.
*/
declare class MediaPlayerDelegate {
private _handle;
private _media;
constructor(_handle: (event: Event) => void, _media: MediaContext);
/**
* Dispatches a media event of the given `type`. The `detail` argument is required only when
* the event type carries a detail payload; `trigger` is the originating event, if any.
*/
_notify: <Type extends keyof MediaEvents>(type: Type, ...init: InferEventDetail<MediaEvents[Type]> extends void | undefined | never ? [detail?: never, trigger?: Event] : [detail: InferEventDetail<MediaEvents[Type]>, trigger?: Event]) => void;
/**
* Signals media readiness with optional duration/seekable/buffered info.
* NOTE(review): presumably kicks off the can-play flow and autoplay attempt (see
* `_attemptAutoplay`) — confirm in implementation.
*/
_ready(info?: {
duration: number;
seekable: TimeRanges;
buffered: TimeRanges;
}, trigger?: Event): Promise<void>;
private _attemptAutoplay;
}
/**
* The current media type.
*/
type MediaType = 'unknown' | 'audio' | 'video';
/**
* The current media stream type.
*/
type MediaStreamType = 'unknown' | 'on-demand' | 'live' | 'live:dvr' | 'll-live' | 'll-live:dvr';
/** Valid values for the media element `crossorigin` attribute. */
type MediaCrossOrigin = '' | 'anonymous' | 'use-credentials';
/** The kind of remote playback in use, if any. */
type RemotePlaybackType = 'airplay' | 'google-cast' | 'none';
/** Information about the connected remote playback device. */
interface RemotePlaybackInfo {
/** Human-readable name of the remote device, if known. */
deviceName?: string;
}
/**
* Indicates the current view type which determines how the media will be presented.
*/
type MediaViewType = 'unknown' | 'audio' | 'video';
/**
* Indicates the type of strategy that should be used to initiate the loading process.
*
* @docs {@link https://www.vidstack.io/docs/player/core-concepts/loading#load-strategies}
*/
type MediaLoadingStrategy = 'eager' | 'idle' | 'visible' | 'custom' | 'play';
/**
* Indicates the type of strategy that should be used to initiate the poster loading process.
*
* @docs {@link https://www.vidstack.io/docs/player/core-concepts/loading#load-strategies}
*/
type MediaPosterLoadingStrategy = 'eager' | 'idle' | 'visible' | 'custom';
/**
* A number which represents the general type of error that occurred.
*
* - *Abort Error Code (1):* The fetching of the associated resource was aborted by the user's
* request.
*
* - *Network Error Code (2):* Some kind of network error occurred which prevented the media from
* being successfully fetched, despite having previously been available.
*
* - *Decode Error Code (3):* Despite having previously been determined to be usable, an error
* occurred while trying to decode the media resource, resulting in an error.
*
* - *Invalid Resource Error Code (4):* The associated resource or media provider object (such as
* a `MediaStream`) has been found to be unsuitable.
*
* @see https://developer.mozilla.org/en-US/docs/Web/API/MediaError
* @see https://developer.mozilla.org/en-US/docs/Web/API/MediaError/code
*/
type MediaErrorCode = 1 | 2 | 3 | 4;
/** Detailed information about a media error. */
interface MediaErrorDetail {
/** Human-readable description of the error. */
message: string;
/** General error category — see {@link MediaErrorCode}. */
code?: MediaErrorCode;
/** The underlying JavaScript error, if one was thrown. */
error?: Error;
/** The native `MediaError` from the media element, if available. */
mediaError?: MediaError;
}
/**
* A DASH manifest that has already been parsed into an object (rather than a URL string),
* distinguished by `protocol: 'DASH'`.
*/
type ParsedDASHManifest = {
[key: string]: any;
} & {
protocol: 'DASH';
};
/** Any accepted media source: a URL string or one of the typed source objects. */
type MediaSrc = string | AudioSrc | VideoSrc | HLSSrc | DASHSrc | YouTubeSrc | VimeoSrc;
/** Object sources accepted directly by an HTML media element. */
type MediaSrcObject = MediaStream | MediaSource | Blob;
/** A source playable by a native HTML media element: a URL string or a source object. */
type HTMLMediaSrc = string | MediaSrcObject;
/** Normalized source descriptor pairing a source value with its MIME/provider type. */
interface Src<T = unknown> {
src: T;
type: string;
}
/** An audio source with an explicit audio MIME type and optional metadata. */
interface AudioSrc extends AudioSrcMeta {
src: HTMLMediaSrc;
type: AudioMimeType;
}
type AudioMimeType = 'audio/mpeg' | 'audio/ogg' | 'audio/3gp' | 'audio/mp3' | 'audio/webm' | 'audio/flac' | 'audio/object';
/** Optional metadata describing an audio source. */
interface AudioSrcMeta {
id?: string;
bitrate?: number;
channels?: number;
}
/** A video source with an explicit video MIME type and optional metadata. */
interface VideoSrc extends VideoSrcMeta {
src: HTMLMediaSrc;
type: VideoMimeType;
}
type VideoMimeType = 'video/mp4' | 'video/webm' | 'video/3gp' | 'video/ogg' | 'video/avi' | 'video/mpeg' | 'video/object';
/** Optional metadata describing a video source (dimensions, bitrate, codec, etc.). */
interface VideoSrcMeta {
id?: string;
width?: number;
height?: number;
bitrate?: number;
framerate?: number;
codec?: string;
}
/** An HLS source: a playlist URL with one of the recognized HLS MIME types. */
interface HLSSrc {
src: string;
type: HLSMimeType;
}
type HLSMimeType = 'application/vnd.apple.mpegurl' | 'audio/mpegurl' | 'audio/x-mpegurl' | 'application/x-mpegurl' | 'video/x-mpegurl' | 'video/mpegurl' | 'application/mpegurl';
/** A DASH source: a manifest URL or a pre-parsed manifest object. */
interface DASHSrc {
src: string | ParsedDASHManifest;
type: DASHMimeType;
}
type DASHMimeType = 'application/dash+xml' | 'application/xml' | 'video/dash+xml' | 'video/xml' | 'video/dash';
/** A YouTube embed source. */
interface YouTubeSrc {
src: string;
type: 'video/youtube';
}
/** A Vimeo embed source. */
interface VimeoSrc {
src: string;
type: 'video/vimeo';
}
/** Type guard that narrows `src` to a {@link VideoSrc} with both `width` and `height` set. */
declare function isVideoQualitySrc(src: Src): src is SetRequired<VideoSrc, 'width' | 'height'>;
/**
* Storage contract for persisting player settings and playback position. Getters resolve
* `null` when no value is stored; each setter is optional — omitting it makes that setting
* effectively read-only from the player's perspective.
*/
interface MediaStorage {
getVolume(): Promise<number | null>;
setVolume?(volume: number): Promise<void>;
getMuted(): Promise<boolean | null>;
setMuted?(muted: boolean): Promise<void>;
getTime(): Promise<number | null>;
setTime?(time: number, ended?: boolean): Promise<void>;
getLang(): Promise<string | null>;
setLang?(lang: string | null): Promise<void>;
getCaptions(): Promise<boolean | null>;
setCaptions?(captions: boolean): Promise<void>;
getPlaybackRate(): Promise<number | null>;
setPlaybackRate?(rate: number): Promise<void>;
getVideoQuality(): Promise<SerializedVideoQuality | null>;
setVideoQuality?(quality: SerializedVideoQuality | null): Promise<void>;
getAudioGain(): Promise<number | null>;
setAudioGain?(gain: number | null): Promise<void>;
/**
* Called when media is ready for playback and new data can be loaded.
*/
onLoad?(src: Src): void | Promise<void>;
/**
* Called when the `mediaId` has changed. This method can return a function to be called
* before the next change.
*
* - The `mediaId` is computed from the current source and clip times. It will be `null` if
* there is no source.
*
* - The `playerId` is the string provided to the player `storage` prop (if set), or the `id`
* set on the player element, otherwise `undefined`.
*/
onChange?(src: Src, mediaId: string | null, playerId?: string): MaybeStopEffect;
/**
* Called when storage is being destroyed either because the `storage` property on the player
* has changed, or the player is being destroyed.
*/
onDestroy?(): void;
}
/** Minimal serializable description of a video quality, used for persistence. */
interface SerializedVideoQuality {
id: string;
width: number;
height: number;
bitrate?: number | null;
}
/**
* {@link MediaStorage} implementation that persists settings on the client.
* NOTE(review): presumably backed by `localStorage`, keyed by player/media id — confirm in
* implementation.
*/
declare class LocalMediaStorage implements MediaStorage {
protected playerId: string;
protected mediaId: string | null;
// In-memory snapshot of stored values (implementation not visible in this declaration file).
private _data;
getVolume(): Promise<number | null>;
setVolume(volume: number): Promise<void>;
getMuted(): Promise<boolean | null>;
setMuted(muted: boolean): Promise<void>;
getTime(): Promise<number | null>;
setTime(time: number, ended: boolean): Promise<void>;
getLang(): Promise<string | null>;
setLang(lang: string | null): Promise<void>;
getCaptions(): Promise<boolean | null>;
setCaptions(enabled: boolean): Promise<void>;
getPlaybackRate(): Promise<number | null>;
setPlaybackRate(rate: any): Promise<void>;
getAudioGain(): Promise<number | null>;
setAudioGain(gain: number | null): Promise<void>;
getVideoQuality(): Promise<SerializedVideoQuality | null>;
setVideoQuality(quality: SerializedVideoQuality | null): Promise<void>;
onChange(src: Src, mediaId: string | null, playerId?: string): void;
/** Persists the current data snapshot. */
protected save(): void;
/** Throttled save of the current playback time; exposes `cancel` and `flush` controls. */
protected saveTimeThrottled: (() => void) & {
cancel: () => void;
flush: () => void;
};
private saveTime;
}
/**
* A select list of the available audio tracks for the current media.
*
* @see {@link https://vidstack.io/docs/player/api/audio-tracks}
*/
declare class AudioTrackList extends SelectList<AudioTrack, AudioTrackListEvents> {
}
/**
* @see {@link https://vidstack.io/docs/player/api/audio-tracks}
*/
interface AudioTrack extends SelectListItem {
/**
* A string which uniquely identifies the track within the media.
*/
readonly id: string;
/**
* A human-readable label for the track, or an empty string if unknown.
*/
readonly label: string;
/**
* A string specifying the audio track's primary language, or an empty string if unknown. The
* language is specified as a BCP 47 (RFC 5646) language code, such as "en-US" or "pt-BR".
*/
readonly language: string;
/**
* A string specifying the category into which the track falls. For example, the main audio
* track would have a kind of "main".
*
* @see {@link https://developer.mozilla.org/en-US/docs/Web/API/AudioTrack/kind}
*/
readonly kind: string;
}
/** Events fired by an {@link AudioTrackList}. */
interface AudioTrackListEvents {
add: AudioTrackAddEvent;
remove: AudioTrackRemoveEvent;
change: AudioTrackChangeEvent;
'readonly-change': ListReadonlyChangeEvent;
}
/** Base event type for all {@link AudioTrackList} events. */
interface AudioTrackListEvent<T> extends DOMEvent<T> {
target: AudioTrackList;
}
/**
* Fired when an audio track has been added to the list.
*
* @detail newTrack
*/
interface AudioTrackAddEvent extends AudioTrackListEvent<AudioTrack> {
}
/**
* Fired when an audio track has been removed from the list.
*
* @detail removedTrack
*/
interface AudioTrackRemoveEvent extends AudioTrackListEvent<AudioTrack> {
}
/**
* Fired when the selected audio track has changed.
*
* @detail change
*/
interface AudioTrackChangeEvent extends AudioTrackListEvent<ChangeAudioTrackEventDetail> {
}
/** Detail of an {@link AudioTrackChangeEvent}: previous and current selection. */
interface ChangeAudioTrackEventDetail {
prev: AudioTrack | null;
current: AudioTrack;
}
declare const CROSS_ORIGIN: unique symbol;
declare const READY_STATE: unique symbol;
declare const UPDATE_ACTIVE_CUES: unique symbol;
declare const CAN_LOAD: unique symbol;
declare const ON_MODE_CHANGE: unique symbol;
declare const NATIVE: unique symbol;
declare const NATIVE_HLS: unique symbol;
/** @internal - symbol keys used by {@link TextTrack} internals. */
declare const TextTrackSymbol: {
readonly _crossOrigin: typeof CROSS_ORIGIN;
readonly _readyState: typeof READY_STATE;
readonly _updateActiveCues: typeof UPDATE_ACTIVE_CUES;
readonly _canLoad: typeof CAN_LOAD;
readonly _onModeChange: typeof ON_MODE_CHANGE;
readonly _native: typeof NATIVE;
readonly _nativeHLS: typeof NATIVE_HLS;
};
/**
* Loading state of a text track:
*
* - 0: Not Loading
* - 1: Loading
* - 2: Ready
* - 3: Error
*/
type TextTrackReadyState = 0 | 1 | 2 | 3;
/** Init object for a VTT cue: `startTime`, `endTime`, and `text` are required; all other cue fields are optional. */
interface VTTCueInit extends Omit<Partial<VTTCue$1>, 'startTime' | 'endTime' | 'text'>, Pick<VTTCue$1, 'startTime' | 'endTime' | 'text'> {
}
/** Init object for a VTT region: `id` is required; all other region fields are optional. */
interface VTTRegionInit extends Omit<Partial<VTTRegion>, 'id'>, Pick<VTTRegion, 'id'> {
}
/** Inline VTT content supplied as pre-parsed cues and regions. */
interface VTTContent {
cues?: VTTCueInit[];
regions?: VTTRegionInit[];
}
declare class TextTrack extends EventsTarget<TextTrackEvents> {
static createId(track: TextTrack | TextTrackInit): string;
readonly src?: string;
readonly type?: 'json' | CaptionsFileFormat | CaptionsParserFactory;
readonly encoding?: string;
readonly id = "";
readonly label = "";
readonly language = "";
readonly kind: TextTrackKind;
readonly default = false;
content?: TextTrackInit['content'];
mimeType?: string;
contentLoaded: boolean;
subtitleLoader?: (track: TextTrack) => Promise<string | null> | null;
private _canLoad;
private _currentTime;
private _mode;
private _metadata;
private _regions;
private _cues;
private _activeCues;
/** @internal */
[TextTrackSymbol._readyState]: TextTrackReadyState;
/** @internal */
[TextTrackSymbol._crossOrigin]?: () => string | null;
/** @internal */
[TextTrackSymbol._onModeChange]: (() => void) | null;
/** @internal */
[TextTrackSymbol._native]: {
default?: boolean;
managed?: boolean;
track: {
mode: TextTrackMode;
addCue(cue: any): void;
removeCue(cue: any): void;
};
remove?(): void;
} | null;
get metadata(): Readonly<VTTHeaderMetadata>;
get regions(): ReadonlyArray<VTTRegion>;
get cues(): ReadonlyArray<VTTCue