vidstack
Build awesome media experiences on the web.
import { i as isUndefined, a as isFunction, w as waitTimeout, b as isNull, c as isString, d as deferredPromise, $ as $$_clone, e as $$_effect, f as $$_attr, g as $$_ref, h as $$_create_template, j as computed, E as EventsTarget, D as DOMEvent, k as isNumber, u as useDisposalBin, l as effect, o as onDispose, m as listenEvent, n as isNil, s as setAttribute, p as peek, q as camelToKebabCase } from './maverick.js';
const UA = navigator?.userAgent.toLowerCase();
const IS_IOS = /iphone|ipad|ipod|ios|crios|fxios/i.test(UA);
const IS_IPHONE = /(iphone|ipod)/gi.test(navigator?.platform);
const IS_CHROME = !!window.chrome;
const IS_SAFARI = !!window.safari || IS_IOS;
function canOrientScreen() {
return !isUndefined(screen.orientation) && isFunction(screen.orientation.lock) && isFunction(screen.orientation.unlock);
}
function canPlayHLSNatively(video) {
if (!video)
video = document.createElement("video");
return video.canPlayType("application/vnd.apple.mpegurl").length > 0;
}
function canUsePictureInPicture(video) {
return !!document.pictureInPictureEnabled && !video.disablePictureInPicture;
}
function canUseVideoPresentation(video) {
return isFunction(video.webkitSupportsPresentationMode) && isFunction(video.webkitSetPresentationMode);
}
async function canChangeVolume() {
const video = document.createElement("video");
video.volume = 0.5;
await waitTimeout(0);
return video.volume === 0.5;
}
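/*
* Example (illustrative usage, not part of the library): `canChangeVolume` resolves `false`
* on platforms that lock media volume to the hardware controls (e.g. iOS), so a player UI
* could hide its volume slider accordingly. `volumeSlider` below is a hypothetical element.
*
*   canChangeVolume().then((supported) => {
*     if (!supported) volumeSlider.style.display = "none";
*   });
*/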
function getMediaSource() {
return window?.MediaSource ?? window?.WebKitMediaSource;
}
function getSourceBuffer() {
return window?.SourceBuffer ?? window?.WebKitSourceBuffer;
}
function isHLSSupported() {
const MediaSource = getMediaSource();
if (isUndefined(MediaSource))
return false;
const isTypeSupported = MediaSource && isFunction(MediaSource.isTypeSupported) && MediaSource.isTypeSupported('video/mp4; codecs="avc1.42E01E,mp4a.40.2"');
const SourceBuffer = getSourceBuffer();
const isSourceBufferValid = isUndefined(SourceBuffer) || !isUndefined(SourceBuffer.prototype) && isFunction(SourceBuffer.prototype.appendBuffer) && isFunction(SourceBuffer.prototype.remove);
return !!isTypeSupported && !!isSourceBufferValid;
}
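/*
* Example (illustrative): choosing an HLS playback path with the helpers above. Safari can
* play HLS natively, while other browsers need Media Source Extensions (which is what
* `isHLSSupported` checks) plus a library such as `hls.js`.
*
*   function getHLSPlaybackMode(video) {
*     if (canPlayHLSNatively(video)) return "native";
*     if (isHLSSupported()) return "mse";
*     return "unsupported";
*   }
*/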
function preconnect(url, rel = "preconnect") {
const exists = document.querySelector(`link[href="${url}"]`);
if (!isNull(exists))
return true;
const link = document.createElement("link");
link.rel = rel;
link.href = url;
link.crossOrigin = "true";
document.head.append(link);
return true;
}
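/*
* Example (illustrative): warming up the connection to a CDN before any media or library
* requests are made. The `rel` argument is assigned directly to `link.rel`, so other resource
* hints such as "dns-prefetch" work too; the second URL below is just a placeholder.
*
*   preconnect("https://cdn.jsdelivr.net");
*   preconnect("https://media.example.com", "dns-prefetch");
*/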
const pendingRequests = {};
function loadScript(src) {
if (pendingRequests[src])
return pendingRequests[src].promise;
const promise = deferredPromise(), exists = document.querySelector(`script[src="${src}"]`);
if (!isNull(exists)) {
promise.resolve();
return promise.promise;
}
const script = document.createElement("script");
script.src = src;
script.onload = () => {
promise.resolve();
delete pendingRequests[src];
};
script.onerror = () => {
promise.reject();
delete pendingRequests[src];
};
setTimeout(() => document.head.append(script), 0);
return promise.promise;
}
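/*
* Example (illustrative): loading a UMD build of `hls.js` from a CDN at runtime. The promise
* resolves once the script element has loaded (or immediately if a matching `<script src>`
* already exists) and rejects if the script fails to load.
*
*   loadScript("https://cdn.jsdelivr.net/npm/hls.js@^1.0.0/dist/hls.min.js")
*     .then(() => console.log("Hls on window:", typeof window.Hls))
*     .catch(() => console.error("failed to load hls.js"));
*/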
function getRequestCredentials(crossorigin) {
return crossorigin === "use-credentials" ? "include" : isString(crossorigin) ? "same-origin" : void 0;
}
const AUDIO_EXTENSIONS = /\.(m4a|m4b|mp4a|mpga|mp2|mp2a|mp3|m2a|m3a|wav|weba|aac|oga|spx)($|\?)/i;
const AUDIO_TYPES = /* @__PURE__ */ new Set([
"audio/mpeg",
"audio/ogg",
"audio/3gp",
"audio/mp4",
"audio/webm",
"audio/flac"
]);
const VIDEO_EXTENSIONS = /\.(mp4|og[gv]|webm|mov|m4v)(#t=[,\d+]+)?($|\?)/i;
const VIDEO_TYPES = /* @__PURE__ */ new Set([
"video/mp4",
"video/webm",
"video/3gp",
"video/ogg",
"video/avi",
"video/mpeg"
]);
const HLS_VIDEO_EXTENSIONS = /\.(m3u8)($|\?)/i;
const HLS_VIDEO_TYPES = /* @__PURE__ */ new Set([
// Apple sanctioned
"application/vnd.apple.mpegurl",
// Apple sanctioned for backwards compatibility
"audio/mpegurl",
// Very common
"audio/x-mpegurl",
// Very common
"application/x-mpegurl",
// Included for completeness
"video/x-mpegurl",
"video/mpegurl",
"application/mpegurl"
]);
function isHLSSrc({ src, type }) {
return typeof src === "string" && HLS_VIDEO_EXTENSIONS.test(src) || HLS_VIDEO_TYPES.has(type);
}
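/*
* Example (illustrative): `isHLSSrc` accepts the same `{ src, type }` shape the provider
* loaders receive, matching either the `.m3u8` extension or a known HLS MIME type.
*
*   isHLSSrc({ src: "https://example.com/stream.m3u8", type: "" }); // true
*   isHLSSrc({ src: "blob:...", type: "application/vnd.apple.mpegurl" }); // true
*   isHLSSrc({ src: "https://example.com/video.mp4", type: "video/mp4" }); // false
*/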
function isMediaStream(src) {
return typeof window.MediaStream !== "undefined" && src instanceof window.MediaStream;
}
const $$_templ$1 = /* @__PURE__ */ $$_create_template(`<audio preload="none" aria-hidden="true"></audio>`), $$_templ_2$1 = $$_templ$1;
class AudioProviderLoader {
canPlay({ src, type }) {
return isString(src) ? AUDIO_EXTENSIONS.test(src) || AUDIO_TYPES.has(type) || src.startsWith("blob:") && type === "audio/object" : type === "audio/object";
}
mediaType() {
return "audio";
}
async load() {
if (!this._audio) {
throw Error(
"[vidstack] `<audio>` element was not found - did you forget to include `<media-outlet>`?"
);
}
return new (await Promise.resolve().then(function () { return provider$2; })).AudioProvider(this._audio);
}
render($store) {
return (() => {
const $$_root = $$_clone($$_templ_2$1);
$$_effect(() => $$_attr($$_root, "controls", $store.controls()));
$$_effect(() => $$_attr($$_root, "crossorigin", $store.crossorigin()));
$$_ref($$_root, (el) => void (this._audio = el));
return $$_root;
})();
}
}
const $$_templ = /* @__PURE__ */ $$_create_template(`<video preload="none" aria-hidden="true"></video>`), $$_templ_2 = $$_templ;
class VideoProviderLoader {
canPlay(src) {
return isString(src.src) ? VIDEO_EXTENSIONS.test(src.src) || VIDEO_TYPES.has(src.type) || src.src.startsWith("blob:") && src.type === "video/object" || isHLSSrc(src) && canPlayHLSNatively() : src.type === "video/object";
}
mediaType() {
return "video";
}
async load(context) {
if (!this._video) {
throw Error(
"[vidstack] `<video>` element was not found - did you forget to include `<media-outlet>`?"
);
}
return new (await Promise.resolve().then(function () { return provider$1; })).VideoProvider(this._video, context);
}
render($store) {
const $poster = computed(() => $store.poster() && $store.controls() ? $store.poster() : null);
return (() => {
const $$_root = $$_clone($$_templ_2);
$$_effect(() => $$_attr($$_root, "controls", $store.controls()));
$$_effect(() => $$_attr($$_root, "crossorigin", $store.crossorigin()));
$$_effect(() => $$_attr($$_root, "poster", $poster()));
$$_ref($$_root, (el) => void (this._video = el));
return $$_root;
})();
}
}
const _HLSProviderLoader = class extends VideoProviderLoader {
preconnect() {
preconnect("https://cdn.jsdelivr.net", "preconnect");
}
canPlay({ src, type }) {
return _HLSProviderLoader.supported && isString(src) && (HLS_VIDEO_EXTENSIONS.test(src) || HLS_VIDEO_TYPES.has(type));
}
async load(context) {
if (!this._video) {
throw Error(
"[vidstack] `<video>` element was not found - did you forget to include `<media-outlet>`?"
);
}
return new (await Promise.resolve().then(function () { return provider; })).HLSProvider(this._video, context);
}
};
let HLSProviderLoader = _HLSProviderLoader;
HLSProviderLoader.supported = isHLSSupported();
const ATTACH_VIDEO = Symbol("ATTACH_VIDEO");
const TEXT_TRACK_CROSSORIGIN = Symbol("TEXT_TRACK_CROSSORIGIN");
const TEXT_TRACK_READY_STATE = Symbol("TEXT_TRACK_READY_STATE");
const TEXT_TRACK_UPDATE_ACTIVE_CUES = Symbol("TEXT_TRACK_UPDATE_ACTIVE_CUES");
const TEXT_TRACK_CAN_LOAD = Symbol("TEXT_TRACK_CAN_LOAD");
const TEXT_TRACK_ON_MODE_CHANGE = Symbol("TEXT_TRACK_ON_MODE_CHANGE");
const TEXT_TRACK_NATIVE = Symbol("TEXT_TRACK_NATIVE");
const TEXT_TRACK_NATIVE_HLS = Symbol("TEXT_TRACK_NATIVE_HLS");
function findActiveCue(time, cues) {
for (let i = 0, len = cues.length; i < len; i++) {
if (isCueActive(cues[i], time))
return cues[i];
}
return null;
}
function isCueActive(cue, time) {
return time >= cue.startTime && time < cue.endTime;
}
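/*
* Example (illustrative): both helpers treat a cue as active when `startTime <= time < endTime`,
* so adjacent cues never overlap at their boundary. `VTTCue` is the browser's native constructor
* here, standing in for the cue objects used throughout this module.
*
*   const cues = [new VTTCue(0, 5, "Intro"), new VTTCue(5, 10, "Chapter 1")];
*   isCueActive(cues[0], 5); // false - the end time is exclusive
*   findActiveCue(5, cues)?.text; // "Chapter 1"
*/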
function onTrackChapterChange(tracks, currentTrack, onChange) {
const track = tracks.toArray().find((track2) => track2.kind === "chapters" && track2.mode === "showing");
if (track === currentTrack)
return;
if (!track) {
onChange(null);
return;
}
if (track.readyState === 2) {
onChange(track);
} else {
onChange(null);
track.addEventListener("load", () => onChange(track), { once: true });
}
}
var _a, _b, _c;
class TextTrack extends EventsTarget {
constructor(init) {
super();
this.id = "";
this.label = "";
this.language = "";
this.default = false;
this._canLoad = false;
this._currentTime = 0;
this._mode = "disabled";
this._metadata = {};
this._regions = [];
this._cues = [];
this._activeCues = [];
/* @internal */
this[_a] = 0;
/* @internal */
this[_b] = null;
/* @internal */
this[_c] = null;
for (const prop of Object.keys(init))
this[prop] = init[prop];
if (!this.type)
this.type = "vtt";
if (init.content) {
import('./captions/dev.js').then(({ parseText, VTTCue, VTTRegion }) => {
if (init.type === "json") {
this._parseJSON(init.content, VTTCue, VTTRegion);
} else {
parseText(init.content, { type: init.type }).then(({ cues, regions }) => {
this._cues = cues;
this._regions = regions;
this._readyState();
});
}
});
} else if (!init.src)
this[TEXT_TRACK_READY_STATE] = 2;
if (isTrackCaptionKind(this) && !this.label) {
throw Error(`[vidstack]: captions text track created without label: \`${this.src}\``);
}
}
static createId(track) {
return `id::${track.type}-${track.kind}-${track.src ?? track.label}`;
}
get metadata() {
return this._metadata;
}
get regions() {
return this._regions;
}
get cues() {
return this._cues;
}
get activeCues() {
return this._activeCues;
}
/**
* - 0: Not Loading
* - 1: Loading
* - 2: Ready
* - 3: Error
*/
get readyState() {
return this[TEXT_TRACK_READY_STATE];
}
get mode() {
return this._mode;
}
set mode(mode) {
this.setMode(mode);
}
addCue(cue, trigger) {
let i = 0, length = this._cues.length;
for (i = 0; i < length; i++)
if (cue.endTime <= this._cues[i].startTime)
break;
if (i === length)
this._cues.push(cue);
else
this._cues.splice(i, 0, cue);
if (trigger?.type !== "cuechange") {
this[TEXT_TRACK_NATIVE]?.track.addCue(cue);
}
this.dispatchEvent(new DOMEvent("add-cue", { detail: cue, trigger }));
if (isCueActive(cue, this._currentTime)) {
this[TEXT_TRACK_UPDATE_ACTIVE_CUES](this._currentTime, trigger);
}
}
removeCue(cue, trigger) {
const index = this._cues.indexOf(cue);
if (index >= 0) {
const isActive = this._activeCues.includes(cue);
this._cues.splice(index, 1);
this[TEXT_TRACK_NATIVE]?.track.removeCue(cue);
this.dispatchEvent(new DOMEvent("remove-cue", { detail: cue, trigger }));
if (isActive) {
this[TEXT_TRACK_UPDATE_ACTIVE_CUES](this._currentTime, trigger);
}
}
}
setMode(mode, trigger) {
if (this._mode === mode)
return;
this._mode = mode;
if (mode === "disabled") {
this._activeCues = [];
this._activeCuesChanged();
} else if (this.readyState === 2) {
this[TEXT_TRACK_UPDATE_ACTIVE_CUES](this._currentTime, trigger);
} else {
this._load();
}
this.dispatchEvent(new DOMEvent("mode-change", { detail: this, trigger }));
this[TEXT_TRACK_ON_MODE_CHANGE]?.();
}
/* @internal */
[(_a = TEXT_TRACK_READY_STATE, _b = TEXT_TRACK_ON_MODE_CHANGE, _c = TEXT_TRACK_NATIVE, TEXT_TRACK_UPDATE_ACTIVE_CUES)](currentTime, trigger) {
this._currentTime = currentTime;
if (this.mode === "disabled" || !this._cues.length)
return;
const activeCues = [];
for (let i = 0, length = this._cues.length; i < length; i++) {
const cue = this._cues[i];
if (isCueActive(cue, currentTime))
activeCues.push(cue);
}
let changed = activeCues.length !== this._activeCues.length;
if (!changed) {
for (let i = 0; i < activeCues.length; i++) {
if (!this._activeCues.includes(activeCues[i])) {
changed = true;
break;
}
}
}
this._activeCues = activeCues;
if (changed)
this._activeCuesChanged(trigger);
}
/* @internal */
[TEXT_TRACK_CAN_LOAD]() {
this._canLoad = true;
if (this._mode !== "disabled")
this._load();
}
async _load() {
if (!this._canLoad || !this.src || this[TEXT_TRACK_READY_STATE] > 0)
return;
this[TEXT_TRACK_READY_STATE] = 1;
this.dispatchEvent(new DOMEvent("load-start"));
try {
const { parseResponse, VTTCue, VTTRegion } = await import('./captions/dev.js'), crossorigin = this[TEXT_TRACK_CROSSORIGIN]?.();
const response = fetch(this.src, {
headers: this.type === "json" ? { "Content-Type": "application/json" } : void 0,
credentials: getRequestCredentials(crossorigin)
});
if (this.type === "json") {
this._parseJSON(await (await response).text(), VTTCue, VTTRegion);
} else {
const { errors, metadata, regions, cues } = await parseResponse(response, {
type: this.type,
encoding: this.encoding
});
if (errors[0]?.code === 0) {
throw errors[0];
} else {
this._metadata = metadata;
this._regions = regions;
this._cues = cues;
}
}
this._readyState();
} catch (error) {
this._errorState(error);
}
}
_readyState() {
this[TEXT_TRACK_READY_STATE] = 2;
if (!this.src || this.type !== "vtt") {
const nativeTrack = this[TEXT_TRACK_NATIVE]?.track;
if (nativeTrack)
for (const cue of this._cues)
nativeTrack.addCue(cue);
}
const loadEvent = new DOMEvent("load");
this[TEXT_TRACK_UPDATE_ACTIVE_CUES](this._currentTime, loadEvent);
this.dispatchEvent(loadEvent);
}
_errorState(error) {
this[TEXT_TRACK_READY_STATE] = 3;
this.dispatchEvent(new DOMEvent("error", { detail: error }));
}
_parseJSON(json, VTTCue, VTTRegion) {
try {
json = JSON.parse(json);
if (json.regions) {
this._regions = json.regions.map((json2) => Object.assign(new VTTRegion(), json2));
}
if (json.cues) {
this._cues = json.cues.filter((json2) => isNumber(json2.startTime) && isNumber(json2.endTime)).map((json2) => Object.assign(new VTTCue(0, 0, ""), json2));
}
} catch (error) {
{
console.error(`[vidstack] failed to parse JSON captions at: \`${this.src}\`
`, error);
}
this._errorState(error);
}
}
_activeCuesChanged(trigger) {
this.dispatchEvent(new DOMEvent("cue-change", { trigger }));
}
}
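/*
* Example (illustrative): creating a standalone text track from inline VTT content and reacting
* to its lifecycle events. Every field and event used here is defined by the `TextTrack` class
* above; how the track is registered with a player is up to the host application.
*
*   const track = new TextTrack({
*     kind: "subtitles",
*     label: "English",
*     language: "en-US",
*     type: "vtt",
*     content: "WEBVTT\n\n00:00.000 --> 00:05.000\nHello!",
*   });
*
*   track.addEventListener("load", () => console.log("cues parsed:", track.cues.length));
*   track.addEventListener("cue-change", () => console.log(track.activeCues));
*   track.mode = "showing";
*/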
const captionRE = /captions|subtitles/;
function isTrackCaptionKind(track) {
return captionRE.test(track.kind);
}
function round(num, decimalPlaces = 2) {
return Number(num.toFixed(decimalPlaces));
}
function getNumberOfDecimalPlaces(num) {
return String(num).split(".")[1]?.length ?? 0;
}
function clampNumber(min, value, max) {
return Math.max(min, Math.min(max, value));
}
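/*
* Example (illustrative): small numeric helpers used throughout the module. Note the
* (min, value, max) argument order of `clampNumber`.
*
*   round(3.14159, 3); // 3.142
*   getNumberOfDecimalPlaces(1.25); // 2
*   clampNumber(0, 1.2, 1); // 1
*/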
function isHTMLAudioElement(element) {
return element instanceof HTMLAudioElement;
}
function isHTMLVideoElement(element) {
return element instanceof HTMLVideoElement;
}
function isHTMLMediaElement(element) {
return isHTMLAudioElement(element) || isHTMLVideoElement(element);
}
const LIST_ADD = Symbol("LIST_ADD");
const LIST_REMOVE = Symbol("LIST_REMOVE");
const LIST_RESET = Symbol("LIST_RESET");
const LIST_SELECT = Symbol("LIST_SELECT");
const LIST_READONLY = Symbol("LIST_READONLY");
const LIST_SET_READONLY = Symbol("LIST_SET_READONLY");
const LIST_ON_RESET = Symbol("LIST_ON_RESET");
const LIST_ON_REMOVE = Symbol("LIST_ON_REMOVE");
const LIST_ON_USER_SELECT = Symbol("LIST_ON_USER_SELECT");
const SET_AUTO_QUALITY = Symbol("SET_AUTO_QUALITY");
const ENABLE_AUTO_QUALITY = Symbol("ENABLE_AUTO_QUALITY");
function coerceToError(error) {
return error instanceof Error ? error : Error(JSON.stringify(error));
}
class RAFLoop {
constructor(_callback) {
this._callback = _callback;
}
_start() {
if (!isUndefined(this._id))
return;
this._loop();
}
_stop() {
if (isNumber(this._id))
window.cancelAnimationFrame(this._id);
this._id = void 0;
}
_loop() {
this._id = window.requestAnimationFrame(() => {
if (isUndefined(this._id))
return;
this._callback();
this._loop();
});
}
}
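/*
* Example (illustrative): `RAFLoop` simply re-invokes a callback on every animation frame
* between `_start()` and `_stop()`. `HTMLMediaEvents` below uses it to sample `currentTime`
* more smoothly than the native `timeupdate` event allows. `video` here is a hypothetical
* media element.
*
*   const loop = new RAFLoop(() => console.log(video.currentTime));
*   loop._start(); // begin polling once per animation frame
*   loop._stop(); // cancel the pending frame and stop
*/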
class HTMLMediaEvents {
constructor(_provider, _context) {
this._provider = _provider;
this._context = _context;
this._disposal = useDisposalBin();
this._waiting = false;
this._attachedLoadStart = false;
this._attachedCanPlay = false;
this._timeRAF = new RAFLoop(this._onRAF.bind(this));
this._handlers = /* @__PURE__ */ new Map();
this._handleDevEvent = this._onDevEvent.bind(this);
this._attachInitialListeners();
effect(this._attachTimeUpdate.bind(this));
onDispose(this._onDispose.bind(this));
}
get _media() {
return this._provider.media;
}
get _delegate() {
return this._context.delegate;
}
_onDispose() {
this._timeRAF._stop();
this._disposal.empty();
}
/**
* The `timeupdate` event fires surprisingly infrequently during playback, so a progress bar
* (or anything else synced to `currentTime`) moves in a choppy fashion. This resolves that
* by sampling the current time inside a `requestAnimationFrame` loop.
*/
_onRAF() {
const newTime = this._provider.currentTime;
if (this._context.$store.currentTime() !== newTime)
this._updateCurrentTime(newTime);
}
_attachInitialListeners() {
this._attachEventListener("loadstart", this._onLoadStart);
this._attachEventListener("abort", this._onAbort);
this._attachEventListener("emptied", this._onEmptied);
this._attachEventListener("error", this._onError);
this._context.logger?.debug("attached initial media event listeners");
}
_attachLoadStartListeners() {
if (this._attachedLoadStart)
return;
this._disposal.add(
this._attachEventListener("loadeddata", this._onLoadedData),
this._attachEventListener("loadedmetadata", this._onLoadedMetadata),
this._attachEventListener("canplay", this._onCanPlay),
this._attachEventListener("canplaythrough", this._onCanPlayThrough),
this._attachEventListener("durationchange", this._onDurationChange),
this._attachEventListener("play", this._onPlay),
this._attachEventListener("progress", this._onProgress),
this._attachEventListener("stalled", this._onStalled),
this._attachEventListener("suspend", this._onSuspend)
);
this._attachedLoadStart = true;
}
_attachCanPlayListeners() {
if (this._attachedCanPlay)
return;
this._disposal.add(
this._attachEventListener("pause", this._onPause),
this._attachEventListener("playing", this._onPlaying),
this._attachEventListener("ratechange", this._onRateChange),
this._attachEventListener("seeked", this._onSeeked),
this._attachEventListener("seeking", this._onSeeking),
this._attachEventListener("ended", this._onEnded),
this._attachEventListener("volumechange", this._onVolumeChange),
this._attachEventListener("waiting", this._onWaiting)
);
this._attachedCanPlay = true;
}
_attachEventListener(eventType, handler) {
this._handlers.set(eventType, handler);
return listenEvent(
this._media,
eventType,
this._handleDevEvent
);
}
_onDevEvent(event2) {
this._context.logger?.debugGroup(`\u{1F4FA} fired \`${event2.type}\``).labelledLog("Event", event2).labelledLog("Media Store", { ...this._context.$store }).dispatch();
this._handlers.get(event2.type)?.call(this, event2);
}
_updateCurrentTime(time, trigger) {
this._delegate._dispatch("time-update", {
// Avoid errors where `currentTime` can have higher precision.
detail: {
currentTime: Math.min(time, this._context.$store.seekableEnd()),
played: this._media.played
},
trigger
});
}
_onLoadStart(event2) {
if (this._media.networkState === 3) {
this._onAbort(event2);
return;
}
this._attachLoadStartListeners();
this._delegate._dispatch("load-start", { trigger: event2 });
}
_onAbort(event2) {
this._delegate._dispatch("abort", { trigger: event2 });
}
_onEmptied(event2) {
this._delegate._dispatch("emptied", { trigger: event2 });
}
_onLoadedData(event2) {
this._delegate._dispatch("loaded-data", { trigger: event2 });
}
_onLoadedMetadata(event2) {
this._onStreamTypeChange();
this._attachCanPlayListeners();
this._delegate._dispatch("volume-change", {
detail: {
volume: this._media.volume,
muted: this._media.muted
}
});
this._delegate._dispatch("loaded-metadata", { trigger: event2 });
if (IS_SAFARI && isHLSSrc(this._context.$store.source())) {
this._delegate._ready(this._getCanPlayDetail(), event2);
}
}
_getCanPlayDetail() {
return {
duration: this._media.duration,
buffered: this._media.buffered,
seekable: this._media.seekable
};
}
_onStreamTypeChange() {
const isLive = !Number.isFinite(this._media.duration);
this._delegate._dispatch("stream-type-change", {
detail: isLive ? "live" : "on-demand"
});
}
_onPlay(event2) {
if (!this._context.$store.canPlay())
return;
this._delegate._dispatch("play", { trigger: event2 });
}
_onPause(event2) {
if (this._media.readyState === 1 && !this._waiting)
return;
this._waiting = false;
this._timeRAF._stop();
this._delegate._dispatch("pause", { trigger: event2 });
}
_onCanPlay(event2) {
this._delegate._ready(this._getCanPlayDetail(), event2);
}
_onCanPlayThrough(event2) {
if (this._context.$store.started())
return;
this._delegate._dispatch("can-play-through", {
trigger: event2,
detail: this._getCanPlayDetail()
});
}
_onPlaying(event2) {
this._waiting = false;
this._delegate._dispatch("playing", { trigger: event2 });
this._timeRAF._start();
}
_onStalled(event2) {
this._delegate._dispatch("stalled", { trigger: event2 });
if (this._media.readyState < 3) {
this._waiting = true;
this._delegate._dispatch("waiting", { trigger: event2 });
}
}
_onWaiting(event2) {
if (this._media.readyState < 3) {
this._waiting = true;
this._delegate._dispatch("waiting", { trigger: event2 });
}
}
_onEnded(event2) {
this._timeRAF._stop();
this._updateCurrentTime(this._media.duration, event2);
this._delegate._dispatch("end", { trigger: event2 });
if (this._context.$store.loop()) {
this._onLoop();
} else {
this._delegate._dispatch("ended", { trigger: event2 });
}
}
_attachTimeUpdate() {
if (this._context.$store.paused()) {
listenEvent(this._media, "timeupdate", this._onTimeUpdate.bind(this));
}
}
_onTimeUpdate(event2) {
this._updateCurrentTime(this._media.currentTime, event2);
}
_onDurationChange(event2) {
this._onStreamTypeChange();
if (this._context.$store.ended()) {
this._updateCurrentTime(this._media.duration, event2);
}
this._delegate._dispatch("duration-change", {
detail: this._media.duration,
trigger: event2
});
}
_onVolumeChange(event2) {
this._delegate._dispatch("volume-change", {
detail: {
volume: this._media.volume,
muted: this._media.muted
},
trigger: event2
});
}
_onSeeked(event2) {
this._updateCurrentTime(this._media.currentTime, event2);
this._delegate._dispatch("seeked", {
detail: this._media.currentTime,
trigger: event2
});
if (Math.trunc(this._media.currentTime) === Math.trunc(this._media.duration) && getNumberOfDecimalPlaces(this._media.duration) > getNumberOfDecimalPlaces(this._media.currentTime)) {
this._updateCurrentTime(this._media.duration, event2);
if (!this._media.ended) {
this._context.player.dispatchEvent(
new DOMEvent("media-play-request", {
trigger: event2
})
);
}
}
}
_onSeeking(event2) {
this._delegate._dispatch("seeking", {
detail: this._media.currentTime,
trigger: event2
});
}
_onProgress(event2) {
this._delegate._dispatch("progress", {
detail: {
buffered: this._media.buffered,
seekable: this._media.seekable
},
trigger: event2
});
}
_onLoop() {
const hasCustomControls = isNil(this._media.controls);
if (hasCustomControls)
this._media.controls = false;
this._context.player.dispatchEvent(new DOMEvent("media-loop-request"));
}
_onSuspend(event2) {
this._delegate._dispatch("suspend", { trigger: event2 });
}
_onRateChange(event2) {
this._delegate._dispatch("rate-change", {
detail: this._media.playbackRate,
trigger: event2
});
}
_onError(event2) {
const error = this._media.error;
if (!error)
return;
this._delegate._dispatch("error", {
detail: {
message: error.message,
code: error.code,
mediaError: error
},
trigger: event2
});
}
}
class NativeAudioTracks {
constructor(_provider, _context) {
this._provider = _provider;
this._context = _context;
this._nativeTracks.onaddtrack = this._onAddNativeTrack.bind(this);
this._nativeTracks.onremovetrack = this._onRemoveNativeTrack.bind(this);
this._nativeTracks.onchange = this._onChangeNativeTrack.bind(this);
listenEvent(this._context.audioTracks, "change", this._onChangeTrack.bind(this));
}
get _nativeTracks() {
return this._provider.media.audioTracks;
}
_onAddNativeTrack(event) {
const _track = event.track;
if (_track.label === "")
return;
const audioTrack = {
id: _track.id + "",
label: _track.label,
language: _track.language,
kind: _track.kind,
selected: false
};
this._context.audioTracks[LIST_ADD](audioTrack, event);
if (_track.enabled)
audioTrack.selected = true;
}
_onRemoveNativeTrack(event) {
const track = this._context.audioTracks.getById(event.track.id);
if (track)
this._context.audioTracks[LIST_REMOVE](track, event);
}
_onChangeNativeTrack(event) {
let enabledTrack = this._getEnabledNativeTrack();
if (!enabledTrack)
return;
const track = this._context.audioTracks.getById(enabledTrack.id);
if (track)
this._context.audioTracks[LIST_SELECT](track, true, event);
}
_getEnabledNativeTrack() {
return Array.from(this._nativeTracks).find((track) => track.enabled);
}
_onChangeTrack(event) {
const { current } = event.detail;
if (!current)
return;
const track = this._nativeTracks.getTrackById(current.id);
if (track) {
const prev = this._getEnabledNativeTrack();
if (prev)
prev.enabled = false;
track.enabled = true;
}
}
}
class HTMLMediaProvider {
constructor(_media) {
this._media = _media;
}
setup(context) {
new HTMLMediaEvents(this, context);
if ("audioTracks" in this.media)
new NativeAudioTracks(this, context);
}
get type() {
return "";
}
get media() {
return this._media;
}
get paused() {
return this._media.paused;
}
get muted() {
return this._media.muted;
}
set muted(muted) {
this._media.muted = muted;
}
get volume() {
return this._media.volume;
}
set volume(volume) {
this._media.volume = volume;
}
get currentTime() {
return this._media.currentTime;
}
set currentTime(time) {
this._media.currentTime = time;
}
get playsinline() {
return this._media.hasAttribute("playsinline");
}
set playsinline(playsinline) {
setAttribute(this._media, "playsinline", playsinline);
}
get playbackRate() {
return this._media.playbackRate;
}
set playbackRate(rate) {
this._media.playbackRate = rate;
}
async play() {
return this._media.play();
}
async pause() {
return this._media.pause();
}
async loadSource({ src }, preload) {
this._media.preload = preload;
if (isMediaStream(src)) {
this._media.srcObject = src;
} else {
this._media.srcObject = null;
this._media.src = isString(src) ? src : window.URL.createObjectURL(src);
}
this._media.load();
}
}
class AudioProvider extends HTMLMediaProvider {
constructor() {
super(...arguments);
this.$$PROVIDER_TYPE = "AUDIO";
}
get type() {
return "audio";
}
setup(context) {
super.setup(context);
if (this.type === "audio")
context.delegate._dispatch("provider-setup", { detail: this });
}
/**
* The native HTML `<audio>` element.
*
* @see {@link https://developer.mozilla.org/en-US/docs/Web/API/HTMLAudioElement}
*/
get audio() {
return this._media;
}
}
var provider$2 = /*#__PURE__*/Object.freeze({
__proto__: null,
AudioProvider: AudioProvider
});
class NativeHLSTextTracks {
constructor(_video, _context) {
this._video = _video;
this._context = _context;
_video.textTracks.onaddtrack = this._onAddTrack.bind(this);
onDispose(this._onDispose.bind(this));
}
_onAddTrack(event) {
const nativeTrack = event.track;
if (!nativeTrack || findTextTrackElement(this._video, nativeTrack))
return;
const track = new TextTrack({
id: nativeTrack.id,
kind: nativeTrack.kind,
label: nativeTrack.label,
language: nativeTrack.language,
type: "vtt"
});
track[TEXT_TRACK_NATIVE] = { track: nativeTrack };
track[TEXT_TRACK_READY_STATE] = 2;
track[TEXT_TRACK_NATIVE_HLS] = true;
let lastIndex = 0;
const onCueChange = (event2) => {
if (!nativeTrack.cues)
return;
for (let i = lastIndex; i < nativeTrack.cues.length; i++) {
track.addCue(nativeTrack.cues[i], event2);
lastIndex++;
}
};
onCueChange(event);
nativeTrack.oncuechange = onCueChange;
this._context.textTracks.add(track, event);
track.setMode(nativeTrack.mode, event);
}
_onDispose() {
this._video.textTracks.onaddtrack = null;
for (const track of this._context.textTracks) {
const nativeTrack = track[TEXT_TRACK_NATIVE]?.track;
if (nativeTrack?.oncuechange)
nativeTrack.oncuechange = null;
}
}
}
function findTextTrackElement(video, track) {
return Array.from(video.children).find((el) => el.track === track);
}
class VideoPictureInPicture {
constructor(_video, _media) {
this._video = _video;
this._media = _media;
this._onChange = (active, event) => {
this._media.delegate._dispatch("picture-in-picture-change", {
detail: active,
trigger: event
});
};
listenEvent(this._video, "enterpictureinpicture", this._onEnter.bind(this));
listenEvent(this._video, "leavepictureinpicture", this._onExit.bind(this));
}
get active() {
return document.pictureInPictureElement === this._video;
}
get supported() {
return canUsePictureInPicture(this._video);
}
async enter() {
return this._video.requestPictureInPicture();
}
exit() {
return document.exitPictureInPicture();
}
_onEnter(event) {
this._onChange(true, event);
}
_onExit(event) {
this._onChange(false, event);
}
}
class VideoPresentation {
constructor(_video, _media) {
this._video = _video;
this._media = _media;
this._mode = "inline";
listenEvent(this._video, "webkitpresentationmodechanged", this._onModeChange.bind(this));
}
get _supported() {
return canUseVideoPresentation(this._video);
}
async _setPresentationMode(mode) {
if (this._mode === mode)
return;
this._video.webkitSetPresentationMode(mode);
}
_onModeChange(event) {
const prevMode = this._mode;
this._mode = this._video.webkitPresentationMode;
{
this._media.logger?.infoGroup("presentation mode change").labelledLog("Mode", this._mode).labelledLog("Event", event).dispatch();
}
this._media.player?.dispatchEvent(
new DOMEvent("video-presentation-change", {
detail: this._mode,
trigger: event
})
);
["fullscreen", "picture-in-picture"].forEach((type) => {
if (this._mode === type || prevMode === type) {
this._media.delegate._dispatch(`${type}-change`, {
detail: this._mode === type,
trigger: event
});
}
});
}
}
class FullscreenPresentationAdapter {
constructor(_presentation) {
this._presentation = _presentation;
}
get active() {
return this._presentation._mode === "fullscreen";
}
get supported() {
return this._presentation._supported;
}
async enter() {
this._presentation._setPresentationMode("fullscreen");
}
async exit() {
this._presentation._setPresentationMode("inline");
}
}
class PIPPresentationAdapter {
constructor(_presentation) {
this._presentation = _presentation;
}
get active() {
return this._presentation._mode === "picture-in-picture";
}
get supported() {
return this._presentation._supported;
}
async enter() {
this._presentation._setPresentationMode("picture-in-picture");
}
async exit() {
this._presentation._setPresentationMode("inline");
}
}
class VideoProvider extends HTMLMediaProvider {
constructor(video, context) {
super(video);
this.$$PROVIDER_TYPE = "VIDEO";
if (canUseVideoPresentation(video)) {
const presentation = new VideoPresentation(video, context);
this.fullscreen = new FullscreenPresentationAdapter(presentation);
this.pictureInPicture = new PIPPresentationAdapter(presentation);
} else if (canUsePictureInPicture(video)) {
this.pictureInPicture = new VideoPictureInPicture(video, context);
}
}
get type() {
return "video";
}
setup(context) {
super.setup(context);
if (canPlayHLSNatively(this.video)) {
new NativeHLSTextTracks(this.video, context);
}
context.textRenderers[ATTACH_VIDEO](this.video);
onDispose(() => {
context.textRenderers[ATTACH_VIDEO](null);
});
if (this.type === "video")
context.delegate._dispatch("provider-setup", { detail: this });
}
/**
* The native HTML `<video>` element.
*
* @see {@link https://developer.mozilla.org/en-US/docs/Web/API/HTMLVideoElement}
*/
get video() {
return this._media;
}
}
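/*
* Example (illustrative): fullscreen and picture-in-picture on a `VideoProvider` are exposed
* through the adapters constructed above, so they only exist when the underlying APIs do
* (WebKit presentation mode, or the standard picture-in-picture API). `videoProvider` below is
* an already-created `VideoProvider` instance.
*
*   if (videoProvider.fullscreen?.supported) await videoProvider.fullscreen.enter();
*   if (videoProvider.pictureInPicture?.supported) await videoProvider.pictureInPicture.enter();
*/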
var provider$1 = /*#__PURE__*/Object.freeze({
__proto__: null,
VideoProvider: VideoProvider
});
const toDOMEventType = (type) => camelToKebabCase(type);
class HLSController {
constructor(_video) {
this._video = _video;
this._instance = null;
this._stopLiveSync = null;
this._config = {};
this._callbacks = /* @__PURE__ */ new Set();
}
get instance() {
return this._instance;
}
setup(ctor, context) {
this._context = context;
const isLive = peek(context.$store.streamType).includes("live"), isLiveLowLatency = peek(context.$store.streamType).includes("ll-");
this._instance = new ctor({
lowLatencyMode: isLiveLowLatency,
backBufferLength: isLiveLowLatency ? 4 : isLive ? 8 : void 0,
renderTextTracksNatively: false,
...this._config
});
const dispatcher = this._dispatchHLSEvent.bind(this);
for (const event of Object.values(ctor.Events))
this._instance.on(event, dispatcher);
this._instance.on(ctor.Events.ERROR, this._onError.bind(this));
for (const callback of this._callbacks)
callback(this._instance);
context.player.dispatchEvent(new DOMEvent("hls-instance", { detail: this._instance }));
this._instance.attachMedia(this._video);
this._instance.on(ctor.Events.AUDIO_TRACK_SWITCHED, this._onAudioSwitch.bind(this));
this._instance.on(ctor.Events.LEVEL_SWITCHED, this._onLevelSwitched.bind(this));
this._instance.on(ctor.Events.LEVEL_LOADED, this._onLevelLoaded.bind(this));
this._instance.on(ctor.Events.NON_NATIVE_TEXT_TRACKS_FOUND, this._onTracksFound.bind(this));
this._instance.on(ctor.Events.CUES_PARSED, this._onCuesParsed.bind(this));
context.qualities[ENABLE_AUTO_QUALITY] = this._enableAutoQuality.bind(this);
listenEvent(context.qualities, "change", this._onQualityChange.bind(this));
listenEvent(context.audioTracks, "change", this._onAudioChange.bind(this));
this._stopLiveSync = effect(this._liveSync.bind(this));
}
_liveSync() {
if (!this._context.$store.live())
return;
const raf = new RAFLoop(this._liveSyncPosition.bind(this));
raf._start();
return raf._stop.bind(raf);
}
_liveSyncPosition() {
this._context.$store.liveSyncPosition.set(this._instance?.liveSyncPosition ?? Infinity);
}
_dispatchHLSEvent(eventType, detail) {
this._context.player.dispatchEvent(new DOMEvent(toDOMEventType(eventType), { detail }));
}
_onTracksFound(eventType, data) {
const event = new DOMEvent(eventType, { detail: data });
let currentTrack = -1;
for (let i = 0; i < data.tracks.length; i++) {
const nonNativeTrack = data.tracks[i], init = nonNativeTrack.subtitleTrack ?? nonNativeTrack.closedCaptions, track = new TextTrack({
id: `hls-${nonNativeTrack.kind}${i}`,
src: init?.url,
label: nonNativeTrack.label,
language: init?.lang,
kind: nonNativeTrack.kind
});
track[TEXT_TRACK_READY_STATE] = 2;
track[TEXT_TRACK_ON_MODE_CHANGE] = () => {
if (track.mode === "showing") {
this._instance.subtitleTrack = i;
currentTrack = i;
} else if (currentTrack === i) {
this._instance.subtitleTrack = -1;
currentTrack = -1;
}
};
if (nonNativeTrack.default)
track.setMode("showing", event);
this._context.textTracks.add(track, event);
}
}
_onCuesParsed(eventType, data) {
const track = this._context.textTracks.getById(`hls-${data.track}`);
if (!track)
return;
const event = new DOMEvent(eventType, { detail: data });
for (const cue of data.cues) {
cue.positionAlign = "auto";
track.addCue(cue, event);
}
}
_onAudioSwitch(eventType, data) {
const track = this._context.audioTracks[data.id];
if (track) {
this._context.audioTracks[LIST_SELECT](
track,
true,
new DOMEvent(eventType, { detail: data })
);
}
}
_onLevelSwitched(eventType, data) {
const quality = this._context.qualities[data.level];
if (quality) {
this._context.qualities[LIST_SELECT](
quality,
true,
new DOMEvent(eventType, { detail: data })
);
}
}
_onLevelLoaded(eventType, data) {
if (this._context.$store.canPlay())
return;
const { type, live, totalduration: duration } = data.details;
const event = new DOMEvent(eventType, { detail: data });
this._context.delegate._dispatch("stream-type-change", {
detail: live ? type === "EVENT" && Number.isFinite(duration) ? "live:dvr" : "live" : "on-demand",
trigger: event
});
this._context.delegate._dispatch("duration-change", { detail: duration, trigger: event });
const media = this._instance.media;
if (this._instance.currentLevel === -1) {
this._context.qualities[SET_AUTO_QUALITY](true, event);
}
for (const track of this._instance.audioTracks) {
this._context.audioTracks[LIST_ADD](
{
id: track.id + "",
label: track.name,
language: track.lang || "",
kind: "main"
},
event
);
}
for (const level of this._instance.levels) {
this._context.qualities[LIST_ADD](
{
width: level.width,
height: level.height,
codec: level.codecSet,
bitrate: level.bitrate
},
event
);
}
media.dispatchEvent(new DOMEvent("canplay", { trigger: event }));
}
_onError(eventType, data) {
{
this._context.logger?.errorGroup(`HLS error \`${eventType}\``).labelledLog("Media Element", this._instance?.media).labelledLog("HLS Instance", this._instance).labelledLog("Event Type", eventType).labelledLog("Data", data).labelledLog("Src", peek(this._context.$store.source)).labelledLog("Media Store", { ...this._context.$store }).dispatch();
}
if (data.fatal) {
switch (data.type) {
case "networkError":
this._instance?.startLoad();
break;
case "mediaError":
this._instance?.recoverMediaError();
break;
default:
this._instance?.destroy();
this._instance = null;
break;
}
}
}
_enableAutoQuality() {
if (this._instance)
this._instance.currentLevel = -1;
}
_onQualityChange() {
const { qualities } = this._context;
if (!this._instance || qualities.auto)
return;
this._instance[qualities.switch + "Level"] = qualities.selectedIndex;
if (IS_CHROME)
this._video.currentTime = this._video.currentTime;
}
_onAudioChange() {
const { audioTracks } = this._context;
if (this._instance && this._instance.audioTrack !== audioTracks.selectedIndex) {
this._instance.audioTrack = audioTracks.selectedIndex;
}
}
_destroy() {
if (this._context)
this._context.qualities[ENABLE_AUTO_QUALITY] = void 0;
this._instance?.destroy();
this._instance = null;
this._stopLiveSync?.();
this._stopLiveSync = null;
this._context?.logger?.info("\u{1F3D7}\uFE0F Destroyed HLS instance");
}
}
class HLSLibLoader {
constructor(_lib, _context, _callback) {
this._lib = _lib;
this._context = _context;
this._callback = _callback;
this._startLoading();
}
async _startLoading() {
this._context.logger?.info("\u{1F3D7}\uFE0F Loading HLS Library");
const callbacks = {
onLoadStart: this._onLoadStart.bind(this),
onLoaded: this._onLoaded.bind(this),
onLoadError: this._onLoadError.bind(this)
};
let ctor = await loadHLSScript(this._lib, callbacks);
if (isUndefined(ctor) && !isString(this._lib))
ctor = await importHLS(this._lib, callbacks);
if (!ctor)
return null;
if (!ctor.isSupported()) {
const message = "[vidstack]: `hls.js` is not supported in this environment";
this._context.logger?.error(message);
this._context.player.dispatchEvent(new DOMEvent("hls-unsupported"));
this._context.delegate._dispatch("error", { detail: { message, code: 4 } });
return null;
}
return ctor;
}
_onLoadStart() {
{
this._context.logger?.infoGroup("Starting to load `hls.js`").labelledLog("URL", this._lib).dispatch();
}
this._context.player.dispatchEvent(new DOMEvent("hls-lib-load-start"));
}
_onLoaded(ctor) {
{
this._context.logger?.infoGroup("Loaded `hls.js`").labelledLog("Library", this._lib).labelledLog("Constructor", ctor).dispatch();
}
this._context.player.dispatchEvent(
new DOMEvent("hls-lib-loaded", {
detail: ctor
})
);
this._callback(ctor);
}
_onLoadError(e) {
const error = coerceToError(e);
{
this._context.logger?.errorGroup("Failed to load `hls.js`").labelledLog("Library", this._lib).labelledLog("Error", e).dispatch();
}
this._context.player.dispatchEvent(
new DOMEvent("hls-lib-load-error", {
detail: error
})
);
this._context.delegate._dispatch("error", {
detail: { message: error.message, code: 4 }
});
}
}
async function importHLS(loader, callbacks = {}) {
if (isUndefined(loader))
return void 0;
callbacks.onLoadStart?.();
if (loader.prototype && loader.prototype !== Function) {
callbacks.onLoaded?.(loader);
return loader;
}
try {
const ctor = (await loader())?.default;
if (ctor && !!ctor.isSupported) {
callbacks.onLoaded?.(ctor);
} else {
throw Error(
"[vidstack] failed importing `hls.js`. Dynamic import returned invalid constructor."
);
}
return ctor;
} catch (err) {
callbacks.onLoadError?.(err);
}
return void 0;
}
async function loadHLSScript(src, callbacks = {}) {
if (!isString(src))
return void 0;
callbacks.onLoadStart?.();
try {
await loadScript(src);
if (!isFunction(window.Hls)) {
throw Error(
"[vidstack] failed loading `hls.js`. Could not find a valid `Hls` constructor on window"
);
}
const ctor = window.Hls;
callbacks.onLoaded?.(ctor);
return ctor;
} catch (err) {
callbacks.onLoadError?.(err);
}
return void 0;
}
const JS_DELIVR_CDN = "https://cdn.jsdelivr.net";
class HLSProvider extends VideoProvider {
constructor() {
super(...arguments);
this.$$PROVIDER_TYPE = "HLS";
this._ctor = null;
this._controller = new HLSController(this.video);
this._library = `${JS_DELIVR_CDN}/npm/hls.js@^1.0.0/dist/hls.js`;
}
/**
* The `hls.js` constructor.
*/
get ctor() {
return this._ctor;
}
/**
* The current `hls.js` instance.
*/
get instance() {
return this._controller.instance;
}
get type() {
return "hls";
}
get canLiveSync() {
return true;
}
/**
* The `hls.js` configuration object.
*
* @see {@link https://github.com/video-dev/hls.js/blob/master/docs/API.md#fine-tuning}
*/
get config() {
return this._controller._config;
}
set config(config) {
this._controller._config = config;
}
/**
* The `hls.js` constructor (supports dynamic imports) or a URL of where it can be found.
*
* @defaultValue `https://cdn.jsdelivr.net/npm/hls.js@^1.0.0/dist/hls.min.js`
*/
get library() {
return this._library;
}
set library(library) {
this._library = library;
}
preconnect() {
if (!isString(this._library))
return;
preconnect(this._library);
}
setup(context) {
super.setup(context);
new HLSLibLoader(this._library, context, (ctor) => {
this._ctor = ctor;
this._controller.setup(ctor, context);
context.delegate._dispatch("provider-setup", { detail: this });
const src = peek(context.$store.source);
if (src)
this.loadSource(src);
});
}
async loadSource({ src }) {
if (!isString(src))
return;
this._controller.instance?.loadSource(src);
}
/**
* The given callback is invoked when a new `hls.js` instance is created and right before it's
* attached to media.
*/
onInstance(callback) {
const instance = this._controller.instance;
if (instance)
callback(instance);
this._controller._callbacks.add(callback);
return () => this._controller._callbacks.delete(callback);
}
destroy() {
this._controller._destroy();
}
}
/**
* Whether `hls.js` is supported in this environment.
*/
HLSProvider.supported = isHLSSupported();
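/*
* Example (illustrative): the `hls.js` integration can be tuned through the members defined on
* `HLSProvider` above. How the provider instance is obtained depends on the host player (for
* instance, after the "provider-setup" event dispatched above), so `hlsProvider` is assumed to
* already be in hand. `startLevel` is a standard `hls.js` config option.
*
*   hlsProvider.library = "https://cdn.jsdelivr.net/npm/hls.js@^1.0.0/dist/hls.min.js";
*   hlsProvider.config = { lowLatencyMode: true, startLevel: -1 };
*   const off = hlsProvider.onInstance((hls) => console.log("hls.js attached", hls.levels));
*   // Later: off() to remove the callback, hlsProvider.destroy() to tear down hls.js.
*/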
var provider = /*#__PURE__*/Object.freeze({
__proto__: null,
HLSProvider: HLSProvider
});
export { AudioProviderLoader as A, onTrackChapterChange as B, getNumberOfDecimalPlaces as C, canChangeVolume as D, ENABLE_AUTO_QUALITY as E, HLSProviderLoader as H, IS_SAFARI as I, LIST_READONLY as L, SET_AUTO_QUALITY as S, TextTrack as T, VideoProviderLoader as V, LIST_ADD as a, LIST_REMOVE as b, canOrientScreen as c, LIST_ON_REMOVE as d, LIST_RESET as e, LIST_SET_READONLY as f, getRequestCredentials as g, LIST_ON_RESET as h, isHTMLMediaElement as i, LIST_SELECT as j, LIST_ON_USER_SELECT as k, coerceToError as l, TEXT_TRACK_CAN_LOAD as m, TEXT_TRACK_UPDATE_ACTIVE_CUES as n, isTrackCaptionKind as o, preconnect as p, TEXT_TRACK_NATIVE as q, round as r, ATTACH_VIDEO as s, TEXT_TRACK_NATIVE_HLS as t, TEXT_TRACK_CROSSORIGIN as u, TEXT_TRACK_ON_MODE_CHANGE as v, IS_IPHONE as w, clampNumber as x, findActiveCue as y, isCueActive as z };