/**
 * vidstack — HLS controller (hls.js integration).
 * Build awesome media experiences on the web.
 * (Bundled output; original header metadata: 226 lines, 7.84 kB, JavaScript.)
 */
import { peek, effect } from 'maverick.js';
import { isUndefined, isNumber, DOMEvent, listenEvent, camelToKebabCase } from 'maverick.js/std';
import { G as ENABLE_AUTO_QUALITY, H as TextTrack, J as TEXT_TRACK_READY_STATE, K as TEXT_TRACK_ON_MODE_CHANGE, L as LIST_SELECT, N as SET_AUTO_QUALITY, P as LIST_ADD, Q as IS_CHROME } from '../../media-core.js';
/**
 * Minimal requestAnimationFrame loop: invokes `_callback` once per frame
 * until stopped. Stopping clears `_id`, which also cancels any frame that
 * has already been scheduled.
 */
class RAFLoop {
  constructor(_callback) {
    // Called on every animation frame while the loop runs.
    this._callback = _callback;
  }
  _id;
  _start() {
    // Starting is a no-op while a frame is already pending.
    if (isUndefined(this._id)) this._loop();
  }
  _stop() {
    if (isNumber(this._id)) window.cancelAnimationFrame(this._id);
    this._id = void 0;
  }
  _loop() {
    this._id = window.requestAnimationFrame(() => {
      // `_stop()` may have run before this frame fired — bail out.
      if (isUndefined(this._id)) return;
      this._callback();
      this._loop();
    });
  }
}
/** Converts a camelCase hls.js event name into its kebab-case DOM event type. */
function toDOMEventType(type) {
  return camelToKebabCase(type);
}
/**
 * Bridges an `hls.js` instance to the player context: re-dispatches hls.js
 * events as DOM events on the player, keeps the quality and audio-track lists
 * in sync in both directions, mounts non-native text tracks, and tracks the
 * live-sync position while a live stream is playing.
 */
class HLSController {
  constructor(_video) {
    // The <video> element the hls.js instance attaches to.
    this._video = _video;
  }
  // Player/media context assigned in `setup` (store, track lists, delegate).
  _context;
  // Active hls.js instance; null until `setup` and after `_destroy`.
  _instance = null;
  // Disposer for the live-sync reactive effect; null when not running.
  _stopLiveSync = null;
  // User config overrides spread into the hls.js constructor options.
  _config = {};
  // Callbacks invoked once the hls.js instance exists.
  _callbacks = /* @__PURE__ */ new Set();
  get instance() {
    return this._instance;
  }
  /**
   * Creates the hls.js instance, wires all event listeners, attaches the
   * media element, and starts live-position syncing.
   *
   * @param ctor - The hls.js constructor (loaded elsewhere).
   * @param context - Player/media context.
   */
  setup(ctor, context) {
    this._context = context;
    const isLive = peek(context.$store.streamType).includes("live"), isLiveLowLatency = peek(context.$store.streamType).includes("ll-");
    this._instance = new ctor({
      lowLatencyMode: isLiveLowLatency,
      // Shorter back buffer for live streams; hls.js default for VOD.
      backBufferLength: isLiveLowLatency ? 4 : isLive ? 8 : void 0,
      // Text tracks are rendered by the player, not natively by hls.js.
      renderTextTracksNatively: false,
      ...this._config
    });
    // Re-dispatch every hls.js event on the player as a kebab-cased DOM event.
    const dispatcher = this._dispatchHLSEvent.bind(this);
    for (const event of Object.values(ctor.Events))
      this._instance.on(event, dispatcher);
    this._instance.on(ctor.Events.ERROR, this._onError.bind(this));
    for (const callback of this._callbacks)
      callback(this._instance);
    context.player.dispatchEvent(new DOMEvent("hls-instance", { detail: this._instance }));
    this._instance.attachMedia(this._video);
    this._instance.on(ctor.Events.AUDIO_TRACK_SWITCHED, this._onAudioSwitch.bind(this));
    this._instance.on(ctor.Events.LEVEL_SWITCHED, this._onLevelSwitched.bind(this));
    this._instance.on(ctor.Events.LEVEL_LOADED, this._onLevelLoaded.bind(this));
    this._instance.on(ctor.Events.NON_NATIVE_TEXT_TRACKS_FOUND, this._onTracksFound.bind(this));
    this._instance.on(ctor.Events.CUES_PARSED, this._onCuesParsed.bind(this));
    // Allow the qualities list to switch hls.js back to automatic selection.
    context.qualities[ENABLE_AUTO_QUALITY] = this._enableAutoQuality.bind(this);
    listenEvent(context.qualities, "change", this._onQualityChange.bind(this));
    listenEvent(context.audioTracks, "change", this._onAudioChange.bind(this));
    this._stopLiveSync = effect(this._liveSync.bind(this));
  }
  // Reactive effect body: while the stream is live, poll the live-sync
  // position once per animation frame. The returned function is the effect's
  // cleanup, which stops the RAF loop.
  _liveSync() {
    if (!this._context.$store.live())
      return;
    const raf = new RAFLoop(this._liveSyncPosition.bind(this));
    raf._start();
    return raf._stop.bind(raf);
  }
  _liveSyncPosition() {
    // Infinity when hls.js does not (yet) report a live-sync position.
    this._context.$store.liveSyncPosition.set(this._instance?.liveSyncPosition ?? Infinity);
  }
  _dispatchHLSEvent(eventType, detail) {
    this._context.player.dispatchEvent(new DOMEvent(toDOMEventType(eventType), { detail }));
  }
  // Mounts hls.js "non-native" subtitle/caption tracks as player TextTracks,
  // and keeps the hls.js `subtitleTrack` index in sync with track mode changes.
  _onTracksFound(eventType, data) {
    const event = new DOMEvent(eventType, { detail: data });
    // Index of the hls.js track currently showing; -1 when none.
    let currentTrack = -1;
    for (let i = 0; i < data.tracks.length; i++) {
      const nonNativeTrack = data.tracks[i], init = nonNativeTrack.subtitleTrack ?? nonNativeTrack.closedCaptions, track = new TextTrack({
        id: `hls-${nonNativeTrack.kind}${i}`,
        src: init?.url,
        label: nonNativeTrack.label,
        language: init?.lang,
        kind: nonNativeTrack.kind
      });
      track[TEXT_TRACK_READY_STATE] = 2;
      track[TEXT_TRACK_ON_MODE_CHANGE] = () => {
        if (track.mode === "showing") {
          this._instance.subtitleTrack = i;
          currentTrack = i;
        } else if (currentTrack === i) {
          // Only clear the hls.js selection if this track was the one showing.
          this._instance.subtitleTrack = -1;
          currentTrack = -1;
        }
      };
      if (nonNativeTrack.default)
        track.setMode("showing", event);
      this._context.textTracks.add(track, event);
    }
  }
  // NOTE(review): assumes `data.track` matches the id suffix created in
  // `_onTracksFound` (`hls-<kind><index>`) — confirm against hls.js payloads.
  _onCuesParsed(eventType, data) {
    const track = this._context.textTracks.getById(`hls-${data.track}`);
    if (!track)
      return;
    const event = new DOMEvent(eventType, { detail: data });
    for (const cue of data.cues) {
      cue.positionAlign = "auto";
      track.addCue(cue, event);
    }
  }
  // hls.js switched audio tracks → mark the matching list item selected.
  _onAudioSwitch(eventType, data) {
    const track = this._context.audioTracks[data.id];
    if (track) {
      this._context.audioTracks[LIST_SELECT](
        track,
        true,
        new DOMEvent(eventType, { detail: data })
      );
    }
  }
  // hls.js switched levels → mark the matching quality selected.
  _onLevelSwitched(eventType, data) {
    const quality = this._context.qualities[data.level];
    if (quality) {
      this._context.qualities[LIST_SELECT](
        quality,
        true,
        new DOMEvent(eventType, { detail: data })
      );
    }
  }
  // First level load: derive stream type and duration, populate the audio
  // track and quality lists, then fire `canplay`. Skipped once `canPlay`.
  _onLevelLoaded(eventType, data) {
    if (this._context.$store.canPlay())
      return;
    const { type, live, totalduration: duration } = data.details;
    const event = new DOMEvent(eventType, { detail: data });
    this._context.delegate._dispatch("stream-type-change", {
      // A live EVENT playlist with a finite total duration supports DVR.
      detail: live ? type === "EVENT" && Number.isFinite(duration) ? "live:dvr" : "live" : "on-demand",
      trigger: event
    });
    this._context.delegate._dispatch("duration-change", { detail: duration, trigger: event });
    const media = this._instance.media;
    // currentLevel === -1 means hls.js is choosing levels automatically.
    if (this._instance.currentLevel === -1) {
      this._context.qualities[SET_AUTO_QUALITY](true, event);
    }
    for (const track of this._instance.audioTracks) {
      this._context.audioTracks[LIST_ADD](
        {
          id: track.id + "",
          label: track.name,
          language: track.lang || "",
          kind: "main"
        },
        event
      );
    }
    for (const level of this._instance.levels) {
      this._context.qualities[LIST_ADD](
        {
          width: level.width,
          height: level.height,
          codec: level.codecSet,
          bitrate: level.bitrate
        },
        event
      );
    }
    media.dispatchEvent(new DOMEvent("canplay", { trigger: event }));
  }
  // Logs the error, then for fatal errors attempts the standard hls.js
  // recovery path (restart load / recover media), destroying on anything else.
  _onError(eventType, data) {
    // NOTE(review): this bare block is likely a compiled-out dev-only guard
    // (e.g. `if (__DEV__)`) left behind by the bundler — confirm in source.
    {
      this._context.logger?.errorGroup(`HLS error \`${eventType}\``).labelledLog("Media Element", this._instance?.media).labelledLog("HLS Instance", this._instance).labelledLog("Event Type", eventType).labelledLog("Data", data).labelledLog("Src", peek(this._context.$store.source)).labelledLog("Media Store", { ...this._context.$store }).dispatch();
    }
    if (data.fatal) {
      switch (data.type) {
        case "networkError":
          this._instance?.startLoad();
          break;
        case "mediaError":
          this._instance?.recoverMediaError();
          break;
        default:
          // Unrecoverable: tear down the instance entirely.
          this._instance?.destroy();
          this._instance = null;
          break;
      }
    }
  }
  _enableAutoQuality() {
    // -1 restores hls.js automatic level selection.
    if (this._instance)
      this._instance.currentLevel = -1;
  }
  _onQualityChange() {
    const { qualities } = this._context;
    if (!this._instance || qualities.auto)
      return;
    // `qualities.switch` selects the hls.js property: e.g. "current" →
    // `currentLevel`, "next" → `nextLevel` — TODO confirm accepted values.
    this._instance[qualities.switch + "Level"] = qualities.selectedIndex;
    // Self-assignment nudges Chrome to apply the new level immediately.
    if (IS_CHROME)
      this._video.currentTime = this._video.currentTime;
  }
  _onAudioChange() {
    const { audioTracks } = this._context;
    if (this._instance && this._instance.audioTrack !== audioTracks.selectedIndex) {
      this._instance.audioTrack = audioTracks.selectedIndex;
    }
  }
  // Tears down the hls.js instance and stops the live-sync effect.
  _destroy() {
    if (this._context)
      this._context.qualities[ENABLE_AUTO_QUALITY] = void 0;
    this._instance?.destroy();
    this._instance = null;
    this._stopLiveSync?.();
    this._stopLiveSync = null;
    this._context?.logger?.info("\u{1F3D7}\uFE0F Destroyed HLS instance");
  }
}
export { HLSController as H, RAFLoop as R };