// vidstack: Build awesome media experiences on the web.
import { peek, effect } from 'maverick.js';
import { isUndefined, isNumber, DOMEvent, listenEvent, camelToKebabCase } from 'maverick.js/std';
import { F as ENABLE_AUTO_QUALITY, G as TextTrack, H as TEXT_TRACK_READY_STATE, J as TEXT_TRACK_ON_MODE_CHANGE, L as LIST_SELECT, K as SET_AUTO_QUALITY, N as LIST_ADD, P as IS_CHROME } from '../../media-core.js';
// Invokes a callback on every animation frame until stopped.
class RAFLoop {
  constructor(_callback) {
    this._callback = _callback;
  }
  _id;
  start() {
    // Already looping.
    if (!isUndefined(this._id)) return;
    this._loop();
  }
  stop() {
    if (isNumber(this._id)) window.cancelAnimationFrame(this._id);
    this._id = void 0;
  }
  _loop() {
    this._id = window.requestAnimationFrame(() => {
      // Bail if `stop()` was called before this frame fired.
      if (isUndefined(this._id)) return;
      this._callback();
      this._loop();
    });
  }
}
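// Usage sketch (the callback name is hypothetical): drive a per-frame UI
// update and tear it down on dispose. The loop reschedules itself, so
// `stop()` is the only way to end it.
//
//   const loop = new RAFLoop(() => updateLiveEdgeIndicator());
//   loop.start();
//   // ...later, on teardown:
//   loop.stop();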
// hls.js event types are camelCase (e.g., `hlsMediaAttached`); they're
// re-dispatched on the player as kebab-case DOM events (`hls-media-attached`).
const toDOMEventType = (type) => camelToKebabCase(type);
class HLSController {
  constructor(_video) {
    this._video = _video;
  }
  _ctx;
  _instance = null;
  _stopLiveSync = null;
  _config = {};
  _callbacks = /* @__PURE__ */ new Set();
  get instance() {
    return this._instance;
  }
  setup(ctor, context) {
    this._ctx = context;
    const streamType = peek(context.$store.streamType),
      isLive = streamType.includes("live"),
      isLiveLowLatency = streamType.includes("ll-");
    this._instance = new ctor({
      lowLatencyMode: isLiveLowLatency,
      // Keep the back buffer short for live streams to stay near the live edge.
      backBufferLength: isLiveLowLatency ? 4 : isLive ? 8 : void 0,
      // Text tracks are surfaced through the player's own track list instead.
      renderTextTracksNatively: false,
      ...this._config
    });
    // Re-dispatch every hls.js event on the player as a DOM event.
    const dispatcher = this._dispatchHLSEvent.bind(this);
    for (const event of Object.values(ctor.Events)) this._instance.on(event, dispatcher);
    this._instance.on(ctor.Events.ERROR, this._onError.bind(this));
    for (const callback of this._callbacks) callback(this._instance);
    context.player.dispatchEvent(new DOMEvent("hls-instance", { detail: this._instance }));
    this._instance.attachMedia(this._video);
    this._instance.on(ctor.Events.AUDIO_TRACK_SWITCHED, this._onAudioSwitch.bind(this));
    this._instance.on(ctor.Events.LEVEL_SWITCHED, this._onLevelSwitched.bind(this));
    this._instance.on(ctor.Events.LEVEL_LOADED, this._onLevelLoaded.bind(this));
    this._instance.on(ctor.Events.NON_NATIVE_TEXT_TRACKS_FOUND, this._onNonNativeTextTracksFound.bind(this));
    this._instance.on(ctor.Events.CUES_PARSED, this._onCuesParsed.bind(this));
    context.qualities[ENABLE_AUTO_QUALITY] = this._enableAutoQuality.bind(this);
    listenEvent(context.qualities, "change", this._onQualityChange.bind(this));
    listenEvent(context.audioTracks, "change", this._onAudioChange.bind(this));
    this._stopLiveSync = effect(this._watchLiveSyncPosition.bind(this));
  }
  _watchLiveSyncPosition() {
    if (!this._ctx.$store.live()) return;
    // Poll hls.js for the live sync position once per animation frame.
    const raf = new RAFLoop(this._updateLiveSyncPosition.bind(this));
    raf.start();
    return raf.stop.bind(raf);
  }
  _updateLiveSyncPosition() {
    this._ctx.$store.liveSyncPosition.set(this._instance?.liveSyncPosition ?? Infinity);
  }
  _dispatchHLSEvent(eventType, detail) {
    this._ctx.player.dispatchEvent(new DOMEvent(toDOMEventType(eventType), { detail }));
  }
  _onNonNativeTextTracksFound(eventType, data) {
    const event = new DOMEvent(eventType, { detail: data });
    let currentTrack = -1;
    for (let i = 0; i < data.tracks.length; i++) {
      const nonNativeTrack = data.tracks[i],
        init = nonNativeTrack.subtitleTrack ?? nonNativeTrack.closedCaptions,
        track = new TextTrack({
          id: `hls-${nonNativeTrack.kind}${i}`,
          src: init?.url,
          label: nonNativeTrack.label,
          language: init?.lang,
          kind: nonNativeTrack.kind
        });
      track[TEXT_TRACK_READY_STATE] = 2;
      // Keep hls.js's active subtitle track in sync with the player's track mode.
      track[TEXT_TRACK_ON_MODE_CHANGE] = () => {
        if (track.mode === "showing") {
          this._instance.subtitleTrack = i;
          currentTrack = i;
        } else if (currentTrack === i) {
          this._instance.subtitleTrack = -1;
          currentTrack = -1;
        }
      };
      if (nonNativeTrack.default) track.setMode("showing", event);
      this._ctx.textTracks.add(track, event);
    }
  }
  _onCuesParsed(eventType, data) {
    const track = this._ctx.textTracks.getById(`hls-${data.track}`);
    if (!track) return;
    const event = new DOMEvent(eventType, { detail: data });
    for (const cue of data.cues) {
      cue.positionAlign = "auto";
      track.addCue(cue, event);
    }
  }
  _onAudioSwitch(eventType, data) {
    const track = this._ctx.audioTracks[data.id];
    if (track) {
      this._ctx.audioTracks[LIST_SELECT](track, true, new DOMEvent(eventType, { detail: data }));
    }
  }
  _onLevelSwitched(eventType, data) {
    const quality = this._ctx.qualities[data.level];
    if (quality) {
      this._ctx.qualities[LIST_SELECT](quality, true, new DOMEvent(eventType, { detail: data }));
    }
  }
  _onLevelLoaded(eventType, data) {
    if (this._ctx.$store.canPlay()) return;
    const { type, live, totalduration: duration } = data.details;
    const event = new DOMEvent(eventType, { detail: data });
    // A live EVENT playlist with a finite duration supports DVR-style seeking.
    this._ctx.delegate.p("stream-type-change", {
      detail: live ? (type === "EVENT" && Number.isFinite(duration) ? "live:dvr" : "live") : "on-demand",
      trigger: event
    });
    this._ctx.delegate.p("duration-change", { detail: duration, trigger: event });
    const media = this._instance.media;
    // `-1` means hls.js is picking levels automatically.
    if (this._instance.currentLevel === -1) {
      this._ctx.qualities[SET_AUTO_QUALITY](true, event);
    }
    for (const track of this._instance.audioTracks) {
      this._ctx.audioTracks[LIST_ADD]({
        id: track.id + "",
        label: track.name,
        language: track.lang || "",
        kind: "main"
      }, event);
    }
    for (const level of this._instance.levels) {
      this._ctx.qualities[LIST_ADD]({
        width: level.width,
        height: level.height,
        codec: level.codecSet,
        bitrate: level.bitrate
      }, event);
    }
    media.dispatchEvent(new DOMEvent("canplay", { trigger: event }));
  }
  _onError(eventType, data) {
    if (data.fatal) {
      switch (data.type) {
        case "networkError":
          // hls.js can often recover from fatal network errors by reloading.
          this._instance?.startLoad();
          break;
        case "mediaError":
          this._instance?.recoverMediaError();
          break;
        default:
          // Unrecoverable: tear the engine down.
          this._instance?.destroy();
          this._instance = null;
          break;
      }
    }
  }
  _enableAutoQuality() {
    // `currentLevel = -1` returns hls.js to automatic level selection.
    if (this._instance) this._instance.currentLevel = -1;
  }
  _onQualityChange() {
    const { qualities } = this._ctx;
    if (!this._instance || qualities.auto) return;
    // Maps onto hls.js's `currentLevel`/`nextLevel`/`loadLevel` setters.
    this._instance[qualities.switch + "Level"] = qualities.selectedIndex;
    // Workaround: reassigning `currentTime` nudges Chrome to apply a manual
    // level switch.
    if (IS_CHROME) this._video.currentTime = this._video.currentTime;
  }
  _onAudioChange() {
    const { audioTracks } = this._ctx;
    if (this._instance && this._instance.audioTrack !== audioTracks.selectedIndex) {
      this._instance.audioTrack = audioTracks.selectedIndex;
    }
  }
  destroy() {
    if (this._ctx) this._ctx.qualities[ENABLE_AUTO_QUALITY] = void 0;
    this._instance?.destroy();
    this._instance = null;
    // Dispose the live sync position effect.
    this._stopLiveSync?.();
    this._stopLiveSync = null;
  }
}
export { HLSController as H, RAFLoop as R };
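// Usage sketch (assumes the hls.js constructor and a player context shaped
// like the one consumed by `setup` above; `videoElement` and `context` are
// illustrative):
//
//   import HLS from 'hls.js';
//
//   const controller = new HLSController(videoElement);
//   controller.setup(HLS, context); // wires events, text/audio tracks, qualities
//   // ...on teardown:
//   controller.destroy();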