@livepeer/core
Livepeer UI Kit's core vanilla JS library.
1,389 lines (1,378 loc) • 79.8 kB • JavaScript
"use strict";
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __export = (target, all) => {
for (var name in all)
__defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
if (from && typeof from === "object" || typeof from === "function") {
for (let key of __getOwnPropNames(from))
if (!__hasOwnProp.call(to, key) && key !== except)
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
}
return to;
};
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// src/media.ts
var media_exports = {};
__export(media_exports, {
addLegacyMediaMetricsToStore: () => addLegacyMediaMetricsToStore,
addMetricsToStore: () => addMetricsToStore,
calculateVideoQualityDimensions: () => calculateVideoQualityDimensions,
createControllerStore: () => createControllerStore,
getBoundedVolume: () => getBoundedVolume,
getMediaSourceType: () => getMediaSourceType
});
module.exports = __toCommonJS(media_exports);
// src/media/controller.ts
var import_middleware = require("zustand/middleware");
var import_vanilla = require("zustand/vanilla");
// src/media/errors.ts
var STREAM_OPEN_ERROR_MESSAGE = "stream open failed";
var STREAM_OFFLINE_ERROR_MESSAGE = "stream is offline";
var STREAM_WAITING_FOR_DATA_ERROR_MESSAGE = "stream is waiting for data";
var ACCESS_CONTROL_ERROR_MESSAGE = "shutting down since this session is not allowed to view this stream";
var BFRAMES_ERROR_MESSAGE = "metadata indicates that webrtc playback contains bframes";
var NOT_ACCEPTABLE_ERROR_MESSAGE = "response indicates unacceptable playback protocol";
var PERMISSIONS_ERROR_MESSAGE = "user did not allow the permissions request";
var isStreamOfflineError = (error) => error.message.toLowerCase().includes(STREAM_OPEN_ERROR_MESSAGE) || error.message.toLowerCase().includes(STREAM_WAITING_FOR_DATA_ERROR_MESSAGE) || error.message.toLowerCase().includes(STREAM_OFFLINE_ERROR_MESSAGE);
var isAccessControlError = (error) => error.message.toLowerCase().includes(ACCESS_CONTROL_ERROR_MESSAGE);
var isBframesError = (error) => error.message.toLowerCase().includes(BFRAMES_ERROR_MESSAGE);
var isNotAcceptableError = (error) => error.message.toLowerCase().includes(NOT_ACCEPTABLE_ERROR_MESSAGE);
var isPermissionsError = (error) => error.message.toLowerCase().includes(PERMISSIONS_ERROR_MESSAGE);
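// Example (illustrative): the matchers above do a case-insensitive substring
// check on error.message, so any Error whose message contains one of the known
// phrases is classified accordingly.
//
//   isStreamOfflineError(new Error("Stream is offline"));  // => true
//   isBframesError(new Error("metadata indicates that webrtc playback contains bframes"));  // => true
//   isPermissionsError(new Error("some unrelated failure"));  // => false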
// src/media/utils.ts
var import_nanoid = require("nanoid");
// src/media/metrics-utils.ts
var getMetricsReportingPOSTUrl = async (opts) => {
const resolvedReportingUrl = await getMetricsReportingUrl({
playbackUrl: opts.playbackUrl,
path: "/analytics/log"
});
if (!resolvedReportingUrl) {
return null;
}
return resolvedReportingUrl?.toString?.() ?? null;
};
var getMetricsReportingWebsocketUrl = async (opts) => {
if (!opts.playbackId) {
return null;
}
const resolvedReportingUrl = await getMetricsReportingUrl({
playbackUrl: opts.playbackUrl,
path: `/json_video+${opts.playbackId}.js`
});
if (!resolvedReportingUrl) {
return null;
}
resolvedReportingUrl.protocol = "wss:";
if (resolvedReportingUrl && opts.sessionToken) {
resolvedReportingUrl.searchParams.set("tkn", opts.sessionToken);
}
return resolvedReportingUrl?.toString?.() ?? null;
};
var LP_DOMAINS = ["livepeer", "livepeercdn", "lp-playback"];
var getMetricsReportingUrl = async ({
playbackUrl,
path
}) => {
try {
const parsedPlaybackUrl = new URL(playbackUrl);
const splitHost = parsedPlaybackUrl.host.split(".");
const includesDomain = LP_DOMAINS.includes(
splitHost?.[splitHost.length - 2] ?? ""
);
const tld = splitHost?.[splitHost?.length - 1] ?? null;
const tldMapped = tld === "com" ? "studio" : tld === "studio" ? "studio" : tld === "fun" ? "fun:20443" : tld === "monster" ? "monster" : null;
if (includesDomain && tldMapped) {
const isCatalystPlayback = parsedPlaybackUrl.host.includes("catalyst");
try {
const getRedirectedUrl = async () => {
const response = await fetch(
`https://playback.livepeer.${tldMapped}${path}`
);
await response.text();
return response?.url ?? null;
};
const finalUrl = isCatalystPlayback ? `https://${parsedPlaybackUrl.host}${path}` : await getRedirectedUrl();
const url = finalUrl ? new URL(finalUrl) : null;
return url ?? null;
} catch (error) {
console.log("Could not fetch reporting URL.", error);
}
}
} catch (error) {
console.error(error?.message);
return null;
}
return null;
};
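// Example (illustrative; the playback URL and IDs below are hypothetical): for a
// recognized Livepeer playback host, the POST reporting URL resolves to the
// redirected "/analytics/log" endpoint, and the websocket variant swaps the
// protocol to wss: and appends the session token.
//
//   await getMetricsReportingPOSTUrl({
//     playbackUrl: "https://playback.livepeer.studio/asset/hls/abcd1234/index.m3u8",
//   });
//   // => e.g. "https://<redirected-host>/analytics/log" (null if the host is not a known Livepeer domain)
//
//   await getMetricsReportingWebsocketUrl({
//     playbackUrl: "https://playback.livepeer.studio/asset/hls/abcd1234/index.m3u8",
//     playbackId: "abcd1234",
//     sessionToken: "abc123",
//   });
//   // => e.g. "wss://<redirected-host>/json_video+abcd1234.js?tkn=abc123"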
var ASSET_URL_PART_VALUE = "hls";
var FLV_URL_PART_VALUE = "flv";
var WEBRTC_URL_PART_VALUE = "webrtc";
var RECORDING_URL_PART_VALUE = "recordings";
var getPlaybackIdFromSourceUrl = (sourceUrl) => {
const parsedUrl = new URL(sourceUrl);
const parts = parsedUrl.pathname.split("/");
const includesAssetUrl = parts.includes(ASSET_URL_PART_VALUE);
const includesWebRtcUrl = parts.includes(WEBRTC_URL_PART_VALUE);
const includesFlvUrl = parts.includes(FLV_URL_PART_VALUE);
const includesRecording = parts.includes(RECORDING_URL_PART_VALUE);
const playbackId = includesWebRtcUrl || includesFlvUrl ? parts?.[(parts?.length ?? 0) - 1] : includesRecording || includesAssetUrl ? parts?.[(parts?.length ?? 0) - 2] ?? null : null;
if (playbackId?.includes("+")) {
const split = playbackId.split("+")?.[1];
if (split) {
return split;
}
}
return playbackId ?? null;
};
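// Example (illustrative; URLs are hypothetical): the playback ID is the path
// segment before the file name for HLS/recording URLs, and the last segment for
// WebRTC/FLV URLs; a "name+id" stream key keeps only the part after the "+".
//
//   getPlaybackIdFromSourceUrl("https://example.com/hls/abcd1234/index.m3u8");  // => "abcd1234"
//   getPlaybackIdFromSourceUrl("https://example.com/webrtc/video+abcd1234");    // => "abcd1234"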
// src/media/mime.ts
var getMimeType = (ending) => {
for (const value of Object.keys(mime)) {
if (mime[value]?.some((v) => v === ending)) {
return value;
}
}
return null;
};
var mime = {
"application/mp4": ["mp4s", "m4p"],
"application/ogg": ["ogx"],
"application/vnd.apple.mpegurl": ["m3u8"],
"audio/3gpp": ["*3gpp"],
"audio/adpcm": ["adp"],
"audio/amr": ["amr"],
"audio/basic": ["au", "snd"],
"audio/midi": ["mid", "midi", "kar", "rmi"],
"audio/mobile-xmf": ["mxmf"],
"audio/mp3": ["*mp3"],
"audio/mp4": ["m4a", "mp4a"],
"audio/mpeg": ["mpga", "mp2", "mp2a", "mp3", "m2a", "m3a"],
"audio/ogg": ["oga", "ogg", "spx", "opus"],
"audio/s3m": ["s3m"],
"audio/silk": ["sil"],
"audio/vnd.dece.audio": ["uva", "uvva"],
"audio/vnd.digital-winds": ["eol"],
"audio/vnd.dra": ["dra"],
"audio/vnd.dts.hd": ["dtshd"],
"audio/vnd.dts": ["dts"],
"audio/vnd.lucent.voice": ["lvp"],
"audio/vnd.ms-playready.media.pya": ["pya"],
"audio/vnd.nuera.ecelp4800": ["ecelp4800"],
"audio/vnd.nuera.ecelp7470": ["ecelp7470"],
"audio/vnd.nuera.ecelp9600": ["ecelp9600"],
"audio/vnd.rip": ["rip"],
"audio/wav": ["wav"],
"audio/wave": ["*wav"],
"audio/webm": ["weba"],
"audio/x-aac": ["aac"],
"audio/x-aiff": ["aif", "aiff", "aifc"],
"audio/x-caf": ["caf"],
"audio/x-flac": ["flac"],
"audio/x-m4a": ["*m4a"],
"audio/x-matroska": ["mka"],
"audio/x-mpegurl": ["m3u"],
"audio/x-ms-wax": ["wax"],
"audio/x-ms-wma": ["wma"],
"audio/x-pn-realaudio-plugin": ["rmp"],
"audio/x-pn-realaudio": ["ram", "ra"],
"audio/x-realaudio": ["*ra"],
"audio/x-wav": ["*wav"],
"audio/xm": ["xm"],
"video/3gpp": ["3gp", "3gpp"],
"video/3gpp2": ["3g2"],
"video/h261": ["h261"],
"video/h263": ["h263"],
"video/h264": ["h264"],
"video/iso.segment": ["m4s"],
"video/jpeg": ["jpgv"],
"video/jpm": ["*jpm", "jpgm"],
"video/mj2": ["mj2", "mjp2"],
"video/mp2t": ["ts"],
"video/mp4": ["mp4", "mp4v", "mpg4"],
"video/mpeg": ["mpeg", "mpg", "mpe", "m1v", "m2v"],
"video/ogg": ["ogv"],
"video/quicktime": ["qt", "mov"],
"video/vnd.dece.hd": ["uvh", "uvvh"],
"video/vnd.dece.mobile": ["uvm", "uvvm"],
"video/vnd.dece.pd": ["uvp", "uvvp"],
"video/vnd.dece.sd": ["uvs", "uvvs"],
"video/vnd.dece.video": ["uvv", "uvvv"],
"video/vnd.dvb.file": ["dvb"],
"video/vnd.fvt": ["fvt"],
"video/vnd.mpegurl": ["mxu", "m4u"],
"video/vnd.ms-playready.media.pyv": ["pyv"],
"video/vnd.uvvu.mp4": ["uvu", "uvvu"],
"video/vnd.vivo": ["viv"],
"video/vp8": ["vp8"],
"video/webm": ["webm"],
"video/x-f4v": ["f4v"],
"video/x-fli": ["fli"],
"video/x-flv": ["flv"],
"video/x-m4v": ["m4v"],
"video/x-matroska": ["mkv", "mk3d", "mks"],
"video/x-mng": ["mng"],
"video/x-ms-asf": ["asf", "asx"],
"video/x-ms-vob": ["vob"],
"video/x-ms-wm": ["wm"],
"video/x-ms-wmv": ["wmv"],
"video/x-ms-wmx": ["wmx"],
"video/x-ms-wvx": ["wvx"],
"video/x-msvideo": ["avi"],
"video/x-sgi-movie": ["movie"],
"video/x-smv": ["smv"]
};
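// Example (illustrative): getMimeType maps a file extension to the first MIME
// type in the table above that lists it (entries prefixed with "*" are
// duplicates that the exact-match lookup skips).
//
//   getMimeType("m3u8");  // => "application/vnd.apple.mpegurl"
//   getMimeType("mp4");   // => "video/mp4"
//   getMimeType("webm");  // => "video/webm"
//   getMimeType("xyz");   // => null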
// src/media/src.ts
var audioExtensions = /\.(m4a|mp4a|mpga|mp2|mp2a|mp3|m2a|m3a|wav|weba|aac|oga|spx)($|\?)/i;
var base64String = /data:video/i;
var hlsExtensions = /\.(m3u8)($|\?)/i;
var flvExtensions = /\/(flv)\//i;
var imageExtensions = /\.(jpg|jpeg|png|gif|bmp|webp)($|\?)/i;
var vttExtensions = /\.(vtt)($|\?)/i;
var mimeFromBase64Pattern = /data:(.+?);base64/;
var videoExtensions = /\.(mp4|ogv|webm|mov|m4v|avi|m3u8)($|\?)/i;
var webrtcExtensions = /(webrtc|sdp)/i;
var getMediaSourceType = (src, opts) => {
if (!src) {
return null;
}
const sourceTest = src?.toLowerCase();
const base64Mime = sourceTest.match(mimeFromBase64Pattern);
const resolvedWidth = opts?.sizing?.width ?? null;
const resolvedHeight = opts?.sizing?.height ?? null;
return webrtcExtensions.test(sourceTest) ? {
type: "webrtc",
src,
mime: "video/h264",
width: resolvedWidth,
height: resolvedHeight
} : hlsExtensions.test(sourceTest) ? {
type: "hls",
src,
mime: getMimeType(hlsExtensions.exec(sourceTest)?.[1] ?? ""),
width: resolvedWidth,
height: resolvedHeight
} : videoExtensions.test(sourceTest) ? {
type: "video",
src,
mime: getMimeType(videoExtensions.exec(sourceTest)?.[1] ?? ""),
width: resolvedWidth,
height: resolvedHeight
} : audioExtensions.test(sourceTest) ? {
type: "audio",
src,
mime: getMimeType(audioExtensions.exec(sourceTest)?.[1] ?? ""),
width: resolvedWidth,
height: resolvedHeight
} : base64String.test(sourceTest) ? {
type: "video",
src,
mime: base64Mime ? base64Mime[1] : "video/mp4",
width: resolvedWidth,
height: resolvedHeight
} : imageExtensions.test(sourceTest) ? {
type: "image",
src,
mime: getMimeType(
imageExtensions.exec(sourceTest)?.[1] ?? ""
),
width: resolvedWidth,
height: resolvedHeight
} : vttExtensions.test(sourceTest) ? {
type: "vtt",
src,
mime: getMimeType(
vttExtensions.exec(sourceTest)?.[1] ?? ""
),
width: null,
height: null
} : flvExtensions.test(sourceTest) ? {
type: "video",
src,
mime: "video/x-flv",
width: resolvedWidth,
height: resolvedHeight
} : null;
};
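// Example (illustrative; URLs are hypothetical): a source string is classified by
// matching it against the patterns above, checked in order (webrtc, hls, video,
// audio, base64, image, vtt, flv).
//
//   getMediaSourceType("https://example.com/webrtc/video+abcd1234");
//   // => { type: "webrtc", src, mime: "video/h264", width: null, height: null }
//
//   getMediaSourceType("https://example.com/hls/abcd1234/index.m3u8");
//   // => { type: "hls", src, mime: "application/vnd.apple.mpegurl", width: null, height: null }
//
//   getMediaSourceType("https://example.com/video.mp4", { sizing: { width: 1920, height: 1080 } });
//   // => { type: "video", src, mime: "video/mp4", width: 1920, height: 1080 }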
// src/media/utils.ts
var DEFAULT_ASPECT_RATIO = 16 / 9;
var getFilteredNaN = (value) => value && !Number.isNaN(value) && Number.isFinite(value) ? value : 0;
var getBoundedSeek = (seek, duration) => Math.min(
Math.max(0, getFilteredNaN(seek)),
// seek to near the end
getFilteredNaN(duration) ? getFilteredNaN(duration) - 0.01 : 0
);
var getBoundedRate = (rate) => {
if (typeof rate === "number") {
return Math.min(Math.max(0, getFilteredNaN(rate)), 20);
}
const match = String(rate).match(/(\d+(\.\d+)?)/);
return match ? Math.min(Math.max(0, getFilteredNaN(Number.parseFloat(match[0]))), 20) : 1;
};
var getBoundedVolume = (volume) => Math.min(Math.max(0, getFilteredNaN(volume)), 1);
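// Example (illustrative): the bounding helpers clamp values into safe ranges and
// treat NaN/Infinity as 0; getBoundedRate falls back to 1 for unparseable input.
//
//   getBoundedVolume(1.8);       // => 1
//   getBoundedVolume(-0.2);      // => 0
//   getBoundedSeek(500, 60);     // => 59.99 (just before the end)
//   getBoundedRate("2x");        // => 2
//   getBoundedRate("constant");  // => 1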
var nanoid = (0, import_nanoid.customAlphabet)(
"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
22
);
var generateRandomToken = () => {
return nanoid();
};
var getClipParams = ({
requestedTime,
clipLength,
playbackOffsetMs
}) => {
const currentTime = requestedTime ?? Date.now();
const estimatedServerClipTime = currentTime - (playbackOffsetMs ?? 0);
const startTime = estimatedServerClipTime - clipLength * 1e3;
const endTime = estimatedServerClipTime;
return {
startTime,
endTime
};
};
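// Example (illustrative): a clip request works backwards from "now" on the
// estimated server clock. With a 30 s clip length and a measured playback offset
// of 2000 ms, the window ends 2 s before the local timestamp and starts 30 s
// before that.
//
//   const now = Date.now();
//   getClipParams({ requestedTime: now, clipLength: 30, playbackOffsetMs: 2000 });
//   // => { startTime: now - 2000 - 30000, endTime: now - 2000 }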
var getProgressAria = ({
progress,
duration,
live
}) => {
const progressParsed = getHoursMinutesSeconds(progress ?? null);
const durationParsed = getHoursMinutesSeconds(duration ?? null);
const progressText = `${progressParsed.hours ? `${progressParsed.hours} hours ` : ""}${progressParsed.minutes ? `${progressParsed.minutes} minutes ` : ""}${progressParsed.seconds ? `${progressParsed.seconds} seconds` : ""}`;
const durationText = `${durationParsed.hours ? `${durationParsed.hours} hours ` : ""}${durationParsed.minutes ? `${durationParsed.minutes} minutes ` : ""}${durationParsed.seconds ? `${durationParsed.seconds} seconds` : ""}`;
const progressDisplay = live ? `Live ${progressText}` : `${progressText} of ${durationText}`;
const formattedTimeDisplay = getFormattedHoursMinutesSeconds(
progress ?? null
);
const formattedDuration = getFormattedHoursMinutesSeconds(duration ?? null);
const formattedTime = live ? formattedTimeDisplay : `${formattedTimeDisplay} / ${formattedDuration}`;
return {
progress: progressDisplay,
time: formattedTime
};
};
var sortSources = ({
src,
videoQuality,
screenWidth,
aspectRatio,
lowLatency,
hasRecentWebRTCTimeout
}) => {
if (!src) {
return null;
}
if (typeof src === "string") {
const mediaSourceType = getMediaSourceType(src);
return mediaSourceType ? [mediaSourceType] : null;
}
const filteredVideoSources = src.filter(
(s) => s.type === "audio" || s.type === "hls" || s.type === "webrtc" || s.type === "video"
).filter((s) => {
if (s.type === "hls" && lowLatency === "force") {
return false;
}
if (s.type === "webrtc" && (lowLatency === false || hasRecentWebRTCTimeout)) {
return false;
}
return true;
});
const videoQualityDimensions = calculateVideoQualityDimensions(
videoQuality,
aspectRatio
);
const targetWidth = videoQualityDimensions?.width ?? screenWidth ?? 1280;
const sourceWithParentDelta = filteredVideoSources?.map(
(s) => s.type === "hls" || s.type === "webrtc" ? { ...s, parentWidthDelta: null } : {
...s,
parentWidthDelta: (
// first we check if the URL contains the video quality selector
videoQuality && videoQuality !== "auto" && s?.src?.includes(videoQuality) ? 0 : (
// otherwise use the width of the src
s?.width ? Math.abs(targetWidth - s.width) : (
// otherwise guess the width of the src based on the url
s?.src.includes("static360p") || s?.src.includes("low-bitrate") ? Math.abs(targetWidth - 480) : s?.src.includes("static720p") ? Math.abs(targetWidth - 1280) : s?.src.includes("static1080p") ? Math.abs(targetWidth - 1920) : s?.src.includes("static2160p") ? Math.abs(targetWidth - 3840) : null
)
)
)
}
) ?? [];
const sortedSources = sourceWithParentDelta.sort((a, b) => {
if (a.type === "video" && b.type === "video") {
return b?.parentWidthDelta !== null && a?.parentWidthDelta !== null ? a.parentWidthDelta - b.parentWidthDelta : 1;
}
if (a.type === "video" && (b.type === "hls" || b.type === "webrtc")) {
return -1;
}
if (a.type === "webrtc" && b.type === "hls") {
return -1;
}
return 1;
});
return sortedSources;
};
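// Example (illustrative; URLs are hypothetical): sources are filtered by the
// lowLatency setting and any recent WebRTC timeout, then ordered static video
// renditions first (closest to the target width), followed by WebRTC, then HLS.
//
//   sortSources({
//     src: [
//       { type: "hls", src: "https://example.com/hls/abcd1234/index.m3u8", mime: "application/vnd.apple.mpegurl", width: null, height: null },
//       { type: "webrtc", src: "https://example.com/webrtc/abcd1234", mime: "video/h264", width: null, height: null },
//     ],
//     videoQuality: "auto",
//     screenWidth: 1280,
//     aspectRatio: 16 / 9,
//     lowLatency: true,
//     hasRecentWebRTCTimeout: false,
//   });
//   // => [ { type: "webrtc", ... }, { type: "hls", ... } ]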
var parseCurrentSourceAndPlaybackId = ({
accessKey,
aspectRatio,
playbackRate,
isHlsSupported,
jwt,
sessionToken,
source,
videoQuality,
ingestPlayback
}) => {
if (!source) {
return null;
}
const playbackId = getPlaybackIdFromSourceUrl(source.src);
const url = new URL(source.src);
if (sessionToken) {
url.searchParams.append("tkn", sessionToken);
}
if (source.type !== "webrtc" && source.type !== "hls") {
if (jwt) {
url.searchParams.append("jwt", jwt);
} else if (accessKey) {
url.searchParams.append("accessKey", accessKey);
}
}
if (source.type === "webrtc") {
if (playbackRate === "constant") {
url.searchParams.append("constant", "true");
}
const videoTrackSelector = getVideoTrackSelectorForQuality(
videoQuality,
aspectRatio
);
if (videoTrackSelector) {
url.searchParams.append("video", videoTrackSelector);
}
}
if (ingestPlayback) {
url.searchParams.append("ingestpb", String(ingestPlayback));
}
const newSrc = {
...source,
src: url.toString()
};
const videoSourceIfHlsUnsupported = newSrc?.type === "hls" && !isHlsSupported ? {
...newSrc,
type: "video"
} : newSrc;
return {
currentSource: videoSourceIfHlsUnsupported,
playbackId
};
};
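// Example (illustrative; values are hypothetical): the chosen source gets the
// session token appended, a JWT or access key for non-WebRTC/HLS sources, a
// "constant" flag and video track selector for WebRTC, and is downgraded from
// "hls" to plain "video" when HLS is not supported by the device.
//
//   parseCurrentSourceAndPlaybackId({
//     accessKey: null,
//     aspectRatio: 16 / 9,
//     playbackRate: "constant",
//     isHlsSupported: true,
//     jwt: null,
//     sessionToken: "abc123",
//     source: { type: "webrtc", src: "https://example.com/webrtc/video+abcd1234", mime: "video/h264", width: null, height: null },
//     videoQuality: "720p",
//     ingestPlayback: false,
//   });
//   // => {
//   //   currentSource: { ..., src: "https://example.com/webrtc/video+abcd1234?tkn=abc123&constant=true&video=%7E1280x720" },
//   //   playbackId: "abcd1234",
//   // }
//   // (the "~" in the track selector is percent-encoded by URLSearchParams)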
var getNewSource = ({
accessKey,
aspectRatio,
isHlsSupported,
jwt,
lowLatency,
playbackRate,
screenWidth,
sessionToken,
src,
videoQuality,
hasRecentWebRTCTimeout,
ingestPlayback
}) => {
const sortedSources = sortSources({
src,
screenWidth,
videoQuality,
aspectRatio: aspectRatio ?? DEFAULT_ASPECT_RATIO,
lowLatency,
hasRecentWebRTCTimeout
});
const parsedSource = parseCurrentSourceAndPlaybackId({
accessKey: accessKey ?? null,
aspectRatio: aspectRatio ?? null,
isHlsSupported,
jwt: jwt ?? null,
playbackRate,
sessionToken,
source: sortedSources?.[0] ?? null,
videoQuality,
ingestPlayback
});
return {
currentSource: parsedSource?.currentSource ?? null,
playbackId: parsedSource?.playbackId ?? null,
sortedSources
};
};
var getVideoTrackSelectorForQuality = (videoQuality, aspectRatio) => {
if (videoQuality === "auto") {
return null;
}
const videoQualityDimensions = calculateVideoQualityDimensions(
videoQuality,
aspectRatio
);
if (videoQualityDimensions?.width && videoQualityDimensions?.height) {
return `~${videoQualityDimensions.width}x${videoQualityDimensions.height}`;
}
return null;
};
function calculateVideoQualityDimensions(videoQuality, aspectRatio) {
const height = videoQuality === "1080p" ? 1080 : videoQuality === "720p" ? 720 : videoQuality === "480p" ? 480 : videoQuality === "360p" ? 360 : videoQuality === "240p" ? 240 : videoQuality === "144p" ? 144 : null;
return {
width: height !== null ? Math.round(height * (aspectRatio ?? 16 / 9)) : null,
height
};
}
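// Example (illustrative): a quality label maps to a fixed height, and the width
// is derived from the aspect ratio (defaulting to 16:9).
//
//   calculateVideoQualityDimensions("720p", 16 / 9);  // => { width: 1280, height: 720 }
//   calculateVideoQualityDimensions("360p", 4 / 3);   // => { width: 480, height: 360 }
//   calculateVideoQualityDimensions("auto", 16 / 9);  // => { width: null, height: null }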
var getFormattedHoursMinutesSeconds = (valueInSeconds) => {
const hoursMinutesSeconds = getHoursMinutesSeconds(valueInSeconds);
if (hoursMinutesSeconds.seconds || hoursMinutesSeconds.minutes || hoursMinutesSeconds.hours) {
if (hoursMinutesSeconds.hours > 0) {
return `${hoursMinutesSeconds.hours}:${hoursMinutesSeconds.minutes.toString().padStart(2, "0")}:${hoursMinutesSeconds.seconds < 10 ? "0" : ""}${hoursMinutesSeconds.seconds}`;
}
return `${hoursMinutesSeconds.minutes}:${hoursMinutesSeconds.seconds.toString().padStart(2, "0")}`;
}
return "0:00";
};
var getHoursMinutesSeconds = (valueInSeconds) => {
if (valueInSeconds !== void 0 && valueInSeconds !== null && !Number.isNaN(valueInSeconds) && Number.isFinite(valueInSeconds)) {
const roundedValue = Math.round(valueInSeconds);
const hours = Math.floor(roundedValue / 3600);
const seconds = Math.floor(roundedValue % 60);
if (hours > 0) {
const minutes2 = Math.floor(roundedValue % 3600 / 60);
return {
hours,
minutes: minutes2,
seconds
};
}
const minutes = Math.floor(roundedValue / 60);
return {
hours: 0,
minutes,
seconds
};
}
return {
hours: 0,
minutes: 0,
seconds: 0
};
};
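// Example (illustrative): durations are split into hours/minutes/seconds and
// formatted as "h:mm:ss" above one hour, "m:ss" below it.
//
//   getHoursMinutesSeconds(3725);           // => { hours: 1, minutes: 2, seconds: 5 }
//   getFormattedHoursMinutesSeconds(3725);  // => "1:02:05"
//   getFormattedHoursMinutesSeconds(65);    // => "1:05"
//   getFormattedHoursMinutesSeconds(null);  // => "0:00"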
// src/media/controller.ts
var DEFAULT_AUTOHIDE_TIME = 3e3;
var DEFAULT_SEEK_TIME = 5e3;
var DEFAULT_VOLUME_LEVEL = 1;
var webrtcTimeoutLastTime = null;
var getHasRecentWebRTCTimeout = (cacheWebRTCFailureMs) => {
if (!webrtcTimeoutLastTime || !cacheWebRTCFailureMs) return false;
return Date.now() - webrtcTimeoutLastTime < cacheWebRTCFailureMs;
};
var createControllerStore = ({
device,
storage,
src,
initialProps,
playbackId
}) => {
const initialPlaybackRate = initialProps?.playbackRate ?? 1;
const initialVolume = getBoundedVolume(
initialProps.volume ?? DEFAULT_VOLUME_LEVEL
);
const initialVideoQuality = initialProps.videoQuality ?? "auto";
const sessionToken = generateRandomToken();
const thumbnailSrc = typeof src === "string" ? null : src?.find?.((s) => s.type === "image");
const lowLatency = initialProps.lowLatency ?? true;
const parsedInputSource = getNewSource({
accessKey: initialProps?.accessKey,
aspectRatio: initialProps?.aspectRatio,
isHlsSupported: device.isHlsSupported,
jwt: initialProps?.jwt,
playbackRate: initialPlaybackRate,
lowLatency,
screenWidth: device.screenWidth,
sessionToken,
src,
videoQuality: initialVideoQuality,
hasRecentWebRTCTimeout: getHasRecentWebRTCTimeout(
initialProps.cacheWebRTCFailureMs
),
ingestPlayback: initialProps.ingestPlayback ?? false
});
const initialControls = {
hlsConfig: null,
autohide: DEFAULT_AUTOHIDE_TIME,
lastError: 0,
lastInteraction: Date.now(),
requestedMeasureLastTime: 0,
muted: initialVolume === 0,
playbackId: playbackId ?? parsedInputSource?.playbackId ?? null,
playbackOffsetMs: null,
playLastTime: 0,
pauseLastTime: 0,
requestedClipParams: null,
requestedFullscreenLastTime: 0,
requestedPictureInPictureLastTime: 0,
requestedPlayPauseLastTime: 0,
requestedRangeToSeekTo: 0,
sessionToken,
size: null,
thumbnail: thumbnailSrc ?? null,
volume: initialVolume
};
const store = (0, import_vanilla.createStore)(
(0, import_middleware.subscribeWithSelector)(
(0, import_middleware.persist)(
(set, get) => ({
currentSource: parsedInputSource.currentSource,
canPlay: false,
hidden: false,
/** Current volume of the media. 0 if it is muted. */
volume: initialVolume,
/** The playback rate for the media. Defaults to 1. */
playbackRate: initialPlaybackRate,
videoQuality: "auto",
/** Current progress of the media (in seconds) */
progress: 0,
/** Current total duration of the media (in seconds) */
duration: 0,
/** Current buffered end time for the media (in seconds) */
buffered: 0,
/** Current buffered percent */
bufferedPercent: 0,
poster: thumbnailSrc?.src ?? null,
/** If the video element is mounted on the DOM */
mounted: false,
/** If the media is fullscreen. */
fullscreen: false,
/** If the media is in picture in picture mode */
pictureInPicture: false,
playing: false,
waiting: false,
stalled: false,
loading: true,
ended: false,
/** If the media has experienced an error. */
error: null,
errorCount: 0,
/** If the content is live media */
live: false,
/** If the media has been played yet. */
hasPlayed: false,
/** The sorted sources that were passed in to the Player */
sortedSources: parsedInputSource.sortedSources,
/** The final playback URL for the media that is playing, after redirects. */
currentUrl: null,
metricsReportingUrl: null,
aria: {
progress: "No progress, content is loading",
fullscreen: "Full screen (f)",
pictureInPicture: "Mini player (i)",
playPause: "Play (k)",
clip: initialProps.clipLength ? `Clip last ${Number(initialProps.clipLength).toFixed(
0
)} seconds (x)` : null,
time: "0:00"
},
__initialProps: {
accessKey: initialProps.accessKey ?? null,
aspectRatio: initialProps?.aspectRatio ?? null,
autoPlay: initialProps.autoPlay ?? false,
backoff: Math.max(initialProps.backoff ?? 500, 100),
backoffMax: Math.max(initialProps.backoffMax ?? 3e4, 1e4),
calculateDelay: initialProps.calculateDelay ?? ((count) => {
if (count === 0) {
return 0;
}
const delayTime = Math.min(
Math.max(initialProps.backoff ?? 500, 100) * 2 ** (count - 1),
Math.max(initialProps.backoffMax ?? 3e4, 1e4)
);
return delayTime;
}),
clipLength: initialProps.clipLength ?? null,
cacheWebRTCFailureMs: initialProps.cacheWebRTCFailureMs ?? null,
hotkeys: initialProps?.hotkeys ?? true,
jwt: initialProps.jwt ?? null,
lowLatency,
onError: initialProps?.onError ?? null,
playbackRate: initialPlaybackRate,
posterLiveUpdate: initialProps.posterLiveUpdate ?? 3e4,
preload: initialProps.preload ?? "none",
storage,
timeout: initialProps.timeout ?? 1e4,
videoQuality: initialVideoQuality,
viewerId: initialProps.viewerId ?? null,
volume: initialVolume ?? null,
ingestPlayback: initialProps.ingestPlayback ?? false,
iceServers: initialProps.iceServers
},
__device: device,
__controls: initialControls,
__metadata: null,
__controlsFunctions: {
setMounted: () => set(() => ({
mounted: true
})),
setPoster: (poster) => set(() => ({
poster
})),
setMetricsReportingUrl: (metricsReportingUrl) => set(() => ({
metricsReportingUrl
})),
onWebRTCTimeout: () => {
webrtcTimeoutLastTime = Date.now();
},
setAutohide: (autohide) => set(({ __controls }) => ({
__controls: {
...__controls,
autohide
}
})),
setHlsConfig: (hlsConfig) => set(({ __controls }) => ({
__controls: {
...__controls,
hlsConfig
}
})),
setHidden: (hidden) => set(({ playing }) => ({
hidden: playing ? hidden : false
})),
updateLastInteraction: () => set(({ __controls }) => ({
__controls: { ...__controls, lastInteraction: Date.now() }
})),
updatePlaybackOffsetMs: (offset) => set(({ __controls }) => ({
__controls: {
...__controls,
playbackOffsetMs: offset
}
})),
onCanPlay: () => set(() => ({
canPlay: true,
loading: false
})),
onPlay: () => set(({ aria, __controls, __controlsFunctions }) => {
__controlsFunctions.onError(null);
const title = "Pause (k)";
return {
playing: true,
hasPlayed: true,
error: null,
errorCount: 0,
stalled: false,
waiting: false,
ended: false,
__controls: {
...__controls,
playLastTime: Date.now()
},
aria: {
...aria,
playPause: title
}
};
}),
onPause: () => set(({ aria, __controls }) => {
const title = "Play (k)";
return {
playing: false,
hidden: false,
stalled: false,
waiting: false,
ended: false,
aria: {
...aria,
playPause: title
},
__controls: {
...__controls,
pauseLastTime: Date.now()
}
};
}),
togglePlay: (force) => {
const { hidden, __device, __controlsFunctions } = store.getState();
if (!force && hidden && __device.isMobile) {
__controlsFunctions.setHidden(false);
} else {
set(({ __controls }) => ({
__controls: {
...__controls,
requestedPlayPauseLastTime: Date.now(),
lastInteraction: Date.now()
}
}));
}
},
onProgress: (time) => set(({ aria, progress, duration, live }) => {
const progressAria = getProgressAria({
progress,
duration,
live
});
const playPauseTitle = "Pause (k)";
return {
aria: {
...aria,
progress: progressAria.progress,
time: progressAria.time,
playPause: playPauseTitle
},
progress: getFilteredNaN(time),
waiting: false,
stalled: false,
ended: false
};
}),
requestSeek: (time) => set(({ duration, __controls }) => ({
__controls: {
...__controls,
requestedRangeToSeekTo: getBoundedSeek(time, duration)
},
progress: getBoundedSeek(time, duration)
})),
onDurationChange: (duration) => set(({ live }) => ({
duration,
live: duration === Number.POSITIVE_INFINITY ? true : live
})),
setWebsocketMetadata: (metadata) => set(() => ({ __metadata: metadata })),
updateBuffered: (buffered) => set(({ duration }) => {
const durationFiltered = getFilteredNaN(duration);
const percent = durationFiltered > 0 && buffered > 0 ? buffered / durationFiltered * 100 : 0;
return {
buffered,
bufferedPercent: Number(percent.toFixed(2))
};
}),
setVideoQuality: (videoQuality) => set(({ __initialProps, __controls, playbackRate, __device }) => {
const parsedSourceNew = getNewSource({
accessKey: __initialProps?.accessKey,
aspectRatio: __initialProps.aspectRatio,
isHlsSupported: __device.isHlsSupported,
jwt: __initialProps?.jwt,
lowLatency: __initialProps.lowLatency,
playbackRate,
screenWidth: device.screenWidth,
sessionToken: __controls.sessionToken,
src,
videoQuality,
hasRecentWebRTCTimeout: getHasRecentWebRTCTimeout(
__initialProps.cacheWebRTCFailureMs
),
ingestPlayback: __initialProps.ingestPlayback
});
return {
sortedSources: parsedSourceNew.sortedSources,
videoQuality,
currentSource: parsedSourceNew.currentSource,
__controls: {
...__controls,
playbackId: playbackId ?? parsedSourceNew.playbackId
}
};
}),
setPlaybackRate: (rate) => set(() => ({
playbackRate: getBoundedRate(rate ?? 1)
})),
requestSeekDiff: (difference) => set(({ progress, duration, __controls }) => ({
__controls: {
...__controls,
requestedRangeToSeekTo: getBoundedSeek(
getFilteredNaN(progress) + difference / 1e3,
duration
)
}
})),
requestSeekBack: (difference = DEFAULT_SEEK_TIME) => get().__controlsFunctions.requestSeekDiff(-difference),
requestSeekForward: (difference = DEFAULT_SEEK_TIME) => get().__controlsFunctions.requestSeekDiff(difference),
onFinalUrl: (currentUrl) => set(() => ({ currentUrl })),
requestMeasure: () => set(({ __controls }) => {
return {
__controls: {
...__controls,
requestedMeasureLastTime: Date.now()
}
};
}),
setSize: (size) => set(({ __controls }) => {
return {
__controls: {
...__controls,
size: {
...__controls.size,
...size
}
}
};
}),
onWaiting: () => set(() => ({ waiting: true })),
onStalled: () => set(() => ({ stalled: true })),
onLoading: () => set(() => ({ loading: true })),
onEnded: () => set(() => ({ ended: true })),
setFullscreen: (fullscreen) => set(({ aria }) => {
const title = fullscreen ? "Exit full screen (f)" : "Full screen (f)";
return {
fullscreen,
aria: {
...aria,
fullscreen: title
}
};
}),
requestToggleFullscreen: () => set(({ __controls }) => ({
__controls: {
...__controls,
requestedFullscreenLastTime: Date.now()
}
})),
setPictureInPicture: (pictureInPicture) => set(({ aria }) => {
const title = pictureInPicture ? "Exit mini player (i)" : "Mini player (i)";
return {
pictureInPicture,
aria: {
...aria,
pictureInPicture: title
}
};
}),
requestTogglePictureInPicture: () => set(({ __controls }) => ({
__controls: {
...__controls,
requestedPictureInPictureLastTime: Date.now()
}
})),
setLive: (live) => set(() => ({ live })),
requestClip: () => set(({ __controls, __initialProps }) => ({
__controls: {
...__controls,
requestedClipParams: __initialProps.clipLength ? getClipParams({
requestedTime: Date.now(),
clipLength: __initialProps.clipLength,
playbackOffsetMs: __controls.playbackOffsetMs
}) : null
}
})),
requestVolume: (newVolume) => set(({ __controls }) => ({
volume: getBoundedVolume(newVolume),
__controls: {
...__controls,
volume: newVolume === 0 ? newVolume : getBoundedVolume(newVolume),
muted: newVolume === 0
}
})),
setVolume: (newVolume) => set(({ __controls }) => ({
volume: getBoundedVolume(newVolume),
__controls: {
...__controls,
muted: newVolume === 0
}
})),
requestToggleMute: (forceValue) => set(({ __controls }) => {
const previousVolume = getBoundedVolume(__controls.volume) || 0;
const nonMutedVolume = previousVolume > 0.01 ? previousVolume : DEFAULT_VOLUME_LEVEL;
const mutedVolume = 0;
const newMutedValue = forceValue ?? !__controls.muted;
return {
volume: newMutedValue ? mutedVolume : nonMutedVolume,
__controls: {
...__controls,
muted: newMutedValue
}
};
}),
onError: (rawError) => set(
({
currentSource,
sortedSources,
__controls,
errorCount,
__device,
__initialProps,
videoQuality,
playbackRate
}) => {
const msSinceLastError = Date.now() - __controls.lastError;
const error = rawError ? {
type: isAccessControlError(rawError) ? "access-control" : isBframesError(rawError) || isNotAcceptableError(rawError) ? "fallback" : isStreamOfflineError(rawError) ? "offline" : isPermissionsError(rawError) ? "permissions" : "unknown",
message: rawError?.message ?? "Error with playback."
} : null;
if (__initialProps.onError) {
try {
__initialProps.onError(error);
} catch (e) {
console.error(e);
}
}
const base = {
error,
...error ? {
errorCount: errorCount + 1,
playing: false,
__controls: {
...__controls,
lastError: Date.now()
}
} : { __controls }
};
if (!error) {
return base;
}
console.error(error);
if (error.type === "offline" || error.type === "access-control" || error.type === "permissions") {
return base;
}
if (typeof sortedSources === "string" || !Array.isArray(sortedSources)) {
return base;
}
if (msSinceLastError < errorCount * 500) {
return base;
}
const currentSourceBaseUrl = currentSource ? new URL(currentSource.src) : "";
if (currentSourceBaseUrl) {
currentSourceBaseUrl.search = "";
}
const currentSourceIndex = sortedSources.findIndex(
(s) => s.src === currentSourceBaseUrl.toString()
);
const rotatedSources = [
...sortedSources.slice(currentSourceIndex + 1),
...sortedSources.slice(0, currentSourceIndex + 1)
];
const hasRecentWebRTCTimeout = getHasRecentWebRTCTimeout(
__initialProps.cacheWebRTCFailureMs
);
const canPlaySourceType = (src2) => {
const hasOneWebRTCSource = sortedSources.some(
(s) => s?.type === "webrtc"
);
if (__initialProps.lowLatency === "force" && hasOneWebRTCSource && src2.type !== "webrtc") {
return false;
}
if (__initialProps.lowLatency === false) {
return src2.type !== "webrtc";
}
if (hasRecentWebRTCTimeout) {
return src2.type !== "webrtc";
}
return src2.type === "webrtc" ? __device.isWebRTCSupported : true;
};
const nextPlayableIndex = rotatedSources.findIndex(
(s) => canPlaySourceType(s)
);
const nextSourceIndex = nextPlayableIndex !== -1 ? (currentSourceIndex + 1 + nextPlayableIndex) % sortedSources.length : -1;
const nextSource = nextSourceIndex !== -1 ? sortedSources[nextSourceIndex] : null;
const parsedSourceNew = parseCurrentSourceAndPlaybackId({
accessKey: __initialProps?.accessKey ?? null,
aspectRatio: __initialProps?.aspectRatio ?? null,
isHlsSupported: __device.isHlsSupported,
jwt: __initialProps?.jwt ?? null,
playbackRate,
sessionToken: __controls.sessionToken,
source: nextSource,
videoQuality,
ingestPlayback: __initialProps.ingestPlayback
});
return {
...base,
currentSource: parsedSourceNew?.currentSource ?? null,
__controls: {
...base.__controls,
playbackId: playbackId ?? parsedSourceNew?.playbackId ?? null
}
};
}
)
}
}),
{
name: "livepeer-media-controller",
version: 2,
// since these values are persisted across media, only persist volume & videoQuality
partialize: ({ volume, videoQuality }) => ({
volume,
videoQuality
}),
storage: (0, import_middleware.createJSONStorage)(() => storage)
}
)
)
);
const destroy = store.persist.onFinishHydration(
({ videoQuality, volume }) => {
if (videoQuality !== store.getState().videoQuality) {
store.getState().__controlsFunctions.setVideoQuality(videoQuality);
}
if (volume !== store.getState().volume) {
store.getState().__controlsFunctions.requestVolume(volume);
}
}
);
return { store, destroy };
};
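// Example (illustrative): the `device` and `storage` shapes below are assumptions
// inferred from the fields this module reads; in practice they are supplied by
// the framework packages built on top of @livepeer/core.
//
//   const { store, destroy } = createControllerStore({
//     device: {
//       isMobile: false,
//       isHlsSupported: true,
//       isWebRTCSupported: true,
//       screenWidth: 1920,
//       userAgent: navigator.userAgent,
//       version: "unknown",
//     },
//     storage: localStorage,  // anything Storage-like works with createJSONStorage
//     src: "https://example.com/hls/abcd1234/index.m3u8",
//     initialProps: { autoPlay: false, volume: 1, lowLatency: true },
//     playbackId: null,
//   });
//
//   // subscribeWithSelector allows selector-based subscriptions:
//   const unsubscribe = store.subscribe(
//     (state) => state.playing,
//     (playing) => console.log("playing:", playing),
//   );
//
//   // later: unsubscribe(); destroy();  (destroy removes the hydration listener)
//
// On playback errors the store rotates through `sortedSources` (skipping source
// types the configuration or a recent WebRTC timeout rules out) and persists only
// `volume` and `videoQuality` under the "livepeer-media-controller" key.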
// src/media/metrics.ts
var LegacyPlaybackMonitor = class {
active = false;
values = [];
score = null;
averagingSteps = 20;
store;
constructor(store) {
this.store = store;
}
init() {
if (!this.active) {
this.values = [];
this.score = null;
this.active = true;
}
}
reset() {
if (!this.active) {
this.init();
} else {
this.values = [];
}
}
destroy() {
if (this.active) {
this.values = [];
this.score = null;
this.active = false;
}
}
calculateScore() {
if (this.active) {
const latestPlaybackRecord = this.values.length > 0 ? this.values[this.values.length - 1] : void 0;
const currentValue = {
clockTime: Date.now() * 1e-3,
mediaTime: this.store.getState().progress,
score: latestPlaybackRecord ? this.valueToScore(latestPlaybackRecord) : 0
};
this.values.push(currentValue);
if (this.values.length < 2) {
return null;
}
const oldestValue = this.values[0];
if (oldestValue !== void 0) {
const averagedScore = this.valueToScore(oldestValue, currentValue);
if (this.values.length > this.averagingSteps) {
this.values.shift();
}
this.score = Math.max(averagedScore, currentValue.score);
return Math.round(this.score * 10) / 10;
}
}
return null;
}
valueToScore(a, b) {
const rate = this?.store?.getState?.()?.playbackRate ?? 1;
return (b?.mediaTime ?? this.store.getState().progress - a.mediaTime) / (b?.clockTime ?? Date.now() * 1e-3 - a.clockTime) / (rate === "constant" ? 1 : rate);
}
};
var Timer = class {
totalTime = 0;
countStarts = 0;
startTime = 0;
start() {
this.startTime = Date.now();
this.countStarts++;
}
stop() {
this.totalTime += this.startTime > 0 ? Date.now() - this.startTime : 0;
this.startTime = 0;
}
getTotalTime() {
this.totalTime += this.startTime > 0 ? Date.now() - this.startTime : 0;
this.startTime = this.startTime > 0 ? Date.now() : 0;
return this.totalTime;
}
getCountStarts() {
return this.countStarts;
}
};
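// Example (illustrative): Timer accumulates wall-clock milliseconds across
// start/stop cycles; getTotalTime() also counts an interval that is still running.
//
//   const t = new Timer();
//   t.start();
//   // ... roughly 500 ms later ...
//   t.stop();
//   t.getCountStarts();  // => 1
//   t.getTotalTime();    // => ~500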
function isInIframe() {
try {
return typeof window !== "undefined" && window.self !== window.top;
} catch (e) {
return true;
}
}
var LegacyMetricsStatus = class {
requestedPlayTime = null;
firstFrameTime = null;
bootMs;
retryCount = 0;
connected = false;
store;
destroy;
currentMetrics;
previousMetrics = null;
timeWaiting = new Timer();
timeStalled = new Timer();
timeUnpaused = new Timer();
constructor(store, bootMs, opts) {
const currentState = store.getState();
this.store = store;
this.bootMs = bootMs;
const windowHref = typeof window !== "undefined" ? window?.location?.href ?? "" : "";
const pageUrl = isInIframe() ? typeof document !== "undefined" ? document?.referrer || windowHref : windowHref : windowHref;
const playerPrefix = currentState?.currentSource?.type ?? "unknown";
const version = currentState?.__device.version ?? "unknown";
this.currentMetrics = {
autoplay: currentState.__initialProps.autoPlay ? "autoplay" : currentState.__initialProps.preload === "auto" ? "preload-full" : currentState.__initialProps.preload === "metadata" ? "preload-metadata" : "standard",
duration: null,
firstPlayback: null,
nError: null,
nStalled: null,
nWaiting: null,
offset: null,
pageUrl,
playbackScore: null,
player: `${playerPrefix}-${version}`,
sourceType: currentState?.currentSource?.mime ?? "unknown",
sourceUrl: currentState?.currentSource?.src ?? null,
playerHeight: null,
playerWidth: null,
preloadTime: null,
timeStalled: 0,
timeUnpaused: 0,
timeWaiting: 0,
ttff: null,
uid: currentState.__initialProps.viewerId ?? "",
userAgent: String(currentState?.__device?.userAgent ?? "").replace(
/\\|"/gm,
""
),
videoHeight: null,
videoWidth: null
};
this.destroy = store.subscribe((state, prevState) => {
if (this.requestedPlayTime === null && state.__controls.playLastTime !== 0) {
this.requestedPlayTime = Math.max(
state.__controls.playLastTime - bootMs,
0
);
}
if (state.currentSource?.src !== prevState.currentSource?.src) {
const playerPrefix2 = state?.currentSource?.type ?? "unknown";
const version2 = state?.__device.version ?? "unknown";
this.currentMetrics.player = `${playerPrefix2}-${version2}`;
this.currentMetrics.sourceType = state.currentSource?.mime ?? "unknown";
this.currentMetrics.sourceUrl = state.currentSource?.src ?? null;
}
if (state.playing !== prevState.playing) {
if (state.playing) {
this.timeStalled.stop();
this.timeWaiting.stop();
this.timeUnpaused.start();
} else {
this.timeUnpaused.stop();
this.timeStalled.stop();
this.timeWaiting.stop();
}
}
if (opts?.disableProgressListener !== true && state.progress !== prevState.progress && !this.timeUnpaused.startTime) {
this.timeStalled.stop();
this.timeWaiting.stop();
this.timeUnpaused.start();
}
if (state.stalled !== prevState.stalled) {
if (state.stalled) {
this.timeStalled.start();
this.timeUnpaused.stop();
} else if (state.playing) {
this.timeStalled.stop();
this.timeWaiting.stop();
this.timeUnpaused.start();
}
}
if (state.waiting !== prevState.waiting) {
if (state.waiting) {
this.timeWaiting.start();
this.timeUnpaused.stop();
} else if (state.playing) {
this.timeStalled.stop();
this.timeWaiting.stop();
this.timeUnpaused.start();
}
}
});
}
addError(error) {
this.currentMetrics.nError = (this.currentMetrics.nError ?? 0) + 1;
this.currentMetrics.lastError = error;
}
getFirstPlayback() {
return this.currentMetrics.firstPlayback;
}
setFirstPlayback() {
this.currentMetrics.firstPlayback = Date.now() - this.bootMs;
}
getFirstFrameTime() {
return this.firstFrameTime;
}
setFirstFrameTime() {
this.firstFrameTime = Date.now() - this.bootMs;
}
setPlaybackScore(playbackScore) {
this.currentMetrics.playbackScore = playbackScore;
}
setConnected(isConnected) {
this.connected = isConnected;
}
getMetrics() {
const currentMetrics = {
...this.currentMetrics,
playerHeight: this.store.getState().__controls.size?.container?.height || null,
playerWidth: this.store.getState().__controls.size?.container?.width || null,
videoWidth: this.store.getState().__controls.size?.media?.width || null,
videoHeight: this.store.getState().__controls.size?.media?.height || null,
duration: this.store.getState().duration || null,
nWaiting: this.timeWaiting.getCountStarts(),
nStalled: this.timeStalled.getCountStarts(),
timeWaiting: this.timeWaiting.getTotalTime(),
timeStalled: this.timeStalled.getTotalTime(),
timeUnpaused: this.timeUnpaused.getTotalTime(),
offset: this.store.getState().__controls.playbackOffsetMs || null,
// this is the amount of time that a video has had to preload content, from boot until play was requested
preloadTime: this.requestedPlayTime,
// time from when the first `play` event is emitted and the first progress update
ttff: this.firstFrameTime && this.requestedPlayTime && this.firstFrameTime > 0 && this.requestedPlayTime > 0 ? Math.max(this.firstFrameTime - this.requestedPlayTime, 0) : null
};
const previousMetrics = this.previousMetrics;
this.previousMetrics = currentMetrics;
return {
current: currentMetrics,
previous: previousMetrics
};
}
};
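// Example (illustrative): getMetrics() snapshots the current metrics and returns
// the previous snapshot alongside it, so the reporting loop can compare or send
// deltas.
//
//   const status = new LegacyMetricsStatus(store, Date.now(), {});
//   const { current, previous } = status.getMetrics();  // `previous` is null on the first call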
function addLegacyMediaMetricsToStore(store, opts) {
const bootMs = Date.now();
const defaultResponse = {
metrics: null,
destroy: () => {
}
};
if (!store) {
return defaultResponse;
}
if (typeof WebSocket === "undefined") {
console.log("Environment does not support WebSocket");
return defaultResponse;
}
let websocketPromise = null;
let timer = null;
let reportingActive = true;
const metricsStatus = new LegacyMetricsStatus(store, bootMs, opts);
const monitor = new LegacyPlaybackMonitor(store);
const report = async () => {
const ws = await websocketPromise;
if (!reportingActive || !ws) {
return;
}
const playbackScore = monitor.calculateScore();