@livepeer/core-web
Livepeer UI Kit's core web library, for adding reactive stores to video elements.
1,308 lines (1,300 loc) • 46.4 kB
JavaScript
;
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __export = (target, all) => {
for (var name in all)
__defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
if (from && typeof from === "object" || typeof from === "function") {
for (let key of __getOwnPropNames(from))
if (!__hasOwnProp.call(to, key) && key !== except)
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
}
return to;
};
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
// If the importer is in node compatibility mode or this is not an ESM
// file that has been converted to a CommonJS file using a Babel-
// compatible transform (i.e. "__esModule" has not been set), then set
// "default" to the CommonJS "module.exports" for node compatibility.
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
mod
));
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// src/broadcast.ts
var broadcast_exports = {};
__export(broadcast_exports, {
addBroadcastEventListeners: () => addBroadcastEventListeners,
createBroadcastStore: () => createBroadcastStore,
createSilentAudioTrack: () => createSilentAudioTrack,
getBroadcastDeviceInfo: () => getBroadcastDeviceInfo
});
module.exports = __toCommonJS(broadcast_exports);
var import_errors3 = require("@livepeer/core/errors");
var import_utils9 = require("@livepeer/core/utils");
var import_middleware = require("zustand/middleware");
var import_vanilla = require("zustand/vanilla");
// src/media/controls/controller.ts
var import_errors2 = require("@livepeer/core/errors");
var import_utils6 = require("@livepeer/core/utils");
// src/hls/hls.ts
var import_media = require("@livepeer/core/media");
var import_hls = __toESM(require("hls.js"), 1);
// src/media/utils.ts
var import_utils = require("@livepeer/core/utils");
var isClient = () => typeof window !== "undefined";
// src/webrtc/shared.ts
var import_errors = require("@livepeer/core/errors");
var getRTCPeerConnectionConstructor = () => {
if (!isClient()) {
return null;
}
return window.RTCPeerConnection || window.webkitRTCPeerConnection || window.mozRTCPeerConnection || null;
};
function createPeerConnection(host, iceServers) {
const RTCPeerConnectionConstructor = getRTCPeerConnectionConstructor();
if (!RTCPeerConnectionConstructor) {
throw new Error("No RTCPeerConnection constructor found in this browser.");
}
const hostNoPort = host?.split(":")[0];
const defaultIceServers = host ? [
{
urls: `stun:${hostNoPort}`
},
{
urls: `turn:${hostNoPort}`,
username: "livepeer",
credential: "livepeer"
}
] : [];
return new RTCPeerConnectionConstructor({
iceServers: iceServers ? Array.isArray(iceServers) ? iceServers : [iceServers] : defaultIceServers
});
}
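// Editor's note: when no explicit iceServers are provided, the host of the
// (redirected) ingest/playback URL is reused as both a STUN and a TURN server,
// with the hard-coded livepeer/livepeer TURN credentials above.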
var DEFAULT_TIMEOUT = 1e4; // 10 seconds (ms), fallback for SDP POST and redirect lookups
async function negotiateConnectionWithClientOffer(peerConnection, endpoint, ofr, controller, accessControl, sdpTimeout) {
if (peerConnection && endpoint && ofr) {
const response = await postSDPOffer(
endpoint,
ofr.sdp,
controller,
accessControl,
sdpTimeout
);
if (response.ok) {
const answerSDP = await response.text();
await peerConnection.setRemoteDescription(
new RTCSessionDescription({ type: "answer", sdp: answerSDP })
);
const playheadUtc = response.headers.get("Playhead-Utc");
return new Date(playheadUtc ?? /* @__PURE__ */ new Date());
}
if (response.status === 406) {
throw new Error(import_errors.NOT_ACCEPTABLE_ERROR_MESSAGE);
}
const errorMessage = await response.text();
throw new Error(errorMessage);
}
throw new Error("Peer connection not defined.");
}
function preferCodec(sdp, codec) {
const lines = sdp.split("\r\n");
const mLineIndex = lines.findIndex((line) => line.startsWith("m=video"));
if (mLineIndex === -1) return sdp;
const codecRegex = new RegExp(`a=rtpmap:(\\d+) ${codec}(/\\d+)+`);
const codecLine = lines.find((line) => codecRegex.test(line));
if (!codecLine) return sdp;
const codecPayload = codecRegex.exec(codecLine)[1];
const mLineElements = lines[mLineIndex].split(" ");
const reorderedMLine = [
...mLineElements.slice(0, 3),
codecPayload,
...mLineElements.slice(3).filter((payload) => payload !== codecPayload)
];
lines[mLineIndex] = reorderedMLine.join(" ");
return lines.join("\r\n");
}
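// Worked example (editor's addition, not part of the original source): given an
// SDP containing
//   m=video 9 UDP/TLS/RTP/SAVPF 96 98 102
//   a=rtpmap:102 H264/90000
// preferCodec(sdp, "H264") moves payload 102 to the front of the m-line:
//   m=video 9 UDP/TLS/RTP/SAVPF 102 96 98
// so H264 is offered as the preferred video codec.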
async function constructClientOffer(peerConnection, endpoint, noIceGathering) {
if (peerConnection && endpoint) {
const originalCreateOffer = peerConnection.createOffer.bind(peerConnection);
peerConnection.createOffer = async function(...args) {
const originalOffer = await originalCreateOffer.apply(this, args);
return new RTCSessionDescription({
// @ts-ignore (TODO: fix this)
type: originalOffer.type,
// @ts-ignore (TODO: fix this)
sdp: preferCodec(originalOffer.sdp, "H264")
});
};
const offer = await peerConnection.createOffer();
await peerConnection.setLocalDescription(offer);
if (noIceGathering) {
return peerConnection.localDescription;
}
const ofr = await waitToCompleteICEGathering(peerConnection);
if (!ofr) {
throw Error("failed to gather ICE candidates for offer");
}
return ofr;
}
return null;
}
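// A minimal sketch (editor's addition) of how the two functions above combine
// for a WHIP-style publish; `pc` and `whipEndpoint` are hypothetical names and
// error handling is omitted. In createNewWHIP below this runs inside the
// "negotiationneeded" handler once tracks have been attached:
//
//   const pc = createPeerConnection("gateway.example.com", undefined);
//   const offer = await constructClientOffer(pc, whipEndpoint, false);
//   await negotiateConnectionWithClientOffer(
//     pc, whipEndpoint, offer, new AbortController(), {}, 10000
//   );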
var playbackIdPattern = /([/+])([^/+?]+)$/;
var REPLACE_PLACEHOLDER = "PLAYBACK_ID";
var MAX_REDIRECT_CACHE_SIZE = 10;
var redirectUrlCache = /* @__PURE__ */ new Map();
function getCachedTemplate(key) {
const cachedItem = redirectUrlCache.get(key);
if (cachedItem) {
redirectUrlCache.delete(key);
redirectUrlCache.set(key, cachedItem);
}
return cachedItem;
}
function setCachedTemplate(key, value) {
if (redirectUrlCache.has(key)) {
redirectUrlCache.delete(key);
} else if (redirectUrlCache.size >= MAX_REDIRECT_CACHE_SIZE) {
const oldestKey = redirectUrlCache.keys().next().value;
if (oldestKey) {
redirectUrlCache.delete(oldestKey);
}
}
redirectUrlCache.set(key, value);
}
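// Editor's note: getCachedTemplate and setCachedTemplate form a small LRU cache
// keyed by the original endpoint URL. Reading a key re-inserts it so it becomes
// the most recently used entry, and inserting beyond MAX_REDIRECT_CACHE_SIZE
// evicts the oldest key. Illustrative (made-up) values:
//
//   setCachedTemplate("https://a.example/whip", new URL("https://edge.example/PLAYBACK_ID/whip"));
//   getCachedTemplate("https://a.example/whip");   // -> the cached template URL
//   getCachedTemplate("https://missing.example");  // -> undefined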
async function postSDPOffer(endpoint, data, controller, accessControl, sdpTimeout) {
const id = setTimeout(
() => controller.abort(),
sdpTimeout ?? DEFAULT_TIMEOUT
);
const urlForPost = new URL(endpoint);
const parsedMatches = urlForPost.pathname.match(playbackIdPattern);
const currentPlaybackId = parsedMatches?.[2];
const cachedTemplateUrl = getCachedTemplate(endpoint);
if (cachedTemplateUrl && currentPlaybackId) {
urlForPost.host = cachedTemplateUrl.host;
urlForPost.pathname = cachedTemplateUrl.pathname.replace(
REPLACE_PLACEHOLDER,
currentPlaybackId
);
urlForPost.search = cachedTemplateUrl.search;
}
const response = await fetch(urlForPost.toString(), {
method: "POST",
mode: "cors",
headers: {
"content-type": "application/sdp",
...accessControl?.accessKey ? {
"Livepeer-Access-Key": accessControl.accessKey
} : {},
...accessControl?.jwt ? {
"Livepeer-Jwt": accessControl.jwt
} : {}
},
body: data,
signal: controller.signal
});
clearTimeout(id);
return response;
}
async function getRedirectUrl(endpoint, abortController, timeout) {
try {
const cachedTemplateUrl = getCachedTemplate(endpoint);
if (cachedTemplateUrl) {
const currentIngestUrl = new URL(endpoint);
const matches = currentIngestUrl.pathname.match(playbackIdPattern);
const currentPlaybackId = matches?.[2];
if (currentPlaybackId) {
const finalRedirectUrl = new URL(cachedTemplateUrl);
finalRedirectUrl.pathname = cachedTemplateUrl.pathname.replace(
REPLACE_PLACEHOLDER,
currentPlaybackId
);
return finalRedirectUrl;
}
}
const id = setTimeout(
() => abortController.abort(),
timeout ?? DEFAULT_TIMEOUT
);
const response = await fetch(endpoint, {
method: "HEAD",
signal: abortController.signal
});
await response.text();
clearTimeout(id);
const actualRedirectedUrl = new URL(response.url);
if (actualRedirectedUrl) {
const templateForCache = new URL(actualRedirectedUrl);
templateForCache.pathname = templateForCache.pathname.replace(
playbackIdPattern,
`$1${REPLACE_PLACEHOLDER}`
);
if (!templateForCache.searchParams.has("ingestpb") || templateForCache.searchParams.get("ingestpb") !== "true") {
setCachedTemplate(endpoint, templateForCache);
}
}
return actualRedirectedUrl;
} catch (e) {
return null;
}
}
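// Hedged example (editor's addition) of the redirect caching above, using
// made-up URLs: a HEAD request to
//   https://playback.example.com/webrtc/abcd1234
// that redirects to
//   https://edge-1.example.com/webrtc/abcd1234
// is stored as the template
//   https://edge-1.example.com/webrtc/PLAYBACK_ID
// so later requests for other playback IDs on the same endpoint can substitute
// the playback ID into the cached template and skip the HEAD round trip.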
async function waitToCompleteICEGathering(peerConnection) {
return new Promise((resolve) => {
setTimeout(() => {
resolve(peerConnection.localDescription);
}, 5e3);
peerConnection.onicegatheringstatechange = (_ev) => {
if (peerConnection.iceGatheringState === "complete") {
resolve(peerConnection.localDescription);
}
};
});
}
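// Editor's note: waitToCompleteICEGathering resolves with whatever local
// description has been gathered after 5 seconds, even if iceGatheringState
// never reaches "complete", so a slow STUN/TURN lookup cannot stall the offer
// indefinitely.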
// src/media/controls/pictureInPicture.ts
var isPictureInPictureSupported = (element) => {
if (typeof document === "undefined") {
return true;
}
const videoElement = element ?? document.createElement("video");
const isPiPDisabled = Boolean(
videoElement.disablePictureInPicture
);
const { apiType } = getPictureInPictureMode(videoElement);
return Boolean(apiType) && !isPiPDisabled;
};
var getPictureInPictureMode = (element) => {
if (isClient() && element instanceof HTMLVideoElement) {
if (document?.pictureInPictureEnabled) {
return { apiType: "w3c", element };
}
if (element?.webkitSupportsPresentationMode?.("picture-in-picture")) {
return { apiType: "webkit", element };
}
}
return { apiType: null };
};
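// Editor's note: Safari exposes picture-in-picture through the WebKit
// presentation-mode API (webkitSupportsPresentationMode) rather than the w3c
// document.pictureInPictureEnabled API, which is why both paths are probed.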
// src/webrtc/whip.ts
var import_utils8 = require("@livepeer/core/utils");
var STANDARD_FPS = 30;
var VIDEO_WEBRTC_INITIALIZED_ATTRIBUTE = "data-livepeer-video-whip-initialized";
var createNewWHIP = ({
ingestUrl,
element,
callbacks,
sdpTimeout,
noIceGathering,
iceServers
}) => {
if (element.getAttribute(VIDEO_WEBRTC_INITIALIZED_ATTRIBUTE) === "true") {
return {
destroy: () => {
}
};
}
element.setAttribute(VIDEO_WEBRTC_INITIALIZED_ATTRIBUTE, "true");
let destroyed = false;
const abortController = new AbortController();
let peerConnection = null;
getRedirectUrl(ingestUrl, abortController, sdpTimeout).then((redirectUrl) => {
if (destroyed || !redirectUrl) {
return;
}
const redirectUrlString = redirectUrl.toString().replace("video+", "");
peerConnection = createPeerConnection(redirectUrl.host, iceServers);
if (peerConnection) {
peerConnection.addEventListener("negotiationneeded", async (_ev) => {
try {
const ofr = await constructClientOffer(
peerConnection,
redirectUrlString,
noIceGathering
);
await negotiateConnectionWithClientOffer(
peerConnection,
ingestUrl,
ofr,
abortController,
{},
sdpTimeout
);
} catch (e) {
callbacks?.onError?.(e);
}
});
peerConnection.addEventListener(
"connectionstatechange",
async (_ev) => {
try {
if (peerConnection?.connectionState === "failed") {
callbacks?.onError?.(new Error("Failed to connect to peer."));
}
if (peerConnection?.connectionState === "connected") {
callbacks?.onConnected?.();
}
} catch (e) {
callbacks?.onError?.(e);
}
}
);
callbacks?.onRTCPeerConnection?.(peerConnection);
} else {
(0, import_utils8.warn)("Could not create peer connection.");
}
}).catch((e) => callbacks?.onError?.(e));
return {
destroy: () => {
destroyed = true;
abortController?.abort?.();
peerConnection?.close?.();
element?.removeAttribute?.(VIDEO_WEBRTC_INITIALIZED_ATTRIBUTE);
}
};
};
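// A hedged usage sketch (editor's addition, not from the original source) for
// createNewWHIP; `videoEl` and `mediaStream` are assumed to exist and the
// ingest URL is illustrative:
//
//   const { destroy } = createNewWHIP({
//     ingestUrl: "https://playback.example.com/webrtc/stream-key",
//     element: videoEl,
//     callbacks: {
//       onConnected: () => console.log("publishing"),
//       onError: (err) => console.error(err),
//       onRTCPeerConnection: (pc) =>
//         attachMediaStreamToPeerConnection({ mediaStream, peerConnection: pc }),
//     },
//     sdpTimeout: null,
//     noIceGathering: false,
//     iceServers: undefined,
//   });
//   // later: destroy();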
var attachMediaStreamToPeerConnection = async ({
mediaStream,
peerConnection
}) => {
const newVideoTrack = mediaStream?.getVideoTracks?.()?.[0] ?? null;
const newAudioTrack = mediaStream?.getAudioTracks?.()?.[0] ?? null;
const transceivers = peerConnection.getTransceivers();
let videoTransceiver = transceivers.find(
(t) => t.receiver.track.kind === "video"
);
let audioTransceiver = transceivers.find(
(t) => t.receiver.track.kind === "audio"
);
if (newVideoTrack) {
if (videoTransceiver) {
await videoTransceiver.sender.replaceTrack(newVideoTrack);
} else {
videoTransceiver = await peerConnection.addTransceiver(newVideoTrack, {
direction: "sendonly"
});
}
}
if (newAudioTrack) {
if (audioTransceiver) {
await audioTransceiver.sender.replaceTrack(newAudioTrack);
} else {
audioTransceiver = await peerConnection.addTransceiver(newAudioTrack, {
direction: "sendonly"
});
}
}
};
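// Editor's note: attachMediaStreamToPeerConnection reuses existing transceivers
// via replaceTrack when possible (which needs no renegotiation) and only adds
// new "sendonly" transceivers on the first attach, which fires the
// "negotiationneeded" handler installed by createNewWHIP above.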
var getUserMedia = (constraints) => {
if (typeof navigator === "undefined") {
return null;
}
if (navigator?.mediaDevices?.getUserMedia) {
return navigator.mediaDevices.getUserMedia(constraints);
}
if (navigator?.getUserMedia) {
return navigator.getUserMedia(constraints);
}
if (navigator?.webkitGetUserMedia) {
return navigator.webkitGetUserMedia(constraints);
}
if (navigator?.mozGetUserMedia) {
return navigator.mozGetUserMedia(constraints);
}
if (navigator?.msGetUserMedia) {
return navigator.msGetUserMedia(constraints);
}
(0, import_utils8.warn)(
"getUserMedia is not supported in this environment. Check if you are in a secure (HTTPS) context - https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia"
);
return null;
};
var getMediaDevices = () => {
if (typeof navigator === "undefined") {
return null;
}
if (!navigator.mediaDevices) {
(0, import_utils8.warn)(
"mediaDevices was not found in this environment. Check if you are in a secure (HTTPS) context - https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia"
);
return null;
}
return navigator.mediaDevices;
};
var getDisplayMediaExists = () => {
if (typeof navigator === "undefined") {
return false;
}
if (!navigator?.mediaDevices?.getDisplayMedia) {
return false;
}
return true;
};
var getDisplayMedia = (options) => {
if (typeof navigator === "undefined") {
(0, import_utils8.warn)("getDisplayMedia does not exist in this environment.");
return null;
}
if (!navigator?.mediaDevices?.getDisplayMedia) {
(0, import_utils8.warn)("getDisplayMedia does not exist in this environment.");
return null;
}
return navigator.mediaDevices.getDisplayMedia(options);
};
var createMirroredVideoTrack = (originalTrack) => {
if (originalTrack.kind !== "video") {
(0, import_utils8.warn)("Cannot mirror non-video track");
return originalTrack;
}
try {
const canvas = document.createElement("canvas");
canvas.style.position = "absolute";
canvas.style.top = "-9999px";
document.body.appendChild(canvas);
const ctx = canvas.getContext("2d");
if (!ctx) {
(0, import_utils8.warn)("Could not get canvas context for mirroring video");
return originalTrack;
}
const video = document.createElement("video");
video.srcObject = new MediaStream([originalTrack]);
video.autoplay = true;
video.muted = true;
video.playsInline = true;
const settings = originalTrack.getSettings();
if (settings.width && settings.height) {
canvas.width = settings.width;
canvas.height = settings.height;
}
const mirroredStream = canvas.captureStream(STANDARD_FPS);
const mirroredTrack = mirroredStream.getVideoTracks()[0];
let animationFrameId;
const drawFrame = () => {
if (video.readyState >= 2) {
if (canvas.width !== video.videoWidth || canvas.height !== video.videoHeight) {
canvas.width = video.videoWidth;
canvas.height = video.videoHeight;
}
ctx.clearRect(0, 0, canvas.width, canvas.height);
ctx.save();
ctx.scale(-1, 1);
ctx.drawImage(video, -canvas.width, 0, canvas.width, canvas.height);
ctx.restore();
}
animationFrameId = requestAnimationFrame(drawFrame);
};
video.onloadedmetadata = () => {
if (!canvas.width || !canvas.height) {
canvas.width = video.videoWidth;
canvas.height = video.videoHeight;
}
video.play().catch((e) => {
(0, import_utils8.warn)(`Failed to play video in mirroring process: ${e.message}`);
});
drawFrame();
};
originalTrack.addEventListener("ended", () => {
cancelAnimationFrame(animationFrameId);
mirroredTrack.stop();
video.pause();
video.srcObject = null;
if (canvas.parentNode) {
canvas.parentNode.removeChild(canvas);
}
});
return mirroredTrack;
} catch (err) {
(0, import_utils8.warn)(`Error creating mirrored track: ${err.message}`);
return originalTrack;
}
};
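// Editor's note: createMirroredVideoTrack draws the original camera track into
// a hidden canvas with a horizontal flip (ctx.scale(-1, 1)) on every animation
// frame and returns a track from canvas.captureStream(STANDARD_FPS), so the
// flipped frames are what actually get published; the "ended" listener above
// tears the canvas pipeline down when the source track stops.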
// src/broadcast.ts
var delay = (ms) => {
return new Promise((resolve) => setTimeout(resolve, ms));
};
var getBroadcastDeviceInfo = (version) => ({
version,
isMediaDevicesSupported: Boolean(getMediaDevices()),
isRTCPeerConnectionSupported: Boolean(getRTCPeerConnectionConstructor()),
isDisplayMediaSupported: Boolean(getDisplayMediaExists())
});
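// Hedged example (editor's addition): getBroadcastDeviceInfo takes the library
// version string and reports browser capability flags, e.g.
//
//   const device = getBroadcastDeviceInfo("@livepeer/core-web@x.y.z");
//   // -> { version, isMediaDevicesSupported, isRTCPeerConnectionSupported, isDisplayMediaSupported }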
var createBroadcastStore = ({
ingestUrl,
device,
storage,
initialProps
}) => {
const initialControls = {
requestedUpdateDeviceListLastTime: 0,
requestedForceRenegotiateLastTime: 0,
requestedAudioInputDeviceId: "default",
requestedVideoInputDeviceId: null,
previousVideoInputDeviceId: null,
mediaDevices: null,
microphoneTrack: null
};
const store = (0, import_vanilla.createStore)(
(0, import_middleware.subscribeWithSelector)(
(0, import_middleware.persist)(
// biome-ignore lint/correctness/noUnusedFunctionParameters: ignored using `--suppress`
(set, get) => ({
audio: initialProps?.audio !== false,
video: initialProps?.video !== false,
hydrated: false,
mounted: false,
enabled: initialProps?.forceEnabled ?? false,
status: "idle",
mediaStream: null,
mediaDevices: null,
peerConnection: null,
ingestUrl: ingestUrl ?? null,
mediaDeviceIds: {
audioinput: "default",
videoinput: "default"
},
aria: {
audioTrigger: initialProps?.audio === false ? "Turn audio on (space)" : "Turn audio off (space)",
start: "Start broadcasting (b)",
screenshareTrigger: "Share screen (d)",
videoTrigger: initialProps?.video === false ? "Turn video on (v)" : "Turn video off (v)"
},
__initialProps: {
aspectRatio: initialProps?.aspectRatio ?? null,
audio: initialProps?.audio ?? true,
creatorId: initialProps?.creatorId ?? null,
forceEnabled: initialProps?.forceEnabled ?? false,
hotkeys: initialProps?.hotkeys ?? true,
ingestUrl: ingestUrl ?? null,
video: initialProps?.video ?? true,
noIceGathering: initialProps?.noIceGathering ?? false,
silentAudioTrack: initialProps?.silentAudioTrack ?? false,
iceServers: initialProps?.iceServers,
mirrored: initialProps?.mirrored ?? false
},
__device: device,
__controls: initialControls,
__metadata: null,
__controlsFunctions: {
updateMediaStream: (mediaStream) => set(() => ({
mediaStream
})),
setPeerConnection: (peerConnection) => set(() => ({
peerConnection
})),
setIngestUrl: (ingestUrl2) => set(() => ({
ingestUrl: ingestUrl2
})),
requestForceRenegotiate: () => set(({ __controls }) => ({
__controls: {
...__controls,
requestedForceRenegotiateLastTime: Date.now()
}
})),
rotateAudioSource: () => set(({ mediaDeviceIds, mediaDevices, __controls }) => {
if (!mediaDevices) {
(0, import_utils9.warn)(
"Could not rotate audio source, no audio media devices detected."
);
return {};
}
const audioDevices = mediaDevices.filter(
(m) => m.kind === "audioinput"
);
const currentAudioInputIndex = audioDevices.findIndex(
(s) => s.deviceId === mediaDeviceIds.audioinput
);
const nextAudioInputDevice = audioDevices[(currentAudioInputIndex + 1) % audioDevices.length] ?? null;
return {
__controls: {
...__controls,
requestedAudioInputDeviceId: nextAudioInputDevice.deviceId
}
};
}),
rotateVideoSource: () => set(({ mediaDeviceIds, mediaDevices, __controls }) => {
if (!mediaDevices) {
(0, import_utils9.warn)(
"Could not rotate video source, no video media devices detected."
);
return {};
}
const videoDevices = mediaDevices.filter(
(m) => m.kind === "videoinput"
);
const currentVideoInputIndex = videoDevices.findIndex(
(s) => s.deviceId === mediaDeviceIds.videoinput
);
const nextVideoInputDevice = videoDevices[(currentVideoInputIndex + 1) % videoDevices.length] ?? null;
return {
__controls: {
...__controls,
requestedVideoInputDeviceId: nextVideoInputDevice.deviceId
}
};
}),
toggleDisplayMedia: () => set(({ __controls, mediaDeviceIds, aria }) => {
if (mediaDeviceIds.videoinput === "screen") {
return {
aria: {
...aria,
screenshareTrigger: "Share screen (d)"
},
__controls: {
...__controls,
requestedVideoInputDeviceId: __controls.previousVideoInputDeviceId
}
};
}
return {
aria: {
...aria,
screenshareTrigger: "Stop sharing screen (d)"
},
__controls: {
...__controls,
previousVideoInputDeviceId: mediaDeviceIds.videoinput,
requestedVideoInputDeviceId: "screen"
}
};
}),
setInitialState: (deviceIds, audio, video) => set(({ __controls }) => ({
hydrated: true,
audio,
video,
__controls: {
...__controls,
requestedAudioInputDeviceId: deviceIds?.audioinput ?? "default",
requestedVideoInputDeviceId: deviceIds?.videoinput === "screen" ? "default" : deviceIds?.videoinput ?? "default"
}
})),
requestMediaDeviceId: (deviceId, type) => set(({ __controls }) => ({
__controls: {
...__controls,
...type === "videoinput" ? {
requestedVideoInputDeviceId: deviceId
} : type === "audioinput" ? {
requestedAudioInputDeviceId: deviceId
} : {}
}
})),
setStatus: (status) => set(() => ({
status
})),
setMediaDeviceIds: (newMediaDeviceIds) => set(({ mediaDeviceIds }) => ({
mediaDeviceIds: {
...mediaDeviceIds,
...newMediaDeviceIds
}
})),
updateDeviceList: (mediaDevices) => set(({ __controls }) => ({
__controls: {
...__controls,
mediaDevices
}
})),
requestDeviceListInfo: () => set(({ __controls }) => ({
__controls: {
...__controls,
requestedUpdateDeviceListLastTime: Date.now()
}
})),
toggleVideo: () => set(({ video, aria }) => ({
video: !video,
aria: {
...aria,
videoTrigger: !video ? "Turn video off (v)" : "Turn video on (v)"
}
})),
toggleAudio: () => set(({ audio, aria }) => ({
audio: !audio,
aria: {
...aria,
audioTrigger: !audio ? "Turn audio off (space)" : "Turn audio on (space)"
}
})),
toggleEnabled: () => set(({ enabled, aria }) => ({
enabled: !enabled,
aria: {
...aria,
start: enabled ? "Start broadcasting (b)" : "Stop broadcasting (b)"
}
}))
}
}),
{
name: "livepeer-broadcast-controller",
version: 1,
// these values are persisted across broadcasts
partialize: ({ audio, video, mediaDeviceIds }) => ({
audio,
video,
mediaDeviceIds
}),
storage: (0, import_middleware.createJSONStorage)(() => storage)
}
)
)
);
const destroy = store.persist.onFinishHydration(
({ mediaDeviceIds, audio, video }) => {
store.getState().__controlsFunctions.setInitialState(mediaDeviceIds, audio, video);
}
);
return { store, destroy };
};
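// A hedged usage sketch (editor's addition); the ingest URL is illustrative and
// `localStorage` is one possible storage adapter for the persisted settings:
//
//   const { store, destroy } = createBroadcastStore({
//     ingestUrl: "https://playback.example.com/webrtc/stream-key",
//     device: getBroadcastDeviceInfo("x.y.z"),
//     storage: localStorage,
//     initialProps: { audio: true, video: true, hotkeys: true },
//   });
//   store.subscribe((state) => state.status, (status) => console.log(status));
//   // later: destroy();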
var MEDIA_BROADCAST_INITIALIZED_ATTRIBUTE = "data-livepeer-broadcast-initialized";
var allKeyTriggers = [
"KeyL",
"KeyV",
"KeyB",
"Space",
"KeyD",
"KeyC",
"KeyM"
];
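// Editor's note: the key codes above map to the broadcast controls in onKeyUp
// below: Space/KeyL toggle audio, KeyV toggles video, KeyB toggles
// broadcasting, KeyD toggles screen share, KeyC rotates the camera, and KeyM
// rotates the microphone.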
var addBroadcastEventListeners = (element, store, mediaStore) => {
const onKeyUp = (e) => {
e.preventDefault();
e.stopPropagation();
const code = e.code;
if (allKeyTriggers.includes(code)) {
if (code === "Space" || code === "KeyL") {
store.getState().__controlsFunctions.toggleAudio();
} else if (code === "KeyV") {
store.getState().__controlsFunctions.toggleVideo();
} else if (code === "KeyB") {
store.getState().__controlsFunctions.toggleEnabled();
} else if (code === "KeyD") {
store.getState().__controlsFunctions.toggleDisplayMedia();
} else if (code === "KeyC") {
store.getState().__controlsFunctions.rotateVideoSource();
} else if (code === "KeyM") {
store.getState().__controlsFunctions.rotateAudioSource();
}
}
};
const onDeviceChange = () => {
store.getState().__controlsFunctions.requestDeviceListInfo();
};
const mediaDevices = getMediaDevices();
mediaDevices?.addEventListener?.("devicechange", onDeviceChange);
const parentElementOrElement = element?.parentElement ?? element;
if (element) {
if (parentElementOrElement) {
if (store.getState().__initialProps.hotkeys) {
parentElementOrElement.addEventListener("keyup", onKeyUp);
parentElementOrElement.setAttribute("tabindex", "0");
}
}
element.setAttribute(MEDIA_BROADCAST_INITIALIZED_ATTRIBUTE, "true");
}
const { destroy: destroyEffects } = addEffectsToStore(
element,
store,
mediaStore
);
const removeHydrationListener = store.persist.onFinishHydration(
({ mediaDeviceIds, audio, video }) => {
store.getState().__controlsFunctions.setInitialState(mediaDeviceIds, audio, video);
}
);
return {
destroy: () => {
removeHydrationListener?.();
parentElementOrElement?.removeEventListener?.("keyup", onKeyUp);
mediaDevices?.removeEventListener?.("devicechange", onDeviceChange);
destroyEffects?.();
element?.removeAttribute?.(MEDIA_BROADCAST_INITIALIZED_ATTRIBUTE);
}
};
};
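// Hedged usage sketch (editor's addition): addBroadcastEventListeners wires a
// <video> element to the broadcast store plus the media controller store from
// this package; `videoEl`, `mediaStore`, and the createBroadcastStore arguments
// are assumed to come from the surrounding player setup:
//
//   const { store } = createBroadcastStore({ ingestUrl, device, storage, initialProps: {} });
//   const { destroy } = addBroadcastEventListeners(videoEl, store, mediaStore);
//   // later: destroy();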
// Module-scoped cleanup handles, reassigned by the WHIP and media stream
// effects below so that starting a new session tears down the previous one.
var cleanupWhip = () => {
};
var cleanupMediaStream = () => {
};
var addEffectsToStore = (element, store, mediaStore) => {
const destroyErrorCount = mediaStore.subscribe(
({ errorCount }) => errorCount,
async (errorCount) => {
if (errorCount > 0) {
const delayTime = 500 * 2 ** (errorCount - 1);
await delay(delayTime);
store.getState().__controlsFunctions.requestForceRenegotiate();
}
}
);
const destroyMediaSyncMounted = mediaStore.subscribe(
({ mounted }) => mounted,
async (mounted) => {
store.setState({ mounted });
}
);
const destroyMediaSyncError = mediaStore.subscribe(
({ error }) => error,
async (error) => {
if (error?.type === "permissions") {
store.setState((state) => ({
__controls: {
...state.__controls,
requestedVideoInputDeviceId: state.mediaDeviceIds.videoinput
}
}));
}
}
);
const destroyPictureInPictureSupportedMonitor = store.subscribe(
(state) => state.mediaStream,
async () => {
const isPipSupported = isPictureInPictureSupported(element);
if (!isPipSupported) {
mediaStore.setState((state) => ({
__device: {
...state.__device,
isPictureInPictureSupported: isPipSupported
}
}));
}
},
{
equalityFn: (a, b) => a?.id === b?.id
}
);
const destroyWhip = store.subscribe(
({ enabled, ingestUrl, __controls, mounted, __initialProps }) => ({
enabled,
ingestUrl,
requestedForceRenegotiateLastTime: __controls.requestedForceRenegotiateLastTime,
mounted,
noIceGathering: __initialProps.noIceGathering,
silentAudioTrack: __initialProps.silentAudioTrack,
iceServers: __initialProps.iceServers
}),
async ({ enabled, ingestUrl, noIceGathering, iceServers }) => {
await cleanupWhip?.();
if (!enabled) {
return;
}
if (!ingestUrl) {
(0, import_utils9.warn)(
"No ingest URL provided, cannot start stream. Please check the configuration passed to the Broadcast component."
);
return;
}
let unmounted = false;
const onErrorComposed = (err) => {
if (!unmounted) {
mediaStore.getState().__controlsFunctions.setLive(false);
mediaStore.getState().__controlsFunctions?.onError?.(err);
}
};
store.getState().__controlsFunctions.setStatus("pending");
const { destroy } = createNewWHIP({
ingestUrl,
element,
callbacks: {
onRTCPeerConnection: (peerConnection) => {
store.getState().__controlsFunctions.setPeerConnection(peerConnection);
},
onConnected: () => {
store.getState().__controlsFunctions.setStatus("live");
mediaStore.getState().__controlsFunctions.onError(null);
},
onError: onErrorComposed
},
sdpTimeout: null,
noIceGathering,
iceServers
});
cleanupWhip = () => {
unmounted = true;
destroy?.();
store.getState().__controlsFunctions.setStatus("idle");
};
},
{
equalityFn: (a, b) => a.requestedForceRenegotiateLastTime === b.requestedForceRenegotiateLastTime && a.ingestUrl === b.ingestUrl && a.enabled === b.enabled && a.mounted === b.mounted
}
);
const destroyRequestUserMedia = store.subscribe(
(state) => ({
hydrated: state.hydrated,
mounted: state.mounted,
video: state.video,
audio: state.audio,
requestedAudioDeviceId: state.__controls.requestedAudioInputDeviceId,
requestedVideoDeviceId: state.__controls.requestedVideoInputDeviceId,
initialAudioConfig: state.__initialProps.audio,
initialVideoConfig: state.__initialProps.video,
mirrored: state.__initialProps.mirrored,
previousMediaStream: state.mediaStream,
silentAudioTrack: state.__initialProps.silentAudioTrack
}),
async ({
hydrated,
mounted,
audio,
video,
requestedAudioDeviceId,
requestedVideoDeviceId,
previousMediaStream,
initialAudioConfig,
initialVideoConfig,
silentAudioTrack,
mirrored
}) => {
try {
if (!mounted || !hydrated) {
return;
}
const shouldRequestAudio = audio || silentAudioTrack;
if (!shouldRequestAudio && !video) {
console.log(
"|||| FORCING VIDEO ENABLED to request getUserMedia ||||"
);
(0, import_utils9.warn)(
"At least one of audio and video must be requested. Overriding video to be enabled so that `getUserMedia` can be requested."
);
store.setState({ video: true });
video = true;
}
const audioConstraints = typeof initialAudioConfig !== "boolean" ? initialAudioConfig : null;
const videoConstraints = typeof initialVideoConfig !== "boolean" ? initialVideoConfig : null;
console.log(
"|||| Requesting media with audio:",
shouldRequestAudio,
"and video:",
video,
"||||"
);
const stream = await (requestedVideoDeviceId === "screen" ? getDisplayMedia({
// for now, only the microphone audio track is supported - we don't support multiple
// discrete audio tracks
audio: false,
// we assume that if the user is requested to share screen, they want to enable video,
// and we don't listen to the `video` enabled state
//
// we apply the video constraints to the video track
video: videoConstraints ?? true
}) : getUserMedia({
// Always request audio if silentAudioTrack is enabled
audio: shouldRequestAudio && requestedAudioDeviceId && requestedAudioDeviceId !== "default" ? {
...audioConstraints ? audioConstraints : {},
deviceId: {
ideal: requestedAudioDeviceId
}
} : shouldRequestAudio ? {
...audioConstraints ? audioConstraints : {}
} : false,
video: video && requestedVideoDeviceId && requestedVideoDeviceId !== "default" ? {
...videoConstraints ? videoConstraints : {},
deviceId: {
ideal: requestedVideoDeviceId
},
...mirrored ? { facingMode: "user" } : {}
} : video ? {
...videoConstraints ? videoConstraints : {},
...mirrored ? { facingMode: "user" } : {}
} : false
}));
if (stream) {
const microphoneTrack = stream?.getAudioTracks()?.[0] ?? null;
if (microphoneTrack) {
store.setState((state) => ({
__controls: {
...state.__controls,
microphoneTrack
}
}));
}
const allAudioTracks = stream?.getAudioTracks() ?? [];
const allVideoTracks = stream?.getVideoTracks() ?? [];
const allAudioDeviceIds = allAudioTracks.map(
(track) => track?.getSettings()?.deviceId
);
const allVideoDeviceIds = allVideoTracks.map(
(track) => track?.getSettings()?.deviceId
);
const firstAudioDeviceId = allAudioDeviceIds?.[0] ?? null;
const firstVideoDeviceId = allVideoDeviceIds?.[0] ?? null;
store.getState().__controlsFunctions.setMediaDeviceIds({
...firstAudioDeviceId ? { audioinput: firstAudioDeviceId } : {},
...firstVideoDeviceId ? {
videoinput: requestedVideoDeviceId === "screen" ? "screen" : firstVideoDeviceId
} : {}
});
const mergedMediaStream = new MediaStream();
const mergedAudioTrack = allAudioTracks?.[0] ?? previousMediaStream?.getAudioTracks?.()?.[0] ?? null;
let mergedVideoTrack = allVideoTracks?.[0] ?? previousMediaStream?.getVideoTracks?.()?.[0] ?? null;
if (mergedVideoTrack && mirrored && requestedVideoDeviceId !== "screen") {
try {
const videoSettings = mergedVideoTrack.getSettings();
const isFrontFacing = videoSettings.facingMode === "user" || !videoSettings.facingMode;
if (isFrontFacing) {
element.classList.add("livepeer-mirrored-video");
mergedVideoTrack = createMirroredVideoTrack(mergedVideoTrack);
} else {
element.classList.remove("livepeer-mirrored-video");
}
} catch (err) {
(0, import_utils9.warn)(
`Failed to apply video mirroring: ${err.message}`
);
}
} else {
element.classList.remove("livepeer-mirrored-video");
}
if (mergedAudioTrack) mergedMediaStream.addTrack(mergedAudioTrack);
if (mergedVideoTrack) mergedMediaStream.addTrack(mergedVideoTrack);
store.getState().__controlsFunctions.updateMediaStream(mergedMediaStream);
}
} catch (e) {
if (e?.name === "NotAllowedError") {
mediaStore.getState().__controlsFunctions.onError(new Error(import_errors3.PERMISSIONS_ERROR_MESSAGE));
} else {
(0, import_utils9.warn)(e?.message);
}
}
},
{
equalityFn: (a, b) => a.hydrated === b.hydrated && a.mounted === b.mounted && a.requestedAudioDeviceId === b.requestedAudioDeviceId && a.requestedVideoDeviceId === b.requestedVideoDeviceId
}
);
const destroyAudioVideoEnabled = store.subscribe(
(state) => ({
audio: state.audio,
video: state.video,
mediaStream: state.mediaStream,
silentAudioTrack: state.__initialProps.silentAudioTrack,
peerConnection: state.peerConnection,
microphoneTrack: state.__controls.microphoneTrack
}),
async ({
audio,
video,
mediaStream,
silentAudioTrack,
peerConnection,
microphoneTrack
}) => {
if (!mediaStream) return;
for (const videoTrack of mediaStream.getVideoTracks()) {
videoTrack.enabled = video;
}
if (silentAudioTrack) {
if (peerConnection) {
const currentAudioTrack = mediaStream.getAudioTracks()[0];
if (!audio && microphoneTrack) {
if (currentAudioTrack && currentAudioTrack !== microphoneTrack) {
currentAudioTrack.enabled = true;
} else {
const silentTrack = createSilentAudioTrack();
if (currentAudioTrack) {
mediaStream.removeTrack(currentAudioTrack);
}
mediaStream.addTrack(silentTrack);
const audioSender = peerConnection.getSenders().find((s) => s.track && s.track.kind === "audio");
if (audioSender) {
await audioSender.replaceTrack(silentTrack);
}
}
} else if (audio && microphoneTrack) {
if (currentAudioTrack === microphoneTrack) {
microphoneTrack.enabled = true;
} else {
if (currentAudioTrack) {
mediaStream.removeTrack(currentAudioTrack);
}
mediaStream.addTrack(microphoneTrack);
const audioSender = peerConnection.getSenders().find((s) => s.track && s.track.kind === "audio");
if (audioSender) {
await audioSender.replaceTrack(microphoneTrack);
microphoneTrack.enabled = true;
}
}
}
} else {
for (const audioTrack of mediaStream.getAudioTracks()) {
audioTrack.enabled = audio;
}
}
} else {
for (const audioTrack of mediaStream.getAudioTracks()) {
audioTrack.enabled = audio;
}
}
},
{
equalityFn: (a, b) => a.audio === b.audio && a.video === b.video && a.mediaStream?.id === b.mediaStream?.id
}
);
const destroyPeerConnectionAndMediaStream = store.subscribe(
({ mediaStream, peerConnection }) => ({ mediaStream, peerConnection }),
async ({ mediaStream, peerConnection }) => {
if (!mediaStream || !peerConnection) {
return;
}
await attachMediaStreamToPeerConnection({
mediaStream,
peerConnection
});
},
{
equalityFn: (a, b) => a.mediaStream?.id === b.mediaStream?.id && a.peerConnection === b.peerConnection
}
);
const destroyMediaStream = store.subscribe(
(state) => state.mediaStream,
async (mediaStream) => {
await cleanupMediaStream?.();
if (mediaStream) {
element.srcObject = mediaStream;
const togglePlay = () => {
mediaStore.getState().__controlsFunctions.togglePlay(true);
};
element.addEventListener("loadedmetadata", togglePlay);
cleanupMediaStream = () => {
element?.removeEventListener?.("loadedmetadata", togglePlay);
element.srcObject = null;
};
} else {
element.srcObject = null;
}
},
{
equalityFn: (a, b) => a?.id === b?.id
}
);
const destroyUpdateDeviceList = store.subscribe(
(state) => ({
mounted: state.mounted,
requestedUpdateDeviceListLastTime: state.__controls.requestedUpdateDeviceListLastTime
}),
async ({ mounted }) => {
if (!mounted) {
return;
}
const mediaDevices = getMediaDevices();
const devices = await mediaDevices?.enumerateDevices();
if (devices) {
store.getState().__controlsFunctions.updateDeviceList(
devices.filter((d) => d.deviceId)
);
}
},
{
equalityFn: (a, b) => a.mounted === b.mounted && a.requestedUpdateDeviceListLastTime === b.requestedUpdateDeviceListLastTime
}
);
const destroyMapDeviceListToFriendly = store.subscribe(
(state) => ({
mediaDeviceIds: state.mediaDeviceIds,
mediaDevices: state.__controls.mediaDevices
}),
async ({ mediaDeviceIds, mediaDevices }) => {
if (mediaDevices) {
const extendedDevices = mediaDevices.filter((d) => d.deviceId).map((device, i) => ({
deviceId: device.deviceId,
kind: device.kind,
groupId: device.groupId,
label: device.label || null,
friendlyName: device.label ?? `${device.kind === "audioinput" ? "Audio Source" : device.kind === "audiooutput" ? "Audio Output" : "Video Source"} ${i + 1} (${device.deviceId === "default" ? "default" : device.deviceId.slice(0, 6)})`
}));
const isScreenshare = mediaDeviceIds.videoinput === "screen";
if (isScreenshare) {
extendedDevices.push({
deviceId: mediaDeviceIds.videoinput,
label: "Screen share",
groupId: "none",
kind: "videoinput",
friendlyName: "Screen share"
});
}
store.setState({
mediaDevices: extendedDevices
});
}
},
{
equalityFn: (a, b) => a.mediaDeviceIds === b.mediaDeviceIds && a.mediaDevices === b.mediaDevices
}
);
const destroyPeerConnectionAudioHandler = store.subscribe(
(state) => ({
peerConnection: state.peerConnection,
audio: state.audio,
mediaStream: state.mediaStream,
silentAudioTrack: state.__initialProps.silentAudioTrack,
microphoneTrack: state.__controls.microphoneTrack
}),
async ({
peerConnection,
audio,
mediaStream,
silentAudioTrack,
microphoneTrack
}) => {
if (!peerConnection || !mediaStream || !silentAudioTrack) return;
if (!audio && microphoneTrack) {
const currentAudioTracks = mediaStream.getAudioTracks();
const currentAudioTrack = currentAudioTracks[0];
if (currentAudioTrack && currentAudioTrack !== microphoneTrack) {
return;
}
const silentTrack = createSilentAudioTrack();
for (const track of currentAudioTracks) {
mediaStream.removeTrack(track);
}
mediaStream.addTrack(silentTrack);
const audioSender = peerConnection.getSenders().find((s) => s.track && s.track.kind === "audio");
if (audioSender) {
await audioSender.replaceTrack(silentTrack);
}
}
},
{
equalityFn: (a, b) => a.peerConnection === b.peerConnection && a.audio === b.audio
}
);
return {
destroy: () => {
destroyAudioVideoEnabled?.();
destroyErrorCount?.();
destroyMapDeviceListToFriendly?.();
destroyMediaStream?.();
destroyMediaSyncError?.();
destroyMediaSyncMounted?.();
destroyPeerConnectionAndMediaStream?.();
destroyPeerConnectionAudioHandler?.();
destroyPictureInPictureSupportedMonitor?.();
destroyRequestUserMedia?.();
destroyUpdateDeviceList?.();
destroyWhip?.();
}
};
};
var createSilentAudioTrack = () => {
const ctx = new (window.AudioContext || window.webkitAudioContext)();
const oscillator = ctx.createOscillator();
const dst = ctx.createMediaStreamDestination();
const gainNode = ctx.createGain();
gainNode.gain.value = 0;
oscillator.type = "sine";
oscillator.frequency.value = 440;
oscillator.connect(gainNode);
gainNode.connect(dst);
oscillator.start();
const track = dst.stream.getAudioTracks()[0];
track.enabled = true;
return track;
};
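// Editor's note: createSilentAudioTrack routes a 440 Hz oscillator through a
// zero-gain node into a MediaStreamDestination, yielding a live but inaudible
// audio track. The effects above swap it in for the microphone track when audio
// is muted and `silentAudioTrack` is enabled, roughly (hypothetical
// `peerConnection`, inside an async context):
//
//   const silent = createSilentAudioTrack();
//   const sender = peerConnection.getSenders().find((s) => s.track?.kind === "audio");
//   if (sender) await sender.replaceTrack(silent);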
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
addBroadcastEventListeners,
createBroadcastStore,
createSilentAudioTrack,
getBroadcastDeviceInfo
});
//# sourceMappingURL=index.cjs.map