@eatsjobs/media-mock
Version:
Media-Mock is a JavaScript library that simulates media devices (like webcams) in web applications, allowing developers to test and debug media constraints, device configurations, and stream functionality without needing physical devices. This is particularly useful for automated testing and for development on machines without camera hardware.
801 lines (800 loc) • 26.9 kB
JavaScript
// Cached reference to Object.defineProperty, used by the field helpers below.
var D = Object.defineProperty;

// Define-or-assign: when `key` already exists on `target`, redefine it as an
// enumerable/configurable/writable data property; otherwise plain-assign it.
var x = (target, key, value) => {
  if (key in target) {
    return D(target, key, {
      enumerable: !0,
      configurable: !0,
      writable: !0,
      value
    });
  }
  return target[key] = value;
};

// Class-field helper: normalizes non-symbol keys to strings, then delegates
// to the define-or-assign helper above.
var h = (target, key, value) => x(target, typeof key != "symbol" ? key + "" : key, value);
/**
 * Temporarily replace property `e` on object `n` with mock value `t`.
 *
 * Strategy:
 *   1. If the property is configurable (or absent), redefine it via
 *      Object.defineProperty; on failure, fall back to direct assignment.
 *   2. If the property is non-configurable, skip defineProperty (it would
 *      throw) and go straight to direct assignment.
 *
 * Always returns a restore function. When mocking succeeded via
 * defineProperty, restore re-defines the original value (falling back to
 * assignment); when every strategy failed, the returned restore still
 * best-effort-assigns the original value back, logging on failure.
 */
function v(n, e, t) {
const i = n[e]; // original value, captured for the restore closure
let a = !1; // true once the mock value was successfully installed
const r = Object.getOwnPropertyDescriptor(n, e);
if (!r || r.configurable !== !1)
try {
Object.defineProperty(n, e, {
writable: !0,
configurable: !0,
value: t
}), a = !0;
} catch (s) {
console.warn(
`Object.defineProperty failed for ${String(e)}. Attempting fallback...`,
s
);
// Fallback: plain assignment (may succeed when a setter exists or the
// host object rejects defineProperty).
try {
n[e] = t, a = !0, console.log(
`Successfully mocked ${String(e)} via direct assignment`
);
} catch (o) {
// Both strategies failed: return a restore that only tries assignment.
return console.error(
`All mocking strategies failed for ${String(e)}:`,
o
), () => {
try {
n[e] = i;
} catch (u) {
console.warn(
`Failed to restore property ${String(e)}:`,
u
);
}
};
}
}
else {
// Non-configurable property: attempt direct assignment immediately.
console.warn(
`Cannot redefine non-configurable property: ${String(e)}. Attempting fallback strategy...`
);
try {
n[e] = t, a = !0, console.log(
`Successfully mocked ${String(e)} via direct assignment`
);
} catch (s) {
return console.warn(
`Fallback assignment also failed for ${String(e)}:`,
s
), () => {
try {
n[e] = i;
} catch (o) {
console.warn(
`Failed to restore non-configurable property ${String(e)}:`,
o
);
}
};
}
}
// Default restore path: undo via defineProperty first, then assignment.
return () => {
if (a)
try {
Object.defineProperty(n, e, {
writable: !0,
configurable: !0,
value: i
});
} catch {
try {
n[e] = i;
} catch (o) {
console.warn(
`Failed to restore property ${String(e)} via both methods:`,
o
);
}
}
};
}
/**
 * Build a mock MediaDeviceInfo-like object.
 *
 * @param {object} options
 * @param {string} options.deviceId - Device identifier reported to callers.
 * @param {string} options.groupId - Group identifier reported to callers.
 * @param {string} options.kind - Device kind (e.g. "videoinput").
 * @param {string} options.label - Human-readable device label.
 * @param {object} [options.mockCapabilities] - Value returned by
 *   getCapabilities(); defaults to a 1..1280 x 1..720 range.
 * @returns {object} object mimicking MediaDeviceInfo with getCapabilities/toJSON.
 */
function l({
  deviceId,
  groupId,
  kind,
  label,
  mockCapabilities = {
    width: { min: 1, max: 1280 },
    height: { min: 1, max: 720 }
  }
}) {
  const info = {
    deviceId,
    groupId,
    kind,
    label,
    // Capabilities are returned as-is; callers receive the same object.
    getCapabilities: () => mockCapabilities,
    // Mirrors MediaDeviceInfo.toJSON(): only the four identity fields.
    toJSON() {
      return {
        deviceId: `${this.deviceId}`,
        kind: this.kind,
        label: `${this.label}`,
        groupId: `${this.groupId}`
      };
    }
  };
  return info;
}
/**
 * Constraint support tables and built-in device presets.
 *
 * `p` - constraints reported by the mocked getSupportedConstraints() for the
 *       mobile presets (torch supported; groupId/sampleRate/sampleSize not).
 * `F` - desktop variant of `p` with torch disabled.
 * `I` - preset device configurations ("iPhone 12", "Samsung Galaxy M53",
 *       "Mac Desktop"), each listing its advertised video resolutions and
 *       mock MediaDeviceInfo entries built via the factory above.
 *       Exported at the bottom of the file as `devices`.
 */
const p = {
aspectRatio: !0,
deviceId: !0,
displaySurface: !0,
echoCancellation: !0,
facingMode: !0,
frameRate: !0,
groupId: !1,
height: !0,
sampleRate: !1,
sampleSize: !1,
torch: !0,
volume: !0,
whiteBalanceMode: !0,
width: !0,
zoom: !0
}, F = { // desktop constraints: same as `p` but without torch support
...p,
torch: !1
}, I = { // built-in device presets
"iPhone 12": { // four video inputs: front, back dual wide, ultra wide, back
videoResolutions: [
{ width: 1920, height: 1080 },
{ width: 1280, height: 720 },
{ width: 640, height: 480 },
{ width: 320, height: 240 }
],
mediaDeviceInfo: [
l({
deviceId: "A7FB77364106629BF38E043E6B000EE5FD680B9B",
kind: "videoinput",
label: "Front Camera",
groupId: "C1B048C04520A18C3611DC837450814245482489",
mockCapabilities: {
aspectRatio: { max: 4032, min: 33068783068783067e-20 },
deviceId: "1A100C35A33042B643BE0438DBBF9FDC95AF1913",
facingMode: ["user"],
frameRate: { max: 60, min: 1 },
groupId: "C1B048C04520A18C3611DC837450814245482489",
height: { max: 3024, min: 1 },
whiteBalanceMode: ["manual", "continuous"],
width: { max: 4032, min: 1 },
zoom: { max: 4, min: 1 }
}
}),
l({
deviceId: "9729B396E0C2B460BC7B69C0E368EB0B605058A9",
kind: "videoinput",
label: "Back Dual Wide Camera",
groupId: "A1F2417053FF79495E7D01AF37A6C4461CE0C060",
mockCapabilities: {
aspectRatio: { max: 4032, min: 33068783068783067e-20 },
deviceId: "D87C414E22C375BB0697DCB83A24D97BD520624D",
facingMode: ["environment"],
focusDistance: { min: 0.12 },
frameRate: { max: 60, min: 1 },
groupId: "A1F2417053FF79495E7D01AF37A6C4461CE0C060",
height: { max: 3024, min: 1 },
torch: !0,
whiteBalanceMode: ["manual", "continuous"],
width: { max: 4032, min: 1 },
zoom: { max: 2, min: 0.5 }
}
}),
l({
deviceId: "0B74C1149038CA5235F6C2325E53AE22AA920379",
kind: "videoinput",
label: "Back Ultra Wide Camera",
groupId: "B402A3862F28FB8D54BDF33BD7D41874FE175517",
mockCapabilities: {
aspectRatio: { max: 4032, min: 33068783068783067e-20 },
deviceId: "BE00A990BEDE2D324EB0AD51F567EE4ADC24D9B0",
facingMode: ["environment"],
focusDistance: { min: 0.12 },
frameRate: { max: 60, min: 1 },
groupId: "B402A3862F28FB8D54BDF33BD7D41874FE175517",
height: { max: 3024, min: 1 },
torch: !0,
whiteBalanceMode: ["manual", "continuous"],
width: { max: 4032, min: 1 },
zoom: { max: 4, min: 1 }
}
}),
l({
deviceId: "C92FE814FCB4F2F856CDCBFD1C555429774DD0E2",
kind: "videoinput",
label: "Back Camera",
groupId: "14122C2CE97B69A84360822AB87E8206C32B5BD8",
mockCapabilities: {
aspectRatio: { max: 4032, min: 33068783068783067e-20 },
deviceId: "D13A012C1D5C9F9899B40BDA0790184EE57FD282",
facingMode: ["environment"],
focusDistance: { min: 0.12 },
frameRate: { max: 60, min: 1 },
groupId: "14122C2CE97B69A84360822AB87E8206C32B5BD8",
height: { max: 3024, min: 1 },
torch: !0,
whiteBalanceMode: ["manual", "continuous"],
width: { max: 4032, min: 1 },
zoom: { max: 4, min: 1 }
}
})
],
supportedConstraints: p
},
"Samsung Galaxy M53": { // three video inputs: two front, one back (torch)
videoResolutions: [
{ width: 1920, height: 1080 },
{ width: 1280, height: 720 },
{ width: 640, height: 480 }
],
mediaDeviceInfo: [
l({
deviceId: "87fcafb209f5ff2a6d7c8a5d14afe1c9aba9f209330e93933e545e40b102b35f",
groupId: "f70f63d2f4eea57dafe6c6b60833aa69a02f06bb0a6878cb277fb4d70daa9020",
kind: "videoinput",
label: "camera2 1, facing front",
mockCapabilities: {
aspectRatio: { max: 2400, min: 9191176470588235e-19 },
deviceId: "87fcafb209f5ff2a6d7c8a5d14afe1c9aba9f209330e93933e545e40b102b35f",
facingMode: ["user"],
frameRate: { max: 30, min: 1 },
groupId: "f70f63d2f4eea57dafe6c6b60833aa69a02f06bb0a6878cb277fb4d70daa9020",
height: { max: 1088, min: 1 },
resizeMode: ["none", "crop-and-scale"],
width: { max: 2400, min: 1 }
}
}),
l({
deviceId: "81cb5898aebd672ef65d04ed1bc7b00c704f2b6aa94200bc5556ff02c89ea14d",
groupId: "7300f91d6cb037dcaa6fe16abb59f4e9f92fb471e2280ff0e313e07c49cb536c",
kind: "videoinput",
label: "camera2 2, facing front",
mockCapabilities: {
aspectRatio: { max: 2400, min: 9191176470588235e-19 },
deviceId: "81cb5898aebd672ef65d04ed1bc7b00c704f2b6aa94200bc5556ff02c89ea14d",
facingMode: ["user"],
frameRate: { max: 30, min: 1 },
groupId: "7300f91d6cb037dcaa6fe16abb59f4e9f92fb471e2280ff0e313e07c49cb536c",
height: { max: 1088, min: 1 },
resizeMode: ["none", "crop-and-scale"],
width: { max: 2400, min: 1 }
}
}),
l({
deviceId: "99be6eecad8c050052df5dbb08b0460d2715b0a3b18fc5c7f08d6073d312ca34",
groupId: "40f44b864c99ab042a21cf87df882d0ef5c7f88f7cbfcee74cefc1e393b8616b",
kind: "videoinput",
label: "camera2 0, facing back",
mockCapabilities: {
aspectRatio: { max: 3840, min: 462962962962963e-18 },
deviceId: "99be6eecad8c050052df5dbb08b0460d2715b0a3b18fc5c7f08d6073d312ca34",
facingMode: ["environment"],
frameRate: { max: 30, min: 1 },
groupId: "40f44b864c99ab042a21cf87df882d0ef5c7f88f7cbfcee74cefc1e393b8616b",
height: { max: 2160, min: 1 },
resizeMode: ["none", "crop-and-scale"],
width: { max: 3840, min: 1 },
torch: !0
}
})
],
supportedConstraints: p
},
"Mac Desktop": { // single built-in FaceTime camera, desktop constraint set
videoResolutions: [
{ width: 1920, height: 1080 },
{ width: 1280, height: 720 },
{ width: 640, height: 480 }
],
mediaDeviceInfo: [
l({
deviceId: "e91a0ba82ba051029709163c442d340a3919dfabd",
groupId: "7ce19c839ef9ab1a4cba8d4dd4d3c1bbbf3ad",
kind: "videoinput",
label: "FaceTime HD Camera (2C0E:82E3)",
mockCapabilities: {
aspectRatio: { max: 1920, min: 5208333333333333e-19 },
backgroundBlur: [!1],
deviceId: "370CF6B3449B7B73599E8DAEEE75FB41788A0712",
frameRate: { max: 30, min: 1 },
groupId: "F2EFF7249C97B5531FF959C8F977138341165F6B",
height: { max: 1920, min: 1 },
width: { max: 1920, min: 1 }
}
})
],
supportedConstraints: F
}
};
/**
 * Load an image URL into an HTMLImageElement, rejecting after `e` ms.
 *
 * @param {string} n - Image URL (or data URL) to load.
 * @param {number} [e=60000] - Timeout in milliseconds.
 * @returns {Promise<HTMLImageElement>} the decoded image element.
 * @throws {Error} when decoding fails or the timeout elapses first.
 */
async function A(n, e = 60 * 1e3) {
  const t = new Image();
  t.src = n;
  let timerId = null;
  try {
    await Promise.race([
      t.decode(),
      new Promise((i, a) => {
        timerId = setTimeout(
          () => a(
            new Error(`Image load timeout after ${e / 1e3} seconds`)
          ),
          e
        );
      })
    ]);
    return t;
  } catch (i) {
    throw new Error(`Failed to load image: ${n}. Details: ${i}`);
  } finally {
    // Fix: the original never cleared this timer, so it kept running (and
    // rejecting an already-settled race) after decode() finished.
    if (timerId !== null) clearTimeout(timerId);
  }
}
/**
 * Default mock options: enable mocking of every supported
 * navigator.mediaDevices API.
 * @returns {{mediaDevices: {getUserMedia: boolean, getSupportedConstraints: boolean, enumerateDevices: boolean}}}
 */
function M() {
  const mediaDevices = {
    getUserMedia: true,
    getSupportedConstraints: true,
    enumerateDevices: true
  };
  return { mediaDevices };
}
/**
 * Decide whether a media URL points at a video by its file extension.
 *
 * Fix: the extension is now taken from the URL path only — query strings
 * and fragments are stripped first, so "clip.mp4?t=1" is correctly
 * recognized (the original compared against "mp4?t=1" and returned false).
 *
 * @param {string} n - Media URL or file name.
 * @returns {boolean} true when the extension is a known video format.
 */
function w(n) {
  const e = [
    "mp4",
    "webm",
    "ogg",
    "mov",
    "avi",
    "mkv",
    "flv",
    "wmv",
    "m4v",
    "3gp",
    "mpg",
    "mpeg",
    "asf",
    "rm",
    "vob"
  ];
  // Drop query string / fragment before extracting the extension.
  const path = n.split(/[?#]/, 1)[0];
  const t = path.split(".").pop()?.toLowerCase();
  return e.includes(t ?? "");
}
/**
 * Whether requestAnimationFrame is available in this environment
 * (true on browser main threads; false e.g. in Node or some workers).
 * @returns {boolean}
 */
function b() {
  const kind = typeof requestAnimationFrame;
  return kind === "function";
}
/**
 * Wait until video element `n` has loaded enough data to start playback
 * ("loadeddata"), then attempt play(). Rejects on a media "error" event or
 * when `e` ms elapse first. Autoplay failures are tolerated: the promise
 * still resolves after logging a warning.
 */
function k(n, e = 60 * 1e3) {
return new Promise((t, i) => {
let a = null, r = !1; // a: timeout handle, r: settled guard (first outcome wins)
// Cleanup: cancel the timeout and detach both listeners.
const c = () => {
a !== null && (clearTimeout(a), a = null), n.removeEventListener("loadeddata", s), n.removeEventListener("error", o);
}, s = async () => {
// "loadeddata" handler: start playback, resolving even when autoplay
// is blocked by the browser.
if (!r) {
r = !0, c();
try {
await n.play(), t();
} catch (d) {
console.warn("Video autoplay failed (may be blocked by browser):", d), t();
}
}
}, o = () => {
// "error" handler: log diagnostics about the failed source and reject.
var d;
r || (r = !0, c(), console.error(
"Failed to load video source. Ensure the format is supported and the URL is valid."
), console.error("Video error details:", {
error: (d = n.error) == null ? void 0 : d.message,
target: n,
networkState: n.networkState,
readyState: n.readyState,
currentSrc: n.currentSrc
}), i(new Error(`Video failed to load: ${n.src}`)));
};
// Arm the timeout, attach one-shot listeners, then kick off loading.
a = setTimeout(() => {
r || (r = !0, c(), i(
new Error(`Video loading timed out after ${e / 1e3} seconds`)
));
}, e), n.addEventListener("loadeddata", s, { once: !0 }), n.addEventListener("error", o, { once: !0 }), n.load();
});
}
/**
 * Load the media at URL `n` into a DOM element suitable for canvas drawing.
 * URLs with a known video extension produce a muted, looping, hidden,
 * autoplaying <video>; anything else is decoded as an image.
 *
 * @param {string} n - Media URL.
 * @param {number} [e=60000] - Load timeout in milliseconds.
 * @returns {Promise<HTMLVideoElement|HTMLImageElement>}
 */
async function B(n, e = 60 * 1e3) {
  if (!w(n)) {
    return await A(n, e);
  }
  const video = document.createElement("video");
  video.src = n;
  video.muted = true;
  video.playsInline = true;
  video.loop = true;
  video.autoplay = true;
  video.hidden = true;
  video.crossOrigin = "anonymous";
  await k(video, e);
  return video;
}
/**
 * MediaMockClass: simulates camera devices by replacing the
 * `navigator.mediaDevices` APIs (getUserMedia, enumerateDevices,
 * getSupportedConstraints) with canvas-backed implementations.
 * A configured image or video is drawn in a loop onto a hidden canvas,
 * and the canvas' captured stream is what the mocked getUserMedia returns.
 *
 * Exported as `MediaMockClass`; a shared singleton is exported as `MediaMock`.
 */
class E {
  constructor() {
    // Default settings: a 1x1 transparent PNG as source media and the
    // "iPhone 12" preset as the active device.
    h(this, "settings", {
      mediaURL: "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVR42mP8/w8AAgMBgQn2nAAAAABJRU5ErkJggg==",
      device: I["iPhone 12"],
      constraints: I["iPhone 12"].supportedConstraints,
      canvasScaleFactor: 1,
      mediaTimeout: 60 * 1e3
      // 60 seconds
    });
    // Element ids used when the canvas/image are appended in debug mode.
    h(this, "mediaMockImageId", "media-mock-image");
    h(this, "mediaMockCanvasId", "media-mock-canvas");
    h(this, "currentImage");
    h(this, "currentVideo");
    // Restore callbacks returned by the property mocker, keyed by API name.
    h(this, "mapUnmockFunction", /* @__PURE__ */ new Map());
    h(this, "currentStream");
    // Drawing-loop handles; at most one of the two is active at a time.
    h(this, "intervalId", null);
    h(this, "rafId", null);
    h(this, "debug", !1);
    h(this, "canvas");
    h(this, "ctx");
    // Caller hook applied to the mocked video tracks before stream creation.
    h(this, "mockedVideoTracksHandler", (e) => e);
    h(this, "fps", 30);
    h(this, "resolution", {
      width: 640,
      height: 480
    });
    // Timestamp of the last frame drawn; used to throttle RAF to `fps`.
    h(this, "lastDrawTime", 0);
  }
  /**
   * The Image or the video that will be used as source.
   * @public
   * @param {string} mediaURL
   * @returns {Promise<MediaMockClass>}
   * @throws {Error} for an empty/non-string URL or when loading fails.
   */
  async setMediaURL(e) {
    if (!e || typeof e != "string" || e.trim() === "")
      throw new Error("Invalid mediaURL: must be a non-empty string");
    const t = await B(e, this.settings.mediaTimeout);
    // Swap in the freshly loaded element, releasing the previous one; if a
    // drawing loop is already running, restart it on the new media.
    return this.settings.mediaURL = e, t instanceof HTMLImageElement ? (this.currentVideo && (this.currentVideo.pause(), this.currentVideo.src = ""), this.currentImage = t, this.currentVideo = void 0) : t instanceof HTMLVideoElement && (this.currentImage && (this.currentImage.src = ""), this.currentVideo = t, this.currentImage = void 0), (this.intervalId !== null || this.rafId !== null) && await this.startDrawingLoop(), this;
  }
  /**
   * (Re)start the canvas drawing loop for the currently loaded media.
   * @throws {Error} when the media implied by settings.mediaURL is not loaded.
   */
  async startDrawingLoop() {
    this.stopDrawingLoop();
    const { width: e, height: t } = this.resolution;
    if (w(this.settings.mediaURL)) {
      if (!this.currentVideo)
        throw new Error("Video media not loaded");
      this.startVideoDrawingLoop(e, t);
    } else {
      if (!this.currentImage)
        throw new Error("Image media not loaded");
      this.debug && console.log(`
Canvas: ${e}x${t},
Image: ${this.currentImage.naturalWidth}x${this.currentImage.naturalHeight}`), this.startImageDrawingLoop(e, t);
    }
  }
  /**
   * Start drawing loop for video using RequestAnimationFrame with FPS throttling
   */
  startVideoDrawingLoop(e, t) {
    const i = 1e3 / this.fps; // minimum milliseconds between frames
    this.lastDrawTime = performance.now();
    const a = () => {
      if (!this.ctx || !this.currentVideo)
        return;
      const r = performance.now();
      // Repaint only when a full frame interval has elapsed, then reschedule.
      r - this.lastDrawTime >= i && (this.ctx.clearRect(0, 0, e, t), this.ctx.fillStyle = "#ffffff", this.ctx.fillRect(0, 0, e, t), this.ctx.drawImage(this.currentVideo, 0, 0, e, t), this.lastDrawTime = r), b() && (this.rafId = requestAnimationFrame(a));
    };
    // Prefer RAF; fall back to setInterval where RAF is unavailable.
    b() ? this.rafId = requestAnimationFrame(a) : this.intervalId = setInterval(() => {
      !this.ctx || !this.currentVideo || (this.ctx.clearRect(0, 0, e, t), this.ctx.fillStyle = "#ffffff", this.ctx.fillRect(0, 0, e, t), this.ctx.drawImage(this.currentVideo, 0, 0, e, t));
    }, i);
  }
  /**
   * Start drawing loop for image using RequestAnimationFrame.
   * Draws the image letterboxed/centered on the canvas each frame.
   */
  startImageDrawingLoop(e, t) {
    const i = () => {
      if (!this.ctx || !this.currentImage)
        return;
      this.currentImage.id = this.mediaMockImageId, this.ctx.clearRect(0, 0, e, t), this.ctx.fillStyle = "#ffffff", this.ctx.fillRect(0, 0, e, t);
      const { naturalWidth: a, naturalHeight: r } = this.currentImage;
      // Guard against division by zero / non-finite aspect ratios.
      if (r === 0 || t === 0 || !Number.isFinite(a / r) || !Number.isFinite(e / t))
        return;
      const c = a / r, s = e / t; // image vs canvas aspect ratios
      let o, u, d, f; // draw width, draw height, x offset, y offset
      const m = this.settings.canvasScaleFactor;
      // Fit to the limiting dimension (scaled by the scale factor), centered.
      c > s ? (o = e * m, u = e * m / c, d = (e - o) / 2, f = (t - u) / 2) : (u = t * m, o = t * m * c, d = (e - o) / 2, f = (t - u) / 2), this.ctx.drawImage(
        this.currentImage,
        d,
        f,
        o,
        u
      ), b() && (this.rafId = requestAnimationFrame(i));
    };
    if (b())
      this.rafId = requestAnimationFrame(i);
    else {
      const a = 1e3 / this.fps;
      this.intervalId = setInterval(i, a);
    }
  }
  /**
   * Stop the drawing loop (either RAF or setInterval)
   */
  stopDrawingLoop() {
    this.rafId !== null && (cancelAnimationFrame(this.rafId), this.rafId = null), this.intervalId !== null && (clearInterval(this.intervalId), this.intervalId = null), this.lastDrawTime = 0;
  }
  /**
   * Add a new device and trigger a device change event.
   *
   * @public
   * @param {MockMediaDeviceInfo} newDevice
   */
  addMockDevice(e) {
    return this.settings.device.mediaDeviceInfo.push(e), this.triggerDeviceChange(), this;
  }
  /**
   * Remove a device and trigger a device change event.
   *
   * @public
   * @param {string} deviceId
   */
  removeMockDevice(e) {
    return this.settings.device.mediaDeviceInfo = this.settings.device.mediaDeviceInfo.filter(
      (t) => t.deviceId !== e
    ), this.triggerDeviceChange(), this;
  }
  // Fire "devicechange" on navigator.mediaDevices so listeners re-enumerate.
  triggerDeviceChange() {
    navigator.mediaDevices.dispatchEvent(new Event("devicechange"));
  }
  /**
   * Debug mode will append the canvas and loaded image to the body if available.
   *
   * @public
   */
  enableDebugMode() {
    return this.debug = !0, this.canvas != null && document.querySelector(this.mediaMockCanvasId) == null && (this.canvas.style.border = "10px solid red", document.body.append(this.canvas)), this.currentImage != null && document.querySelector(this.mediaMockImageId) == null && (this.currentImage.style.border = "10px solid red", document.body.append(this.currentImage)), this;
  }
  /**
   * Removes the debug canvas and image from the body.
   *
   * @public
   * @returns {typeof MediaMock}
   */
  disableDebugMode() {
    var i, a;
    this.debug = !1;
    const e = document.getElementById(this.mediaMockCanvasId), t = document.getElementById(this.mediaMockImageId);
    return e == null || e.remove(), t == null || t.remove(), (i = this.currentImage) != null && i.parentNode && (this.currentImage.style.border = "", this.currentImage.remove()), (a = this.canvas) != null && a.parentNode && (this.canvas.style.border = "", this.canvas.remove()), this;
  }
  /**
   * Register a hook that can transform the mocked video tracks before the
   * MediaStream is built in getMockStream().
   * @public
   */
  setMockedVideoTracksHandler(e) {
    return this.mockedVideoTracksHandler = e, this;
  }
  /**
   * Replaces the navigator.mediaDevices functions.
   *
   * @public
   * @param {DeviceConfig} device
   * @param {MockOptions} [options=createDefaultMockOptions()]
   * @returns {typeof MediaMock}
   */
  mock(e, t = M()) {
    // Environments without mediaDevices (e.g. insecure contexts) get a
    // minimal EventTarget stand-in so listeners/dispatch still work.
    if (this.settings.device = e, typeof navigator.mediaDevices > "u") {
      class i extends EventTarget {
      }
      v(navigator, "mediaDevices", new i());
    }
    // Each mocked API stores its restore callback for unmock().
    if (t != null && t.mediaDevices.getUserMedia) {
      const i = v(
        navigator.mediaDevices,
        "getUserMedia",
        (a) => this.getMockStream(a)
      );
      this.mapUnmockFunction.set("getUserMedia", i);
    }
    if (t != null && t.mediaDevices.getSupportedConstraints) {
      const i = v(
        navigator.mediaDevices,
        "getSupportedConstraints",
        () => this.settings.constraints
      );
      this.mapUnmockFunction.set(
        "getSupportedConstraints",
        i
      );
    }
    if (t != null && t.mediaDevices.enumerateDevices) {
      const i = v(
        navigator.mediaDevices,
        "enumerateDevices",
        async () => this.settings.device.mediaDeviceInfo
      );
      this.mapUnmockFunction.set("enumerateDevices", i);
    }
    return this;
  }
  /**
   * Stops the mock and removes the mock functions.
   *
   * @public
   * @returns {typeof MediaMock}
   */
  unmock() {
    return this.stopMockStream(), this.disableDebugMode(), this.mapUnmockFunction.forEach((e) => {
      e();
    }), this.mapUnmockFunction.clear(), this;
  }
  /**
   * Stop tracks and drawing, and release the canvas, media elements and
   * stream so everything can be garbage collected.
   */
  stopMockStream() {
    var e, t, i, a;
    this.stopDrawingLoop(), (t = (e = this.currentStream) == null ? void 0 : e.getVideoTracks()) == null || t.forEach((r) => {
      r.stop();
    }), (a = (i = this.currentStream) == null ? void 0 : i.stop) == null || a.call(i), this.currentStream = void 0, this.currentVideo && (this.currentVideo.pause(), this.currentVideo.src = "", this.currentVideo = void 0), this.currentImage && (this.currentImage.src = "", this.currentImage = void 0), this.canvas && (this.canvas.parentNode && this.canvas.remove(), this.canvas = void 0), this.ctx = void 0;
  }
  /**
   * Set the scale factor for the image in the canvas.
   * Values between 0 and N, where lower values create more margin,
   * and higher values fill more of the canvas. Clamped to >= 0.1.
   *
   * @public
   * @param {number} factor - Scale factor between 0 and N
   * @returns {typeof MediaMock}
   */
  setCanvasScaleFactor(e) {
    return this.settings.canvasScaleFactor = Math.max(0.1, e), this;
  }
  /**
   * Set the timeout for media loading (images and videos) in milliseconds.
   *
   * @public
   * @param {number} timeoutMs - Timeout in milliseconds (default: 60000 = 60 seconds)
   * @returns {typeof MediaMock}
   * @throws {Error} for non-positive timeouts.
   */
  setMediaTimeout(e) {
    if (e <= 0)
      throw new Error("Media timeout must be a positive number");
    return this.settings.mediaTimeout = e, this;
  }
  /**
   * Mocked getUserMedia implementation: builds a canvas sized from the
   * constraints, starts the drawing loop, captures the canvas stream, and
   * decorates its video tracks with the selected device's label/id,
   * getCapabilities and getSettings.
   * @param {MediaStreamConstraints} e
   * @returns {Promise<MediaStream>}
   */
  async getMockStream(e) {
    this.resolution = this.getResolution(e, this.settings.device), this.fps = this.getFPSFromConstraints(e), this.canvas = document.createElement("canvas"), this.canvas.id = this.mediaMockCanvasId;
    const { width: t, height: i } = this.resolution;
    if (this.canvas.width = t, this.canvas.height = i, this.ctx = this.canvas.getContext("2d"), !this.ctx)
      throw new Error("Failed to get 2D canvas context");
    this.ctx.fillStyle = "#ffffff", this.ctx.fillRect(0, 0, t, i), await this.setMediaURL(this.settings.mediaURL), await this.startDrawingLoop(), this.debug && this.enableDebugMode();
    const a = this.canvas.captureStream(this.fps), r = (a == null ? void 0 : a.getVideoTracks()) ?? [], c = this.getFacingModeFromConstraints(e), s = this.getDeviceForFacingMode(
      c,
      this.settings.device
    );
    return r.forEach((o) => {
      // Give each canvas track the identity of the matched mock device, and
      // backfill getCapabilities where the browser does not provide it.
      if (s != null && s.label && Object.defineProperty(o, "label", {
        value: s.label,
        writable: !1,
        configurable: !1
      }), s != null && s.deviceId && Object.defineProperty(o, "id", {
        value: s.deviceId,
        writable: !1,
        configurable: !1
      }), !o.getCapabilities)
        if (s != null && s.getCapabilities)
          o.getCapabilities = (function() {
            return s.getCapabilities();
          }).bind(o);
        else {
          // No device capabilities: synthesize ranges from the preset's
          // advertised resolutions.
          const d = this.settings.device.videoResolutions, f = d.map((g) => g.width), m = d.map((g) => g.height);
          o.getCapabilities = (function() {
            return {
              width: { min: Math.min(...f), max: Math.max(...f) },
              height: { min: Math.min(...m), max: Math.max(...m) },
              frameRate: { min: 1, max: 60 },
              facingMode: ["user", "environment"],
              resizeMode: ["none", "crop-and-scale"]
            };
          }).bind(o);
        }
      // Wrap getSettings so frameRate/width/height are always populated.
      const u = o.getSettings.bind(o);
      o.getSettings = () => {
        const d = u();
        return d.frameRate === void 0 && (d.frameRate = this.fps), (d.width === void 0 || d.height === void 0) && (d.width = this.resolution.width, d.height = this.resolution.height), d;
      };
    }), this.currentStream = new MediaStream(
      this.mockedVideoTracksHandler(r)
    ), this.currentStream;
  }
  // Pull the requested frame rate out of the constraints (number or
  // {ideal} form); defaults to 30.
  getFPSFromConstraints(e) {
    return typeof e.video == "object" && e.video.frameRate ? typeof e.video.frameRate == "number" ? e.video.frameRate : e.video.frameRate.ideal || 30 : 30;
  }
  /**
   * Extract facingMode from constraints (can be a string or ConstrainDOMString).
   * Prefers `ideal` over `exact`; arrays yield their first entry.
   */
  getFacingModeFromConstraints(e) {
    if (typeof e.video == "object" && e.video.facingMode) {
      const t = e.video.facingMode;
      if (typeof t == "string")
        return t;
      const i = t;
      if (i.ideal) {
        const a = i.ideal;
        return Array.isArray(a) ? a[0] : a;
      }
      if (i.exact) {
        const a = i.exact;
        return Array.isArray(a) ? a[0] : a;
      }
    }
    return null;
  }
  /**
   * Get the appropriate camera device based on facingMode.
   * Returns the last matching videoinput; falls back to the first
   * videoinput when none match (or no facingMode was requested).
   */
  getDeviceForFacingMode(e, t) {
    const i = t.mediaDeviceInfo.filter(
      (r) => r.kind === "videoinput"
    );
    if (!i.length)
      return;
    if (!e)
      return i[0];
    const a = i.filter((r) => {
      const s = r.getCapabilities().facingMode;
      return Array.isArray(s) && s.includes(e);
    });
    return a.length > 0 ? a[a.length - 1] : i[0];
  }
  /**
   * Get the appropriate resolution based on device orientation and constraints.
   * Tries an exact match first, then a best-fit score, then a fallback.
   * @param constraints Media constraints
   * @param deviceConfig Device configuration
   * @returns Resolution object with width and height
   */
  getResolution(e, t) {
    const i = window.innerHeight > window.innerWidth, a = e.video || {}, r = this.extractConstraintValue(
      a.width,
      640
    ), c = this.extractConstraintValue(
      a.height,
      480
    );
    let s = this.findExactMatch(
      t.videoResolutions,
      r,
      c,
      i
    );
    return s || (s = this.findBestFitResolution(
      t.videoResolutions,
      r,
      c,
      i
    )), s || (s = this.getFallbackResolution(
      t.videoResolutions,
      i
    )), s;
  }
  /**
   * Extract numeric value from constraint (handles number, object with ideal/exact, etc.)
   */
  extractConstraintValue(e, t) {
    return typeof e == "number" ? e : e && typeof e == "object" ? e.ideal ?? e.exact ?? e.max ?? t : t;
  }
  /**
   * Find exact resolution match considering orientation
   * (in portrait, landscape entries are returned with width/height swapped).
   */
  findExactMatch(e, t, i, a) {
    const r = e.find(
      (c) => c.width === t && c.height === i
    );
    if (r)
      return a && r.width > r.height ? { width: r.height, height: r.width } : r;
    if (a) {
      const c = e.find(
        (s) => s.width === i && s.height === t
      );
      if (c)
        return { width: c.height, height: c.width };
    }
  }
  /**
   * Find best resolution match by aspect ratio and size preference.
   * Score = 2 * aspect-ratio distance + relative pixel-count distance; lowest wins.
   */
  findBestFitResolution(e, t, i, a) {
    const r = t / i, c = t * i, s = e.map((o) => {
      const u = a && o.width > o.height ? { width: o.height, height: o.width } : o, d = u.width / u.height, f = u.width * u.height, m = Math.abs(d - r), g = Math.abs(f - c) / c, C = m * 2 + g;
      return { resolution: u, score: C };
    });
    return s.sort((o, u) => o.score - u.score), s[0].resolution;
  }
  /**
   * Get fallback resolution based on orientation
   */
  getFallbackResolution(e, t) {
    if (e.length === 0)
      return { width: 640, height: 480 };
    if (t) {
      // Portrait: prefer a native portrait entry, otherwise rotate the first.
      const a = e.find((c) => c.height > c.width);
      if (a) return a;
      const r = e[0];
      return { width: r.height, height: r.width };
    }
    return e.find((a) => a.width >= a.height) || e[0];
  }
}
// Shared singleton instance; most consumers use `MediaMock` directly.
const S = new E();
export {
  S as MediaMock,
  E as MediaMockClass,
  l as createMediaDeviceInfo,
  I as devices
};