/**
 * @audiowave/react
 * React audio visualization component
 */
import De, { forwardRef as Me, useRef as _, useState as B, useCallback as G, useEffect as ie, useLayoutEffect as ke, useImperativeHandle as je } from "react";
var ye = { exports: {} }, me = {};
/**
* @license React
* react-jsx-runtime.production.js
*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
var we;
function Fe() {
if (we) return me;
we = 1;
var e = Symbol.for("react.transitional.element"), r = Symbol.for("react.fragment");
function n(i, s, a) {
var u = null;
if (a !== void 0 && (u = "" + a), s.key !== void 0 && (u = "" + s.key), "key" in s) {
a = {};
for (var m in s)
m !== "key" && (a[m] = s[m]);
} else a = s;
return s = a.ref, {
$$typeof: e,
type: i,
key: u,
ref: s !== void 0 ? s : null,
props: a
};
}
return me.Fragment = r, me.jsx = n, me.jsxs = n, me;
}
var Ee = {};
/**
* @license React
* react-jsx-runtime.development.js
*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
var Oe;
function ze() {
return Oe || (Oe = 1, process.env.NODE_ENV !== "production" && function() {
function e(t) {
if (t == null) return null;
if (typeof t == "function")
return t.$$typeof === x ? null : t.displayName || t.name || null;
if (typeof t == "string") return t;
switch (t) {
case O:
return "Fragment";
case J:
return "Profiler";
case X:
return "StrictMode";
case Y:
return "Suspense";
case ue:
return "SuspenseList";
case A:
return "Activity";
}
if (typeof t == "object")
switch (typeof t.tag == "number" && console.error(
"Received an unexpected object in getComponentNameFromType(). This is likely a bug in React. Please file an issue."
), t.$$typeof) {
case D:
return "Portal";
case b:
return (t.displayName || "Context") + ".Provider";
case T:
return (t._context.displayName || "Context") + ".Consumer";
case z:
var o = t.render;
return t = t.displayName, t || (t = o.displayName || o.name || "", t = t !== "" ? "ForwardRef(" + t + ")" : "ForwardRef"), t;
case W:
return o = t.displayName || null, o !== null ? o : e(t.type) || "Memo";
case P:
o = t._payload, t = t._init;
try {
return e(t(o));
} catch {
}
}
return null;
}
function r(t) {
return "" + t;
}
function n(t) {
try {
r(t);
var o = !1;
} catch {
o = !0;
}
if (o) {
o = console;
var d = o.error, g = typeof Symbol == "function" && Symbol.toStringTag && t[Symbol.toStringTag] || t.constructor.name || "Object";
return d.call(
o,
"The provided key is an unsupported type %s. This value must be coerced to a string before using it here.",
g
), r(t);
}
}
function i(t) {
if (t === O) return "<>";
if (typeof t == "object" && t !== null && t.$$typeof === P)
return "<...>";
try {
var o = e(t);
return o ? "<" + o + ">" : "<...>";
} catch {
return "<...>";
}
}
function s() {
var t = C.A;
return t === null ? null : t.getOwner();
}
function a() {
return Error("react-stack-top-frame");
}
function u(t) {
if (U.call(t, "key")) {
var o = Object.getOwnPropertyDescriptor(t, "key").get;
if (o && o.isReactWarning) return !1;
}
return t.key !== void 0;
}
function m(t, o) {
function d() {
Z || (Z = !0, console.error(
"%s: `key` is not a prop. Trying to access it will result in `undefined` being returned. If you need to access the same value within the child component, you should pass it as a different prop. (https://react.dev/link/special-props)",
o
));
}
d.isReactWarning = !0, Object.defineProperty(t, "key", {
get: d,
configurable: !0
});
}
function c() {
var t = e(this.type);
return ae[t] || (ae[t] = !0, console.error(
"Accessing element.ref was removed in React 19. ref is now a regular prop. It will be removed from the JSX Element type in a future release."
)), t = this.props.ref, t !== void 0 ? t : null;
}
function f(t, o, d, g, M, S, oe, K) {
return d = S.ref, t = {
$$typeof: F,
type: t,
key: o,
props: S,
_owner: M
}, (d !== void 0 ? d : null) !== null ? Object.defineProperty(t, "ref", {
enumerable: !1,
get: c
}) : Object.defineProperty(t, "ref", { enumerable: !1, value: null }), t._store = {}, Object.defineProperty(t._store, "validated", {
configurable: !1,
enumerable: !1,
writable: !0,
value: 0
}), Object.defineProperty(t, "_debugInfo", {
configurable: !1,
enumerable: !1,
writable: !0,
value: null
}), Object.defineProperty(t, "_debugStack", {
configurable: !1,
enumerable: !1,
writable: !0,
value: oe
}), Object.defineProperty(t, "_debugTask", {
configurable: !1,
enumerable: !1,
writable: !0,
value: K
}), Object.freeze && (Object.freeze(t.props), Object.freeze(t)), t;
}
function E(t, o, d, g, M, S, oe, K) {
var N = o.children;
if (N !== void 0)
if (g)
if (R(N)) {
for (g = 0; g < N.length; g++)
p(N[g]);
Object.freeze && Object.freeze(N);
} else
console.error(
"React.jsx: Static children should always be an array. You are likely explicitly calling React.jsxs or React.jsxDEV. Use the Babel transform instead."
);
else p(N);
if (U.call(o, "key")) {
N = e(t);
var L = Object.keys(o).filter(function(fe) {
return fe !== "key";
});
g = 0 < L.length ? "{key: someKey, " + L.join(": ..., ") + ": ...}" : "{key: someKey}", Q[N + g] || (L = 0 < L.length ? "{" + L.join(": ..., ") + ": ...}" : "{}", console.error(
`A props object containing a "key" prop is being spread into JSX:
let props = %s;
<%s {...props} />
React keys must be passed directly to JSX without using spread:
let props = %s;
<%s key={someKey} {...props} />`,
g,
N,
L,
N
), Q[N + g] = !0);
}
if (N = null, d !== void 0 && (n(d), N = "" + d), u(o) && (n(o.key), N = "" + o.key), "key" in o) {
d = {};
for (var ee in o)
ee !== "key" && (d[ee] = o[ee]);
} else d = o;
return N && m(
d,
typeof t == "function" ? t.displayName || t.name || "Unknown" : t
), f(
t,
N,
S,
M,
s(),
d,
oe,
K
);
}
function p(t) {
typeof t == "object" && t !== null && t.$$typeof === F && t._store && (t._store.validated = 1);
}
var y = De, F = Symbol.for("react.transitional.element"), D = Symbol.for("react.portal"), O = Symbol.for("react.fragment"), X = Symbol.for("react.strict_mode"), J = Symbol.for("react.profiler"), T = Symbol.for("react.consumer"), b = Symbol.for("react.context"), z = Symbol.for("react.forward_ref"), Y = Symbol.for("react.suspense"), ue = Symbol.for("react.suspense_list"), W = Symbol.for("react.memo"), P = Symbol.for("react.lazy"), A = Symbol.for("react.activity"), x = Symbol.for("react.client.reference"), C = y.__CLIENT_INTERNALS_DO_NOT_USE_OR_WARN_USERS_THEY_CANNOT_UPGRADE, U = Object.prototype.hasOwnProperty, R = Array.isArray, I = console.createTask ? console.createTask : function() {
return null;
};
y = {
"react-stack-bottom-frame": function(t) {
return t();
}
};
var Z, ae = {}, se = y["react-stack-bottom-frame"].bind(
y,
a
)(), V = I(i(a)), Q = {};
Ee.Fragment = O, Ee.jsx = function(t, o, d, g, M) {
var S = 1e4 > C.recentlyCreatedOwnerStacks++;
return E(
t,
o,
d,
!1,
g,
M,
S ? Error("react-stack-top-frame") : se,
S ? I(i(t)) : V
);
}, Ee.jsxs = function(t, o, d, g, M) {
var S = 1e4 > C.recentlyCreatedOwnerStacks++;
return E(
t,
o,
d,
!0,
g,
M,
S ? Error("react-stack-top-frame") : se,
S ? I(i(t)) : V
);
};
}()), Ee;
}
process.env.NODE_ENV === "production" ? ye.exports = Fe() : ye.exports = ze();
var $ = ye.exports, q = /* @__PURE__ */ ((e) => (e.MEDIA_NOT_SUPPORTED = "MEDIA_NOT_SUPPORTED", e.PERMISSION_DENIED = "PERMISSION_DENIED", e.AUDIO_CONTEXT_ERROR = "AUDIO_CONTEXT_ERROR", e.SHARED_BUFFER_ERROR = "SHARED_BUFFER_ERROR", e.RENDER_ERROR = "RENDER_ERROR", e.CONFIGURATION_ERROR = "CONFIGURATION_ERROR", e.INVALID_STATE = "INVALID_STATE", e.ADAPTER_ERROR = "ADAPTER_ERROR", e.CONNECTION_TIMEOUT = "CONNECTION_TIMEOUT", e))(q || {});
class ne extends Error {
constructor(r, n, i) {
super(n), this.type = r, this.originalError = i, this.name = "AudioWaveError";
}
}
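/**
 * Error-handling sketch (hypothetical consumer code; the import names match
 * the exports at the bottom of this file: ne is AudioWaveError, q is
 * AudioWaveErrorType, He is WebAudioSource):
 *
 *   import { WebAudioSource, AudioWaveError, AudioWaveErrorType } from "@audiowave/react";
 *
 *   const src = new WebAudioSource();
 *   try {
 *     const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
 *     await src.initializeFromMediaStream(stream);
 *   } catch (err) {
 *     if (err instanceof AudioWaveError) {
 *       // err.type is one of the AudioWaveErrorType string codes defined above.
 *       console.warn(err.type, err.message, err.originalError);
 *     }
 *   }
 */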
const Ce = 256;
class Pe {
constructor() {
    this.recentMax = 0, this.longTermMax = 0, this.history = [], this.maxHistoryLength = 60; // ~1 second at 60fps
  }
scale(r) {
this.history.push(r), this.history.length > this.maxHistoryLength && this.history.shift(), this.recentMax = Math.max(this.recentMax * 0.95, r), this.longTermMax = this.longTermMax * 0.998 + r * 2e-3;
const n = Math.max(
this.recentMax * 0.6 + this.longTermMax * 0.4,
30
// Minimum threshold for quiet environments
), i = r / n * 180;
return Math.min(255, i * (1 - i / 400));
}
}
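/**
 * Pe is the adaptive amplitude scaler: recentMax decays 5% per frame so it
 * tracks the loudest recent peak, while longTermMax is an exponential moving
 * average (weights 0.998 / 0.002) of overall level. Each frame the target
 * blends the two (60/40) with a floor of 30 so quiet rooms still yield
 * visible bars, and the closing i * (1 - i / 400) term is a parabolic
 * soft-knee that tames spikes near the byte ceiling. The history buffer is
 * maintained but not read by the current formula. A single module-level
 * instance (Ae, below) is created lazily when amplitudeMode is "adaptive".
 */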
function Ue(e) {
let r = 0;
const n = 128;
for (let c = 0; c < e.length; c++) {
const f = e[c] - n;
r += f * f;
}
const s = Math.sqrt(r / e.length) / 128, a = 2e-3, u = 8e-3;
let m;
if (s < a)
m = 1;
else if (s < u)
m = 1 + ((s - a) / (u - a)) ** 2.5 * 8;
else {
const c = Math.log10(s + 1e-3) + 3;
m = Math.max(10, c * 85);
}
return Math.max(1, Math.min(255, m));
}
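/**
 * Ue maps unsigned 8-bit samples (silence = 128) to a 1..255 RMS display
 * value in three bands: below the 0.002 normalized-RMS noise floor the
 * output pins to 1; between 0.002 and 0.008 a power curve (exponent 2.5)
 * fades bars in; above that a log10 curve (scaled by 85, floored at 10)
 * approximates perceptual loudness.
 */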
let Ae = null;
function qe(e, r = "transparent") {
const n = e.getContext("2d");
if (!n)
return null;
const { width: i, height: s } = e, a = i / 2, u = s / 2;
return n.clearRect(0, 0, i, s), r !== "transparent" && (n.fillStyle = r, n.fillRect(0, 0, i, s)), {
context: n,
width: i,
height: s,
halfWidth: a,
halfHeight: u
};
}
function Be({ context: e, x: r, y: n, width: i, height: s, color: a, rounded: u }) {
e.fillStyle = a, u > 0 ? (e.beginPath(), e.roundRect(r, n, i, s, u), e.fill()) : e.fillRect(r, n, i, s);
}
const Ve = ({
canvas: e,
backgroundColor: r
}) => {
const n = e.height, i = e.width, s = Math.round(i / 2), a = e.getContext("2d");
return a ? (a.clearRect(0, 0, i, n), r !== "transparent" && (a.fillStyle = r, a.fillRect(0, 0, i, n)), { context: a, height: n, width: i, halfWidth: s }) : null;
}, ge = ({
context: e,
color: r,
rounded: n,
x: i,
y: s,
w: a,
h: u
}) => {
e.fillStyle = r, e.beginPath(), e.roundRect ? (e.roundRect(i, s, a, u, n), e.fill()) : e.fillRect(i, s, a, u);
}, Le = ({
context: e,
color: r,
rounded: n,
width: i,
height: s,
barWidth: a
}) => {
ge({
context: e,
color: r,
rounded: n,
x: i / 2 + a / 2,
y: s / 2 - 1,
h: 2,
w: i - (i / 2 + a / 2)
});
}, $e = ({
audioData: e,
unit: r,
index: n,
index2: i,
canvas: s,
isAudioInProgress: a,
isPausedAudio: u,
picks: m,
backgroundColor: c,
barWidth: f,
mainBarColor: E,
secondaryBarColor: p,
rounded: y,
animateCurrentPick: F,
fullscreen: D,
gain: O = 1,
amplitudeMode: X = "peak"
}) => {
const J = Ve({ canvas: s, backgroundColor: c });
if (!J) return;
const { context: T, height: b, width: z, halfWidth: Y } = J;
if (e != null && e.length && a) {
let W = 0;
switch (X === "adaptive" ? Ae || (Ae = new Pe()) : Ae = null, X) {
case "rms": {
const A = Ue(e), x = Math.max(0.1, Math.min(10, O));
W = Math.min(255, A * x);
break;
}
case "adaptive": {
let A = 0;
const x = 128;
for (let R = 0; R < e.length; R++) {
const I = Math.abs(e[R] - x);
I > A && (A = I);
}
const C = Ae.scale(A * 2), U = Math.max(0.1, Math.min(10, O));
W = Math.min(255, C * U);
break;
}
default: {
let A = 0;
const x = 128;
for (let R = 0; R < e.length; R++) {
const I = Math.abs(e[R] - x);
I > A && (A = I);
}
const C = A * 2, U = Math.max(0.1, Math.min(10, O));
W = Math.min(255, C * U);
break;
}
}
if (!u) {
if (i.current >= f) {
i.current = 0;
const x = W / Ce * 100, C = (100 - x) / 2, U = n.current === f ? {
startY: C,
barHeight: x
} : null;
n.current >= r ? n.current = f : n.current += f, m.length > (D ? z : Y) / f && m.pop(), m.unshift(U);
}
i.current += 1;
}
if (!D && ue(), F && !u) {
const x = W / Ce * b;
ge({
context: T,
rounded: y,
color: E,
x: D ? z : Y,
y: (b - x) / 2,
// Center the bar vertically
h: x,
// Height should be proportional to amplitude
w: f
});
}
let P = (D ? z : Y) - i.current;
for (const A of m)
A && ge({
context: T,
color: E,
rounded: y,
x: P,
y: A.startY * b / 100 > b / 2 - 1 ? b / 2 - 1 : A.startY * b / 100,
h: A.barHeight * b / 100 > 2 ? A.barHeight * b / 100 : 2,
w: f
}), P -= f;
} else
m.length = 0;
function ue() {
Le({
context: T,
color: p,
rounded: y,
width: z,
height: b,
barWidth: f
});
}
};
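/**
 * $e (exported as drawByLiveStream) renders one frame: it reduces the byte
 * samples to a 0..255 amplitude via the selected amplitudeMode (peak, rms,
 * or adaptive), pushes a new pick into the scrolling history every barWidth
 * frames, then draws the secondary center line (non-fullscreen only), the
 * animated current bar, and the history bars right to left. When the source
 * is inactive the pick history is cleared.
 */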
function Ye(e, r, n, i, s = 1) {
if (e.style.width = `${r}px`, e.style.height = `${n}px`, e.width = r, e.height = n, i && i !== "transparent") {
const a = e.getContext("2d");
a && (a.fillStyle = i, a.fillRect(0, 0, r, n));
}
}
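/**
 * Ye (exported as resizeCanvas) sets both the CSS size and the backing-store
 * size of the canvas and repaints the background. The trailing scale
 * parameter (default 1) is accepted but unused in this build, so the backing
 * store is not scaled by devicePixelRatio.
 */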
const We = Me(
({
source: e,
width: r = j.width,
height: n = j.height,
backgroundColor: i = j.backgroundColor,
barColor: s = j.barColor,
secondaryBarColor: a = j.secondaryBarColor,
barWidth: u = j.barWidth,
gap: m = j.gap,
rounded: c = j.rounded,
showBorder: f = !1,
borderColor: E = "#333333",
borderWidth: p = 1,
borderRadius: y = 0,
speed: F = j.speed,
animateCurrentPick: D = j.animateCurrentPick,
fullscreen: O = j.fullscreen,
gain: X = 1,
amplitudeMode: J = "peak",
isPaused: T = !1,
placeholder: b,
showPlaceholderBackground: z = !0,
customRenderer: Y,
className: ue,
canvasClassName: W,
onStateChange: P,
onRenderStart: A,
onRenderStop: x,
onError: C
}, U) => {
const R = _(null), I = _(null), Z = _(null), ae = _(null), se = _(null), V = _(null), Q = _(null), [t, o] = B(window.innerWidth), d = t < 768, g = Math.trunc(F), M = Math.trunc(m), S = Math.trunc(d && M > 0 ? u + 1 : u), oe = S + M * S, K = _([]), N = _(g), L = _(S), ee = _(S), [fe, Ie] = B({ width: 0, height: 0 }), [te, Te] = B(new Uint8Array(0)), [k, be] = B("idle"), de = _("idle"), ve = _(null), H = G(
(l) => {
be(l), de.current = l, P == null || P(l);
},
[P]
), xe = G(() => {
K.current.length = 0, L.current = 0, ee.current = 0, Te(new Uint8Array(0));
}, []);
ie(() => {
const l = () => {
o(window.innerWidth);
};
return window.addEventListener("resize", l), () => window.removeEventListener("resize", l);
}, []);
const Re = G(() => {
if (!V.current || !e)
return;
if (de.current === "visualizing" && !T && typeof e.getAudioData == "function") {
const h = e.getAudioData();
if (h && h.length > 0) {
const w = h.reduce((ce, le) => ce + le, 0) / h.length;
if (w >= 0 && w <= 255) {
const ce = Math.min(h.length, V.current.length);
for (let le = 0; le < ce; le++)
V.current[le] = h[le];
Te(new Uint8Array(V.current));
}
}
}
Q.current = requestAnimationFrame(Re);
}, [T, e]), Se = G(
(l, h) => {
const w = new ne(
q.RENDER_ERROR,
`${h}: ${l.message}`,
l
);
C == null || C(w), console.error("AudioWave Error:", w);
},
[C]
), _e = G(async () => {
if (e)
try {
if (typeof e.getAudioData != "function" || typeof e.isActive != "function")
throw new Error("Audio source must implement getAudioData() and isActive() methods");
ve.current = e, V.current = new Uint8Array(1024), V.current.fill(128), H("visualizing"), A == null || A(), Re();
} catch (l) {
Se(
l instanceof Error ? l : new Error(String(l)),
"Failed to set up audio source"
);
}
}, [e, Se, A, Re, H]), he = G(
(l = !1) => {
var h, w;
if (Q.current && (cancelAnimationFrame(Q.current), Q.current = null), se.current && (se.current.disconnect(), se.current = null), ae.current && (ae.current.disconnect(), ae.current = null), Z.current && Z.current.state !== "closed") {
try {
(w = (h = Z.current).close) == null || w.call(h);
} catch (re) {
console.warn("Error closing audio context:", re);
}
Z.current = null;
}
l || (ve.current = null), l || (V.current = null, H("idle"), x == null || x());
},
[x, H]
);
ke(() => {
if (R.current) {
if (N.current >= g || !te.length) {
N.current = te.length ? 0 : g;
const l = (e == null ? void 0 : e.isActive()) ?? !1;
if (Y) {
const h = {
canvas: R.current,
context: R.current.getContext("2d"),
audioData: te,
isActive: l,
timestamp: performance.now(),
dimensions: {
width: R.current.width,
height: R.current.height
}
};
Y(h);
} else
$e({
audioData: te,
unit: oe,
index: L,
index2: ee,
canvas: R.current,
isAudioInProgress: l || k === "paused" || T,
// Keep rendering when paused
isPausedAudio: k === "paused" || T,
// Support both internal state and external prop
picks: K.current,
backgroundColor: i,
barWidth: S,
mainBarColor: s,
secondaryBarColor: a,
rounded: c,
animateCurrentPick: D,
fullscreen: O,
gain: X,
amplitudeMode: J
});
if (f && R.current) {
const h = R.current.getContext("2d");
h && (h.strokeStyle = E, h.lineWidth = p, y > 0 && h.roundRect ? (h.beginPath(), h.roundRect(
p / 2,
p / 2,
R.current.width - p,
R.current.height - p,
y
), h.stroke()) : h.strokeRect(0, 0, R.current.width, R.current.height));
}
}
N.current += 1;
}
}, [
// Effect dependencies
te,
S,
i,
s,
a,
c,
O,
// Border properties
f,
E,
p,
y,
// Additional required dependencies
g,
oe,
T,
e,
// Need source to check isActive()
Y,
k,
D,
X,
J
]);
const pe = G(() => {
const l = R.current, h = I.current;
if (!l || !h)
return;
let w, re;
if (typeof r == "string")
if (r.endsWith("%")) {
const ce = Number.parseFloat(r);
w = h.clientWidth * ce / 100;
} else
w = Number.parseInt(r, 10) || 800;
else
w = r;
re = n, (fe.width !== w || fe.height !== re) && (Ye(l, w, re, i), Ie({ width: w, height: re }));
}, [r, n, fe, i]);
return ie(() => (e ? _e() : T || he(), () => {
const l = T || de.current === "paused";
he(l);
}), [e, _e, he, T]), ie(() => {
pe();
const l = new ResizeObserver(pe);
return I.current && l.observe(I.current), () => {
l.disconnect();
};
}, [pe]), ie(() => () => {
he(!1), K.current.length = 0, L.current = 0, ee.current = 0;
}, [he]), je(
U,
() => ({
pause: () => {
const l = de.current;
(l === "visualizing" || l === "idle" && e) && H("paused");
},
resume: () => {
const l = de.current;
(l === "paused" || l === "idle" && e) && H("visualizing");
},
clear: () => {
xe();
},
isPaused: () => k === "paused",
getState: () => k,
getAudioData: () => te
}),
[k, H, xe, te, e]
), /* @__PURE__ */ $.jsxs(
"div",
{
ref: I,
className: ue,
style: {
width: typeof r == "string" && r.endsWith("%") ? "100%" : typeof r == "string" ? r : `${r}px`,
height: `${n}px`,
position: "relative",
display: "flex",
alignItems: "center",
justifyContent: "center",
backgroundColor: !e && !T && k !== "paused" && z ? i : "transparent",
border: !e && !T && k !== "paused" && z && f ? `${p}px solid ${E}` : "none",
borderRadius: !e && !T && k !== "paused" && z && y > 0 ? `${y}px` : "0"
},
children: [
/* @__PURE__ */ $.jsx(
"canvas",
{
ref: R,
className: W,
style: {
width: typeof r == "string" && r.endsWith("%") ? r : "100%",
height: "100%",
display: e || T || k === "paused" ? "block" : "none",
// Keep canvas visible when paused
borderRadius: y > 0 ? `${y}px` : "0"
}
}
),
!e && !T && k !== "paused" && /* @__PURE__ */ $.jsx(
"div",
{
style: {
position: "absolute",
top: "50%",
left: "50%",
transform: "translate(-50%, -50%)",
color: "#666",
opacity: 0.5,
pointerEvents: "none"
},
children: b || // Default placeholder - simple wave icon
/* @__PURE__ */ $.jsxs(
"svg",
{
width: 48,
height: 48,
viewBox: "0 0 24 24",
fill: "currentColor",
"aria-label": "Audio waveform placeholder",
children: [
/* @__PURE__ */ $.jsx("title", { children: "Audio waveform placeholder" }),
/* @__PURE__ */ $.jsx("rect", { x: "2", y: "8", width: "2", height: "8", rx: "1" }),
/* @__PURE__ */ $.jsx("rect", { x: "6", y: "4", width: "2", height: "16", rx: "1" }),
/* @__PURE__ */ $.jsx("rect", { x: "10", y: "6", width: "2", height: "12", rx: "1" }),
/* @__PURE__ */ $.jsx("rect", { x: "14", y: "2", width: "2", height: "20", rx: "1" }),
/* @__PURE__ */ $.jsx("rect", { x: "18", y: "7", width: "2", height: "10", rx: "1" })
]
}
)
}
)
]
}
);
}
);
We.displayName = "AudioWave";
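/**
 * A minimal usage sketch for the AudioWave component (hypothetical consumer
 * code; prop names match the destructuring above, and the ref handle exposes
 * pause/resume/clear/isPaused/getState/getAudioData):
 *
 *   import { useRef } from "react";
 *   import { AudioWave, useMediaStreamSource } from "@audiowave/react";
 *
 *   function Recorder({ stream }) {
 *     const waveRef = useRef(null);
 *     const { source, error } = useMediaStreamSource(stream);
 *     if (error) return <p>Audio failed: {error.message}</p>;
 *     return (
 *       <>
 *         <AudioWave
 *           ref={waveRef}
 *           source={source}
 *           height={120}
 *           barColor="#4ade80"
 *           amplitudeMode="rms"
 *           onError={(e) => console.error(e.type, e.message)}
 *         />
 *         <button onClick={() => waveRef.current?.pause()}>Pause</button>
 *         <button onClick={() => waveRef.current?.resume()}>Resume</button>
 *       </>
 *     );
 *   }
 */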
var v = /* @__PURE__ */ ((e) => (e.INACTIVE = "inactive", e.CONNECTING = "connecting", e.ACTIVE = "active", e.ERROR = "error", e))(v || {});
class He {
constructor(r = {}) {
this.audioContext = null, this.analyserNode = null, this.sourceNode = null, this.audioData = null, this.state = v.INACTIVE, this.mediaStream = null, this.inputAudioNode = null, this.options = {
fftSize: 2048,
smoothingTimeConstant: 0,
// No smoothing for real-time waveform visualization
...r
};
}
/**
* Initialize from MediaStream (getUserMedia, getDisplayMedia)
*/
async initializeFromMediaStream(r) {
if (this.state !== v.INACTIVE)
throw new ne(
q.INVALID_STATE,
"WebAudioSource is already initialized"
);
this.state = v.CONNECTING, this.mediaStream = r;
try {
await this.setupWebAudio(), this.sourceNode = this.audioContext.createMediaStreamSource(r), this.connectNodes(), this.state = v.ACTIVE;
} catch (n) {
throw this.cleanup(), this.state = v.ERROR, new ne(
q.AUDIO_CONTEXT_ERROR,
"Failed to initialize from MediaStream",
n instanceof Error ? n : new Error(String(n))
);
}
}
/**
* Initialize from HTMLMediaElement (audio/video elements)
*/
async initializeFromMediaElement(r) {
if (this.state !== v.INACTIVE)
throw new ne(
q.INVALID_STATE,
"WebAudioSource is already initialized"
);
this.state = v.CONNECTING;
try {
await this.setupWebAudio(), this.sourceNode = this.audioContext.createMediaElementSource(r), this.sourceNode.connect(this.audioContext.destination), this.connectNodes(), this.state = v.ACTIVE;
} catch (n) {
throw this.cleanup(), this.state = v.ERROR, new ne(
q.AUDIO_CONTEXT_ERROR,
"Failed to initialize from HTMLMediaElement",
n instanceof Error ? n : new Error(String(n))
);
}
}
/**
* Initialize from existing AudioNode
*/
async initializeFromAudioNode(r) {
if (this.state !== v.INACTIVE)
throw new ne(
q.INVALID_STATE,
"WebAudioSource is already initialized"
);
this.state = v.CONNECTING, this.inputAudioNode = r;
try {
this.audioContext = r.context, this.audioContext.state === "suspended" && await this.audioContext.resume(), this.setupAnalyser(), this.sourceNode = r, this.connectNodes(), this.state = v.ACTIVE;
} catch (n) {
throw this.cleanup(), this.state = v.ERROR, new ne(
q.AUDIO_CONTEXT_ERROR,
"Failed to initialize from AudioNode",
n instanceof Error ? n : new Error(String(n))
);
}
}
/**
* Get current audio data for visualization (Standard AudioSource interface)
*/
getAudioData() {
return this.state !== v.ACTIVE || !this.analyserNode || !this.audioData ? null : (this.analyserNode.getByteTimeDomainData(this.audioData), new Uint8Array(this.audioData));
}
/**
* Get time domain data specifically (for debugging/comparison)
*/
getTimeDomainData() {
if (this.state !== v.ACTIVE || !this.analyserNode || !this.audioData)
return null;
const r = new Uint8Array(this.analyserNode.frequencyBinCount);
return this.analyserNode.getByteTimeDomainData(r), r;
}
/**
* Get frequency domain data specifically (for debugging/comparison)
*/
getFrequencyData() {
if (this.state !== v.ACTIVE || !this.analyserNode || !this.audioData)
return null;
const r = new Uint8Array(this.analyserNode.frequencyBinCount);
return this.analyserNode.getByteFrequencyData(r), r;
}
/**
* Check if the source is currently active (Standard AudioSource interface)
*/
isActive() {
return this.state === v.ACTIVE;
}
/**
* Get current state of the audio source (Extended interface)
*/
getState() {
return this.state;
}
/**
* Get configuration used by this audio source (Extended interface)
*/
getConfig() {
return {
fftSize: this.options.fftSize
};
}
/**
* Destroy the source and clean up all resources (Extended interface)
*/
destroy() {
this.cleanup(), this.state = v.INACTIVE;
}
/**
* Set up Web Audio API context and analyser
*/
async setupWebAudio() {
this.audioContext = new (window.AudioContext || window.webkitAudioContext)(), this.audioContext.state === "suspended" && await this.audioContext.resume(), this.setupAnalyser();
}
/**
* Set up analyser node
*/
setupAnalyser() {
this.audioContext && (this.analyserNode = this.audioContext.createAnalyser(), this.analyserNode.fftSize = this.options.fftSize, this.analyserNode.smoothingTimeConstant = this.options.smoothingTimeConstant, this.audioData = new Uint8Array(new ArrayBuffer(this.analyserNode.fftSize)));
}
/**
* Connect source node to analyser
*/
connectNodes() {
this.sourceNode && this.analyserNode && this.sourceNode.connect(this.analyserNode);
}
/**
* Clean up all resources
*/
cleanup() {
this.mediaStream && (this.mediaStream.getTracks().forEach((r) => r.stop()), this.mediaStream = null), this.sourceNode && this.sourceNode !== this.inputAudioNode && this.sourceNode.disconnect(), this.sourceNode = null, this.analyserNode && (this.analyserNode.disconnect(), this.analyserNode = null), this.audioContext && !this.inputAudioNode && this.audioContext.state !== "closed" && this.audioContext.close().catch(console.warn), this.audioContext = null, this.inputAudioNode = null, this.audioData = null;
}
}
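/**
 * Direct-usage sketch for He (exported as WebAudioSource), for consumers who
 * manage the adapter themselves rather than via the hooks below (hypothetical
 * consumer code):
 *
 *   import { WebAudioSource } from "@audiowave/react";
 *
 *   const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
 *   const source = new WebAudioSource({ fftSize: 1024 });
 *   await source.initializeFromMediaStream(stream);
 *
 *   const frame = source.getAudioData(); // time-domain bytes, 128 = silence
 *
 *   source.destroy(); // stops tracks, disconnects nodes, closes the AudioContext
 */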
function Ne(e = {}) {
const [r, n] = B(null), [i, s] = B(null), a = _(null), u = _(!1), m = G(
async (c) => {
var f;
if (!c) return null;
if (typeof c == "object" && "getAudioData" in c && "isActive" in c)
return u.current = !0, c;
try {
const E = new He();
if (c instanceof MediaStream)
await E.initializeFromMediaStream(c);
else if (c instanceof HTMLMediaElement || c instanceof HTMLAudioElement || c instanceof HTMLVideoElement)
await E.initializeFromMediaElement(c);
else if ("context" in c && typeof c.connect == "function")
await E.initializeFromAudioNode(c);
else
throw new Error(`Unsupported audio source type: ${typeof c}`);
return a.current = E, u.current = !1, E;
} catch (E) {
const p = E instanceof Error ? E : new Error(String(E));
return s(p), (f = e.onError) == null || f.call(e, p), null;
}
},
[e]
);
return ie(() => {
let c = !0;
return (async () => {
var E;
try {
s(null), a.current && !u.current && (a.current.destroy(), a.current = null);
const p = await m(e.source ?? null);
c && n(p);
} catch (p) {
if (c) {
const y = p instanceof Error ? p : new Error(String(p));
s(y), (E = e.onError) == null || E.call(e, y);
}
}
})(), () => {
c = !1;
};
}, [e.source, m, e]), ie(() => () => {
a.current && !u.current && a.current.destroy();
}, []), {
source: r,
error: i
};
}
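/**
 * Ne (exported as useMediaAudio) wraps any supported input (a MediaStream,
 * an <audio>/<video> element, a Web Audio node, or an object already
 * implementing getAudioData()/isActive()) in an AudioSource. Sketch
 * (hypothetical consumer code; note the options object feeds the effect
 * dependencies in this build, so keep its identity stable):
 *
 *   import { useMemo } from "react";
 *   import { useMediaAudio, AudioWave } from "@audiowave/react";
 *
 *   function Player({ audioEl }) {
 *     const options = useMemo(() => ({ source: audioEl }), [audioEl]);
 *     const { source, error } = useMediaAudio(options);
 *     return <AudioWave source={source} height={80} />;
 *   }
 */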
function Xe(e, r = {}) {
return Ne({ ...r, source: e });
}
function Je(e, r = {}) {
return Ne({ ...r, source: e });
}
function Ze(e, r = {}) {
return Ne({ ...r, source: e });
}
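/**
 * Xe/Je/Ze (useMediaStreamSource, useMediaElementSource, useAudioNodeSource)
 * are thin conveniences over Ne: each forwards its first argument as
 * options.source, so useMediaStreamSource(stream) is equivalent to
 * useMediaAudio({ source: stream }).
 */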
function Qe(e) {
const { provider: r, deviceId: n = "default" } = e, [i, s] = B("active"), [a, u] = B(null), m = _(null), c = _(null), f = i === "active";
ie(() => {
var D;
if (!r) {
u("Audio provider not available");
return;
}
const p = {
getAudioData: () => c.current,
isActive: () => i === "active"
};
m.current = p;
const y = r.onAudioData((O) => {
c.current = O;
}), F = (D = r.onAudioError) == null ? void 0 : D.call(r, (O) => {
u(O), s("idle");
});
return () => {
y == null || y(), F == null || F(), c.current = null;
};
}, [r, i]);
const E = G(() => {
u(null);
}, []);
return {
status: i,
isActive: f,
source: m.current,
error: a,
deviceId: n,
clearError: E
};
}
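/**
 * Qe (exported as useCustomAudio) adapts a push-style provider (an object
 * whose onAudioData(cb) returns an unsubscribe function, with an optional
 * onAudioError(cb)) into an AudioSource. Sketch (hypothetical consumer code;
 * makeFrame stands in for your own Uint8Array producer):
 *
 *   import { useCustomAudio, AudioWave } from "@audiowave/react";
 *
 *   const provider = {
 *     onAudioData(cb) {
 *       const id = setInterval(() => cb(makeFrame()), 16);
 *       return () => clearInterval(id);
 *     },
 *   };
 *
 *   function Meter() {
 *     const { source, error, clearError } = useCustomAudio({ provider });
 *     return <AudioWave source={source} height={64} />;
 *   }
 */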
const Ke = "0.1.0", et = {
/** Web Audio API support */
WEB_AUDIO: typeof AudioContext < "u" || typeof globalThis.webkitAudioContext < "u",
/** MediaStream API support */
MEDIA_STREAM: typeof navigator < "u" && "mediaDevices" in navigator,
/** SharedArrayBuffer support (Electron) */
SHARED_ARRAY_BUFFER: typeof SharedArrayBuffer < "u",
/** Canvas 2D support */
CANVAS_2D: typeof CanvasRenderingContext2D < "u",
/** RequestAnimationFrame support */
ANIMATION_FRAME: typeof requestAnimationFrame < "u"
}, tt = {
fftSize: 2048
}, j = {
// Dimensions
width: "100%",
// Full width by default
height: 200,
// Standard height for good visibility
// Visual styling
backgroundColor: "transparent",
barColor: "#ffffff",
secondaryBarColor: "#5e5e5e",
barWidth: 2,
gap: 1,
rounded: 0,
// Animation & rendering
speed: 3,
animateCurrentPick: !0,
fullscreen: !1,
onlyActive: !1,
amplitudeMode: "peak"
};
export {
v as AudioSourceState,
We as AudioWave,
ne as AudioWaveError,
q as AudioWaveErrorType,
tt as DEFAULT_AUDIO_CONFIG,
j as DEFAULT_VISUALIZER_PROPS,
et as FEATURES,
Ke as VERSION,
He as WebAudioSource,
$e as drawByLiveStream,
Be as renderBar,
Ye as resizeCanvas,
qe as setupCanvas,
Ze as useAudioNodeSource,
Qe as useCustomAudio,
Ne as useMediaAudio,
Je as useMediaElementSource,
Xe as useMediaStreamSource
};
//# sourceMappingURL=index.esm.js.map