// @mode-7/mod — Modular Web Audio components for React
// Version: (unspecified)
// 1,508 lines (1,493 loc) • 193 kB — JavaScript (bundled CommonJS build)
"use strict";
// ---- esbuild-generated CommonJS interop runtime (do not edit by hand) ----
// Cached intrinsics used by the helpers below.
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Defines each entry of `all` on `target` as an enumerable lazy getter, so
// exported bindings are resolved at access time (tolerates circular imports).
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copies own properties of `from` onto `to` as live getters, skipping `except`
// and anything `to` already owns; enumerability is preserved via `desc`.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Wraps a CommonJS module so it can be consumed like an ES module namespace.
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
// Marks the export object with __esModule and returns a CommonJS-shaped copy.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// src/index.ts
// Public export surface of the package. Each entry is registered as a lazy
// getter via __export, pointing at components/hooks defined later in this
// bundle. Note the "Canvas" variants alias the `*2` implementations.
var index_exports = {};
__export(index_exports, {
  ADSR: () => ADSR,
  AudioProvider: () => AudioProvider,
  AutoWah: () => AutoWah,
  BitCrusher: () => BitCrusher,
  Button: () => Button,
  Chorus: () => Chorus,
  Clock: () => Clock,
  Compressor: () => Compressor,
  CrossFade: () => CrossFade,
  Delay: () => Delay,
  Distortion: () => Distortion,
  EQ: () => EQ,
  FilePicker: () => FilePicker,
  Filter: () => Filter,
  Flanger: () => Flanger,
  Gate: () => Gate,
  Knob: () => Knob,
  LFO: () => LFO,
  LevelMeter: () => LevelMeter,
  LevelMeterCanvas: () => LevelMeter2,
  Limiter: () => Limiter,
  MP3Deck: () => MP3Deck,
  Microphone: () => Microphone,
  Mixer: () => Mixer,
  Monitor: () => Monitor,
  NoiseGenerator: () => NoiseGenerator,
  Oscilloscope: () => Oscilloscope,
  OscilloscopeCanvas: () => Oscilloscope2,
  Panner: () => Panner,
  Phaser: () => Phaser,
  ProgressBar: () => ProgressBar,
  Reverb: () => Reverb,
  RingModulator: () => RingModulator,
  Select: () => Select,
  Sequencer: () => Sequencer,
  Slider: () => Slider,
  SpectrumAnalyzer: () => SpectrumAnalyzer,
  SpectrumAnalyzerCanvas: () => SpectrumAnalyzer2,
  StreamingAudioDeck: () => StreamingAudioDeck,
  TextInput: () => TextInput,
  ToneGenerator: () => ToneGenerator,
  Tremolo: () => Tremolo,
  VCA: () => VCA,
  XYPad: () => XYPad,
  useAudioContext: () => useAudioContext,
  useModStream: () => useModStream,
  useModStreamToMediaStream: () => useModStreamToMediaStream
});
module.exports = __toCommonJS(index_exports);
// src/context/AudioContext.tsx
var import_react = require("react");
var import_jsx_runtime = require("react/jsx-runtime");
// React context carrying the shared AudioContext; defaults to "no context yet".
var AudioContextContext = (0, import_react.createContext)({ audioContext: null });
// Hook: returns the AudioContext provided by the nearest <AudioProvider>,
// or null before the provider has created one.
var useAudioContext = () => {
  const { audioContext } = (0, import_react.useContext)(AudioContextContext);
  return audioContext;
};
// Provider that owns a single AudioContext for its subtree.
// Creates the context on mount, auto-resumes it on user interaction (browser
// autoplay policy keeps contexts "suspended" until a gesture), and closes it
// on unmount.
var AudioProvider = ({ children }) => {
  const [audioContext, setAudioContext] = (0, import_react.useState)(null);
  (0, import_react.useEffect)(() => {
    const ctx = new (window.AudioContext || window.webkitAudioContext)();
    setAudioContext(ctx);
    // Listeners are intentionally kept for the provider's lifetime so a later
    // suspension can also recover on the next click/keydown.
    const resumeAudio = () => {
      if (ctx.state === "suspended") {
        // Fix: resume() returns a promise that was left floating; swallow
        // rejections so a blocked resume is not an unhandled rejection.
        ctx.resume().catch(() => {
        });
      }
    };
    document.addEventListener("click", resumeAudio);
    document.addEventListener("keydown", resumeAudio);
    return () => {
      document.removeEventListener("click", resumeAudio);
      document.removeEventListener("keydown", resumeAudio);
      if (ctx.state !== "closed") {
        // Fix: close() is async as well; guard against an unhandled rejection
        // during unmount.
        ctx.close().catch(() => {
        });
      }
    };
  }, []);
  return /* @__PURE__ */ (0, import_jsx_runtime.jsx)(AudioContextContext.Provider, { value: { audioContext }, children });
};
// src/hooks/useModStream.ts
var import_react2 = require("react");
// Hook: returns a ref-like object whose `current` setter forces a re-render
// whenever a stream is attached (null -> node) or detached (node -> null),
// so consumers keyed on `output.current` pick up connection changes.
var useModStream = (label) => {
  const [, setUpdateTrigger] = (0, import_react2.useState)(0);
  const internalRef = (0, import_react2.useRef)(null);
  const reactiveRef = (0, import_react2.useRef)({
    get current() {
      return internalRef.current;
    },
    set current(value) {
      const previous = internalRef.current;
      internalRef.current = value;
      // Re-render only when the "is attached" state flips (XOR of nullness);
      // plain value swaps do not trigger an update.
      const attachmentChanged = (previous === null) !== (value === null);
      if (attachmentChanged) {
        this._timestamp = Date.now();
        setUpdateTrigger((n) => n + 1);
      }
    },
    _timestamp: 0
  });
  if (label) {
    // Tag the ref for debugging; refreshed on every render.
    reactiveRef.current._label = label;
  }
  return reactiveRef.current;
};
// src/hooks/useModStreamToMediaStream.ts
var import_react3 = require("react");
// Hook: bridges a mod-stream ref to a browser MediaStream by tapping the
// stream's gain node into a MediaStreamAudioDestinationNode.
// Returns null until a stream (with a context) is attached.
function useModStreamToMediaStream(modStreamRef) {
  const [mediaStream, setMediaStream] = (0, import_react3.useState)(null);
  const destinationRef = (0, import_react3.useRef)(null);
  (0, import_react3.useEffect)(() => {
    if (!modStreamRef.current) {
      setMediaStream(null);
      return;
    }
    const audioContext = modStreamRef.current.context;
    if (!audioContext) {
      setMediaStream(null);
      return;
    }
    const destination = audioContext.createMediaStreamDestination();
    destinationRef.current = destination;
    modStreamRef.current.gain.connect(destination);
    setMediaStream(destination.stream);
    return () => {
      if (modStreamRef.current && destinationRef.current) {
        try {
          modStreamRef.current.gain.disconnect(destinationRef.current);
        } catch (e) {
          // Best-effort: node may already be disconnected or torn down.
        }
      }
      destinationRef.current = null;
    };
    // NOTE(review): keying the effect on a ref's current value only re-runs it
    // when a render happens to coincide with the node changing — this relies
    // on useModStream forcing a re-render on attach/detach; verify callers
    // always pass a ref produced by useModStream.
  }, [modStreamRef.current?.audioNode]);
  return mediaStream;
}
// src/components/sources/Microphone.tsx
var import_react5 = __toESM(require("react"));
// src/hooks/useControlledState.ts
var import_react4 = require("react");
// Hook implementing the controlled/uncontrolled prop pattern: when
// `controlledValue` is provided the caller owns the state and we only notify
// via `onChange`; otherwise internal state is used. Returns [value, setValue].
function useControlledState(controlledValue, defaultValue, onChange) {
  const [uncontrolledValue, setUncontrolledValue] = (0, import_react4.useState)(defaultValue);
  const isControlled = controlledValue !== void 0;
  const setValue = (0, import_react4.useCallback)((next) => {
    // In controlled mode the internal state is never written; the parent is
    // expected to feed the new value back through props.
    if (!isControlled) {
      setUncontrolledValue(next);
    }
    onChange?.(next);
  }, [isControlled, onChange]);
  return [isControlled ? controlledValue : uncontrolledValue, setValue];
}
// src/components/sources/Microphone.tsx
var import_jsx_runtime2 = require("react/jsx-runtime");
// Microphone source: getUserMedia -> MediaStreamAudioSourceNode -> GainNode.
// The gain node (not the source) is published on `output` so downstream
// modules stay patched into a stable node while the device/source changes.
// Headless: renders nothing unless a render-prop `children` is supplied.
var Microphone = import_react5.default.forwardRef(({
  output,
  label = "microphone",
  deviceId: initialDeviceId,
  gain: controlledGain,
  onGainChange,
  isMuted: controlledMuted,
  onMutedChange,
  selectedDeviceId: controlledDeviceId,
  onSelectedDeviceIdChange,
  onDevicesChange,
  onError,
  children
}, ref) => {
  const audioContext = useAudioContext();
  const [gain, setGain] = useControlledState(controlledGain, 1, onGainChange);
  const [isMuted, setMuted] = useControlledState(controlledMuted, false, onMutedChange);
  const [error, setError] = (0, import_react5.useState)(null);
  const [devices, setDevices] = (0, import_react5.useState)([]);
  const [selectedDeviceId, setSelectedDeviceId] = useControlledState(
    controlledDeviceId,
    initialDeviceId || null,
    onSelectedDeviceIdChange
  );
  const gainNodeRef = (0, import_react5.useRef)(null);
  const mediaStreamRef = (0, import_react5.useRef)(null);
  const sourceNodeRef = (0, import_react5.useRef)(null);
  // Enumerate audio inputs; device labels are empty until mic permission has
  // been granted, hence the truncated-id fallback label.
  const refreshDevices = async () => {
    try {
      const allDevices = await navigator.mediaDevices.enumerateDevices();
      const audioInputs = allDevices.filter((device) => device.kind === "audioinput").map((device) => ({
        deviceId: device.deviceId,
        label: device.label || `Microphone ${device.deviceId.slice(0, 5)}`
      }));
      setDevices(audioInputs);
      if (!selectedDeviceId && audioInputs.length > 0) {
        setSelectedDeviceId(audioInputs[0].deviceId);
      }
    } catch (err) {
      console.error("Failed to enumerate devices:", err);
    }
  };
  (0, import_react5.useEffect)(() => {
    refreshDevices();
  }, []);
  // Acquire (or re-acquire) the microphone whenever the context or selected
  // device changes; tears down the previous stream/source first.
  (0, import_react5.useEffect)(() => {
    if (!audioContext) return;
    if (!selectedDeviceId && devices.length > 0) return;
    const setupMicrophone = async () => {
      try {
        if (mediaStreamRef.current) {
          mediaStreamRef.current.getTracks().forEach((track) => track.stop());
        }
        if (sourceNodeRef.current) {
          sourceNodeRef.current.disconnect();
        }
        const audioConstraints = selectedDeviceId && selectedDeviceId !== "default" ? { deviceId: { exact: selectedDeviceId } } : true;
        const mediaStream = await navigator.mediaDevices.getUserMedia({
          audio: audioConstraints
        });
        mediaStreamRef.current = mediaStream;
        // Re-enumerate after permission: labels become available now.
        await refreshDevices();
        const sourceNode = audioContext.createMediaStreamSource(mediaStream);
        sourceNodeRef.current = sourceNode;
        // The gain node is created once and reused across device switches.
        if (!gainNodeRef.current) {
          const gainNode = audioContext.createGain();
          gainNode.gain.value = isMuted ? 0 : gain;
          gainNodeRef.current = gainNode;
          output.current = {
            audioNode: gainNode,
            // Expose the GAIN as the audioNode, not the source
            gain: gainNode,
            context: audioContext,
            metadata: {
              label,
              sourceType: "microphone"
            }
          };
        }
        sourceNode.connect(gainNodeRef.current);
        setError(null);
      } catch (err) {
        setError(err instanceof Error ? err.message : "Failed to access microphone");
        console.error("Microphone access error:", err);
      }
    };
    setupMicrophone();
    return () => {
      if (sourceNodeRef.current) {
        sourceNodeRef.current.disconnect();
        sourceNodeRef.current = null;
      }
      if (mediaStreamRef.current) {
        mediaStreamRef.current.getTracks().forEach((track) => track.stop());
        mediaStreamRef.current = null;
      }
    };
  }, [audioContext, label, selectedDeviceId, devices.length]);
  // Unmount-only teardown: release the gain node and the published output.
  (0, import_react5.useEffect)(() => {
    return () => {
      if (gainNodeRef.current) {
        gainNodeRef.current.disconnect();
        gainNodeRef.current = null;
      }
      if (sourceNodeRef.current) {
        sourceNodeRef.current.disconnect();
        sourceNodeRef.current = null;
      }
      if (mediaStreamRef.current) {
        mediaStreamRef.current.getTracks().forEach((track) => track.stop());
        mediaStreamRef.current = null;
      }
      output.current = null;
    };
  }, []);
  // Single source of truth for the audible level: muted forces 0, otherwise
  // the current gain. (Fix: a second effect with the same dependency set that
  // wrote `gain` only when unmuted was fully redundant with this one — this
  // effect always ran after it and overwrote the same AudioParam — so it has
  // been removed.)
  (0, import_react5.useEffect)(() => {
    if (gainNodeRef.current) {
      gainNodeRef.current.gain.value = isMuted ? 0 : gain;
    }
  }, [isMuted, gain]);
  const selectDevice = (deviceId) => {
    setSelectedDeviceId(deviceId);
  };
  (0, import_react5.useImperativeHandle)(ref, () => ({
    selectDevice,
    refreshDevices,
    getState: () => ({ gain, isMuted, devices, selectedDeviceId, error })
  }), [gain, isMuted, devices, selectedDeviceId, error]);
  (0, import_react5.useEffect)(() => {
    onDevicesChange?.(devices);
  }, [devices, onDevicesChange]);
  (0, import_react5.useEffect)(() => {
    onError?.(error);
  }, [error, onError]);
  if (error) {
    console.warn(`Microphone error: ${error}`);
  }
  if (children) {
    return /* @__PURE__ */ (0, import_jsx_runtime2.jsx)(import_jsx_runtime2.Fragment, { children: children({
      gain,
      setGain,
      isMuted,
      setMuted,
      devices,
      selectedDeviceId,
      selectDevice,
      refreshDevices,
      isActive: !!output.current,
      error
    }) });
  }
  return null;
});
Microphone.displayName = "Microphone";
// src/components/sources/MP3Deck.tsx
var import_react6 = __toESM(require("react"));
var import_jsx_runtime3 = require("react/jsx-runtime");
// Audio-file player source: HTMLAudioElement -> MediaElementSource -> GainNode.
// The gain node is created once per context and published on `output`; each
// src change builds a fresh element + source node patched into that gain.
// Headless: renders nothing unless a render-prop `children` is supplied.
var MP3Deck = import_react6.default.forwardRef(({
  output,
  label = "mp3-deck",
  src: controlledSrc,
  onSrcChange,
  gain: controlledGain,
  onGainChange,
  loop: controlledLoop,
  onLoopChange,
  onPlayingChange,
  onTimeUpdate,
  onError,
  onEnd,
  children
}, ref) => {
  const audioContext = useAudioContext();
  const [src, setSrc] = useControlledState(controlledSrc, "", onSrcChange);
  const [gain, setGain] = useControlledState(controlledGain, 1, onGainChange);
  const [loop, setLoop] = useControlledState(controlledLoop, false, onLoopChange);
  const [isPlaying, setIsPlaying] = (0, import_react6.useState)(false);
  const [currentTime, setCurrentTime] = (0, import_react6.useState)(0);
  const [duration, setDuration] = (0, import_react6.useState)(0);
  const [error, setError] = (0, import_react6.useState)(null);
  const [isReady, setIsReady] = (0, import_react6.useState)(false);
  const audioElementRef = (0, import_react6.useRef)(null);
  const sourceNodeRef = (0, import_react6.useRef)(null);
  const gainNodeRef = (0, import_react6.useRef)(null);
  // Tracks a blob: URL we created via loadFile so it can be revoked later.
  const blobUrlRef = (0, import_react6.useRef)(null);
  // Create the persistent gain node and publish the output descriptor.
  (0, import_react6.useEffect)(() => {
    if (!audioContext) return;
    const gainNode = audioContext.createGain();
    gainNode.gain.value = gain;
    gainNodeRef.current = gainNode;
    output.current = {
      audioNode: gainNode,
      gain: gainNode,
      context: audioContext,
      metadata: {
        label,
        sourceType: "mp3"
      }
    };
    return () => {
      gainNode.disconnect();
      output.current = null;
      gainNodeRef.current = null;
    };
  }, [audioContext, label]);
  // Rebuild the <audio> element and its MediaElementSource on every src
  // change. Teardown order matters: disconnect the old source, fully unload
  // the old element, then wire the new one.
  (0, import_react6.useEffect)(() => {
    if (!audioContext || !src || !gainNodeRef.current) return;
    let audioElement = null;
    let sourceNode = null;
    const currentSrc = src;
    const setupAudio = async () => {
      try {
        if (sourceNodeRef.current) {
          try {
            sourceNodeRef.current.disconnect();
          } catch (e) {
            // Best-effort: node may already be disconnected.
          }
          sourceNodeRef.current = null;
        }
        if (audioElementRef.current) {
          const oldElement = audioElementRef.current;
          oldElement.pause();
          // removeAttribute + load() is the reliable way to abort/unload;
          // the src = "" branch covers non-standard element shims.
          if (typeof oldElement.removeAttribute === "function") {
            oldElement.removeAttribute("src");
          } else {
            oldElement.src = "";
          }
          oldElement.load();
          audioElementRef.current = null;
        }
        setIsReady(false);
        setIsPlaying(false);
        setError(null);
        setCurrentTime(0);
        setDuration(0);
        audioElement = new Audio();
        audioElement.loop = loop;
        // crossOrigin is needed for remote files so the MediaElementSource
        // is not muted by CORS; blob: URLs are same-origin and must skip it.
        if (!src.startsWith("blob:")) {
          audioElement.crossOrigin = "anonymous";
        }
        audioElement.preload = "auto";
        audioElement.src = src;
        audioElementRef.current = audioElement;
        sourceNode = audioContext.createMediaElementSource(audioElement);
        sourceNodeRef.current = sourceNode;
        sourceNode.connect(gainNodeRef.current);
        // Mirror element events into React state / callbacks.
        audioElement.addEventListener("loadedmetadata", () => {
          setDuration(audioElement.duration);
        });
        audioElement.addEventListener("canplaythrough", () => {
          setIsReady(true);
        });
        audioElement.addEventListener("timeupdate", () => {
          setCurrentTime(audioElement.currentTime);
        });
        audioElement.addEventListener("play", () => setIsPlaying(true));
        audioElement.addEventListener("pause", () => setIsPlaying(false));
        audioElement.addEventListener("ended", () => {
          setIsPlaying(false);
          onEnd?.();
        });
        audioElement.addEventListener("error", () => {
          setError("Failed to load audio file");
          setIsReady(false);
        });
      } catch (err) {
        setError(err instanceof Error ? err.message : "Failed to load audio");
        setIsReady(false);
      }
    };
    setupAudio();
    return () => {
      if (audioElement) {
        audioElement.pause();
        if (typeof audioElement.removeAttribute === "function") {
          audioElement.removeAttribute("src");
        } else {
          audioElement.src = "";
        }
        audioElement.load();
      }
      if (sourceNode) {
        sourceNode.disconnect();
      }
      // Only revoke the blob URL if it is ours and is the src being replaced.
      if (blobUrlRef.current === currentSrc && currentSrc.startsWith("blob:")) {
        URL.revokeObjectURL(currentSrc);
        blobUrlRef.current = null;
      }
    };
  }, [audioContext, src]);
  // Keep live element/node params in sync without rebuilding the graph.
  (0, import_react6.useEffect)(() => {
    if (audioElementRef.current) {
      audioElementRef.current.loop = loop;
    }
  }, [loop]);
  (0, import_react6.useEffect)(() => {
    if (gainNodeRef.current) {
      gainNodeRef.current.gain.value = gain;
    }
  }, [gain]);
  // Load a local File by turning it into an object URL and setting it as src.
  const loadFile = (file) => {
    if (blobUrlRef.current) {
      URL.revokeObjectURL(blobUrlRef.current);
    }
    const url = URL.createObjectURL(file);
    blobUrlRef.current = url;
    setSrc(url);
  };
  // Polling workaround: if the published output disappears while playing
  // (e.g. the graph was torn down), pause the element.
  (0, import_react6.useEffect)(() => {
    const checkConnection = () => {
      if (!output.current && isPlaying) {
        pause();
      }
    };
    const interval = setInterval(checkConnection, 100);
    return () => clearInterval(interval);
  }, [isPlaying]);
  // Transport controls. play() resumes a suspended context first (autoplay
  // policy) and surfaces a friendly error if playback is still blocked.
  const play = async () => {
    if (audioElementRef.current && audioContext) {
      if (audioContext.state === "suspended") {
        await audioContext.resume();
      }
      audioElementRef.current.play().catch(() => {
        setError("Playback failed. User interaction may be required.");
      });
    }
  };
  const pause = () => {
    if (audioElementRef.current) {
      audioElementRef.current.pause();
    }
  };
  const stop = () => {
    if (audioElementRef.current) {
      audioElementRef.current.pause();
      audioElementRef.current.currentTime = 0;
    }
  };
  const seek = (time) => {
    if (audioElementRef.current) {
      audioElementRef.current.currentTime = time;
    }
  };
  (0, import_react6.useImperativeHandle)(ref, () => ({
    play,
    pause,
    stop,
    seek,
    loadFile,
    getState: () => ({ src, gain, loop, isPlaying, isReady, currentTime, duration, error })
  }), [src, gain, loop, isPlaying, isReady, currentTime, duration, error]);
  (0, import_react6.useEffect)(() => {
    onPlayingChange?.(isPlaying);
  }, [isPlaying, onPlayingChange]);
  (0, import_react6.useEffect)(() => {
    onTimeUpdate?.(currentTime, duration);
  }, [currentTime, duration, onTimeUpdate]);
  (0, import_react6.useEffect)(() => {
    onError?.(error);
  }, [error, onError]);
  if (error) {
    console.warn(`MP3Deck error: ${error}`);
  }
  if (children) {
    return /* @__PURE__ */ (0, import_jsx_runtime3.jsx)(import_jsx_runtime3.Fragment, { children: children({
      src,
      setSrc,
      loadFile,
      gain,
      setGain,
      loop,
      setLoop,
      isPlaying,
      play,
      pause,
      stop,
      currentTime,
      duration,
      seek,
      isActive: !!output.current,
      isReady,
      error
    }) });
  }
  return null;
});
MP3Deck.displayName = "MP3Deck";
// src/components/sources/StreamingAudioDeck.tsx
var import_react7 = __toESM(require("react"));
var import_jsx_runtime4 = require("react/jsx-runtime");
// Network-stream player source: HTMLAudioElement -> MediaElementSource ->
// GainNode. Like MP3Deck but URL-oriented; if the previous element was
// playing when the URL changes, playback auto-resumes on the new URL.
// Headless: renders nothing unless a render-prop `children` is supplied.
var StreamingAudioDeck = import_react7.default.forwardRef(({
  output,
  label = "streaming-audio-deck",
  url: controlledUrl,
  onUrlChange,
  gain: controlledGain,
  onGainChange,
  loop: controlledLoop,
  onLoopChange,
  onPlayingChange,
  onTimeUpdate,
  onError,
  onEnd,
  children
}, ref) => {
  const audioContext = useAudioContext();
  const [url, setUrl] = useControlledState(controlledUrl, "", onUrlChange);
  const [gain, setGain] = useControlledState(controlledGain, 1, onGainChange);
  const [loop, setLoop] = useControlledState(controlledLoop, false, onLoopChange);
  const [isPlaying, setIsPlaying] = (0, import_react7.useState)(false);
  const [currentTime, setCurrentTime] = (0, import_react7.useState)(0);
  const [duration, setDuration] = (0, import_react7.useState)(0);
  const [error, setError] = (0, import_react7.useState)(null);
  const audioElementRef = (0, import_react7.useRef)(null);
  const sourceNodeRef = (0, import_react7.useRef)(null);
  const gainNodeRef = (0, import_react7.useRef)(null);
  // Create the persistent gain node and publish the output descriptor.
  (0, import_react7.useEffect)(() => {
    if (!audioContext) return;
    const gainNode = audioContext.createGain();
    gainNode.gain.value = gain;
    gainNodeRef.current = gainNode;
    output.current = {
      audioNode: gainNode,
      // Use gain as the audio node initially
      gain: gainNode,
      context: audioContext,
      metadata: {
        label,
        sourceType: "stream"
      }
    };
    return () => {
      gainNode.disconnect();
      output.current = null;
      gainNodeRef.current = null;
    };
  }, [audioContext, label]);
  // Rebuild the element + source node on each URL change.
  (0, import_react7.useEffect)(() => {
    if (!audioContext || !url || !gainNodeRef.current) return;
    // Capture "was playing" BEFORE tearing down the old element so playback
    // can be resumed on the new URL.
    const shouldAutoPlay = audioElementRef.current?.paused === false;
    let audioElement = null;
    let sourceNode = null;
    const setupStreaming = async () => {
      try {
        if (sourceNodeRef.current) {
          try {
            sourceNodeRef.current.disconnect();
          } catch (e) {
            // Best-effort: node may already be disconnected.
          }
          sourceNodeRef.current = null;
        }
        if (audioElementRef.current) {
          const oldElement = audioElementRef.current;
          oldElement.pause();
          oldElement.removeAttribute("src");
          oldElement.load();
          audioElementRef.current = null;
        }
        setError(null);
        audioElement = new Audio();
        // Needed so the MediaElementSource isn't muted by CORS on remote URLs.
        audioElement.crossOrigin = "anonymous";
        audioElement.src = url;
        audioElement.loop = loop;
        audioElementRef.current = audioElement;
        sourceNode = audioContext.createMediaElementSource(audioElement);
        sourceNodeRef.current = sourceNode;
        sourceNode.connect(gainNodeRef.current);
        // NOTE(review): unlike MP3Deck, this rebinds output.audioNode to the
        // raw source node after setup — verify downstream consumers expect
        // this asymmetry between the two decks.
        if (output.current) {
          output.current.audioNode = sourceNode;
        }
        // Mirror element events into React state / callbacks.
        const handlePlay = () => setIsPlaying(true);
        const handlePause = () => setIsPlaying(false);
        const handleEnded = () => {
          setIsPlaying(false);
          onEnd?.();
        };
        const handleLoadedMetadata = () => {
          setDuration(audioElement.duration);
        };
        const handleTimeUpdate = () => {
          setCurrentTime(audioElement.currentTime);
        };
        const handleError = (e) => {
          setError("Failed to load stream");
          console.error("Stream error:", e);
        };
        audioElement.addEventListener("play", handlePlay);
        audioElement.addEventListener("pause", handlePause);
        audioElement.addEventListener("ended", handleEnded);
        audioElement.addEventListener("loadedmetadata", handleLoadedMetadata);
        audioElement.addEventListener("timeupdate", handleTimeUpdate);
        audioElement.addEventListener("error", handleError);
        if (shouldAutoPlay) {
          if (audioContext.state === "suspended") {
            await audioContext.resume();
          }
          try {
            await audioElement.play();
            setIsPlaying(true);
          } catch (err) {
            // Autoplay may be blocked until a user gesture; degrade quietly.
            console.warn("Auto-play failed for new URL:", err);
            setIsPlaying(false);
          }
        } else {
          setIsPlaying(false);
        }
      } catch (err) {
        setError(err instanceof Error ? err.message : "Failed to setup stream");
        console.error("StreamingAudioDeck error:", err);
        setIsPlaying(false);
      }
    };
    setupStreaming();
    return () => {
      if (sourceNode) {
        try {
          sourceNode.disconnect();
        } catch (e) {
          // Best-effort: node may already be disconnected.
        }
      }
      if (audioElement) {
        audioElement.pause();
        audioElement.removeAttribute("src");
        audioElement.load();
      }
      sourceNodeRef.current = null;
      audioElementRef.current = null;
    };
  }, [audioContext, url]);
  // Keep live element/node params in sync without rebuilding the graph.
  (0, import_react7.useEffect)(() => {
    if (gainNodeRef.current) {
      gainNodeRef.current.gain.value = gain;
    }
  }, [gain]);
  (0, import_react7.useEffect)(() => {
    if (audioElementRef.current) {
      audioElementRef.current.loop = loop;
    }
  }, [loop]);
  // Transport controls; play() resumes a suspended context first.
  const play = async () => {
    if (audioElementRef.current && audioContext) {
      if (audioContext.state === "suspended") {
        await audioContext.resume();
      }
      audioElementRef.current.play().catch((err) => {
        setError("Failed to start stream. User interaction may be required.");
        console.warn("Stream playback blocked:", err);
      });
    }
  };
  const pause = () => {
    if (audioElementRef.current) {
      audioElementRef.current.pause();
    }
  };
  const stop = () => {
    if (audioElementRef.current) {
      audioElementRef.current.pause();
      audioElementRef.current.currentTime = 0;
    }
  };
  const seek = (time) => {
    if (audioElementRef.current) {
      audioElementRef.current.currentTime = time;
    }
  };
  (0, import_react7.useImperativeHandle)(ref, () => ({
    play,
    pause,
    stop,
    seek,
    getState: () => ({ url, gain, loop, isPlaying, currentTime, duration, error })
  }), [url, gain, loop, isPlaying, currentTime, duration, error]);
  (0, import_react7.useEffect)(() => {
    onPlayingChange?.(isPlaying);
  }, [isPlaying, onPlayingChange]);
  (0, import_react7.useEffect)(() => {
    onTimeUpdate?.(currentTime, duration);
  }, [currentTime, duration, onTimeUpdate]);
  (0, import_react7.useEffect)(() => {
    onError?.(error);
  }, [error, onError]);
  if (error) {
    console.warn(`StreamingAudioDeck error: ${error}`);
  }
  if (children) {
    return /* @__PURE__ */ (0, import_jsx_runtime4.jsx)(import_jsx_runtime4.Fragment, { children: children({
      url,
      setUrl,
      gain,
      setGain,
      loop,
      setLoop,
      isPlaying,
      play,
      pause,
      stop,
      currentTime,
      duration,
      seek,
      isActive: !!output.current,
      error
    }) });
  }
  return null;
});
StreamingAudioDeck.displayName = "StreamingAudioDeck";
// src/components/sources/ToneGenerator.tsx
var import_react8 = __toESM(require("react"));
var import_jsx_runtime5 = require("react/jsx-runtime");
// Oscillator source: OscillatorNode -> GainNode, with an optional CV input
// that modulates oscillator frequency (FM) scaled by `cvAmount`.
// Headless: renders nothing unless a render-prop `children` is supplied.
var ToneGenerator = import_react8.default.forwardRef(({
  output,
  label = "tone-generator",
  frequency: controlledFrequency,
  onFrequencyChange,
  gain: controlledGain,
  onGainChange,
  waveform: controlledWaveform,
  onWaveformChange,
  cv,
  cvAmount = 100,
  children
}, ref) => {
  const audioContext = useAudioContext();
  const [frequency, setFrequency] = useControlledState(controlledFrequency, 440, onFrequencyChange);
  const [gain, setGain] = useControlledState(controlledGain, 0.3, onGainChange);
  const [waveform, setWaveform] = useControlledState(controlledWaveform, "square", onWaveformChange);
  const oscillatorRef = (0, import_react8.useRef)(null);
  const gainNodeRef = (0, import_react8.useRef)(null);
  const cvGainRef = (0, import_react8.useRef)(null);
  // Build the oscillator/gain pair once per context and publish on `output`.
  // frequency/gain/waveform are seeded here and kept current by the
  // param-sync effects below (they are deliberately not dependencies).
  (0, import_react8.useEffect)(() => {
    if (!audioContext) return;
    const oscillator = audioContext.createOscillator();
    oscillator.type = waveform;
    oscillator.frequency.value = frequency;
    oscillatorRef.current = oscillator;
    const gainNode = audioContext.createGain();
    gainNode.gain.value = gain;
    gainNodeRef.current = gainNode;
    oscillator.connect(gainNode);
    oscillator.start();
    output.current = {
      audioNode: oscillator,
      gain: gainNode,
      context: audioContext,
      metadata: {
        label,
        sourceType: "tone"
      }
    };
    return () => {
      oscillator.stop();
      oscillator.disconnect();
      gainNode.disconnect();
      output.current = null;
      oscillatorRef.current = null;
      gainNodeRef.current = null;
      // Also tear down any CV scaler created by the patch effect below.
      if (cvGainRef.current) {
        cvGainRef.current.disconnect();
        cvGainRef.current = null;
      }
    };
  }, [audioContext, label]);
  // Patch an optional CV source into oscillator.frequency through a scaling
  // gain (cvAmount = Hz of swing per unit of CV).
  (0, import_react8.useEffect)(() => {
    if (!cv?.current || !oscillatorRef.current || !audioContext) return;
    const cvGain = audioContext.createGain();
    cvGain.gain.value = cvAmount;
    cvGainRef.current = cvGain;
    cv.current.gain.connect(cvGain);
    cvGain.connect(oscillatorRef.current.frequency);
    return () => {
      if (cvGain && cv.current) {
        try {
          cv.current.gain.disconnect(cvGain);
          cvGain.disconnect();
        } catch (e) {
          // Best-effort: nodes may already be disconnected.
        }
      }
    };
    // NOTE(review): keying on String(audioNode) only distinguishes node TYPES
    // ("[object OscillatorNode]" etc.), not node instances — swapping in a
    // different CV source of the same type may not re-patch; verify.
  }, [cv?.current?.audioNode ? String(cv.current.audioNode) : "null", cvAmount]);
  // Keep live node params in sync with state without rebuilding the graph.
  (0, import_react8.useEffect)(() => {
    if (oscillatorRef.current) {
      oscillatorRef.current.frequency.value = frequency;
    }
  }, [frequency]);
  (0, import_react8.useEffect)(() => {
    if (gainNodeRef.current) {
      gainNodeRef.current.gain.value = gain;
    }
  }, [gain]);
  (0, import_react8.useEffect)(() => {
    if (oscillatorRef.current) {
      oscillatorRef.current.type = waveform;
    }
  }, [waveform]);
  (0, import_react8.useEffect)(() => {
    if (cvGainRef.current) {
      cvGainRef.current.gain.value = cvAmount;
    }
  }, [cvAmount]);
  (0, import_react8.useImperativeHandle)(ref, () => ({
    getState: () => ({ frequency, gain, waveform })
  }), [frequency, gain, waveform]);
  if (children) {
    return /* @__PURE__ */ (0, import_jsx_runtime5.jsx)(import_jsx_runtime5.Fragment, { children: children({
      frequency,
      setFrequency,
      gain,
      setGain,
      waveform,
      setWaveform,
      isActive: !!output.current
    }) });
  }
  return null;
});
ToneGenerator.displayName = "ToneGenerator";
// src/components/sources/NoiseGenerator.tsx
var import_react9 = __toESM(require("react"));
var import_jsx_runtime6 = require("react/jsx-runtime");
// Noise source: looping AudioBufferSource (white/pink) -> gain -> cvMultiplier.
// When a CV input is patched, the multiplier's gain is driven by the CV
// (amplitude modulation); otherwise it passes audio through at unity.
// Headless: renders nothing unless a render-prop `children` is supplied.
var NoiseGenerator = import_react9.default.forwardRef(({
  output,
  label = "noise-generator",
  gain: controlledGain,
  onGainChange,
  type: controlledType,
  onTypeChange,
  cv,
  cvAmount = 0.3,
  children
}, ref) => {
  const audioContext = useAudioContext();
  const [gain, setGain] = useControlledState(controlledGain, 0.3, onGainChange);
  const [type, setType] = useControlledState(controlledType, "white", onTypeChange);
  const bufferSourceRef = (0, import_react9.useRef)(null);
  const gainNodeRef = (0, import_react9.useRef)(null);
  const cvGainRef = (0, import_react9.useRef)(null);
  const cvMultiplierRef = (0, import_react9.useRef)(null);
  // 2 seconds of uniform white noise in [-1, 1), looped by the buffer source.
  const createWhiteNoiseBuffer = (audioContext2) => {
    const bufferSize = audioContext2.sampleRate * 2;
    const buffer = audioContext2.createBuffer(1, bufferSize, audioContext2.sampleRate);
    const output2 = buffer.getChannelData(0);
    for (let i = 0; i < bufferSize; i++) {
      output2[i] = Math.random() * 2 - 1;
    }
    return buffer;
  };
  // 2 seconds of pink noise: white noise shaped by a bank of one-pole filters
  // (coefficients appear to follow Paul Kellet's approximation — verify);
  // 0.11 compensates the filter bank's overall level.
  const createPinkNoiseBuffer = (audioContext2) => {
    const bufferSize = audioContext2.sampleRate * 2;
    const buffer = audioContext2.createBuffer(1, bufferSize, audioContext2.sampleRate);
    const output2 = buffer.getChannelData(0);
    let b0 = 0, b1 = 0, b2 = 0, b3 = 0, b4 = 0, b5 = 0, b6 = 0;
    for (let i = 0; i < bufferSize; i++) {
      const white = Math.random() * 2 - 1;
      b0 = 0.99886 * b0 + white * 0.0555179;
      b1 = 0.99332 * b1 + white * 0.0750759;
      b2 = 0.969 * b2 + white * 0.153852;
      b3 = 0.8665 * b3 + white * 0.3104856;
      b4 = 0.55 * b4 + white * 0.5329522;
      b5 = -0.7616 * b5 - white * 0.016898;
      output2[i] = b0 + b1 + b2 + b3 + b4 + b5 + b6 + white * 0.5362;
      output2[i] *= 0.11;
      b6 = white * 0.115926;
    }
    return buffer;
  };
  // Build the gain -> cvMultiplier chain and publish the multiplier as the
  // output node, so CV amplitude modulation happens after the level control.
  (0, import_react9.useEffect)(() => {
    if (!audioContext) return;
    const gainNode = audioContext.createGain();
    gainNode.gain.value = gain;
    gainNodeRef.current = gainNode;
    const cvMultiplier = audioContext.createGain();
    cvMultiplier.gain.value = 1;
    cvMultiplierRef.current = cvMultiplier;
    gainNode.connect(cvMultiplier);
    output.current = {
      audioNode: cvMultiplier,
      gain: cvMultiplier,
      context: audioContext,
      metadata: {
        label,
        // NOTE(review): "tone" looks like a copy-paste from ToneGenerator —
        // a dedicated "noise" tag seems intended; verify what consumers of
        // metadata.sourceType expect before changing it.
        sourceType: "tone"
      }
    };
    return () => {
      gainNode.disconnect();
      cvMultiplier.disconnect();
      output.current = null;
      gainNodeRef.current = null;
      cvMultiplierRef.current = null;
      if (cvGainRef.current) {
        cvGainRef.current.disconnect();
        cvGainRef.current = null;
      }
    };
  }, [audioContext, label]);
  // Patch the CV source into the multiplier's gain AudioParam. The base gain
  // is forced to 0 so the audible level becomes cv * cvAmount (restored to
  // unity when the CV is unpatched).
  (0, import_react9.useEffect)(() => {
    if (!cv?.current || !cvMultiplierRef.current || !audioContext) return;
    const cvGain = audioContext.createGain();
    cvGain.gain.value = cvAmount;
    cvGainRef.current = cvGain;
    cvMultiplierRef.current.gain.value = 0;
    cv.current.gain.connect(cvGain);
    cvGain.connect(cvMultiplierRef.current.gain);
    return () => {
      if (cvGain && cv.current && cvMultiplierRef.current) {
        try {
          cv.current.gain.disconnect(cvGain);
          cvGain.disconnect();
          cvMultiplierRef.current.gain.value = 1;
        } catch (e) {
          // Best-effort: nodes may already be disconnected.
        }
      }
    };
    // NOTE(review): String(audioNode) only distinguishes node TYPES, not
    // instances — swapping CV sources of the same type may not re-patch.
  }, [cv?.current?.audioNode ? String(cv.current.audioNode) : "null", cvAmount]);
  (0, import_react9.useEffect)(() => {
    if (cvGainRef.current) {
      cvGainRef.current.gain.value = cvAmount;
    }
  }, [cvAmount]);
  // (Re)generate and (re)start the looping noise buffer when the type changes.
  (0, import_react9.useEffect)(() => {
    if (!audioContext || !gainNodeRef.current) return;
    if (bufferSourceRef.current) {
      bufferSourceRef.current.stop();
      bufferSourceRef.current.disconnect();
    }
    const buffer = type === "white" ? createWhiteNoiseBuffer(audioContext) : createPinkNoiseBuffer(audioContext);
    const bufferSource = audioContext.createBufferSource();
    bufferSource.buffer = buffer;
    bufferSource.loop = true;
    bufferSourceRef.current = bufferSource;
    bufferSource.connect(gainNodeRef.current);
    bufferSource.start(0);
    return () => {
      if (bufferSource) {
        try {
          bufferSource.stop();
          bufferSource.disconnect();
        } catch (e) {
          // Best-effort: source may already be stopped.
        }
      }
    };
  }, [audioContext, type]);
  (0, import_react9.useEffect)(() => {
    if (gainNodeRef.current) {
      gainNodeRef.current.gain.value = gain;
    }
  }, [gain]);
  (0, import_react9.useImperativeHandle)(ref, () => ({
    getState: () => ({ gain, type })
  }), [gain, type]);
  if (children) {
    return /* @__PURE__ */ (0, import_jsx_runtime6.jsx)(import_jsx_runtime6.Fragment, { children: children({
      gain,
      setGain,
      type,
      setType,
      isActive: !!output.current
    }) });
  }
  return null;
});
NoiseGenerator.displayName = "NoiseGenerator";
// src/components/cv/LFO.tsx
var import_react10 = __toESM(require("react"));
var import_jsx_runtime7 = require("react/jsx-runtime");
// Low-frequency oscillator used as a CV source: OscillatorNode -> GainNode,
// where the gain sets modulation depth (amplitude). Published on `output`
// with sourceType "cv". Headless unless a render-prop `children` is given.
var LFO = import_react10.default.forwardRef(({
  output,
  label = "lfo",
  frequency: controlledFrequency,
  onFrequencyChange,
  amplitude: controlledAmplitude,
  onAmplitudeChange,
  waveform: controlledWaveform,
  onWaveformChange,
  children
}, ref) => {
  const audioContext = useAudioContext();
  const [frequency, setFrequency] = useControlledState(controlledFrequency, 1, onFrequencyChange);
  const [amplitude, setAmplitude] = useControlledState(controlledAmplitude, 1, onAmplitudeChange);
  const [waveform, setWaveform] = useControlledState(controlledWaveform, "sine", onWaveformChange);
  const oscRef = (0, import_react10.useRef)(null);
  const depthRef = (0, import_react10.useRef)(null);
  // Build the oscillator/depth pair once per context (or label change) and
  // publish it on the output ref; tear it all down on cleanup.
  (0, import_react10.useEffect)(() => {
    if (!audioContext) return;
    const osc = audioContext.createOscillator();
    osc.type = waveform;
    osc.frequency.value = frequency;
    const depth = audioContext.createGain();
    depth.gain.value = amplitude;
    osc.connect(depth);
    osc.start(0);
    oscRef.current = osc;
    depthRef.current = depth;
    output.current = {
      audioNode: osc,
      gain: depth,
      context: audioContext,
      metadata: {
        label,
        sourceType: "cv"
      }
    };
    return () => {
      osc.stop();
      osc.disconnect();
      depth.disconnect();
      output.current = null;
      oscRef.current = null;
      depthRef.current = null;
    };
  }, [audioContext, label]);
  // Keep live node params in sync with state without rebuilding the graph.
  (0, import_react10.useEffect)(() => {
    const osc = oscRef.current;
    if (osc) osc.frequency.value = frequency;
  }, [frequency]);
  (0, import_react10.useEffect)(() => {
    const depth = depthRef.current;
    if (depth) depth.gain.value = amplitude;
  }, [amplitude]);
  (0, import_react10.useEffect)(() => {
    const osc = oscRef.current;
    if (osc) osc.type = waveform;
  }, [waveform]);
  (0, import_react10.useImperativeHandle)(ref, () => ({
    getState: () => ({ frequency, amplitude, waveform })
  }), [frequency, amplitude, waveform]);
  if (!children) return null;
  return /* @__PURE__ */ (0, import_jsx_runtime7.jsx)(import_jsx_runtime7.Fragment, { children: children({
    frequency,
    setFrequency,
    amplitude,
    setAmplitude,
    waveform,
    setWaveform,
    isActive: !!output.current
  }) });
});
LFO.displayName = "LFO";
// src/components/cv/ADSR.tsx
var import_react11 = __toESM(require("react"));
var import_jsx_runtime8 = require("react/jsx-runtime");
// src/components/cv/ADSR.tsx
// Attack/Decay/Sustain/Release envelope generator. Emits a CV signal
// (ConstantSource -> Gain) whose gain is shaped by the envelope; an optional
// `gate` source is tapped through an AnalyserNode and polled for rising /
// falling edges that trigger / release the envelope.
var ADSR = import_react11.default.forwardRef(({
  gate,
  output,
  label = "adsr",
  attack: controlledAttack,
  onAttackChange,
  decay: controlledDecay,
  onDecayChange,
  sustain: controlledSustain,
  onSustainChange,
  release: controlledRelease,
  onReleaseChange,
  children
}, ref) => {
  const audioContext = useAudioContext();
  // Envelope stages: attack/decay/release are durations in seconds;
  // sustain is a 0..1 level.
  const [attack, setAttack] = useControlledState(controlledAttack, 0.01, onAttackChange);
  const [decay, setDecay] = useControlledState(controlledDecay, 0.1, onDecayChange);
  const [sustain, setSustain] = useControlledState(controlledSustain, 0.7, onSustainChange);
  const [release, setRelease] = useControlledState(controlledRelease, 0.3, onReleaseChange);
  const constantSourceRef = (0, import_react11.useRef)(null);
  const gainNodeRef = (0, import_react11.useRef)(null);
  const isGateOpenRef = (0, import_react11.useRef)(false);
  // Mirror the stage values into refs so the trigger/release closures
  // (created once per audio graph) always read the latest values without
  // re-subscribing the graph effect.
  const attackRef = (0, import_react11.useRef)(attack);
  const decayRef = (0, import_react11.useRef)(decay);
  const sustainRef = (0, import_react11.useRef)(sustain);
  const releaseRef = (0, import_react11.useRef)(release);
  (0, import_react11.useEffect)(() => {
    attackRef.current = attack;
  }, [attack]);
  (0, import_react11.useEffect)(() => {
    decayRef.current = decay;
  }, [decay]);
  (0, import_react11.useEffect)(() => {
    sustainRef.current = sustain;
  }, [sustain]);
  (0, import_react11.useEffect)(() => {
    releaseRef.current = release;
  }, [release]);
  // Stable holders for the envelope callbacks; populated once the audio
  // graph exists. Callers always go through wrappers that dereference
  // `.current` at call time, so nobody can capture the initial no-ops.
  const triggerEnvelope = (0, import_react11.useRef)(() => {
  });
  const releaseEnvelope = (0, import_react11.useRef)(() => {
  });
  (0, import_react11.useEffect)(() => {
    if (!audioContext) return;
    // CV chain: a constant 1 scaled by the envelope-controlled gain node.
    const constantSource = audioContext.createConstantSource();
    constantSource.offset.value = 1;
    constantSourceRef.current = constantSource;
    const gainNode = audioContext.createGain();
    gainNode.gain.value = 0;
    gainNodeRef.current = gainNode;
    constantSource.connect(gainNode);
    constantSource.start(0);
    triggerEnvelope.current = () => {
      // Ignore retriggers while the gate is already open.
      if (isGateOpenRef.current) return;
      isGateOpenRef.current = true;
      const now = audioContext.currentTime;
      const a = attackRef.current;
      const d = decayRef.current;
      const s = sustainRef.current;
      // Restart scheduling from the current gain value to avoid clicks.
      gainNode.gain.cancelScheduledValues(now);
      gainNode.gain.setValueAtTime(gainNode.gain.value, now);
      gainNode.gain.linearRampToValueAtTime(1, now + a);
      gainNode.gain.linearRampToValueAtTime(s, now + a + d);
    };
    releaseEnvelope.current = () => {
      if (!isGateOpenRef.current) return;
      isGateOpenRef.current = false;
      const now = audioContext.currentTime;
      const r = releaseRef.current;
      gainNode.gain.cancelScheduledValues(now);
      gainNode.gain.setValueAtTime(gainNode.gain.value, now);
      gainNode.gain.linearRampToValueAtTime(0, now + r);
    };
    output.current = {
      audioNode: constantSource,
      gain: gainNode,
      context: audioContext,
      metadata: {
        label,
        sourceType: "cv"
      }
    };
    return () => {
      constantSource.stop();
      constantSource.disconnect();
      gainNode.disconnect();
      output.current = null;
      constantSourceRef.current = null;
      gainNodeRef.current = null;
    };
  }, [audioContext, label]);
  (0, import_react11.useEffect)(() => {
    if (!gate?.current?.audioNode || !audioContext) return;
    const gateNode = gate.current.audioNode;
    // FIX: the original duplicated this whole monitor in byte-identical
    // `instanceof ConstantSourceNode` / else branches; one path suffices.
    // Tap the gate through an analyser and poll for edges: any time-domain
    // sample deviating from the 128 midpoint by more than the threshold
    // counts as "gate high".
    const analyser = audioContext.createAnalyser();
    analyser.fftSize = 128;
    analyser.smoothingTimeConstant = 0;
    const bufferLength = analyser.frequencyBinCount;
    const dataArray = new Uint8Array(bufferLength);
    gateNode.connect(analyser);
    let lastGateState = false;
    const checkGate = () => {
      analyser.getByteTimeDomainData(dataArray);
      let max = 0;
      for (let i = 0; i < bufferLength; i++) {
        const value = Math.abs(dataArray[i] - 128);
        if (value > max) max = value;
      }
      const isGateHigh = max > 20;
      // Rising edge triggers the envelope; falling edge releases it.
      if (isGateHigh && !lastGateState) {
        triggerEnvelope.current();
      } else if (!isGateHigh && lastGateState) {
        releaseEnvelope.current();
      }
      lastGateState = isGateHigh;
    };
    // 1ms requested; browsers clamp repeated timers (to ~4ms), which is
    // still fast enough for gate edge detection.
    const intervalId = window.setInterval(checkGate, 1);
    return () => {
      clearInterval(intervalId);
      // FIX: also sever the gateNode -> analyser edge. analyser.disconnect()
      // only detaches the analyser's *outgoing* connections, so the original
      // leaked the incoming tap on every gate change/unmount.
      try {
        gateNode.disconnect(analyser);
      } catch {
        // best effort: the gate node may already be stopped/disconnected
      }
      analyser.disconnect();
    };
  }, [gate, gate?.current, gate?.current?.audioNode, audioContext]);
  // FIX: expose stable wrappers instead of snapshotting `.current`. The
  // imperative handle is installed during commit, before the passive graph
  // effect runs, so the original snapshot was the initial no-op placeholder.
  (0, import_react11.useImperativeHandle)(ref, () => ({
    trigger: () => triggerEnvelope.current(),
    releaseGate: () => releaseEnvelope.current(),
    getState: () => ({ attack, decay, sustain, release })
  }), [attack, decay, sustain, release]);
  // Headless render-prop API: expose state, setters, and envelope controls.
  if (children) {
    return /* @__PURE__ */ (0, import_jsx_runtime8.jsx)(import_jsx_runtime8.Fragment, { children: children({
      attack,
      setAttack,
      decay,
      setDecay,
      sustain,
      setSustain,
      release,
      setRelease,
      trigger: () => triggerEnvelope.current(),
      releaseGate: () => releaseEnvelope.current(),
      isActive: !!output.current
    }) });
  }
  return null;
});
ADSR.displayName = "ADSR";
// src/components/cv/Sequencer.tsx
var import_react12 = __toESM(require("react"));
var import_jsx_runtime9 = require("react/jsx-runtime");
var Sequencer = import_react12.default.forwardRef(({
output,
gateOutput,
label = "sequencer",
numSteps = 8,
steps: controlledSteps,
onStepsChange,
bpm: controlledBpm,
onBpmChange,
onCurrentStepChange,
onPlayingChange,
children
}, ref) => {
const audioContext = useAudioContext();
const [steps, setSteps] = useControlledState(controlledSteps, Array(numSteps).fill(0.5), onStepsChange);
const [currentStep, setCurrentStep] = (0, import_react12.useState)(0);
const [bpm, setBpm] = useControlledState(controlledBpm, 120, onBpmChange);
const [isPlaying, setIsPlaying] = (0, import_react12.useState)(false);
const constantSourceRef = (0, import_react12.useRef)(null);
const gainNodeRef = (0, import_react12.useRef)(null);
const gateSourceRef = (0, import_react12.useRef)(null);
const gateGainRef = (0, import_react12.useRef)(null);
const intervalRef = (0, import_react12.useRef)(null);
const stepsRef = (0, import_react12.useRef)(steps);
const currentStepRef = (0, import_react12.useRef)(currentStep);
const bpmRef = (0, import_react12.useRef)(bpm);
(0, import_react12.useEffect)(() => {
stepsRef.current = steps;
}, [steps]);
(0, import_react12.useEffect)(() => {
currentStepRef.current = currentStep;
}, [currentStep]);
(0, import_react12.useEffect)(() => {
bpmRef.current = bpm;
}, [bpm]);
(0, import_react12.useEffect)(() => {
if (!audioContext) return;
const constantSource = audioContext.createConstantSource();
constantSource.offset.value = steps[0] || 0.5;
constantSourceRef.current = constantSource;
const gainNode = audioContext.createGain();
gainNode.gain.value = 1;
gainNodeRef.current = gainNode;
constantSource.connect(gainNode);
constantSource.start(0);
output.current = {
audioNode: constantSource,
gain: gainNode,
context: audioContext,
metadata: {
label,
sourceType: "cv"
}
};
if (gateOutput) {
const gateSource = audioContext.createConstantSource();
gateSource.offset.value = 0;
gateSourceRef.current = gateSource;
const gateGain = audioContext.createGain();
gateGain.gain.value = 1;
gateGainRef.current = gateGain;
gateSource.connect(gateGain);
gateSource.start(0);
gateOutput.current = {
audioNode: gateSource,
gain: gateGain,
context: audioContext,
metadata: {
label: `${label}-gate`,
sourceType: "cv"
}
};
}
return () => {
if (intervalRef.current !== null) {
clearInterval(intervalRef.current);
}
constantSource.stop();
constantSource.disconnect();
gainNode.disconnect();
output.current = null;
constantSourceRef.current = null;
gainNodeRef.current = null;
if (gateSourceRef.current) {
gateSourceRef.current.stop();
gateSourceRef.current.disconnect();
gateSourceRef.current = null;
}
if (gateGainRef.current) {
gateGainRef.current.disconnect();
gateGainRef.current = null;
}
if (gateOutput) {
gateOutput.current = null;
}
};
}, [audioContext, label, gateOutput]);
const play = () => {
if (isPlaying || !audioContext) return;
setIsPlaying(true);
const stepDuration = 60 / bpmRef.current * 1e3;
intervalRef.current = window.setInterval(() => {
setCurrentStep((prev) => {
const nextStep = (prev + 1) % stepsRef.current.length;
if (constantSourceRef.current) {
const now = audioContext.currentTime;
constantSourceRef.current.offset.setValueAtTime(stepsRef.current[nextStep], now);
}
if (gateSourceRef.current) {
const now = audioContext.currentTime;
const gateDuration = stepDuration * 0.8;
gateSourceRef.current.offset.setValueAtTime(1, now);
gateSourceRef.current.offset.setValueAtTime(0, now + gateDuration);
}
return nextStep;
});
}, stepDuration);
};
const pause = () => {
if (!isPlaying) return;
setIsPlaying(false);
if (intervalRef.current !== null) {
clearInterval(intervalRef.current);
intervalRef.current = null;
}
};
const reset = () => {
pause();
setCurrentStep(0);
if (constantSourceRef.current && audioContext) {
const now = audioContext.currentTime;
constantSourceRef.current.offset.setValueAtTime(stepsRef.current[0], now);
}
};
(0, import_react12.useEffect)(() => {
if (isPlaying && audioContext) {
if (intervalRef.current !== null) {
clearInterval(intervalRef.current);
intervalRef.current = null;
}
const stepDuration = 60 / bpmRef.current * 1e3;
intervalRef.current = window.setInterval(() => {
setCurrentStep((prev) => {
const nextStep = (prev + 1) % stepsRef.current.length;
if (constantSourceRef.current) {
const now = audioContext.currentTime;
constantSourceRef.current.offset.setValueAtTime(stepsRef.current[nextStep], now);
}
if (gateSourceRef.current) {
const now = audioContext.currentTime;
const gateDuration = 0.01;
gateSourceRef.current.offset.setValueAtTime(1, now);
gateSourceRef.current.offset.setValueAtTime(0, now + gateDuration);
}
return nextStep;
});
}, stepDuration);
}
}, [bpm]);
(0, import_react12.useImperativeHandle)(ref, () => ({
play,
pause,
reset,
getState: () => ({ steps, currentStep, bpm, isPlaying })
}), [steps, currentStep, bpm, isPlaying]);
(0, import_react12.useEffect)(() => {
onCurrentStepChange?.(currentStep);
}, [currentStep, onCurrentStepChange]);
(0, import_react12.useEffect)(() => {
onPlayingChange?.(isPlaying);
}, [isPlaying, onPlayingChange]);
if (children) {
return /* @__PURE__ */ (0, import_jsx_runtime9.jsx)(import_jsx_runtime9.Fragment, { children: children({
steps,
setSteps,
currentStep,
bpm,
setBpm,