@speechmatics/browser-audio-input-react
React hooks for managing audio inputs and permissions across browsers
"use client";
import { useMemo, useSyncExternalStore, useCallback, useEffect, createContext, useContext } from 'react';
import { getAudioDevicesStore, PCMRecorder } from '@speechmatics/browser-audio-input';
export * from '@speechmatics/browser-audio-input';
import { jsx } from 'react/jsx-runtime';
// Subscribe to device-list changes on the shared audio devices store.
function subscribeDevices(callback) {
  const audioDevices = getAudioDevicesStore();
  audioDevices.addEventListener("changeDevices", callback);
  return () => {
    audioDevices.removeEventListener("changeDevices", callback);
  };
}
const getDevices = () => getAudioDevicesStore().devices;
// Returns the current list of audio input devices, re-rendering when it changes.
function useAudioDeviceList() {
  return useSyncExternalStore(subscribeDevices, getDevices, getDevices);
}
// Subscribe to permission-state changes on the shared audio devices store.
function subscribePermissionState(callback) {
  const audioDevices = getAudioDevicesStore();
  audioDevices.addEventListener("changePermissions", callback);
  return () => {
    audioDevices.removeEventListener("changePermissions", callback);
  };
}
const getPermissionState = () => getAudioDevicesStore().permissionState;
// Returns the current microphone permission state ("prompt", "prompting", "granted" or "denied").
function useAudioPermissionState() {
  return useSyncExternalStore(
    subscribePermissionState,
    getPermissionState,
    getPermissionState
  );
}
// Returns a stable callback that prompts the user for microphone permission.
function usePromptAudioPermission() {
  return useCallback(async () => {
    await getAudioDevicesStore().promptPermissions();
  }, []);
}
// Combines the permission state, the prompt callback and the device list into a
// single object discriminated on `permissionState`.
function useAudioDevices() {
  const permissionState = useAudioPermissionState();
  const promptPermissions = usePromptAudioPermission();
  const deviceList = useAudioDeviceList();
  return useMemo(() => {
    switch (permissionState) {
      case "prompt":
        return {
          permissionState,
          promptPermissions
        };
      case "granted":
        return {
          permissionState,
          deviceList
        };
      case "prompting":
      case "denied":
        return {
          permissionState
        };
      default:
        throw new Error(`Unexpected permission state: ${permissionState}`);
    }
  }, [permissionState, promptPermissions, deviceList]);
}
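// Hedged usage sketch (not part of this module): the component name and markup
// below are assumptions, and deviceList entries are assumed to be MediaDeviceInfo objects.
//
//   function DeviceSelector() {
//     const devices = useAudioDevices();
//     if (devices.permissionState === "prompt") {
//       return jsx("button", { onClick: devices.promptPermissions, children: "Allow microphone" });
//     }
//     if (devices.permissionState === "granted") {
//       return jsx("ul", {
//         children: devices.deviceList.map(
//           (d) => jsx("li", { children: d.label }, d.deviceId)
//         )
//       });
//     }
//     return jsx("p", { children: devices.permissionState });
//   }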
// Creates a PCMRecorder for the given worklet script and exposes its API as
// stable callbacks plus reactive state. The recorder is stopped on unmount.
function usePCMAudioRecorder(workletScriptURL, audioContext) {
  const recorder = useMemo(
    () => new PCMRecorder(workletScriptURL),
    [workletScriptURL]
  );
  useEffect(() => {
    return () => recorder.stopRecording();
  }, [recorder]);
  const startRecording = useCallback(
    (options) => {
      if (!audioContext) {
        throw new Error("AudioContext not supplied!");
      }
      return recorder.startRecording({
        ...options,
        audioContext
      });
    },
    [recorder, audioContext]
  );
  const stopRecording = useCallback(
    () => recorder.stopRecording(),
    [recorder]
  );
  const addEventListener = useCallback(
    (type, listener) => recorder.addEventListener(type, listener),
    [recorder]
  );
  const removeEventListener = useCallback(
    (type, listener) => recorder.removeEventListener(type, listener),
    [recorder]
  );
  // The analyser node and recording flag are read via useSyncExternalStore so
  // components re-render when recording starts or stops.
  const analyser = useSyncExternalStore(
    (onChange) => {
      recorder.addEventListener("recordingStarted", onChange);
      recorder.addEventListener("recordingStopped", onChange);
      return () => {
        recorder.removeEventListener("recordingStarted", onChange);
        recorder.removeEventListener("recordingStopped", onChange);
      };
    },
    () => recorder.analyser,
    () => recorder.analyser
  );
  const isRecording = useSyncExternalStore(
    (onChange) => {
      recorder.addEventListener("recordingStarted", onChange);
      recorder.addEventListener("recordingStopped", onChange);
      return () => {
        recorder.removeEventListener("recordingStarted", onChange);
        recorder.removeEventListener("recordingStopped", onChange);
      };
    },
    () => recorder.isRecording,
    () => recorder.isRecording
  );
  const mute = useCallback(
    () => recorder.mute(),
    [recorder]
  );
  const unmute = useCallback(
    () => recorder.unmute(),
    [recorder]
  );
  // Muted state is likewise kept in sync with the recorder's mute/unmute events.
  const isMuted = useSyncExternalStore(
    (onChange) => {
      recorder.addEventListener("mute", onChange);
      recorder.addEventListener("unmute", onChange);
      return () => {
        recorder.removeEventListener("mute", onChange);
        recorder.removeEventListener("unmute", onChange);
      };
    },
    () => recorder.isMuted,
    () => recorder.isMuted
  );
  const value = useMemo(
    () => ({
      startRecording,
      stopRecording,
      mute,
      unmute,
      isMuted,
      addEventListener,
      removeEventListener,
      analyser,
      isRecording
    }),
    [
      startRecording,
      stopRecording,
      mute,
      unmute,
      isMuted,
      addEventListener,
      removeEventListener,
      analyser,
      isRecording
    ]
  );
  return value;
}
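// Hedged usage sketch for the standalone hook (the provider below is the more
// common entry point). The worklet script path shown here is an assumption:
//
//   function Recorder({ audioContext }) {
//     const { startRecording, stopRecording, isRecording } = usePCMAudioRecorder(
//       "/js/pcm-audio-worklet.min.js", // assumed asset path
//       audioContext
//     );
//     return jsx("button", {
//       onClick: () => (isRecording ? stopRecording() : startRecording()),
//       children: isRecording ? "Stop" : "Start"
//     });
//   }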
const context = createContext(null);
// Reads the recorder value supplied by PCMAudioRecorderProvider; throws if no
// provider is present above the calling component.
function usePCMAudioRecorderContext() {
  const ctx = useContext(context);
  if (!ctx) {
    throw new Error("PCM audio recorder context must be provided");
  }
  return ctx;
}
// Subscribes a callback to the recorder's "audio" events, passing each event's
// raw PCM data payload to the callback while the component is mounted.
function usePCMAudioListener(cb) {
  const ctx = useContext(context);
  if (!ctx) {
    throw new Error("PCM audio recorder context must be provided");
  }
  const { addEventListener, removeEventListener } = ctx;
  useEffect(() => {
    const onAudio = (ev) => {
      cb(ev.data);
    };
    addEventListener("audio", onAudio);
    return () => {
      removeEventListener("audio", onAudio);
    };
  }, [addEventListener, removeEventListener, cb]);
}
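// Hedged sketch: forwarding PCM data from a component rendered under the
// provider. `sendAudio` is a placeholder for whatever consumes the data
// (e.g. a transcription websocket), not an API from this package.
//
//   usePCMAudioListener((data) => {
//     sendAudio(data);
//   });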
// Context provider that instantiates the recorder once and makes it, together
// with the supplied AudioContext, available to the hooks above.
function PCMAudioRecorderProvider({
  workletScriptURL,
  children,
  audioContext
}) {
  const value = usePCMAudioRecorder(workletScriptURL, audioContext);
  return /* @__PURE__ */ jsx(context.Provider, { value: { ...value, audioContext }, children });
}
export { PCMAudioRecorderProvider, useAudioDevices, usePCMAudioListener, usePCMAudioRecorder, usePCMAudioRecorderContext };
//# sourceMappingURL=index.js.map
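Typical usage, as a hedged sketch rather than code taken from this file: wrap the tree in PCMAudioRecorderProvider with a worklet script URL and an AudioContext, then call the context hooks from descendants. The component names, worklet path and sample rate below are assumptions.

import { useMemo } from "react";
import {
  PCMAudioRecorderProvider,
  usePCMAudioRecorderContext,
  usePCMAudioListener,
} from "@speechmatics/browser-audio-input-react";

function App() {
  // An AudioContext must be supplied to the provider; 16 kHz is an assumed rate.
  const audioContext = useMemo(() => new AudioContext({ sampleRate: 16_000 }), []);
  return (
    <PCMAudioRecorderProvider
      workletScriptURL="/js/pcm-audio-worklet.min.js" // assumed asset path
      audioContext={audioContext}
    >
      <RecorderControls />
    </PCMAudioRecorderProvider>
  );
}

function RecorderControls() {
  const { startRecording, stopRecording, isRecording } = usePCMAudioRecorderContext();
  // Receives the raw PCM data for each "audio" event while mounted.
  usePCMAudioListener((data) => {
    console.log("got PCM chunk", data);
  });
  return (
    <button onClick={() => (isRecording ? stopRecording() : startRecording())}>
      {isRecording ? "Stop" : "Start"} recording
    </button>
  );
}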