react-audio-recorder-hook
Version:
A powerful TypeScript-based React hook that provides complete audio recording capabilities with pause/resume functionality, recording management, and audio processing
401 lines • 16.9 kB
JavaScript
;
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.AudioEffectType = void 0;
exports.default = useAudioRecorder;
const react_1 = require("react");
const browserSupport_1 = __importDefault(require("./utils/browserSupport"));
const audioEffects_1 = require("./effects/audioEffects");
function useAudioRecorder(options = {}) {
const { audioConstraints = {}, chunkInterval = 500, preferredMimeType, onNotSupported, audioBitsPerSecond, volumeMeterRefreshRate = 100, audioEffect, } = options;
// Check browser compatibility
const [browserCompatibility] = (0, react_1.useState)(() => ({
isSupported: browserSupport_1.default.isAudioRecordingSupported(),
mediaRecorderSupported: browserSupport_1.default.isMediaRecorderSupported(),
getUserMediaSupported: browserSupport_1.default.isGetUserMediaSupported(),
audioContextSupported: browserSupport_1.default.isAudioContextSupported(),
isMobileBrowser: browserSupport_1.default.isMobileBrowser(),
}));
const [mediaRecorder, setMediaRecorder] = (0, react_1.useState)(null);
const [isRecording, setIsRecording] = (0, react_1.useState)(false);
const [isPaused, setIsPaused] = (0, react_1.useState)(false);
const [recordingDuration, setRecordingDuration] = (0, react_1.useState)(0);
const [timer, setTimer] = (0, react_1.useState)(null);
const [mediaStream, setMediaStream] = (0, react_1.useState)(null);
const [currentVolume, setCurrentVolume] = (0, react_1.useState)(0);
const [error, setError] = (0, react_1.useState)(null);
const [isPermissionDenied, setIsPermissionDenied] = (0, react_1.useState)(false);
const [currentEffect, setCurrentEffect] = (0, react_1.useState)({
type: audioEffects_1.AudioEffectType.None,
mix: 0.5,
});
const audioChunksRef = (0, react_1.useRef)([]);
const pausedChunksRef = (0, react_1.useRef)([]);
const mimeTypeRef = (0, react_1.useRef)('audio/webm');
const audioUrlRef = (0, react_1.useRef)(null);
const audioContextRef = (0, react_1.useRef)(null);
const analyserRef = (0, react_1.useRef)(null);
const volumeTimerRef = (0, react_1.useRef)(null);
const mediaSourceRef = (0, react_1.useRef)(null);
// Set initial effect from options if provided
(0, react_1.useEffect)(() => {
if (audioEffect) {
setCurrentEffect(audioEffect);
}
}, [audioEffect]);
// Check compatibility on mount and call onNotSupported if provided
(0, react_1.useEffect)(() => {
if (!browserCompatibility.isSupported && onNotSupported) {
onNotSupported();
}
}, [browserCompatibility.isSupported, onNotSupported]);
const cleanupAudioUrl = (0, react_1.useCallback)(() => {
if (audioUrlRef.current) {
URL.revokeObjectURL(audioUrlRef.current);
audioUrlRef.current = null;
}
}, []);
const getSupportedMimeType = (0, react_1.useCallback)(() => {
// Use our browser support utility to find the best supported MIME type
if (preferredMimeType && browserSupport_1.default.isMimeTypeSupported(preferredMimeType)) {
return preferredMimeType;
}
return browserSupport_1.default.getBestSupportedMimeType();
}, [preferredMimeType]);
const stopMediaStream = (0, react_1.useCallback)((stream) => {
if (stream) {
stream.getTracks().forEach(track => {
track.stop();
});
setMediaStream(null);
}
}, []);
const startRecording = (0, react_1.useCallback)(async () => {
try {
cleanupAudioUrl();
setError(null);
// Check if recording is supported using our utility
if (!browserCompatibility.isSupported) {
const error = new Error('Audio recording is not supported in this browser');
setError(error);
if (onNotSupported) {
onNotSupported();
}
else {
console.error(error.message);
}
return;
}
try {
const stream = await navigator.mediaDevices
.getUserMedia({
audio: { ...audioConstraints },
})
.catch(err => {
// Handle permission denied
if (err.name === 'NotAllowedError' || err.name === 'PermissionDeniedError') {
setIsPermissionDenied(true);
throw new Error('Microphone permission denied');
}
throw err;
});
setMediaStream(stream);
setIsPermissionDenied(false);
// Set up volume metering using AudioContext
try {
// Create audio context
// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
const audioContext = new (window.AudioContext ||
// @ts-expect-error - webkitAudioContext for Safari
window.webkitAudioContext)();
// Create analyzer
const analyser = audioContext.createAnalyser();
analyser.fftSize = 256;
analyser.smoothingTimeConstant = 0.8;
// Connect the microphone to the analyser
const source = audioContext.createMediaStreamSource(stream);
source.connect(analyser);
// Store references
audioContextRef.current = audioContext;
analyserRef.current = analyser;
// Start volume monitoring
const dataArray = new Uint8Array(analyser.frequencyBinCount);
const updateVolume = () => {
analyser.getByteFrequencyData(dataArray);
// Calculate volume
let sum = 0;
for (let i = 0; i < dataArray.length; i++) {
sum += dataArray[i];
}
// Normalize volume (0-1)
const average = sum / dataArray.length / 255;
setCurrentVolume(average);
// Schedule next update
volumeTimerRef.current = window.setTimeout(updateVolume, volumeMeterRefreshRate);
};
// Start monitoring
updateVolume();
}
catch (err) {
console.warn('Volume metering not available:', err);
}
mimeTypeRef.current = getSupportedMimeType();
const recorder = new MediaRecorder(stream, {
mimeType: mimeTypeRef.current,
audioBitsPerSecond,
});
setMediaRecorder(recorder);
audioChunksRef.current = [];
pausedChunksRef.current = [];
recorder.ondataavailable = event => {
if (event.data.size > 0) {
audioChunksRef.current.push(event.data);
}
};
recorder.onstop = async () => {
if (audioChunksRef.current.length === 0 && pausedChunksRef.current.length === 0) {
return;
}
const allChunks = [...pausedChunksRef.current, ...audioChunksRef.current];
const audioBlob = new Blob(allChunks, {
type: mimeTypeRef.current,
});
if (audioBlob.size === 0) {
return;
}
stopMediaStream(stream);
};
recorder.onerror = () => {
stopMediaStream(stream);
setIsRecording(false);
setIsPaused(false);
};
recorder.start(chunkInterval);
setIsRecording(true);
setIsPaused(false);
if (timer) {
clearInterval(timer);
}
const newTimer = setInterval(() => {
setRecordingDuration(prevDuration => prevDuration + 1);
}, 1000);
setTimer(newTimer);
}
catch (error) {
setIsRecording(false);
setIsPaused(false);
console.error('Error starting recording:', error);
}
}
catch (error) {
setIsRecording(false);
setIsPaused(false);
console.error('Error starting recording:', error);
}
}, [
getSupportedMimeType,
stopMediaStream,
timer,
cleanupAudioUrl,
audioConstraints,
audioBitsPerSecond,
browserCompatibility.isSupported,
chunkInterval,
onNotSupported,
volumeMeterRefreshRate,
]);
// ... All other functions remain mostly the same
const pauseRecording = (0, react_1.useCallback)(() => {
if (!mediaRecorder || mediaRecorder.state === 'inactive' || isPaused) {
return;
}
pausedChunksRef.current = [...pausedChunksRef.current, ...audioChunksRef.current];
audioChunksRef.current = [];
mediaRecorder.pause();
setIsPaused(true);
if (timer) {
clearInterval(timer);
setTimer(null);
}
}, [mediaRecorder, isPaused, timer]);
const resumeRecording = (0, react_1.useCallback)(async () => {
if (!mediaRecorder || mediaRecorder.state !== 'paused') {
return;
}
mediaRecorder.resume();
setIsPaused(false);
if (!timer) {
const newTimer = setInterval(() => {
setRecordingDuration(prevDuration => prevDuration + 1);
}, 1000);
setTimer(newTimer);
}
}, [mediaRecorder, timer]);
const createAudioBlob = (0, react_1.useCallback)(() => {
// Check if we have audio chunks to work with
if (audioChunksRef.current.length === 0 && pausedChunksRef.current.length === 0) {
return null;
}
// If recording is still active, pause it to access the data safely
if ((mediaRecorder === null || mediaRecorder === void 0 ? void 0 : mediaRecorder.state) === 'recording') {
mediaRecorder.pause();
}
// Combine all audio chunks
const allChunks = [...pausedChunksRef.current, ...audioChunksRef.current];
// Checking for iOS support
const isIOSDevice = browserSupport_1.default.isIOS();
// For iOS devices, use mp4/aac format if possible which is better supported
let blobType = mimeTypeRef.current;
if (isIOSDevice && !blobType.includes('mp4') && !blobType.includes('aac')) {
// Try to use mp4 if the recorded MIME type isn't already an iOS-friendly format
if (browserSupport_1.default.isMimeTypeSupported('audio/mp4')) {
blobType = 'audio/mp4';
}
else if (browserSupport_1.default.isMimeTypeSupported('audio/aac')) {
blobType = 'audio/aac';
}
}
// Create a blob from the chunks using the appropriate MIME type
const audioBlob = new Blob(allChunks, {
type: blobType,
});
// Ensure we have valid data
if (audioBlob.size === 0) {
return null;
}
return audioBlob;
}, [mediaRecorder]);
const playRecording = (0, react_1.useCallback)(async () => {
cleanupAudioUrl();
const audioBlob = createAudioBlob();
if (!audioBlob)
return null;
const audioUrl = URL.createObjectURL(audioBlob);
audioUrlRef.current = audioUrl;
return audioUrl;
}, [createAudioBlob, cleanupAudioUrl]);
const saveRecording = (0, react_1.useCallback)(async () => {
const audioBlob = createAudioBlob();
if (!audioBlob)
return null;
const audioUrl = URL.createObjectURL(audioBlob);
return { blob: audioBlob, url: audioUrl };
}, [createAudioBlob]);
const stopRecording = (0, react_1.useCallback)(async () => {
if (!mediaRecorder || mediaRecorder.state === 'inactive') {
return;
}
mediaRecorder.stop();
setIsRecording(false);
setIsPaused(false);
if (timer) {
clearInterval(timer);
setTimer(null);
}
// Cleanup volume metering
if (volumeTimerRef.current) {
clearTimeout(volumeTimerRef.current);
volumeTimerRef.current = null;
}
setCurrentVolume(0);
setRecordingDuration(0);
}, [mediaRecorder, timer]);
const cancelRecording = (0, react_1.useCallback)(() => {
(mediaRecorder === null || mediaRecorder === void 0 ? void 0 : mediaRecorder.state) !== 'inactive' && (mediaRecorder === null || mediaRecorder === void 0 ? void 0 : mediaRecorder.stop());
stopMediaStream(mediaStream);
setIsRecording(false);
setIsPaused(false);
if (timer) {
clearInterval(timer);
setTimer(null);
}
// Cleanup volume metering
if (volumeTimerRef.current) {
clearTimeout(volumeTimerRef.current);
volumeTimerRef.current = null;
}
// Close audio context
if (audioContextRef.current) {
audioContextRef.current.close().catch(e => {
console.warn('Error closing AudioContext:', e);
});
audioContextRef.current = null;
analyserRef.current = null;
}
cleanupAudioUrl();
setCurrentVolume(0);
setRecordingDuration(0);
audioChunksRef.current = [];
pausedChunksRef.current = [];
}, [mediaRecorder, mediaStream, stopMediaStream, timer, cleanupAudioUrl]);
const applyEffect = (0, react_1.useCallback)((effect) => {
// Update current effect state
setCurrentEffect(effect);
// If not recording or no audio context, we can't apply effects
if (!isRecording || !audioContextRef.current || !mediaSourceRef.current) {
return;
}
try {
// Disconnect any existing connections
try {
mediaSourceRef.current.disconnect();
}
catch (e) {
// Ignore disconnection errors
}
// Apply the new effect
// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
if (audioContextRef.current && mediaSourceRef.current) {
const { destination } = audioContextRef.current;
(0, audioEffects_1.applyAudioEffect)(audioContextRef.current, mediaSourceRef.current, destination, effect);
}
}
catch (err) {
console.warn('Failed to apply audio effect:', err);
}
}, [isRecording]);
(0, react_1.useEffect)(() => {
return () => {
if ((mediaRecorder === null || mediaRecorder === void 0 ? void 0 : mediaRecorder.state) !== "inactive") {
return;
}
if (timer) {
clearInterval(timer);
}
// Cleanup volume metering on unmount
if (volumeTimerRef.current) {
clearTimeout(volumeTimerRef.current);
}
if (audioContextRef.current && audioContextRef.current.state !== "closed") {
audioContextRef.current.close().catch(console.error);
}
stopMediaStream(mediaStream);
cleanupAudioUrl();
};
}, [mediaStream, stopMediaStream, timer, cleanupAudioUrl, mediaRecorder]);
return {
startRecording,
stopRecording,
cancelRecording,
pauseRecording,
resumeRecording,
saveRecording,
playRecording,
applyEffect,
isRecording,
isPaused,
recordingDuration,
mediaStream,
currentVolume,
error,
isPermissionDenied,
browserCompatibility,
currentEffect,
};
}
// Re-export audio effects
var audioEffects_2 = require("./effects/audioEffects");
Object.defineProperty(exports, "AudioEffectType", { enumerable: true, get: function () { return audioEffects_2.AudioEffectType; } });
//# sourceMappingURL=index.js.map