@matuschek/react-mic
Version:
Record audio from your microphone in WAV or MP3 and display it as a sound oscillation (a wrapper around react-mic)
768 lines (647 loc) • 22.7 kB
JavaScript
import React, { Component } from 'react';
import PropTypes, { string, number, bool, func } from 'prop-types';
// These two imports appear to be missing from the published bundle: the code
// below uses the global regeneratorRuntime (for transpiled async functions)
// and vmsg's Recorder (for WASM MP3 encoding). Adding them is an assumption
// based on the Recorder API used in MicrophoneRecorderMp3.
import 'regenerator-runtime/runtime';
import { Recorder } from 'vmsg';
var audioCtx = new (window.AudioContext || window.webkitAudioContext)();
var analyser = audioCtx.createAnalyser();
var AudioContext = {
getAudioContext: function getAudioContext() {
return audioCtx;
},
getAnalyser: function getAnalyser() {
return analyser;
},
resetAnalyser: function resetAnalyser() {
analyser = audioCtx.createAnalyser();
},
decodeAudioData: function decodeAudioData(audioData) {
// the published bundle referenced an undefined `audioData`; take it as a parameter
return audioCtx.decodeAudioData(audioData).then(function (decodedData) {
return decodedData; // use the decoded data here
});
}
};
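// AudioContext is a module-level singleton: one browser AudioContext and one
// AnalyserNode shared by the recorders, the player and the visualizers below.
// A consumer-side sketch of the same wiring (illustrative only, not exported
// by this bundle):
//
//   var ctx = AudioContext.getAudioContext();
//   var analyser = AudioContext.getAnalyser();
//   navigator.mediaDevices.getUserMedia({ audio: true }).then(function (stream) {
//     ctx.createMediaStreamSource(stream).connect(analyser);
//   });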
var asyncToGenerator = function (fn) {
return function () {
var gen = fn.apply(this, arguments);
return new Promise(function (resolve, reject) {
function step(key, arg) {
try {
var info = gen[key](arg);
var value = info.value;
} catch (error) {
reject(error);
return;
}
if (info.done) {
resolve(value);
} else {
return Promise.resolve(value).then(function (value) {
step("next", value);
}, function (err) {
step("throw", err);
});
}
}
return step("next");
});
};
};
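// asyncToGenerator is Babel's standard helper: together with
// regeneratorRuntime it is the compiled form of an async function. For
// example, MicrophoneRecorderMp3.onStop below corresponds roughly to this
// pre-compilation source (a sketch, not part of the bundle):
//
//   async function onStop() {
//     try {
//       const blob = await mediaRecorder.stopRecording();
//       // ... build blobObject, fire callbacks ...
//     } catch (err) {
//       console.log('onStop', JSON.stringify(err, null, 2));
//     }
//   }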
var classCallCheck = function (instance, Constructor) {
if (!(instance instanceof Constructor)) {
throw new TypeError("Cannot call a class as a function");
}
};
var createClass = function () {
function defineProperties(target, props) {
for (var i = 0; i < props.length; i++) {
var descriptor = props[i];
descriptor.enumerable = descriptor.enumerable || false;
descriptor.configurable = true;
if ("value" in descriptor) descriptor.writable = true;
Object.defineProperty(target, descriptor.key, descriptor);
}
}
return function (Constructor, protoProps, staticProps) {
if (protoProps) defineProperties(Constructor.prototype, protoProps);
if (staticProps) defineProperties(Constructor, staticProps);
return Constructor;
};
}();
var _extends = Object.assign || function (target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i];
for (var key in source) {
if (Object.prototype.hasOwnProperty.call(source, key)) {
target[key] = source[key];
}
}
}
return target;
};
var inherits = function (subClass, superClass) {
if (typeof superClass !== "function" && superClass !== null) {
throw new TypeError("Super expression must either be null or a function, not " + typeof superClass);
}
subClass.prototype = Object.create(superClass && superClass.prototype, {
constructor: {
value: subClass,
enumerable: false,
writable: true,
configurable: true
}
});
if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass;
};
var possibleConstructorReturn = function (self, call) {
if (!self) {
throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
}
return call && (typeof call === "object" || typeof call === "function") ? call : self;
};
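// classCallCheck, createClass, _extends, inherits and
// possibleConstructorReturn are Babel's standard class/object helpers: they
// are the compiled form of `class X extends Y { ... }` and object spread as
// written in the original ES2015+ source.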
/* eslint-disable no-undef */
var analyser$1 = void 0;
var audioCtx$1 = void 0;
var mediaRecorder = void 0;
var chunks = [];
var startTime = void 0;
var stream = void 0;
var mediaOptions = void 0;
var blobObject = void 0;
var onStartCallback = void 0;
var onStopCallback = void 0;
var onSaveCallback = void 0;
var onDataCallback = void 0;
var timeInterval = void 0;
// URLs for vmsg's WASM MP3 encoder and its polyfill. wasmURL is referenced
// below but missing from the published bundle; the unpkg path and version
// pinned here are an assumption.
var wasmURL = 'https://unpkg.com/vmsg@0.3.0/vmsg.wasm';
var shimURL = 'https://unpkg.com/wasm-polyfill.js@0.2.0/wasm-polyfill.js';
var constraints = { audio: true }; // default constraints - only audio needed; overwritten in the constructor below
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
var MicrophoneRecorder = function () {
function MicrophoneRecorder(onStart, onStop, onSave, onData, options, soundOptions) {
var _this = this;
classCallCheck(this, MicrophoneRecorder);
this.startRecording = function () {
startTime = Date.now();
if (mediaRecorder) {
if (audioCtx$1 && audioCtx$1.state === 'suspended') {
audioCtx$1.resume();
}
if (mediaRecorder && mediaRecorder.state === 'paused') {
mediaRecorder.resume();
return;
}
if (audioCtx$1 && mediaRecorder && mediaRecorder.state === 'inactive') {
mediaRecorder.start(10);
var source = audioCtx$1.createMediaStreamSource(stream);
source.connect(analyser$1);
if (onStartCallback) {
onStartCallback();
}
}
} else if (navigator.mediaDevices) {
console.log('getUserMedia supported.');
navigator.mediaDevices.getUserMedia(constraints).then(function (str) {
stream = str;
if (MediaRecorder.isTypeSupported(mediaOptions.mimeType)) {
mediaRecorder = new MediaRecorder(str, mediaOptions);
mediaRecorder.mimeType = mediaOptions.mimeType;
mediaRecorder.bufferSize = mediaOptions.bufferSize;
mediaRecorder.sampleRate = mediaOptions.sampleRate;
} else {
console.log('Unsupported MIME type, falling back to the browser default');
mediaRecorder = new MediaRecorder(str);
}
if (onStartCallback) {
onStartCallback();
}
mediaRecorder.onstop = _this.onStop;
mediaRecorder.ondataavailable = function (event) {
chunks.push(event.data);
if (onDataCallback) {
onDataCallback(event.data);
}
};
audioCtx$1 = AudioContext.getAudioContext();
audioCtx$1.resume().then(function () {
analyser$1 = AudioContext.getAnalyser();
mediaRecorder.start(10);
var sourceNode = audioCtx$1.createMediaStreamSource(stream);
sourceNode.connect(analyser$1);
});
});
} else {
alert('Your browser does not support audio recording');
}
};
var echoCancellation = soundOptions.echoCancellation,
autoGainControl = soundOptions.autoGainControl,
noiseSuppression = soundOptions.noiseSuppression,
channelCount = soundOptions.channelCount;
onStartCallback = onStart;
onStopCallback = onStop;
onSaveCallback = onSave;
onDataCallback = onData;
mediaOptions = options;
constraints = {
audio: {
echoCancellation: echoCancellation,
autoGainControl: autoGainControl,
noiseSuppression: noiseSuppression,
channelCount: channelCount
},
video: false
};
}
createClass(MicrophoneRecorder, [{
key: 'stopRecording',
value: function stopRecording() {
if (mediaRecorder && mediaRecorder.state !== 'inactive') {
mediaRecorder.stop();
stream.getAudioTracks().forEach(function (track) {
track.stop();
});
mediaRecorder = null;
AudioContext.resetAnalyser();
}
}
}, {
key: 'onStop',
value: function onStop() {
var blob = new Blob(chunks, { type: mediaOptions.mimeType });
chunks = [];
blobObject = {
blob: blob,
startTime: startTime,
stopTime: Date.now(),
options: mediaOptions,
blobURL: window.URL.createObjectURL(blob)
};
if (onStopCallback) {
onStopCallback(blobObject);
}
if (onSaveCallback) {
onSaveCallback(blobObject);
}
}
}]);
return MicrophoneRecorder;
}();
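// Illustrative stand-alone use of MicrophoneRecorder (the option values shown
// are assumptions, not defaults of this module):
//
//   var recorder = new MicrophoneRecorder(
//     function onStart() { console.log('recording'); },
//     function onStop(blobObject) { console.log(blobObject.blobURL); },
//     null, // onSave
//     null, // onData
//     { mimeType: 'audio/webm;codecs=opus' },
//     { echoCancellation: true, autoGainControl: true, noiseSuppression: true, channelCount: 1 }
//   );
//   recorder.startRecording();
//   // ... later:
//   recorder.stopRecording();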
var MicrophoneRecorderMp3 = function () {
function MicrophoneRecorderMp3(onStart, onStop, onSave, onData, options) {
var _this2 = this;
classCallCheck(this, MicrophoneRecorderMp3);
this.startRecording = function () {
startTime = Date.now();
if (mediaRecorder) {
if (audioCtx$1 && audioCtx$1.state === 'suspended') {
audioCtx$1.resume();
}
if (mediaRecorder && mediaRecorder.state === 'paused') {
mediaRecorder.resume();
return;
}
if (audioCtx$1 && mediaRecorder && mediaRecorder.state === 'inactive') {
mediaRecorder.start(10);
var source = audioCtx$1.createMediaStreamSource(stream);
source.connect(analyser$1);
if (onStartCallback) {
onStartCallback();
}
}
} else if (navigator.mediaDevices) {
console.log('getUserMedia supported.');
navigator.mediaDevices.getUserMedia(constraints).then(function () {
var _ref = asyncToGenerator( /*#__PURE__*/regeneratorRuntime.mark(function _callee(str) {
var _mediaOptions, recorderParams;
return regeneratorRuntime.wrap(function _callee$(_context) {
while (1) {
switch (_context.prev = _context.next) {
case 0:
stream = str;
_mediaOptions = mediaOptions, recorderParams = _mediaOptions.recorderParams;
mediaRecorder = new Recorder(_extends({
wasmURL: wasmURL,
shimURL: shimURL
}, recorderParams));
_context.prev = 3;
_context.next = 6;
return mediaRecorder.init();
case 6:
if (onStartCallback) {
onStartCallback();
}
audioCtx$1 = AudioContext.getAudioContext();
audioCtx$1.resume().then(function () {
analyser$1 = AudioContext.getAnalyser();
mediaRecorder.startRecording();
if (onDataCallback) {
timeInterval = setInterval(onDataCallback, 10);
}
var sourceNode = audioCtx$1.createMediaStreamSource(stream);
sourceNode.connect(analyser$1);
});
_context.next = 14;
break;
case 11:
_context.prev = 11;
_context.t0 = _context['catch'](3);
console.log(JSON.stringify(_context.t0, null, 2));
case 14:
case 'end':
return _context.stop();
}
}
}, _callee, _this2, [[3, 11]]);
}));
return function (_x) {
return _ref.apply(this, arguments);
};
}()).catch(function (error) {
return console.log(JSON.stringify(error, null, 2));
});
} else {
alert('Your browser does not support audio recording');
}
};
onStartCallback = onStart;
onStopCallback = onStop;
onSaveCallback = onSave;
onDataCallback = onData;
mediaOptions = options;
}
createClass(MicrophoneRecorderMp3, [{
key: 'stopRecording',
value: function stopRecording() {
if (mediaRecorder) {
stream.getAudioTracks().forEach(function (track) {
track.stop();
});
AudioContext.resetAnalyser();
this.onStop();
}
}
}, {
key: 'onStop',
value: function () {
var _ref2 = asyncToGenerator( /*#__PURE__*/regeneratorRuntime.mark(function _callee2() {
var blob;
return regeneratorRuntime.wrap(function _callee2$(_context2) {
while (1) {
switch (_context2.prev = _context2.next) {
case 0:
_context2.prev = 0;
_context2.next = 3;
return mediaRecorder.stopRecording();
case 3:
blob = _context2.sent;
blobObject = {
blob: blob,
startTime: startTime,
stopTime: Date.now(),
options: mediaOptions,
blobURL: window.URL.createObjectURL(blob)
};
mediaRecorder.close();
mediaRecorder = null;
clearInterval(timeInterval);
if (onStopCallback) {
onStopCallback(blobObject);
}
if (onSaveCallback) {
onSaveCallback(blobObject);
}
_context2.next = 15;
break;
case 12:
_context2.prev = 12;
_context2.t0 = _context2['catch'](0);
console.log('onStop', JSON.stringify(_context2.t0, null, 2));
case 15:
case 'end':
return _context2.stop();
}
}
}, _callee2, this, [[0, 12]]);
}));
function onStop() {
return _ref2.apply(this, arguments);
}
return onStop;
}()
}]);
return MicrophoneRecorderMp3;
}();
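// MicrophoneRecorderMp3 delegates MP3 encoding to vmsg's WASM Recorder
// instead of the native MediaRecorder. An illustrative sketch (the
// recorderParams value shown is an assumption; see the vmsg documentation
// for supported options):
//
//   var mp3Recorder = new MicrophoneRecorderMp3(
//     onStart, onStop, onSave, onData,
//     { recorderParams: {} }
//   );
//   mp3Recorder.startRecording();
//   // ... later:
//   mp3Recorder.stopRecording();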
var audioSource = void 0;
var AudioPlayer = {
create: function create(audioElem) {
var audioCtx = AudioContext.getAudioContext();
var analyser = AudioContext.getAnalyser();
if (audioSource === undefined) {
var source = audioCtx.createMediaElementSource(audioElem);
source.connect(analyser);
audioSource = source;
}
analyser.connect(audioCtx.destination);
}
};
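// AudioPlayer routes an existing <audio> element through the shared analyser
// so playback can be visualized as well. Sketch (the element selector is an
// assumption):
//
//   var audioEl = document.querySelector('audio');
//   AudioPlayer.create(audioEl); // playback now feeds the analyser and the speakers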
var drawVisual = void 0;
var Visualizer = {
visualizeSineWave: function visualizeSineWave(canvasCtx, canvas, width, height, backgroundColor, strokeColor) {
var analyser = AudioContext.getAnalyser();
var bufferLength = analyser.fftSize;
var dataArray = new Uint8Array(bufferLength);
canvasCtx.clearRect(0, 0, width, height);
function draw() {
drawVisual = requestAnimationFrame(draw);
analyser = AudioContext.getAnalyser();
analyser.getByteTimeDomainData(dataArray);
canvasCtx.fillStyle = backgroundColor;
canvasCtx.fillRect(0, 0, width, height);
canvasCtx.lineWidth = 2;
canvasCtx.strokeStyle = strokeColor;
canvasCtx.beginPath();
var sliceWidth = width * 1.0 / bufferLength;
var x = 0;
for (var i = 0; i < bufferLength; i++) {
var v = dataArray[i] / 128.0;
var y = v * height / 2;
if (i === 0) {
canvasCtx.moveTo(x, y);
} else {
canvasCtx.lineTo(x, y);
}
x += sliceWidth;
}
canvasCtx.lineTo(canvas.width, canvas.height / 2);
canvasCtx.stroke();
}
draw();
},
visualizeFrequencyBars: function visualizeFrequencyBars(canvasCtx, canvas, width, height, backgroundColor, strokeColor) {
var self = this;
var analyser = AudioContext.getAnalyser();
analyser.fftSize = 256;
var bufferLength = analyser.frequencyBinCount;
var dataArray = new Uint8Array(bufferLength);
canvasCtx.clearRect(0, 0, width, height);
function draw() {
drawVisual = requestAnimationFrame(draw);
analyser = AudioContext.getAnalyser();
analyser.getByteFrequencyData(dataArray);
canvasCtx.fillStyle = backgroundColor;
canvasCtx.fillRect(0, 0, width, height);
var barWidth = width / bufferLength * 2.5;
var barHeight = void 0;
var x = 0;
for (var i = 0; i < bufferLength; i++) {
barHeight = dataArray[i];
// A per-bar color could be derived via self.hexToRgb(strokeColor), e.g.:
// canvasCtx.fillStyle = `rgb(${barHeight + 100},${rgb.g},${rgb.b})`;
canvasCtx.fillStyle = strokeColor;
canvasCtx.fillRect(x, height - barHeight / 2, barWidth, barHeight / 2);
x += barWidth + 1;
}
}
draw();
},
visualizeFrequencyCircles: function visualizeFrequencyCircles(canvasCtx, canvas, width, height, backgroundColor, strokeColor) {
var analyser = AudioContext.getAnalyser();
analyser.fftSize = 32;
var bufferLength = analyser.frequencyBinCount;
var dataArray = new Uint8Array(bufferLength);
canvasCtx.clearRect(0, 0, width, height);
function draw() {
drawVisual = requestAnimationFrame(draw);
analyser = AudioContext.getAnalyser();
analyser.getByteFrequencyData(dataArray);
var reductionAmount = 3;
var reducedDataArray = new Uint8Array(bufferLength / reductionAmount);
for (var i = 0; i < bufferLength; i += reductionAmount) {
var sum = 0;
for (var j = 0; j < reductionAmount; j++) {
sum += dataArray[i + j];
}
reducedDataArray[i / reductionAmount] = sum / reductionAmount;
}
canvasCtx.clearRect(0, 0, width, height);
canvasCtx.beginPath();
canvasCtx.arc(width / 2, height / 2, Math.min(height, width) / 2, 0, 2 * Math.PI);
canvasCtx.fillStyle = backgroundColor;
canvasCtx.fill();
var stepSize = Math.min(height, width) / 2.0 / reducedDataArray.length;
canvasCtx.strokeStyle = strokeColor;
for (var _i = 0; _i < reducedDataArray.length; _i++) {
canvasCtx.beginPath();
var normalized = reducedDataArray[_i] / 128;
var r = stepSize * _i + stepSize * normalized;
canvasCtx.arc(width / 2, height / 2, r, 0, 2 * Math.PI);
canvasCtx.stroke();
}
}
draw();
},
hexToRgb: function hexToRgb(hex) {
var result = /^#?([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})$/i.exec(hex);
return result ? {
r: parseInt(result[1], 16),
g: parseInt(result[2], 16),
b: parseInt(result[3], 16)
} : null;
}
};
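// Each visualizer runs its own requestAnimationFrame loop against the shared
// analyser. Stand-alone sketch (the canvas element and colors are
// assumptions):
//
//   var canvas = document.querySelector('canvas');
//   Visualizer.visualizeSineWave(
//     canvas.getContext('2d'), canvas,
//     canvas.width, canvas.height,
//     '#282828', '#00ff00'
//   );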
// cool blog article on how to do this: http://www.smartjava.org/content/exploring-html5-web-audio-visualizing-sound
var ReactMic = function (_Component) {
inherits(ReactMic, _Component);
function ReactMic(props) {
classCallCheck(this, ReactMic);
var _this = possibleConstructorReturn(this, (ReactMic.__proto__ || Object.getPrototypeOf(ReactMic)).call(this, props));
_this.visualize = function () {
var _this$props = _this.props,
backgroundColor = _this$props.backgroundColor,
strokeColor = _this$props.strokeColor,
width = _this$props.width,
height = _this$props.height,
visualSetting = _this$props.visualSetting;
var _this$state = _this.state,
canvas = _this$state.canvas,
canvasCtx = _this$state.canvasCtx;
if (visualSetting === 'sinewave') {
Visualizer.visualizeSineWave(canvasCtx, canvas, width, height, backgroundColor, strokeColor);
} else if (visualSetting === 'frequencyBars') {
Visualizer.visualizeFrequencyBars(canvasCtx, canvas, width, height, backgroundColor, strokeColor);
} else if (visualSetting === 'frequencyCircles') {
Visualizer.visualizeFrequencyCircles(canvasCtx, canvas, width, height, backgroundColor, strokeColor);
}
};
_this.visualizerRef = React.createRef();
_this.state = {
microphoneRecorder: null,
canvas: null,
canvasCtx: null
};
return _this;
}
createClass(ReactMic, [{
key: 'componentDidUpdate',
value: function componentDidUpdate(prevProps) {
var record = this.props.record;
var microphoneRecorder = this.state.microphoneRecorder;
if (prevProps.record !== record) {
if (record) {
if (microphoneRecorder) {
microphoneRecorder.startRecording();
}
} else if (microphoneRecorder) {
microphoneRecorder.stopRecording(); // takes no arguments; onStop fires via the callback wired in the constructor
this.clear();
}
}
}
}, {
key: 'componentDidMount',
value: function componentDidMount() {
var _this2 = this;
var _props2 = this.props,
onSave = _props2.onSave,
onStop = _props2.onStop,
onStart = _props2.onStart,
onData = _props2.onData,
audioElem = _props2.audioElem,
audioBitsPerSecond = _props2.audioBitsPerSecond,
echoCancellation = _props2.echoCancellation,
autoGainControl = _props2.autoGainControl,
noiseSuppression = _props2.noiseSuppression,
channelCount = _props2.channelCount,
mimeType = _props2.mimeType,
bufferSize = _props2.bufferSize,
recorderParams = _props2.recorderParams,
sampleRate = _props2.sampleRate;
var canvas = this.visualizerRef.current;
var canvasCtx = canvas.getContext('2d');
var options = {
audioBitsPerSecond: audioBitsPerSecond,
mimeType: mimeType,
bufferSize: bufferSize,
sampleRate: sampleRate,
recorderParams: recorderParams
};
var soundOptions = {
echoCancellation: echoCancellation,
autoGainControl: autoGainControl,
noiseSuppression: noiseSuppression,
channelCount: channelCount // destructured by MicrophoneRecorder; was missing from the published bundle
};
if (audioElem) {
AudioPlayer.create(audioElem);
this.setState({
canvas: canvas,
canvasCtx: canvasCtx
}, function () {
_this2.visualize();
});
} else {
this.setState({
microphoneRecorder: new MicrophoneRecorder(onStart, onStop, onSave, onData, options, soundOptions),
canvas: canvas,
canvasCtx: canvasCtx
}, function () {
_this2.visualize();
});
}
}
}, {
key: 'clear',
value: function clear() {
var _props3 = this.props,
width = _props3.width,
height = _props3.height;
var canvasCtx = this.state.canvasCtx;
canvasCtx.clearRect(0, 0, width, height);
}
}, {
key: 'render',
value: function render() {
var _props4 = this.props,
width = _props4.width,
height = _props4.height;
return React.createElement('canvas', {
ref: this.visualizerRef,
height: height,
width: width,
className: this.props.className
});
}
}]);
return ReactMic;
}(Component);
ReactMic.propTypes = {
backgroundColor: string,
strokeColor: string,
className: string,
audioBitsPerSecond: number,
mimeType: string,
width: number,
height: number,
record: bool.isRequired,
visualSetting: string,
audioElem: PropTypes.object,
recorderParams: PropTypes.object,
echoCancellation: bool,
autoGainControl: bool,
noiseSuppression: bool,
channelCount: number,
onStart: func,
onStop: func,
onData: func,
onSave: func,
bufferSize: PropTypes.oneOf([0, 256, 512, 1024, 2048, 4096, 8192, 16384]),
sampleRate: number
};
ReactMic.defaultProps = {
backgroundColor: 'rgba(255, 255, 255, 0.5)',
strokeColor: '#000000',
className: 'visualizer',
audioBitsPerSecond: 128000,
record: false,
width: 640,
height: 100,
visualSetting: 'sinewave',
echoCancellation: false,
autoGainControl: false,
noiseSuppression: false,
channelCount: 2,
mimeType: 'audio/wav;codecs=opus', // not a valid container/codec pairing; the isTypeSupported check above rejects it and falls back to the browser default
bufferSize: 2048,
sampleRate: 44100
};
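// Example usage of the exported component (illustrative; handler names and
// state shape are assumptions):
//
//   import { ReactMic } from '@matuschek/react-mic';
//
//   <ReactMic
//     record={this.state.record} // toggle to start/stop recording
//     mimeType="audio/webm;codecs=opus"
//     onStop={(blobObject) => this.setState({ blobURL: blobObject.blobURL })}
//     onData={(chunk) => console.log('real-time audio chunk', chunk)}
//   />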
export { ReactMic };
//# sourceMappingURL=index.es.js.map