UNPKG

@tecsinapse/react-mic

Version:

Record audio from your microphone in WAV or MP3 and display it as a sound oscillation (a wrapper around react-mic)

1,622 lines (1,309 loc) 322 kB
import React, { Component } from 'react'; import { string, number, bool, func, oneOf, object } from 'prop-types'; var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {}; function createCommonjsModule(fn, module) { return module = { exports: {} }, fn(module, module.exports), module.exports; } var MediaStreamRecorder_1 = createCommonjsModule(function (module) { // Last time updated: 2016-07-03 8:51:35 AM UTC // links: // Open-Sourced: https://github.com/streamproc/MediaStreamRecorder // https://cdn.WebRTC-Experiment.com/MediaStreamRecorder.js // https://www.WebRTC-Experiment.com/MediaStreamRecorder.js // npm install msr //------------------------------------ // Browsers Support:: // Chrome (all versions) [ audio/video separately ] // Firefox ( >= 29 ) [ audio/video in single webm/mp4 container or only audio in ogg ] // Opera (all versions) [ same as chrome ] // Android (Chrome) [ only video ] // Android (Opera) [ only video ] // Android (Firefox) [ only video ] // Microsoft Edge (Only Audio & Gif) //------------------------------------ // Muaz Khan - www.MuazKhan.com // MIT License - www.WebRTC-Experiment.com/licence //------------------------------------ // ______________________ // MediaStreamRecorder.js function MediaStreamRecorder(mediaStream) { if (!mediaStream) { throw 'MediaStream is mandatory.'; } // void start(optional long timeSlice) // timestamp to fire "ondataavailable" this.start = function(timeSlice) { var Recorder; if (typeof MediaRecorder !== 'undefined') { Recorder = MediaRecorderWrapper; } else if (IsChrome || IsOpera || IsEdge) { if (this.mimeType.indexOf('video') !== -1) { Recorder = WhammyRecorder; } else if (this.mimeType.indexOf('audio') !== -1) { Recorder = StereoAudioRecorder; } } // video recorder (in GIF format) if (this.mimeType === 'image/gif') { Recorder = GifRecorder; } // audio/wav is supported only 
via StereoAudioRecorder // audio/pcm (int16) is supported only via StereoAudioRecorder if (this.mimeType === 'audio/wav' || this.mimeType === 'audio/pcm') { Recorder = StereoAudioRecorder; } // allows forcing StereoAudioRecorder.js on Edge/Firefox if (this.recorderType) { Recorder = this.recorderType; } mediaRecorder = new Recorder(mediaStream); mediaRecorder.blobs = []; var self = this; mediaRecorder.ondataavailable = function(data) { mediaRecorder.blobs.push(data); self.ondataavailable(data); }; mediaRecorder.onstop = this.onstop; mediaRecorder.onStartedDrawingNonBlankFrames = this.onStartedDrawingNonBlankFrames; // Merge all data-types except "function" mediaRecorder = mergeProps(mediaRecorder, this); mediaRecorder.start(timeSlice); }; this.onStartedDrawingNonBlankFrames = function() {}; this.clearOldRecordedFrames = function() { if (!mediaRecorder) { return; } mediaRecorder.clearOldRecordedFrames(); }; this.stop = function() { if (mediaRecorder) { mediaRecorder.stop(); } }; this.ondataavailable = function(blob) { console.log('ondataavailable..', blob); }; this.onstop = function(error) { console.warn('stopped..', error); }; this.save = function(file, fileName) { if (!file) { if (!mediaRecorder) { return; } ConcatenateBlobs(mediaRecorder.blobs, mediaRecorder.blobs[0].type, function(concatenatedBlob) { invokeSaveAsDialog(concatenatedBlob); }); return; } invokeSaveAsDialog(file, fileName); }; this.pause = function() { if (!mediaRecorder) { return; } mediaRecorder.pause(); console.log('Paused recording.', this.mimeType || mediaRecorder.mimeType); }; this.resume = function() { if (!mediaRecorder) { return; } mediaRecorder.resume(); console.log('Resumed recording.', this.mimeType || mediaRecorder.mimeType); }; // StereoAudioRecorder || WhammyRecorder || MediaRecorderWrapper || GifRecorder this.recorderType = null; // video/webm or audio/webm or audio/ogg or audio/wav this.mimeType = 'video/webm'; // logs are enabled by default this.disableLogs = false; // Reference to 
"MediaRecorder.js" var mediaRecorder; } // ______________________ // MultiStreamRecorder.js function MultiStreamRecorder(mediaStream) { if (!mediaStream) { throw 'MediaStream is mandatory.'; } var self = this; var isMediaRecorder = isMediaRecorderCompatible(); this.stream = mediaStream; // void start(optional long timeSlice) // timestamp to fire "ondataavailable" this.start = function(timeSlice) { audioRecorder = new MediaStreamRecorder(mediaStream); videoRecorder = new MediaStreamRecorder(mediaStream); audioRecorder.mimeType = 'audio/ogg'; videoRecorder.mimeType = 'video/webm'; for (var prop in this) { if (typeof this[prop] !== 'function') { audioRecorder[prop] = videoRecorder[prop] = this[prop]; } } audioRecorder.ondataavailable = function(blob) { if (!audioVideoBlobs[recordingInterval]) { audioVideoBlobs[recordingInterval] = {}; } audioVideoBlobs[recordingInterval].audio = blob; if (audioVideoBlobs[recordingInterval].video && !audioVideoBlobs[recordingInterval].onDataAvailableEventFired) { audioVideoBlobs[recordingInterval].onDataAvailableEventFired = true; fireOnDataAvailableEvent(audioVideoBlobs[recordingInterval]); } }; videoRecorder.ondataavailable = function(blob) { if (isMediaRecorder) { return self.ondataavailable({ video: blob, audio: blob }); } if (!audioVideoBlobs[recordingInterval]) { audioVideoBlobs[recordingInterval] = {}; } audioVideoBlobs[recordingInterval].video = blob; if (audioVideoBlobs[recordingInterval].audio && !audioVideoBlobs[recordingInterval].onDataAvailableEventFired) { audioVideoBlobs[recordingInterval].onDataAvailableEventFired = true; fireOnDataAvailableEvent(audioVideoBlobs[recordingInterval]); } }; function fireOnDataAvailableEvent(blobs) { recordingInterval++; self.ondataavailable(blobs); } videoRecorder.onstop = audioRecorder.onstop = function(error) { self.onstop(error); }; if (!isMediaRecorder) { // to make sure both audio/video are synced. 
videoRecorder.onStartedDrawingNonBlankFrames = function() { videoRecorder.clearOldRecordedFrames(); audioRecorder.start(timeSlice); }; videoRecorder.start(timeSlice); } else { videoRecorder.start(timeSlice); } }; this.stop = function() { if (audioRecorder) { audioRecorder.stop(); } if (videoRecorder) { videoRecorder.stop(); } }; this.ondataavailable = function(blob) { console.log('ondataavailable..', blob); }; this.onstop = function(error) { console.warn('stopped..', error); }; this.pause = function() { if (audioRecorder) { audioRecorder.pause(); } if (videoRecorder) { videoRecorder.pause(); } }; this.resume = function() { if (audioRecorder) { audioRecorder.resume(); } if (videoRecorder) { videoRecorder.resume(); } }; var audioRecorder; var videoRecorder; var audioVideoBlobs = {}; var recordingInterval = 0; } if (typeof MediaStreamRecorder !== 'undefined') { MediaStreamRecorder.MultiStreamRecorder = MultiStreamRecorder; } // _____________________________ // Cross-Browser-Declarations.js var browserFakeUserAgent = 'Fake/5.0 (FakeOS) AppleWebKit/123 (KHTML, like Gecko) Fake/12.3.4567.89 Fake/123.45'; (function(that) { if (typeof window !== 'undefined') { return; } if (typeof window === 'undefined' && typeof commonjsGlobal !== 'undefined') { commonjsGlobal.navigator = { userAgent: browserFakeUserAgent, getUserMedia: function() {} }; /*global window:true */ that.window = commonjsGlobal; } if (typeof document === 'undefined') { /*global document:true */ that.document = {}; document.createElement = document.captureStream = document.mozCaptureStream = function() { return {}; }; } if (typeof location === 'undefined') { /*global location:true */ that.location = { protocol: 'file:', href: '', hash: '' }; } if (typeof screen === 'undefined') { /*global screen:true */ that.screen = { width: 0, height: 0 }; } })(typeof commonjsGlobal !== 'undefined' ? 
commonjsGlobal : window); // WebAudio API representer var AudioContext = window.AudioContext; if (typeof AudioContext === 'undefined') { if (typeof webkitAudioContext !== 'undefined') { /*global AudioContext:true */ AudioContext = webkitAudioContext; } if (typeof mozAudioContext !== 'undefined') { /*global AudioContext:true */ AudioContext = mozAudioContext; } } if (typeof window === 'undefined') { /*jshint -W020 */ window = {}; } // WebAudio API representer var AudioContext = window.AudioContext; if (typeof AudioContext === 'undefined') { if (typeof webkitAudioContext !== 'undefined') { /*global AudioContext:true */ AudioContext = webkitAudioContext; } if (typeof mozAudioContext !== 'undefined') { /*global AudioContext:true */ AudioContext = mozAudioContext; } } /*jshint -W079 */ var URL = window.URL; if (typeof URL === 'undefined' && typeof webkitURL !== 'undefined') { /*global URL:true */ URL = webkitURL; } if (typeof navigator !== 'undefined') { if (typeof navigator.webkitGetUserMedia !== 'undefined') { navigator.getUserMedia = navigator.webkitGetUserMedia; } if (typeof navigator.mozGetUserMedia !== 'undefined') { navigator.getUserMedia = navigator.mozGetUserMedia; } } else { navigator = { getUserMedia: function() {}, userAgent: browserFakeUserAgent }; } var IsEdge = navigator.userAgent.indexOf('Edge') !== -1 && (!!navigator.msSaveBlob || !!navigator.msSaveOrOpenBlob); var IsOpera = false; if (typeof opera !== 'undefined' && navigator.userAgent && navigator.userAgent.indexOf('OPR/') !== -1) { IsOpera = true; } var IsChrome = !IsEdge && !IsEdge && !!navigator.webkitGetUserMedia; var MediaStream = window.MediaStream; if (typeof MediaStream === 'undefined' && typeof webkitMediaStream !== 'undefined') { MediaStream = webkitMediaStream; } /*global MediaStream:true */ if (typeof MediaStream !== 'undefined') { if (!('getVideoTracks' in MediaStream.prototype)) { MediaStream.prototype.getVideoTracks = function() { if (!this.getTracks) { return []; } var tracks = []; 
this.getTracks.forEach(function(track) { if (track.kind.toString().indexOf('video') !== -1) { tracks.push(track); } }); return tracks; }; MediaStream.prototype.getAudioTracks = function() { if (!this.getTracks) { return []; } var tracks = []; this.getTracks.forEach(function(track) { if (track.kind.toString().indexOf('audio') !== -1) { tracks.push(track); } }); return tracks; }; } if (!('stop' in MediaStream.prototype)) { MediaStream.prototype.stop = function() { this.getAudioTracks().forEach(function(track) { if (!!track.stop) { track.stop(); } }); this.getVideoTracks().forEach(function(track) { if (!!track.stop) { track.stop(); } }); }; } } if (typeof location !== 'undefined') { if (location.href.indexOf('file:') === 0) { console.error('Please load this HTML file on HTTP or HTTPS.'); } } // Merge all other data-types except "function" function mergeProps(mergein, mergeto) { for (var t in mergeto) { if (typeof mergeto[t] !== 'function') { mergein[t] = mergeto[t]; } } return mergein; } /** * @param {Blob} file - File or Blob object. This parameter is required. * @param {string} fileName - Optional file name e.g. "Recorded-Video.webm" * @example * invokeSaveAsDialog(blob or file, [optional] fileName); * @see {@link https://github.com/muaz-khan/RecordRTC|RecordRTC Source Code} */ function invokeSaveAsDialog(file, fileName) { if (!file) { throw 'Blob object is required.'; } if (!file.type) { try { file.type = 'video/webm'; } catch (e) {} } var fileExtension = (file.type || 'video/webm').split('/')[1]; if (fileName && fileName.indexOf('.') !== -1) { var splitted = fileName.split('.'); fileName = splitted[0]; fileExtension = splitted[1]; } var fileFullName = (fileName || (Math.round(Math.random() * 9999999999) + 888888888)) + '.' 
+ fileExtension; if (typeof navigator.msSaveOrOpenBlob !== 'undefined') { return navigator.msSaveOrOpenBlob(file, fileFullName); } else if (typeof navigator.msSaveBlob !== 'undefined') { return navigator.msSaveBlob(file, fileFullName); } var hyperlink = document.createElement('a'); hyperlink.href = URL.createObjectURL(file); hyperlink.target = '_blank'; hyperlink.download = fileFullName; if (!!navigator.mozGetUserMedia) { hyperlink.onclick = function() { (document.body || document.documentElement).removeChild(hyperlink); }; (document.body || document.documentElement).appendChild(hyperlink); } var evt = new MouseEvent('click', { view: window, bubbles: true, cancelable: true }); hyperlink.dispatchEvent(evt); if (!navigator.mozGetUserMedia) { URL.revokeObjectURL(hyperlink.href); } } function bytesToSize(bytes) { var k = 1000; var sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB']; if (bytes === 0) { return '0 Bytes'; } var i = parseInt(Math.floor(Math.log(bytes) / Math.log(k)), 10); return (bytes / Math.pow(k, i)).toPrecision(3) + ' ' + sizes[i]; } // ______________ (used to handle stuff like http://goo.gl/xmE5eg) issue #129 // ObjectStore.js var ObjectStore = { AudioContext: AudioContext }; function isMediaRecorderCompatible() { var isOpera = !!window.opera || navigator.userAgent.indexOf(' OPR/') >= 0; var isChrome = !!window.chrome && !isOpera; var isFirefox = typeof window.InstallTrigger !== 'undefined'; if (isFirefox) { return true; } if (!isChrome) { return false; } var nVer = navigator.appVersion; var nAgt = navigator.userAgent; var fullVersion = '' + parseFloat(navigator.appVersion); var majorVersion = parseInt(navigator.appVersion, 10); var verOffset, ix; if (isChrome) { verOffset = nAgt.indexOf('Chrome'); fullVersion = nAgt.substring(verOffset + 7); } // trim the fullVersion string at semicolon/space if present if ((ix = fullVersion.indexOf(';')) !== -1) { fullVersion = fullVersion.substring(0, ix); } if ((ix = fullVersion.indexOf(' ')) !== -1) { fullVersion = 
fullVersion.substring(0, ix); } majorVersion = parseInt('' + fullVersion, 10); if (isNaN(majorVersion)) { fullVersion = '' + parseFloat(navigator.appVersion); majorVersion = parseInt(navigator.appVersion, 10); } return majorVersion >= 49; } // ______________ (used to handle stuff like http://goo.gl/xmE5eg) issue #129 // ObjectStore.js var ObjectStore = { AudioContext: window.AudioContext || window.webkitAudioContext }; // ================== // MediaRecorder.js /** * Implementation of https://dvcs.w3.org/hg/dap/raw-file/default/media-stream-capture/MediaRecorder.html * The MediaRecorder accepts a mediaStream as input source passed from UA. When recorder starts, * a MediaEncoder will be created and accept the mediaStream as input source. * Encoder will get the raw data by track data changes, encode it by selected MIME Type, then store the encoded in EncodedBufferCache object. * The encoded data will be extracted on every timeslice passed from Start function call or by RequestData function. * Thread model: * When the recorder starts, it creates a "Media Encoder" thread to read data from MediaEncoder object and store buffer in EncodedBufferCache object. * Also extract the encoded data and create blobs on every timeslice passed from start function or RequestData function called by UA. */ function MediaRecorderWrapper(mediaStream) { var self = this; /** * This method records MediaStream. 
* @method * @memberof MediaStreamRecorder * @example * recorder.record(); */ this.start = function(timeSlice, __disableLogs) { if (!self.mimeType) { self.mimeType = 'video/webm'; } if (self.mimeType.indexOf('audio') !== -1) { if (mediaStream.getVideoTracks().length && mediaStream.getAudioTracks().length) { var stream; if (!!navigator.mozGetUserMedia) { stream = new MediaStream(); stream.addTrack(mediaStream.getAudioTracks()[0]); } else { // webkitMediaStream stream = new MediaStream(mediaStream.getAudioTracks()); } mediaStream = stream; } } if (self.mimeType.indexOf('audio') !== -1) { self.mimeType = IsChrome ? 'audio/webm' : 'audio/ogg'; } self.dontFireOnDataAvailableEvent = false; var recorderHints = { mimeType: self.mimeType }; if (!self.disableLogs && !__disableLogs) { console.log('Passing following params over MediaRecorder API.', recorderHints); } if (mediaRecorder) { // mandatory to make sure Firefox doesn't fails to record streams 3-4 times without reloading the page. mediaRecorder = null; } if (IsChrome && !isMediaRecorderCompatible()) { // to support video-only recording on stable recorderHints = 'video/vp8'; } // http://dxr.mozilla.org/mozilla-central/source/content/media/MediaRecorder.cpp // https://wiki.mozilla.org/Gecko:MediaRecorder // https://dvcs.w3.org/hg/dap/raw-file/default/media-stream-capture/MediaRecorder.html // starting a recording session; which will initiate "Reading Thread" // "Reading Thread" are used to prevent main-thread blocking scenarios try { mediaRecorder = new MediaRecorder(mediaStream, recorderHints); } catch (e) { // if someone passed NON_supported mimeType // or if Firefox on Android mediaRecorder = new MediaRecorder(mediaStream); } if ('canRecordMimeType' in mediaRecorder && mediaRecorder.canRecordMimeType(self.mimeType) === false) { if (!self.disableLogs) { console.warn('MediaRecorder API seems unable to record mimeType:', self.mimeType); } } // i.e. 
stop recording when <video> is paused by the user; and auto restart recording // when video is resumed. E.g. yourStream.getVideoTracks()[0].muted = true; // it will auto-stop recording. mediaRecorder.ignoreMutedMedia = self.ignoreMutedMedia || false; var firedOnDataAvailableOnce = false; // Dispatching OnDataAvailable Handler mediaRecorder.ondataavailable = function(e) { if (self.dontFireOnDataAvailableEvent) { return; } // how to fix FF-corrupt-webm issues? // should we leave this? e.data.size < 26800 if (!e.data || !e.data.size || e.data.size < 26800 || firedOnDataAvailableOnce) { return; } firedOnDataAvailableOnce = true; var blob = self.getNativeBlob ? e.data : new Blob([e.data], { type: self.mimeType || 'video/webm' }); self.ondataavailable(blob); self.dontFireOnDataAvailableEvent = true; if (!!mediaRecorder) { mediaRecorder.stop(); mediaRecorder = null; } // record next interval self.start(timeSlice, '__disableLogs'); }; mediaRecorder.onerror = function(error) { if (!self.disableLogs) { if (error.name === 'InvalidState') { console.error('The MediaRecorder is not in a state in which the proposed operation is allowed to be executed.'); } else if (error.name === 'OutOfMemory') { console.error('The UA has exhaused the available memory. User agents SHOULD provide as much additional information as possible in the message attribute.'); } else if (error.name === 'IllegalStreamModification') { console.error('A modification to the stream has occurred that makes it impossible to continue recording. An example would be the addition of a Track while recording is occurring. User agents SHOULD provide as much additional information as possible in the message attribute.'); } else if (error.name === 'OtherRecordingError') { console.error('Used for an fatal error other than those listed above. 
User agents SHOULD provide as much additional information as possible in the message attribute.'); } else if (error.name === 'GenericError') { console.error('The UA cannot provide the codec or recording option that has been requested.', error); } else { console.error('MediaRecorder Error', error); } } // When the stream is "ended" set recording to 'inactive' // and stop gathering data. Callers should not rely on // exactness of the timeSlice value, especially // if the timeSlice value is small. Callers should // consider timeSlice as a minimum value if (!!mediaRecorder && mediaRecorder.state !== 'inactive' && mediaRecorder.state !== 'stopped') { mediaRecorder.stop(); } }; // void start(optional long mTimeSlice) // The interval of passing encoded data from EncodedBufferCache to onDataAvailable // handler. "mTimeSlice < 0" means Session object does not push encoded data to // onDataAvailable, instead, it passive wait the client side pull encoded data // by calling requestData API. try { mediaRecorder.start(3.6e+6); } catch (e) { mediaRecorder = null; } setTimeout(function() { if (!mediaRecorder) { return; } if (mediaRecorder.state === 'recording') { // "stop" method auto invokes "requestData"! mediaRecorder.requestData(); // mediaRecorder.stop(); } }, timeSlice); // Start recording. If timeSlice has been provided, mediaRecorder will // raise a dataavailable event containing the Blob of collected data on every timeSlice milliseconds. // If timeSlice isn't provided, UA should call the RequestData to obtain the Blob data, also set the mTimeSlice to zero. }; /** * This method stops recording MediaStream. * @param {function} callback - Callback function, that is used to pass recorded blob back to the callee. 
* @method * @memberof MediaStreamRecorder * @example * recorder.stop(function(blob) { * video.src = URL.createObjectURL(blob); * }); */ this.stop = function(callback) { if (!mediaRecorder) { return; } // mediaRecorder.state === 'recording' means that media recorder is associated with "session" // mediaRecorder.state === 'stopped' means that media recorder is detached from the "session" ... in this case; "session" will also be deleted. if (mediaRecorder.state === 'recording') { // "stop" method auto invokes "requestData"! mediaRecorder.requestData(); setTimeout(function() { self.dontFireOnDataAvailableEvent = true; if (!!mediaRecorder && mediaRecorder.state === 'recording') { mediaRecorder.stop(); } mediaRecorder = null; }, 2000); } }; /** * This method pauses the recording process. * @method * @memberof MediaStreamRecorder * @example * recorder.pause(); */ this.pause = function() { if (!mediaRecorder) { return; } if (mediaRecorder.state === 'recording') { mediaRecorder.pause(); } }; /** * The recorded blobs are passed over this event. * @event * @memberof MediaStreamRecorder * @example * recorder.ondataavailable = function(data) {}; */ this.ondataavailable = function(blob) { console.log('recorded-blob', blob); }; /** * This method resumes the recording process. * @method * @memberof MediaStreamRecorder * @example * recorder.resume(); */ this.resume = function() { if (this.dontFireOnDataAvailableEvent) { this.dontFireOnDataAvailableEvent = false; var disableLogs = self.disableLogs; self.disableLogs = true; this.record(); self.disableLogs = disableLogs; return; } if (!mediaRecorder) { return; } if (mediaRecorder.state === 'paused') { mediaRecorder.resume(); } }; /** * This method resets currently recorded data. 
* @method * @memberof MediaStreamRecorder * @example * recorder.clearRecordedData(); */ this.clearRecordedData = function() { if (!mediaRecorder) { return; } this.pause(); this.dontFireOnDataAvailableEvent = true; this.stop(); }; // Reference to "MediaRecorder" object var mediaRecorder; function isMediaStreamActive() { if ('active' in mediaStream) { if (!mediaStream.active) { return false; } } else if ('ended' in mediaStream) { // old hack if (mediaStream.ended) { return false; } } return true; } // this method checks if media stream is stopped // or any track is ended. (function looper() { if (!mediaRecorder) { return; } if (isMediaStreamActive() === false) { self.stop(); return; } setTimeout(looper, 1000); // check every second })(); } if (typeof MediaStreamRecorder !== 'undefined') { MediaStreamRecorder.MediaRecorderWrapper = MediaRecorderWrapper; } // ====================== // StereoAudioRecorder.js function StereoAudioRecorder(mediaStream) { // void start(optional long timeSlice) // timestamp to fire "ondataavailable" this.start = function(timeSlice) { timeSlice = timeSlice || 1000; mediaRecorder = new StereoAudioRecorderHelper(mediaStream, this); mediaRecorder.record(); timeout = setInterval(function() { mediaRecorder.requestData(); }, timeSlice); }; this.stop = function() { if (mediaRecorder) { mediaRecorder.stop(); clearTimeout(timeout); } }; this.pause = function() { if (!mediaRecorder) { return; } mediaRecorder.pause(); }; this.resume = function() { if (!mediaRecorder) { return; } mediaRecorder.resume(); }; this.ondataavailable = function() {}; // Reference to "StereoAudioRecorder" object var mediaRecorder; var timeout; } if (typeof MediaStreamRecorder !== 'undefined') { MediaStreamRecorder.StereoAudioRecorder = StereoAudioRecorder; } // ============================ // StereoAudioRecorderHelper.js // source code from: http://typedarray.org/wp-content/projects/WebAudioRecorder/script.js function StereoAudioRecorderHelper(mediaStream, root) { // variables 
var deviceSampleRate = 44100; // range: 22050 to 96000

// lazily create the shared AudioContext (one per page, cached on ObjectStore)
if (!ObjectStore.AudioContextConstructor) {
    ObjectStore.AudioContextConstructor = new ObjectStore.AudioContext();
}

// check device sample rate
deviceSampleRate = ObjectStore.AudioContextConstructor.sampleRate;

var leftchannel = [];    // Float32Array chunks captured from channel 0
var rightchannel = [];   // Float32Array chunks captured from channel 1
var scriptprocessornode;
var recording = false;
var recordingLength = 0; // total captured sample-frames per channel
var volume;
var audioInput;
var sampleRate = root.sampleRate || deviceSampleRate;
var mimeType = root.mimeType || 'audio/wav';
var isPCM = mimeType.indexOf('audio/pcm') > -1;
var context;
var numChannels = root.audioChannels || 2;

this.record = function() {
    recording = true;
    // reset the buffers for the new recording
    leftchannel.length = rightchannel.length = 0;
    recordingLength = 0;
};

// Flushes everything captured so far as one Blob via root.ondataavailable.
// While still actively recording it only reports `false` (no chunk yet).
this.requestData = function() {
    if (isPaused) {
        return;
    }

    if (recording) {
        root.ondataavailable(false);
        return;
    }

    if (recordingLength === 0) {
        requestDataInvoked = false;
        return;
    }

    requestDataInvoked = true;
    // clone stuff
    var internalLeftChannel = leftchannel.slice(0);
    var internalRightChannel = rightchannel.slice(0);
    var internalRecordingLength = recordingLength;

    // reset the buffers for the new recording
    // NOTE(review): assigning [] to .length coerces to 0, so this does empty
    // both arrays; "= 0" would say the same thing more directly.
    leftchannel.length = rightchannel.length = [];
    recordingLength = 0;
    requestDataInvoked = false;

    // we flat the left and right channels down
    var leftBuffer = mergeBuffers(internalLeftChannel, internalRecordingLength);

    var interleaved = leftBuffer;

    // we interleave both channels together
    if (numChannels === 2) {
        var rightBuffer = mergeBuffers(internalRightChannel, internalRecordingLength); // bug fixed via #70,#71
        interleaved = interleave(leftBuffer, rightBuffer);
    }

    if (isPCM) {
        // our final binary blob: raw 16-bit samples, no container
        var blob = new Blob([convertoFloat32ToInt16(interleaved)], {
            type: 'audio/pcm'
        });

        console.debug('audio recorded blob size:', bytesToSize(blob.size));
        root.ondataavailable(blob);
        return;
    }

    // we create our wav file: 44-byte RIFF/WAVE header + 16-bit PCM data
    var buffer = new ArrayBuffer(44 + interleaved.length * 2);
    var view = new DataView(buffer);

    // RIFF chunk descriptor
    writeUTFBytes(view, 0, 'RIFF');

    // -8 (via #97)
    view.setUint32(4, 44 + interleaved.length * 2 - 8, true);

    writeUTFBytes(view, 8, 'WAVE');

    // FMT sub-chunk
    writeUTFBytes(view, 12, 'fmt ');
    view.setUint32(16, 16, true); // fmt chunk byte length
    view.setUint16(20, 1, true);  // audio format 1 = linear PCM

    // stereo (2 channels)
    view.setUint16(22, numChannels, true);
    view.setUint32(24, sampleRate, true);
    view.setUint32(28, sampleRate * numChannels * 2, true); // numChannels * 2 (via #71)
    view.setUint16(32, numChannels * 2, true); // block align
    view.setUint16(34, 16, true);              // bits per sample

    // data sub-chunk
    writeUTFBytes(view, 36, 'data');
    view.setUint32(40, interleaved.length * 2, true);

    // write the PCM samples, scaling float [-1, 1] into signed 16-bit
    var lng = interleaved.length;
    var index = 44;
    var volume = 1;
    for (var i = 0; i < lng; i++) {
        view.setInt16(index, interleaved[i] * (0x7FFF * volume), true);
        index += 2;
    }

    // our final binary blob
    var blob = new Blob([view], {
        type: 'audio/wav'
    });

    console.debug('audio recorded blob size:', bytesToSize(blob.size));
    root.ondataavailable(blob);

    root.onstop();

    audioInput.disconnect();
};

this.stop = function() {
    // we stop recording, then flush the final chunk
    recording = false;
    this.requestData();
};

// Zips the left/right sample buffers into one LRLR... Float32Array.
function interleave(leftChannel, rightChannel) {
    var length = leftChannel.length + rightChannel.length;
    var result = new Float32Array(length);

    var inputIndex = 0;

    for (var index = 0; index < length;) {
        result[index++] = leftChannel[inputIndex];
        result[index++] = rightChannel[inputIndex];
        inputIndex++;
    }
    return result;
}

// Concatenates the captured per-callback chunks into one flat buffer of
// recordingLength samples.
function mergeBuffers(channelBuffer, recordingLength) {
    var result = new Float32Array(recordingLength);
    var offset = 0;
    var lng = channelBuffer.length;
    for (var i = 0; i < lng; i++) {
        var buffer = channelBuffer[i];
        result.set(buffer, offset);
        offset += buffer.length;
    }
    return result;
}

// Writes an ASCII string byte-by-byte into the DataView (WAV header tags).
function writeUTFBytes(view, offset, string$$1) {
    var lng = string$$1.length;
    for (var i = 0; i < lng; i++) {
        view.setUint8(offset + i, string$$1.charCodeAt(i));
    }
}

// NOTE(review): scales by 0xFFFF, not 0x7FFF as the WAV path above does —
// looks like an upstream quirk; verify against streamproc/MediaStreamRecorder
// before relying on audio/pcm output levels.
function convertoFloat32ToInt16(buffer) {
    var l = buffer.length;
    var buf = new Int16Array(l);

    while (l--) {
        buf[l] = buffer[l] * 0xFFFF; //convert to 16 bit
    }
    return buf.buffer
}

// creates the audio context (shadows the empty `var context` declared above)
var context = ObjectStore.AudioContextConstructor;

// creates a gain node
ObjectStore.VolumeGainNode = context.createGain();

var volume = ObjectStore.VolumeGainNode;

// creates an audio node from the microphone incoming stream
ObjectStore.AudioInput = context.createMediaStreamSource(mediaStream);

var audioInput = ObjectStore.AudioInput;

// connect the stream to the gain node
audioInput.connect(volume);

/* From the spec: This value controls how frequently the audioprocess event is
dispatched and how many sample-frames need to be processed each call.
Lower values for buffer size will result in a lower (better) latency.
Higher values will be necessary to avoid audio breakup and glitches
Legal values are 256, 512, 1024, 2048, 4096, 8192, and 16384.*/
var bufferSize = root.bufferSize || 2048;

if (root.bufferSize === 0) {
    bufferSize = 0; // 0 lets the browser pick a suitable buffer size
}

if (context.createJavaScriptNode) {
    // legacy prefix of createScriptProcessor
    scriptprocessornode = context.createJavaScriptNode(bufferSize, numChannels, numChannels);
} else if (context.createScriptProcessor) {
    scriptprocessornode = context.createScriptProcessor(bufferSize, numChannels, numChannels);
} else {
    throw 'WebAudio API has no support on this browser.';
}

// re-read the actual size in case the browser chose one (bufferSize 0)
bufferSize = scriptprocessornode.bufferSize;

console.debug('using audio buffer-size:', bufferSize);

var requestDataInvoked = false;

// sometimes "scriptprocessornode" disconnects from the destination-node
// and there is no exception thrown in this case.
// and obviously no further "ondataavailable" events will be emitted.
// below global-scope variable is added to debug such unexpected but "rare" cases.
window.scriptprocessornode = scriptprocessornode;

if (numChannels === 1) {
    console.debug('All right-channels are skipped.');
}

var isPaused = false;

this.pause = function() {
    isPaused = true;
};

this.resume = function() {
    isPaused = false;
};

// http://webaudio.github.io/web-audio-api/#the-scriptprocessornode-interface
// Accumulates each audio-process callback's samples into the channel buffers.
scriptprocessornode.onaudioprocess = function(e) {
    if (!recording || requestDataInvoked || isPaused) {
        return;
    }

    // copy, because getChannelData returns a view that is reused
    var left = e.inputBuffer.getChannelData(0);
    leftchannel.push(new Float32Array(left));

    if (numChannels === 2) {
        var right = e.inputBuffer.getChannelData(1);
        rightchannel.push(new Float32Array(right));
    }

    recordingLength += bufferSize;
};

// mic -> gain -> script processor -> destination (keeps the node alive)
volume.connect(scriptprocessornode);
scriptprocessornode.connect(context.destination);
}

if (typeof MediaStreamRecorder !== 'undefined') {
    MediaStreamRecorder.StereoAudioRecorderHelper = StereoAudioRecorderHelper;
}

// ===================
// WhammyRecorder.js

// Thin facade over WhammyRecorderHelper: copies non-function options onto the
// helper, then polls it so "ondataavailable" fires every timeSlice ms.
function WhammyRecorder(mediaStream) {
    // void start(optional long timeSlice)
    // timestamp to fire "ondataavailable"
    this.start = function(timeSlice) {
        timeSlice = timeSlice || 1000;

        mediaRecorder = new WhammyRecorderHelper(mediaStream, this);

        // forward user-set options (width, height, video, quality, ...)
        for (var prop in this) {
            if (typeof this[prop] !== 'function') {
                mediaRecorder[prop] = this[prop];
            }
        }

        mediaRecorder.record();

        // NOTE(review): interval id cancelled via clearTimeout below — legal
        // because HTML timers share one id pool.
        timeout = setInterval(function() {
            mediaRecorder.requestData();
        }, timeSlice);
    };

    this.stop = function() {
        if (mediaRecorder) {
            mediaRecorder.stop();
            clearTimeout(timeout);
        }
    };

    this.clearOldRecordedFrames = function() {
        if (mediaRecorder) {
            mediaRecorder.clearOldRecordedFrames();
        }
    };

    this.pause = function() {
        if (!mediaRecorder) {
            return;
        }
        mediaRecorder.pause();
    };

    this.resume = function() {
        if (!mediaRecorder) {
            return;
        }
        mediaRecorder.resume();
    };

    // no-op default; consumers overwrite this with their own handler
    this.ondataavailable = function() {};

    // Reference to "WhammyRecorder" object
    var mediaRecorder;
    var timeout;
}

if (typeof MediaStreamRecorder !== 'undefined') {
    MediaStreamRecorder.WhammyRecorder = WhammyRecorder;
}

// ==========================
// WhammyRecorderHelper.js

// Draws the video stream onto a canvas on a timer and feeds webp snapshots
// to Whammy, which compiles them into a webm Blob.
function WhammyRecorderHelper(mediaStream, root) {
    this.record = function(timeSlice) {
        if (!this.width) {
            this.width = 320;
        }

        if (!this.height) {
            this.height = 240;
        }

        // NOTE(review): width/height were just defaulted above, so these
        // inner !this.width / !this.height branches appear unreachable;
        // preserved as-is from upstream.
        if (this.video && this.video instanceof HTMLVideoElement) {
            if (!this.width) {
                this.width = video.videoWidth || video.clientWidth || 320;
            }

            if (!this.height) {
                this.height = video.videoHeight || video.clientHeight || 240;
            }
        }

        if (!this.video) {
            this.video = {
                width: this.width,
                height: this.height
            };
        }

        if (!this.canvas || !this.canvas.width || !this.canvas.height) {
            this.canvas = {
                width: this.width,
                height: this.height
            };
        }

        canvas.width = this.canvas.width;
        canvas.height = this.canvas.height;

        // setting defaults
        if (this.video && this.video instanceof HTMLVideoElement) {
            this.isHTMLObject = true;
            video = this.video.cloneNode();
        } else {
            video = document.createElement('video');
            video.src = URL.createObjectURL(mediaStream);

            video.width = this.video.width;
            video.height = this.video.height;
        }

        video.muted = true;
        video.play();

        lastTime = new Date().getTime();
        whammy = new Whammy.Video(root.speed, root.quality);

        console.log('canvas resolutions', canvas.width, '*', canvas.height);
        console.log('video width/height', video.width || canvas.width, '*', video.height || canvas.height);

        drawFrames();
    };

    this.clearOldRecordedFrames = function() {
        whammy.frames = [];
    };

    var requestDataInvoked = false;

    // Compiles the frames captured so far into a webm Blob and hands it to
    // root.ondataavailable (asynchronously, from whammy.compile's callback).
    this.requestData = function() {
        if (isPaused) {
            return;
        }

        if (!whammy.frames.length) {
            requestDataInvoked = false;
            return;
        }

        requestDataInvoked = true;
        // clone stuff
        var internalFrames = whammy.frames.slice(0);

        // reset the frames for the new recording; -1 = drop leading black
        // frames until the first non-black one
        whammy.frames = dropBlackFrames(internalFrames, -1);

        whammy.compile(function(whammyBlob) {
            root.ondataavailable(whammyBlob);
            console.debug('video recorded blob size:', bytesToSize(whammyBlob.size));
        });

        whammy.frames = [];

        requestDataInvoked = false;
    };

    var isOnStartedDrawingNonBlankFramesInvoked = false;

    // Self-rescheduling capture loop: snapshots the video element onto the
    // canvas roughly every 10ms and pushes a webp frame with its duration.
    function drawFrames() {
        if (isPaused) {
            lastTime = new Date().getTime();
            setTimeout(drawFrames, 500);
            return;
        }

        if (isStopDrawing) {
            return;
        }

        if (requestDataInvoked) {
            // a flush is in progress; retry shortly
            return setTimeout(drawFrames, 100);
        }

        var duration = new Date().getTime() - lastTime;
        if (!duration) {
            return drawFrames();
        }

        // via webrtc-experiment#206, by Jack i.e. @Seymourr
        lastTime = new Date().getTime();

        if (!self.isHTMLObject && video.paused) {
            video.play(); // Android
        }

        context.drawImage(video, 0, 0, canvas.width, canvas.height);

        if (!isStopDrawing) {
            whammy.frames.push({
                duration: duration,
                image: canvas.toDataURL('image/webp')
            });
        }

        if (!isOnStartedDrawingNonBlankFramesInvoked && !isBlankFrame(whammy.frames[whammy.frames.length - 1])) {
            isOnStartedDrawingNonBlankFramesInvoked = true;
            root.onStartedDrawingNonBlankFrames();
        }

        setTimeout(drawFrames, 10);
    }

    var isStopDrawing = false;

    this.stop = function() {
        isStopDrawing = true;
        this.requestData();
    };

    var canvas = document.createElement('canvas');
    var context = canvas.getContext('2d');

    var video;
    var lastTime;
    var whammy;

    var self = this;

    // Compares every 4th byte (RGB of each pixel) of the frame against pure
    // black within the given tolerances.
    // NOTE(review): the return value looks inverted relative to the name —
    // the "mostly black" branch returns false. Preserved verbatim; verify
    // against upstream streamproc/MediaStreamRecorder before changing.
    function isBlankFrame(frame, _pixTolerance, _frameTolerance) {
        var localCanvas = document.createElement('canvas');
        localCanvas.width = canvas.width;
        localCanvas.height = canvas.height;
        var context2d = localCanvas.getContext('2d');

        var sampleColor = {
            r: 0,
            g: 0,
            b: 0
        };
        var maxColorDifference = Math.sqrt(
            Math.pow(255, 2) +
            Math.pow(255, 2) +
            Math.pow(255, 2)
        );
        var pixTolerance = _pixTolerance && _pixTolerance >= 0 && _pixTolerance <= 1 ? _pixTolerance : 0;
        var frameTolerance = _frameTolerance && _frameTolerance >= 0 && _frameTolerance <= 1 ? _frameTolerance : 0;

        var matchPixCount, endPixCheck, maxPixCount;

        var image = new Image();
        image.src = frame.image;
        context2d.drawImage(image, 0, 0, canvas.width, canvas.height);
        var imageData = context2d.getImageData(0, 0, canvas.width, canvas.height);

        matchPixCount = 0;
        endPixCheck = imageData.data.length;
        maxPixCount = imageData.data.length / 4;

        for (var pix = 0; pix < endPixCheck; pix += 4) {
            var currentColor = {
                r: imageData.data[pix],
                g: imageData.data[pix + 1],
                b: imageData.data[pix + 2]
            };
            var colorDifference = Math.sqrt(
                Math.pow(currentColor.r - sampleColor.r, 2) +
                Math.pow(currentColor.g - sampleColor.g, 2) +
                Math.pow(currentColor.b - sampleColor.b, 2)
            );
            // difference in color it is difference in color vectors (r1,g1,b1) <=> (r2,g2,b2)
            if (colorDifference <= maxColorDifference * pixTolerance) {
                matchPixCount++;
            }
        }

        if (maxPixCount - matchPixCount <= maxPixCount * frameTolerance) {
            return false;
        } else {
            return true;
        }
    }

    // Removes black frames from the start of the recording. _framesToCheck of
    // -1 means "check until the first non-black frame"; a positive value
    // limits how many leading frames are inspected.
    function dropBlackFrames(_frames, _framesToCheck, _pixTolerance, _frameTolerance) {
        var localCanvas = document.createElement('canvas');
        localCanvas.width = canvas.width;
        localCanvas.height = canvas.height;
        var context2d = localCanvas.getContext('2d');
        var resultFrames = [];

        var checkUntilNotBlack = _framesToCheck === -1;
        var endCheckFrame = (_framesToCheck && _framesToCheck > 0 && _framesToCheck <= _frames.length) ?
            _framesToCheck : _frames.length;
        var sampleColor = {
            r: 0,
            g: 0,
            b: 0
        };
        var maxColorDifference = Math.sqrt(
            Math.pow(255, 2) +
            Math.pow(255, 2) +
            Math.pow(255, 2)
        );
        var pixTolerance = _pixTolerance && _pixTolerance >= 0 && _pixTolerance <= 1 ? _pixTolerance : 0;
        var frameTolerance = _frameTolerance && _frameTolerance >= 0 && _frameTolerance <= 1 ? _frameTolerance : 0;
        var doNotCheckNext = false;

        for (var f = 0; f < endCheckFrame; f++) {
            var matchPixCount, endPixCheck, maxPixCount;

            if (!doNotCheckNext) {
                var image = new Image();
                image.src = _frames[f].image;
                context2d.drawImage(image, 0, 0, canvas.width, canvas.height);
                var imageData = context2d.getImageData(0, 0, canvas.width, canvas.height);

                matchPixCount = 0;
                endPixCheck = imageData.data.length;
                maxPixCount = imageData.data.length / 4;

                for (var pix = 0; pix < endPixCheck; pix += 4) {
                    var currentColor = {
                        r: imageData.data[pix],
                        g: imageData.data[pix + 1],
                        b: imageData.data[pix + 2]
                    };
                    var colorDifference = Math.sqrt(
                        Math.pow(currentColor.r - sampleColor.r, 2) +
                        Math.pow(currentColor.g - sampleColor.g, 2) +
                        Math.pow(currentColor.b - sampleColor.b, 2)
                    );
                    // difference in color it is difference in color vectors (r1,g1,b1) <=> (r2,g2,b2)
                    if (colorDifference <= maxColorDifference * pixTolerance) {
                        matchPixCount++;
                    }
                }
            }

            // empty statement below is the bundler's transform of an empty
            // "drop this frame" block; the else branch keeps the frame
            if (!doNotCheckNext && maxPixCount - matchPixCount <= maxPixCount * frameTolerance) ; else {
                // console.log('frame is passed : ' + f);
                if (checkUntilNotBlack) {
                    doNotCheckNext = true;
                }
                resultFrames.push(_frames[f]);
            }
        }

        resultFrames = resultFrames.concat(_frames.slice(endCheckFrame));

        if (resultFrames.length <= 0) {
            // at least one last frame should be available for next manipulation
            // if total duration of all frames will be < 1000 than ffmpeg doesn't work well...
resultFrames.push(_frames[_frames.length - 1]); } return resultFrames; } var isPaused = false; this.pause = function() { isPaused = true; }; this.resume = function() { isPaused = false; }; } if (typeof MediaStreamRecorder !== 'undefined') { MediaStreamRecorder.WhammyRecorderHelper = WhammyRecorderHelper; } // -------------- // GifRecorder.js function GifRecorder(mediaStream) { if (typeof GIFEncoder === 'undefined') { throw 'Please link: https://cdn.webrtc-experiment.com/gif-recorder.js'; } // void start(optional long timeSlice) // timestamp to fire "ondataavailable" this.start = function(timeSlice) { timeSlice = timeSlice || 1000; var imageWidth = this.videoWidth || 320; var imageHeight = this.videoHeight || 240; canvas.width = video.width = imageWidth; canvas.height = video.height = imageHeight; // external library to record as GIF images gifEncoder = new GIFEncoder(); // void setRepeat(int iter) // Sets the number of times the set of GIF frames should be played. // Default