react-audio-processor-kit
A lightweight React hook for real-time voice activity detection (VAD), low-latency audio streaming, volume visualization, and optional full-session recording — built for voice-enabled interfaces and live audio applications.
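Usage sketch (inferred from the bundled source below, not from a published README): the bundle exports a useAudioProcessorKit hook and a MIC_STATE constant; the hook takes vad, recording, timing, audio, and data options and returns micState plus Start, Pause, Resume, Stop, Subscribe, and unSubscribe. Option names and callback shapes are read from the minified code, so treat this as an illustration rather than the package's documented API.

import React, { useEffect } from "react";
import { useAudioProcessorKit } from "react-audio-processor-kit";

export function VoiceRecorder() {
  const { micState, Start, Pause, Resume, Stop, Subscribe, unSubscribe } =
    useAudioProcessorKit({
      vad: { enabled: true },                                  // emit a chunk whenever the speaker goes silent
      recording: { enabled: true, onComplete: (blob) => {} },  // full-session Blob delivered when Stop() is called
      timing: { interval: 1000, volumeVisualization: true },   // chunk every 1000 ms when VAD is off
      audio: { wav: true, sampleRate: 16000 },                 // wav: false yields raw 16-bit PCM instead of WAV
      data: { onAvailable: (blob) => {} },                     // per-chunk Blob (VAD- or timer-driven)
    });

  useEffect(() => {
    // Subscribe to live { volume, isSpeaking, micCode } updates, e.g. for a level meter.
    const id = Subscribe(({ volume, isSpeaking }) => {
      /* drive visualization here */
    });
    return () => unSubscribe(id);
  }, []); // the returned functions are not memoized, so subscribe once on mount

  return (
    <div>
      <span>mic state: {micState}</span>
      <button onClick={Start}>Start</button>
      <button onClick={Pause}>Pause</button>
      <button onClick={Resume}>Resume</button>
      <button onClick={Stop}>Stop</button>
    </div>
  );
}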
2 lines • 39.1 kB
JavaScript
/*! For license information please see index.js.LICENSE.txt */
!function(e,t){if("object"==typeof exports&&"object"==typeof module)module.exports=t(require("react"));else if("function"==typeof define&&define.amd)define(["react"],t);else{var n="object"==typeof exports?t(require("react")):t(e.react);for(var r in n)("object"==typeof exports?exports:e)[r]=n[r]}}(self,(e=>(()=>{"use strict";var t={155:t=>{t.exports=e}},n={};function r(e){var i=n[e];if(void 0!==i)return i.exports;var a=n[e]={exports:{}};return t[e](a,a.exports,r),a.exports}r.d=(e,t)=>{for(var n in t)r.o(t,n)&&!r.o(e,n)&&Object.defineProperty(e,n,{enumerable:!0,get:t[n]})},r.o=(e,t)=>Object.prototype.hasOwnProperty.call(e,t),r.r=e=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})};var i={};r.r(i),r.d(i,{MIC_STATE:()=>k,useAudioProcessorKit:()=>R});var a,o,s,u,l,c=r(155),h={PAUSE:2,RESUME:4,STOP:3,INIT:1,START:6,VAD_SPEAKING_STARTED:10,VAD_SPEAKING_STOPED:12,AUDIO_CHUNK_TIMER_TRIGGER:11,ON_COMPLETE_FULL_RECORDING_TRIGGER:13,CLEAN:20},f=new Blob(["\n\nclass AudioProcessor extends AudioWorkletProcessor {\n\n constructor() {\n super();\n console.log(\"Hello :) from AudioProcessor V6\");\n\n this._readyTolisten = false;\n this._userSettings;\n this._needFullRecording = false;\n this._enableVad = false;\n this._eachProcessCallTime = 0.3333333333333333; //3ms by default , it can auto adjust based on sample rate and buffersize\n\n /**\n * WebAudio calls process() every time, we can't replace it.\n * So we use _runtimeProcess inside it, and change _runtimeProcess\n * to whatever function we need (like with VAD or without).\n * This way we avoid if checks and get better performance.\n */\n this._runtimeProcess = () => true; // default dummy function\n this._runtimeSwitcher = () => true;\n\n /**\n * variables for vad feature\n */\n this._peakNoticedFrameCount = 0;\n this._peakMaxFrame = 30;//sound start 90ms\n this._silenceNoticedFrameCount = 0;\n this._silentMaxFrame = 70; // 70 FRAME OF EACH 3MS TO 60*3 GIVES '180MS' OF SILENT MEANS TRIGGERS\n this._facedAnyPeakVolume = false; // a variable which allow to vad to save if meet peak condtion\n this._noiseFloor = 0.004;\n\n /**\n * variables for need to save audio to record total session from start to stop\n */\n this._totalSessionLength = 0;\n this._totalSession = [];\n\n /**\n * variables for need to save audio per given Time in frame or based on VAD\n */\n this._buffer2dArray16Bit = [];\n this._buffer2dpreCaluculatingLength = 0;\n\n\n /**\n * for Volume level of audio needed for virtualization and others\n * this rms volume data is update to main thread per the given frame\n */\n this._VolumeUpdateframe = this.msToFrame(16); // 16 MS to get 60 frame rate like smooth\n this._currentVolumeFrame = 0;\n\n /***\n * calls callback to get pass audio chunk data in nonvad senario data to main thread by given frame time\n */\n this.maxTimeTriggerSecondsInFrame = 333; // here 1 second default time , 3 ms apprx be the time to call process ,\n this.currentTimeTriggerSecondsInFrame = 0;\n\n this._enabledTimeIntervalVolumeVisualization = false;\n\n /**\n * for getGMS function variables\n */\n this.browserDefaultAudioBufferLength = 0;\n this.precalculatedLengthFastDivision = 0;\n\n /**\n * Communicate with main thread\n */\n this.port.onmessage = (event) => {\n const params = event.data;\n switch (params.status) {\n case 1://this.signal.INIT\n this._initialAssign(params);\n break;\n case this.signal.PAUSE:\n this._readyTolisten = false;\n break;\n\n case 
this.signal.START:\n this._readyTolisten = true;\n break;\n\n case this.signal.RESUME:\n this._readyTolisten = true;\n break;\n\n case this.signal.CLEAN:\n this._clean();\n console.log(\"called clean\");\n break;\n\n case this.signal.STOP:\n this._readyTolisten = false;\n\n if (this._needFullRecording) {\n const transferList = this._totalSession.map(chunk => chunk.buffer);\n this.port.postMessage({\n status: this.signal.ON_COMPLETE_FULL_RECORDING_TRIGGER,\n chunks: this._totalSession,\n length: this._totalSessionLength\n }, transferList);\n this._totalSession = [];\n this._totalSessionLength = 0;\n }\n break;\n default:\n console.log(\"not matching status key\");\n }\n };\n }\n\n _initialAssign(params) {\n this.signal = params.signal;\n this._runtimeSwitcher = this._runtimeSwitcherHelper.bind(this);\n this._oneTimeOptions(params);\n this._runtimeOptions(params);\n this._chooseProcess();\n this._readyTolisten = true;\n }\n\n _oneTimeOptions(params) {\n this._userSettings = params;\n this._enableVad = !!params?.vad?.enabled; //default False\n this._needFullRecording = !!params?.recording?.enabled; //default False\n }\n\n _runtimeOptions(params) {\n /**\n * time based settings\n */\n this._enabledTimeIntervalVolumeVisualization = params?.timing?.volumeVisualization;\n /**\n * vad based settings\n */\n this._noiseFloor = params?.vad?.noiseFloor;\n\n this._updateTimeBased(params);\n }\n\n _updateTimeBased(params) {\n /**\n * time based settings\n * all in Millisecond\n */\n this.maxTimeTriggerSecondsInFrame = this.msToFrame(params?.timing?.interval || 1000) //default timeSlice is 1000 ms\n /**\n * vad based settings\n * all in Millisecond\n */\n this._peakMaxFrame = this.msToFrame(params?.vad?.speakDetectionDelayMs || 90); // default max time confirm by system that user is started speaking\n this._silentMaxFrame = this.msToFrame(params?.vad?.silenceDetectionDelayMs || 210); // default max time confirm by system that user is stopped speaking\n }\n\n /**\n * logic to assign audioworklet process function to a function refrent wich only do the nesscery work ,Not other things\n * to reduce performance\n */\n _chooseProcess() {\n if (this._enableVad && this._needFullRecording) {\n this._runtimeProcess = this.Process_With_Vad_And_FullRecording.bind(this);\n } else if (this._enableVad) {\n this._runtimeProcess = this.Process_With_Vad.bind(this);\n } else if (this._needFullRecording) {\n this._runtimeProcess = this.Process_TiME_BASED_With_FullRecording.bind(this);\n } else {\n this._runtimeProcess = this.Process_TiME_BASED_Without_FullRecording.bind(this);\n }\n // why this pattern ? 
it reduce braching improve performance way more\n }\n\n process(inputs, outputs, parameters) { // at run time we can change inner funtion , this is dynamically changged\n if (!this._readyTolisten) return true;\n\n return this._runtimeSwitcher(inputs, outputs, parameters);\n }\n\n _runtimeSwitcherHelper(inputs, outputs, parameters) {\n const len = inputs?.[0]?.[0]?.length;\n if (len > 0) {\n this._calculateTimePerCall(len);\n this.browserDefaultAudioBufferLength = len;\n this.precalculatedLengthFastDivision = 1 / len;\n /**\n * changing to _runtimeProcess so next time it will direcly call the main methods\n */\n this._runtimeSwitcher = this._runtimeProcess;\n this._updateTimeBased(this._userSettings);\n return this._runtimeProcess(inputs, outputs, parameters);\n }\n return true;\n }\n\n _clean(){\n /**\n * variables for vad feature\n */\n this._peakNoticedFrameCount = 0;\n this._silenceNoticedFrameCount = 0;\n this._facedAnyPeakVolume = false;\n\n /**\n * variables for Buffers\n */\n this._buffer2dArray16Bit = [];\n this._buffer2dpreCaluculatingLength = 0;\n\n /**\n * variables for virtualization\n */\n this._currentVolumeFrame = 0;\n\n /**\n * variables for timebased feature\n */\n this.currentTimeTriggerSecondsInFrame = 0;\n }\n\n\n Process_TiME_BASED_Without_FullRecording(inputs, outputs, parameters) {\n\n const input = inputs[0];\n\n const channelData = input[0]; // taking first channel\n this._enabledTimeIntervalVolumeVisualization && this.isSilent(channelData);\n\n if (this.currentTimeTriggerSecondsInFrame > this.maxTimeTriggerSecondsInFrame) {\n const transferList = this._buffer2dArray16Bit.map(chunk => chunk.buffer);\n this.port.postMessage({\n status: this.signal.AUDIO_CHUNK_TIMER_TRIGGER,\n // speaking: false,\n chunks: this._buffer2dArray16Bit,\n length: this._buffer2dpreCaluculatingLength\n }, transferList);\n\n this.currentTimeTriggerSecondsInFrame = 0;\n this._buffer2dArray16Bit = [];\n this._buffer2dpreCaluculatingLength = 0;\n return true;\n } else {\n this.currentTimeTriggerSecondsInFrame++;\n }\n const converted16bit_array = this.float32ToInt16(channelData);\n this._buffer2dArray16Bit.push(converted16bit_array);\n this._buffer2dpreCaluculatingLength += converted16bit_array.length;\n return true;\n }\n\n Process_TiME_BASED_With_FullRecording(inputs, outputs, parameters) {\n\n const input = inputs[0];\n\n const channelData = input[0]; // taking first channel\n\n this._enabledTimeIntervalVolumeVisualization && this.isSilent(channelData);\n\n if (this.currentTimeTriggerSecondsInFrame > this.maxTimeTriggerSecondsInFrame) {\n const transferList = this._buffer2dArray16Bit.map(chunk => chunk.buffer);\n this.port.postMessage({\n status: this.signal.AUDIO_CHUNK_TIMER_TRIGGER,\n // speaking: false,\n chunks: this._buffer2dArray16Bit,\n length: this._buffer2dpreCaluculatingLength\n }, transferList);\n\n this.currentTimeTriggerSecondsInFrame = 0;\n this._buffer2dArray16Bit = [];\n this._buffer2dpreCaluculatingLength = 0;\n return true;\n } else {\n this.currentTimeTriggerSecondsInFrame++;\n }\n const converted16bit_array = this.float32ToInt16(channelData);\n this._buffer2dArray16Bit.push(converted16bit_array);\n this._buffer2dpreCaluculatingLength += converted16bit_array.length;\n this._totalSession.push(new Int16Array(converted16bit_array));\n this._totalSessionLength += converted16bit_array.length;\n return true;\n }\n\n\n Process_With_Vad_And_FullRecording(inputs, outputs, parameters) {\n const input = inputs[0];\n if (input.length) {\n\n const channelData = input[0]; // taking 
first channel\n //faced any peak audio is important because it ensure we not passing empty data to backed\n if (this.isSilent(channelData)) {\n\n if (!this._facedAnyPeakVolume) {\n this._peakNoticedFrameCount && (this._peakNoticedFrameCount = 0);\n return true; //early exit not valid audio\n }\n\n\n this._silenceNoticedFrameCount++;\n\n //60 FRAME OF EACH 3MS TO 60*3 GIVES '180MS'\n if (this._silenceNoticedFrameCount > this._silentMaxFrame) {\n const transferList = this._buffer2dArray16Bit.map(chunk => chunk.buffer);\n\n this.port.postMessage({\n status: this.signal.VAD_SPEAKING_STOPED,\n // speaking: false,\n chunks: this._buffer2dArray16Bit,\n length: this._buffer2dpreCaluculatingLength\n }, transferList);\n\n this._buffer2dArray16Bit = [];\n this._buffer2dpreCaluculatingLength = 0;\n this._silenceNoticedFrameCount = 0;\n this._facedAnyPeakVolume = false;\n this._peakNoticedFrameCount = 0;\n return true;\n }\n\n\n } else {\n // here the logic is i show is speaking data after some real word passed so i wait 180 ms of continuos speach before declaring it is speaking\n if ((!this._facedAnyPeakVolume) && this._peakNoticedFrameCount > this._peakMaxFrame) {\n // Notify UI that voice HEARD and may started speaking\n this.port.postMessage({ status: this.signal.VAD_SPEAKING_STARTED });\n this._facedAnyPeakVolume = true;\n }\n\n !this._facedAnyPeakVolume && this._peakNoticedFrameCount++;\n this._silenceNoticedFrameCount = 0;\n\n }\n\n const converted16bit_array = this.float32ToInt16(channelData);\n this._totalSession.push(new Int16Array(converted16bit_array));\n this._buffer2dArray16Bit.push(converted16bit_array);\n this._totalSessionLength += converted16bit_array.length;\n this._buffer2dpreCaluculatingLength += converted16bit_array.length;\n\n }\n\n // Keep processor alive\n return true;\n }\n\n Process_With_Vad(inputs, outputs, parameters) {\n const input = inputs[0];\n if (input.length) {\n\n const channelData = input[0]; // taking first channel\n //faced any peak audio is important because it ensure we not passing empty data to backed\n if (this.isSilent(channelData)) {\n\n if (!this._facedAnyPeakVolume) {\n this._peakNoticedFrameCount && (this._peakNoticedFrameCount = 0);\n return true; //early exit not valid audio\n }\n\n\n this._silenceNoticedFrameCount++;\n\n //60 FRAME OF EACH 3MS TO 60*3 GIVES '180MS'\n if (this._silenceNoticedFrameCount > this._silentMaxFrame) {\n const transferList = this._buffer2dArray16Bit.map(chunk => chunk.buffer);\n\n this.port.postMessage({\n status: this.signal.VAD_SPEAKING_STOPED,\n // speaking: false,\n chunks: this._buffer2dArray16Bit,\n length: this._buffer2dpreCaluculatingLength\n }, transferList);\n\n this._buffer2dArray16Bit = [];\n this._buffer2dpreCaluculatingLength = 0;\n this._silenceNoticedFrameCount = 0;\n this._facedAnyPeakVolume = false;\n this._peakNoticedFrameCount = 0;\n return true;\n }\n\n\n } else {\n // here the logic is i show is speaking data after some real word passed so i wait 180 ms of continuos speach before declaring it is speaking\n if ((!this._facedAnyPeakVolume) && this._peakNoticedFrameCount > this._peakMaxFrame) {\n // Notify UI that voice HEARD and may started speaking\n this.port.postMessage({ status: this.signal.VAD_SPEAKING_STARTED });\n this._facedAnyPeakVolume = true;\n }\n\n !this._facedAnyPeakVolume && this._peakNoticedFrameCount++;\n this._silenceNoticedFrameCount = 0;\n\n }\n\n const converted16bit_array = this.float32ToInt16(channelData);\n this._buffer2dArray16Bit.push(converted16bit_array);\n 
this._buffer2dpreCaluculatingLength += converted16bit_array.length;\n }\n\n // Keep processor alive\n return true;\n }\n\n\n /**\n * Convert 32 bit float to 16 bit int\n * example\n * [\n * 1.99999988079071044921875 to normalize between 16 bit int meaning convert under 35000\n * by just multipling point number to 35000 and also (32000 for negative value )\n * by this we get convert 32 bit to 16 bit\n * int\n * ]\n * @param {*} float32Array\n * @returns INTEGER\n */\n float32ToInt16(float32Array) {\n const len = float32Array.length;\n const int16Array = new Int16Array(len);\n for (let i = 0; i < len; i++) {\n let s = Math.max(-1, Math.min(1, float32Array[i]));\n int16Array[i] = s < 0 ? s * 0x8000 : s * 0x7FFF;\n }\n return int16Array;\n }\n\n /**\n * get silent and peak from value under 0 to 1\n *\n * v3 - new changes\n * In this version i simply only checking half value\n * it is ok till we get positive result same reslt not major accurasy issue\n * and get performance bost\n *\n * @param {*} buffer\n * @returns float\n */\n _getRMS(buffer) {\n let sumSquares = 0;\n const len = this.browserDefaultAudioBufferLength;\n /**\n * idea is to get a single value from 0 to 1 from all 128 values from this list;\n * mathematically just 20/100 gives 0.2 like that adding each give 128 value and total is 128.\n * which is 128/128 gives 1\n *\n * this is base idea other than it all normallization of each single 128 value\n * like if value is minus making is positive , any way it will not exede 1 so allways\n * its total lesst than or equal to 128 why becaous is each maxmimum is 1 then 1*128 gives 128\n *\n * finally sqaure rooting to get lowest round value we are doing this\n * without it also we can work in this\n *\n */\n for (let i = 0; i < len; i++) {\n sumSquares += buffer[i] * buffer[i];\n }\n /**\n * here constent 0.0078125 equivalent of 128 basically iam doing division in fastest way\n * multiplicational divition is faster than actual division in cpu\n * 1/128 gives 0.0078125 simply just multiply by it you get its value\n */\n return Math.sqrt(sumSquares * this.precalculatedLengthFastDivision);\n }\n\n /**\n * analyse for silence in audio bit\n * with our given constant value\n * @param {*} buffer 128 32bit float\n * @returns boolean\n */\n isSilent(buffer) {\n const rms = this._getRMS(buffer);\n const isSilent = rms < this._noiseFloor; //?is silent , lower than user noiceflor level?\n\n if (this._currentVolumeFrame > this._VolumeUpdateframe) {\n\n if (isSilent) {\n // Smoothly reduce previous RMS if silence is detected\n if (this.previousRms > 0) {\n this.previousRms -= 0.0018; // Gradual decrease of volume during silence\n }\n\n } else {\n // Update RMS value when audio is detected\n this.previousRms = rms;\n }\n this._currentVolumeFrame = 0;\n this.port.postMessage({\n status: 15,\n volume: isSilent ? 
this.previousRms : rms,\n });\n } else {\n this._currentVolumeFrame++;\n }\n\n return isSilent;\n }\n\n\n /**\n * Converts milliseconds to frame count based on an assumed 3ms per frame.\n */\n msToFrame(totalMs = 0) {\n return (totalMs * this._eachProcessCallTime) | 0;\n }\n\n\n _calculateTimePerCall(samplesArraySizePerCall) {\n //(128 /44000) *1000 gives approx 2.66ms and we can increase or decrease based on value\n const timePerCallInMilliseconds = (samplesArraySizePerCall / this._userSettings.actualSampleRate) * 1000; // how many possible call need to cover the whole samplerate with gives bytes array length\n this._eachProcessCallTime = 1 / timePerCallInMilliseconds; // this is converting diviso number to division in a multiplication way\n this._VolumeUpdateframe = this.msToFrame(16);\n }\n\n\n\n}\n\n\n registerProcessor('audio-processor', AudioProcessor);\n"],{type:"application/javascript"}),d=URL.createObjectURL(f);function p(e){return p="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e},p(e)}function m(e,t){var n="undefined"!=typeof Symbol&&e[Symbol.iterator]||e["@@iterator"];if(!n){if(Array.isArray(e)||(n=y(e))||t&&e&&"number"==typeof e.length){n&&(e=n);var r=0,i=function(){};return{s:i,n:function(){return r>=e.length?{done:!0}:{done:!1,value:e[r++]}},e:function(e){throw e},f:i}}throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}var a,o=!0,s=!1;return{s:function(){n=n.call(e)},n:function(){var e=n.next();return o=e.done,e},e:function(e){s=!0,a=e},f:function(){try{o||null==n.return||n.return()}finally{if(s)throw a}}}}function v(){v=function(){return t};var e,t={},n=Object.prototype,r=n.hasOwnProperty,i=Object.defineProperty||function(e,t,n){e[t]=n.value},a="function"==typeof Symbol?Symbol:{},o=a.iterator||"@@iterator",s=a.asyncIterator||"@@asyncIterator",u=a.toStringTag||"@@toStringTag";function l(e,t,n){return Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}),e[t]}try{l({},"")}catch(e){l=function(e,t,n){return e[t]=n}}function c(e,t,n,r){var a=t&&t.prototype instanceof b?t:b,o=Object.create(a.prototype),s=new C(r||[]);return i(o,"_invoke",{value:F(e,n,s)}),o}function h(e,t,n){try{return{type:"normal",arg:e.call(t,n)}}catch(e){return{type:"throw",arg:e}}}t.wrap=c;var f="suspendedStart",d="suspendedYield",m="executing",g="completed",y={};function b(){}function _(){}function S(){}var w={};l(w,o,(function(){return this}));var T=Object.getPrototypeOf,A=T&&T(T(I([])));A&&A!==n&&r.call(A,o)&&(w=A);var E=S.prototype=b.prototype=Object.create(w);function k(e){["next","throw","return"].forEach((function(t){l(e,t,(function(e){return this._invoke(t,e)}))}))}function P(e,t){function n(i,a,o,s){var u=h(e[i],e,a);if("throw"!==u.type){var l=u.arg,c=l.value;return c&&"object"==p(c)&&r.call(c,"__await")?t.resolve(c.__await).then((function(e){n("next",e,o,s)}),(function(e){n("throw",e,o,s)})):t.resolve(c).then((function(e){l.value=e,o(l)}),(function(e){return n("throw",e,o,s)}))}s(u.arg)}var a;i(this,"_invoke",{value:function(e,r){function i(){return new t((function(t,i){n(e,r,t,i)}))}return a=a?a.then(i,i):i()}})}function F(t,n,r){var i=f;return function(a,o){if(i===m)throw Error("Generator is already running");if(i===g){if("throw"===a)throw o;return{value:e,done:!0}}for(r.method=a,r.arg=o;;){var s=r.delegate;if(s){var 
u=O(s,r);if(u){if(u===y)continue;return u}}if("next"===r.method)r.sent=r._sent=r.arg;else if("throw"===r.method){if(i===f)throw i=g,r.arg;r.dispatchException(r.arg)}else"return"===r.method&&r.abrupt("return",r.arg);i=m;var l=h(t,n,r);if("normal"===l.type){if(i=r.done?g:d,l.arg===y)continue;return{value:l.arg,done:r.done}}"throw"===l.type&&(i=g,r.method="throw",r.arg=l.arg)}}}function O(t,n){var r=n.method,i=t.iterator[r];if(i===e)return n.delegate=null,"throw"===r&&t.iterator.return&&(n.method="return",n.arg=e,O(t,n),"throw"===n.method)||"return"!==r&&(n.method="throw",n.arg=new TypeError("The iterator does not provide a '"+r+"' method")),y;var a=h(i,t.iterator,n.arg);if("throw"===a.type)return n.method="throw",n.arg=a.arg,n.delegate=null,y;var o=a.arg;return o?o.done?(n[t.resultName]=o.value,n.next=t.nextLoc,"return"!==n.method&&(n.method="next",n.arg=e),n.delegate=null,y):o:(n.method="throw",n.arg=new TypeError("iterator result is not an object"),n.delegate=null,y)}function R(e){var t={tryLoc:e[0]};1 in e&&(t.catchLoc=e[1]),2 in e&&(t.finallyLoc=e[2],t.afterLoc=e[3]),this.tryEntries.push(t)}function x(e){var t=e.completion||{};t.type="normal",delete t.arg,e.completion=t}function C(e){this.tryEntries=[{tryLoc:"root"}],e.forEach(R,this),this.reset(!0)}function I(t){if(t||""===t){var n=t[o];if(n)return n.call(t);if("function"==typeof t.next)return t;if(!isNaN(t.length)){var i=-1,a=function n(){for(;++i<t.length;)if(r.call(t,i))return n.value=t[i],n.done=!1,n;return n.value=e,n.done=!0,n};return a.next=a}}throw new TypeError(p(t)+" is not iterable")}return _.prototype=S,i(E,"constructor",{value:S,configurable:!0}),i(S,"constructor",{value:_,configurable:!0}),_.displayName=l(S,u,"GeneratorFunction"),t.isGeneratorFunction=function(e){var t="function"==typeof e&&e.constructor;return!!t&&(t===_||"GeneratorFunction"===(t.displayName||t.name))},t.mark=function(e){return Object.setPrototypeOf?Object.setPrototypeOf(e,S):(e.__proto__=S,l(e,u,"GeneratorFunction")),e.prototype=Object.create(E),e},t.awrap=function(e){return{__await:e}},k(P.prototype),l(P.prototype,s,(function(){return this})),t.AsyncIterator=P,t.async=function(e,n,r,i,a){void 0===a&&(a=Promise);var o=new P(c(e,n,r,i),a);return t.isGeneratorFunction(n)?o:o.next().then((function(e){return e.done?e.value:o.next()}))},k(E),l(E,u,"Generator"),l(E,o,(function(){return this})),l(E,"toString",(function(){return"[object Generator]"})),t.keys=function(e){var t=Object(e),n=[];for(var r in t)n.push(r);return n.reverse(),function e(){for(;n.length;){var r=n.pop();if(r in t)return e.value=r,e.done=!1,e}return e.done=!0,e}},t.values=I,C.prototype={constructor:C,reset:function(t){if(this.prev=0,this.next=0,this.sent=this._sent=e,this.done=!1,this.delegate=null,this.method="next",this.arg=e,this.tryEntries.forEach(x),!t)for(var n in this)"t"===n.charAt(0)&&r.call(this,n)&&!isNaN(+n.slice(1))&&(this[n]=e)},stop:function(){this.done=!0;var e=this.tryEntries[0].completion;if("throw"===e.type)throw e.arg;return this.rval},dispatchException:function(t){if(this.done)throw t;var n=this;function i(r,i){return s.type="throw",s.arg=t,n.next=r,i&&(n.method="next",n.arg=e),!!i}for(var a=this.tryEntries.length-1;a>=0;--a){var o=this.tryEntries[a],s=o.completion;if("root"===o.tryLoc)return i("end");if(o.tryLoc<=this.prev){var u=r.call(o,"catchLoc"),l=r.call(o,"finallyLoc");if(u&&l){if(this.prev<o.catchLoc)return i(o.catchLoc,!0);if(this.prev<o.finallyLoc)return i(o.finallyLoc)}else if(u){if(this.prev<o.catchLoc)return i(o.catchLoc,!0)}else{if(!l)throw Error("try 
statement without catch or finally");if(this.prev<o.finallyLoc)return i(o.finallyLoc)}}}},abrupt:function(e,t){for(var n=this.tryEntries.length-1;n>=0;--n){var i=this.tryEntries[n];if(i.tryLoc<=this.prev&&r.call(i,"finallyLoc")&&this.prev<i.finallyLoc){var a=i;break}}a&&("break"===e||"continue"===e)&&a.tryLoc<=t&&t<=a.finallyLoc&&(a=null);var o=a?a.completion:{};return o.type=e,o.arg=t,a?(this.method="next",this.next=a.finallyLoc,y):this.complete(o)},complete:function(e,t){if("throw"===e.type)throw e.arg;return"break"===e.type||"continue"===e.type?this.next=e.arg:"return"===e.type?(this.rval=this.arg=e.arg,this.method="return",this.next="end"):"normal"===e.type&&t&&(this.next=t),y},finish:function(e){for(var t=this.tryEntries.length-1;t>=0;--t){var n=this.tryEntries[t];if(n.finallyLoc===e)return this.complete(n.completion,n.afterLoc),x(n),y}},catch:function(e){for(var t=this.tryEntries.length-1;t>=0;--t){var n=this.tryEntries[t];if(n.tryLoc===e){var r=n.completion;if("throw"===r.type){var i=r.arg;x(n)}return i}}throw Error("illegal catch attempt")},delegateYield:function(t,n,r){return this.delegate={iterator:I(t),resultName:n,nextLoc:r},"next"===this.method&&(this.arg=e),y}},t}function g(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function y(e,t){if(e){if("string"==typeof e)return b(e,t);var n={}.toString.call(e).slice(8,-1);return"Object"===n&&e.constructor&&(n=e.constructor.name),"Map"===n||"Set"===n?Array.from(e):"Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)?b(e,t):void 0}}function b(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,r=Array(t);n<t;n++)r[n]=e[n];return r}function _(e,t,n){return(t=function(e){var t=function(e){if("object"!=p(e)||!e)return e;var t=e[Symbol.toPrimitive];if(void 0!==t){var n=t.call(e,"string");if("object"!=p(n))return n;throw new TypeError("@@toPrimitive must return a primitive value.")}return String(e)}(e);return"symbol"==p(t)?t:t+""}(t))in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function S(e,t,n,r,i,a,o){try{var s=e[a](o),u=s.value}catch(e){return void n(e)}s.done?t(u):Promise.resolve(u).then(r,i)}function w(e){return function(){var t=this,n=arguments;return new Promise((function(r,i){var a=e.apply(t,n);function o(e){S(a,r,i,o,s,"next",e)}function s(e){S(a,r,i,o,s,"throw",e)}o(void 0)}))}}function T(e){return e&&"function"==typeof e}function A(){return E.apply(this,arguments)}function E(){return E=w(v().mark((function e(){var t,n,r,i,c,h,f,p=arguments;return v().wrap((function(e){for(;;)switch(e.prev=e.next){case 0:if(t=p.length>0&&void 0!==p[0]?p[0]:16e3,e.prev=1,!(m=a)||"closed"===m.state||o!==t){e.next=6;break}return e.abrupt("return",[a,s,u,l]);case 6:return a=new((null===(n=window)||void 0===n?void 0:n.AudioContext)||(null===(r=window)||void 0===r?void 0:r.webkitAudioContext))({sampleRate:t}),o=t,null==(f=null===(i=a)||void 0===i||null===(i=i.audioWorklet)||void 0===i||null===(c=i.addModule)||void 0===c?void 0:c.call(i,d))||null===(h=f.catch)||void 0===h||h.call(f,(function(e){console.log("Issue on adding audioWorklet module")})),e.next=12,f;case 12:return s=new AudioWorkletNode(a,"audio-processor"),(u=a.createBiquadFilter()).type="highpass",u.frequency.value=120,u.Q.value=.7,(l=a.createBiquadFilter()).type="lowpass",l.frequency.value=3500,l.Q.value=.7,e.abrupt("return",[a,s,u,l]);case 22:e.next=30;break;case 
24:return e.prev=24,e.t0=e.catch(1),a=void 0,o=void 0,console.error("AudioContext/webkitAudioContext Instance Creation Or audioWorklet addModule issue ",e.t0),e.abrupt("return",!1);case 30:case"end":return e.stop()}var m}),e,null,[[1,24]])}))),E.apply(this,arguments)}var k={STOPPED:h.STOP,RECORDING:h.START,PAUSED:h.PAUSE},P=_(_(_({},h.STOP,"S"),h.START,"R"),h.PAUSE,"P");function F(e,t,n){return O.apply(this,arguments)}function O(){return(O=w(v().mark((function e(t,n,r){var i,a,o,s,u,l,c,f;return v().wrap((function(e){for(;;)switch(e.prev=e.next){case 0:return i=D(n),e.next=3,A(i.sampleRate);case 3:if(a=e.sent){e.next=6;break}throw new Error("Failed to initialize audioContext");case 6:return o=a[0],s=a[1],u=a[2],l=a[3],e.next=12,navigator.mediaDevices.getUserMedia({audio:!0});case 12:return c=e.sent,f=o.createMediaStreamSource(c),s.port.onmessage=t,i.signal=h,i.status=h.INIT,i.actualSampleRate=o.sampleRate,s.port.postMessage(i),f.connect(u).connect(l).connect(s),r.current.STREAM=c,r.current.SOURCE=f,r.current.WORKLETHREAD=s,r.current.AUDIOCONTEXT=o,r.current.POSTMESSAGE=s.port.postMessage,e.next=27,o.resume();case 27:return e.abrupt("return",!0);case 28:case"end":return e.stop()}}),e)})))).apply(this,arguments)}function R(){var e,t,n=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},r=(e=(0,c.useState)(P[k.STOPPED]),t=2,function(e){if(Array.isArray(e))return e}(e)||function(e,t){var n=null==e?null:"undefined"!=typeof Symbol&&e[Symbol.iterator]||e["@"];if(null!=n){var r,i,a,o,s=[],u=!0,l=!1;try{if(a=(n=n.call(e)).next,0===t){if(Object(n)!==n)return;u=!1}else for(;!(u=(r=a.call(n)).done)&&(s.push(r.value),s.length!==t);u=!0);}catch(e){l=!0,i=e}finally{try{if(!u&&null!=n.return&&(o=n.return(),Object(o)!==o))return}finally{if(l)throw i}}return s}}(e,t)||y(e,t)||function(){throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()),i=r[0],a=r[1],s=(0,c.useRef)({}),u=(0,c.useRef)(function(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?g(Object(n),!0).forEach((function(t){_(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):g(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}({},n)),l=(0,c.useRef)([]),f=(0,c.useRef)({volume:0,isSpeaking:!1,micCode:h.STOP}),d=function(e){var t=e||s.current;try{var n=null==t?void 0:t.SOURCE,r=null==t?void 0:t.AUDIOCONTEXT,i=null==t?void 0:t.STREAM;n&&n.disconnect(),r&&"closed"!==r.state&&r.suspend(),null==i||i.getTracks().forEach((function(e){return e.stop()})),t.STREAM&&(t.STREAM=void 0),S({status:h.CLEAN})}catch(e){console.error("issue with cleanup")}};(0,c.useEffect)((function(){var e=s.current;return function(){d(e)}}),[]);var p=function(e){var t=u.current,n=t._selectedAudioEncodingFunction;(0,t._onDataAvailable)(n(e.chunks,e.length,o))},m=function(e){var t=u.current,n=t._selectedAudioEncodingFunction;(0,t._onComplete)(n(e.chunks,e.length,o))},b=function(e){var t;return null===(t=s.current)||void 0===t?void 0:t[e]},S=function(e){var t,n;null===(t=b("WORKLETHREAD"))||void 0===t||null===(t=t.port)||void 0===t||null===(n=t.postMessage)||void 0===n||n.call(t,e)},A=function(){l.current.forEach((function(e){null==e||e(f.current)}))},E=function(e){f.current.micCode=e,f.current.volume=0,A(),a(P[e])},O=function(){var e=w(v().mark((function e(){var t,r;return 
v().wrap((function(e){for(;;)switch(e.prev=e.next){case 0:if("S"===i){e.next=3;break}return console.error("Session already running."),e.abrupt("return",!1);case 3:if(e.prev=3,null!==(t=s.current)&&void 0!==t&&t.STREAM){e.next=12;break}return o=void 0,l=void 0,c=void 0,void 0,void 0,void 0,v=!1===(null==(a=u.current)||null===(o=a.audio)||void 0===o?void 0:o.wav)?x:C,g=T(null==a||null===(l=a.data)||void 0===l?void 0:l.onAvailable)?a.data.onAvailable:function(){},y=T(null==a||null===(c=a.recording)||void 0===c?void 0:c.onComplete)?a.recording.onComplete:function(){},u.current._selectedAudioEncodingFunction=v,u.current._onDataAvailable=g,u.current._onComplete=y,e.next=8,F((function(e){var t=e.data;switch(t.status){case h.VAD_SPEAKING_STARTED:f.current.isSpeaking=!0;break;case h.VAD_SPEAKING_STOPED:f.current.isSpeaking=!1,p(t);break;case h.AUDIO_CHUNK_TIMER_TRIGGER:p(t);break;case h.ON_COMPLETE_FULL_RECORDING_TRIGGER:m(t);break;case 15:f.current.volume=t.volume,window.requestAnimationFrame(A);break;default:console.log("not matching status key")}}),n,s);case 8:e.sent&&E(h.START),e.next=16;break;case 12:return e.next=14,null===(r=b("AUDIOCONTEXT"))||void 0===r?void 0:r.resume();case 14:S({status:h.START}),E(h.START);case 16:return e.abrupt("return",{status:!0});case 19:throw e.prev=19,e.t0=e.catch(3),d(),Error({status:!1,error:e.t0});case 23:case"end":return e.stop()}var a,o,l,c,v,g,y}),e,null,[[3,19]])})));return function(){return e.apply(this,arguments)}}(),R=function(){var e=w(v().mark((function e(){var t;return v().wrap((function(e){for(;;)switch(e.prev=e.next){case 0:if("R"===i){e.next=3;break}return console.error("Not running."),e.abrupt("return");case 3:return S({status:h.PAUSE}),e.next=6,null===(t=b("AUDIOCONTEXT"))||void 0===t?void 0:t.suspend();case 6:E(h.PAUSE);case 7:case"end":return e.stop()}}),e)})));return function(){return e.apply(this,arguments)}}(),I=function(){var e=w(v().mark((function e(){var t;return v().wrap((function(e){for(;;)switch(e.prev=e.next){case 0:if("P"===i){e.next=3;break}return console.error("Not Resumed."),e.abrupt("return");case 3:return S({status:h.RESUME}),e.next=6,null===(t=b("AUDIOCONTEXT"))||void 0===t?void 0:t.resume();case 6:E(h.START);case 7:case"end":return e.stop()}}),e)})));return function(){return e.apply(this,arguments)}}(),D=function(){var e=w(v().mark((function e(){var t;return v().wrap((function(e){for(;;)switch(e.prev=e.next){case 0:if("S"!==i){e.next=3;break}return console.error("Not started."),e.abrupt("return");case 3:return S({status:h.STOP}),e.next=6,null===(t=b("AUDIOCONTEXT"))||void 0===t?void 0:t.suspend();case 6:E(h.STOP),d();case 8:case"end":return e.stop()}}),e)})));return function(){return e.apply(this,arguments)}}();return{micState:i,Start:O,Pause:R,Resume:I,Stop:D,Subscribe:function(e){if(e&&"function"==typeof e)return l.current.push(e)-1},unSubscribe:function(e){"number"==typeof e&&e>=0&&e<l.current.length&&(l.current[e]=null)}}}function x(e,t){var n,r=new ArrayBuffer(2*t),i=new DataView(r),a=0,o=m(e);try{for(o.s();!(n=o.n()).done;)for(var s=n.value,u=0;u<s.length;u++,a+=2)i.setInt16(a,s[u],!0)}catch(e){o.e(e)}finally{o.f()}return new Blob([r],{type:"application/octet-stream"})}function C(e,t){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:16e3,r=t||e.reduce((function(e,t){return e+t.length}),0),i=new ArrayBuffer(44+2*r),a=new DataView(i);I(a,0,"RIFF"),a.setUint32(4,36+2*r,!0),I(a,8,"WAVE"),I(a,12,"fmt 
"),a.setUint32(16,16,!0),a.setUint16(20,1,!0),a.setUint16(22,1,!0),a.setUint32(24,n,!0),a.setUint32(28,1*n*2,!0),a.setUint16(32,2,!0),a.setUint16(34,16,!0),I(a,36,"data"),a.setUint32(40,2*r,!0);var o,s=44,u=m(e);try{for(u.s();!(o=u.n()).done;)for(var l=o.value,c=0;c<l.length;c++,s+=2)a.setInt16(s,l[c],!0)}catch(e){u.e(e)}finally{u.f()}return new Blob([i],{type:"audio/wav"})}function I(e,t,n){for(var r=0;r<n.length;r++)e.setUint8(t+r,n.charCodeAt(r))}function D(e){var t={},n=null==e?void 0:e.vad;n&&"object"===p(n)&&(t.vad={enabled:!!n.enabled,speakDetectionDelayMs:n.speakDetectionDelayMs||30,silenceDetectionDelayMs:n.silenceDetectionDelayMs||50,noiseFloor:L(n.noiseFloor)||.008});var r=null==e?void 0:e.recording;r&&"object"===p(r)&&(t.recording={enabled:!!r.enabled});var i=null==e?void 0:e.timing;i&&"object"===p(i)&&(t.timing={interval:i.interval||1e3,volumeVisualization:!!i.volumeVisualization});var a=null==e?void 0:e.audio;return a&&"object"===p(a)&&(t.audio={wav:!!a.wav,sampleRate:a.sampleRate||16e3}),t}function L(e){if(!e)return null;var t=Math.min(Math.max(1,e),100);return t>90?2*t/1e3:t/1e3}return i})()));