UNPKG

@aurally/speech-control

Version:

A class that handles microphone permissions and starts and observes speech input

228 lines (217 loc) 9.76 kB
import { Observable, empty, throwError, timer, from } from 'rxjs';
import { finalize, filter, debounceTime, repeatWhen, retryWhen, mergeMap } from 'rxjs/operators';
/* eslint no-void: "off" */

// document.readyState values that mean the DOM is already usable.
var loadedStates = ['interactive', 'complete'];

/**
 * Resolve a Promise once the DOM is ready.
 * @param {Function} [cb] - optional callback, scheduled asynchronously on ready.
 * @param {Document} [doc] - document to observe; defaults to window.document.
 *   May also be passed as the lone first argument.
 * @returns {Promise<void>}
 */
var whenDomReady = function whenDomReady(cb, doc) {
  return new Promise(function (resolve) {
    // Allow doc to be passed in as the lone first param.
    if (cb && typeof cb !== 'function') {
      doc = cb;
      cb = null;
    }
    // Use the global document if we don't have one.
    doc = doc || window.document;
    // Resolve, scheduling the optional callback asynchronously.
    var done = function done() {
      return resolve(void (cb && setTimeout(cb)));
    };
    // Resolve now if the DOM has already loaded,
    // otherwise wait for DOMContentLoaded.
    if (loadedStates.indexOf(doc.readyState) !== -1) {
      done();
    } else {
      doc.addEventListener('DOMContentLoaded', done);
    }
  });
};

// Promise-chain helper: `.then(whenDomReady.resume(doc))` passes the chained
// value through unchanged once the DOM is ready.
whenDomReady.resume = function (doc) {
  return function (val) {
    return whenDomReady(doc).then(function () {
      return val;
    });
  };
};

// Styles for the "listening" notification banner.
// FIX: the `.arly-ms-disable` rule previously ended with a dangling `text`
// token (an unfinished property) that made the declaration invalid CSS;
// browsers ignored it, so removing it preserves rendering while making the
// stylesheet valid.
var Css = '\n' +
  '  #arly-ms {\n' +
  '    position: fixed;\n' +
  '    top: 0.75rem;\n' +
  '    left: 0.75rem;\n' +
  '    right: 0.75rem;\n' +
  '    z-index: 100000;\n' +
  '    border-radius: 6px;\n' +
  '    display: flex;\n' +
  '    align-items: center;\n' +
  '    padding: 1rem;\n' +
  '    background: rgba(0,0,0,0.7);\n' +
  '    box-shadow: 0 0 3px 0 rgba(0,0,0,0.7);\n' +
  '    color: #fff;\n' +
  '    transition: opacity 0.3s;\n' +
  '    text-align: left;\n' +
  '  }\n' +
  '\n' +
  '  #arly-ms.hidden {\n' +
  '    opacity: 0;\n' +
  '  }\n' +
  '\n' +
  '  #arly-ms.gone {\n' +
  '    z-index: -1;\n' +
  '  }\n' +
  '\n' +
  '  .arly-ms-text {\n' +
  '    flex: 1;\n' +
  '  }\n' +
  '\n' +
  '  .arly-ms-rec {\n' +
  '    display: inline-block;\n' +
  '    width: 1rem;\n' +
  '    height: 1rem;\n' +
  '    margin-right: 1rem;\n' +
  '    border-radius: 50%;\n' +
  '    background: #C91C2E;\n' +
  '    box-shadow: 0 0 5px 0 rgba(201,28,46,0.7);\n' +
  '  }\n' +
  '\n' +
  '  .arly-ms-disable {\n' +
  '    color: #bbb;\n' +
  '  }\n';

// Build the notification markup (inline style element + banner div).
var Html = function (notificationText, disableText) {
  return '\n' +
    '<style>\n' +
    Css +
    '\n</style>\n' +
    '<div id="arly-ms">\n' +
    '  <span class="arly-ms-rec"></span>\n' +
    '  <span class="arly-ms-text">' + notificationText + '</span>\n' +
    '  <span class="arly-ms-disable">' + disableText + '</span>\n' +
    '</div>\n';
};

// Look up the notification banner element, if it has been appended.
function getNotification() {
  return document.querySelector('#arly-ms');
}

// Wire up the events a notification exposes. `disable` is a Promise that
// resolves when the user clicks the disable control.
function notificationEvents(notification) {
  var disable = new Promise(function (resolve) {
    var disableSpan = notification && notification.querySelector('.arly-ms-disable');
    disableSpan && disableSpan.addEventListener('click', resolve);
  });
  return { disable: disable };
}

/**
 * Append the notification banner to the page (or re-show an existing one).
 * @param {Object} [notificationOptions]
 * @param {Element} [notificationOptions.container] - parent; defaults to document.body.
 * @param {string} [notificationOptions.text] - banner text.
 * @param {string} [notificationOptions.disableText] - label for the disable control.
 * @returns {Promise<{disable: Promise<Event>}>} resolves with the banner's events.
 */
var append = function (notificationOptions) {
  if (notificationOptions === void 0) {
    notificationOptions = {};
  }
  return new Promise(function (resolve) {
    var notification = getNotification();
    if (!notification) {
      whenDomReady().then(function () {
        // navigator.userLanguage is the legacy IE fallback.
        var language = navigator.language || navigator.userLanguage;
        var container = notificationOptions.container || document.body;
        var notificationText = notificationOptions.text ||
          'I am listening for your search. Your language is ' + language;
        var disableText = notificationOptions.disableText || 'Disable';
        container.insertAdjacentHTML('beforeend', Html(notificationText, disableText));
        resolve(notificationEvents(getNotification()));
      });
    } else {
      // Banner already exists: just make it visible again.
      notification.classList.remove('hidden', 'gone');
      resolve(notificationEvents(notification));
    }
  });
};

// Fade the banner out (0.3s CSS opacity transition), then drop it behind
// the page after 500 ms so it no longer intercepts clicks.
var remove = function () {
  var notification = getNotification();
  if (notification) {
    notification.classList.add('hidden');
    setTimeout(function () {
      return notification.classList.add('gone');
    }, 500);
  }
};

// Error identifiers delivered through the observable's error channel.
var SpeechControlErrors;
(function (SpeechControlErrors) {
  SpeechControlErrors['NoSpeechRecognition'] = 'no-speech-recognition';
  SpeechControlErrors['Disabled'] = 'disabled';
})(SpeechControlErrors || (SpeechControlErrors = {}));

/**
 * Handles microphone permissions and exposes browser speech recognition
 * as RxJS observables.
 */
var SpeechControl = /** @class */ (function () {
  /**
   * @param {Object} [options]
   * @param {string} [options.recLanguage] - BCP-47 tag for recognition language.
   */
  function SpeechControl(options) {
    this._stopped = false;
    this._notificationShown = false;
    this.recLanguage = options && options.recLanguage;
    this.notification = {};
  }

  // Start a SpeechRecognition session and pipe its result/end/error
  // events into `subscriber`.
  SpeechControl.prototype._record = function (subscriber) {
    var SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
    this._recognition = new SpeechRecognition();
    if (this._recognition) {
      // this._recognition.continuous = true
      if (this.recLanguage) {
        this._recognition.lang = this.recLanguage;
      }
      this._recognition.onresult = subscriber.next.bind(subscriber);
      this._recognition.onend = subscriber.complete.bind(subscriber);
      this._recognition.onerror = subscriber.error.bind(subscriber);
      this._recognition.start();
    }
  };

  // Persist the user's opt-out for this session and stop listening.
  SpeechControl.prototype._disableRec = function () {
    window.sessionStorage.setItem('ARLY_DISABLE_REC', 'true');
    this.stop();
  };

  /**
   * Trigger the browser's microphone permission prompt via getUserMedia.
   * The acquired stream is stopped immediately — it exists only to
   * surface the prompt.
   * @returns {Observable<void>}
   */
  SpeechControl.prototype.askForPermission = function () {
    return from(navigator.mediaDevices.getUserMedia({ audio: true }).then(function (stream) {
      // Stop it immediately; it's just used to trigger the permission.
      stream.getTracks().forEach(function (track) {
        track.stop();
      });
    }));
  };

  /**
   * Emit once (then complete) when microphone permission is granted;
   * error if it is denied. Falls back to getUserMedia where the
   * Permissions API is unavailable.
   * @returns {Observable<void>}
   */
  SpeechControl.prototype.whenPermissionGranted = function () {
    if (!navigator.permissions) {
      console.warn('SPEECH CONTROL: PERMISSIONS API IS NOT AVAILABLE, USING getUserMedia HERE');
      return this.askForPermission();
    }
    // Recurses via the `change` listener while the status is 'prompt'.
    var handleState = function (subscriber, status) {
      if (status.state === 'granted') {
        subscriber.next();
        subscriber.complete();
      } else if (status.state === 'prompt') {
        status.addEventListener('change', function (_a) {
          var target = _a.target;
          handleState(subscriber, target);
        });
      } else {
        subscriber.error();
      }
    };
    return new Observable(function (subscriber) {
      navigator.permissions
        .query({ name: 'microphone' })
        .then(function (status) {
          return handleState(subscriber, status);
        });
    });
  };

  /**
   * @returns {boolean} true when the user has not opted out this session
   *   and the browser exposes a SpeechRecognition implementation.
   */
  SpeechControl.prototype.isEnabled = function () {
    return (!window.sessionStorage.getItem('ARLY_DISABLE_REC') &&
      (window.hasOwnProperty('SpeechRecognition') ||
        window.hasOwnProperty('webkitSpeechRecognition')));
  };

  // Override the default notification options used by start().
  SpeechControl.prototype.setNotification = function (notification) {
    this.notification = notification;
  };

  /**
   * Observe recognition events whose latest transcript contains `term`.
   * Lazily starts (and shares) a single recognition stream.
   * @param {string} term - lower-case term to match.
   * @returns {Observable} filtered recognition events.
   */
  SpeechControl.prototype.on = function (term) {
    var _this = this;
    if (!this._observable) {
      this._observable = this.start().pipe(finalize(function () {
        // Allow a fresh stream to be created after this one tears down.
        return (_this._observable = undefined);
      }));
    }
    return this._observable.pipe(filter(function (event) {
      var item = event.results
        .item(event.results.length - 1)[0]
        .transcript.trim()
        .toLowerCase()
        .replace(/\s/g, ', ');
      return item.includes(term);
    }));
  };

  /**
   * Start listening. Emits SpeechRecognition result events; errors with
   * SpeechControlErrors.NoSpeechRecognition when unsupported/disabled,
   * or SpeechControlErrors.Disabled when the user opts out via the banner.
   * @param {Object} [notificationOptions] - see append().
   * @returns {Observable}
   */
  SpeechControl.prototype.start = function (notificationOptions) {
    var _this = this;
    this._stopped = false;
    return new Observable(function (subscriber) {
      if (_this.isEnabled()) {
        _this._record(subscriber);
        _this.whenPermissionGranted().subscribe(function () {
          if (!_this._notificationShown) {
            var notification = append(notificationOptions || _this.notification);
            notification.then(function (nr) {
              return nr.disable.then(function () {
                _this._disableRec();
                subscriber.error(SpeechControlErrors.Disabled);
              });
            });
            // Auto-hide the banner after 3 s.
            setTimeout(remove, 3000);
            _this._notificationShown = true;
          }
        });
      } else {
        subscriber.error(SpeechControlErrors.NoSpeechRecognition);
      }
    }).pipe(debounceTime(500), repeatWhen(function (complete) {
      return complete.pipe(mergeMap(function () {
        // Repeat because `continuous` does not work on all mobile devices.
        if (_this._stopped) {
          return empty();
        }
        return timer(500);
      }));
    }), retryWhen(function (error) {
      return error.pipe(mergeMap(function (error) {
        console.log(error);
        // Retry if nothing was said.
        if (error && error.error === 'no-speech') {
          return timer(500);
        }
        return throwError(error);
      }));
    }));
  };

  // Hide the banner and tear down the active recognition session.
  SpeechControl.prototype.stop = function () {
    remove();
    this._stopped = true;
    if (this._recognition) {
      this._recognition.stop();
    }
  };
  return SpeechControl;
}());

export { SpeechControlErrors, SpeechControl };
//# sourceMappingURL=speech-control.es5.js.map