// @onesy/ui-react

import React from 'react';

import { is, isEnvironment } from '@onesy/utils';
import { classNames, style as styleMethod, useOnesyTheme } from '@onesy/style-react';

import IconMaterialStop from '@onesy/icons-material-rounded-react/IconMaterialStopW100';
import IconMaterialSpeechToText from '@onesy/icons-material-rounded-react/IconMaterialSpeechToTextW100';

import LineElement from '../Line';
import TooltipElement from '../Tooltip';
import IconButtonElement from '../IconButton';
import { staticClassName } from '../utils';

const useStyle = styleMethod(theme => ({
  root: {},

  iconButton: {
    '&.onesy-IconButton-root': {
      transition: theme.methods.transitions.make('transform'),

      '&:active': {
        transform: 'scale(0.94)'
      }
    }
  }
}), { name: 'onesy-SpeechToText' });

const SpeechToText = React.forwardRef((props_, ref) => {
  const theme = useOnesyTheme();

  const l = theme.l;

  // Merge theme-level default props with the props passed in
  const props = React.useMemo(() => ({
    ...theme?.ui?.elements?.all?.props?.default,
    ...theme?.ui?.elements?.onesySpeechToText?.props?.default,
    ...props_
  }), [props_]);

  // Allow the theme to override the underlying elements
  const Line = React.useMemo(() => theme?.elements?.Line || LineElement, [theme]);
  const Tooltip = React.useMemo(() => theme?.elements?.Tooltip || TooltipElement, [theme]);
  const IconButton = React.useMemo(() => theme?.elements?.IconButton || IconButtonElement, [theme]);

  const {
    size = 'regular',

    SpeechRecognition: SpeechRecognition_,

    continuous = true,
    interimResults = true,
    grammars,
    language = 'en-US',
    maxAlternatives = 1,

    join = '. ',

    loading,
    disabled,

    Icon: Icon_ = IconMaterialSpeechToText,
    IconStop = IconMaterialStop,

    onChange,
    onData: onData_,
    onListen: onListen_,
    onListenStop: onListenStop_,
    onStart: onStart_,
    onStop: onStop_,
    onError: onError_,

    TooltipProps,
    IconButtonProps,
    IconProps,

    className,

    ...other
  } = props;

  const [status, setStatus] = React.useState('initial');

  const { classes } = useStyle();

  const refs = {
    root: React.useRef(undefined),
    recognition: React.useRef(undefined),
    value: React.useRef(''),
    result: React.useRef([]),
    status: React.useRef(status)
  };

  refs.status.current = status;

  // Web Speech API availability (browser only)
  const supported = isEnvironment('browser') && ('SpeechRecognition' in window || 'webkitSpeechRecognition' in window);

  const onData = React.useCallback(event => {
    // Join the transcripts of all results received so far
    const value = Array.from(event.results || []).map(item => item[0]?.transcript || '').join('');

    refs.value.current = value;

    if (!value) {
      refs.result.current.push(value);
    }

    if (is('function', onData_)) onData_(value);
  }, [onData_]);

  const onStart = React.useCallback(async event => {
    setStatus('started');

    if (is('function', onStart_)) onStart_(event);
  }, [onStart_]);

  const onEnd = React.useCallback(event => {
    setStatus('initial');

    // result
    refs.result.current.push(refs.value.current);

    const response = refs.result.current.join(join);

    if (is('function', onChange)) onChange(response);
  }, [onChange, join]);

  const onError = React.useCallback(event => {
    if (is('function', onError_)) onError_(event);
  }, [onError_]);

  React.useEffect(() => {
    // previous clean up
    if (refs.recognition.current) {
      refs.recognition.current.abort();
    }

    // make a new instance
    if (supported) {
      const SpeechRecognitionClass = SpeechRecognition_ || window.SpeechRecognition || window.webkitSpeechRecognition;

      if (SpeechRecognitionClass) {
        refs.recognition.current = new SpeechRecognitionClass();

        // options
        if (continuous !== undefined) refs.recognition.current.continuous = continuous;
        if (interimResults !== undefined) refs.recognition.current.interimResults = interimResults;
        if (grammars !== undefined) refs.recognition.current.grammars = grammars;
        if (language !== undefined) refs.recognition.current.lang = language;
        if (maxAlternatives !== undefined) refs.recognition.current.maxAlternatives = maxAlternatives;

        // events
        refs.recognition.current.addEventListener('start', onStart);
        refs.recognition.current.addEventListener('end', onEnd);
        refs.recognition.current.addEventListener('result', onData);
        refs.recognition.current.addEventListener('error', onError);
      }
    }

    return () => {
      // Clean up
      if (refs.recognition.current) {
        refs.recognition.current.removeEventListener('start', onStart);
        refs.recognition.current.removeEventListener('end', onEnd);
        refs.recognition.current.removeEventListener('result', onData);
        refs.recognition.current.removeEventListener('error', onError);
      }
    };
  }, [SpeechRecognition_, supported, continuous, interimResults, grammars, language, maxAlternatives]);

  const onCleanUp = React.useCallback(() => {
    refs.result.current = [];
    refs.value.current = '';
  }, []);

  const onListen = React.useCallback(async event => {
    if (refs.recognition.current) {
      // Clean up
      onCleanUp();

      refs.recognition.current.start();

      if (is('function', onListen_)) onListen_(event);
    }
  }, [onListen_]);

  const onListenStop = React.useCallback(async event => {
    if (refs.recognition.current) {
      refs.recognition.current.stop();

      if (is('function', onListenStop_)) onListenStop_(event);
    }
  }, [onListenStop_]);

  const iconProps = { size, ...IconProps };

  const iconButtonProps = { size, loading, disabled, ...IconButtonProps };

  if (!supported) return null;

  let IconToUse = Icon_;
  let name = l('Speech to text');

  if (status === 'started') {
    IconToUse = IconStop;
    name = l('Stop');
  }

  return React.createElement(
    Line,
    {
      ref: item => {
        if (ref) {
          if (is('function', ref)) ref(item);
          else ref.current = item;
        }

        refs.root.current = item;
      },

      gap: 1,
      direction: 'row',
      align: 'center',

      className: classNames([
        staticClassName('SpeechToText', theme) && [
          'onesy-SpeechToText-root',
          `onesy-SpeechToText-size-${size}`
        ],

        className,
        classes.root
      ]),

      ...other
    },

    React.createElement(
      Tooltip,
      { name, ...TooltipProps },

      React.createElement(
        IconButton,
        {
          onClick: status === 'started' ? onListenStop : onListen,

          ...iconButtonProps,

          selected: status === 'started',
          disabled: disabled !== undefined ? disabled : !supported,

          className: classNames([
            staticClassName('SpeechToText', theme) && ['onesy-SpeechToText-iconButton'],
            classes.iconButton
          ])
        },

        React.createElement(IconToUse, iconProps)
      )
    )
  );
});

SpeechToText.displayName = 'onesy-SpeechToText';

export default SpeechToText;
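
// Usage sketch (comment only, not executed): a minimal way to wire this
// element into an app. The import path and surrounding markup are
// assumptions, not part of this file; the props shown (language, continuous,
// onChange) match the destructuring above. onChange receives the joined
// transcript once recognition ends.
//
// import React from 'react';
// import SpeechToText from '@onesy/ui-react/SpeechToText';
//
// const Example = () => {
//   const [transcript, setTranscript] = React.useState('');
//
//   return (
//     <div>
//       {/* Renders an icon button; clicking toggles listening */}
//       <SpeechToText
//         language='en-US'
//         continuous
//         onChange={value => setTranscript(value)}
//       />
//
//       <p>{transcript}</p>
//     </div>
//   );
// };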