@onesy/ui-react
UI for React
import _objectWithoutProperties from "@babel/runtime/helpers/objectWithoutProperties";
import _defineProperty from "@babel/runtime/helpers/defineProperty";
const _excluded = ["ref", "size", "SpeechRecognition", "continuous", "interimResults", "grammars", "language", "maxAlternatives", "join", "loading", "disabled", "Icon", "IconStop", "onChange", "onData", "onListen", "onListenStop", "onStart", "onStop", "onError", "TooltipProps", "IconButtonProps", "IconProps", "className"];
function ownKeys(e, r) { var t = Object.keys(e); if (Object.getOwnPropertySymbols) { var o = Object.getOwnPropertySymbols(e); r && (o = o.filter(function (r) { return Object.getOwnPropertyDescriptor(e, r).enumerable; })), t.push.apply(t, o); } return t; }
function _objectSpread(e) { for (var r = 1; r < arguments.length; r++) { var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? ownKeys(Object(t), !0).forEach(function (r) { _defineProperty(e, r, t[r]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(e, Object.getOwnPropertyDescriptors(t)) : ownKeys(Object(t)).forEach(function (r) { Object.defineProperty(e, r, Object.getOwnPropertyDescriptor(t, r)); }); } return e; }
import React from 'react';
import { is, isEnvironment } from '@onesy/utils';
import { classNames, style as styleMethod, useOnesyTheme } from '@onesy/style-react';
import IconMaterialStop from '@onesy/icons-material-rounded-react/IconMaterialStopW100';
import IconMaterialSpeechToText from '@onesy/icons-material-rounded-react/IconMaterialSpeechToTextW100';
import LineElement from '../Line';
import TooltipElement from '../Tooltip';
import IconButtonElement from '../IconButton';
import { staticClassName } from '../utils';
import { jsx as _jsx } from "react/jsx-runtime";
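// Styles: the icon button scales down slightly while pressed.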
const useStyle = styleMethod(theme => ({
root: {},
iconButton: {
'&.onesy-IconButton-root': {
transition: theme.methods.transitions.make('transform'),
'&:active': {
transform: 'scale(0.94)'
}
}
}
}), {
name: 'onesy-SpeechToText'
});
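// SpeechToText: an icon button that records speech through the Web Speech API
// (SpeechRecognition) and reports the transcript via onData (interim) and onChange (final).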
const SpeechToText = props_ => {
const theme = useOnesyTheme();
const l = theme.l;
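// Merge theme-wide and per-element default props with the props passed in.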
const props = _objectSpread(_objectSpread(_objectSpread({}, theme?.ui?.elements?.all?.props?.default), theme?.ui?.elements?.onesySpeechToText?.props?.default), props_);
const Line = theme?.elements?.Line || LineElement;
const Tooltip = theme?.elements?.Tooltip || TooltipElement;
const IconButton = theme?.elements?.IconButton || IconButtonElement;
const {
ref,
size = 'regular',
SpeechRecognition: SpeechRecognition_,
continuous = true,
interimResults = true,
grammars,
language = 'en-US',
maxAlternatives = 1,
join = '. ',
loading,
disabled,
Icon: Icon_ = IconMaterialSpeechToText,
IconStop = IconMaterialStop,
onChange,
onData: onData_,
onListen: onListen_,
onListenStop: onListenStop_,
onStart: onStart_,
onStop: onStop_,
onError: onError_,
TooltipProps,
IconButtonProps,
IconProps,
className
} = props,
other = _objectWithoutProperties(props, _excluded);
const [status, setStatus] = React.useState('initial');
const {
classes
} = useStyle();
const refs = {
root: React.useRef(undefined),
recognition: React.useRef(undefined),
value: React.useRef(''),
result: React.useRef([]),
status: React.useRef(status)
};
refs.status.current = status;
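// The Web Speech API is browser-only and still vendor-prefixed in some engines.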
const supported = isEnvironment('browser') && ('SpeechRecognition' in window || 'webkitSpeechRecognition' in window);
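// 'result' event: join every transcript received so far into a single string.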
const onData = event => {
const value = Array.from(event.results || []).map(item => item[0]?.transcript || '').join('');
// In continuous mode the browser can reset the result list; keep the previously
// accumulated segment before overwriting it with the now-empty value.
if (!value && refs.value.current) {
refs.result.current.push(refs.value.current);
}
refs.value.current = value;
if (is('function', onData_)) onData_(value);
};
const onStart = async event_0 => {
setStatus('started');
if (is('function', onStart_)) onStart_(event_0);
};
const onEnd = event_1 => {
setStatus('initial');
// flush the last transcript segment and emit the joined result
refs.result.current.push(refs.value.current);
const response = refs.result.current.join(join);
if (is('function', onChange)) onChange(response);
};
const onError = event_2 => {
if (is('function', onError_)) onError_(event_2);
};
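// (Re)create the recognition instance whenever its configuration changes and
// detach the event listeners on cleanup.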
React.useEffect(() => {
// previous clean up
if (refs.recognition.current) {
refs.recognition.current.abort();
}
// make a new instance
if (supported) {
// eslint-disable-next-line @typescript-eslint/no-use-before-define
const SpeechRecognitionClass = SpeechRecognition_ || window.SpeechRecognition || window.webkitSpeechRecognition;
if (SpeechRecognitionClass) {
refs.recognition.current = new SpeechRecognitionClass();
// options
if (continuous !== undefined) refs.recognition.current.continuous = continuous;
if (interimResults !== undefined) refs.recognition.current.interimResults = interimResults;
if (grammars !== undefined) refs.recognition.current.grammars = grammars;
if (language !== undefined) refs.recognition.current.lang = language;
if (maxAlternatives !== undefined) refs.recognition.current.maxAlternatives = maxAlternatives;
// attach recognition event listeners
refs.recognition.current.addEventListener('start', onStart);
refs.recognition.current.addEventListener('end', onEnd);
refs.recognition.current.addEventListener('result', onData);
refs.recognition.current.addEventListener('error', onError);
}
}
return () => {
// Clean up
if (refs.recognition.current) {
refs.recognition.current.removeEventListener('start', onStart);
refs.recognition.current.removeEventListener('end', onEnd);
refs.recognition.current.removeEventListener('result', onData);
refs.recognition.current.removeEventListener('error', onError);
}
};
}, [SpeechRecognition_, supported, continuous, interimResults, grammars, language, maxAlternatives]);
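// Reset the accumulated transcript between listening sessions.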
const onCleanUp = () => {
refs.result.current = [];
refs.value.current = '';
};
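// Start a new listening session.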
const onListen = async event_3 => {
if (refs.recognition.current) {
// Clean up
onCleanUp();
refs.recognition.current.start();
if (is('function', onListen_)) onListen_(event_3);
}
};
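// Stop listening; the recognition 'end' event then emits the joined result through onChange.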
const onListenStop = async event_4 => {
if (refs.recognition.current) {
refs.recognition.current.stop();
if (is('function', onListenStop_)) onListenStop_(event_4);
}
};
const iconProps = _objectSpread({
size
}, IconProps);
const iconButtonProps = _objectSpread({
size,
loading,
disabled
}, IconButtonProps);
if (!supported) return null;
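// Swap to the stop icon and label while listening.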
let IconToUse = Icon_;
let name = l('Speech to text');
if (status === 'started') {
IconToUse = IconStop;
name = l('Stop');
}
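// Render a Line wrapper containing a Tooltip-wrapped IconButton that toggles listening.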
return /*#__PURE__*/_jsx(Line, _objectSpread(_objectSpread({
ref: item_0 => {
if (ref) {
if (is('function', ref)) ref(item_0); else ref.current = item_0;
}
refs.root.current = item_0;
},
gap: 1,
direction: "row",
align: "center",
className: classNames([staticClassName('SpeechToText', theme) && [`onesy-SpeechToText-root`, `onesy-SpeechToText-size-${size}`], className, classes.root])
}, other), {}, {
children: /*#__PURE__*/_jsx(Tooltip, _objectSpread(_objectSpread({
name: name
}, TooltipProps), {}, {
children: /*#__PURE__*/_jsx(IconButton, _objectSpread(_objectSpread({
onClick: status === 'started' ? onListenStop : onListen
}, iconButtonProps), {}, {
selected: status === 'started',
disabled: disabled !== undefined ? disabled : !supported,
className: classNames([staticClassName('SpeechToText', theme) && [`onesy-SpeechToText-iconButton`], classes.iconButton]),
children: /*#__PURE__*/_jsx(IconToUse, _objectSpread({}, iconProps))
}))
}))
}));
};
SpeechToText.displayName = 'onesy-SpeechToText';
export default SpeechToText;
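// Usage sketch (not part of the file above; both callbacks receive plain strings,
// as wired in the onData and onEnd handlers):
//
// const Example = () => (
//   <SpeechToText
//     language='en-US'
//     continuous
//     interimResults
//     onData={partial => console.log('interim:', partial)}
//     onChange={text => console.log('final:', text)}
//   />
// );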