@onesy/ui-react
UI for React
"use strict";
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
var _objectWithoutProperties2 = _interopRequireDefault(require("@babel/runtime/helpers/objectWithoutProperties"));
var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
var _react = _interopRequireDefault(require("react"));
var _utils = require("@onesy/utils");
var _styleReact = require("@onesy/style-react");
var _IconMaterialStopW = _interopRequireDefault(require("@onesy/icons-material-rounded-react/IconMaterialStopW100"));
var _IconMaterialSpeechToTextW = _interopRequireDefault(require("@onesy/icons-material-rounded-react/IconMaterialSpeechToTextW100"));
var _Line = _interopRequireDefault(require("../Line"));
var _Tooltip = _interopRequireDefault(require("../Tooltip"));
var _IconButton = _interopRequireDefault(require("../IconButton"));
var _utils2 = require("../utils");
var _jsxRuntime = require("react/jsx-runtime");
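// Props consumed by this component; everything else ends up in `other` below and is spread onto the root Line element.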
const _excluded = ["ref", "size", "SpeechRecognition", "continuous", "interimResults", "grammars", "language", "maxAlternatives", "join", "loading", "disabled", "Icon", "IconStop", "onChange", "onData", "onListen", "onListenStop", "onStart", "onStop", "onError", "TooltipProps", "IconButtonProps", "IconProps", "className"];
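// Babel-generated helpers implementing the object spread operator ({ ...a, ...b }).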
function ownKeys(e, r) { var t = Object.keys(e); if (Object.getOwnPropertySymbols) { var o = Object.getOwnPropertySymbols(e); r && (o = o.filter(function (r) { return Object.getOwnPropertyDescriptor(e, r).enumerable; })), t.push.apply(t, o); } return t; }
function _objectSpread(e) { for (var r = 1; r < arguments.length; r++) { var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? ownKeys(Object(t), !0).forEach(function (r) { (0, _defineProperty2.default)(e, r, t[r]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(e, Object.getOwnPropertyDescriptors(t)) : ownKeys(Object(t)).forEach(function (r) { Object.defineProperty(e, r, Object.getOwnPropertyDescriptor(t, r)); }); } return e; }
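// Component styles: the icon button scales down slightly while pressed, using a transform transition.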
const useStyle = (0, _styleReact.style)(theme => ({
root: {},
iconButton: {
'&.onesy-IconButton-root': {
transition: theme.methods.transitions.make('transform'),
'&:active': {
transform: 'scale(0.94)'
}
}
}
}), {
name: 'onesy-SpeechToText'
});
const SpeechToText = props_ => {
var _theme$ui, _theme$ui2, _theme$elements, _theme$elements2, _theme$elements3;
const theme = (0, _styleReact.useOnesyTheme)();
const l = theme.l;
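// Resolve props in order of precedence: theme-wide defaults for all elements, then theme defaults for onesySpeechToText, then the props passed by the caller.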
const props = _objectSpread(_objectSpread(_objectSpread({}, theme === null || theme === void 0 || (_theme$ui = theme.ui) === null || _theme$ui === void 0 || (_theme$ui = _theme$ui.elements) === null || _theme$ui === void 0 || (_theme$ui = _theme$ui.all) === null || _theme$ui === void 0 || (_theme$ui = _theme$ui.props) === null || _theme$ui === void 0 ? void 0 : _theme$ui.default), theme === null || theme === void 0 || (_theme$ui2 = theme.ui) === null || _theme$ui2 === void 0 || (_theme$ui2 = _theme$ui2.elements) === null || _theme$ui2 === void 0 || (_theme$ui2 = _theme$ui2.onesySpeechToText) === null || _theme$ui2 === void 0 || (_theme$ui2 = _theme$ui2.props) === null || _theme$ui2 === void 0 ? void 0 : _theme$ui2.default), props_);
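// Line, Tooltip and IconButton can be swapped out via theme.elements; otherwise fall back to the local implementations.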
const Line = (theme === null || theme === void 0 || (_theme$elements = theme.elements) === null || _theme$elements === void 0 ? void 0 : _theme$elements.Line) || _Line.default;
const Tooltip = (theme === null || theme === void 0 || (_theme$elements2 = theme.elements) === null || _theme$elements2 === void 0 ? void 0 : _theme$elements2.Tooltip) || _Tooltip.default;
const IconButton = (theme === null || theme === void 0 || (_theme$elements3 = theme.elements) === null || _theme$elements3 === void 0 ? void 0 : _theme$elements3.IconButton) || _IconButton.default;
const {
ref,
size = 'regular',
SpeechRecognition: SpeechRecognition_,
continuous = true,
interimResults = true,
grammars,
language = 'en-US',
maxAlternatives = 1,
join = '. ',
loading,
disabled,
Icon: Icon_ = _IconMaterialSpeechToTextW.default,
IconStop = _IconMaterialStopW.default,
onChange,
onData: onData_,
onListen: onListen_,
onListenStop: onListenStop_,
onStart: onStart_,
onStop: onStop_,
onError: onError_,
TooltipProps,
IconButtonProps,
IconProps,
className
} = props,
other = (0, _objectWithoutProperties2.default)(props, _excluded);
const [status, setStatus] = _react.default.useState('initial');
const {
classes
} = useStyle();
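// Refs that persist across renders: the root DOM node, the SpeechRecognition instance, the in-progress transcript, the accumulated final transcripts, and a mirror of the current status.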
const refs = {
root: _react.default.useRef(undefined),
recognition: _react.default.useRef(undefined),
value: _react.default.useRef(''),
result: _react.default.useRef([]),
status: _react.default.useRef(status)
};
refs.status.current = status;
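// Feature-detect the Web Speech API in the browser (standard or webkit-prefixed constructor).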
const supported = (0, _utils.isEnvironment)('browser') && ('SpeechRecognition' in window || 'webkitSpeechRecognition' in window);
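// 'result' event: rebuild the running transcript from the first alternative of every result so far.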
const onData = event => {
const value = Array.from(event.results || []).map(item => {
var _item$;
return ((_item$ = item[0]) === null || _item$ === void 0 ? void 0 : _item$.transcript) || '';
}).join('');
refs.value.current = value;
if (!value) {
refs.result.current.push(value);
}
if ((0, _utils.is)('function', onData_)) onData_(value);
};
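// 'start'/'end' events track the listening status; on 'end', the final transcript is appended and the joined result is emitted via onChange.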
const onStart = async event_0 => {
setStatus('started');
if ((0, _utils.is)('function', onStart_)) onStart_(event_0);
};
const onEnd = event_1 => {
setStatus('initial');
// result
refs.result.current.push(refs.value.current);
const response = refs.result.current.join(join);
if ((0, _utils.is)('function', onChange)) onChange(response);
};
const onError = event_2 => {
if ((0, _utils.is)('function', onError_)) onError_(event_2);
};
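// (Re)create the recognition instance whenever an option changes: abort any previous instance, apply the options, and wire up the event listeners; the effect cleanup removes them again.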
_react.default.useEffect(() => {
// previous clean up
if (refs.recognition.current) {
refs.recognition.current.abort();
}
// make a new instance
if (supported) {
// eslint-disable-next-line @typescript-eslint/no-use-before-define
const SpeechRecognitionClass = SpeechRecognition_ || window.SpeechRecognition || window.webkitSpeechRecognition;
if (SpeechRecognitionClass) {
refs.recognition.current = new SpeechRecognitionClass();
// options
if (continuous !== undefined) refs.recognition.current.continuous = continuous;
if (interimResults !== undefined) refs.recognition.current.interimResults = interimResults;
if (grammars !== undefined) refs.recognition.current.grammars = grammars;
if (language !== undefined) refs.recognition.current.lang = language;
if (maxAlternatives !== undefined) refs.recognition.current.maxAlternatives = maxAlternatives;
// events: lifecycle, results, and errors
refs.recognition.current.addEventListener('start', onStart);
refs.recognition.current.addEventListener('end', onEnd);
refs.recognition.current.addEventListener('result', onData);
refs.recognition.current.addEventListener('error', onError);
}
}
return () => {
// Clean up
if (refs.recognition.current) {
refs.recognition.current.removeEventListener('start', onStart);
refs.recognition.current.removeEventListener('end', onEnd);
refs.recognition.current.removeEventListener('result', onData);
refs.recognition.current.removeEventListener('error', onError);
}
};
}, [SpeechRecognition_, supported, continuous, interimResults, grammars, language, maxAlternatives]);
const onCleanUp = () => {
refs.result.current = [];
refs.value.current = '';
};
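// Click handlers: start listening (after clearing the transcript buffers) or stop the current session.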
const onListen = async event_3 => {
if (refs.recognition.current) {
// Clean up
onCleanUp();
refs.recognition.current.start();
if ((0, _utils.is)('function', onListen_)) onListen_(event_3);
}
};
const onListenStop = async event_4 => {
if (refs.recognition.current) {
refs.recognition.current.stop();
if ((0, _utils.is)('function', onListenStop_)) onListenStop_(event_4);
}
};
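// Forward size to the icon, and size/loading/disabled to the icon button, merged with any caller-supplied IconProps/IconButtonProps.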
const iconProps = _objectSpread({
size
}, IconProps);
const iconButtonProps = _objectSpread({
size,
loading,
disabled
}, IconButtonProps);
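// Render nothing without Web Speech API support; while listening, swap to the stop icon and the 'Stop' tooltip label.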
if (!supported) return null;
let IconToUse = Icon_;
let name = l('Speech to text');
if (status === 'started') {
IconToUse = IconStop;
name = l('Stop');
}
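// Root layout: a Line row wrapping a tooltip-labelled icon button that toggles listening on click.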
return /*#__PURE__*/(0, _jsxRuntime.jsx)(Line, _objectSpread(_objectSpread({
ref: item_0 => {
if (ref) {
if ((0, _utils.is)('function', ref)) ref(item_0);else ref.current = item_0;
}
refs.root.current = item_0;
},
gap: 1,
direction: "row",
align: "center",
className: (0, _styleReact.classNames)([(0, _utils2.staticClassName)('SpeechToText', theme) && [`onesy-SpeechToText-root`, `onesy-SpeechToText-size-${size}`], className, classes.root])
}, other), {}, {
children: /*#__PURE__*/(0, _jsxRuntime.jsx)(Tooltip, _objectSpread(_objectSpread({
name: name
}, TooltipProps), {}, {
children: /*#__PURE__*/(0, _jsxRuntime.jsx)(IconButton, _objectSpread(_objectSpread({
onClick: status === 'started' ? onListenStop : onListen
}, iconButtonProps), {}, {
selected: status === 'started',
disabled: disabled !== undefined ? disabled : !supported,
className: (0, _styleReact.classNames)([(0, _utils2.staticClassName)('SpeechToText', theme) && [`onesy-SpeechToText-iconButton`], classes.iconButton]),
children: /*#__PURE__*/(0, _jsxRuntime.jsx)(IconToUse, _objectSpread({}, iconProps))
}))
}))
}));
};
SpeechToText.displayName = 'onesy-SpeechToText';
var _default = exports.default = SpeechToText;
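// A minimal usage sketch (assumed consumer-side code, not part of this module). The
// import path and root re-export are assumptions; this file itself only provides the
// default export. The component reports interim text via onData and the joined final
// transcript via onChange, so a consumer typically just stores that value in state.
//
//   import React from 'react';
//   import { SpeechToText } from '@onesy/ui-react'; // assumed root re-export
//
//   const Example = () => {
//     const [text, setText] = React.useState('');
//
//     return (
//       <>
//         <SpeechToText
//           language='en-US'
//           onData={value => console.log('interim', value)} // partial transcript while listening
//           onChange={value => setText(value)}              // joined transcript after listening stops
//         />
//
//         <p>{text}</p>
//       </>
//     );
//   };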