@onesy/ui-react
UI for React
import _extends from "@babel/runtime/helpers/extends";
import _objectWithoutProperties from "@babel/runtime/helpers/objectWithoutProperties";
import _defineProperty from "@babel/runtime/helpers/defineProperty";
const _excluded = ["size", "pause", "renderMain", "renderTime", "loading", "disabled", "Component", "Icon", "IconConfirm", "IconStart", "IconPause", "IconStop", "onConfirm", "onData", "onStart", "onPause", "onResume", "onStop", "onError", "TooltipProps", "IconButtonProps", "IconProps", "className"];
function ownKeys(e, r) {
  var t = Object.keys(e);
  if (Object.getOwnPropertySymbols) {
    var o = Object.getOwnPropertySymbols(e);
    r && (o = o.filter(function (r) {
      return Object.getOwnPropertyDescriptor(e, r).enumerable;
    })), t.push.apply(t, o);
  }
  return t;
}
function _objectSpread(e) {
  for (var r = 1; r < arguments.length; r++) {
    var t = null != arguments[r] ? arguments[r] : {};
    r % 2 ? ownKeys(Object(t), !0).forEach(function (r) {
      _defineProperty(e, r, t[r]);
    }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(e, Object.getOwnPropertyDescriptors(t)) : ownKeys(Object(t)).forEach(function (r) {
      Object.defineProperty(e, r, Object.getOwnPropertyDescriptor(t, r));
    });
  }
  return e;
}
import React from 'react';
import audioFix from 'webm-duration-fix';
import { getLeadingZerosNumber, is, isEnvironment, wait } from '@onesy/utils';
import { classNames, style as styleMethod, useOnesyTheme } from '@onesy/style-react';
import { OnesyDate, duration } from '@onesy/date';
import IconMaterialMic from '@onesy/icons-material-rounded-react/IconMaterialMicW100';
import IconMaterialSend from '@onesy/icons-material-rounded-react/IconMaterialSendW100';
import IconMaterialPlayArrow from '@onesy/icons-material-rounded-react/IconMaterialPlayArrowW100';
import IconMaterialPause from '@onesy/icons-material-rounded-react/IconMaterialPauseW100';
import IconMaterialStop from '@onesy/icons-material-rounded-react/IconMaterialStopW100';
import LineElement from '../Line';
import FadeElement from '../Fade';
import TypeElement from '../Type';
import TooltipElement from '../Tooltip';
import IconButtonElement from '../IconButton';
import { staticClassName } from '../utils';
const useStyle = styleMethod(theme => ({
'@keyframes pulse': {
'0%': {
opacity: 1
},
'50%': {
opacity: 0
},
'100%': {
opacity: 1
}
},
root: {
width: 'auto',
whiteSpace: 'nowrap'
},
recordDot: {
borderRadius: theme.methods.shape.radius.value(40, 'px'),
background: theme.palette.color.error.main,
animation: `$pulse 1700ms ${theme.transitions.timing_function.emphasized} 240ms infinite`
},
recordDot_size_small: {
width: 8,
height: 8
},
recordDot_size_regular: {
width: 10,
height: 10
},
recordDot_size_large: {
width: 12,
height: 12
},
time: {
'&.onesy-Type-root': {
whiteSpace: 'nowrap'
}
}
}), {
name: 'onesy-AudioRecorder'
});
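// AudioRecorder: records microphone audio via the MediaRecorder API and exposes
// start, pause, resume, stop and confirm controls as icon buttons.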
const AudioRecorder = /*#__PURE__*/React.forwardRef((props_, ref) => {
const theme = useOnesyTheme();
const l = theme.l;
const props = React.useMemo(() => _objectSpread(_objectSpread(_objectSpread({}, theme?.ui?.elements?.all?.props?.default), theme?.ui?.elements?.onesyAudioRecorder?.props?.default), props_), [props_]);
const Line = React.useMemo(() => theme?.elements?.Line || LineElement, [theme]);
const Fade = React.useMemo(() => theme?.elements?.Fade || FadeElement, [theme]);
const Type = React.useMemo(() => theme?.elements?.Type || TypeElement, [theme]);
const Tooltip = React.useMemo(() => theme?.elements?.Tooltip || TooltipElement, [theme]);
const IconButton = React.useMemo(() => theme?.elements?.IconButton || IconButtonElement, [theme]);
const {
size = 'regular',
pause = true,
renderMain,
renderTime,
loading,
disabled,
Component,
Icon: Icon_ = IconMaterialMic,
IconConfirm = IconMaterialSend,
IconStart = IconMaterialPlayArrow,
IconPause = IconMaterialPause,
IconStop = IconMaterialStop,
onConfirm: onConfirm_,
onData,
onStart: onStart_,
onPause: onPause_,
onResume: onResume_,
onStop: onStop_,
onError,
TooltipProps,
IconButtonProps,
IconProps,
className
} = props,
other = _objectWithoutProperties(props, _excluded);
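// UI status: 'initial' (idle) -> 'running' (recording) -> 'paused'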
const [status, setStatus] = React.useState('initial');
const [value, setValue] = React.useState(0);
const {
classes
} = useStyle();
const refs = {
root: React.useRef(null),
mediaRecorder: React.useRef(null),
mediaRecorderBytes: React.useRef([]),
startedAt: React.useRef(0),
valuePaused: React.useRef(0),
value: React.useRef(null),
animationFrame: React.useRef(null),
onData: React.useRef(null),
previousAction: React.useRef('start'),
// fall back to manual duration calculation,
// e.g. for mobile where the blob's duration can't easily be determined
duration: React.useRef(0)
};
refs.onData.current = onData;
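// recording is only supported in a browser that exposes navigator.mediaDevices.getUserMedia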
const supported = isEnvironment('browser') && window.navigator.mediaDevices?.getUserMedia;
refs.value.current = value;
const clear = () => {
cancelAnimationFrame(refs.animationFrame.current);
};
React.useEffect(() => {
return () => {
// Clean up
clear();
};
}, []);
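// requestAnimationFrame loop that keeps the displayed elapsed time in sync while recording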
const update = () => {
setValue(refs.valuePaused.current + (OnesyDate.milliseconds - refs.startedAt.current));
refs.animationFrame.current = requestAnimationFrame(update);
};
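// onStart: request microphone access, wire up a MediaRecorder and start recording plus the timer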
const onStart = React.useCallback(async event => {
// MediaStream
try {
const stream = await navigator.mediaDevices.getUserMedia({
audio: true
});
// media recorder
refs.mediaRecorder.current = new MediaRecorder(stream);
// reset
refs.mediaRecorderBytes.current = [];
// data
refs.mediaRecorder.current.addEventListener('dataavailable', event_ => {
const data = event_.data;
refs.mediaRecorderBytes.current.push(data);
if (is('function', refs.onData.current)) refs.onData.current(data);
});
// stop
refs.mediaRecorder.current.addEventListener('stop', event_ => {
const tracks = stream.getTracks();
tracks.forEach(track => track.stop());
});
// start
refs.mediaRecorder.current.start();
} catch (error) {
if (is('function', onError)) onError(error);
return;
}
// reset duration
refs.duration.current = 0;
// started at milliseconds
refs.startedAt.current = OnesyDate.milliseconds;
// ~60+ fps
refs.animationFrame.current = requestAnimationFrame(update);
setStatus('running');
if (is('function', onStart_)) onStart_(event);
// previous action
refs.previousAction.current = 'start';
}, [onStart_, onError]);
const onPause = React.useCallback(event => {
// media recorder
if (refs.mediaRecorder.current) refs.mediaRecorder.current.pause();
// add so far to duration
refs.duration.current += OnesyDate.milliseconds - refs.startedAt.current;
clear();
// Remember previous value
refs.valuePaused.current = refs.value.current;
setStatus('paused');
if (is('function', onPause_)) onPause_(event);
// previous action
refs.previousAction.current = 'pause';
}, [onPause_]);
const onStop = React.useCallback(event => {
// media recorder
if (refs.mediaRecorder.current) refs.mediaRecorder.current.stop();
clear();
setStatus('initial');
setValue(0);
refs.valuePaused.current = 0;
refs.value.current = 0;
if (is('function', onStop_)) onStop_(event);
}, [onStop_]);
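// onConfirm: stop recording, wait briefly so the final 'dataavailable' chunk is collected,
// patch the webm duration metadata, then hand the Blob and { duration } meta to onConfirm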
const onConfirm = React.useCallback(async event => {
// Stop
onStop(event);
// duration
if (refs.previousAction.current === 'resume') {
// add so far to duration
refs.duration.current += OnesyDate.milliseconds - refs.startedAt.current;
}
const meta = {
// duration in seconds
duration: refs.duration.current / 1e3
};
await wait(40);
// Get the blob
const mimeType = refs.mediaRecorder.current?.mimeType;
let blob = new Blob(refs.mediaRecorderBytes.current, {
type: mimeType
});
try {
blob = await audioFix(blob);
} catch (error) {
console.log('AudioRecorder onConfirm error', error);
}
// clean up
refs.mediaRecorderBytes.current = [];
if (is('function', onConfirm_)) onConfirm_(blob, meta);
}, [onStop, onConfirm_]);
const onResume = React.useCallback(event => {
// media recorder
if (refs.mediaRecorder.current) refs.mediaRecorder.current.resume();
// resumed at milliseconds
refs.startedAt.current = OnesyDate.milliseconds;
// ~60+ fps
refs.animationFrame.current = requestAnimationFrame(update);
setStatus('running');
if (is('function', onResume_)) onResume_(event);
// previous action
refs.previousAction.current = 'resume';
}, [onResume_]);
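// format elapsed milliseconds as [HH:]MM:SS (hours shown only when non-zero)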
const valueFormat = valueNew_ => {
let valueNew = '';
const valueDuration = duration(valueNew_, undefined, true, undefined, ['hour', 'minute', 'second', 'millisecond']);
if (valueDuration.hour > 0) valueNew += `${getLeadingZerosNumber(valueDuration.hour)}:`;
valueNew += `${getLeadingZerosNumber(valueDuration.minute || 0)}:`;
valueNew += `${getLeadingZerosNumber(valueDuration.second || 0)}`;
// valueNew += `${getLeadingZerosNumber(Math.floor((valueDuration.millisecond || 0) / 10))}`;
return valueNew;
};
const value_ = status === 'initial' ? '00:00' : valueFormat(value);
const iconProps = _objectSpread({
size
}, IconProps);
const iconButtonProps = _objectSpread({
size,
loading,
disabled
}, IconButtonProps);
return /*#__PURE__*/React.createElement(Line, _extends({
ref: item => {
if (ref) {
if (is('function', ref)) ref(item); else ref.current = item;
}
refs.root.current = item;
},
gap: 1,
direction: "row",
align: "center",
Component: Component,
className: classNames([staticClassName('AudioRecorder', theme) && [`onesy-AudioRecorder-root`, `onesy-AudioRecorder-size-${size}`], className, classes.root])
}, other), status !== 'initial' && /*#__PURE__*/React.createElement(Fade, {
in: true,
add: true
}, /*#__PURE__*/React.createElement(Line, {
gap: 1,
direction: "row",
align: "center"
}, status === 'running' && /*#__PURE__*/React.createElement("span", {
className: classNames([classes.recordDot, classes[`recordDot_size_${size}`]])
}), is('function', renderTime) ? renderTime(value_) : /*#__PURE__*/React.createElement(Type, {
version: size === 'large' ? 'b1' : size === 'regular' ? 'b2' : 'b3',
className: classNames([staticClassName('AudioRecorder', theme) && ['onesy-AudioRecorder-time'], classes.time])
}, value_))), status === 'initial' && /*#__PURE__*/React.createElement(Fade, {
in: true,
add: true
}, /*#__PURE__*/React.createElement("span", null, is('function', renderMain) ? renderMain({
onStart,
supported
}) : /*#__PURE__*/React.createElement(React.Fragment, null, /*#__PURE__*/React.createElement(Tooltip, _extends({
name: l('Voice record')
}, TooltipProps), /*#__PURE__*/React.createElement(IconButton, _extends({
onClick: onStart,
disabled: !supported
}, iconButtonProps), /*#__PURE__*/React.createElement(Icon_, iconProps)))))), status === 'running' && /*#__PURE__*/React.createElement(Fade, {
in: true,
add: true
}, /*#__PURE__*/React.createElement("span", null, /*#__PURE__*/React.createElement(Tooltip, _extends({
name: l('Cancel')
}, TooltipProps), /*#__PURE__*/React.createElement(IconButton, _extends({
onClick: onStop
}, iconButtonProps), /*#__PURE__*/React.createElement(IconStop, iconProps))), pause && /*#__PURE__*/React.createElement(Tooltip, _extends({
name: l('Pause')
}, TooltipProps), /*#__PURE__*/React.createElement(IconButton, _extends({
onClick: onPause
}, iconButtonProps), /*#__PURE__*/React.createElement(IconPause, iconProps))), /*#__PURE__*/React.createElement(Tooltip, _extends({
name: l('Confirm')
}, TooltipProps), /*#__PURE__*/React.createElement(IconButton, _extends({
onClick: onConfirm
}, iconButtonProps), /*#__PURE__*/React.createElement(IconConfirm, iconProps))))), status === 'paused' && /*#__PURE__*/React.createElement(Fade, {
in: true,
add: true
}, /*#__PURE__*/React.createElement("span", null, /*#__PURE__*/React.createElement(Tooltip, _extends({
name: l('Cancel')
}, TooltipProps), /*#__PURE__*/React.createElement(IconButton, _extends({
onClick: onStop
}, iconButtonProps), /*#__PURE__*/React.createElement(IconStop, iconProps))), /*#__PURE__*/React.createElement(Tooltip, _extends({
name: l('Resume')
}, TooltipProps), /*#__PURE__*/React.createElement(IconButton, _extends({
onClick: onResume
}, iconButtonProps), /*#__PURE__*/React.createElement(IconStart, iconProps))), /*#__PURE__*/React.createElement(Tooltip, _extends({
name: l('Confirm')
}, TooltipProps), /*#__PURE__*/React.createElement(IconButton, _extends({
onClick: onConfirm
}, iconButtonProps), /*#__PURE__*/React.createElement(IconConfirm, iconProps))))));
});
AudioRecorder.displayName = 'onesy-AudioRecorder';
export default AudioRecorder;
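A minimal usage sketch: the import path, the Example component and its handlers below are illustrative assumptions, not part of the package source above. As that source shows, onConfirm receives the recorded audio Blob together with a meta object whose duration is in seconds, and onError is called when microphone access fails.

import React from 'react';
import AudioRecorder from '@onesy/ui-react/AudioRecorder'; // assumed path

const Example = () => {
  // called with the recorded Blob and { duration } once the user confirms
  const onConfirm = (blob, meta) => console.log('Recorded audio', blob, `${meta.duration}s`);

  return React.createElement(AudioRecorder, {
    size: 'regular',
    pause: true,
    onConfirm,
    onError: error => console.log('Microphone unavailable', error)
  });
};

export default Example;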