
@selfcommunity/react-ui

React UI components to integrate a Community created with the SelfCommunity Platform.

"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.VideoConference = void 0; const tslib_1 = require("tslib"); const jsx_runtime_1 = require("react/jsx-runtime"); const React = tslib_1.__importStar(require("react")); const components_core_1 = require("@livekit/components-core"); const livekit_client_1 = require("livekit-client"); const components_react_1 = require("@livekit/components-react"); const ParticipantTile_1 = require("./ParticipantTile"); const ControlBar_1 = require("./ControlBar"); const react_1 = require("react"); const useLiveStreamCheck_1 = require("./useLiveStreamCheck"); const FocusLayout_1 = require("./FocusLayout"); const react_core_1 = require("@selfcommunity/react-core"); const classnames_1 = tslib_1.__importDefault(require("classnames")); const styles_1 = require("@mui/material/styles"); const material_1 = require("@mui/material"); const system_1 = require("@mui/system"); const NoParticipants_1 = tslib_1.__importDefault(require("./NoParticipants")); const LiveStreamSettingsMenu_1 = tslib_1.__importDefault(require("./LiveStreamSettingsMenu")); const track_processors_1 = require("@livekit/track-processors"); const utils_1 = require("@selfcommunity/utils"); const LiveStream_1 = require("../../../constants/LiveStream"); const Icon_1 = tslib_1.__importDefault(require("@mui/material/Icon")); const notistack_1 = require("notistack"); const react_intl_1 = require("react-intl"); const PREFIX = 'SCVideoConference'; const classes = { root: `${PREFIX}-root` }; const Root = (0, styles_1.styled)(material_1.Box, { name: PREFIX, slot: 'Root', overridesResolver: (props, styles) => styles.root })(({ theme }) => ({ '& .lk-chat': { height: '100%' } })); /** * The `VideoConference` ready-made component is your drop-in solution for a classic video conferencing application. * It provides functionality such as focusing on one participant, grid view with pagination to handle large numbers * of participants, basic non-persistent chat, screen sharing, and more. * */ function VideoConference(inProps) { var _a, _b, _c; // PROPS const props = (0, system_1.useThemeProps)({ props: inProps, name: PREFIX }); const { className, chatMessageFormatter, chatMessageDecoder, chatMessageEncoder, SettingsComponent, speakerFocused, disableChat = false, disableMicrophone = false, disableCamera = false, disableShareScreen = false, hideParticipantsList = false, showSettings } = props, rest = tslib_1.__rest(props, ["className", "chatMessageFormatter", "chatMessageDecoder", "chatMessageEncoder", "SettingsComponent", "speakerFocused", "disableChat", "disableMicrophone", "disableCamera", "disableShareScreen", "hideParticipantsList", "showSettings"]); // STATE const [widgetState, setWidgetState] = React.useState({ showChat: false, unreadMessages: 0, showSettings: showSettings || false }); const [focusInitialized, setFocusInitialized] = React.useState(false); const lastAutoFocusedScreenShareTrack = React.useRef(null); // HOOKS const scUserContext = (0, react_core_1.useSCUser)(); const [blurEnabled, setBlurEnabled] = React.useState((0, utils_1.isClientSideRendering)() ? ((_a = window === null || window === void 0 ? void 0 : window.localStorage) === null || _a === void 0 ? 
void 0 : _a.getItem(LiveStream_1.CHOICE_VIDEO_BLUR_EFFECT)) === 'true' : false); const [processorPending, setProcessorPending] = React.useState(false); const tracks = (0, components_react_1.useTracks)([ { source: livekit_client_1.Track.Source.Camera, withPlaceholder: true }, { source: livekit_client_1.Track.Source.ScreenShare, withPlaceholder: false } ], { updateOnlyOn: [livekit_client_1.RoomEvent.ActiveSpeakersChanged], onlySubscribed: false }); const tracksNoParticipants = (0, react_1.useMemo)(() => tracks.filter((t) => t.participant.name === scUserContext.user.username || (speakerFocused && t.participant.name === speakerFocused.username) || t.source === 'screen_share'), [tracks, scUserContext.user]); const handleBlur = React.useCallback((event) => { var _a, _b; if (event.target) { if ('checked' in event.target) { setBlurEnabled((_a = event.target) === null || _a === void 0 ? void 0 : _a.checked); } else { setBlurEnabled((enabled) => !enabled); } } else { setBlurEnabled((enabled) => !enabled); } (_b = window === null || window === void 0 ? void 0 : window.localStorage) === null || _b === void 0 ? void 0 : _b.setItem(LiveStream_1.CHOICE_VIDEO_BLUR_EFFECT, (!blurEnabled).toString()); }, [setBlurEnabled, blurEnabled]); const participants = (0, components_react_1.useParticipants)(); const layoutContext = (0, components_react_1.useCreateLayoutContext)(); const screenShareTracks = tracks.filter(components_core_1.isTrackReference).filter((track) => track.publication.source === livekit_client_1.Track.Source.ScreenShare); const focusTrack = (_b = (0, components_react_1.usePinnedTracks)(layoutContext)) === null || _b === void 0 ? void 0 : _b[0]; const carouselTracks = tracks.filter((track) => !(0, components_core_1.isEqualTrackRef)(track, focusTrack)); const { cameraTrack } = (0, components_react_1.useLocalParticipant)(); const { enqueueSnackbar } = (0, notistack_1.useSnackbar)(); (0, useLiveStreamCheck_1.useLivestreamCheck)(); /** * widgetUpdate * @param state */ const widgetUpdate = (state) => { components_core_1.log.debug('updating widget state', state); setWidgetState(state); }; /** * handleFocusStateChange * @param state */ const handleFocusStateChange = (state) => { var _a, _b; components_core_1.log.debug('updating widget state', state); if (state && state.participant) { const updatedFocusTrack = tracks.find((tr) => tr.participant.identity === state.participant.identity); if (updatedFocusTrack) { (_b = (_a = layoutContext.pin).dispatch) === null || _b === void 0 ? void 0 : _b.call(_a, { msg: 'set_pin', trackReference: updatedFocusTrack }); } } }; (0, react_1.useEffect)(() => { var _a, _b, _c, _d, _e, _f, _g, _h; // If screen share tracks are published, and no pin is set explicitly, auto set the screen share. if (screenShareTracks.some((track) => track.publication.isSubscribed) && lastAutoFocusedScreenShareTrack.current === null) { components_core_1.log.debug('Auto set screen share focus:', { newScreenShareTrack: screenShareTracks[0] }); (_b = (_a = layoutContext.pin).dispatch) === null || _b === void 0 ? void 0 : _b.call(_a, { msg: 'set_pin', trackReference: screenShareTracks[0] }); lastAutoFocusedScreenShareTrack.current = screenShareTracks[0]; } else if (lastAutoFocusedScreenShareTrack.current && !screenShareTracks.some((track) => { var _a, _b; return track.publication.trackSid === ((_b = (_a = lastAutoFocusedScreenShareTrack.current) === null || _a === void 0 ? void 0 : _a.publication) === null || _b === void 0 ? 
void 0 : _b.trackSid); })) { components_core_1.log.debug('Auto clearing screen share focus.'); (_d = (_c = layoutContext.pin).dispatch) === null || _d === void 0 ? void 0 : _d.call(_c, { msg: 'clear_pin' }); lastAutoFocusedScreenShareTrack.current = null; } if (focusTrack) { let updatedFocusTrack; const isFocusTrackParticipantExist = participants.find((pt) => pt.identity === focusTrack.participant.identity); if (!isFocusTrackParticipantExist) { // Focus track is relative to a participant that has left the room updatedFocusTrack = tracks.find((tr) => tr.participant.identity === scUserContext.user.id.toString()); (_f = (_e = layoutContext.pin).dispatch) === null || _f === void 0 ? void 0 : _f.call(_e, { msg: 'set_pin', trackReference: updatedFocusTrack }); } else if (!(0, components_core_1.isTrackReference)(focusTrack)) { // You are not subscribet to the track updatedFocusTrack = tracks.find((tr) => tr.participant.identity === focusTrack.participant.identity && tr.source === focusTrack.source); if (updatedFocusTrack !== focusTrack && (0, components_core_1.isTrackReference)(updatedFocusTrack)) { (_h = (_g = layoutContext.pin).dispatch) === null || _h === void 0 ? void 0 : _h.call(_g, { msg: 'set_pin', trackReference: updatedFocusTrack }); } } } }, [ screenShareTracks.map((ref) => `${ref.publication.trackSid}_${ref.publication.isSubscribed}`).join(), (_c = focusTrack === null || focusTrack === void 0 ? void 0 : focusTrack.publication) === null || _c === void 0 ? void 0 : _c.trackSid, tracks, participants, speakerFocused ]); (0, react_1.useEffect)(() => { var _a, _b; if (speakerFocused && !focusInitialized) { const speaker = participants.find((pt) => { return pt.name === speakerFocused.username; }); if (speaker) { const updatedFocusTrack = tracks.find((tr) => { if (tr) { return tr.participant.identity === speaker.identity; } return false; }); (_b = (_a = layoutContext.pin).dispatch) === null || _b === void 0 ? void 0 : _b.call(_a, { msg: 'set_pin', trackReference: updatedFocusTrack }); setFocusInitialized(true); } } }, [tracks, participants, speakerFocused]); (0, react_1.useEffect)(() => { var _a; const localCamTrack = cameraTrack === null || cameraTrack === void 0 ? void 0 : cameraTrack.track; if (localCamTrack) { setProcessorPending(true); try { if (blurEnabled && !localCamTrack.getProcessor()) { localCamTrack.setProcessor((0, track_processors_1.BackgroundBlur)(20)); } else if (!blurEnabled) { localCamTrack.stopProcessor(); } } catch (e) { console.log(e); setBlurEnabled(false); (_a = window === null || window === void 0 ? void 0 : window.localStorage) === null || _a === void 0 ? void 0 : _a.setItem(LiveStream_1.CHOICE_VIDEO_BLUR_EFFECT, false.toString()); enqueueSnackbar((0, jsx_runtime_1.jsx)(react_intl_1.FormattedMessage, { id: "ui.liveStreamRoom.errorApplyVideoEffect", defaultMessage: "ui.contributionActionMenu.errorApplyVideoEffect" }), { variant: 'warning', autoHideDuration: 3000 }); } finally { setProcessorPending(false); } } }, [blurEnabled, cameraTrack]); return ((0, jsx_runtime_1.jsxs)(Root, Object.assign({ className: (0, classnames_1.default)(className, classes.root, 'lk-video-conference') }, rest, { children: [(0, components_core_1.isWeb)() && ((0, jsx_runtime_1.jsxs)(components_react_1.LayoutContextProvider, Object.assign({ value: layoutContext, onPinChange: handleFocusStateChange, onWidgetChange: widgetUpdate }, { children: [(0, jsx_runtime_1.jsxs)("div", Object.assign({ className: "lk-video-conference-inner" }, { children: [!focusTrack ? 
((0, jsx_runtime_1.jsx)("div", Object.assign({ className: "lk-grid-layout-wrapper" }, { children: (0, jsx_runtime_1.jsx)(components_react_1.GridLayout, Object.assign({ tracks: hideParticipantsList ? tracksNoParticipants : tracks }, { children: (0, jsx_runtime_1.jsx)(ParticipantTile_1.ParticipantTile, {}) })) }))) : ((0, jsx_runtime_1.jsx)("div", Object.assign({ className: "lk-focus-layout-wrapper" }, { children: hideParticipantsList ? ((0, jsx_runtime_1.jsx)(FocusLayout_1.FocusLayoutContainerNoParticipants, { children: focusTrack && (0, jsx_runtime_1.jsx)(FocusLayout_1.FocusLayout, { trackRef: focusTrack }) })) : ((0, jsx_runtime_1.jsxs)(FocusLayout_1.FocusLayoutContainer, { children: [carouselTracks.length ? ((0, jsx_runtime_1.jsx)(components_react_1.CarouselLayout, Object.assign({ tracks: carouselTracks }, { children: (0, jsx_runtime_1.jsx)(ParticipantTile_1.ParticipantTile, {}) }))) : ((0, jsx_runtime_1.jsx)(NoParticipants_1.default, {})), focusTrack && (0, jsx_runtime_1.jsx)(FocusLayout_1.FocusLayout, { trackRef: focusTrack })] })) }))), (0, jsx_runtime_1.jsx)(ControlBar_1.ControlBar, { controls: Object.assign({ chat: !disableChat, microphone: !disableMicrophone, camera: !disableCamera, screenShare: !disableShareScreen }, { settings: true }) })] })), !disableChat && ((0, jsx_runtime_1.jsx)(components_react_1.Chat, { style: { display: widgetState.showChat ? 'grid' : 'none' }, messageFormatter: chatMessageFormatter, messageEncoder: chatMessageEncoder, messageDecoder: chatMessageDecoder })), (0, jsx_runtime_1.jsxs)("div", Object.assign({ className: "lk-settings-menu-modal", style: { display: widgetState.showSettings ? 'block' : 'none' } }, { children: [(0, jsx_runtime_1.jsx)(material_1.IconButton, Object.assign({ className: "lk-settings-menu-modal-icon-close", onClick: () => { var _a, _b; return (_b = layoutContext === null || layoutContext === void 0 ? void 0 : (_a = layoutContext.widget).dispatch) === null || _b === void 0 ? void 0 : _b.call(_a, { msg: 'toggle_settings' }); } }, { children: (0, jsx_runtime_1.jsx)(Icon_1.default, { children: "close" }) })), SettingsComponent ? ((0, jsx_runtime_1.jsx)(SettingsComponent, {})) : ((0, jsx_runtime_1.jsx)(jsx_runtime_1.Fragment, { children: (0, jsx_runtime_1.jsx)(LiveStreamSettingsMenu_1.default, { onlyContentMenu: true, actionBlurDisabled: !cameraTrack && !disableCamera, blurEnabled: blurEnabled, handleBlur: handleBlur }) }))] }))] }))), (0, jsx_runtime_1.jsx)(components_react_1.RoomAudioRenderer, {}), (0, jsx_runtime_1.jsx)(components_react_1.ConnectionStateToast, {})] }))); } exports.VideoConference = VideoConference;