@selfcommunity/react-ui

React UI Components to integrate a Community created with SelfCommunity Platform.

import { __rest } from "tslib";
import { jsx as _jsx, jsxs as _jsxs, Fragment as _Fragment } from "react/jsx-runtime";
import * as React from 'react';
import { isEqualTrackRef, isTrackReference, isWeb, log } from '@livekit/components-core';
import { RoomEvent, Track } from 'livekit-client';
import { CarouselLayout, Chat, ConnectionStateToast, GridLayout, LayoutContextProvider, RoomAudioRenderer, useCreateLayoutContext, useLocalParticipant, useParticipants, usePinnedTracks, useTracks } from '@livekit/components-react';
import { ParticipantTile } from './ParticipantTile';
import { ControlBar } from './ControlBar';
import { useEffect, useMemo } from 'react';
import { useLivestreamCheck } from './useLiveStreamCheck';
import { FocusLayout, FocusLayoutContainer, FocusLayoutContainerNoParticipants } from './FocusLayout';
import { useSCUser } from '@selfcommunity/react-core';
import classNames from 'classnames';
import { styled } from '@mui/material/styles';
import { Box, IconButton } from '@mui/material';
import { useThemeProps } from '@mui/system';
import NoParticipants from './NoParticipants';
import LiveStreamSettingsMenu from './LiveStreamSettingsMenu';
import { BackgroundBlur } from '@livekit/track-processors';
import { isClientSideRendering } from '@selfcommunity/utils';
import { CHOICE_VIDEO_BLUR_EFFECT } from '../../../constants/LiveStream';
import Icon from '@mui/material/Icon';
import { useSnackbar } from 'notistack';
import { FormattedMessage } from 'react-intl';

const PREFIX = 'SCVideoConference';
const classes = {
  root: `${PREFIX}-root`
};
const Root = styled(Box, {
  name: PREFIX,
  slot: 'Root',
  overridesResolver: (props, styles) => styles.root
})(({ theme }) => ({
  '& .lk-chat': {
    height: '100%'
  }
}));

/**
 * The `VideoConference` ready-made component is your drop-in solution for a classic video conferencing application.
 * It provides functionality such as focusing on one participant, grid view with pagination to handle large numbers
 * of participants, basic non-persistent chat, screen sharing, and more.
 */
export function VideoConference(inProps) {
  var _a, _b, _c;
  // PROPS
  const props = useThemeProps({ props: inProps, name: PREFIX });
  const { className, chatMessageFormatter, chatMessageDecoder, chatMessageEncoder, SettingsComponent, speakerFocused, disableChat = false, disableMicrophone = false, disableCamera = false, disableShareScreen = false, hideParticipantsList = false, showSettings } = props,
    rest = __rest(props, ["className", "chatMessageFormatter", "chatMessageDecoder", "chatMessageEncoder", "SettingsComponent", "speakerFocused", "disableChat", "disableMicrophone", "disableCamera", "disableShareScreen", "hideParticipantsList", "showSettings"]);
  // STATE
  const [widgetState, setWidgetState] = React.useState({
    showChat: false,
    unreadMessages: 0,
    showSettings: showSettings || false
  });
  const [focusInitialized, setFocusInitialized] = React.useState(false);
  const lastAutoFocusedScreenShareTrack = React.useRef(null);
  // HOOKS
  const scUserContext = useSCUser();
  const [blurEnabled, setBlurEnabled] = React.useState(isClientSideRendering()
    ? ((_a = window === null || window === void 0 ? void 0 : window.localStorage) === null || _a === void 0 ? void 0 : _a.getItem(CHOICE_VIDEO_BLUR_EFFECT)) === 'true'
    : false);
  const [processorPending, setProcessorPending] = React.useState(false);
  const tracks = useTracks([
    { source: Track.Source.Camera, withPlaceholder: true },
    { source: Track.Source.ScreenShare, withPlaceholder: false }
  ], { updateOnlyOn: [RoomEvent.ActiveSpeakersChanged], onlySubscribed: false });
  // Tracks shown when the participants list is hidden: only the local user, the focused speaker and screen shares.
  const tracksNoParticipants = useMemo(() => tracks.filter((t) => t.participant.name === scUserContext.user.username ||
    (speakerFocused && t.participant.name === speakerFocused.username) ||
    t.source === 'screen_share'), [tracks, scUserContext.user]);
  // Toggle the blur preference and persist the choice in localStorage.
  const handleBlur = React.useCallback((event) => {
    var _a, _b;
    if (event.target) {
      if ('checked' in event.target) {
        setBlurEnabled((_a = event.target) === null || _a === void 0 ? void 0 : _a.checked);
      } else {
        setBlurEnabled((enabled) => !enabled);
      }
    } else {
      setBlurEnabled((enabled) => !enabled);
    }
    (_b = window === null || window === void 0 ? void 0 : window.localStorage) === null || _b === void 0 ? void 0 : _b.setItem(CHOICE_VIDEO_BLUR_EFFECT, (!blurEnabled).toString());
  }, [setBlurEnabled, blurEnabled]);
  const participants = useParticipants();
  const layoutContext = useCreateLayoutContext();
  const screenShareTracks = tracks.filter(isTrackReference).filter((track) => track.publication.source === Track.Source.ScreenShare);
  const focusTrack = (_b = usePinnedTracks(layoutContext)) === null || _b === void 0 ? void 0 : _b[0];
  const carouselTracks = tracks.filter((track) => !isEqualTrackRef(track, focusTrack));
  const { cameraTrack } = useLocalParticipant();
  const { enqueueSnackbar } = useSnackbar();
  useLivestreamCheck();
  /**
   * widgetUpdate
   * @param state
   */
  const widgetUpdate = (state) => {
    log.debug('updating widget state', state);
    setWidgetState(state);
  };
  /**
   * handleFocusStateChange
   * @param state
   */
  const handleFocusStateChange = (state) => {
    var _a, _b;
    log.debug('updating widget state', state);
    if (state && state.participant) {
      const updatedFocusTrack = tracks.find((tr) => tr.participant.identity === state.participant.identity);
      if (updatedFocusTrack) {
        (_b = (_a = layoutContext.pin).dispatch) === null || _b === void 0 ? void 0 : _b.call(_a, { msg: 'set_pin', trackReference: updatedFocusTrack });
      }
    }
  };
  useEffect(() => {
    var _a, _b, _c, _d, _e, _f, _g, _h;
    // If screen share tracks are published, and no pin is set explicitly, auto set the screen share.
    if (screenShareTracks.some((track) => track.publication.isSubscribed) && lastAutoFocusedScreenShareTrack.current === null) {
      log.debug('Auto set screen share focus:', { newScreenShareTrack: screenShareTracks[0] });
      (_b = (_a = layoutContext.pin).dispatch) === null || _b === void 0 ? void 0 : _b.call(_a, { msg: 'set_pin', trackReference: screenShareTracks[0] });
      lastAutoFocusedScreenShareTrack.current = screenShareTracks[0];
    } else if (lastAutoFocusedScreenShareTrack.current &&
      !screenShareTracks.some((track) => {
        var _a, _b;
        return track.publication.trackSid === ((_b = (_a = lastAutoFocusedScreenShareTrack.current) === null || _a === void 0 ? void 0 : _a.publication) === null || _b === void 0 ? void 0 : _b.trackSid);
      })) {
      log.debug('Auto clearing screen share focus.');
      (_d = (_c = layoutContext.pin).dispatch) === null || _d === void 0 ? void 0 : _d.call(_c, { msg: 'clear_pin' });
      lastAutoFocusedScreenShareTrack.current = null;
    }
    if (focusTrack) {
      let updatedFocusTrack;
      const isFocusTrackParticipantExist = participants.find((pt) => pt.identity === focusTrack.participant.identity);
      if (!isFocusTrackParticipantExist) {
        // Focus track is relative to a participant that has left the room
        updatedFocusTrack = tracks.find((tr) => tr.participant.identity === scUserContext.user.id.toString());
        (_f = (_e = layoutContext.pin).dispatch) === null || _f === void 0 ? void 0 : _f.call(_e, { msg: 'set_pin', trackReference: updatedFocusTrack });
      } else if (!isTrackReference(focusTrack)) {
        // You are not subscribed to the track
        updatedFocusTrack = tracks.find((tr) => tr.participant.identity === focusTrack.participant.identity && tr.source === focusTrack.source);
        if (updatedFocusTrack !== focusTrack && isTrackReference(updatedFocusTrack)) {
          (_h = (_g = layoutContext.pin).dispatch) === null || _h === void 0 ? void 0 : _h.call(_g, { msg: 'set_pin', trackReference: updatedFocusTrack });
        }
      }
    }
  }, [
    screenShareTracks.map((ref) => `${ref.publication.trackSid}_${ref.publication.isSubscribed}`).join(),
    (_c = focusTrack === null || focusTrack === void 0 ? void 0 : focusTrack.publication) === null || _c === void 0 ? void 0 : _c.trackSid,
    tracks,
    participants,
    speakerFocused
  ]);
  // Pin the track of the participant passed via speakerFocused once it becomes available (only once).
  useEffect(() => {
    var _a, _b;
    if (speakerFocused && !focusInitialized) {
      const speaker = participants.find((pt) => {
        return pt.name === speakerFocused.username;
      });
      if (speaker) {
        const updatedFocusTrack = tracks.find((tr) => {
          if (tr) {
            return tr.participant.identity === speaker.identity;
          }
          return false;
        });
        (_b = (_a = layoutContext.pin).dispatch) === null || _b === void 0 ? void 0 : _b.call(_a, { msg: 'set_pin', trackReference: updatedFocusTrack });
        setFocusInitialized(true);
      }
    }
  }, [tracks, participants, speakerFocused]);
  // Apply or remove the BackgroundBlur processor on the local camera track when the blur preference changes.
  useEffect(() => {
    var _a;
    const localCamTrack = cameraTrack === null || cameraTrack === void 0 ? void 0 : cameraTrack.track;
    if (localCamTrack) {
      setProcessorPending(true);
      try {
        if (blurEnabled && !localCamTrack.getProcessor()) {
          localCamTrack.setProcessor(BackgroundBlur(20));
        } else if (!blurEnabled) {
          localCamTrack.stopProcessor();
        }
      } catch (e) {
        console.log(e);
        setBlurEnabled(false);
        (_a = window === null || window === void 0 ? void 0 : window.localStorage) === null || _a === void 0 ? void 0 : _a.setItem(CHOICE_VIDEO_BLUR_EFFECT, false.toString());
        enqueueSnackbar(_jsx(FormattedMessage, { id: "ui.liveStreamRoom.errorApplyVideoEffect", defaultMessage: "ui.contributionActionMenu.errorApplyVideoEffect" }), { variant: 'warning', autoHideDuration: 3000 });
      } finally {
        setProcessorPending(false);
      }
    }
  }, [blurEnabled, cameraTrack]);
  return (_jsxs(Root, Object.assign({ className: classNames(className, classes.root, 'lk-video-conference') }, rest, { children: [
    isWeb() && (_jsxs(LayoutContextProvider, Object.assign({ value: layoutContext, onPinChange: handleFocusStateChange, onWidgetChange: widgetUpdate }, { children: [
      _jsxs("div", Object.assign({ className: "lk-video-conference-inner" }, { children: [
        !focusTrack ? (
          _jsx("div", Object.assign({ className: "lk-grid-layout-wrapper" }, { children:
            _jsx(GridLayout, Object.assign({ tracks: hideParticipantsList ? tracksNoParticipants : tracks }, { children: _jsx(ParticipantTile, {}) })) }))
        ) : (
          _jsx("div", Object.assign({ className: "lk-focus-layout-wrapper" }, { children:
            hideParticipantsList ? (
              _jsx(FocusLayoutContainerNoParticipants, { children: focusTrack && _jsx(FocusLayout, { trackRef: focusTrack }) })
            ) : (
              _jsxs(FocusLayoutContainer, { children: [
                carouselTracks.length ? (
                  _jsx(CarouselLayout, Object.assign({ tracks: carouselTracks }, { children: _jsx(ParticipantTile, {}) }))
                ) : (
                  _jsx(NoParticipants, {})
                ),
                focusTrack && _jsx(FocusLayout, { trackRef: focusTrack })
              ] })
            ) }))
        ),
        _jsx(ControlBar, { controls: Object.assign({ chat: !disableChat, microphone: !disableMicrophone, camera: !disableCamera, screenShare: !disableShareScreen }, { settings: true }) })
      ] })),
      !disableChat && (_jsx(Chat, { style: { display: widgetState.showChat ? 'grid' : 'none' }, messageFormatter: chatMessageFormatter, messageEncoder: chatMessageEncoder, messageDecoder: chatMessageDecoder })),
      _jsxs("div", Object.assign({ className: "lk-settings-menu-modal", style: { display: widgetState.showSettings ? 'block' : 'none' } }, { children: [
        _jsx(IconButton, Object.assign({ className: "lk-settings-menu-modal-icon-close", onClick: () => {
          var _a, _b;
          return (_b = layoutContext === null || layoutContext === void 0 ? void 0 : (_a = layoutContext.widget).dispatch) === null || _b === void 0 ? void 0 : _b.call(_a, { msg: 'toggle_settings' });
        } }, { children: _jsx(Icon, { children: "close" }) })),
        SettingsComponent ? (_jsx(SettingsComponent, {})) : (_jsx(_Fragment, { children: _jsx(LiveStreamSettingsMenu, { onlyContentMenu: true, actionBlurDisabled: !cameraTrack && !disableCamera, blurEnabled: blurEnabled, handleBlur: handleBlur }) }))
      ] }))
    ] }))),
    _jsx(RoomAudioRenderer, {}),
    _jsx(ConnectionStateToast, {})
  ] })));
}
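
For orientation, here is a minimal usage sketch (not part of this file): the component is meant to be rendered inside an already connected LiveKitRoom from @livekit/components-react. The serverUrl and token values, the import path and the speakerFocused username below are placeholders chosen for illustration, not values defined by this module.

import { LiveKitRoom } from '@livekit/components-react';
import { VideoConference } from './VideoConference'; // illustrative path

export function LiveRoomExample({ serverUrl, token }) {
  return (
    // serverUrl and token must come from your own LiveKit backend (placeholders here)
    <LiveKitRoom serverUrl={serverUrl} token={token} connect audio video>
      <VideoConference
        speakerFocused={{ username: 'host-user' }} // pins this speaker's track once it is available
        hideParticipantsList={false}               // true limits the grid to you, the focused speaker and screen shares
        disableShareScreen={false}
      />
    </LiveKitRoom>
  );
}

All other props of the wrapper (disableChat, disableMicrophone, disableCamera, showSettings, SettingsComponent, chatMessageFormatter, ...) are optional; by default the component renders the grid view with chat and all controls enabled.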