UNPKG

@videosdk.live/react-sdk

Version:

<h1 align="center"> <img src="https://static.videosdk.live/videosdk_logo_website_black.png"/><br/> <p align="center"> Video SDK React App<br/> <a href="https://videosdk.live/">videosdk.live</a> </p> </h1>

1,632 lines (1,607 loc) 63.1 kB
// Type definitions for @videosdk.live/react-sdk 0.1
// Project: https://docs.videosdk.live/docs/realtime-communication/sdk-reference/react-sdk/setup
// Definitions by: Rajan Surani <https://github.com/rajansurani>
//                 Ahmed Bhesaniya <https://github.com/ahmedbhesaniya97>
//                 Zujo Now <https://github.com/zujonow>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
import React, { JSX } from 'react';
import { Connection } from './connection';
import { CameraDeviceInfo, DeviceInfo, MicrophoneDeviceInfo, PlaybackDeviceInfo } from './deviceInfo';
import { Permission } from './permission';
import { Meeting } from './meeting';
import { Participant } from './participant';
import { Stream } from './stream';
import { Character, CharacterMode, CharacterState } from './character';
/**
 * @param children - Render child component.
 * ---
 * @param config - This is the meeting configuration object, which contains `meetingId`, `name` of the participant, `webcamEnabled`, `micEnabled` and many more.
 * ---
 * @param config.meetingId -
 * - Unique Id of the meeting where that participant will be joining.
 *
 * - It will be in the format of xxx-yyy-zzz and will be generated using the [VideoSDK's Room API](https://docs.videosdk.live/api-reference/realtime-communication/create-room).
 * ---
 * @param config.name - Name of the participant who will be joining the meeting, this name will be displayed to other participants in the same meeting.
 * ---
 * @param config.micEnabled - Whether mic of the participant will be on while joining the meeting. If it is set to false, then mic of that participant will be disabled by default,
 * but can be enabled or disabled later.
 * ---
 * @param config.webcamEnabled - Whether webcam of the participant will be on while joining the meeting. If it is set to false, then webcam of that participant will
 * be disabled by default, but can be enabled or disabled later.
 * ---
 * @param config.participantId - You can specify your custom participantId here.
 * ---
 * @param config.multiStream - Sets whether to send multi-resolution streams while publishing video. Please refer to this link for more understanding
 * [What is multiStream ?](https://docs.videosdk.live/react/guide/video-and-audio-calling-api-sdk/render-media/optimize-video-track#what-is-multistream)
 * ---
 * @param config.maxResolution - Sets the maximum resolution of the published video; can be `hd` or `sd`.
 * ---
 * @param config.signalingBaseUrl - You can specify the signaling base url here.
 * ---
 * @param config.mode -
 *
 * - There are 3 types of modes:
 *
 * - **SEND_AND_RECV**: Both audio and video streams will be produced and consumed in this mode.
 *
 * - **RECV_ONLY**: Both audio and video streams will be only consumed in this mode.
 *
 * - **SIGNALLING_ONLY**: Audio and video streams will not be produced or consumed in this mode.
 *
 * - defaultValue : **SEND_AND_RECV**
 *
 * ---
 *
 * @param config.customCameraVideoTrack - Set the initial custom video track using different encoding parameters, camera facing mode, and optimization mode.
 * ---
 * @param config.customMicrophoneAudioTrack - Set the initial custom audio track using different encoding parameters and optimization mode.
 * ---
 * @param config.defaultCamera - [For Mobile Browser] It sets the initial camera orientation. Use 'front' to initialize the camera with the front-facing (selfie) mode, or 'back' to initialize it with the rear-facing (main) mode.
 * ---
 * @param config.debugMode - Enables users to view detailed error logs generated by our SDK directly on the VideoSDK's dashboard.
 * ---
 * @param token -
 * - You can generate a token in two ways:
 *
 * 1. **Temporary Token** : You can visit [Dashboard's API Key section](https://app.videosdk.live/api-keys) and generate the temporary token from there.
 * 2. **Server** : You can setup JWT in backend and make an API call to get the token from your server.
 * ---
 * @param joinWithoutUserInteraction -
 * - This is a boolean flag, when set to true, allows a participant to join a meeting directly without explicitly calling the join() function.
 *
 * - This is an OPTIONAL parameter. By default, it is set to false meaning, user has to manually call the join().
 */
export function MeetingProvider({ children, config, token, joinWithoutUserInteraction, reinitialiseMeetingOnConfigChange: _reinitialiseMeetingOnConfigChange, deviceInfo }: {
    children: any;
    config: {
        meetingId: string;
        autoConsume?: boolean;
        preferredProtocol?: 'UDP_ONLY' | 'UDP_OVER_TCP' | 'TCP_ONLY';
        participantId?: string | undefined;
        name: string;
        micEnabled: boolean;
        webcamEnabled: boolean;
        maxResolution?: 'hd' | 'sd';
        customCameraVideoTrack?: MediaStream | undefined;
        customMicrophoneAudioTrack?: MediaStream | undefined;
        signalingBaseUrl?: string | undefined;
        multiStream?: boolean;
        mode?: 'SEND_AND_RECV' | 'SIGNALLING_ONLY' | 'RECV_ONLY';
        metaData?: object;
        defaultCamera?: 'front' | 'back';
        debugMode: boolean;
        translationLanguage?: string;
        speakingLanguage?: string;
    };
    token: string;
    joinWithoutUserInteraction?: boolean;
    reinitialiseMeetingOnConfigChange?: boolean;
    deviceInfo?: object;
}): any;
/**
 * @param children - Render child component.
 * ---
 * @param onParticipantJoined - This event callback is triggered when a new participant joins the meeting.
 * ---
 * @param onParticipantLeft - This event callback is triggered when a participant leaves the meeting.
 * - It provides the participant object and an additional reason object describing why the participant left.
 * ---
 * @param onSpeakerChanged -
 * - This event will be emitted when an active speaker changed.
 * - If you want to know which participant is actively speaking, then this event will be used.
 * - If no participant is actively speaking, then this event will pass null as an event callback parameter.
 * ---
 * @param onPresenterChanged -
 * - This event will be emitted when any participant starts or stops screen sharing.
 * - It will pass participantId as an event callback parameter.
 * - If a participant stops screensharing, then this event will pass null as an event callback parameter.
 * ---
 * @param onEntryRequested -
 * - This event will be triggered when a new participant who is trying to join the meeting, is having permission ask_join in token.
 * - This event will only be triggered to the participants in the meeting, who is having the permission allow_join in token.
 * - This event will pass following parameters as an event parameters, participantId and name of the new participant who is trying to join the meeting, allow() and deny() to take required actions.
 * ---
 * @param onEntryResponded -
 * - This event will be triggered when the join() request is responded.
 * - This event will be triggered to the participants in the meeting, who is having the permission allow_join in token.
 * - This event will be also triggered to the participant who requested to join the meeting.
 * ---
 * @param onMeetingJoined - This event callback is triggered when a local participant joins the meeting.
 * ---
 * @param onMeetingLeft - This event callback is triggered when local participant leaves the meeting.
 * ---
 * @param onPausedAllStreams - This event will be emitted when all the consumer streams are paused successfully.
 * ---
 * @param onResumedAllStreams - This event will be emitted when all the consumer streams are resumed successfully.
 * ---
 * @param onRecordingStateChanged - This event will be emitted when the meeting's recording status changed.
 * ---
 * @param onLivestreamStateChanged - This event will be emitted when the meeting's livestream status changed.
 * ---
 * @param onHlsStateChanged - This event will be emitted when the meeting's HLS(Http Livestreaming) status changed.
 * ---
 * @param onWebcamRequested -
 * - This event will be triggered to the participant B when any other participant A requests to enable webcam of participant B.
 * - On accepting the request, webcam of participant B will be enabled.
 * ---
 * @param onMicRequested -
 * - This event will be triggered to the participant B when any other participant A requests to enable mic of participant B.
 * - On accepting the request, mic of participant B will be enabled.
 * ---
 * @param onError -
 * - This event will be triggered when any error occurred.
 * - It will pass code and message, as an event callback parameter.
 * - To see all available error codes from SDK. [Meeting Error Codes](https://docs.videosdk.live/react/api/sdk-reference/error-codes)
 * ---
 * @param onMeetingStateChanged -
 * - This event will be triggered when state of meeting changes.
 * - It will pass state as an event callback parameter which will indicate current state of the meeting.
 * - All available states are `CONNECTING`, `CONNECTED`, `FAILED`, `DISCONNECTED`, `CLOSING`, `CLOSED`.
 * ---
 * @param onParticipantModeChanged -
 * - This event will be triggered when mode gets changed.
 * - It will pass mode, as an event callback parameter.
 * - **SEND_AND_RECV**: Both audio and video streams will be produced and consumed in this mode.
 * - **RECV_ONLY**: Both audio and video streams will be only consumed in this mode.
 * - **SIGNALLING_ONLY**: Audio and video streams will not be produced or consumed in this mode.
 * ---
 * @param onQualityLimitation - This event will be emitted when a quality limitation is detected or resolved during the meeting.
 */
export function MeetingConsumer({ children, onParticipantJoined, onParticipantLeft, onSpeakerChanged, onPresenterChanged, onMainParticipantChanged, onEntryRequested, onEntryResponded, onPausedAllStreams, onResumedAllStreams, onRecordingStarted, onRecordingStopped, onData, onMeetingJoined, onMeetingLeft, onLiveStreamStarted, onLiveStreamStopped, onVideoStateChanged, onVideoSeeked, onWebcamRequested, onMicRequested, onPinStateChanged, onConnectionOpen, onConnetionClose, onSwitchMeeting, onError, onHlsStarted, onHlsStopped, onHlsStateChanged, onRecordingStateChanged, onLivestreamStateChanged, onMeetingStateChanged, onParticipantModeChanged, onQualityLimitation, }: {
    children: any;
    onParticipantJoined?: (participant: Participant) => void;
    onParticipantLeft?: (participant: Participant, reason: { message: string, code: number }) => void;
    onSpeakerChanged?: (activeSpeakerId: string | null) => void;
    onPresenterChanged?: (presenterId: string | null) => void;
    onMainParticipantChanged?: (participant: Participant) => void;
    onEntryRequested?: ({ participantId, name, allow, deny }: { participantId: string; name: string; allow: () => void; deny: () => void; }) => void;
    onEntryResponded?: ({ participantId, decision }: { participantId: string; decision: string; }) => void;
    onPausedAllStreams?: ({ kind }: { kind: "audio" | "video" | "share" | "shareAudio" | "all" | undefined }) => void;
    onResumedAllStreams?: ({ kind }: { kind: "audio" | "video" | "share" | "shareAudio" | "all" | undefined }) => void;
    onRecordingStarted?: () => void;
    onRecordingStopped?: () => void;
    onData?: (data: { from: string; timestamp: string; payload: string | Uint8Array; reliability: string; }) => void;
    onMeetingJoined?: () => void;
    onMeetingLeft?: (reason: { message: string, code: number }) => void;
    onLiveStreamStarted?: () => void;
    onLiveStreamStopped?: () => void;
    onVideoStateChanged?: () => void;
    onVideoSeeked?: () => void;
    onWebcamRequested?: ({ participantId, accept, reject }: { participantId: string; accept: () => void; reject: () => void; }) => void;
    onMicRequested?: ({ participantId, accept, reject }: { participantId: string; accept: () => void; reject: () => void; }) => void;
    onPinStateChanged?: ({ participantId, state, pinnedBy }: { participantId: string; state: { share: boolean; cam: boolean }; pinnedBy: string; }) => void;
    onConnectionOpen?: () => void;
    onConnetionClose?: () => void;
    onSwitchMeeting?: () => void;
    onError?: ({ code, message }: { code: string; message: string }) => void;
    onHlsStarted?: ({ downstreamUrl }: { downstreamUrl: string }) => void;
    onHlsStopped?: () => void;
    onHlsStateChanged?: ({ status, downstreamUrl, playbackHlsUrl, livestreamUrl }: { status: | 'HLS_STARTING' | 'HLS_STARTED' | 'HLS_PLAYABLE' | 'HLS_STOPPING' | 'HLS_STOPPED'; downstreamUrl?: string; playbackHlsUrl?: string; livestreamUrl?: string; }) => void;
    onRecordingStateChanged?: ({ status }: { status: | 'RECORDING_STARTING' | 'RECORDING_STARTED' | 'RECORDING_STOPPING' | 'RECORDING_STOPPED'; }) => void;
    onLivestreamStateChanged?: ({ status }: { status: | 'LIVESTREAM_STARTING' | 'LIVESTREAM_STARTED' | 'LIVESTREAM_STOPPING' | 'LIVESTREAM_STOPPED'; }) => void;
    onMeetingStateChanged?: ({ state }: { state: | 'CONNECTING' | 'CONNECTED' | 'FAILED' | 'DISCONNECTED' | 'CLOSING' | 'CLOSED'; }) => void;
    onParticipantModeChanged?: ({ participantId, mode }: { participantId: string; mode: 'SEND_AND_RECV' | 'SIGNALLING_ONLY' | 'RECV_ONLY' }) => void;
    onQualityLimitation?: ({ type, state, timestamp, }: { type: "congestion" | "bandwidth" | "cpu", state: "detected" | "resolved", timestamp: number, }) => void,
}): any;
/**
 *
 * ---
 * @param onDeviceChanged - It's a callback which gets triggered whenever a media device such as a camera, microphone, or speaker is connected to or removed from the system.
* * **Code Example :** * ```js * function onDeviceChanged(devices) { * console.log("onDeviceChanged", devices); * } * const {} = useMediaDevice({ * onDeviceChanged * }); * ``` * --- * @returns This will returns methods and events associated with media devices and permissions. You can refer this [API Reference](https://docs.videosdk.live/react/api/sdk-reference/use-mediaDevice/introduction) */ export function useMediaDevice({ onDeviceChanged }?: { onDeviceChanged?: (devices: Promise<Array<DeviceInfo>>) => void; }): { getDevices: () => Promise<Array<DeviceInfo>>; getCameras: () => Promise<Array<CameraDeviceInfo>>; getMicrophones: () => Promise<Array<MicrophoneDeviceInfo>>; getPlaybackDevices: () => Promise<Array<PlaybackDeviceInfo>>; checkPermissions: (permissions?: Permission) => Promise<Map<string, boolean>>; requestPermission: ( permissions?: Permission ) => Promise<Map<string, boolean>>; }; /** * * @param participantId - Id of the participant. * --- * @param onStreamEnabled - It's a callback which gets triggered whenever a participant's video, audio or screen share stream is enabled. * * **Code Example :** * ```js * function onStreamEnabled(stream) { * console.log("onStreamEnabled", stream); * } * const { displayName } = useParticipant(participantId,{ * onStreamEnabled * }); * ``` * --- * @param onStreamEnabled - It's a callback which gets triggered whenever a participant's video, audio or screen share stream is disabled. * * **Code Example :** * ```js * function onStreamEnabled(stream) { * console.log("onStreamEnabled", stream); * } * const { displayName } = useParticipant(participantId,{ * onStreamEnabled * }); * ``` * --- * @param onMediaStatusChanged - It's a callback which gets triggered whenever a participant's video, audio, screenshare or screenshare audio is disabled or enabled. 
 *
 * **Code Example :**
 * ```js
 * function onMediaStatusChanged(data) {
 *   const { kind, newStatus } = data;
 *   console.log("onMediaStatusChanged", kind, newStatus);
 * }
 * const { displayName } = useParticipant(participantId,{
 *   onMediaStatusChanged
 * });
 * ```
 * ---
 * @param onVideoQualityChanged -
 * - It's a callback which gets triggered whenever a participant's video quality changes.
 * - currentQuality and prevQuality can have values `low` | `med` | `high`.
 *
 * **Code Example :**
 * ```js
 * function onVideoQualityChanged(data) {
 *   const { currentQuality, prevQuality } = data;
 *   console.log("onVideoQualityChanged", currentQuality, prevQuality );
 * }
 * const { displayName } = useParticipant(participantId,{
 *   onVideoQualityChanged
 * });
 * ```
 * ---
 * @param onStreamPaused -
 * - It's a callback that gets triggered whenever a participant's stream is paused by adaptive subscription manager.
 * - The `data` parameter contains the reason for pausing.
 *
 * **Code Example :**
 * ```js
 * function onStreamPaused(data) {
 *   console.log("Stream paused:", data);
 * }
 * const { displayName } = useParticipant(participantId, {
 *   onStreamPaused,
 * });
 * ```
 * ---
 * @param onStreamResumed -
 * - It's a callback that gets triggered whenever a participant's stream is resumed by adaptive subscription manager.
 * - The `data` parameter contains the reason for resuming.
 *
 * **Code Example :**
 * ```js
 * function onStreamResumed(data) {
 *   console.log("Stream resumed:", data);
 * }
 * const { displayName } = useParticipant(participantId, {
 *   onStreamResumed,
 * });
 * ```
 * ---
 * @returns This will return particular participant properties and method.
 * You can refer this [API Reference](https://docs.videosdk.live/react/api/sdk-reference/use-participant/introduction)
 */
export function useParticipant(
    participantId: string,
    { onStreamEnabled, onStreamDisabled, onMediaStatusChanged, onVideoQualityChanged, onStreamPaused, onStreamResumed }?: {
        onStreamDisabled?: (stream: Stream) => void;
        onStreamEnabled?: (stream: Stream) => void;
        onMediaStatusChanged?: ({ kind, peerId, newStatus }: { kind: 'audio' | 'video' | 'share' | 'shareAudio'; peerId: string; newStatus: boolean; }) => void;
        onVideoQualityChanged?: ({ peerId, prevQuality, currentQuality }: { peerId: string; prevQuality: 'low' | 'med' | 'high'; currentQuality: 'low' | 'med' | 'high'; }) => void;
        onStreamPaused?: ({ kind, reason }: { kind: 'video'; reason: string; }) => void;
        onStreamResumed?: ({ kind, reason }: { kind: 'video'; reason: string; }) => void;
    }
): {
    displayName: string;
    participant: Participant;
    webcamStream: Stream;
    micStream: Stream;
    screenShareStream: Stream;
    screenShareAudioStream: Stream;
    webcamOn: boolean;
    micOn: boolean;
    screenShareOn: boolean;
    screenShareAudioOn: boolean;
    isLocal: boolean;
    isActiveSpeaker: boolean;
    isMainParticipant: boolean;
    pinState: any;
    mode: 'SEND_AND_RECV' | 'SIGNALLING_ONLY' | 'RECV_ONLY'
    consumeMicStreams: () => void;
    consumeWebcamStreams: () => void;
    stopConsumingMicStreams: () => void;
    stopConsumingWebcamStreams: () => void;
    setQuality: (quality: 'low' | 'med' | 'high') => void;
    setScreenShareQuality: (quality: 'low' | 'med' | 'high') => void;
    setViewPort: (width: number, height: number) => void;
    enableMic: () => void;
    disableMic: () => void;
    enableWebcam: () => void;
    disableWebcam: () => void;
    remove: () => void;
    captureImage: ({ height, width }: { height?: number; width?: number; }) => Promise<string | null>;
    pin: (data: 'SHARE_AND_CAM' | 'CAM' | 'SHARE') => void;
    unpin: (data: 'SHARE_AND_CAM' | 'CAM' | 'SHARE') => void;
    getShareAudioStats: () => Promise< Array<{ bitrate: number; rtt: number; network: string; codec: string; jitter: number; limitation: any; totalPackets: number; packetsLost: number; concealmentEvents: number; insertedSamplesForDecelaration: number; removedSamplesForAccelaration: number; size: any; }> >;
    getAudioStats: () => Promise< Array<{ bitrate: number; rtt: number; network: string; codec: string; jitter: number; limitation: any; totalPackets: number; packetsLost: number; concealmentEvents: number; insertedSamplesForDecelaration: number; removedSamplesForAccelaration: number; size: any; }> >;
    getVideoStats: () => Promise< Array<{ bitrate: number; rtt: number; network: string; codec: string; jitter: number; limitation: any; totalPackets: number; packetsLost: number; concealmentEvents: number; insertedSamplesForDecelaration: number; removedSamplesForAccelaration: number; size: any; currentSpatialLayer: number; currentTemporalLayer: number; preferredSpatialLayer: number; preferredTemporalLayer: number; }> >;
    getShareStats: () => Promise< Array<{ bitrate: number; rtt: number; network: string; codec: string; jitter: number; limitation: any; totalPackets: number; packetsLost: number; concealmentEvents: number; insertedSamplesForDecelaration: number; removedSamplesForAccelaration: number; size: any; currentSpatialLayer: number; currentTemporalLayer: number; preferredSpatialLayer: number; preferredTemporalLayer: number; }> >;
};
/**
 *
 * @param streamId - ID of the stream.
 * ---
 * @param onStreamStateChanged - It's a callback that triggers whenever the state of a remote participant's video stream changes.
 *
 * **Code Example:**
 * ```js
 * function onStreamStateChanged({ state, timestamp }) {
 *   console.log("onStreamStateChanged", state, timestamp);
 * }
 * const { kind, paused, pause, resume } = useStream(streamId, {
 *   onStreamStateChanged,
 * });
 * ```
 * ---
 * @returns This will return the stream properties and methods.
 */
export function useStream(
    streamId: string,
    { onStreamStateChanged, }?: {
        onStreamStateChanged?: ({ state, timestamp, }: { state: | 'active' | 'ended' | 'stuck' | 'freeze-detected' | 'freeze-resolved'; timestamp: number; }) => void;
    }
): {
    kind: 'audio' | 'video' | 'share' | 'shareAudio';
    stream: Stream;
    codec: string;
    track: MediaStreamTrack;
    paused: boolean;
    pause: () => void;
    resume: () => void;
};
/**
 * @param onParticipantJoined - This event callback is triggered when a new participant joins the meeting.
 * ---
 * @param onParticipantLeft - This event callback is triggered when a participant leaves the meeting.
 * - It provides the participant object and an additional reason object describing why the participant left.
 * ---
 * @param onSpeakerChanged -
 * - This event will be emitted when an active speaker changed.
 * - If you want to know which participant is actively speaking, then this event will be used.
 * - If no participant is actively speaking, then this event will pass null as an event callback parameter.
 * ---
 * @param onPresenterChanged -
 * - This event will be emitted when any participant starts or stops screen sharing.
 * - It will pass participantId as an event callback parameter.
 * - If a participant stops screensharing, then this event will pass null as an event callback parameter.
 * ---
 * @param onEntryRequested -
 * - This event will be triggered when a new participant who is trying to join the meeting, is having permission ask_join in token.
 * - This event will only be triggered to the participants in the meeting, who is having the permission allow_join in token.
 * - This event will pass following parameters as an event parameters, participantId and name of the new participant who is trying to join the meeting, allow() and deny() to take required actions.
 * ---
 * @param onEntryResponded -
 * - This event will be triggered when the join() request is responded.
 * - This event will be triggered to the participants in the meeting, who is having the permission allow_join in token.
 * - This event will be also triggered to the participant who requested to join the meeting.
 * ---
 * @param onMeetingJoined - This event callback is triggered when a local participant joins the meeting.
 * ---
 * @param onMeetingLeft - This event callback is triggered when local participant leaves the meeting.
 * ---
 * @param onPausedAllStreams - This event will be emitted when all the consumer streams are paused successfully.
 * ---
 * @param onResumedAllStreams - This event will be emitted when all the consumer streams are resumed successfully.
 * ---
 * @param onRecordingStateChanged - This event will be emitted when the meeting's recording status changed.
 * ---
 * @param onLivestreamStateChanged - This event will be emitted when the meeting's livestream status changed.
 * ---
 * @param onHlsStateChanged - This event will be emitted when the meeting's HLS(Http Livestreaming) status changed.
 * ---
 * @param onWebcamRequested -
 * - This event will be triggered to the participant B when any other participant A requests to enable webcam of participant B.
 * - On accepting the request, webcam of participant B will be enabled.
 * ---
 * @param onMicRequested -
 * - This event will be triggered to the participant B when any other participant A requests to enable mic of participant B.
 * - On accepting the request, mic of participant B will be enabled.
 * ---
 * @param onError -
 * - This event will be triggered when any error occurred.
 * - It will pass code and message, as an event callback parameter.
 * - To see all available error codes from SDK. [Meeting Error Codes](https://docs.videosdk.live/react/api/sdk-reference/error-codes)
 * ---
 * @param onMeetingStateChanged -
 * - This event will be triggered when state of meeting changes.
 * - It will pass state as an event callback parameter which will indicate current state of the meeting.
 * - All available states are `CONNECTING`, `CONNECTED`, `FAILED`, `DISCONNECTED`, `CLOSING`, `CLOSED`.
 * ---
 * @param onParticipantModeChanged -
 * - This event will be triggered when mode gets changed.
 * - It will pass mode, as an event callback parameter.
 * - **SEND_AND_RECV**: Both audio and video streams will be produced and consumed in this mode.
 * - **RECV_ONLY**: Both audio and video streams will be only consumed in this mode.
 * - **SIGNALLING_ONLY**: Audio and video streams will not be produced or consumed in this mode.
 * ---
 * @param onQualityLimitation - This event will be emitted when a quality limitation is detected or resolved during the meeting.
 * ---
 * @returns This will return Meeting properties and method. You can refer this [API Reference](https://docs.videosdk.live/react/api/sdk-reference/use-meeting/introduction)
 */
export function useMeeting({ onParticipantJoined, onParticipantLeft, onSpeakerChanged, onPresenterChanged, onMainParticipantChanged, onEntryRequested, onEntryResponded, onPausedAllStreams, onResumedAllStreams, onRecordingStarted, onRecordingStopped, onData, onMeetingJoined, onMeetingLeft, onLiveStreamStarted, onLiveStreamStopped, onVideoStateChanged, onVideoSeeked, onWebcamRequested, onMicRequested, onPinStateChanged, onConnectionOpen, onConnetionClose, onSwitchMeeting, onError, onHlsStarted, onHlsStopped, onHlsStateChanged, onRecordingStateChanged, onLivestreamStateChanged, onMeetingStateChanged, onParticipantModeChanged, onCharacterJoined, onCharacterLeft, onMediaRelayStarted, onMediaRelayStopped, onMediaRelayError, onMediaRelayRequestResponse, onMediaRelayRequestReceived, onQualityLimitation, }?: {
    onParticipantJoined?: (participant: Participant) => void;
    onParticipantLeft?: (participant: Participant, reason: { message: string, code: number }) => void;
    onSpeakerChanged?: (activeSpeakerId: string | null) => void;
    onPresenterChanged?: (presenterId: string | null) => void;
    onMainParticipantChanged?: (participant: Participant) => void;
    onEntryRequested?: ({ participantId, name, allow, deny }: { participantId: string; name: string; allow: () => void; deny: () => void; }) => void;
    onEntryResponded?: ({ participantId, decision }: { participantId: string; decision: string; }) => void;
    onPausedAllStreams?: ({ kind }: { kind: "audio" | "video" | "share" | "shareAudio" | "all" | undefined }) => void;
    onResumedAllStreams?: ({ kind }: { kind: "audio" | "video" | "share" | "shareAudio" | "all" | undefined }) => void;
    onRecordingStarted?: () => void;
    onRecordingStopped?: () => void;
    onData?: (data: { from: string; timestamp: string; payload: string | Uint8Array; reliability: string; }) => void;
    onMeetingJoined?: () => void;
    onMeetingLeft?: (reason: { message: string, code: number }) => void;
    onLiveStreamStarted?: () => void;
    onLiveStreamStopped?: () => void;
    onVideoStateChanged?: () => void;
    onVideoSeeked?: () => void;
    onWebcamRequested?: ({ participantId, accept, reject }: { participantId: string; accept: () => void; reject: () => void; }) => void;
    onMicRequested?: ({ participantId, accept, reject }: { participantId: string; accept: () => void; reject: () => void; }) => void;
    onPinStateChanged?: ({ participantId, state, pinnedBy }: { participantId: string; state: { share: boolean; cam: boolean }; pinnedBy: string; }) => void;
    onConnectionOpen?: () => void;
    onConnetionClose?: () => void;
    onSwitchMeeting?: () => void;
    onError?: ({ code, message }: { code: string; message: string }) => void;
    onHlsStarted?: ({ downstreamUrl }: { downstreamUrl: string }) => void;
    onHlsStopped?: () => void;
    onHlsStateChanged?: ({ status, downstreamUrl, playbackHlsUrl, livestreamUrl }: { status: | 'HLS_STARTING' | 'HLS_STARTED' | 'HLS_PLAYABLE' | 'HLS_STOPPING' | 'HLS_STOPPED'; downstreamUrl?: string; playbackHlsUrl?: string; livestreamUrl?: string; }) => void;
    onRecordingStateChanged?: ({ status }: { status: | 'RECORDING_STARTING' | 'RECORDING_STARTED' | 'RECORDING_STOPPING' | 'RECORDING_STOPPED'; }) => void;
    onLivestreamStateChanged?: ({ status }: { status: | 'LIVESTREAM_STARTING' | 'LIVESTREAM_STARTED' | 'LIVESTREAM_STOPPING' | 'LIVESTREAM_STOPPED'; }) => void;
    onMeetingStateChanged?: ({ state }: { state: | 'CONNECTING' | 'CONNECTED' | 'FAILED' | 'DISCONNECTED' | 'CLOSING' | 'CLOSED'; }) => void;
    onParticipantModeChanged?: ({ participantId, mode }: { participantId: string; mode: 'SEND_AND_RECV' | 'SIGNALLING_ONLY' | 'RECV_ONLY' }) => void;
    onCharacterJoined?: (character: Character) => void;
    onCharacterLeft?: (character: Character) => void;
    onMediaRelayStarted?: ({ meetingId }: { meetingId: string }) => void;
    onMediaRelayStopped?: ({ meetingId, reason }: { meetingId: string; reason: string; }) => void;
    onMediaRelayError?: ({ meetingId, error }: { meetingId: string; error: string; }) => void;
    onMediaRelayRequestResponse?: ({ decision, decidedBy, meetingId }: { decision: "accepted" | "declined"; decidedBy: string; meetingId: string; }) => void;
    onMediaRelayRequestReceived?: ({ participantId, meetingId, displayName, accept, reject }: { participantId: string; meetingId: string; displayName: string; accept: () => void; reject: () => void; }) => void;
    onQualityLimitation?: ({ type, state, timestamp, }: { type: "congestion" | "bandwidth" | "cpu", state: "detected" | "resolved", timestamp: number, }) => void,
}): {
    meetingId: string;
    meeting: Meeting;
    localParticipant: Participant;
    activeSpeakerId: string;
    participants: Map<string, Participant>;
    characters: Map<string, Character>;
    pinnedParticipants: Map< string, { cam: boolean; share: boolean; } >;
    presenterId?: string;
    localMicOn: boolean;
    localWebcamOn: boolean;
    isRecording: boolean;
    recordingState: string;
    livestreamState: string;
    hlsState: string;
    hlsUrls: { downstreamUrl: string; playbackHlsUrl: string; livestreamUrl: string; };
    transcriptionState: string;
    translationState: string;
    selectedCameraDevice?: { deviceId: string; groupId: string; kind: 'videoinput'; label: string; };
    selectedMicrophoneDevice?: { deviceId: string; groupId: string; kind: 'audioinput'; label: string; };
    localScreenShareOn: boolean;
    connections: Map<string, Connection>;
    join: () => void;
    leave: () => void;
    end: () => void;
    unmuteMic: (customAudioTrack?: MediaStream | undefined) => void;
    muteMic: () => void;
    toggleMic: (customAudioTrack?: MediaStream | undefined) => void;
    enableWebcam: (customVideoTrack?: MediaStream | undefined) => void;
    send: (payload: string | Blob | ArrayBuffer | ArrayBufferView, options?: { reliability?: "RELIABLE" | "UNRELIABLE"; }) => Promise<boolean>;
    disableWebcam: () => void;
    toggleWebcam: (customVideoTrack?: MediaStream | undefined) => void;
    enableScreenShare: (customScreenShareTrack?: MediaStream | undefined) => void;
    disableScreenShare: () => void;
    toggleScreenShare: (customScreenShareTrack?: MediaStream | undefined) => void;
    enableAdaptiveSubscription: () => void;
    disableAdaptiveSubscription: () => void;
    pauseAllStreams: (kind?: "audio" | "video" | "share" | "shareAudio" | "all" | undefined) => void;
    resumeAllStreams: (kind?: "audio" | "video" | "share" | "shareAudio" | "all" | undefined) => void;
    startRecording: ( webhookUrl?: string, awsDirPath?: string, config?: { layout: { type: 'GRID' | 'SPOTLIGHT' | 'SIDEBAR'; priority: 'SPEAKER' | 'PIN'; gridSize: number; }; orientation: 'landscape' | 'portrait'; theme: 'DEFAULT' | 'DARK' | 'LIGHT'; quality: 'low' | 'med' | 'high'; mode: 'video-and-audio' | 'audio'; }, transcription?: { enabled: boolean; summary?: { enabled: boolean; prompt?: string; }; language?: string; } ) => void;
    stopRecording: () => void;
    switchTo: ({ meetingId, token }: { meetingId: string; token?: string; }) => void;
    startLiveStream: ( outputs: Array<{ url: string; streamKey: string; }>, config?: { layout: { type: 'GRID' | 'SPOTLIGHT' | 'SIDEBAR'; priority: 'SPEAKER' | 'PIN'; gridSize: number; }; theme: 'DEFAULT' | 'DARK' | 'LIGHT'; } ) => void;
    stopLiveStream: () => void;
    startHls: (config?: { layout: { type: 'GRID' | 'SPOTLIGHT' | 'SIDEBAR'; priority: 'SPEAKER' | 'PIN'; gridSize: number; }; orientation: 'landscape' | 'portrait'; theme: 'DEFAULT' | 'DARK' | 'LIGHT'; quality: 'low' | 'med' | 'high'; mode: 'video-and-audio' | 'audio'; }) => void;
    stopHls: () => void;
    getMics: () => Promise< Array<{ deviceId: string; label: string; }> >;
    getWebcams: () => Promise< Array<{ deviceId: string; label: string; facingMode: 'environment' | 'front'; }> >;
    changeMic: (object: string | MediaStream) => void;
    changeWebcam: (object: string | MediaStream) => void;
    changeMode(mode: 'SEND_AND_RECV' | 'SIGNALLING_ONLY' | 'RECV_ONLY'): void;
    startVideo: ({ link }: { link: string }) => void;
    stopVideo: () => void;
    pauseVideo: ({ currentTime }: { currentTime: number }) => void;
    resumeVideo: () => void;
    seekVideo: ({ currentTime }: { currentTime: number }) => void;
    connectTo: ({ meetingId, payload }: { meetingId: string; payload: string; }) => void;
};
/**
 *
 * @param topic - Represents the topic for which you are publishing and getting a message.
 * ---
 * @param onMessageReceived - This will be triggered when a new message is published for the subscribed topic with the message object.
 * ---
 * @param onOldMessagesReceived - This will be triggered once when you subscribe to the topic and will receive all the old messages present for that topic as an array of message object.
 * ---
 * @returns This will return `message` properties and `publish()` method.
You can refer this [API Reference](https://docs.videosdk.live/react/api/sdk-reference/use-pubsub#returns) * --- * **usePubSub example** * ```js * var topic = 'CHAT'; * * function onMessageReceived(message) { * console.log('New Message:', message); * } * * function onOldMessagesReceived(messages) { * console.log('Old Messages:', messages); * } * * const {publish, messages} = usePubSub(topic, { * onMessageReceived, * onOldMessagesReceived, * }); * ``` */ export function usePubSub( topic: string, { onMessageReceived, onOldMessagesReceived }?: { onMessageReceived?: (message: { id: string; message: string; senderId: string; senderName: string; timestamp: string; topic: string; payload: object; }) => void; onOldMessagesReceived?: ( messages: Array<{ id: string; message: string; senderId: string; senderName: string; timestamp: string; topic: string; payload: object; }> ) => void; } ): { publish: ( message: string, { persist, sendOnly }: { persist: boolean; sendOnly?: Array<String>; }, payload?: object ) => Promise<void>; messages: Array<{ id: string; message: string; senderId: string; senderName: string; timestamp: string; topic: string; payload: object; }>; }; /** * * @param key - Represents the key for which you want to set the value in the realtime store. * --- * @param onValueChanged - This will get triggered when the value for the given key changes. * --- * @returns This will return `setValue()` and `getValue()` methods. 
 * ---
 * **useRealtimeStore example**
 * ```js
 * var key = 'BLOCKED';
 *
 * function onValueChanged({value,updatedBy}) {
 *   console.log('Value changed to:', value, 'by:', updatedBy);
 * }
 *
 * const {setValue,getValue} = useRealtimeStore(key, {
 *   onValueChanged,
 * });
 * ```
 */
export function useRealtimeStore(
  key: string,
  {
    onValueChanged,
  }?: {
    // Fired with the new value and the id of the participant who set it.
    onValueChanged?: ({
      value,
      updatedBy,
    }: {
      value: string
      updatedBy: string,
    }) => void,
  }
): {
  // Persists `value` under `key` in the meeting's realtime store.
  // NOTE(review): declared as Promise<any> — the resolved value is not
  // documented here; confirm before relying on it.
  setValue: (value: string) => Promise<any>;
  // Resolves with the current value stored under `key`.
  getValue: () => Promise<string>;
};
export function useFile(): {
  // Uploads a base64-encoded file; resolves with the file URL, or `null` on failure.
  uploadBase64File: ({ base64Data, token, fileName }: {
    base64Data: string;
    token: string;
    fileName: string;
  }) => Promise<string | null>;
  // Fetches a previously uploaded file as base64; resolves with the data, or `null` on failure.
  fetchBase64File: ({ url, token }: {
    url: string;
    token: string;
  }) => Promise<string | null>;
};
/**
 * @returns - This will return `startWhiteboard()`, `stopWhiteboard()` and `whiteboardUrl`.
 * ---
 * **useWhiteboard example**
 * ```javascript
 * const { startWhiteboard, stopWhiteboard, whiteboardUrl } = useWhiteboard();
 *
 * async function handleStartWhiteboard() {
 *   await startWhiteboard();
 * }
 *
 * async function handleStopWhiteboard() {
 *   await stopWhiteboard();
 * }
 * ```
 */
export function useWhiteboard(): {
  /**
   * @description Starts the whiteboard for the meeting.
   */
  startWhiteboard: () => Promise<void>;
  /**
   * @description Stops the whiteboard session for the meeting.
   */
  stopWhiteboard: () => Promise<void>;
  /**
   * @description The URL of the active whiteboard, or `null` if the whiteboard is not currently active.
   */
  whiteboardUrl: string | null;
};
/**
 * @param onTranscriptionStateChanged - This will be triggered when a realtime transcription state is changed.
 * ---
 * @param onTranscriptionText - This will be triggered when a realtime transcription text is published.
 * ---
 * @returns This will return `startTranscription()` and `stopTranscription()` method.
You can refer this [API Reference](https://docs.videosdk.live/react/api/sdk-reference/use-transcription#returns)
 * ---
 * **useTranscription example**
 * ```js
 *
 * function onTranscriptionStateChanged(data) {
 *   console.log('New State Payload:', data)
 * }
 *
 * function onTranscriptionText(data) {
 *   console.log('Transcription Text Payload:', data);
 * }
 *
 * const { startTranscription, stopTranscription } = useTranscription({
 *   onTranscriptionStateChanged,
 *   onTranscriptionText,
 * });
 *
 * async function handleStartTranscription() {
 *   await startTranscription();
 * }
 *
 * async function handleStopTranscription() {
 *   await stopTranscription();
 * }
 * ```
 */
export function useTranscription({
  onTranscriptionStateChanged,
  onTranscriptionText
}?: {
  // Fired whenever the transcription status changes.
  onTranscriptionStateChanged?: (data: { id: string; status: string }) => void;
  // Fired for every realtime transcription fragment, tagged with the speaker.
  onTranscriptionText?: (data: {
    participantId: string;
    participantName: string;
    text: string;
    timestamp: number;
    type: 'realtime';
  }) => void;
}): {
  /**
   * @description This method is used to start the meeting transcription
   * @param config.webhookUrl? Webhook URL which will be called by VideoSDK when the transcription state gets changed
   * @param config.modelConfig? modelConfig if any, which will be used while doing transcription
   * @param config.summary.enabled Enables or disables summary generation from realtime transcriptions.
   * @param config.summary.prompt Guides summary generation (optional).
   * @param config.language Language code for transcription, default is 'en' (optional).
   */
  // NOTE(review): the config object itself is required (no `?`), so callers
  // must pass at least `{}` — confirm whether it should be optional.
  startTranscription: ({ webhookUrl, modelConfig, summary, language }: {
    webhookUrl?: string;
    modelConfig?: object;
    summary?: {
      enabled: boolean;
      prompt?: string;
    };
    language?: string;
  }) => void;
  // Stops the active transcription session.
  stopTranscription: () => void;
};
/**
 * @param onTranslationStateChanged - This will be triggered when a realtime translation state is changed.
 * ---
 * @param onTranslationLanguageChanged - This will be triggered when a realtime translation language is changed.
 * ---
 * @param onTranslationText - This will be triggered when a realtime translation text is published.
 * ---
 * @returns This will return `startTranslation()`, `changeTranslationLanguage()` and `stopTranslation()` method. You can refer this [API Reference](https://docs.videosdk.live/react/api/sdk-reference/use-translation#returns)
 * ---
 * **useTranslation example**
 * ```js
 *
 * function onTranslationStateChanged(data) {
 *   console.log('New State Payload:', data)
 * }
 *
 * function onTranslationLanguageChanged(data) {
 *   console.log('New Language Payload:', data)
 * }
 *
 * function onTranslationText(data) {
 *   console.log('Translation Text Payload:', data);
 * }
 *
 * const { startTranslation, changeTranslationLanguage, stopTranslation } = useTranslation({
 *   onTranslationStateChanged,
 *   onTranslationLanguageChanged,
 *   onTranslationText,
 * });
 *
 * async function handleStartTranslation() {
 *   await startTranslation();
 * }
 *
 * async function handleChangeTranslationLanguage(languageCode) {
 *   await changeTranslationLanguage(languageCode);
 * }
 *
 * async function handleStopTranslation() {
 *   await stopTranslation();
 * }
 * ```
 */
export function useTranslation({
  onTranslationStateChanged,
  onTranslationLanguageChanged,
  onTranslationText
}?: {
  // Fired whenever the translation status changes.
  onTranslationStateChanged?: (data: { id: string; status: string }) => void;
  // Fired when a participant's translation language changes.
  onTranslationLanguageChanged?: (data: {
    participantId: string,
    language: string,
  }) => void;
  // Fired for every realtime translated text fragment.
  onTranslationText?: (data: {
    participantId: string;
    participantName: string;
    text: string;
    timestamp: number;
    language: string;
    type: string;
  }) => void;
}): {
  /**
   * @description This method is used to start the meeting translation
   */
  startTranslation: () => Promise<void>;
  // Switches the target translation language at runtime (e.g. 'es', 'fr').
  changeTranslationLanguage: (languageCode: string) => Promise<void>;
  // Stops the active translation session.
  stopTranslation: () => Promise<void>;
};
/**
 * @param onCharacterStateChanged - This will be triggered when a character state is changed.
 * ---
 * @param onCharacterMessage - This will be triggered when a character response/message is published.
 * ---
 * @param onUserMessage - This will be triggered when a user/participant message is published.
 * ---
 * @param onData - This will be triggered when a character worker notifies a payload.
 * ---
 * @param onError - This will be triggered when there is an error related to the character.
 * ---
 * @param onStreamEnabled - It's a callback which gets triggered whenever a character's video, audio or screen share stream is enabled.
 * ---
 * @param onStreamDisabled - It's a callback which gets triggered whenever a character's video, audio or screen share stream is disabled.
 * ---
 * @param onMediaStatusChanged - It's a callback which gets triggered whenever a character's video or audio is disabled or enabled.
 * ---
 * @param onVideoQualityChanged -
 * - It's a callback which gets triggered whenever a character's video quality changes.
 * - currentQuality and prevQuality can have values `low` | `med` | `high`.
 * ---
 * @returns This will return `character` Object.
 * ---
 * **useCharacter example**
 * ```js
 *
 * function onCharacterStateChanged(data) {
 *   console.log('New State Payload:', data)
 * }
 *
 * function onCharacterMessage(data) {
 *   console.log('character message Payload:', data);
 * }
 *
 * function onUserMessage(data) {
 *   console.log('user message Payload:', data);
 * }
 *
 * function onData(topic, data) {
 *   console.log('character data available:', topic, data);
 * }
 *
 * function onError(topic, data) {
 *   console.log('character error:', data.code, data.message);
 * }
 *
 * const { join, leave, sendMessage, interrupt } = useCharacter({
 *   interactionId,
 *   // OR
 *   id,
 *   displayName,
 *   characterRole,
 *   characterMode,
 *   knowledgeBases,
 *   language,
 * },
 * {
 *   onCharacterStateChanged,
 *   onCharacterMessage,
 *   onUserMessage,
 *   onData,
 *   onError,
 *   onCharacterJoined,
 *   onCharacterLeft,
 *
 *   onStreamEnabled,
 *   onStreamDisabled,
 *   onMediaStatusChanged,
 *   onVideoQualityChanged
 * });
 *
 * async function joinCharacter() {
 *   await join();
 * }
 *
 * async function removeCharacter() {
 *   await leave();
 * }
 * ```
 */
export
function useCharacter(
  {
    interactionId,
    // OR
    id,
    displayName,
    characterRole,
    characterMode,
    knowledgeBases,
    language,
    metaData
  }: {
    // Identify the character either by an existing interaction id…
    interactionId: string;
    // OR
    // …by its id plus the configuration below.
    id: string;
    displayName: string;
    characterRole: string;
    characterMode: 'text' | 'co_pilot' | 'auto_pilot' | 'vision_pilot';
    knowledgeBases: string[];
    language: string;
    metaData: any;
  },
  // NOTE(review): this handlers object is required (no `?`), unlike the other
  // hooks in this file — confirm whether it should be optional.
  {
    onCharacterStateChanged,
    onCharacterMessage,
    onUserMessage,
    onData,
    onError,
    onStreamEnabled,
    onStreamDisabled,
    onMediaStatusChanged,
    onVideoQualityChanged
  }: {
    // Fired whenever the character's lifecycle state changes.
    onCharacterStateChanged?: (data: {
      id: string;
      status: CharacterState;
    }) => void;
    // Fired when a user/participant message is published.
    onUserMessage?: (data: {
      participantId: string;
      participantName: string;
      text: string;
      timestamp: number;
    }) => void;
    // Fired when the character publishes a response/message.
    onCharacterMessage?: (data: {
      characterId: string;
      characterName: string;
      text: string;
      timestamp: number;
    }) => void;
    // Fired when the character worker notifies a payload for `topic`.
    onData?: (topic: string, data: any) => void;
    // Fired on character-related errors.
    onError?: (data: any) => void;
    onCharacterJoined?: () => void;
    onCharacterLeft?: () => void;
    // Fired when a character's video/audio/share stream is disabled.
    onStreamDisabled?: (stream: Stream) => void;
    // Fired when a character's video/audio/share stream is enabled.
    onStreamEnabled?: (stream: Stream) => void;
    // Fired when the character's audio or video is toggled.
    onMediaStatusChanged?: ({ kind, peerId, newStatus }: {
      kind: 'audio' | 'video';
      peerId: string;
      newStatus: boolean;
    }) => void;
    // Fired when the character's received video quality changes.
    onVideoQualityChanged?: ({ peerId, prevQuality, currentQuality }: {
      peerId: string;
      prevQuality: 'low' | 'med' | 'high';
      currentQuality: 'low' | 'med' | 'high';
    }) => void;
  }
): {
  displayName: string;
  webcamStream: Stream;
  micStream: Stream;
  webcamOn: boolean;
  micOn: boolean;
  isActiveSpeaker: boolean;
  interactionId?: string;
  id?: string;
  characterMode?: CharacterMode;
  characterState?: CharacterState;
  knowledgeBases?: string[];
  metaData?: any;
  enableMic: () => void;
  disableMic: () => void;
  enableWebcam: () => void;
  disableWebcam: () => void;
  // Adds the character to the meeting.
  join: () => Promise<void>;
  // Removes the character from the meeting.
  leave: () => Promise<void>;
  // Sends a text message to the character.
  sendMessage: (text: string) => Promise<void>;
  // Interrupts the character's current response.
  interrupt: () => Promise<void>;
};
/**
 * @param microphoneId - It will be the id of the mic from which the audio should be captured.
 * ---
 * @param encoderConfig - This will accept the voice profile you want to capture. You can check out all the values [here](https://docs.videosdk.live/react/api/sdk-reference/custom-tracks#parameters-1)
 *
 * #### Example : `speech_standard`, `high_quality`, `music_standard`
 * ---
 * @param noiseConfig - You can provide different noise configurations.
 * ---
 * @param noiseConfig.noiseSuppression - It is used to improve the quality of audio by removing background noise that can interfere with the clarity of speech.
 * ---
 * @param noiseConfig.echoCancellation - It is used to remove unwanted echoes from the voice.
 * ---
 * @param noiseConfig.autoGainControl - It is used to maintain a consistent level of loudness or amplitude in a voice.
 * ---
 *
 * **Code Example**
 * ```js
 * import { createMicrophoneAudioTrack } from "@videosdk.live/react-sdk";
 *
 * let customTrack = await createMicrophoneAudioTrack({
 *   microphoneId : 'mic-id',