UNPKG

communication-react-19

Version:

React library for building modern communication user experiences utilizing Azure Communication Services (React 19 compatible fork)

1,542 lines (1,493 loc) 487 kB
/// <reference types="react" /> import { AddPhoneNumberOptions } from '@azure/communication-calling'; import { AudioDeviceInfo } from '@azure/communication-calling'; import { AudioEffectsStartConfig } from '@azure/communication-calling'; import { BackgroundBlurConfig } from '@azure/communication-calling'; import { BackgroundBlurEffect } from '@azure/communication-calling'; import { BackgroundReplacementConfig } from '@azure/communication-calling'; import { BackgroundReplacementEffect } from '@azure/communication-calling'; import { BreakoutRoom } from '@azure/communication-calling'; import { BreakoutRoomsSettings } from '@azure/communication-calling'; import { BreakoutRoomsUpdatedListener } from '@azure/communication-calling'; import { Call } from '@azure/communication-calling'; import { CallAgent } from '@azure/communication-calling'; import { CallClient } from '@azure/communication-calling'; import { CallClientOptions } from '@azure/communication-calling'; import { CallDirection } from '@azure/communication-calling'; import { CallEndReason } from '@azure/communication-calling'; import { CallerInfo } from '@azure/communication-calling'; import { CallInfo } from '@azure/communication-calling'; import { CallKind } from '@azure/communication-calling'; import { CallState as CallState_2 } from '@azure/communication-calling'; import { CallSurvey } from '@azure/communication-calling'; import { CallSurveyResponse } from '@azure/communication-calling'; import { CapabilitiesChangeInfo } from '@azure/communication-calling'; import { CaptionsKind } from '@azure/communication-calling'; import { CaptionsResultType } from '@azure/communication-calling'; import { ChatClient } from '@azure/communication-chat'; import { ChatClientOptions } from '@azure/communication-chat'; import { ChatMessage as ChatMessage_2 } from '@azure/communication-chat'; import { ChatMessageReadReceipt } from '@azure/communication-chat'; import { ChatParticipant } from '@azure/communication-chat'; import { 
ChatThreadClient } from '@azure/communication-chat'; import { CommunicationIdentifier } from '@azure/communication-common'; import { CommunicationIdentifierKind } from '@azure/communication-common'; import { CommunicationTokenCredential } from '@azure/communication-common'; import { CommunicationUserIdentifier } from '@azure/communication-common'; import type { CommunicationUserKind } from '@azure/communication-common'; import { CreateViewOptions } from '@azure/communication-calling'; import { DeviceAccess } from '@azure/communication-calling'; import { DeviceManager } from '@azure/communication-calling'; import type { DiagnosticFlag } from '@azure/communication-calling'; import type { DiagnosticQuality } from '@azure/communication-calling'; import type { DiagnosticValueType } from '@azure/communication-calling'; import { DominantSpeakersInfo } from '@azure/communication-calling'; import { DtmfTone as DtmfTone_2 } from '@azure/communication-calling'; import { EnvironmentInfo } from '@azure/communication-calling'; import { GroupCallLocator } from '@azure/communication-calling'; import { IButtonProps } from '@fluentui/react'; import { IButtonStyles } from '@fluentui/react'; import { IContextualMenuItem } from '@fluentui/react'; import { IContextualMenuItemStyles } from '@fluentui/react'; import { IContextualMenuProps } from '@fluentui/react'; import { IContextualMenuStyles } from '@fluentui/react'; import { IIconProps } from '@fluentui/react'; import { IIconStyles } from '@fluentui/react'; import { ILinkStyles } from '@fluentui/react'; import { IMessageBarProps } from '@fluentui/react'; import { IncomingCall } from '@azure/communication-calling'; import { IncomingCallKind } from '@azure/communication-calling'; import { IPersonaStyleProps } from '@fluentui/react'; import { IPersonaStyles } from '@fluentui/react'; import { IRawStyle } from '@fluentui/react'; import { IRenderFunction } from '@fluentui/react'; import { IStackStyles } from '@fluentui/react'; import { 
IStyle } from '@fluentui/react'; import { IStyleFunctionOrObject } from '@fluentui/react'; import { ITextFieldStyles } from '@fluentui/react'; import { LatestMediaDiagnostics } from '@azure/communication-calling'; import { LatestNetworkDiagnostics } from '@azure/communication-calling'; import { LocalRecordingInfo } from '@azure/communication-calling'; import { LocalVideoStream } from '@azure/communication-calling'; import type { MediaDiagnosticChangedEventArgs } from '@azure/communication-calling'; import type { MediaDiagnosticType } from '@azure/communication-calling'; import { MediaStreamType } from '@azure/communication-calling'; import { MicrosoftTeamsAppIdentifier } from '@azure/communication-common'; import { MicrosoftTeamsUserIdentifier } from '@azure/communication-common'; import type { NetworkDiagnosticChangedEventArgs } from '@azure/communication-calling'; import type { NetworkDiagnosticType } from '@azure/communication-calling'; import { PartialTheme } from '@fluentui/react'; import { ParticipantCapabilities } from '@azure/communication-calling'; import { ParticipantInfo } from '@azure/communication-calling'; import { ParticipantRole } from '@azure/communication-calling'; import { PermissionConstraints } from '@azure/communication-calling'; import { PersonaInitialsColor } from '@fluentui/react'; import { PersonaPresence } from '@fluentui/react'; import { PersonaSize } from '@fluentui/react'; import { PhoneNumberIdentifier } from '@azure/communication-common'; import { PropertyChangedEvent } from '@azure/communication-calling'; import { default as React_2 } from 'react'; import { Reaction as Reaction_2 } from '@azure/communication-calling'; import { ReactionMessage } from '@azure/communication-calling'; import { RealTimeTextInfo as RealTimeTextInfo_2 } from '@azure/communication-calling'; import { RealTimeTextResultType } from '@azure/communication-calling'; import { RecordingInfo } from '@azure/communication-calling'; import type { RemoteParticipant } 
from '@azure/communication-calling'; import { RemoteParticipantState as RemoteParticipantState_2 } from '@azure/communication-calling'; import { RoomCallLocator } from '@azure/communication-calling'; import { ScalingMode } from '@azure/communication-calling'; import { SendMessageOptions } from '@azure/communication-chat'; import type { ServerDiagnosticType } from '@azure/communication-calling'; import { SpotlightedParticipant } from '@azure/communication-calling'; import { StartCallOptions } from '@azure/communication-calling'; import { StartCaptionsOptions } from '@azure/communication-calling'; import { TeamsCall } from '@azure/communication-calling'; import { TeamsCallAgent } from '@azure/communication-calling'; import { TeamsCallInfo } from '@azure/communication-calling'; import { TeamsExtensionUserIdentifier } from '@azure/communication-common'; import { TeamsIncomingCall } from '@azure/communication-calling'; import { TeamsMeetingIdLocator } from '@azure/communication-calling'; import { TeamsMeetingLinkLocator } from '@azure/communication-calling'; import { Theme } from '@fluentui/react'; import { TransferEventArgs } from '@azure/communication-calling'; import { TypingIndicatorReceivedEvent } from '@azure/communication-chat'; import { UnknownIdentifier } from '@azure/communication-common'; import { VideoDeviceInfo } from '@azure/communication-calling'; import { VideoEffectName } from '@azure/communication-calling'; import { VideoStreamRenderer } from '@azure/communication-calling'; import { VideoStreamRendererView } from '@azure/communication-calling'; /** * Transfer feature state * * @public */ export declare interface AcceptedTransfer { /** * Stores call id of accepted transfer */ callId: string; /** * Stores timestamp when transfer was accepted */ timestamp: Date; } /** * Active error messages to be shown via {@link ErrorBar}. * * @public */ export declare interface ActiveErrorMessage { /** * Type of error that is active. 
*/ type: ErrorType; /** * The latest timestamp when this error was observed. * * When available, this is used to track errors that have already been seen and dismissed * by the user. */ timestamp?: Date; } /** * Active notifications to be shown via {@link NotificationStack}. * * @public */ export declare interface ActiveNotification { /** * Type of error that is active. */ type: NotificationType; /** * Callback called when the primary button inside notification bar is clicked. */ onClickPrimaryButton?: () => void; /** * Callback called when the secondary button inside notification bar is clicked. */ onClickSecondaryButton?: () => void; /** * Callback called when the notification is dismissed. */ onDismiss?: () => void; /** * If set, notification will automatically dismiss after 5 seconds */ autoDismiss?: boolean; /** * The latest timestamp when this notification was observed. * * When available, this is used to track notifications that have already been seen and dismissed * by the user. */ timestamp?: Date; /** * Aria-live property for the notification. * @defaultValue polite */ ariaLive?: 'assertive' | 'off' | 'polite'; } /** * Error reported via error events and stored in adapter state. * * @public */ export declare interface AdapterError extends Error { /** * The operation that failed. */ target: string; /** * Error thrown by the failed operation. */ innerError: Error; /** * Timestamp added to the error in the adapter implementation. */ timestamp: Date; } /** * Adapters stores the latest error for each operation in the state. * * `target` is an adapter defined string for each unique operation performed by the adapter. * * @public */ export declare type AdapterErrors = { [target: string]: AdapterError; }; /** * Notification from call client state stored in adapter state. * * @public */ export declare interface AdapterNotification { /** * Target of notification. There should only be one notification per target. 
*/ target: string; /** * Timestamp added to the notification in the adapter implementation. */ timestamp: Date; } /** * Adapters stores the latest notification for each target. * * @public */ export declare type AdapterNotifications = { [target: string]: AdapterNotification; }; /** * Functionality for interfacing with Composite adapter state. * * @public */ export declare interface AdapterState<TState> { /** Subscribes the handler to stateChanged events. */ onStateChange(handler: (state: TState) => void): void; /** Unsubscribes the handler to stateChanged events. */ offStateChange(handler: (state: TState) => void): void; /** Get the current State */ getState(): TState; } /** * Compare if 2 react components are exact equal, result type will be true/false * * @public */ export declare type AreEqual<A extends (props: any) => JSX.Element | undefined, B extends (props: any) => JSX.Element | undefined> = true extends AreTypeEqual<A, B> & AreParamEqual<A, B> ? true : false; /** * Compare if props of 2 react components are equal, result type will be true/false * * @public */ export declare type AreParamEqual<A extends (props: any) => JSX.Element | undefined, B extends (props: any) => JSX.Element | undefined> = AreTypeEqual<Required<Parameters<A>[0]>, Required<Parameters<B>[0]>>; /** * Compare if two types are equal, result type will be true/false * * @public */ export declare type AreTypeEqual<A, B> = A extends B ? (B extends A ? true : false) : false; /** * @beta * A callback function that defines what actions user can perform on an attachment. * By default, the UI library would have default actions that opens attachment URL in a new tab. * You can override the default actions or add new actions by providing this callback function. * Moreover, you can also return dynamic actions based on the properties in {@link AttachmentMetadata} and/or {@link ChatMessage}. * * @param attachment - The file attachment that user is trying to perform actions on. 
* @param message - The chat message that contains this attachment. * @returns A list of {@link AttachmentMenuAction} that defines the type of actions user can perform on the attachment. */ export declare type AttachmentActionHandler = (attachment: AttachmentMetadata, message?: ChatMessage) => AttachmentMenuAction[]; /** * @beta * * Attachment download options defines the list of actions that can be performed on an attachment. */ export declare interface AttachmentDownloadOptions { actionsForAttachment: AttachmentActionHandler; } /** * @beta * * Attachment menu action defines buttons that can be shown on the attachment card. * If there's one action, it will be shown as a button, if there are multiple actions, it will be shown as a dropdown. */ export declare interface AttachmentMenuAction { name: string; icon: JSX.Element; onClick: (attachment: AttachmentMetadata) => Promise<void>; } /** * Data model that represents a chat message attachment * where it contains an ID to uniquely identify the attachment, * a name that represents the name of file, and * a URL to download the attachment. * * @public */ export declare interface AttachmentMetadata { /** * Unique ID of the attachment. */ id: string; /** * Attachment name to be displayed. */ name: string; /** * Download URL for the attachment. */ url: string; } /** * Data model that represents a chat message attachment being uploaded * where it contains an ID to uniquely identify the attachment, * a name that represents the name of file, * an optional URL to download the attachment, * an optional progress value between 0 and 1 indicating the progress of the upload, and * an optional error object that contains error message would be shown to the user. * * * @beta */ export declare interface AttachmentMetadataInProgress { /** * Unique ID of the attachment. */ id: string; /** * Attachment name to be displayed. */ name: string; /** * Download URL for the attachment. 
*/ url?: string; /** * A number between 0 and 1 indicating the progress of the upload. */ progress?: number; /** * A object contains error message would be shown to the user. */ error?: AttachmentProgressError; } /** * @beta * * Attachment Options that defines behaviour for uploading and downloading attachments. */ export declare interface AttachmentOptions { uploadOptions?: AttachmentUploadOptions; downloadOptions?: AttachmentDownloadOptions; } /** * @beta * A attachment progress error object that contains message to be shown to * the user when upload fails. */ export declare interface AttachmentProgressError { message: string; } /** * @beta * A callback function for handling attachment removed by the user in send box. * * @param attachmentId - The ID of uploaded attachments. */ export declare type AttachmentRemovalHandler = (attachmentId: string) => void; /** * @beta * A callback function for handling list of upload tasks that contains files selected by user to upload. * * @param AttachmentUploads - The list of uploaded attachments. Each attachment is represented by an {@link AttachmentUpload} object. */ export declare type AttachmentSelectionHandler = (attachmentUploads: AttachmentUploadTask[]) => void; /** * @beta */ export declare interface AttachmentUploadOptions { /** * A list of strings containing the comma separated list of supported media (aka. mime) types. * i.e. ['image/*', 'video/*', 'audio/*'] * Default value is `['*']`, meaning all media types are supported. * Similar to the `accept` attribute of the `<input type="attachment" />` element. * @beta */ supportedMediaTypes?: string[]; /** * Disable multiple attachments to be selected if set to `true`. * Default value is `false`, meaning multiple attachments can be selected. * Similar to the `multiple` attribute of the `<input type="attachment" />` element. 
* @beta */ disableMultipleUploads?: boolean; /** * A callback function of type {@link AttachmentSelectionHandler} that will be called * when user finishes selecting files in browser's file picker. This function is required since * this would be where upload logic is implemented to your own storage. * @beta */ handleAttachmentSelection: AttachmentSelectionHandler; /** * A optional callback function that will be called * when user removing files before clicking send message button. This function will be * where you can remove the file from your storage. * @beta */ handleAttachmentRemoval?: AttachmentRemovalHandler; } /** * A upload task represents and manages an attachment that is being uploaded. * When using the Composite, an attachment upload task is created for each file user is selected to upload. * A upload task is complete when notifyUploadCompleted is called. * A upload task is failed when notifyUploadFailed is called. * @beta */ export declare interface AttachmentUploadTask { /** * Unique identifier for the attachment upload task. */ taskId: string; /** * HTML {@link File} object for the uploaded attachment. */ file?: File; /** * {@link Blob} object for the uploaded inline image. */ image?: Blob; /** * Update the progress of the upload changed. * A upload is considered complete when the progress reaches 1. * @param value - number between 0 and 1 */ notifyUploadProgressChanged: (value: number) => void; /** * Mark the upload task as complete. * An attachment is considered completed uploading when ID and URL are provided. * @param id - the unique identifier of the attachment. * @param url - the download URL of the attachment. */ notifyUploadCompleted: (id: string, url: string) => void; /** * Mark the upload task as failed. * @param message - An error message that can be displayed to the user. */ notifyUploadFailed: (message: string) => void; } /** * Custom data attributes for displaying avatar for a user. 
* * @public */ export declare type AvatarPersonaData = { /** * Primary text to display, usually the name of the person. */ text?: string; /** * Image URL to use, should be a square aspect ratio and big enough to fit in the image area. */ imageUrl?: string; /** * The user's initials to display in the image area when there is no image. * @defaultvalue Derived from `text` */ imageInitials?: string; /** * The background color when the user's initials are displayed. * @defaultvalue Derived from `text` */ initialsColor?: PersonaInitialsColor | string; /** * The text color when the user's initials are displayed * @defaultvalue `white` */ initialsTextColor?: string; /** * If true, show the special coin for unknown persona. * It has '?' in place of initials, with static font and background colors */ showUnknownPersonaCoin?: boolean; }; /** * Callback function used to provide custom data to build an avatar for a user. * * @public */ export declare type AvatarPersonaDataCallback = (userId: string) => Promise<AvatarPersonaData>; /** * Arguments for creating the Azure Communication Services implementation of {@link CallAdapter}. * * Note: `displayName` can be a maximum of 256 characters. 
* * @public */ export declare type AzureCommunicationCallAdapterArgs = { userId: CommunicationUserIdentifier; displayName: string; credential: CommunicationTokenCredential; locator: CallAdapterLocator; /** * Optional parameters for the {@link AzureCommunicationCallAdapter} created */ options?: AzureCommunicationCallAdapterOptions; }; /** * Optional parameters to create {@link AzureCommunicationCallAdapter} * * @public */ export declare type AzureCommunicationCallAdapterOptions = CommonCallAdapterOptions; /** * Arguments for {@link createAzureCommunicationCallWithChatAdapter} * * @public */ export declare type AzureCommunicationCallWithChatAdapterArgs = { endpoint: string; userId: CommunicationUserIdentifier; displayName: string; credential: CommunicationTokenCredential; locator: CallAndChatLocator | TeamsMeetingLinkLocator | TeamsMeetingIdLocator; alternateCallerId?: string; callAdapterOptions?: AzureCommunicationCallAdapterOptions; }; /** * Arguments for {@link createAzureCommunicationCallWithChatAdapterFromClient} * * @public */ export declare type AzureCommunicationCallWithChatAdapterFromClientArgs = { callLocator: CallAdapterLocator | TeamsMeetingLinkLocator; callAgent: CallAgent; callClient: StatefulCallClient; chatClient: StatefulChatClient; chatThreadClient: ChatThreadClient; callAdapterOptions?: AzureCommunicationCallAdapterOptions; }; /** * Arguments for creating the Azure Communication Services implementation of {@link ChatAdapter}. * * @public */ export declare type AzureCommunicationChatAdapterArgs = { endpoint: string; userId: CommunicationUserIdentifier; displayName: string; credential: CommunicationTokenCredential; threadId: string; chatAdapterOptions?: ChatAdapterOptions; }; /** * Arguments for creating the Azure Communication Services implementation of {@link CallAdapter}. * * These arguments are used to create an outbound call scenarios. * * Note: `displayName` can be a maximum of 256 characters. 
* * @public */ export declare type AzureCommunicationOutboundCallAdapterArgs = { userId: CommunicationUserIdentifier; displayName: string; credential: CommunicationTokenCredential; targetCallees: StartCallIdentifier[]; /** * A phone number in E.164 format procured using Azure Communication Services that will be used to represent the caller's identity. * E.164 numbers are formatted as [+] [country code] [phone number including area code]. For example, +14255550123 for a US phone number. */ alternateCallerId?: string; /** * Optional parameters for the {@link AzureCommunicationCallAdapter} created */ options?: AzureCommunicationCallAdapterOptions; }; /** * Properties common to all composites exported from this library. * * @public */ export declare interface BaseCompositeProps<TIcons extends Record<string, JSX.Element>> { /** * Fluent theme for the composite. * * @defaultValue light theme */ fluentTheme?: PartialTheme | Theme; /** * Custom Icon override for the composite. * A JSX element can be provided to override the default icon. */ icons?: TIcons; /** * Locale for the composite. * * @defaultValue English (US) */ locale?: CompositeLocale; /** * Whether composite is displayed right-to-left. * * @defaultValue false */ rtl?: boolean; /** * A callback function that can be used to provide custom data to Avatars rendered * in Composite. * * This will not affect the displayName shown in the composite. * The displayName throughout the composite will be what is provided to the adapter when the adapter is created. */ onFetchAvatarPersonaData?: AvatarPersonaDataCallback; /** * A callback function that can be used to provide custom menu items for a participant in * participant list. */ onFetchParticipantMenuItems?: ParticipantMenuItemsCallback; } /** * Basic fluent styles props for all components exported from this library. 
* * @public */ export declare interface BaseCustomStyles { /** Styles for the root container. */ root?: IStyle; } /** * Content blocked message type. * * Content blocked messages will rendered default value, but applications can provide custom strings and icon to renderers. * * @beta */ export declare interface BlockedMessage extends MessageCommon { messageType: 'blocked'; warningText?: string; linkText?: string; link?: string; deletedOn?: Date; senderId?: string; senderDisplayName?: string; status?: MessageStatus; attached?: MessageAttachedStatus; mine?: boolean; } /** * Breakout rooms state * * @public */ export declare interface BreakoutRoomsState { /** Breakout room assigned to local user in call */ assignedBreakoutRoom?: BreakoutRoom; /** Breakout room settings of call. This is defined when call is a breakout room. */ breakoutRoomSettings?: BreakoutRoomsSettings; /** Display name of breakout room. This is defined when call is a breakout room. */ breakoutRoomDisplayName?: string; } /** * @beta * Props for BrowserPermissionDeniedIOS component. */ export declare interface BrowserPermissionDeniedIOSProps extends BrowserPermissionDeniedProps { /** * Localization strings for BrowserPermissionDeniedIOS component. */ strings?: BrowserPermissionDeniedIOSStrings; /** * Link to image source. * * Image is inserted into the top of the component. */ imageSource?: string; } /** * @beta * Strings for BrowserPermissionDeniedIOS component */ export declare interface BrowserPermissionDeniedIOSStrings extends BrowserPermissionDeniedStrings { /** * Image alt text */ imageAltText: string; /** * Main text string. */ primaryText: string; /** * Subtext string. 
*/ secondaryText: string; /** * Step 1 string */ step1Text: string; /** * Step 1 digit string */ step1DigitText: string; /** * Step 2 string */ step2Text: string; /** * Step 2 digit string */ step2DigitText: string; /** * Step 3 string */ step3Text: string; /** * Step 3 digit string */ step3DigitText: string; /** * Step 4 string */ step4Text: string; /** * Step 4 digit string */ step4DigitText: string; } /** * @beta * Props for BrowserPermissionDenied component. */ export declare interface BrowserPermissionDeniedProps { /** * Action to be taken by the more help link. Possible to send to external page or show other modal. */ onTroubleshootingClick?: () => void; /** * Action to be taken by the try again primary button. */ onTryAgainClick?: () => void; /** * Localization strings for BrowserPermissionDenied component. */ strings?: BrowserPermissionDeniedStrings; /** * Allows users to pass in an object contains custom CSS styles. * @Example * ``` * <BrowserPermissionDenied styles={{ primaryButton: { root: {backgroundColor: 'blue' }}}} /> * ``` */ styles?: BrowserPermissionDeniedStyles; } /** * @beta * Strings for BrowserPermissionDenied component */ export declare interface BrowserPermissionDeniedStrings { /** * Main button text string. */ primaryButtonText: string; /** * Main text string. */ primaryText: string; /** * Subtext string. */ secondaryText: string; /** * More help link string. */ linkText: string; } /** * Fluent styles for {@link BrowserPermissionDenied}. * * @beta */ export declare interface BrowserPermissionDeniedStyles extends BaseCustomStyles { /** Styles for the primary button. */ primaryButton?: IButtonStyles; /** Styles for the help troubleshooting link text. */ troubleshootingLink?: ILinkStyles; } /** * An Adapter interface specific for Azure Communication identity which extends {@link CommonCallAdapter}. * * @public */ export declare interface CallAdapter extends CommonCallAdapter { /** * Join the call with microphone initially on/off. 
* @deprecated Use joinCall(options?:JoinCallOptions) instead. * @param microphoneOn - Whether microphone is initially enabled * * @public */ joinCall(microphoneOn?: boolean): Call | undefined; /** * Join the call with options bag to set microphone/camera initial state when joining call * true = turn on the device when joining call * false = turn off the device when joining call * 'keep'/undefined = retain devices' precall state * * @param options - param to set microphone/camera initially on/off/use precall state. * * @public */ joinCall(options?: JoinCallOptions): Call | undefined; /** * Start the call. * * @param participants - An array of participant ids to join * * @public */ startCall(participants: string[], options?: StartCallOptions): Call | undefined; /** * Start the call. * @param participants - An array of {@link @azure/communication-common#CommunicationIdentifier} to be called * @public */ startCall(participants: StartCallIdentifier[], options?: StartCallOptions): Call | undefined; } /** * Payload for {@link CallEndedListener} containing details on the ended call. * * @public */ export declare type CallAdapterCallEndedEvent = { callId: string; code?: number; subCode?: number; }; /** * Functionality for managing the current call or start a new call * @deprecated CallAdapter interface will be flatten, consider using CallAdapter directly * @public */ export declare interface CallAdapterCallManagement extends CallAdapterCallOperations { /** * Join the call with microphone initially on/off. * @deprecated Use joinCall(options?:JoinCallOptions) instead. 
* @param microphoneOn - Whether microphone is initially enabled * * @public */ joinCall(microphoneOn?: boolean): Call | undefined; /** * Join the call with options bag to set microphone/camera initial state when joining call * true = turn on the device when joining call * false = turn off the device when joining call * 'keep'/undefined = retain devices' precall state * * @param options - param to set microphone/camera initially on/off/use precall state. * * @public */ joinCall(options?: JoinCallOptions): Call | undefined; /** * Start the call. * * @param participants - An array of participant ids to join * * @public */ startCall(participants: string[], options?: StartCallOptions): Call | undefined; /** * Start the call. * @param participants - An array of {@link @azure/communication-common#CommunicationIdentifier} to be called * @public */ startCall(participants: StartCallIdentifier[], options?: StartCallOptions): Call | undefined; } /** * Functionality for managing the current call. * * @public */ export declare interface CallAdapterCallOperations { /** * Leave the call * * @param forEveryone - Whether to remove all participants when leaving * * @public */ leaveCall(forEveryone?: boolean): Promise<void>; /** * Start the camera * This method will start rendering a local camera view when the call is not active * * @param options - Options to control how video streams are rendered {@link @azure/communication-calling#VideoStreamOptions } * * @public */ startCamera(options?: VideoStreamOptions): Promise<void>; /** * Stop the camera * This method will stop rendering a local camera view when the call is not active * * @public */ stopCamera(): Promise<void>; /** * Mute the current user during the call or disable microphone locally * * @public */ mute(): Promise<void>; /** * Unmute the current user during the call or enable microphone locally * * @public */ unmute(): Promise<void>; /** * Start sharing the screen during a call. 
* * @public */ startScreenShare(): Promise<void>; /** * Raise hand for current user * * @public */ raiseHand(): Promise<void>; /** * Lower hand for current user * * @public */ lowerHand(): Promise<void>; /** * Send reaction emoji * * @public */ onReactionClick(reaction: Reaction_2): Promise<void>; /** * Stop sharing the screen * * @public */ stopScreenShare(): Promise<void>; /** * Remove a participant from the call. * * @param userId - Id of the participant to be removed * * @public */ removeParticipant(userId: string): Promise<void>; /** * Remove a participant from the call. * @param participant - {@link @azure/communication-common#CommunicationIdentifier} of the participant to be removed * @public */ removeParticipant(participant: CommunicationIdentifier): Promise<void>; /** * Create the html view for a stream. * * @remarks * This method is implemented for composite * * @param remoteUserId - Id of the participant to render, leave it undefined to create the local camera view * @param options - Options to control how video streams are rendered {@link @azure/communication-calling#VideoStreamOptions } * * @public */ createStreamView(remoteUserId?: string, options?: VideoStreamOptions): Promise<void | CreateVideoStreamViewResult>; /** * Dispose the html view for a stream. * * @remarks * This method is implemented for composite * * @deprecated Use {@link disposeRemoteVideoStreamView}, {@link disposeLocalVideoStreamView} and {@link disposeScreenShareStreamView} instead. * * @param remoteUserId - Id of the participant to render, leave it undefined to dispose the local camera view * @param options - Options to control how video streams are rendered {@link @azure/communication-calling#VideoStreamOptions } * * @public */ disposeStreamView(remoteUserId?: string, options?: VideoStreamOptions): Promise<void>; /** * Create the html view for a stream. 
* * @remarks * This method is implemented for composite * * @param options - Options to control how video streams are rendered {@link @azure/communication-calling#VideoStreamOptions } * * @public */ createTogetherModeStreamView(options?: TogetherModeStreamOptions): Promise<void | TogetherModeStreamViewResult>; /** * Start Together mode. * * @public */ startTogetherMode(): Promise<void>; /** * Recalculate the seating positions for together mode. * * @remarks * This method is implemented for composite * * @param width - Width of the container * @param height - Height of the container * * @public */ setTogetherModeSceneSize(width: number, height: number): void; /** * Dispose the html view for a stream. * * @remarks * This method is implemented for composite * * * @public */ disposeTogetherModeStreamView(): Promise<void>; /** * Dispose the html view for a screen share stream * * @remarks * this method is implemented for composite * * @param remoteUserId - Id of the participant to dispose the screen share stream view for. * * @public */ disposeScreenShareStreamView(remoteUserId: string): Promise<void>; /** * Dispose the html view for a remote video stream * * @param remoteUserId - Id of the participant to dispose * * @public */ disposeRemoteVideoStreamView(remoteUserId: string): Promise<void>; /** * Dispose the html view for a local video stream * * @public */ disposeLocalVideoStreamView(): Promise<void>; /** * Holds the call. * * @public */ holdCall(): Promise<void>; /** * Resumes the call from a `LocalHold` state. * * @public */ resumeCall(): Promise<void>; /** * Add a participant to the call. 
* * @public */ addParticipant(participant: PhoneNumberIdentifier, options?: AddPhoneNumberOptions): Promise<void>; addParticipant(participant: CommunicationUserIdentifier): Promise<void>; /** * send dtmf tone to another participant in a 1:1 PSTN call * * @public */ sendDtmfTone(dtmfTone: DtmfTone_2): Promise<void>; /** * Continues into a call when the browser version is not supported. */ allowUnsupportedBrowserVersion(): void; /** * Function to Start captions * @param options - options for start captions */ startCaptions(options?: StartCaptionsAdapterOptions): Promise<void>; /** * Function to set caption language * @param language - language set for caption */ setCaptionLanguage(language: string): Promise<void>; /** * Function to set spoken language * @param language - spoken language */ setSpokenLanguage(language: string): Promise<void>; /** * Function to stop captions */ stopCaptions(options?: StopCaptionsAdapterOptions): Promise<void>; /** * Start the video background effect. * * @public */ startVideoBackgroundEffect(videoBackgroundEffect: VideoBackgroundEffect): Promise<void>; /** * Stop the video background effect. * * @public */ stopVideoBackgroundEffects(): Promise<void>; /** * Override the background picker images for background replacement effect. * * @param backgroundImages - Array of custom background images. * * @public */ updateBackgroundPickerImages(backgroundImages: VideoBackgroundImage[]): void; /** * Update the selected video background effect. * * @public */ updateSelectedVideoBackgroundEffect(selectedVideoBackground: VideoBackgroundEffect): void; /** * Start the noise suppression effect. * */ startNoiseSuppressionEffect(): Promise<void>; /** * Stop the noise suppression effect. * */ stopNoiseSuppressionEffect(): Promise<void>; /** * Send the end of call survey result * * @public */ submitSurvey(survey: CallSurvey): Promise<CallSurveyResponse | undefined>; /** * Start spotlight for local and remote participants by their user ids. 
* If no array of user ids is passed then action is performed on local participant. */ startSpotlight(userIds?: string[]): Promise<void>; /** * Stop spotlight for local and remote participants by their user ids. * If no array of user ids is passed then action is performed on local participant. */ stopSpotlight(userIds?: string[]): Promise<void>; /** * Stop all spotlights */ stopAllSpotlight(): Promise<void>; /** * Mute a participant * * @param userId - Id of the participant to mute */ muteParticipant(userId: string): Promise<void>; /** * Mute All participants */ muteAllRemoteParticipants(): Promise<void>; /** * Return to origin call of breakout room */ returnFromBreakoutRoom(): Promise<void>; /** * Forbid Teams meeting attendees audio by their user ids. */ forbidAudio(userIds: string[]): Promise<void>; /** * Permit Teams meeting attendees audio by their user ids. */ permitAudio(userIds: string[]): Promise<void>; /** * Forbid Teams meeting audio. */ forbidOthersAudio(): Promise<void>; /** * Permit Teams meeting audio. */ permitOthersAudio(): Promise<void>; /** * Forbid Teams meeting attendees video by their user ids. */ forbidVideo(userIds: string[]): Promise<void>; /** * Permit Teams meeting attendees video by their user ids. */ permitVideo(userIds: string[]): Promise<void>; /** * Forbid Teams meeting video. */ forbidOthersVideo(): Promise<void>; /** * Permit Teams meeting video. */ permitOthersVideo(): Promise<void>; /** * Send real time text * @param text - real time text content * @param isFinalized - Boolean to indicate if the real time text is final */ sendRealTimeText: (text: string, isFinalized: boolean) => Promise<void>; } /** * {@link CommonCallAdapter} state inferred from Azure Communication Services backend. * * @public */ export declare type CallAdapterClientState = { userId: CommunicationIdentifierKind; displayName?: string; call?: CallState; /** * State to track who the original call went out to. Will be undefined if the call is not an outbound * modality. 
This includes group calls, rooms calls, and Teams interop meetings. */ targetCallees?: CommunicationIdentifier[]; devices: DeviceManagerState; endedCall?: CallState; /** * State to track whether the call is a teams call. */ isTeamsCall: boolean; /** * State to track whether the call is a teams meeting. */ isTeamsMeeting: boolean; /** * State to track whether the call is a rooms call. */ isRoomsCall: boolean; /** * Latest error encountered for each operation performed via the adapter. */ latestErrors: AdapterErrors; /** * Latest notifications from call client state. */ latestNotifications: AdapterNotifications; /** * Azure communications Phone number to make PSTN calls with. */ alternateCallerId?: string; /** * Environment information about the system the adapter is made on */ environmentInfo?: EnvironmentInfo; /** * State to track whether the local participant's camera is on. To be used when creating a custom * control bar with the CallComposite. */ cameraStatus?: 'On' | 'Off'; /** * Default set of background images for background replacement effect. */ videoBackgroundImages?: VideoBackgroundImage[]; /** * Dependency to be injected for video background effect. */ onResolveVideoEffectDependency?: () => Promise<VideoBackgroundEffectsDependency>; /** * Dependency to be injected for deep noise suppression effect. */ onResolveDeepNoiseSuppressionDependency?: () => Promise<DeepNoiseSuppressionEffectDependency>; /** * State to track whether the noise suppression should be on by default. * @default true */ deepNoiseSuppressionOnByDefault?: boolean; /** * State to track whether to hide the noise suppression button. * @default false */ hideDeepNoiseSuppressionButton?: boolean; /** * State to track the selected video background effect. 
*/ selectedVideoBackgroundEffect?: VideoBackgroundEffect; /** * Call from transfer request accepted by local user */ acceptedTransferCallState?: CallState; /** * Hide attendee names in teams meeting */ hideAttendeeNames?: boolean; /** * State to track the sounds to be used in the call. */ sounds?: CallingSounds; /** * State to track the reactions to be used. * @public */ reactions?: ReactionResources; }; /** * Functionality for managing devices within a call. * * @public */ export declare interface CallAdapterDeviceManagement { /** * Ask for permissions of devices. * * @remarks * Browser permission window will pop up if permissions are not granted yet * * @param constrain - Define constraints for accessing local devices {@link @azure/communication-calling#PermissionConstraints } * * @public */ askDevicePermission(constrain: PermissionConstraints): Promise<DeviceAccess>; /** * Query for available camera devices. * * @remarks * This method should be called after askDevicePermission() * * @return An array of video device information entities {@link @azure/communication-calling#VideoDeviceInfo } * * @public */ queryCameras(): Promise<VideoDeviceInfo[]>; /** * Query for available microphone devices. * * @remarks * This method should be called after askDevicePermission() * * @return An array of audio device information entities {@link @azure/communication-calling#AudioDeviceInfo } * * @public */ queryMicrophones(): Promise<AudioDeviceInfo[]>; /** * Query for available speaker devices. * * @remarks * This method should be called after askDevicePermission() * * @return An array of audio device information entities {@link @azure/communication-calling#AudioDeviceInfo } * * @public */ querySpeakers(): Promise<AudioDeviceInfo[]>; /** * Set the camera to use in the call. 
* * @param sourceInfo - Camera device to choose, pick one returned by {@link CallAdapterDeviceManagement#queryCameras } * @param options - Options to control how the camera stream is rendered {@link @azure/communication-calling#VideoStreamOptions } * * @public */ setCamera(sourceInfo: VideoDeviceInfo, options?: VideoStreamOptions): Promise<void>; /** * Set the microphone to use in the call. * * @param sourceInfo - Microphone device to choose, pick one returned by {@link CallAdapterDeviceManagement#queryMicrophones } * * @public */ setMicrophone(sourceInfo: AudioDeviceInfo): Promise<void>; /** * Set the speaker to use in the call. * * @param sourceInfo - Speaker device to choose, pick one returned by {@link CallAdapterDeviceManagement#querySpeakers } * * @public */ setSpeaker(sourceInfo: AudioDeviceInfo): Promise<void>; } /** * Locator used by {@link createAzureCommunicationCallAdapter} to locate the call to join * * @public */ export declare type CallAdapterLocator = TeamsMeetingLinkLocator | GroupCallLocator | RoomCallLocator | /* @conditional-compile-remove(call-participants-locator) */ CallParticipantsLocator | TeamsMeetingIdLocator; /** * {@link CommonCallAdapter} state. * * @public */ export declare type CallAdapterState = CallAdapterUiState & CallAdapterClientState; /** * Call composite events that can be subscribed to. * * @public */ export declare interface CallAdapterSubscribers { /** * Subscribe function for 'participantsJoined' event. */ on(event: 'participantsJoined', listener: ParticipantsJoinedListener): void; /** * Subscribe function for 'participantsLeft' event. */ on(event: 'participantsLeft', listener: ParticipantsLeftListener): void; /** * Subscribe function for 'isMutedChanged' event. * * @remarks * The event will be triggered whenever current user or remote user mute state changed * */ on(event: 'isMutedChanged', listener: IsMutedChangedListener): void; /** * Subscribe function for 'callIdChanged' event. 
* * @remarks * The event will be triggered when callId of current user changed. * */ on(event: 'callIdChanged', listener: CallIdChangedListener): void; /** * Subscribe function for 'isLocalScreenSharingActiveChanged' event. */ on(event: 'isLocalScreenSharingActiveChanged', listener: IsLocalScreenSharingActiveChangedListener): void; /** * Subscribe function for 'displayNameChanged' event. */ on(event: 'displayNameChanged', listener: DisplayNameChangedListener): void; /** * Subscribe function for 'isSpeakingChanged' event. */ on(event: 'isSpeakingChanged', listener: IsSpeakingChangedListener): void; /** * Subscribe function for 'callEnded' event. */ on(event: 'callEnded', listener: CallEndedListener): void; /** * Subscribe function for 'diagnosticChanged' event. * * This event fires whenever there is a change in user facing diagnostics about the ongoing call. */ on(event: 'diagnosticChanged', listener: DiagnosticChangedEventListner): void; /** * Subscribe function for 'selectedMicrophoneChanged' event. * * This event fires whenever the user selects a new microphone device. */ on(event: 'selectedMicrophoneChanged', listener: PropertyChangedEvent): void; /** * Subscribe function for 'selectedSpeakerChanged' event. * * This event fires whenever the user selects a new speaker device. */ on(event: 'selectedSpeakerChanged', listener: PropertyChangedEvent): void; /** * Subscribe function for 'error' event. */ on(event: 'error', listener: (e: AdapterError) => void): void; /** * Subscribe function for 'captionsReceived' event. */ on(event: 'captionsReceived', listener: CaptionsReceivedListener): void; /** * Subscribe function for 'isCaptionsActiveChanged' event. */ on(event: 'isCaptionsActiveChanged', listener: IsCaptionsActiveChangedListener): void; /** * Subscribe function for 'isCaptionLanguageChanged' event. */ on(event: 'isCaptionLanguageChanged', listener: IsCaptionLanguageChangedListener): void; /** * Subscribe function for 'isSpokenLanguageChanged' event. 
*/ on(event: 'isSpokenLanguageChanged', listener: IsSpokenLanguageChangedListener): void; /** * Subscribe function for 'realTimeTextReceived' event. */ on(event: 'realTimeTextReceived', listener: RealTimeTextReceivedListener): void