@fishjam-cloud/webrtc-client
TypeScript client library for the ExWebRTC/WebRTC endpoint in the Membrane RTC Engine
import { BinaryWriter, BinaryReader } from '@bufbuild/protobuf/wire';
import TypedEmitter from 'typed-emitter';
declare enum Variant {
VARIANT_UNSPECIFIED = 0,
VARIANT_LOW = 1,
VARIANT_MEDIUM = 2,
VARIANT_HIGH = 3,
UNRECOGNIZED = -1
}
/** Contains information about an ICE candidate which will be sent to the peer/server */
interface Candidate {
candidate: string;
sdpMLineIndex: number;
sdpMid: string;
usernameFragment: string;
}
declare const Candidate: MessageFns$2<Candidate>;
type Builtin$2 = Date | Function | Uint8Array | string | number | boolean | undefined;
type DeepPartial$2<T> = T extends Builtin$2 ? T : T extends globalThis.Array<infer U> ? globalThis.Array<DeepPartial$2<U>> : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial$2<U>> : T extends {} ? {
[K in keyof T]?: DeepPartial$2<T[K]>;
} : Partial<T>;
type KeysOfUnion$2<T> = T extends T ? keyof T : never;
type Exact$2<P, I extends P> = P extends Builtin$2 ? P : P & {
[K in keyof P]: Exact$2<P[K], I[K]>;
} & {
[K in Exclude<keyof I, KeysOfUnion$2<P>>]: never;
};
interface MessageFns$2<T> {
encode(message: T, writer?: BinaryWriter): BinaryWriter;
decode(input: BinaryReader | Uint8Array, length?: number): T;
fromJSON(object: any): T;
toJSON(message: T): unknown;
create<I extends Exact$2<DeepPartial$2<T>, I>>(base?: I): T;
fromPartial<I extends Exact$2<DeepPartial$2<T>, I>>(object: I): T;
}
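/*
 * A minimal sketch (assumed usage) of the generated protobuf helpers above:
 * `encode(...).finish()` yields the wire bytes and `decode` parses them back.
 * The candidate values are illustrative only.
 *
 * const bytes: Uint8Array = Candidate.encode({
 *   candidate: "candidate:0 1 UDP 2122252543 192.0.2.1 49203 typ host",
 *   sdpMLineIndex: 0,
 *   sdpMid: "0",
 *   usernameFragment: "abcd",
 * }).finish();
 * const parsed: Candidate = Candidate.decode(bytes);
 */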
/** Defines any type of message sent from Peer to Membrane RTC Engine */
interface MediaEvent$1 {
connect?: MediaEvent_Connect | undefined;
disconnect?: MediaEvent_Disconnect | undefined;
updateEndpointMetadata?: MediaEvent_UpdateEndpointMetadata | undefined;
updateTrackMetadata?: MediaEvent_UpdateTrackMetadata | undefined;
renegotiateTracks?: MediaEvent_RenegotiateTracks | undefined;
candidate?: Candidate | undefined;
sdpOffer?: MediaEvent_SdpOffer | undefined;
trackBitrates?: MediaEvent_TrackBitrates | undefined;
enableTrackVariant?: MediaEvent_EnableTrackVariant | undefined;
disableTrackVariant?: MediaEvent_DisableTrackVariant | undefined;
setTargetTrackVariant?: MediaEvent_SetTargetTrackVariant | undefined;
unmuteTrack?: MediaEvent_UnmuteTrack | undefined;
}
declare const MediaEvent$1: MessageFns$1<MediaEvent$1>;
interface MediaEvent_VariantBitrate {
variant: Variant;
bitrate: number;
}
declare const MediaEvent_VariantBitrate: MessageFns$1<MediaEvent_VariantBitrate>;
/** Sent when a peer wants to join the WebRTC Endpoint. */
interface MediaEvent_Connect {
metadataJson: string;
}
declare const MediaEvent_Connect: MessageFns$1<MediaEvent_Connect>;
/** Sent when a peer disconnects from the WebRTC Endpoint. */
interface MediaEvent_Disconnect {
}
declare const MediaEvent_Disconnect: MessageFns$1<MediaEvent_Disconnect>;
/** Sent when a peer wants to update its metadata */
interface MediaEvent_UpdateEndpointMetadata {
metadataJson: string;
}
declare const MediaEvent_UpdateEndpointMetadata: MessageFns$1<MediaEvent_UpdateEndpointMetadata>;
/** Sent when a peer wants to update its track's metadata */
interface MediaEvent_UpdateTrackMetadata {
trackId: string;
metadataJson: string;
}
declare const MediaEvent_UpdateTrackMetadata: MessageFns$1<MediaEvent_UpdateTrackMetadata>;
/** Sent when a peer wants to renegotiate the connection due to adding or removing a track */
interface MediaEvent_RenegotiateTracks {
}
declare const MediaEvent_RenegotiateTracks: MessageFns$1<MediaEvent_RenegotiateTracks>;
/**
* Sent as a response to the `offerData` media event during renegotiation.
* The maps contain information only about the current peer's `sendonly` tracks.
* The "mid" is an identifier used to associate an RTP packet with an MLine from the SDP offer/answer.
*/
interface MediaEvent_SdpOffer {
/** The value of the `sessionDescription.sdp` */
sdp: string;
trackIdToMetadataJson: {
[key: string]: string;
};
/** Maps track_id to its bitrate. The track_id in the TrackBitrates message is ignored (we use the map key), so it can be omitted. */
trackIdToBitrates: {
[key: string]: MediaEvent_TrackBitrates;
};
midToTrackId: {
[key: string]: string;
};
}
declare const MediaEvent_SdpOffer: MessageFns$1<MediaEvent_SdpOffer>;
/** Sent when Peer wants to update its track's bitrate */
interface MediaEvent_TrackBitrates {
trackId: string;
/** Bitrate of each variant. For non-simulcast tracks use VARIANT_UNSPECIFIED. */
variantBitrates: MediaEvent_VariantBitrate[];
}
declare const MediaEvent_TrackBitrates: MessageFns$1<MediaEvent_TrackBitrates>;
/** Sent when client disables one of the track variants */
interface MediaEvent_DisableTrackVariant {
trackId: string;
variant: Variant;
}
declare const MediaEvent_DisableTrackVariant: MessageFns$1<MediaEvent_DisableTrackVariant>;
/** Sent when client enables one of the track variants */
interface MediaEvent_EnableTrackVariant {
trackId: string;
variant: Variant;
}
declare const MediaEvent_EnableTrackVariant: MessageFns$1<MediaEvent_EnableTrackVariant>;
interface MediaEvent_SetTargetTrackVariant {
trackId: string;
variant: Variant;
}
declare const MediaEvent_SetTargetTrackVariant: MessageFns$1<MediaEvent_SetTargetTrackVariant>;
interface MediaEvent_UnmuteTrack {
trackId: string;
}
declare const MediaEvent_UnmuteTrack: MessageFns$1<MediaEvent_UnmuteTrack>;
type Builtin$1 = Date | Function | Uint8Array | string | number | boolean | undefined;
type DeepPartial$1<T> = T extends Builtin$1 ? T : T extends globalThis.Array<infer U> ? globalThis.Array<DeepPartial$1<U>> : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial$1<U>> : T extends {} ? {
[K in keyof T]?: DeepPartial$1<T[K]>;
} : Partial<T>;
type KeysOfUnion$1<T> = T extends T ? keyof T : never;
type Exact$1<P, I extends P> = P extends Builtin$1 ? P : P & {
[K in keyof P]: Exact$1<P[K], I[K]>;
} & {
[K in Exclude<keyof I, KeysOfUnion$1<P>>]: never;
};
interface MessageFns$1<T> {
encode(message: T, writer?: BinaryWriter): BinaryWriter;
decode(input: BinaryReader | Uint8Array, length?: number): T;
fromJSON(object: any): T;
toJSON(message: T): unknown;
create<I extends Exact$1<DeepPartial$1<T>, I>>(base?: I): T;
fromPartial<I extends Exact$1<DeepPartial$1<T>, I>>(object: I): T;
}
interface MediaEvent_Track_SimulcastConfig {
enabled: boolean;
enabledVariants: Variant[];
disabledVariants: Variant[];
}
declare const MediaEvent_Track_SimulcastConfig: MessageFns<MediaEvent_Track_SimulcastConfig>;
interface MediaEvent_OfferData_TrackTypes {
audio: number;
video: number;
}
declare const MediaEvent_OfferData_TrackTypes: MessageFns<MediaEvent_OfferData_TrackTypes>;
type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
type DeepPartial<T> = T extends Builtin ? T : T extends globalThis.Array<infer U> ? globalThis.Array<DeepPartial<U>> : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>> : T extends {} ? {
[K in keyof T]?: DeepPartial<T[K]>;
} : Partial<T>;
type KeysOfUnion<T> = T extends T ? keyof T : never;
type Exact<P, I extends P> = P extends Builtin ? P : P & {
[K in keyof P]: Exact<P[K], I[K]>;
} & {
[K in Exclude<keyof I, KeysOfUnion<P>>]: never;
};
interface MessageFns<T> {
encode(message: T, writer?: BinaryWriter): BinaryWriter;
decode(input: BinaryReader | Uint8Array, length?: number): T;
fromJSON(object: any): T;
toJSON(message: T): unknown;
create<I extends Exact<DeepPartial<T>, I>>(base?: I): T;
fromPartial<I extends Exact<DeepPartial<T>, I>>(object: I): T;
}
type SerializedMediaEvent = Uint8Array;
interface MediaEvent {
type: keyof MediaEvent$1;
key?: string;
data?: any;
}
type MediaStreamTrackId = string;
type TrackKind = 'audio' | 'video';
/**
* Type describing Voice Activity Detection statuses.
*
* - `speech` - voice activity has been detected
* - `silence` - lack of voice activity has been detected
*/
type VadStatus = 'speech' | 'silence';
/**
* Type describing maximal bandwidth that can be used, in kbps. 0 is interpreted as unlimited bandwidth.
*/
type BandwidthLimit = number;
/**
* Type describing the bandwidth limit for a simulcast track.
* It is a mapping (variant => BandwidthLimit).
* If a variant is not present in the mapping, that variant is assumed to have no bandwidth limit.
*/
type SimulcastBandwidthLimit = Map<Variant, BandwidthLimit>;
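/*
 * A minimal sketch (assumed usage): a per-variant limit for a simulcast track,
 * in kbps. The numbers are illustrative; variants absent from the map are unlimited.
 *
 * const limit: SimulcastBandwidthLimit = new Map([
 *   [Variant.VARIANT_LOW, 150],
 *   [Variant.VARIANT_MEDIUM, 500],
 *   [Variant.VARIANT_HIGH, 1500],
 * ]);
 */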
/**
* Type describing bandwidth limitation of a Track, including simulcast and non-simulcast tracks.
* A sum type of `BandwidthLimit` and `SimulcastBandwidthLimit`
*/
type TrackBandwidthLimit = BandwidthLimit | SimulcastBandwidthLimit;
/**
* Type describing possible reasons for currently selected encoding.
* - `other` - the exact reason couldn't be determined
* - `encodingInactive` - previously selected encoding became inactive
* - `lowBandwidth` - there is no longer enough bandwidth to maintain previously selected encoding
*/
type EncodingReason = 'other' | 'encodingInactive' | 'lowBandwidth';
/**
* Track's context i.e. all data that can be useful when operating on track.
*/
interface TrackContextFields {
readonly track: MediaStreamTrack | null;
/**
* Stream this track belongs to.
*/
readonly stream: MediaStream | null;
/**
* Endpoint this track comes from.
*/
readonly endpoint: Endpoint;
/**
* Track id. It is generated by the RTC engine and takes the form `endpoint_id:<random_uuidv4>`.
* It is WebRTC-agnostic, i.e. it does not contain `mid` or `stream id`.
*/
readonly trackId: string;
/**
* Simulcast configuration.
* Only present for local tracks.
*/
readonly simulcastConfig?: MediaEvent_Track_SimulcastConfig;
/**
* Any info that was passed in {@link WebRTCEndpoint.addTrack}.
*/
readonly metadata?: unknown;
readonly maxBandwidth?: TrackBandwidthLimit;
readonly vadStatus: VadStatus;
/**
* Encoding that is currently received.
* Only present for remote tracks.
*/
readonly encoding?: Variant;
/**
* The reason of currently selected encoding.
* Only present for remote tracks.
*/
readonly encodingReason?: EncodingReason;
}
interface TrackContextEvents {
/**
* Emitted each time track encoding has changed.
*
* Track encoding can change in the following cases:
* - when user requested a change
* - when sender stopped sending some encoding (because of bandwidth change)
* - when receiver doesn't have enough bandwidth
*
* Some of those reasons are indicated in {@link TrackContext.encodingReason}.
*/
encodingChanged: (context: TrackContext) => void;
/**
* Emitted every time an update about voice activity is received from the server.
*/
voiceActivityChanged: (context: TrackContext) => void;
}
interface TrackContext extends TrackContextFields, TypedEmitter<Required<TrackContextEvents>> {
}
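/*
 * A minimal sketch (assumed usage): subscribing to the per-track events above.
 * `ctx` is a TrackContext obtained e.g. from {@link WebRTCEndpointEvents.trackReady}.
 *
 * ctx.on("encodingChanged", (context) => {
 *   console.log(context.trackId, context.encoding, context.encodingReason);
 * });
 * ctx.on("voiceActivityChanged", (context) => {
 *   console.log(context.trackId, context.vadStatus);
 * });
 */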
type TrackNegotiationStatus = 'awaiting' | 'offered' | 'done';
/**
* Events emitted by the {@link WebRTCEndpoint} instance.
*/
interface WebRTCEndpointEvents {
/**
* Emitted each time the WebRTCEndpoint needs to send some data to the server.
*/
sendMediaEvent: (mediaEvent: SerializedMediaEvent) => void;
/**
* Emitted when the endpoint of this {@link WebRTCEndpoint} instance is ready. Triggered by {@link WebRTCEndpoint.connect}.
*/
connected: (endpointId: string, otherEndpoints: Endpoint[]) => void;
/**
* Emitted when the endpoint of this {@link WebRTCEndpoint} instance was removed.
*/
disconnected: () => void;
/**
* Emitted when data in a new track arrives.
*
* This event is always emitted after {@link trackAdded}.
* It informs the user that data related to the given track has arrived and can be played or displayed.
*/
trackReady: (ctx: TrackContext) => void;
/**
* Emitted each time an endpoint that is already in the room adds a new track. The `track` and `stream` fields will be set to null.
* These fields will be set to non-null values in {@link trackReady}.
*/
trackAdded: (ctx: TrackContext) => void;
/**
* Emitted when some track will no longer be sent.
*
* It will also be emitted before {@link endpointRemoved} for each track of this endpoint.
*/
trackRemoved: (ctx: TrackContext) => void;
/**
* Emitted each time an endpoint has its track metadata updated.
*/
trackUpdated: (ctx: TrackContext) => void;
/**
* Emitted each time a new endpoint is added to the room.
*/
endpointAdded: (endpoint: Endpoint) => void;
/**
* Emitted each time an endpoint is removed; emitted only for other endpoints.
*/
endpointRemoved: (endpoint: Endpoint) => void;
/**
* Emitted each time an endpoint has its metadata updated.
*/
endpointUpdated: (endpoint: Endpoint) => void;
/**
* Emitted in case of errors related to the multimedia session, e.g. the ICE connection.
*/
connectionError: (error: {
message: string;
event: Event;
}) => void;
/**
* Emitted in case of errors related to signaling.
*/
signalingError: (error: {
message: string;
}) => void;
/**
* Emitted when the priority of video tracks has changed.
* Currently, this event is emitted only when the DisplayManager in the RTC Engine
* is enabled and simulcast is disabled.
*
* @param enabledTracks - list of tracks which will be sent to the client from the SFU
* @param disabledTracks - list of tracks which will not be sent to the client from the SFU
*/
tracksPriorityChanged: (enabledTracks: TrackContext[], disabledTracks: TrackContext[]) => void;
/**
* Emitted every time the server estimates the client's bandwidth.
*
* @param {bigint} estimation - client's available incoming bitrate estimated
* by the server. It's measured in bits per second.
*/
bandwidthEstimationChanged: (estimation: bigint) => void;
/**
* Emitted each time a track encoding has been disabled.
*/
trackEncodingDisabled: (context: TrackContext, encoding: Variant) => void;
/**
* Emitted each time a track encoding has been enabled.
*/
trackEncodingEnabled: (context: TrackContext, encoding: Variant) => void;
targetTrackEncodingRequested: (event: {
trackId: string;
variant: Variant;
}) => void;
disconnectRequested: (event: any) => void;
localTrackAdded: (event: {
trackId: string;
track: MediaStreamTrack;
stream: MediaStream;
trackMetadata?: unknown;
simulcastConfig: MediaEvent_Track_SimulcastConfig;
maxBandwidth: TrackBandwidthLimit;
}) => void;
localTrackRemoved: (event: {
trackId: string;
}) => void;
localTrackReplaced: (event: {
trackId: string;
track: MediaStreamTrack | null;
}) => void;
localTrackMuted: (event: {
trackId: string;
}) => void;
localTrackUnmuted: (event: {
trackId: string;
}) => void;
localTrackBandwidthSet: (event: {
trackId: string;
bandwidth: BandwidthLimit;
}) => void;
localTrackEncodingBandwidthSet: (event: {
trackId: string;
rid: Variant;
bandwidth: BandwidthLimit;
}) => void;
localTrackEncodingEnabled: (event: {
trackId: string;
encoding: Variant;
}) => void;
localTrackEncodingDisabled: (event: {
trackId: string;
encoding: Variant;
}) => void;
localEndpointMetadataChanged: (event: {
metadata: unknown;
}) => void;
localTrackMetadataChanged: (event: {
trackId: string;
metadata: unknown;
}) => void;
}
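/*
 * A minimal sketch (assumed usage): subscribing to the core events above.
 * `webrtc` is a WebRTCEndpoint instance (declared further below).
 *
 * webrtc.on("endpointAdded", (endpoint) => console.log("joined:", endpoint.id));
 * webrtc.on("trackReady", (ctx) => {
 *   // ctx.track and ctx.stream are non-null here and can be attached to a media element
 * });
 * webrtc.on("trackRemoved", (ctx) => console.log("track removed:", ctx.trackId));
 * webrtc.on("bandwidthEstimationChanged", (estimation) => {
 *   console.log(`estimated downlink: ${estimation} bps`);
 * });
 */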
/**
* Interface describing Endpoint.
*/
interface Endpoint {
/**
* Endpoint's id. It is assigned by the user in custom logic that uses the backend API.
*/
id: string;
/**
* Type of the endpoint, e.g. "webrtc", "hls" or "rtsp".
*/
type: string;
/**
* Any information that was provided in {@link WebRTCEndpoint.connect}.
*/
metadata?: unknown;
/**
* List of tracks that are sent by the endpoint.
*/
tracks: Map<string, TrackContext>;
}
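/*
 * A minimal sketch (assumed usage): listing every remote endpoint and its tracks.
 * `webrtc` is a connected WebRTCEndpoint instance.
 *
 * for (const endpoint of Object.values(webrtc.getRemoteEndpoints())) {
 *   for (const [trackId, ctx] of endpoint.tracks) {
 *     console.log(endpoint.id, endpoint.type, trackId, ctx.metadata);
 *   }
 * }
 */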
declare class ConnectionManager {
private readonly connection;
constructor(iceServers: RTCIceServer[]);
isConnectionUnstable: () => boolean;
getConnection: () => RTCPeerConnection;
addTransceiversIfNeeded: (serverTracks: MediaEvent_OfferData_TrackTypes) => void;
addTransceiver: (track: MediaStreamTrack, transceiverConfig: RTCRtpTransceiverInit) => void;
setOnTrackReady: (onTrackReady: (event: RTCTrackEvent) => void) => void;
setRemoteDescription: (data: RTCSessionDescriptionInit) => Promise<void>;
isTrackInUse: (track: MediaStreamTrack) => boolean;
removeTrack: (sender: RTCRtpSender) => void;
findSender: (mediaStreamTrackId: MediaStreamTrackId) => RTCRtpSender;
addIceCandidate: (iceCandidate: RTCIceCandidate) => Promise<void>;
}
declare const TrackContextImpl_base: new () => TypedEmitter<Required<TrackContextEvents>>;
declare class TrackContextImpl extends TrackContextImpl_base implements TrackContext {
endpoint: Endpoint;
trackId: string;
track: MediaStreamTrack | null;
trackKind: TrackKind | null;
stream: MediaStream | null;
metadata?: unknown;
metadataParsingError?: any;
simulcastConfig?: MediaEvent_Track_SimulcastConfig;
maxBandwidth: TrackBandwidthLimit;
encoding?: Variant;
encodingReason?: EncodingReason;
vadStatus: VadStatus;
negotiationStatus: TrackNegotiationStatus;
pendingMetadataUpdate: boolean;
constructor(endpoint: Endpoint, trackId: string, metadata: any, simulcastConfig?: MediaEvent_Track_SimulcastConfig);
}
type EndpointWithTrackContext = Omit<Endpoint, 'tracks'> & {
tracks: Map<string, TrackContextImpl>;
};
declare const WebRTCEndpoint_base: new () => TypedEmitter<Required<WebRTCEndpointEvents>>;
/**
* Main class that is responsible for connecting to the RTC Engine, sending and receiving media.
*/
declare class WebRTCEndpoint extends WebRTCEndpoint_base {
private readonly localTrackManager;
private readonly remote;
private readonly local;
private readonly commandsQueue;
private proposedIceServers;
bandwidthEstimation: bigint;
connectionManager?: ConnectionManager;
private clearConnectionCallbacks;
constructor();
/**
* Tries to connect to the RTC Engine. If the user is successfully connected, then {@link WebRTCEndpointEvents.connected}
* will be emitted.
*
* @param metadata - Any information that other endpoints will receive in {@link WebRTCEndpointEvents.endpointAdded}
* after accepting this endpoint
*
* @example
* ```ts
* let webrtc = new WebRTCEndpoint();
* webrtc.connect({displayName: "Bob"});
* ```
*/
connect: (metadata: unknown) => void;
/**
* Feeds media event received from RTC Engine to {@link WebRTCEndpoint}.
* This function should be called whenever some media event from RTC Engine
* was received and can result in {@link WebRTCEndpoint} generating some other
* media events.
*
* @param mediaEvent - Binary data received over a custom signaling layer.
*
* @example
* This example assumes Phoenix Channels as the signaling layer.
* Since Phoenix Channels require objects, the RTC Engine wraps the binary data
* in a map with a single field, which arrives as an object with a single field on the TS side.
* ```ts
* webrtcChannel.on("mediaEvent", (event) => webrtc.receiveMediaEvent(event.data));
* ```
*/
receiveMediaEvent: (mediaEvent: SerializedMediaEvent) => Promise<void>;
private getEndpointId;
private onTrackReady;
/**
* Retrieves statistics related to the RTCPeerConnection.
* These statistics provide insights into the performance and status of the connection.
*
* @return {Promise<RTCStatsReport>}
*
* @see {@link https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/getStats | MDN Web Docs: RTCPeerConnection.getStats()}
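* @example
* ```ts
* // e.g. log every inbound RTP stats entry
* const stats = await webrtc.getStatistics();
* stats.forEach((report) => {
*   if (report.type === "inbound-rtp") console.log(report);
* });
* ```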
*/
getStatistics(selector?: MediaStreamTrack | null): Promise<RTCStatsReport>;
/**
* Returns a snapshot of currently received remote tracks.
*
* @example
* ```ts
* if (webRTCEndpoint.getRemoteTracks()[trackId]?.simulcastConfig?.enabled) {
*   webRTCEndpoint.setTargetTrackEncoding(trackId, encoding);
* }
* ```
*/
getRemoteTracks(): Record<string, TrackContext>;
/**
* Returns a snapshot of currently received remote endpoints.
*/
getRemoteEndpoints(): Record<string, EndpointWithTrackContext>;
getLocalEndpoint(): EndpointWithTrackContext;
getBandwidthEstimation(): bigint;
private handleMediaEvent;
private onSdpAnswer;
/**
* Adds a track that will be sent to the RTC Engine.
* @param track - Audio or video track, e.g. from your microphone or camera.
* @param trackMetadata - Any information about this track that other endpoints will
* receive in {@link WebRTCEndpointEvents.trackAdded}. E.g. this can be the source of the track: whether it's
* screensharing, webcam or some other media device.
* @param simulcastConfig - Simulcast configuration. By default simulcast is disabled.
* For more information refer to {@link SimulcastConfig}.
* @param maxBandwidth - Maximal bandwidth this track can use.
* Defaults to 0, which is unlimited.
* This option has no effect for simulcast and audio tracks.
* For simulcast tracks use {@link WebRTCEndpoint.setTrackBandwidth}.
* @returns {Promise<string>} Resolves with the id of the added track
* @example
* ```ts
* const localStream: MediaStream = new MediaStream();
* try {
*   // AUDIO_CONSTRAINTS is an application-defined MediaStreamConstraints object
*   const localAudioStream = await navigator.mediaDevices.getUserMedia(
*     AUDIO_CONSTRAINTS
*   );
*   localAudioStream
*     .getTracks()
*     .forEach((track) => localStream.addTrack(track));
* } catch (error) {
*   console.error("Couldn't get microphone permission:", error);
* }
*
* try {
*   const localVideoStream = await navigator.mediaDevices.getUserMedia(
*     VIDEO_CONSTRAINTS
*   );
*   localVideoStream
*     .getTracks()
*     .forEach((track) => localStream.addTrack(track));
* } catch (error) {
*   console.error("Couldn't get camera permission:", error);
* }
*
* for (const track of localStream.getTracks()) {
*   await webrtc.addTrack(track);
* }
* ```
*/
addTrack(track: MediaStreamTrack, trackMetadata?: unknown, simulcastConfig?: MediaEvent_Track_SimulcastConfig, maxBandwidth?: TrackBandwidthLimit): Promise<string>;
/**
* Replaces a track that is being sent to the RTC Engine.
* @param {string} trackId - Id of the audio or video track to replace.
* @param {MediaStreamTrack | null} newTrack
* @returns {Promise<void>}
* @example
* ```ts
* // setup camera
* const localStream: MediaStream = new MediaStream();
* try {
*   const localVideoStream = await navigator.mediaDevices.getUserMedia(
*     VIDEO_CONSTRAINTS
*   );
*   localVideoStream
*     .getTracks()
*     .forEach((track) => localStream.addTrack(track));
* } catch (error) {
*   console.error("Couldn't get camera permission:", error);
* }
*
* let oldTrackId;
* for (const track of localStream.getTracks()) {
*   oldTrackId = await webrtc.addTrack(track);
* }
*
* // change camera
* const videoDeviceId = "abcd-1234";
* navigator.mediaDevices
*   .getUserMedia({
*     video: {
*       ...(VIDEO_CONSTRAINTS as {}),
*       deviceId: { exact: videoDeviceId },
*     },
*   })
*   .then((stream) => {
*     const videoTrack = stream.getVideoTracks()[0];
*     webrtc.replaceTrack(oldTrackId, videoTrack);
*   })
*   .catch((error) => {
*     console.error("Error switching camera", error);
*   });
* ```
*/
replaceTrack(trackId: string, newTrack: MediaStreamTrack | null): Promise<void>;
/**
* Updates the maximum bandwidth for the track identified by trackId.
* This value directly translates to the quality of the stream and, in the case of video, to the amount of RTP packets being sent.
* If trackId points at a simulcast track, the bandwidth is split between all of the variant streams proportionally to their resolution.
*
* @param {string} trackId
* @param {BandwidthLimit} bandwidth - in kbps
* @returns {Promise<void>}
*/
setTrackBandwidth(trackId: string, bandwidth: BandwidthLimit): Promise<void>;
/**
* Updates maximum bandwidth for the given simulcast encoding of the given track.
*
* @param {string} trackId - id of the track
* @param {Variant} rid - rid of the encoding
* @param {BandwidthLimit} bandwidth - desired max bandwidth used by the encoding (in kbps)
* @returns {Promise<void>}
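* @example
* ```ts
* // Illustrative values: cap the low simulcast variant at 150 kbps
* await webrtc.setEncodingBandwidth(trackId, Variant.VARIANT_LOW, 150);
* ```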
*/
setEncodingBandwidth(trackId: string, rid: Variant, bandwidth: BandwidthLimit): Promise<void>;
/**
* Removes a track from connection that was sent to the RTC Engine.
* @param {string} trackId - Id of audio or video track to remove.
* @example
* ```ts
* // setup camera
* const localStream: MediaStream = new MediaStream();
* try {
*   const localVideoStream = await navigator.mediaDevices.getUserMedia(
*     VIDEO_CONSTRAINTS
*   );
*   localVideoStream
*     .getTracks()
*     .forEach((track) => localStream.addTrack(track));
* } catch (error) {
*   console.error("Couldn't get camera permission:", error);
* }
*
* let trackId;
* for (const track of localStream.getTracks()) {
*   trackId = await webrtc.addTrack(track);
* }
*
* // remove track
* await webrtc.removeTrack(trackId);
* ```
*/
removeTrack(trackId: string): Promise<void>;
/**
* Sets the track variant that the server should send to the client library.
*
* The variant will be sent whenever it is available.
* If the chosen variant is temporarily unavailable, some other variant
* will be sent until the chosen variant becomes active again.
*
* @param {string} trackId - id of the track
* @param {Variant} variant - variant to receive
* @example
* ```ts
* webrtc.setTargetTrackEncoding(incomingTrackCtx.trackId, Variant.VARIANT_LOW)
* ```
*/
setTargetTrackEncoding(trackId: string, variant: Variant): void;
/**
* Enables track encoding so that it will be sent to the server.
* @param {string} trackId - id of the track
* @param {Variant} encoding - encoding that will be enabled
* @example
* ```ts
* const trackId = await webrtc.addTrack(track, {}, { enabled: true, enabledVariants: [Variant.VARIANT_LOW, Variant.VARIANT_MEDIUM, Variant.VARIANT_HIGH], disabledVariants: [] });
* webrtc.disableTrackEncoding(trackId, Variant.VARIANT_LOW);
* // wait some time
* webrtc.enableTrackEncoding(trackId, Variant.VARIANT_LOW);
* ```
*/
enableTrackEncoding: (trackId: string, encoding: Variant) => Promise<void>;
/**
* Disables track encoding so that it will be no longer sent to the server.
* @param {string} trackId - id of the track
* @param {Variant} encoding - encoding that will be disabled
* @example
* ```ts
* const trackId = await webrtc.addTrack(track, {}, { enabled: true, enabledVariants: [Variant.VARIANT_LOW, Variant.VARIANT_MEDIUM, Variant.VARIANT_HIGH], disabledVariants: [] });
* webrtc.disableTrackEncoding(trackId, Variant.VARIANT_LOW);
* ```
*/
disableTrackEncoding: (trackId: string, encoding: Variant) => Promise<void>;
/**
* Updates the metadata for the current endpoint.
* @param metadata - Data about this endpoint that other endpoints will receive upon being added.
*
* If the metadata differs from what is already tracked in the room, the
* `endpointUpdated` event will be emitted for the other endpoints in the room.
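* @example
* ```ts
* // Illustrative metadata shape; the engine treats it as opaque
* webrtc.updateEndpointMetadata({ displayName: "Alice" });
* ```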
*/
updateEndpointMetadata: (metadata: any) => void;
/**
* Updates the metadata for a specific track.
* @param trackId - Id (returned by {@link addTrack}) of the audio or video track.
* @param trackMetadata - Data about this track that other endpoints will receive upon being added.
*
* If the metadata differs from what is already tracked in the room, the
* `trackUpdated` event will be emitted for the other endpoints in the room.
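* @example
* ```ts
* // Illustrative metadata shape; the engine treats it as opaque
* webrtc.updateTrackMetadata(trackId, { source: "camera", muted: false });
* ```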
*/
updateTrackMetadata: (trackId: string, trackMetadata: any) => void;
/**
* Disconnects from the room. This function should be called when the user disconnects from the room
* in a clean way, e.g. by clicking a dedicated, custom `disconnect` button.
* As a result, one more media event is generated and should be
* sent to the RTC Engine. Thanks to it, every other endpoint will be notified
* that this endpoint was removed, via {@link WebRTCEndpointEvents.endpointRemoved}.
*/
disconnect: () => void;
/**
* Cleans up the {@link WebRTCEndpoint} instance.
*/
cleanUp: () => void;
private getTrackId;
private sendMediaEvent;
private createAndSendOffer;
private onOfferData;
private setupConnectionListeners;
private createNewConnection;
private onRemoteCandidate;
private onLocalCandidate;
private onIceCandidateError;
private onConnectionStateChange;
private onIceConnectionStateChange;
}
export { type BandwidthLimit, type EncodingReason, type Endpoint, type MediaEvent, type SerializedMediaEvent, type SimulcastBandwidthLimit, MediaEvent_Track_SimulcastConfig as SimulcastConfig, type TrackBandwidthLimit, type TrackContext, type TrackContextEvents, type TrackKind, type VadStatus, Variant, WebRTCEndpoint, type WebRTCEndpointEvents };
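/*
 * A minimal end-to-end sketch (assumed usage), with a hypothetical WebSocket
 * as the signaling transport between this client and the RTC Engine:
 *
 * const webrtc = new WebRTCEndpoint();
 * const socket = new WebSocket("wss://example.com/signaling");
 * socket.binaryType = "arraybuffer";
 * webrtc.on("sendMediaEvent", (mediaEvent) => socket.send(mediaEvent));
 * socket.addEventListener("message", (event) => {
 *   webrtc.receiveMediaEvent(new Uint8Array(event.data));
 * });
 * socket.addEventListener("open", () => webrtc.connect({ displayName: "Bob" }));
 * webrtc.on("connected", async () => {
 *   const stream = await navigator.mediaDevices.getUserMedia({ audio: true, video: true });
 *   for (const track of stream.getTracks()) {
 *     await webrtc.addTrack(track);
 *   }
 * });
 */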