@anam-ai/js-sdk
Client side JavaScript SDK for Anam AI
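// __awaiter is the runtime helper the TypeScript compiler emits to drive the compiled async methods below.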
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
import { CONNECTION_CLOSED_CODE_MICROPHONE_PERMISSION_DENIED, CONNECTION_CLOSED_CODE_NORMAL, CONNECTION_CLOSED_CODE_WEBRTC_FAILURE, } from '../lib/constants';
import { EngineApiRestClient, SignallingClient, } from '../modules';
import { AnamEvent, InternalEvent, SignalMessageAction, } from '../types';
import { TalkMessageStream } from '../types/TalkMessageStream';
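/**
 * Manages the WebRTC connection to the Anam engine: signalling, remote
 * audio/video track handling, microphone capture and the chat data channel.
 */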
export class StreamingClient {
constructor(sessionId, options, publicEventEmitter, internalEventEmitter) {
this.peerConnection = null;
this.connectionReceivedAnswer = false;
this.remoteIceCandidateBuffer = [];
this.inputAudioStream = null;
this.dataChannel = null;
this.videoElement = null;
this.videoStream = null;
this.audioElement = null;
this.audioStream = null;
this.inputAudioState = { isMuted: false };
this.publicEventEmitter = publicEventEmitter;
this.internalEventEmitter = internalEventEmitter;
// initialize input audio state
const { inputAudio } = options;
this.inputAudioState = inputAudio.inputAudioState;
if (options.inputAudio.userProvidedMediaStream) {
this.inputAudioStream = options.inputAudio.userProvidedMediaStream;
}
// register event handlers
this.internalEventEmitter.addListener(InternalEvent.WEB_SOCKET_OPEN, this.onSignallingClientConnected.bind(this));
this.internalEventEmitter.addListener(InternalEvent.SIGNAL_MESSAGE_RECEIVED, this.onSignalMessage.bind(this));
// set ice servers
this.iceServers = options.iceServers;
// initialize signalling client
this.signallingClient = new SignallingClient(sessionId, options.signalling, this.publicEventEmitter, this.internalEventEmitter);
// initialize engine API client
this.engineApiRestClient = new EngineApiRestClient(options.engine.baseUrl, sessionId);
this.audioDeviceId = options.inputAudio.audioDeviceId;
}
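/**
 * Reacts to input audio state transitions, muting or unmuting the captured
 * microphone tracks when the mute flag changes.
 */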
onInputAudioStateChange(oldState, newState) {
// changed microphone mute state
if (oldState.isMuted !== newState.isMuted) {
if (newState.isMuted) {
this.muteAllAudioTracks();
}
else {
this.unmuteAllAudioTracks();
}
}
}
muteAllAudioTracks() {
var _a;
(_a = this.inputAudioStream) === null || _a === void 0 ? void 0 : _a.getAudioTracks().forEach((track) => {
track.enabled = false;
});
}
unmuteAllAudioTracks() {
var _a;
(_a = this.inputAudioStream) === null || _a === void 0 ? void 0 : _a.getAudioTracks().forEach((track) => {
track.enabled = true;
});
}
muteInputAudio() {
const oldAudioState = this.inputAudioState;
const newAudioState = Object.assign(Object.assign({}, this.inputAudioState), { isMuted: true });
this.inputAudioState = newAudioState;
this.onInputAudioStateChange(oldAudioState, newAudioState);
return this.inputAudioState;
}
unmuteInputAudio() {
const oldAudioState = this.inputAudioState;
const newAudioState = Object.assign(Object.assign({}, this.inputAudioState), { isMuted: false });
this.inputAudioState = newAudioState;
this.onInputAudioStateChange(oldAudioState, newAudioState);
return this.inputAudioState;
}
getInputAudioState() {
return this.inputAudioState;
}
getPeerConnection() {
return this.peerConnection;
}
getInputAudioStream() {
return this.inputAudioStream;
}
getVideoStream() {
return this.videoStream;
}
getAudioStream() {
return this.audioStream;
}
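/**
 * Sends a raw message over the chat data channel if it is open.
 */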
sendDataMessage(message) {
if (this.dataChannel && this.dataChannel.readyState === 'open') {
this.dataChannel.send(message);
}
}
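/**
 * Looks up the target video and audio elements by id so that incoming
 * remote tracks can be attached to them when they arrive.
 */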
setMediaStreamTargetsById(videoElementId, audioElementId) {
// set up streaming targets
if (videoElementId) {
const videoElement = document.getElementById(videoElementId);
if (!videoElement) {
throw new Error(`StreamingClient: video element with id ${videoElementId} not found`);
}
this.videoElement = videoElement;
}
if (audioElementId) {
const audioElement = document.getElementById(audioElementId);
if (!audioElement) {
throw new Error(`StreamingClient: audio element with id ${audioElementId} not found`);
}
this.audioElement = audioElement;
}
}
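/**
 * Opens the signalling connection. The peer connection itself is created
 * once the signalling WebSocket reports that it is open.
 */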
startConnection() {
try {
if (this.peerConnection) {
console.error('StreamingClient - startConnection: peer connection already exists');
return;
}
// start the connection
this.signallingClient.connect();
}
catch (error) {
console.log('StreamingClient - startConnection: error', error);
this.handleWebrtcFailure(error);
}
}
stopConnection() {
this.shutdown();
}
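/**
 * Sends a talk command to the engine REST API. Requires an active peer connection.
 */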
sendTalkCommand(content) {
return __awaiter(this, void 0, void 0, function* () {
if (!this.peerConnection) {
throw new Error('StreamingClient - sendTalkCommand: peer connection is null');
}
yield this.engineApiRestClient.sendTalkCommand(content);
return;
});
}
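/**
 * Creates a TalkMessageStream for streaming talk content, generating a
 * random correlation id when one is not supplied.
 */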
startTalkMessageStream(correlationId) {
if (!correlationId) {
// generate a random correlation id
correlationId = Math.random().toString(36).substring(2, 15);
}
return new TalkMessageStream(correlationId, this.internalEventEmitter, this.signallingClient);
}
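/**
 * Creates the RTCPeerConnection, wires up the ICE, connection-state and
 * track handlers, sets up the data channels and adds the receive-only video
 * and send/receive audio transceivers.
 */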
initPeerConnection() {
return __awaiter(this, void 0, void 0, function* () {
this.peerConnection = new RTCPeerConnection({
iceServers: this.iceServers,
});
// set event handlers
this.peerConnection.onicecandidate = this.onIceCandidate.bind(this);
this.peerConnection.oniceconnectionstatechange =
this.onIceConnectionStateChange.bind(this);
this.peerConnection.onconnectionstatechange =
this.onConnectionStateChange.bind(this);
this.peerConnection.addEventListener('track', this.onTrackEventHandler.bind(this));
// set up data channels
yield this.setupDataChannels();
// add transceivers
this.peerConnection.addTransceiver('video', { direction: 'recvonly' });
this.peerConnection.addTransceiver('audio', { direction: 'sendrecv' });
});
}
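/**
 * Handles messages from the signalling channel: the SDP answer, remote ICE
 * candidates, end-of-session notices, warnings and talk-stream interruptions.
 */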
onSignalMessage(signalMessage) {
return __awaiter(this, void 0, void 0, function* () {
if (!this.peerConnection) {
console.error('StreamingClient - onSignalMessage: peerConnection is not initialized');
return;
}
switch (signalMessage.actionType) {
case SignalMessageAction.ANSWER:
const answer = signalMessage.payload;
yield this.peerConnection.setRemoteDescription(answer);
this.connectionReceivedAnswer = true;
// flush the remote buffer
this.flushRemoteIceCandidateBuffer();
break;
case SignalMessageAction.ICE_CANDIDATE:
const iceCandidateConfig = signalMessage.payload;
const candidate = new RTCIceCandidate(iceCandidateConfig);
if (this.connectionReceivedAnswer) {
yield this.peerConnection.addIceCandidate(candidate);
}
else {
this.remoteIceCandidateBuffer.push(candidate);
}
break;
case SignalMessageAction.END_SESSION:
const reason = signalMessage.payload;
console.log('StreamingClient - onSignalMessage: reason', reason);
this.publicEventEmitter.emit(AnamEvent.CONNECTION_CLOSED, CONNECTION_CLOSED_CODE_NORMAL);
// close the peer connection
this.shutdown();
break;
case SignalMessageAction.WARNING:
const message = signalMessage.payload;
console.warn('Warning received from server: ' + message);
break;
case SignalMessageAction.TALK_STREAM_INTERRUPTED:
const chatMessage = signalMessage.payload;
this.publicEventEmitter.emit(AnamEvent.TALK_STREAM_INTERRUPTED, chatMessage.correlationId);
break;
default:
console.error('StreamingClient - onSignalMessage: unknown signal message action type', signalMessage);
}
});
}
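/**
 * Called once the signalling WebSocket is open; creates the peer connection
 * and sends the SDP offer.
 */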
onSignallingClientConnected() {
return __awaiter(this, void 0, void 0, function* () {
if (!this.peerConnection) {
try {
yield this.initPeerConnectionAndSendOffer();
}
catch (err) {
console.error('StreamingClient - onSignallingClientConnected: Error initializing peer connection', err);
this.handleWebrtcFailure(err);
}
}
});
}
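/**
 * Applies any remote ICE candidates that arrived before the SDP answer was set.
 */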
flushRemoteIceCandidateBuffer() {
this.remoteIceCandidateBuffer.forEach((candidate) => {
var _a;
(_a = this.peerConnection) === null || _a === void 0 ? void 0 : _a.addIceCandidate(candidate);
});
this.remoteIceCandidateBuffer = [];
}
/**
* ICE Candidate Trickle
* As each ICE candidate is gathered from the STUN server it is sent to the
* webRTC server immediately in an effort to reduce time to connection.
*/
onIceCandidate(event) {
if (event.candidate) {
this.signallingClient.sendIceCandidate(event.candidate);
}
}
onIceConnectionStateChange() {
var _a, _b;
if (((_a = this.peerConnection) === null || _a === void 0 ? void 0 : _a.iceConnectionState) === 'connected' ||
((_b = this.peerConnection) === null || _b === void 0 ? void 0 : _b.iceConnectionState) === 'completed') {
this.publicEventEmitter.emit(AnamEvent.CONNECTION_ESTABLISHED);
}
}
onConnectionStateChange() {
var _a;
if (((_a = this.peerConnection) === null || _a === void 0 ? void 0 : _a.connectionState) === 'closed') {
console.error('StreamingClient - onConnectionStateChange: Connection closed');
this.handleWebrtcFailure('The connection to our servers was lost. Please try again.');
}
}
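/**
 * Emits CONNECTION_CLOSED with the appropriate close code (microphone
 * permission denied vs. general WebRTC failure) and tears down the connection.
 */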
handleWebrtcFailure(err) {
console.error({ message: 'StreamingClient - handleWebrtcFailure: ', err });
if (err.name === 'NotAllowedError' && err.message === 'Permission denied') {
this.publicEventEmitter.emit(AnamEvent.CONNECTION_CLOSED, CONNECTION_CLOSED_CODE_MICROPHONE_PERMISSION_DENIED);
}
else {
this.publicEventEmitter.emit(AnamEvent.CONNECTION_CLOSED, CONNECTION_CLOSED_CODE_WEBRTC_FAILURE);
}
try {
this.stopConnection();
}
catch (error) {
console.error('StreamingClient - handleWebrtcFailure: error stopping connection', error);
}
}
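/**
 * Attaches incoming remote video and audio tracks to the configured elements
 * and emits the corresponding stream-started events.
 */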
onTrackEventHandler(event) {
if (event.track.kind === 'video') {
this.videoStream = event.streams[0];
this.publicEventEmitter.emit(AnamEvent.VIDEO_STREAM_STARTED, this.videoStream);
if (this.videoElement) {
this.videoElement.srcObject = this.videoStream;
const handle = this.videoElement.requestVideoFrameCallback(() => {
var _a;
// unregister the callback after the first frame
(_a = this.videoElement) === null || _a === void 0 ? void 0 : _a.cancelVideoFrameCallback(handle);
this.publicEventEmitter.emit(AnamEvent.VIDEO_PLAY_STARTED);
});
}
}
else if (event.track.kind === 'audio') {
this.audioStream = event.streams[0];
this.publicEventEmitter.emit(AnamEvent.AUDIO_STREAM_STARTED, this.audioStream);
if (this.audioElement) {
this.audioElement.srcObject = this.audioStream;
}
}
}
/**
* Set up the data channels for sending and receiving messages
*/
setupDataChannels() {
return __awaiter(this, void 0, void 0, function* () {
if (!this.peerConnection) {
console.error('StreamingClient - setupDataChannels: peer connection is not initialized');
return;
}
/**
* Audio
*
* If the user hasn't provided an audio stream, capture the audio stream from the user's microphone and send it to the peer connection
*/
if (this.inputAudioStream) {
// verify the user provided stream has audio tracks
if (!this.inputAudioStream.getAudioTracks().length) {
throw new Error('StreamingClient - setupDataChannels: user provided stream does not have audio tracks');
}
}
else {
const audioConstraints = {
echoCancellation: true,
};
// If an audio device ID is provided in the options, use it
if (this.audioDeviceId) {
audioConstraints.deviceId = {
exact: this.audioDeviceId,
};
}
this.inputAudioStream = yield navigator.mediaDevices.getUserMedia({
audio: audioConstraints,
});
}
// mute the audio tracks if the user has muted the microphone
if (this.inputAudioState.isMuted) {
this.muteAllAudioTracks();
}
const audioTrack = this.inputAudioStream.getAudioTracks()[0];
this.peerConnection.addTrack(audioTrack, this.inputAudioStream);
// pass the stream to the callback if it exists
this.publicEventEmitter.emit(AnamEvent.INPUT_AUDIO_STREAM_STARTED, this.inputAudioStream);
/**
* Text
*
* Create the data channel for sending and receiving text.
* There is no input stream for text, instead the sending of data is triggered by a UI interaction.
*/
const dataChannel = this.peerConnection.createDataChannel('chat', {
ordered: true,
});
dataChannel.onopen = () => {
this.dataChannel = dataChannel !== null && dataChannel !== void 0 ? dataChannel : null;
};
dataChannel.onclose = () => {
// TODO: should we set the data channel to null here?
};
// pass text message to the message history client
dataChannel.onmessage = (event) => {
const messageEvent = JSON.parse(event.data);
this.internalEventEmitter.emit(InternalEvent.WEBRTC_CHAT_MESSAGE_RECEIVED, messageEvent);
};
});
}
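/**
 * Initializes the peer connection, creates an SDP offer, sets it as the
 * local description and sends it to the signalling server.
 */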
initPeerConnectionAndSendOffer() {
return __awaiter(this, void 0, void 0, function* () {
yield this.initPeerConnection();
if (!this.peerConnection) {
console.error('StreamingClient - initPeerConnectionAndSendOffer: peer connection is not initialized');
return;
}
// create offer and set local description
try {
const offer = yield this.peerConnection.createOffer();
yield this.peerConnection.setLocalDescription(offer);
}
catch (error) {
console.error('StreamingClient - initPeerConnectionAndSendOffer: error creating offer', error);
}
if (!this.peerConnection.localDescription) {
throw new Error('StreamingClient - initPeerConnectionAndSendOffer: local description is null');
}
yield this.signallingClient.sendOffer(this.peerConnection.localDescription);
});
}
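/**
 * Stops the local input audio tracks, stops the signalling client and
 * closes the peer connection.
 */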
shutdown() {
try {
if (this.inputAudioStream) {
this.inputAudioStream.getTracks().forEach((track) => {
track.stop();
});
}
this.inputAudioStream = null;
}
catch (error) {
console.error('StreamingClient - shutdown: error stopping input audio stream', error);
}
try {
this.signallingClient.stop();
}
catch (error) {
console.error('StreamingClient - shutdown: error stopping signalling', error);
}
try {
if (this.peerConnection &&
this.peerConnection.connectionState !== 'closed') {
this.peerConnection.onconnectionstatechange = null;
this.peerConnection.close();
this.peerConnection = null;
}
}
catch (error) {
console.error('StreamingClient - shutdown: error closing peer connection', error);
}
}
}
//# sourceMappingURL=StreamingClient.js.map