@coze/realtime-api
Version:
A powerful real-time communication SDK for voice interactions with Coze AI bots | 扣子官方实时通信 SDK,用于与 Coze AI bots 进行语音交互
988 lines • 51.5 kB
JavaScript
(function(root, factory) {
if ('object' == typeof exports && 'object' == typeof module) module.exports = factory(require("@volcengine/rtc/extension-ainr"), require("@coze/api"), require("@volcengine/rtc"));
else if ('function' == typeof define && define.amd) define([
"@volcengine/rtc/extension-ainr",
"@coze/api",
"@volcengine/rtc"
], factory);
else if ('object' == typeof exports) exports["CozeRealtimeApi"] = factory(require("@volcengine/rtc/extension-ainr"), require("@coze/api"), require("@volcengine/rtc"));
else root["CozeRealtimeApi"] = factory(root["@volcengine/rtc/extension-ainr"], root["@coze/api"], root["@volcengine/rtc"]);
})(self, (__WEBPACK_EXTERNAL_MODULE__volcengine_rtc_extension_ainr__, __WEBPACK_EXTERNAL_MODULE__coze_api__, __WEBPACK_EXTERNAL_MODULE__volcengine_rtc__)=>(()=>{
"use strict";
var __webpack_modules__ = {
"@coze/api": function(module1) {
module1.exports = __WEBPACK_EXTERNAL_MODULE__coze_api__;
},
"@volcengine/rtc": function(module1) {
module1.exports = __WEBPACK_EXTERNAL_MODULE__volcengine_rtc__;
},
"@volcengine/rtc/extension-ainr": function(module1) {
module1.exports = __WEBPACK_EXTERNAL_MODULE__volcengine_rtc_extension_ainr__;
}
};
/************************************************************************/ // The module cache
var __webpack_module_cache__ = {};
// The require function
function __webpack_require__(moduleId) {
// Check if module is in cache
var cachedModule = __webpack_module_cache__[moduleId];
if (void 0 !== cachedModule) return cachedModule.exports;
// Create a new module (and put it into the cache)
var module1 = __webpack_module_cache__[moduleId] = {
exports: {}
};
// Execute the module function
__webpack_modules__[moduleId](module1, module1.exports, __webpack_require__);
// Return the exports of the module
return module1.exports;
}
/************************************************************************/ // webpack/runtime/compat_get_default_export
(()=>{
// getDefaultExport function for compatibility with non-ESM modules
__webpack_require__.n = function(module1) {
var getter = module1 && module1.__esModule ? function() {
return module1['default'];
} : function() {
return module1;
};
__webpack_require__.d(getter, {
a: getter
});
return getter;
};
})();
// webpack/runtime/define_property_getters
(()=>{
__webpack_require__.d = function(exports1, definition) {
for(var key in definition)if (__webpack_require__.o(definition, key) && !__webpack_require__.o(exports1, key)) Object.defineProperty(exports1, key, {
enumerable: true,
get: definition[key]
});
};
})();
// webpack/runtime/has_own_property
(()=>{
__webpack_require__.o = function(obj, prop) {
return Object.prototype.hasOwnProperty.call(obj, prop);
};
})();
// webpack/runtime/make_namespace_object
(()=>{
// define __esModule on exports
__webpack_require__.r = function(exports1) {
if ('undefined' != typeof Symbol && Symbol.toStringTag) Object.defineProperty(exports1, Symbol.toStringTag, {
value: 'Module'
});
Object.defineProperty(exports1, '__esModule', {
value: true
});
};
})();
/************************************************************************/ var __webpack_exports__ = {};
// ESM COMPAT FLAG
__webpack_require__.r(__webpack_exports__);
// EXPORTS
__webpack_require__.d(__webpack_exports__, {
RealtimeAPIError: ()=>/* reexport */ RealtimeAPIError,
RealtimeUtils: ()=>/* reexport */ utils_namespaceObject,
RealtimeError: ()=>/* reexport */ error_RealtimeError,
EventNames: ()=>/* reexport */ event_names,
RealtimeClient: ()=>/* binding */ RealtimeClient
});
// NAMESPACE OBJECT: ./src/utils.ts
var utils_namespaceObject = {};
__webpack_require__.r(utils_namespaceObject);
__webpack_require__.d(utils_namespaceObject, {
checkDevicePermission: ()=>checkDevicePermission,
checkPermission: ()=>checkPermission,
getAudioDevices: ()=>getAudioDevices,
isMobileVideoDevice: ()=>isMobileVideoDevice,
isScreenShareDevice: ()=>isScreenShareDevice,
isScreenShareSupported: ()=>isScreenShareSupported,
sleep: ()=>sleep
});
// EXTERNAL MODULE: external "@coze/api"
var api_ = __webpack_require__("@coze/api");
// EXTERNAL MODULE: external "@volcengine/rtc"
var rtc_ = __webpack_require__("@volcengine/rtc");
var rtc_default = /*#__PURE__*/ __webpack_require__.n(rtc_);
/**
 * Delays execution for the specified duration
 * @param milliseconds The time to sleep in milliseconds
 * @throws {Error} If milliseconds is negative
 * @returns Promise that resolves after the specified duration
 */ const sleep = (milliseconds)=>{
if (milliseconds < 0) throw new Error('Sleep duration must be non-negative');
return new Promise((resolve)=>setTimeout(resolve, milliseconds));
};
/**
* @deprecated use checkDevicePermission instead
 * Check microphone permission, returns a boolean
*/ const checkPermission = async function() {
let { audio = true, video = false } = arguments.length > 0 && void 0 !== arguments[0] ? arguments[0] : {};
try {
const result = await rtc_default().enableDevices({
audio,
video
});
return result.audio;
} catch (error) {
console.error('Failed to check device permissions:', error);
return false;
}
};
/**
* Checks device permissions for audio and video
* @param checkVideo Whether to check video permissions (default: false)
* @returns Promise that resolves with the device permission status
*/ const checkDevicePermission = async function() {
let checkVideo = arguments.length > 0 && void 0 !== arguments[0] && arguments[0];
return await rtc_default().enableDevices({
audio: true,
video: checkVideo
});
};
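/**
 * A minimal permission-check sketch. `checkDevicePermission` is re-exported through the
 * `RealtimeUtils` namespace (see the EXPORTS section above); the `.audio` field on the
 * result mirrors the usage in the deprecated `checkPermission` helper, the surrounding
 * application code is illustrative:
 *
 *   import { RealtimeUtils } from '@coze/realtime-api';
 *
 *   // Ask for microphone (and, with `true`, camera) access before connecting.
 *   const permission = await RealtimeUtils.checkDevicePermission(true);
 *   if (!permission.audio) {
 *     console.warn('Microphone access was denied');
 *   }
 */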
/**
* Get audio devices
* @returns Promise<AudioDevices> Object containing arrays of audio input and output devices
*/ const getAudioDevices = async function() {
let { video = false } = arguments.length > 0 && void 0 !== arguments[0] ? arguments[0] : {};
let devices = [];
if (video) {
devices = await rtc_default().enumerateDevices();
if (isScreenShareSupported()) // @ts-expect-error - add screenShare device to devices
devices.push({
deviceId: 'screenShare',
kind: 'videoinput',
label: 'Screen Share',
groupId: 'screenShare'
});
} else devices = await [
...await rtc_default().enumerateAudioCaptureDevices(),
...await rtc_default().enumerateAudioPlaybackDevices()
];
if (!(null == devices ? void 0 : devices.length)) return {
audioInputs: [],
audioOutputs: [],
videoInputs: []
};
return {
audioInputs: devices.filter((i)=>i.deviceId && 'audioinput' === i.kind),
audioOutputs: devices.filter((i)=>i.deviceId && 'audiooutput' === i.kind),
videoInputs: devices.filter((i)=>i.deviceId && 'videoinput' === i.kind)
};
};
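/**
 * Device-enumeration sketch. The returned shape (audioInputs/audioOutputs/videoInputs)
 * follows the code above; the logging is illustrative:
 *
 *   import { RealtimeUtils } from '@coze/realtime-api';
 *
 *   const { audioInputs, audioOutputs, videoInputs } =
 *     await RealtimeUtils.getAudioDevices({ video: true });
 *   console.log('microphones:', audioInputs.map(d => d.label));
 *   console.log('speakers:', audioOutputs.map(d => d.label));
 *   console.log('cameras:', videoInputs.map(d => d.label));
 */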
const isScreenShareDevice = (deviceId)=>'screenShare' === deviceId;
/**
 * Check whether the device ID refers to a mobile front/rear camera
 * @param deviceId Device ID to check ('user' or 'environment')
 * @returns true if the ID is a mobile facing-mode value
 */ const isMobileVideoDevice = (deviceId)=>'user' === deviceId || 'environment' === deviceId;
/**
* Check if browser supports screen sharing
* 检查浏览器是否支持屏幕共享
*/ function isScreenShareSupported() {
var _navigator_mediaDevices, _navigator;
return !!(null === (_navigator = navigator) || void 0 === _navigator ? void 0 : null === (_navigator_mediaDevices = _navigator.mediaDevices) || void 0 === _navigator_mediaDevices ? void 0 : _navigator_mediaDevices.getDisplayMedia);
}
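/**
 * Sketch: gate a screen-share option on browser support and use the pseudo device id
 * 'screenShare' that getAudioDevices({ video: true }) appends above. The UI helper is
 * assumed, not part of this SDK:
 *
 *   import { RealtimeUtils } from '@coze/realtime-api';
 *
 *   if (RealtimeUtils.isScreenShareSupported()) {
 *     showScreenShareButton(); // hypothetical UI hook
 *     // later: await client.setVideoInputDevice('screenShare');
 *   }
 */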
var event_names_EventNames = /*#__PURE__*/ function(EventNames) {
/**
* en: All events
* zh: 所有事件
*/ EventNames["ALL"] = "realtime.event";
/**
* en: All client events
* zh: 所有客户端事件
*/ EventNames["ALL_CLIENT"] = "client.*";
/**
* en: All server events
* zh: 所有服务端事件
*/ EventNames["ALL_SERVER"] = "server.*";
/**
* en: Room info
* zh: 房间信息
*/ EventNames["ROOM_INFO"] = "client.room.info";
/**
* en: Client connected
* zh: 客户端连接
*/ EventNames["CONNECTED"] = "client.connected";
/**
* en: Client connecting
* zh: 客户端连接中
*/ EventNames["CONNECTING"] = "client.connecting";
/**
* en: Client interrupted
* zh: 客户端中断
*/ EventNames["INTERRUPTED"] = "client.interrupted";
/**
* en: Client disconnected
* zh: 客户端断开
*/ EventNames["DISCONNECTED"] = "client.disconnected";
/**
* en: Client audio unmuted
* zh: 客户端音频未静音
*/ EventNames["AUDIO_UNMUTED"] = "client.audio.unmuted";
/**
* en: Client audio muted
* zh: 客户端音频静音
*/ EventNames["AUDIO_MUTED"] = "client.audio.muted";
/**
* en: Client video on
* zh: 客户端视频开启
*/ EventNames["VIDEO_ON"] = "client.video.on";
/**
* en: Client video off
* zh: 客户端视频关闭
*/ EventNames["VIDEO_OFF"] = "client.video.off";
/**
* en: Client video error
* zh: 客户端视频(或屏幕共享)错误
*/ EventNames["VIDEO_ERROR"] = "client.video.error";
/**
* en: Client video event
* zh: 客户端视频事件
*/ EventNames["PLAYER_EVENT"] = "client.video.event";
/**
* en: Client error
* zh: 客户端错误
*/ EventNames["ERROR"] = "client.error";
/**
* en: Audio noise reduction enabled
* zh: 抑制平稳噪声
*/ EventNames["SUPPRESS_STATIONARY_NOISE"] = "client.suppress.stationary.noise";
/**
* en: Suppress non-stationary noise
* zh: 抑制非平稳噪声
*/ EventNames["SUPPRESS_NON_STATIONARY_NOISE"] = "client.suppress.non.stationary.noise";
/**
* en: Audio input device changed
* zh: 音频输入设备改变
*/ EventNames["AUDIO_INPUT_DEVICE_CHANGED"] = "client.input.device.changed";
/**
* en: Audio output device changed
* zh: 音频输出设备改变
*/ EventNames["AUDIO_OUTPUT_DEVICE_CHANGED"] = "client.output.device.changed";
/**
* en: Video input device changed
* zh: 视频输入设备改变
*/ EventNames["VIDEO_INPUT_DEVICE_CHANGED"] = "client.video.input.device.changed";
/**
* en: Network quality changed
* zh: 网络质量改变
*/ EventNames["NETWORK_QUALITY"] = "client.network.quality";
/**
* en: Bot joined
* zh: Bot 加入
*/ EventNames["BOT_JOIN"] = "server.bot.join";
/**
* en: Bot left
* zh: Bot 离开
*/ EventNames["BOT_LEAVE"] = "server.bot.leave";
/**
* en: Audio speech started
* zh: 开始说话
*/ EventNames["AUDIO_AGENT_SPEECH_STARTED"] = "server.audio.agent.speech_started";
/**
* en: Audio speech stopped
* zh: 停止说话
*/ EventNames["AUDIO_AGENT_SPEECH_STOPPED"] = "server.audio.agent.speech_stopped";
/**
* en: Server error
* zh: 服务端错误
*/ EventNames["SERVER_ERROR"] = "server.error";
/**
* en: User speech started
* zh: 用户开始说话
*/ EventNames["AUDIO_USER_SPEECH_STARTED"] = "server.audio.user.speech_started";
/**
* en: User speech stopped
* zh: 用户停止说话
*/ EventNames["AUDIO_USER_SPEECH_STOPPED"] = "server.audio.user.speech_stopped";
/**
* en: User successfully enters the room
* zh: 用户成功进入房间后,会收到该事件
*/ EventNames["SESSION_CREATED"] = "server.session.created";
/**
* en: Session updated
* zh: 会话更新
*/ EventNames["SESSION_UPDATED"] = "server.session.updated";
/**
* en: Conversation created
* zh: 会话创建
*/ EventNames["CONVERSATION_CREATED"] = "server.conversation.created";
/**
* en: Conversation chat created
* zh: 会话对话创建
*/ EventNames["CONVERSATION_CHAT_CREATED"] = "server.conversation.chat.created";
/**
* en: Conversation chat in progress
* zh: 对话正在处理中
*/ EventNames["CONVERSATION_CHAT_IN_PROGRESS"] = "server.conversation.chat.in_progress";
/**
* en: Conversation message delta received
* zh: 文本消息增量返回
*/ EventNames["CONVERSATION_MESSAGE_DELTA"] = "server.conversation.message.delta";
/**
* en: Conversation message completed
* zh: 文本消息完成
*/ EventNames["CONVERSATION_MESSAGE_COMPLETED"] = "server.conversation.message.completed";
/**
* en: Conversation chat completed
* zh: 对话完成
*/ EventNames["CONVERSATION_CHAT_COMPLETED"] = "server.conversation.chat.completed";
/**
* en: Conversation chat requires action
* zh: 对话需要插件
*/ EventNames["CONVERSATION_CHAT_REQUIRES_ACTION"] = "server.conversation.chat.requires_action";
/**
* en: Conversation chat failed
* zh: 对话失败
*/ EventNames["CONVERSATION_CHAT_FAILED"] = "server.conversation.chat.failed";
/**
* en: Session pre answer updated
* zh: 安抚配置更新成功
*/ EventNames["SESSION_PRE_ANSWER_UPDATED"] = "server.session.pre_answer.updated";
/**
* en: Conversation audio transcript delta
* zh: 用户语音识别字幕
*/ EventNames["CONVERSATION_AUDIO_TRANSCRIPT_DELTA"] = "server.conversation.audio_transcript.delta";
/**
* en: Mode updated
* zh: 更新房间模式成功
*/ EventNames["MODE_UPDATED"] = "server.mode.updated";
/**
* en: Live created
* zh: 直播创建
*/ EventNames["LIVE_CREATED"] = "server.live.created";
return EventNames;
}(event_names_EventNames || {});
/* ESM default export */ const event_names = event_names_EventNames;
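/**
 * Event-subscription sketch. The event constants and the wildcard channels
 * ('client.*', 'server.*', 'realtime.event') come from the enum above; handlers receive
 * (eventName, event) as dispatched by RealtimeEventHandler below. The `client` instance
 * and handler bodies are illustrative:
 *
 *   import { RealtimeClient, EventNames } from '@coze/realtime-api';
 *
 *   client.on(EventNames.CONNECTED, (eventName, data) => {
 *     console.log('joined room', data.roomId);
 *   });
 *   client.on(EventNames.CONVERSATION_MESSAGE_DELTA, (eventName, msg) => {
 *     // incremental text from the bot
 *   });
 *   client.on(EventNames.ALL_SERVER, (eventName, event) => {
 *     console.log('server event', eventName);
 *   });
 */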
var error_RealtimeError = /*#__PURE__*/ function(RealtimeError) {
RealtimeError["DEVICE_ACCESS_ERROR"] = "DEVICE_ACCESS_ERROR";
RealtimeError["STREAM_CREATION_ERROR"] = "STREAM_CREATION_ERROR";
RealtimeError["CONNECTION_ERROR"] = "CONNECTION_ERROR";
RealtimeError["DISCONNECTION_ERROR"] = "DISCONNECTION_ERROR";
RealtimeError["INTERRUPT_ERROR"] = "INTERRUPT_ERROR";
RealtimeError["EVENT_HANDLER_ERROR"] = "EVENT_HANDLER_ERROR";
RealtimeError["PERMISSION_DENIED"] = "PERMISSION_DENIED";
RealtimeError["NETWORK_ERROR"] = "NETWORK_ERROR";
RealtimeError["INVALID_STATE"] = "INVALID_STATE";
RealtimeError["CREATE_ROOM_ERROR"] = "CREATE_ROOM_ERROR";
RealtimeError["PARSE_MESSAGE_ERROR"] = "PARSE_MESSAGE_ERROR";
RealtimeError["HANDLER_MESSAGE_ERROR"] = "HANDLER_MESSAGE_ERROR";
return RealtimeError;
}({});
class RealtimeAPIError extends Error {
/**
* @param code - Error code
* @param message - Error message
* @param error - Error object
*/ constructor(code, message, error){
super(`[${code}] ${message}`);
this.name = 'RealtimeAPIError';
this.code = code;
this.error = error;
}
}
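/**
 * Error-handling sketch. RealtimeClient.connect() wraps room-creation failures in a
 * RealtimeAPIError carrying a RealtimeError code (enum above), so callers can branch on
 * `code`; the try/catch wrapper is illustrative:
 *
 *   import { RealtimeAPIError, RealtimeError } from '@coze/realtime-api';
 *
 *   try {
 *     await client.connect();
 *   } catch (e) {
 *     if (e instanceof RealtimeAPIError && e.code === RealtimeError.CREATE_ROOM_ERROR) {
 *       console.error('Room creation failed:', e.message, e.error);
 *     } else {
 *       throw e;
 *     }
 *   }
 */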
class RealtimeEventHandler {
clearEventHandlers() {
this.eventHandlers = {};
}
on(eventName, callback) {
this._log(`on ${eventName} event`);
this.eventHandlers[eventName] = this.eventHandlers[eventName] || [];
this.eventHandlers[eventName].push(callback);
return callback;
}
off(eventName, callback) {
this._log(`off ${eventName} event`);
const handlers = this.eventHandlers[eventName] || [];
if (callback) {
const index = handlers.indexOf(callback);
if (-1 === index) {
console.warn(`Could not turn off specified event listener for "${eventName}": not found as a listener`);
return;
}
handlers.splice(index, 1);
} else delete this.eventHandlers[eventName];
}
// eslint-disable-next-line max-params
_dispatchToHandlers(eventName, event, handlers, prefix) {
for (const handler of handlers)if (!prefix || eventName.startsWith(prefix)) try {
handler(eventName, event);
} catch (e) {
throw new RealtimeAPIError(error_RealtimeError.HANDLER_MESSAGE_ERROR, `Failed to handle message: ${eventName}`);
}
}
dispatch(eventName, event) {
let consoleLog = !(arguments.length > 2) || void 0 === arguments[2] || arguments[2];
if (consoleLog) this._log(`dispatch ${eventName} event`, event);
const handlers = (this.eventHandlers[eventName] || []).slice();
this._dispatchToHandlers(eventName, event, handlers);
const allHandlers = (this.eventHandlers[event_names.ALL] || []).slice();
this._dispatchToHandlers(eventName, event, allHandlers);
const allClientHandlers = (this.eventHandlers[event_names.ALL_CLIENT] || []).slice();
this._dispatchToHandlers(eventName, event, allClientHandlers, 'client.');
const allServerHandlers = (this.eventHandlers[event_names.ALL_SERVER] || []).slice();
this._dispatchToHandlers(eventName, event, allServerHandlers, 'server.');
}
_log(message, event) {
if (this._debug) console.log(`[RealtimeClient] ${message}`, event);
}
constructor(debug = false){
this.eventHandlers = {};
this._debug = debug;
}
}
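/**
 * RealtimeEventHandler is the event base class shared by EngineClient and RealtimeClient,
 * so on()/off() are available on a RealtimeClient instance. A small sketch (the handler
 * name is illustrative; EventNames is the package export):
 *
 *   const onError = client.on(EventNames.ERROR, (eventName, e) => {
 *     console.error(e);
 *   });
 *   // later, remove just this listener...
 *   client.off(EventNames.ERROR, onError);
 *   // ...or all listeners for the event
 *   client.off(EventNames.ERROR);
 */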
// EXTERNAL MODULE: external "@volcengine/rtc/extension-ainr"
var extension_ainr_ = __webpack_require__("@volcengine/rtc/extension-ainr");
var extension_ainr_default = /*#__PURE__*/ __webpack_require__.n(extension_ainr_);
class EngineClient extends RealtimeEventHandler {
bindEngineEvents() {
this.engine.on(rtc_default().events.onUserMessageReceived, this.handleMessage);
this.engine.on(rtc_default().events.onUserJoined, this.handleUserJoin);
this.engine.on(rtc_default().events.onUserLeave, this.handleUserLeave);
this.engine.on(rtc_default().events.onError, this.handleEventError);
this.engine.on(rtc_default().events.onNetworkQuality, this.handleNetworkQuality);
this.engine.on(rtc_default().events.onTrackEnded, this.handleTrackEnded);
if (this._isSupportVideo) this.engine.on(rtc_default().events.onPlayerEvent, this.handlePlayerEvent);
if (this._debug) {
this.engine.on(rtc_default().events.onLocalAudioPropertiesReport, this.handleLocalAudioPropertiesReport);
this.engine.on(rtc_default().events.onRemoteAudioPropertiesReport, this.handleRemoteAudioPropertiesReport);
}
}
removeEventListener() {
this.engine.off(rtc_default().events.onUserMessageReceived, this.handleMessage);
this.engine.off(rtc_default().events.onUserJoined, this.handleUserJoin);
this.engine.off(rtc_default().events.onUserLeave, this.handleUserLeave);
this.engine.off(rtc_default().events.onError, this.handleEventError);
this.engine.off(rtc_default().events.onNetworkQuality, this.handleNetworkQuality);
this.engine.off(rtc_default().events.onTrackEnded, this.handleTrackEnded);
if (this._isSupportVideo) this.engine.off(rtc_default().events.onPlayerEvent, this.handlePlayerEvent);
if (this._debug) {
this.engine.off(rtc_default().events.onLocalAudioPropertiesReport, this.handleLocalAudioPropertiesReport);
this.engine.off(rtc_default().events.onRemoteAudioPropertiesReport, this.handleRemoteAudioPropertiesReport);
}
}
_parseMessage(event) {
try {
return JSON.parse(event.message);
// eslint-disable-next-line @typescript-eslint/no-explicit-any
} catch (e) {
throw new RealtimeAPIError(error_RealtimeError.PARSE_MESSAGE_ERROR, (null == e ? void 0 : e.message) || 'Unknown error');
}
}
handleMessage(event) {
try {
const message = this._parseMessage(event);
this.dispatch(`server.${message.event_type}`, message);
} catch (e) {
if (e instanceof RealtimeAPIError) {
if (e.code === error_RealtimeError.PARSE_MESSAGE_ERROR) this.dispatch(event_names.ERROR, {
message: `Failed to parse message: ${event.message}`,
error: e
});
else if (e.code === error_RealtimeError.HANDLER_MESSAGE_ERROR) this.dispatch(event_names.ERROR, {
message: `Failed to handle message: ${event.message}`,
error: e
});
} else this.dispatch(event_names.ERROR, e);
}
}
handleEventError(e) {
this.dispatch(event_names.ERROR, e);
}
handleUserJoin(event) {
this.joinUserId = event.userInfo.userId;
this.dispatch(event_names.BOT_JOIN, event);
}
handleUserLeave(event) {
this.dispatch(event_names.BOT_LEAVE, event);
}
handlePlayerEvent(event) {
this.dispatch(event_names.PLAYER_EVENT, event);
}
handleNetworkQuality(uplinkNetworkQuality, downlinkNetworkQuality) {
this.dispatch(event_names.NETWORK_QUALITY, {
uplinkNetworkQuality,
downlinkNetworkQuality
});
}
// eslint-disable-next-line @typescript-eslint/no-explicit-any
handleTrackEnded(event) {
if ((null == event ? void 0 : event.kind) === 'video') this.dispatch(event_names.VIDEO_OFF, event);
}
async joinRoom(options) {
const { token, roomId, uid, audioMutedDefault, videoOnDefault, isAutoSubscribeAudio } = options;
try {
await this.engine.joinRoom(token, roomId, {
userId: uid
}, {
isAutoPublish: !audioMutedDefault,
isAutoSubscribeAudio,
isAutoSubscribeVideo: this._isSupportVideo && videoOnDefault
});
} catch (e) {
if (e instanceof Error) throw new RealtimeAPIError(error_RealtimeError.CONNECTION_ERROR, e.message);
}
}
async setAudioInputDevice(deviceId) {
const devices = await getAudioDevices();
if (-1 === devices.audioInputs.findIndex((i)=>i.deviceId === deviceId)) throw new RealtimeAPIError(error_RealtimeError.DEVICE_ACCESS_ERROR, `Audio input device not found: ${deviceId}`);
this.engine.stopAudioCapture();
await this.engine.startAudioCapture(deviceId);
}
async setAudioOutputDevice(deviceId) {
const devices = await getAudioDevices({
video: false
});
if (-1 === devices.audioOutputs.findIndex((i)=>i.deviceId === deviceId)) throw new RealtimeAPIError(error_RealtimeError.DEVICE_ACCESS_ERROR, `Audio output device not found: ${deviceId}`);
await this.engine.setAudioPlaybackDevice(deviceId);
}
async setVideoInputDevice(deviceId) {
let isAutoCapture = !(arguments.length > 1) || void 0 === arguments[1] || arguments[1];
var _this__videoConfig;
const devices = await getAudioDevices({
video: true
});
if (!isMobileVideoDevice(deviceId) && -1 === devices.videoInputs.findIndex((i)=>i.deviceId === deviceId)) throw new RealtimeAPIError(error_RealtimeError.DEVICE_ACCESS_ERROR, `Video input device not found: ${deviceId}`);
this.engine.setLocalVideoPlayer(isScreenShareDevice(deviceId) ? rtc_.StreamIndex.STREAM_INDEX_SCREEN : rtc_.StreamIndex.STREAM_INDEX_MAIN, {
renderDom: (null === (_this__videoConfig = this._videoConfig) || void 0 === _this__videoConfig ? void 0 : _this__videoConfig.renderDom) || 'local-player',
userId: this._roomUserId
});
await this.changeVideoState(false);
if (isScreenShareDevice(deviceId)) {
if (this._streamIndex === rtc_.StreamIndex.STREAM_INDEX_MAIN) this.engine.setLocalVideoPlayer(rtc_.StreamIndex.STREAM_INDEX_MAIN);
if (isAutoCapture) {
var _this__videoConfig1;
this.engine.setVideoSourceType(rtc_.StreamIndex.STREAM_INDEX_SCREEN, rtc_.VideoSourceType.VIDEO_SOURCE_TYPE_INTERNAL);
await this.engine.startScreenCapture(null === (_this__videoConfig1 = this._videoConfig) || void 0 === _this__videoConfig1 ? void 0 : _this__videoConfig1.screenConfig);
await this.engine.publishScreen(rtc_.MediaType.VIDEO);
}
this._streamIndex = rtc_.StreamIndex.STREAM_INDEX_SCREEN;
} else {
if (this._streamIndex === rtc_.StreamIndex.STREAM_INDEX_SCREEN) this.engine.setLocalVideoPlayer(rtc_.StreamIndex.STREAM_INDEX_SCREEN);
if (isAutoCapture) await this.engine.startVideoCapture(deviceId);
this._streamIndex = rtc_.StreamIndex.STREAM_INDEX_MAIN;
}
}
async createLocalStream(userId, videoConfig) {
this._roomUserId = userId;
const devices = await getAudioDevices({
video: this._isSupportVideo
});
if (!devices.audioInputs.length) throw new RealtimeAPIError(error_RealtimeError.DEVICE_ACCESS_ERROR, 'Failed to get audio devices');
if (this._isSupportVideo && !devices.videoInputs.length) throw new RealtimeAPIError(error_RealtimeError.DEVICE_ACCESS_ERROR, 'Failed to get video devices');
await this.engine.startAudioCapture(devices.audioInputs[0].deviceId);
if (this._isSupportVideo) try {
await this.setVideoInputDevice((null == videoConfig ? void 0 : videoConfig.videoInputDeviceId) || devices.videoInputs[0].deviceId, null == videoConfig ? void 0 : videoConfig.videoOnDefault);
this.dispatch((null == videoConfig ? void 0 : videoConfig.videoOnDefault) ? event_names.VIDEO_ON : event_names.VIDEO_OFF, {});
} catch (e) {
this.dispatch(event_names.VIDEO_ERROR, e);
}
}
async disconnect() {
try {
await this.engine.leaveRoom();
this.removeEventListener();
this.clearEventHandlers();
rtc_default().destroyEngine(this.engine);
} catch (e) {
this.dispatch(event_names.ERROR, e);
throw e;
}
}
async changeAudioState(isMicOn) {
try {
if (isMicOn) await this.engine.publishStream(rtc_.MediaType.AUDIO);
else await this.engine.unpublishStream(rtc_.MediaType.AUDIO);
} catch (e) {
this.dispatch(event_names.ERROR, e);
throw e;
}
}
async changeVideoState(isVideoOn) {
if (isVideoOn) {
if (this._streamIndex === rtc_.StreamIndex.STREAM_INDEX_MAIN) await this.engine.startVideoCapture();
else {
var _this__videoConfig;
this.engine.setVideoSourceType(rtc_.StreamIndex.STREAM_INDEX_SCREEN, rtc_.VideoSourceType.VIDEO_SOURCE_TYPE_INTERNAL);
await this.engine.startScreenCapture(null === (_this__videoConfig = this._videoConfig) || void 0 === _this__videoConfig ? void 0 : _this__videoConfig.screenConfig);
await this.engine.publishScreen(rtc_.MediaType.VIDEO);
}
} else if (this._streamIndex === rtc_.StreamIndex.STREAM_INDEX_MAIN) await this.engine.stopVideoCapture();
else {
await this.engine.stopScreenCapture();
await this.engine.unpublishScreen(rtc_.MediaType.VIDEO);
}
}
async stop() {
try {
const result = await this.engine.sendUserMessage(this.joinUserId, JSON.stringify({
id: 'event_1',
event_type: 'conversation.chat.cancel',
data: {}
}));
this._log(`interrupt ${this.joinUserId} ${result}`);
} catch (e) {
this.dispatch(event_names.ERROR, e);
throw e;
}
}
async sendMessage(message) {
try {
const result = await this.engine.sendUserMessage(this.joinUserId, JSON.stringify(message));
this._log(`sendMessage ${this.joinUserId} ${JSON.stringify(message)} ${result}`);
} catch (e) {
this.dispatch(event_names.ERROR, e);
throw e;
}
}
enableAudioPropertiesReport(config) {
this.engine.enableAudioPropertiesReport(config);
}
// eslint-disable-next-line @typescript-eslint/no-explicit-any
handleLocalAudioPropertiesReport(event) {
var _event__audioPropertiesInfo, _event_;
if (this._debug && (null === (_event_ = event[0]) || void 0 === _event_ ? void 0 : null === (_event__audioPropertiesInfo = _event_.audioPropertiesInfo) || void 0 === _event__audioPropertiesInfo ? void 0 : _event__audioPropertiesInfo.linearVolume) > 0) console.log('handleLocalAudioPropertiesReport', event);
}
handleRemoteAudioPropertiesReport(event) {
if (this._debug) console.log('handleRemoteAudioPropertiesReport', event);
}
async enableAudioNoiseReduction() {
var _this_engine;
await (null === (_this_engine = this.engine) || void 0 === _this_engine ? void 0 : _this_engine.setAudioCaptureConfig({
noiseSuppression: true,
echoCancellation: true,
autoGainControl: true
}));
}
async initAIAnsExtension() {
const AIAnsExtension = new (extension_ainr_default())();
await this.engine.registerExtension(AIAnsExtension);
this._AIAnsExtension = AIAnsExtension;
}
changeAIAnsExtension(enable) {
if (enable) {
var _this__AIAnsExtension;
null === (_this__AIAnsExtension = this._AIAnsExtension) || void 0 === _this__AIAnsExtension || _this__AIAnsExtension.enable();
} else {
var _this__AIAnsExtension1;
null === (_this__AIAnsExtension1 = this._AIAnsExtension) || void 0 === _this__AIAnsExtension1 || _this__AIAnsExtension1.disable();
}
}
async startAudioPlaybackDeviceTest() {
try {
await this.engine.startAudioPlaybackDeviceTest('audio-test.wav', 200);
} catch (e) {
this.dispatch(event_names.ERROR, e);
throw e;
}
}
stopAudioPlaybackDeviceTest() {
try {
this.engine.stopAudioPlaybackDeviceTest();
} catch (e) {
this.dispatch(event_names.ERROR, e);
throw e;
}
}
getRtcEngine() {
return this.engine;
}
// eslint-disable-next-line max-params
constructor(appId, debug = false, isTestEnv = false, isSupportVideo = false, videoConfig){
super(debug), this.joinUserId = '', this._AIAnsExtension = null, this._isSupportVideo = false;
if (isTestEnv) rtc_default().setParameter('ICE_CONFIG_REQUEST_URLS', [
'rtc-test.bytedance.com'
]);
else localStorage.removeItem('RTC_ACCESS_URLS-VolcEngine');
this.engine = rtc_default().createEngine(appId);
this.handleMessage = this.handleMessage.bind(this);
this.handleUserJoin = this.handleUserJoin.bind(this);
this.handleUserLeave = this.handleUserLeave.bind(this);
this.handleEventError = this.handleEventError.bind(this);
this.handlePlayerEvent = this.handlePlayerEvent.bind(this);
this.handleNetworkQuality = this.handleNetworkQuality.bind(this);
this.handleTrackEnded = this.handleTrackEnded.bind(this);
// Debug only
this.handleLocalAudioPropertiesReport = this.handleLocalAudioPropertiesReport.bind(this);
this.handleRemoteAudioPropertiesReport = this.handleRemoteAudioPropertiesReport.bind(this);
this._isSupportVideo = isSupportVideo;
this._videoConfig = videoConfig;
}
}
// Only use for test
const TEST_APP_ID = '6705332c79516e015e3e5f0c';
class RealtimeClient extends RealtimeEventHandler {
/**
* en: Establish a connection to the Coze API and join the room
*
* zh: 建立与 Coze API 的连接并加入房间
*/ async connect() {
var _this__config_videoConfig;
const { botId, conversationId, voiceId, getRoomInfo } = this._config;
this.dispatch(event_names.CONNECTING, {});
let roomInfo;
try {
// Step1 get token
if (getRoomInfo) roomInfo = await getRoomInfo();
else {
const config = {};
if (this._config.prologueContent) config.prologue_content = this._config.prologueContent;
if (void 0 !== this._config.roomMode && null !== this._config.roomMode) config.room_mode = this._config.roomMode || api_.RoomMode.Default;
if (this._config.videoConfig) {
if (isScreenShareDevice(this._config.videoConfig.videoInputDeviceId)) config.video_config = {
stream_video_type: 'screen'
};
else config.video_config = {
stream_video_type: 'main'
};
}
if (this._config.translateConfig) config.translate_config = this._config.translateConfig;
const params = {
bot_id: botId,
conversation_id: conversationId || void 0,
voice_id: voiceId && voiceId.length > 0 ? voiceId : void 0,
connector_id: this._config.connectorId,
uid: this._config.userId || void 0,
workflow_id: this._config.workflowId || void 0,
config
};
roomInfo = await this._api.audio.rooms.create(params);
}
} catch (error) {
this.dispatch(event_names.ERROR, error);
throw new RealtimeAPIError(error_RealtimeError.CREATE_ROOM_ERROR, error instanceof Error ? error.message : 'Unknown error', error);
}
this.dispatch(event_names.ROOM_INFO, {
roomId: roomInfo.room_id,
uid: roomInfo.uid,
token: roomInfo.token,
appId: roomInfo.app_id
});
this._isTestEnv = TEST_APP_ID === roomInfo.app_id;
// Step2 create engine
this._client = new EngineClient(roomInfo.app_id, this._config.debug, this._isTestEnv, this._isSupportVideo, this._config.videoConfig);
// Step3 bind engine events
this._client.bindEngineEvents();
this._client.on(event_names.ALL, (eventName, data)=>{
this.dispatch(eventName, data, false);
});
if (this._config.suppressStationaryNoise) {
await this._client.enableAudioNoiseReduction();
this.dispatch(event_names.SUPPRESS_STATIONARY_NOISE, {});
}
if (this._config.suppressNonStationaryNoise) try {
await this._client.initAIAnsExtension();
this._client.changeAIAnsExtension(true);
this.dispatch(event_names.SUPPRESS_NON_STATIONARY_NOISE, {});
} catch (error) {
console.warn('Config suppressNonStationaryNoise is not supported', error);
}
var _this__config_audioMutedDefault, _this__config_videoConfig_videoOnDefault, _this__config_isAutoSubscribeAudio;
// Step4 join room
await this._client.joinRoom({
token: roomInfo.token,
roomId: roomInfo.room_id,
uid: roomInfo.uid,
audioMutedDefault: null !== (_this__config_audioMutedDefault = this._config.audioMutedDefault) && void 0 !== _this__config_audioMutedDefault && _this__config_audioMutedDefault,
videoOnDefault: null === (_this__config_videoConfig_videoOnDefault = null === (_this__config_videoConfig = this._config.videoConfig) || void 0 === _this__config_videoConfig ? void 0 : _this__config_videoConfig.videoOnDefault) || void 0 === _this__config_videoConfig_videoOnDefault || _this__config_videoConfig_videoOnDefault,
isAutoSubscribeAudio: null === (_this__config_isAutoSubscribeAudio = this._config.isAutoSubscribeAudio) || void 0 === _this__config_isAutoSubscribeAudio || _this__config_isAutoSubscribeAudio
});
// Step5 create local stream
await this._client.createLocalStream(roomInfo.uid, this._config.videoConfig);
// step6 set connected and dispatch connected event
this.isConnected = true;
this.dispatch(event_names.CONNECTED, {
roomId: roomInfo.room_id,
uid: roomInfo.uid,
token: roomInfo.token,
appId: roomInfo.app_id
});
}
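/**
 * End-to-end connection sketch. The config field names follow the constructor docs at the
 * bottom of this class; the token, bot and connector values are placeholders you must
 * supply:
 *
 *   import { RealtimeClient, EventNames } from '@coze/realtime-api';
 *
 *   const client = new RealtimeClient({
 *     accessToken: 'your_access_token',
 *     botId: 'your_bot_id',
 *     connectorId: 'your_connector_id',
 *     debug: true,
 *   });
 *   client.on(EventNames.ALL, (eventName, data) => console.log(eventName, data));
 *   await client.connect(); // creates the room, joins it and starts local capture
 */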
/**
* en: Interrupt the current conversation
*
* zh: 中断当前对话
*/ async interrupt() {
var _this__client;
await (null === (_this__client = this._client) || void 0 === _this__client ? void 0 : _this__client.stop());
this.dispatch(event_names.INTERRUPTED, {});
}
/**
* en: Disconnect from the current session
*
* zh: 断开与当前会话的连接
*/ async disconnect() {
var _this__client;
await (null === (_this__client = this._client) || void 0 === _this__client ? void 0 : _this__client.disconnect());
this.isConnected = false;
this._client = null;
this.dispatch(event_names.DISCONNECTED, {});
}
/**
* en: Send a message to the bot
*
* zh: 发送消息给Bot
*/ async sendMessage(message) {
var _this__client;
await (null === (_this__client = this._client) || void 0 === _this__client ? void 0 : _this__client.sendMessage(message));
const eventType = 'string' == typeof message.event_type ? message.event_type : 'unknown_event';
this.dispatch(`client.${eventType}`, message);
}
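/**
 * sendMessage sketch. The payload is forwarded verbatim to the bot over the RTC data
 * channel and re-dispatched locally as `client.<event_type>`. The event catalogue is
 * defined by the Coze realtime protocol; this particular event_type mirrors the one used
 * internally by interrupt():
 *
 *   await client.sendMessage({
 *     id: 'event_1',
 *     event_type: 'conversation.chat.cancel',
 *     data: {},
 *   });
 */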
/**
* en: Enable or disable audio
*
* zh: 启用或禁用音频
*/ async setAudioEnable(isEnable) {
var _this__client;
await (null === (_this__client = this._client) || void 0 === _this__client ? void 0 : _this__client.changeAudioState(isEnable));
if (isEnable) this.dispatch(event_names.AUDIO_UNMUTED, {});
else this.dispatch(event_names.AUDIO_MUTED, {});
}
async setVideoEnable(isEnable) {
try {
var _this__client;
await (null === (_this__client = this._client) || void 0 === _this__client ? void 0 : _this__client.changeVideoState(isEnable));
this.dispatch(isEnable ? event_names.VIDEO_ON : event_names.VIDEO_OFF, {});
} catch (e) {
this.dispatch(event_names.VIDEO_ERROR, e);
throw e;
}
}
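/**
 * Microphone/camera toggle sketch; the client events dispatched by each call are noted in
 * the comments:
 *
 *   await client.setAudioEnable(false); // stop publishing the microphone (AUDIO_MUTED)
 *   await client.setAudioEnable(true);  // resume publishing (AUDIO_UNMUTED)
 *   await client.setVideoEnable(true);  // start video or screen capture (VIDEO_ON)
 */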
/**
* en: Enable audio properties reporting (debug mode only)
*
* zh: 启用音频属性报告(仅限调试模式)
*/ enableAudioPropertiesReport(config) {
if (this._config.debug) {
var _this__client;
null === (_this__client = this._client) || void 0 === _this__client || _this__client.enableAudioPropertiesReport(config);
return true;
}
console.warn('enableAudioPropertiesReport is not supported in non-debug mode');
return false;
}
/**
* en: Start audio playback device test (debug mode only)
*
* zh: 开始音频播放设备测试(仅限调试模式)
*/ async startAudioPlaybackDeviceTest() {
if (this._config.debug) {
var _this__client;
await (null === (_this__client = this._client) || void 0 === _this__client ? void 0 : _this__client.startAudioPlaybackDeviceTest());
} else console.warn('startAudioPlaybackDeviceTest is not supported in non-debug mode');
}
/**
* en: Stop audio playback device test (debug mode only)
*
* zh: 停止音频播放设备测试(仅限调试模式)
*/ stopAudioPlaybackDeviceTest() {
if (this._config.debug) {
var _this__client;
null === (_this__client = this._client) || void 0 === _this__client || _this__client.stopAudioPlaybackDeviceTest();
} else console.warn('stopAudioPlaybackDeviceTest is not supported in non-debug mode');
}
/**
* en: Set the audio input device
*
* zh: 设置音频输入设备
*/ async setAudioInputDevice(deviceId) {
var _this__client;
await (null === (_this__client = this._client) || void 0 === _this__client ? void 0 : _this__client.setAudioInputDevice(deviceId));
this.dispatch(event_names.AUDIO_INPUT_DEVICE_CHANGED, {
deviceId
});
}
/**
* en: Set the audio output device
*
* zh: 设置音频输出设备
*/ async setAudioOutputDevice(deviceId) {
var _this__client;
await (null === (_this__client = this._client) || void 0 === _this__client ? void 0 : _this__client.setAudioOutputDevice(deviceId));
this.dispatch(event_names.AUDIO_OUTPUT_DEVICE_CHANGED, {
deviceId
});
}
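/**
 * Device-switching sketch combining RealtimeUtils.getAudioDevices with the two setters
 * above. An existing connected `client` is assumed; picking the first listed device is
 * purely illustrative:
 *
 *   import { RealtimeUtils } from '@coze/realtime-api';
 *
 *   const { audioInputs, audioOutputs } = await RealtimeUtils.getAudioDevices();
 *   await client.setAudioInputDevice(audioInputs[0].deviceId);
 *   await client.setAudioOutputDevice(audioOutputs[0].deviceId);
 */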
/**
* en: Set the video input device
*
* zh: 设置视频输入设备
*/ async setVideoInputDevice(deviceId) {
try {
var _this__client;
await (null === (_this__client = this._client) || void 0 === _this__client ? void 0 : _this__client.setVideoInputDevice(deviceId));
this.dispatch(event_names.VIDEO_ON, {});
} catch (e) {
this.dispatch(event_names.VIDEO_ERROR, e);
throw e;
}
this.dispatch(event_names.VIDEO_INPUT_DEVICE_CHANGED, {
deviceId
});
}
/**
     * en: Get the RTC engine instance; for details visit https://www.volcengine.com/docs/6348/104481
*
* zh: 获取 RTC 引擎实例,详情请访问 https://www.volcengine.com/docs/6348/104481
*/ getRtcEngine() {
var _this__client;
return null === (_this__client = this._client) || void 0 === _this__client ? void 0 : _this__client.getRtcEngine();
}
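/**
 * Escape-hatch sketch: the returned object is the underlying @volcengine/rtc engine
 * (see the docs linked above), so anything you call on it is outside this SDK's surface:
 *
 *   const engine = client.getRtcEngine();
 *   // engine is undefined until connect() has created the internal EngineClient
 */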
/**
* Constructor for initializing a RealtimeClient instance.
*
* 构造函数,初始化RealtimeClient实例。
*
* @param config
* @param config.accessToken - Required, Access Token. |
* 必填,Access Token。
* @param config.botId - Required, Bot Id. |
* 必填,Bot Id。
* @param config.voiceId - Optional, Voice Id. |
* 可选,音色Id。
* @param config.conversationId - Optional, Conversation Id. |
* 可选,会话Id。
* @param config.userId - Optional, User Id. |
* 可选,用户Id。
* @param config.baseURL - Optional, defaults to "https://api.coze.cn". |
* 可选,默认值为 "https://api.coze.cn"。
 * @param config.debug - Optional, defaults to false. |
* 可选,默认值为 false。
* @param config.allowPersonalAccessTokenInBrowser
* - Optional, whether to allow personal access tokens in browser environment. |
* 可选,是否允许在浏览器环境中使用个人访问令牌。
* @param config.audioMutedDefault - Optional, whether audio is muted by default, defaults to false. |
* 可选,默认是否静音,默认值为 false。
* @param config.connectorId - Required, Connector Id. |
* 必填,渠道 Id。
* @param config.suppressStationaryNoise - Optional, suppress stationary noise, defaults to false. |
* 可选,默认是否抑制静态噪声,默认值为 false。
* @param config.suppressNonStationaryNoise - Optional, suppress non-stationary noise, defaults to false. |
* 可选,默认是否抑制非静态噪声,默认值为 false。
* @param config.isAutoSubscribeAudio - Optional, whether to automatically subscribe to bot reply audio streams, defaults to true. |
* @param config.videoConfig - Optional, Video configuration. |
* 可选,视频配置。
* @param config.videoConfig.videoOnDefault - Optional, Whether to turn on video by default, defaults to true. |
* 可选,默认是否开启视频,默认值为 true。
* @param config.videoConfig.renderDom - Optional, The DOM element to render the video stream to. |
* 可选,渲染视频流的 DOM 元素。
* @param config.videoConfig.videoInputDeviceId - Optional, The device ID of the video input device to use. |
* 可选,视频输入设备的设备 ID。
* @param config.videoConfig.screenConfig - Optional, Screen share configuration if videoInputDeviceId is 'screenShare' see https://www.volcengine.com/docs/6348/104481#screenconfig for more details. |
* 可选,屏幕共享配置,如果 videoInputDeviceId 是 'screenShare',请参考 https://www.volcengine.com/docs/6348/104481#screenconfig 了解更多详情。
* @param config.prologueContent - Optional, Prologue content. | 可选,开场白内容。
* @param config.roomMode - Optional, Room mode. | 可选,房间模式。
*/ constructor(config){
super(config.debug), this._client = null, this.isConnected = false, this._isTestEnv = false, this._isSupportVideo = false;
this._config = config;
var _this__config_baseURL;
const defaultBaseURL = null !== (_this__config_baseURL = this._config.baseURL) && void 0 !== _this__config_baseURL ? _this__config_baseURL : 'https://api.coze.cn';
this._config.baseURL = defaultBaseURL;
// init api
this._api = new api_.CozeAPI({
token: this._config.accessToken,
baseURL: defaultBaseURL,
allowPer