@coze/api
Version:
Official Coze Node.js SDK for seamless AI integration into your applications
1,333 lines (1,325 loc) • 264 kB
JavaScript
import * as __WEBPACK_EXTERNAL_MODULE_agora_rte_extension__ from "agora-rte-extension";
import * as __WEBPACK_EXTERNAL_MODULE_agora_rtc_sdk_ng__ from "agora-rtc-sdk-ng";
import * as __WEBPACK_EXTERNAL_MODULE_agora_extension_ai_denoiser__ from "agora-extension-ai-denoiser";
import * as __WEBPACK_EXTERNAL_MODULE_uuid__ from "uuid";
import * as __WEBPACK_EXTERNAL_MODULE_axios__ from "axios";
import * as __WEBPACK_EXTERNAL_MODULE_ws__ from "ws";
import * as __WEBPACK_EXTERNAL_MODULE_reconnecting_websocket__ from "reconnecting-websocket";
import * as __WEBPACK_EXTERNAL_MODULE_os__ from "os";
import * as __WEBPACK_EXTERNAL_MODULE_node_fetch__ from "node-fetch";
import "crypto";
import "jsonwebtoken";
import * as __WEBPACK_EXTERNAL_MODULE_agora_rtc_sdk_ng_esm__ from "agora-rtc-sdk-ng/esm";
import * as __WEBPACK_EXTERNAL_MODULE_opus_encdec_src_oggOpusEncoder_js__ from "opus-encdec/src/oggOpusEncoder.js";
import * as __WEBPACK_EXTERNAL_MODULE_opus_encdec_dist_libopus_encoder_js__ from "opus-encdec/dist/libopus-encoder.js";
import * as __WEBPACK_EXTERNAL_MODULE_opus_encdec_src_oggOpusDecoder_js__ from "opus-encdec/src/oggOpusDecoder.js";
import * as __WEBPACK_EXTERNAL_MODULE_opus_encdec_dist_libopus_decoder_js__ from "opus-encdec/dist/libopus-decoder.js";
// The require scope
var __webpack_require__ = {};
/************************************************************************/ // webpack/runtime/define_property_getters
(()=>{
__webpack_require__.d = function(exports, definition) {
for(var key in definition)if (__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) Object.defineProperty(exports, key, {
enumerable: true,
get: definition[key]
});
};
})();
// webpack/runtime/has_own_property
(()=>{
__webpack_require__.o = function(obj, prop) {
return Object.prototype.hasOwnProperty.call(obj, prop);
};
})();
// webpack/runtime/make_namespace_object
(()=>{
// define __esModule on exports
__webpack_require__.r = function(exports) {
if ('undefined' != typeof Symbol && Symbol.toStringTag) Object.defineProperty(exports, Symbol.toStringTag, {
value: 'Module'
});
Object.defineProperty(exports, '__esModule', {
value: true
});
};
})();
/************************************************************************/ // NAMESPACE OBJECT: ./src/ws-tools/utils/index.ts
var utils_namespaceObject = {};
__webpack_require__.r(utils_namespaceObject);
__webpack_require__.d(utils_namespaceObject, {
checkDenoiserSupport: ()=>checkDenoiserSupport,
checkDevicePermission: ()=>checkDevicePermission,
downsampleTo8000: ()=>downsampleTo8000,
encodeG711A: ()=>encodeG711A,
encodeG711U: ()=>encodeG711U,
float32ToInt16Array: ()=>float32ToInt16Array,
floatTo16BitPCM: ()=>floatTo16BitPCM,
getAudioDevices: ()=>getAudioDevices,
isBrowserExtension: ()=>utils_isBrowserExtension,
isHarmonOS: ()=>isHarmonOS,
isMobile: ()=>isMobile,
setValueByPath: ()=>setValueByPath
});
/* eslint-disable @typescript-eslint/no-explicit-any */ // Disable log upload and log printing
__WEBPACK_EXTERNAL_MODULE_agora_rtc_sdk_ng__["default"].disableLogUpload();
__WEBPACK_EXTERNAL_MODULE_agora_rtc_sdk_ng__["default"].setLogLevel(3);
__WEBPACK_EXTERNAL_MODULE_agora_rte_extension__.logger.setLogLevel(3);
/**
* Check audio device permissions
* @returns {Promise<{audio: boolean}>} Whether audio device permission is granted
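 * @example
 * // Illustrative usage (assumes a secure browser context with a microphone):
 * const { audio } = await checkDevicePermission();
 * if (!audio) console.warn('Microphone permission unavailable');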
*/ const checkDevicePermission = async ()=>{
const result = {
audio: true
};
try {
var _navigator_permissions;
// Check if browser supports mediaDevices API
if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia) {
console.error('Browser does not support mediaDevices API');
result.audio = false;
}
// Check permission status first through permissions API
const permissionStatus = await (null === (_navigator_permissions = navigator.permissions) || void 0 === _navigator_permissions ? void 0 : _navigator_permissions.query({
name: 'microphone'
})) || {
state: 'prompt'
};
// If permission has been denied
if ('denied' === permissionStatus.state) {
console.error('Microphone permission denied');
result.audio = false;
}
// If permission status is prompt or granted, try to get device
if ('prompt' === permissionStatus.state || 'granted' === permissionStatus.state) {
const stream = await navigator.mediaDevices.getUserMedia({
audio: true
});
// After obtaining successfully, close the audio stream
if (stream) stream.getTracks().forEach((track)=>track.stop());
}
} catch (error) {
// User denied authorization or other errors
console.error('Failed to get audio permission:', error);
result.audio = false;
}
return result;
};
/**
* Get list of audio devices
* @returns {Promise<{audioInputs: MediaDeviceInfo[], audioOutputs: MediaDeviceInfo[]}>} Audio devices
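 * @example
 * // Illustrative usage; device labels are only populated after permission is granted:
 * const { audioInputs, audioOutputs } = await getAudioDevices();
 * console.log(audioInputs.map((d) => d.label), audioOutputs.length);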
*/ const getAudioDevices = async ()=>{
try {
// Request microphone permission first, so we can get the complete device information
const { audio: audioPermission } = await checkDevicePermission();
if (!audioPermission) throw new Error('Microphone permission denied');
// Get all media devices
const devices = await navigator.mediaDevices.enumerateDevices();
if (!(null == devices ? void 0 : devices.length)) return {
audioInputs: [],
audioOutputs: []
};
return {
audioInputs: devices.filter((i)=>i.deviceId && 'audioinput' === i.kind),
audioOutputs: devices.filter((i)=>i.deviceId && 'audiooutput' === i.kind)
};
} catch (error) {
console.error('Failed to get audio devices:', error);
return {
audioInputs: [],
audioOutputs: []
};
}
};
/**
* Convert floating point numbers to 16-bit PCM
* @param float32Array - Array of floating point numbers
* @returns {ArrayBuffer} 16-bit PCM
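 * @example
 * // Minimal sketch: one second of silence at 44.1 kHz becomes 2 bytes per sample
 * const pcmBuffer = floatTo16BitPCM(new Float32Array(44100));
 * console.log(pcmBuffer.byteLength); // 88200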
*/ const floatTo16BitPCM = (float32Array)=>{
const buffer = new ArrayBuffer(2 * float32Array.length);
const view = new DataView(buffer);
let offset = 0;
for(let i = 0; i < float32Array.length; i++, offset += 2){
const s = Math.max(-1, Math.min(1, float32Array[i]));
view.setInt16(offset, s < 0 ? 0x8000 * s : 0x7fff * s, true);
}
return buffer;
};
/**
* Convert Float32Array to Int16Array (without going through ArrayBuffer)
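 * @example
 * // Minimal sketch: full-scale floats map to the Int16 extremes
 * const int16 = float32ToInt16Array(new Float32Array([1, -1]));
 * console.log(int16); // [32767, -32768]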
*/ function float32ToInt16Array(float32) {
const int16 = new Int16Array(float32.length);
for(let i = 0; i < float32.length; i++){
const s = Math.max(-1, Math.min(1, float32[i]));
int16[i] = s < 0 ? 0x8000 * s : 0x7fff * s;
}
return int16;
}
/**
 * Simple decimation (picks every 6th sample, no low-pass filtering) to downsample a Float32Array from 48000Hz to 8000Hz
* @param input Float32Array 48000Hz
* @returns Float32Array 8000Hz
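 * @example
 * // Illustrative: 480 samples at 48 kHz (10 ms) reduce to 80 samples at 8 kHz
 * const out = downsampleTo8000(new Float32Array(480));
 * console.log(out.length); // 80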
*/ function downsampleTo8000(input) {
const ratio = 6; // 6
const outputLength = Math.floor(input.length / ratio);
const output = new Float32Array(outputLength);
for(let i = 0; i < outputLength; i++)output[i] = input[Math.floor(i * ratio)];
return output;
}
/**
* Check if device is mobile
* @returns {boolean} Whether device is mobile
*/ const isMobile = ()=>/Android|webOS|iPhone|iPad|iPod|BlackBerry|IEMobile|Opera Mini/i.test(navigator.userAgent);
const isHarmonOS = ()=>/harmony|hmos|huawei/i.test(navigator.userAgent);
/**
* Check if AI denoising is supported
* @param assetsPath - Public path for denoising plugin
* @returns {boolean} Whether AI denoising is supported
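 * @example
 * // Illustrative: probe support once at startup (falls back to the default
 * // CDN assetsPath when no argument is given)
 * if (!checkDenoiserSupport()) {
 *   console.warn('AI denoising unavailable; continuing with raw audio');
 * }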
*/ const checkDenoiserSupport = (assetsPath)=>{
if (void 0 !== window.__denoiserSupported) return window.__denoiserSupported;
// Pass in the public path where the Wasm files are located to create an AIDenoiserExtension instance; the path must not end with "/"
const external = window.__denoiser || new __WEBPACK_EXTERNAL_MODULE_agora_extension_ai_denoiser__.AIDenoiserExtension({
assetsPath: null != assetsPath ? assetsPath : 'https://lf3-static.bytednsdoc.com/obj/eden-cn/613eh7lpqvhpeuloz/websocket'
});
window.__denoiser = external;
external.onloaderror = (e)=>{
// If the Wasm file fails to load, mark the plugin as unsupported
console.error('Denoiser load error', e);
window.__denoiserSupported = false;
};
// Check compatibility
if (external.checkCompatibility()) {
// Register the plugin
// see https://github.com/AgoraIO/API-Examples-Web/blob/main/src/example/extension/aiDenoiser/agora-extension-ai-denoiser/README.md
__WEBPACK_EXTERNAL_MODULE_agora_rtc_sdk_ng__["default"].registerExtensions([
external
]);
window.__denoiserSupported = true;
return true;
}
// The current browser does not support the AI denoising plugin; skip the remaining setup
console.error('AI Denoiser is not supported in this browser');
window.__denoiserSupported = false;
return false;
};
const utils_isBrowserExtension = ()=>'undefined' != typeof chrome && !!chrome.runtime && !!chrome.runtime.id;
/**
* Convert 16-bit linear PCM data to G.711 A-law
* @param {Int16Array|Array} pcmData - 16-bit signed PCM sample data
* @returns {Uint8Array} - G.711 A-law encoded data
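 * @example
 * // Minimal sketch: A-law encoding yields 1 byte per 16-bit input sample
 * const alaw = encodeG711A(new Int16Array([0, 1000, -1000, 32767]));
 * console.log(alaw.length); // 4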
*/ function encodeG711A(pcmData) {
const aLawData = new Uint8Array(pcmData.length);
// A-law compression table - used to optimize performance
const LOG_TABLE = [
1, 1, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4,
5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7
];
for(let i = 0; i < pcmData.length; i++){
let sample = pcmData[i];
const sign = sample < 0 ? 0 : 0x80;
// Get the absolute value of the sample and limit it to the 16-bit range
if (0 === sign) sample = -sample;
if (sample > 32767) sample = 32767;
// Use linear quantization for small signals, logarithmic quantization for large signals
let compressedValue;
if (sample < 256) compressedValue = sample >> 4;
else {
// Determine the "exponent" part of the sample
const exponent = LOG_TABLE[sample >> 8 & 0x7f];
const mantissa = sample >> exponent + 3 & 0x0f;
compressedValue = exponent << 4 | mantissa;
}
// Invert even bits (this is a feature of A-law)
aLawData[i] = (sign | compressedValue) ^ 0x55;
}
return aLawData;
}
/**
* Encode 16-bit PCM to G.711 μ-law (g711u)
* @param pcm16 - Int16Array of PCM samples
* @returns {Uint8Array} G.711U encoded data
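 * @example
 * // Minimal sketch: μ-law also yields 1 byte per input sample
 * const ulaw = encodeG711U(new Int16Array([0, 1000, -1000, 32767]));
 * console.log(ulaw.length); // 4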
*/ function encodeG711U(pcm16) {
const BIAS = 0x84;
const CLIP = 32635;
const out = new Uint8Array(pcm16.length);
for(let i = 0; i < pcm16.length; i++){
let pcm = pcm16[i];
const sign = pcm >> 8 & 0x80;
if (0 !== sign) pcm = -pcm;
if (pcm > CLIP) pcm = CLIP;
pcm += BIAS;
let exponent = 7;
for(let expMask = 0x4000; (pcm & expMask) === 0 && exponent > 0; expMask >>= 1)exponent--;
const mantissa = pcm >> exponent + 3 & 0x0f;
const ulaw = ~(sign | exponent << 4 | mantissa);
out[i] = ulaw;
}
return out;
}
/**
* Sets a value in an object at a specified path using dot notation.
* Creates nested objects along the path if they don't exist.
*
* @param obj - The target object to modify
* @param path - The path in dot notation (e.g., 'a.b.c')
* @param value - The value to set at the specified path
* @returns The modified object
*
* @example
* // Set a value at a nested path
* const obj = {};
* setValueByPath(obj, 'user.profile.name', 'John');
* // Result: { user: { profile: { name: 'John' } } }
*/ function setValueByPath(obj, path, value) {
if (!obj || 'object' != typeof obj) throw new Error('Target must be an object');
if (!path) throw new Error('Path cannot be empty');
const keys = path.split('.');
let current = obj;
// Navigate to the last-but-one key
for(let i = 0; i < keys.length - 1; i++){
const key = keys[i];
// Skip dangerous keys to prevent prototype pollution
if ('__proto__' === key || 'constructor' === key) throw new Error(`Invalid key detected: ${key}`);
// Create empty object if the key doesn't exist or is not an object
if (!current[key] || 'object' != typeof current[key]) current[key] = {};
current = current[key];
}
// Set the value at the final key
const lastKey = keys[keys.length - 1];
if ('__proto__' === lastKey || 'constructor' === lastKey) throw new Error(`Invalid key detected: ${lastKey}`);
current[lastKey] = value;
return obj;
}
const StreamProcessorWorklet = `
class StreamProcessor extends AudioWorkletProcessor {
constructor() {
super();
this.hasStarted = false;
this.hasInterrupted = false;
this.outputBuffers = [];
this.bufferLength = 128;
this.write = { buffer: new Float32Array(this.bufferLength), trackId: null };
this.writeOffset = 0;
this.trackSampleOffsets = {};
this.volume = 1.0;
this.port.onmessage = (event) => {
if (event.data) {
const payload = event.data;
if (payload.event === 'write') {
const int16Array = payload.buffer;
const float32Array = new Float32Array(int16Array.length);
for (let i = 0; i < int16Array.length; i++) {
float32Array[i] = int16Array[i] / 0x8000; // Convert Int16 to Float32
}
this.writeData(float32Array, payload.trackId);
} else if (payload.event === 'volume') {
this.volume = payload.volume;
} else if (
payload.event === 'offset' ||
payload.event === 'interrupt'
) {
const requestId = payload.requestId;
const trackId = this.write.trackId;
const offset = this.trackSampleOffsets[trackId] || 0;
this.port.postMessage({
event: 'offset',
requestId,
trackId,
offset,
});
if (payload.event === 'interrupt') {
this.hasInterrupted = true;
}
} else {
throw new Error(\`Unhandled event "\${payload.event}"\`);
}
}
};
}
writeData(float32Array, trackId = null) {
let { buffer } = this.write;
let offset = this.writeOffset;
for (let i = 0; i < float32Array.length; i++) {
buffer[offset++] = float32Array[i];
if (offset >= buffer.length) {
this.outputBuffers.push(this.write);
this.write = { buffer: new Float32Array(this.bufferLength), trackId };
buffer = this.write.buffer;
offset = 0;
}
}
this.writeOffset = offset;
return true;
}
process(inputs, outputs, parameters) {
const output = outputs[0];
const outputChannelData = output[0];
const outputBuffers = this.outputBuffers;
if (this.hasInterrupted) {
this.port.postMessage({ event: 'stop' });
return false;
} else if (outputBuffers.length) {
if(!this.hasStarted){
this.hasStarted = true;
this.port.postMessage({ event: 'first_frame' });
}
const { buffer, trackId } = outputBuffers.shift();
for (let i = 0; i < outputChannelData.length; i++) {
outputChannelData[i] = (buffer[i] || 0) * this.volume;
}
if (trackId) {
this.trackSampleOffsets[trackId] =
this.trackSampleOffsets[trackId] || 0;
this.trackSampleOffsets[trackId] += buffer.length;
}
return true;
} else if (this.hasStarted) {
this.port.postMessage({ event: 'stop' });
return false;
} else {
return true;
}
}
}
registerProcessor('stream-processor', StreamProcessor);
`;
let src = '';
if (utils_isBrowserExtension()) src = chrome.runtime.getURL('stream-processor.js');
else {
const script = new Blob([
StreamProcessorWorklet
], {
type: 'application/javascript'
});
src = URL.createObjectURL(script);
}
const StreamProcessorSrc = src;
/**
* Local audio loopback implementation using WebRTC peer connections
* to create a local audio communication channel.
 * Full audio loopback lifecycle management:
 * connect() - establish the initial connection
 * start() - start the audio loopback
 * stop() - pause the audio loopback
 * cleanup() - fully release all resources
*/ class LocalLoopback {
/**
* Establishes a connection between two RTCPeerConnection objects
* to create a local audio loopback channel
* @param context - The AudioContext to use for audio processing
* @param stream - The MediaStream to use for the loopback
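 * @example
 * // Illustrative wiring (assumes a user-gesture-resumed AudioContext and a
 * // microphone MediaStream obtained elsewhere, here called micStream):
 * const loopback = new LocalLoopback(true);
 * await loopback.connect(new AudioContext(), micStream);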
*/ async connect(context, stream) {
const servers = {
iceServers: [],
iceCandidatePoolSize: 1
};
this.mediaStream = stream;
const pc1 = new RTCPeerConnection(servers);
pc1.onicecandidate = (e)=>this.onIceCandidate(pc1, e);
pc1.oniceconnectionstatechange = (e)=>this.onIceStateChange(pc1, e);
this._debug('Created local peer connection object pc1');
const pc2 = new RTCPeerConnection(servers);
pc2.onicecandidate = (e)=>this.onIceCandidate(pc2, e);
pc2.oniceconnectionstatechange = (e)=>this.onIceStateChange(pc2, e);
pc2.ontrack = this.gotRemoteStream;
this._debug('Created remote peer connection object pc2');
const filteredStream = this.applyFilter(context);
if (!filteredStream) {
pc1.close();
pc2.close();
return;
}
filteredStream.getTracks().forEach((track)=>pc1.addTrack(track, filteredStream));
pc1.createOffer({
iceRestart: true
}).then(this.gotDescription1).catch((error)=>console.log(`createOffer failed: ${error}`));
this.pc1 = pc1;
this.pc2 = pc2;
}
/**
 * Checks the WebRTC connection state and ensures the ICE state is connected
 * @returns Resolves once the ICE state is connected or completed
*/ async checkForReady() {
var _this_pc1;
// Check the ICE connection state
// Possible WebRTC ICE states: new, checking, connected, completed, failed, disconnected, closed
const validStates = [
'connected',
'completed'
];
var _this_pc1_iceConnectionState;
if (validStates.includes(null !== (_this_pc1_iceConnectionState = null === (_this_pc1 = this.pc1) || void 0 === _this_pc1 ? void 0 : _this_pc1.iceConnectionState) && void 0 !== _this_pc1_iceConnectionState ? _this_pc1_iceConnectionState : '')) return true;
var _this_pc11;
this._debug(`WebRTC connection not ready, current state: ${null === (_this_pc11 = this.pc1) || void 0 === _this_pc11 ? void 0 : _this_pc11.iceConnectionState}`);
await this.sleep(300);
await new Promise((resolve, reject)=>{
if (!this.pc1 || !this.pc2) {
this._error('WebRTC peer connections not initialized');
reject(new Error('WebRTC peer connections not initialized'));
return;
}
let retryCount = 0;
const handleReconnect = async ()=>{
retryCount++;
// Retry up to 5 times
if (retryCount >= 5) {
this._error('WebRTC connection not ready');
reject(new Error('WebRTC connection not ready'));
return;
}
const result = await this.reconnect();
if (result) {
this._debug('WebRTC connection reestablished');
resolve(true);
} else setTimeout(()=>{
handleReconnect();
}, 2000);
};
setTimeout(()=>{
handleReconnect();
}, 500);
});
}
/**
* Starts the audio loopback by connecting the provided AudioWorkletNode
* to the peer destination
* @param streamNode - The AudioWorkletNode to connect to the peer destination
*/ start(streamNode) {
if (!this.context || !this.peer) {
this._error('No audio context or peer found');
return;
}
if ('running' !== this.context.state) {
this._error('Audio context is not running');
return;
}
// Check the WebRTC connection state
if (!this.pc1 || !this.pc2) {
this._error('WebRTC peer connections not initialized');
return;
}
this.currentStreamNode = streamNode;
streamNode.connect(this.peer);
this._debug('local loopback start');
}
/**
* Stops the audio loopback temporarily without destroying connections
* Can be restarted by calling start() again
*/ stop() {
if (!this.currentStreamNode) {
this._debug('No active stream to stop');
return;
}
try {
// Disconnect the stream node from the peer destination
if (this.peer) this.currentStreamNode.disconnect(this.peer);
this.currentStreamNode = void 0;
this._debug('local loopback stopped');
} catch (err) {
this._error('Error stopping local loopback:', err);
}
}
/**
* Reconnects the WebRTC peer connections
* This method closes existing connections and establishes new ones
* while preserving the audio context and stream configuration
*/ async reconnect() {
this._debug('Reconnecting WebRTC peer connections');
// Save current context and stream
const currentContext = this.context;
const currentStream = this.mediaStream;
const currentStreamNode = this.currentStreamNode;
// Close existing peer connections but don't fully clean up
if (this.pc1) {
this.pc1.onicecandidate = null;
this.pc1.oniceconnectionstatechange = null;
this.pc1.close();
this.pc1 = void 0;
}
if (this.pc2) {
this.pc2.onicecandidate = null;
this.pc2.oniceconnectionstatechange = null;
this.pc2.ontrack = null;
this.pc2.close();
this.pc2 = void 0;
}
// Wait a short time to ensure connections are properly closed
await this.sleep(500);
// Reestablish connection if we have the necessary context
if (currentContext) {
await this.connect(currentContext, currentStream);
// If we were previously streaming, reconnect the stream node
if (currentStreamNode && this.peer) {
this._debug('Reestablishing audio connection');
// Wait for ICE connection to establish
const maxAttempts = 10;
let attempts = 0;
while(attempts < maxAttempts){
if (this.pc1) {
const pc1 = this.pc1;
const state = pc1.iceConnectionState;
if ('connected' === state || 'completed' === state) break;
}
await this.sleep(500);
attempts++;
this._debug(`Waiting for ICE connection (${attempts}/${maxAttempts})`);
}
// Reconnect the stream node
if (this.pc1) {
const pc1 = this.pc1;
const state = pc1.iceConnectionState;
if ('connected' === state || 'completed' === state) {
currentStreamNode.connect(this.peer);
this.currentStreamNode = currentStreamNode;
this._debug('Audio connection reestablished');
return true;
}
}
this._warn('Failed to establish ICE connection after multiple attempts');
return false;
}
return true;
}
this._error('Cannot reconnect - no audio context available');
return false;
}
sleep(ms) {
return new Promise((resolve)=>setTimeout(resolve, ms));
}
/**
* Creates and connects audio processing nodes for the media stream
* @param context - The AudioContext to use for creating audio nodes
* @returns The processed MediaStream or undefined if no stream is available
* @private
*/ applyFilter(context) {
if (!this.mediaStream) {
this._error('No media stream found');
return;
}
this.context = context;
this.mic = this.context.createMediaStreamSource(this.mediaStream);
this.peer = this.context.createMediaStreamDestination();
this.mic.connect(this.peer);
return this.peer.stream;
}
/**
* Handles the incoming remote stream from the peer connection
* @param e - The RTCTrackEvent containing the remote stream
* @private
*/ gotRemoteStream(e) {
this._debug('pc2 received remote stream', e.streams[0]);
if (this.remoteAudio.srcObject !== e.streams[0]) {
this.remoteAudio.srcObject = e.streams[0];
this.remoteAudio.muted = false;
this.remoteAudio.volume = 0.5;
const playPromise = this.remoteAudio.play();
if (playPromise) playPromise.catch((err)=>{
this._error('Failed to play audio:', err);
// If autoplay is prevented, try unlocking the audio context again
this._unlockAudioContext();
});
}
}
/**
* Handles the SDP offer from the first peer connection (pc1)
* @param desc - The RTCSessionDescriptionInit containing the SDP offer
* @private
*/ async gotDescription1(desc) {
var _this_pc1, _this_pc2, _this_pc21;
this._debug(`Offer from pc1\n${desc.sdp}`);
await (null === (_this_pc1 = this.pc1) || void 0 === _this_pc1 ? void 0 : _this_pc1.setLocalDescription(desc));
await (null === (_this_pc2 = this.pc2) || void 0 === _this_pc2 ? void 0 : _this_pc2.setRemoteDescription(desc));
null === (_this_pc21 = this.pc2) || void 0 === _this_pc21 || _this_pc21.createAnswer().then(this.gotDescription2).catch((error)=>console.error(`createAnswer failed: ${error}`));
}
/**
* Handles the SDP answer from the second peer connection (pc2)
* @param desc - The RTCSessionDescriptionInit containing the SDP answer
* @private
*/ async gotDescription2(desc) {
var _this_pc2, _this_pc1;
this._debug(`Answer from pc2\n${desc.sdp}`);
await (null === (_this_pc2 = this.pc2) || void 0 === _this_pc2 ? void 0 : _this_pc2.setLocalDescription(desc));
await (null === (_this_pc1 = this.pc1) || void 0 === _this_pc1 ? void 0 : _this_pc1.setRemoteDescription(desc));
}
/**
* Processes ICE candidates and forwards them to the other peer connection
* @param pc - The RTCPeerConnection that generated the candidate
* @param event - The RTCPeerConnectionIceEvent containing the candidate
* @private
*/ onIceCandidate(pc, event) {
var _this_getOtherPc;
null === (_this_getOtherPc = this.getOtherPc(pc)) || void 0 === _this_getOtherPc || _this_getOtherPc.addIceCandidate(event.candidate).then(()=>this.onAddIceCandidateSuccess(pc), (err)=>this.onAddIceCandidateError(pc, err));
this._debug(`${this.getName(pc)} ICE candidate:\n${event.candidate ? event.candidate.candidate : '(null)'}`);
}
/**
* Returns the other peer connection (pc1 or pc2) based on the input
* @param pc - The RTCPeerConnection to find the counterpart for
* @returns The other RTCPeerConnection
* @private
*/ getOtherPc(pc) {
return pc === this.pc1 ? this.pc2 : this.pc1;
}
/**
* Returns the name ('pc1' or 'pc2') of the peer connection for logging
* @param pc - The RTCPeerConnection to get the name for
* @returns The name of the peer connection
* @private
*/ getName(pc) {
return pc === this.pc1 ? 'pc1' : 'pc2';
}
/**
* Handles successful addition of an ICE candidate
* @param pc - The RTCPeerConnection that successfully added the candidate
* @private
*/ onAddIceCandidateSuccess(pc) {
this._debug(`${this.getName(pc)} addIceCandidate success`);
}
/**
* Handles errors that occur when adding an ICE candidate
* @param pc - The RTCPeerConnection that failed to add the candidate
* @param error - The error that occurred
* @private
*/ onAddIceCandidateError(pc, error) {
this._error(`${this.getName(pc)} addIceCandidate failed: ${error}`);
}
/**
* Handles ICE connection state changes
* @param pc - The RTCPeerConnection whose ICE state changed
* @param event - The event object containing state change information
* @private
*/ onIceStateChange(pc, event) {
if (pc) {
this._debug(`${this.getName(pc)} ICE state: ${pc.iceConnectionState}`);
this._debug('ICE state change event: ', event);
}
}
/**
* Logs debug information if debug mode is enabled
* @param args - Arguments to pass to console.log
* @private
*/ _debug() {
for(var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++)args[_key] = arguments[_key];
if (this.isDebug) console.log(...args);
}
/**
* Logs error messages to the console
* @param args - Arguments to pass to console.error
* @private
*/ _error() {
for(var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++)args[_key] = arguments[_key];
console.error(...args);
}
/**
* Logs warning messages to the console
* @param args - Arguments to pass to console.warn
* @private
*/ _warn() {
for(var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++)args[_key] = arguments[_key];
console.warn(...args);
}
/**
* Attempts to unlock the audio context for iOS devices
* Creates a silent audio element and plays it on user interaction
* to bypass iOS autoplay restrictions
* @private
*/ _unlockAudioContext() {
// Create a silent audio element
const silentSound = document.createElement('audio');
silentSound.setAttribute('src', 'data:audio/mp3;base64,//MkxAAHiAICWABElBeKPL/RANb2w+yiT1g/gTok//lP/W/l3h8QO/OCdCqCW2Cw//MkxAQHkAIWUAhEmAQXWUOFW2dxPu//9mr60ElY5sseQ+xxesmHKtZr7bsqqX2L//MkxAgFwAYiQAhEAC2hq22d3///9FTV6tA36JdgBJoOGgc+7qvqej5EPomQ+RMn/QmSACAv7mcADf//MkxBQHAAYi8AhEAO193vt9KGOq+6qcT7hhfN5FTInmwk8RkqKImTM55pRQHQSq//MkxBsGkgoIAABHhTACIJLf99nVI///yuW1uBqWfEu7CgNPWGpUadBmZ////4sL//MkxCMHMAH9iABEmAsKioqKigsLCwtVTEFNRTMuOTkuNVVVVVVVVVVVVVVVVVVV//MkxCkECAUYCAAAAFVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVV');
silentSound.volume = 0.001; // Very low volume, essentially silent
// Add event listeners for user interaction events
const pageEvents = [
'touchstart',
'touchend',
'mousedown',
'keydown'
];
const unlockAudio = ()=>{
this._debug('User interaction detected, trying to unlock audio');
const playPromise = silentSound.play();
if (playPromise) playPromise.catch(()=>{
// Catch error but don't handle it
}).then(()=>{
// Also try to play the actual remote audio
const remotePlayPromise = this.remoteAudio.play();
if (remotePlayPromise) remotePlayPromise.catch(()=>{
// Catch error but don't process it
});
// Remove all event listeners once succeeded
pageEvents.forEach((event)=>{
document.removeEventListener(event, unlockAudio);
});
this._debug('Audio context unlocked');
});
};
// Add all event listeners and track them for later cleanup
pageEvents.forEach((event)=>{
document.addEventListener(event, unlockAudio);
this.eventListeners.push({
element: document,
event,
handler: unlockAudio
});
});
// Also try to play immediately
setTimeout(()=>{
this._debug('Attempting initial audio unlock');
unlockAudio();
}, 100);
}
/**
* Cleans up all resources used by the LocalLoopback instance
* This should be called when the instance is no longer needed to prevent memory leaks
*/ cleanup() {
this._debug('Cleaning up LocalLoopback resources');
// Close peer connections
if (this.pc1) {
// 1. Stop all tracks (camera/microphone)
this.pc1.getSenders().forEach((sender)=>{
if (sender.track) sender.track.stop(); // Stop the media track
});
// 2. Remove all event listeners (to avoid memory leaks)
this.pc1.onicecandidate = null;
this.pc1.oniceconnectionstatechange = null;
this.pc1.close();
this.pc1 = void 0;
}
if (this.pc2) {
// 1. Stop all tracks (camera/microphone)
this.pc2.getSenders().forEach((sender)=>{
if (sender.track) sender.track.stop(); // Stop the media track
});
// 2. Remove all event listeners (to avoid memory leaks)
this.pc2.onicecandidate = null;
this.pc2.oniceconnectionstatechange = null;
this.pc2.close();
this.pc2 = void 0;
}
// Cleanup media stream
if (this.mediaStream) {
// Stop all tracks in the media stream
this.mediaStream.getTracks().forEach((track)=>{
track.stop();
});
this.mediaStream = void 0;
}
// Clean up current stream node
if (this.currentStreamNode) {
try {
this.currentStreamNode.disconnect();
} catch (e) {
// Ignore errors during disconnect
}
this.currentStreamNode = void 0;
}
// Disconnect audio nodes
if (this.mic) {
this.mic.disconnect();
this.mic = void 0;
}
if (this.peer) {
this.peer.disconnect();
this.peer = void 0;
}
// Clean up HTML audio element
if (this.remoteAudio) {
this.remoteAudio.pause();
this.remoteAudio.srcObject = null;
if (this.remoteAudio.parentNode) this.remoteAudio.parentNode.removeChild(this.remoteAudio);
}
// Remove any registered event listeners
this.eventListeners.forEach((param)=>{
let { element, event, handler } = param;
element.removeEventListener(event, handler);
});
this.eventListeners = [];
this._debug('LocalLoopback cleanup complete');
}
/**
* Initializes a new instance of LocalLoopback
* @param isDebug - Whether to enable debug logging
*/ constructor(isDebug = false){
this.eventListeners = [];
this.remoteAudio = document.createElement('audio');
this.remoteAudio.setAttribute('autoplay', 'true');
this.remoteAudio.setAttribute('muted', 'true');
this.remoteAudio.setAttribute('style', 'display: none');
document.body.appendChild(this.remoteAudio);
this.isDebug = isDebug;
// Unlock audio context for iOS devices
this._unlockAudioContext();
this.gotDescription1 = this.gotDescription1.bind(this);
this.gotDescription2 = this.gotDescription2.bind(this);
this.gotRemoteStream = this.gotRemoteStream.bind(this);
}
}
/* ESM default export */ const local_loopback = LocalLoopback;
/**
* G.711 codec implementation for A-law and μ-law
*/ // A-law to linear PCM conversion table
const ALAW_TO_LINEAR_TABLE = new Int16Array(256);
// μ-law to linear PCM conversion table
const ULAW_TO_LINEAR_TABLE = new Int16Array(256);
// Initialize conversion tables
(function() {
// A-law to linear PCM conversion
for(let i = 0; i < 256; i++){
const aval = 0x55 ^ i;
let t = (0x0f & aval) << 4;
let seg = (0x70 & aval) >> 4;
if (seg) t = t + 0x108 << seg - 1;
else t += 8;
ALAW_TO_LINEAR_TABLE[i] = 0x80 & aval ? t : -t;
}
// μ-law to linear PCM conversion
for(let i = 0; i < 256; i++){
const uval = ~i;
let t = ((0x0f & uval) << 3) + 0x84;
let seg = (0x70 & uval) >> 4;
t <<= seg;
ULAW_TO_LINEAR_TABLE[i] = 0x80 & uval ? 0x84 - t : t - 0x84;
}
})();
/**
* Converts G.711 A-law encoded data to PCM16 format
* @param {Uint8Array} alawData - A-law encoded data
* @returns {Int16Array} - PCM16 data
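 * @example
 * // Illustrative approximate round trip (A-law is lossy), using encodeG711A
 * // from this module:
 * const pcm = decodeAlaw(encodeG711A(new Int16Array([0, 1000, -1000])));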
*/ function decodeAlaw(alawData) {
const pcmData = new Int16Array(alawData.length);
for(let i = 0; i < alawData.length; i++)pcmData[i] = ALAW_TO_LINEAR_TABLE[alawData[i]];
return pcmData;
}
/**
* Converts G.711 μ-law encoded data to PCM16 format
* @param {Uint8Array} ulawData - μ-law encoded data
* @returns {Int16Array} - PCM16 data
*/ function decodeUlaw(ulawData) {
const pcmData = new Int16Array(ulawData.length);
for(let i = 0; i < ulawData.length; i++)pcmData[i] = ULAW_TO_LINEAR_TABLE[ulawData[i]];
return pcmData;
}
/**
* Plays audio streams received in raw PCM16, G.711a, or G.711u chunks from the browser
* @class
*/ class WavStreamPlayer {
/**
* Connects the audio context and enables output to speakers
* @returns {Promise<true>}
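 * @example
 * // Illustrative: connect (ideally from a user gesture) before queueing audio
 * const player = new WavStreamPlayer({ sampleRate: 24000 });
 * await player.connect();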
*/ async connect() {
this.context = new AudioContext({
sampleRate: this.sampleRate
});
if (this.enableLocalLoopback) {
var _this_localLoopback;
await (null === (_this_localLoopback = this.localLoopback) || void 0 === _this_localLoopback ? void 0 : _this_localLoopback.connect(this.context, this.localLoopbackStream));
}
if ('suspended' === this.context.state) await this.context.resume();
try {
await this.context.audioWorklet.addModule(this.scriptSrc);
} catch (e) {
console.error(e);
throw new Error(`Could not add audioWorklet module: ${this.scriptSrc}`);
}
return true;
}
setLocalLoopbackEnable(enable) {
this.enableLocalLoopback = enable;
if (this.enableLocalLoopback) {
if (this.localLoopback) this.localLoopback.cleanup();
this.localLoopback = new local_loopback(true);
} else if (this.localLoopback) {
this.localLoopback.cleanup();
this.localLoopback = void 0;
}
}
/**
* Pauses audio playback
*/ async pause() {
if (this.context && !this.isPaused) {
await this.context.suspend();
this.isPaused = true;
}
}
/**
* Resumes audio playback
*/ async resume() {
if (this.context && this.isPaused) {
await this.context.resume();
this.isPaused = false;
}
}
/**
* Toggles between play and pause states
*/ async togglePlay() {
if (this.isPaused) await this.resume();
else await this.pause();
}
/**
* Checks if audio is currently playing
* @returns {boolean}
*/ isPlaying() {
return Boolean(this.context && this.streamNode && !this.isPaused && 'running' === this.context.state);
}
/**
 * If local loopback is used, make sure the audio context is ready
* @returns {Promise<void>}
*/ async checkForReady() {
if (this.localLoopback && !this.context) {
await this._start();
await this.localLoopback.checkForReady();
}
}
/**
* Starts audio streaming
* @private
* @returns {Promise<true>}
*/ async _start() {
// Ensure worklet is loaded
if (!this.context) await this.connect();
const streamNode = new AudioWorkletNode(this.context, 'stream-processor');
streamNode.port.onmessage = (e)=>{
const { event } = e.data;
if ('stop' === event) {
if (this.localLoopback) this.localLoopback.stop();
else streamNode.disconnect();
this.streamNode = null;
} else if ('offset' === event) {
const { requestId, trackId, offset } = e.data;
const currentTime = offset / this.sampleRate;
this.trackSampleOffsets[requestId] = {
trackId,
offset,
currentTime
};
}
};
if (this.enableLocalLoopback) {
var _this_localLoopback;
null === (_this_localLoopback = this.localLoopback) || void 0 === _this_localLoopback || _this_localLoopback.start(streamNode);
} else streamNode.connect(this.context.destination);
this.streamNode = streamNode;
return true;
}
/**
* Adds audio data to the currently playing audio stream
* You can add chunks beyond the current play point and they will be queued for play
* @param {ArrayBuffer|Int16Array|Uint8Array} arrayBuffer
* @param {string} [trackId]
* @param {AudioFormat} [format] - Audio format: 'pcm', 'g711a', or 'g711u'
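 * @example
 * // Illustrative usage ('player' is a connected WavStreamPlayer; assumed):
 * await player.add16BitPCM(new Int16Array(2400), 'my-track');
 * // ...or queue a G.711 μ-law chunk on the same track
 * await player.add16BitPCM(new Uint8Array(160), 'my-track', 'g711u');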
*/ async add16BitPCM(arrayBuffer) {
let trackId = arguments.length > 1 && void 0 !== arguments[1] ? arguments[1] : 'default', format = arguments.length > 2 ? arguments[2] : void 0;
if ('string' != typeof trackId) throw new Error("trackId must be a string");
if (this.interruptedTrackIds[trackId]) return; // Drop chunks for tracks that were interrupted
if (!this.streamNode) await this._start();
let buffer;
const audioFormat = format || this.defaultFormat;
if (arrayBuffer instanceof Int16Array) // Already in PCM format
buffer = arrayBuffer;
else if (arrayBuffer instanceof Uint8Array) // Handle G.711 formats
buffer = 'g711a' === audioFormat ? decodeAlaw(arrayBuffer) : 'g711u' === audioFormat ? decodeUlaw(arrayBuffer) : new Int16Array(arrayBuffer.buffer);
else if (arrayBuffer instanceof ArrayBuffer) // Handle different formats based on the specified format
buffer = 'g711a' === audioFormat ? decodeAlaw(new Uint8Array(arrayBuffer)) : 'g711u' === audioFormat ? decodeUlaw(new Uint8Array(arrayBuffer)) : new Int16Array(arrayBuffer);
else throw new Error("argument must be Int16Array, Uint8Array, or ArrayBuffer");
// Pass the ArrayBuffer as a Transferable object to avoid copying the data
// Note: only buffer.buffer can be transferred, because buffer is an Int16Array view
const transferableBuffer = buffer.buffer;
this.streamNode.port.postMessage({
event: 'write',
buffer,
trackId
}, [
transferableBuffer
]);
}
/**
* Gets the offset (sample count) of the currently playing stream
* @param {boolean} [interrupt]
* @returns {{trackId: string|null, offset: number, currentTime: number} | null}
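 * @example
 * // Illustrative: read the current playback position without interrupting
 * // ('player' is a connected WavStreamPlayer; assumed)
 * const sample = await player.getTrackSampleOffset();
 * if (sample) console.log(sample.trackId, sample.currentTime);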
*/ async getTrackSampleOffset() {
let interrupt = arguments.length > 0 && void 0 !== arguments[0] && arguments[0];
if (!this.streamNode) return null;
const requestId = crypto.randomUUID();
this.streamNode.port.postMessage({
event: interrupt ? 'interrupt' : 'offset',
requestId
});
let trackSampleOffset;
while(!trackSampleOffset){
trackSampleOffset = this.trackSampleOffsets[requestId];
await new Promise((r)=>setTimeout(r, 1));
}
const { trackId } = trackSampleOffset;
if (interrupt && trackId) this.interruptedTrackIds[trackId] = true;
return trackSampleOffset;
}
/**
* Strips the current stream and returns the sample offset of the audio
* @returns {{trackId: string|null, offset: number, currentTime: number} | null}
*/ async interrupt() {
return this.getTrackSampleOffset(true);
}
/**
* Set media stream for local loopback
*/ setMediaStream(stream) {
this.localLoopbackStream = stream;
}
/**
* Adds G.711 A-law encoded audio data to the currently playing audio stream
* @param {ArrayBuffer|Uint8Array} arrayBuffer - G.711 A-law encoded data
* @param {string} [trackId]
* @returns {Int16Array}
*/ async addG711a(arrayBuffer) {
let trackId = arguments.length > 1 && void 0 !== arguments[1] ? arguments[1] : 'default';
await this.add16BitPCM(arrayBuffer, trackId, 'g711a');
}
/**
* Adds G.711 μ-law encoded audio data to the currently playing audio stream
* @param {ArrayBuffer|Uint8Array} arrayBuffer - G.711 μ-law encoded data
* @param {string} [trackId]
*/ async addG711u(arrayBuffer) {
let trackId = arguments.length > 1 && void 0 !== arguments[1] ? arguments[1] : 'default';
await this.add16BitPCM(arrayBuffer, trackId, 'g711u');
}
setSampleRate(sampleRate) {
this.sampleRate = sampleRate;
}
setDefaultFormat(format) {
this.defaultFormat = format;
}
/**
* Sets the volume of audio playback
* @param {number} volume - Volume level (0.0 to 1.0)
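 * @example
 * // Illustrative: halve the output volume of a connected player
 * player.setVolume(0.5);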
*/ setVolume(volume) {
// Clamp volume between 0 and 1
this.volume = Math.max(0, Math.min(1, volume));
if (this.streamNode) this.streamNode.port.postMessage({
event: 'volume',
volume: this.volume
});
}
/**
* Gets the current volume level of audio playback
* @returns {number} Current volume level (0.0 to 1.0)
*/ getVolume() {
return this.volume;
}
/**
* Destroys the player instance and releases all resources
* Should be called when the player is no longer needed
*/ async destroy() {
// Stop any audio that's playing
if (this.streamNode) {
this.streamNode.disconnect();
this.streamNode = null;
}
// Clean up local loopback
if (this.localLoopback) {
this.localLoopback.cleanup();
this.localLoopback = void 0;
}
// Close audio context
if (this.context) {
await this.context.close();
this.context = null;
}
// Reset all state
this.trackSampleOffsets = {};
this.interruptedTrackIds = {};
this.isPaused = false;
}
/**
* Creates a new WavStreamPlayer instance
* @param {{sampleRate?: number, enableLocalLoopback?: boolean, defaultFormat?: AudioFormat, volume?: number}} options
* @returns {WavStreamPlayer}
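 * @example
 * // Minimal sketch of a telephony-style player (illustrative parameters):
 * const player = new WavStreamPlayer({ sampleRate: 8000, defaultFormat: 'g711u' });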
*/ constructor({ sampleRate = 44100, enableLocalLoopback = false, defaultFormat = 'pcm', volume = 1.0 } = {}){
this.volume = 1.0;
this.scriptSrc = StreamProcessorSrc;
this.sampleRate = sampleRate;
this.context = null;
this.streamNode = null;
this.trackSampleOffsets = {};
this.interruptedTrackIds = {};
this.isPaused = false;
this.enableLocalLoopback = enableLocalLoopback;
this.defaultFormat = defaultFormat;
if (this.enableLocalLoopback) this.localLoopback = new local_loopback(true);
// Initialize volume (0 = muted, 1 = full volume)
this.volume = volume;
}
}
const AudioProcessorWorklet = `
class AudioProcessor extends AudioWorkletProcessor {
constructor() {
super();
this.port.onmessage = this.receive.bind(this);
this.initialize();
}
initialize() {
this.foundAudio = false;
this.recording = false;
this.chunks = [];
}
/**
* Concatenates sampled chunks into channels
* Format is chunk[Left[], Right[]]
*/
readChannelData(chunks, channel = -1, maxChannels = 9) {
let channelLimit;
if (channel !== -1) {
if (chunks[0] && chunks[0].length - 1 < channel) {
throw new Error(
\`Channel \${channel} out of range: max \${chunks[0].length}\`
);
}
channelLimit = channel + 1;
} else {
channel = 0;
channelLimit = Math.min(chunks[0] ? chunks[0].length : 1, maxChannels);
}
const channels = [];
for (let n = channel; n < channelLimit; n++) {
const length = chunks.reduce((sum, chunk) => {
return sum + chunk[n].length;
}, 0);
const buffers = chunks.map((chunk) => chunk[n]);
const result = new Float32Array(length);
let offset = 0;
for (let i = 0; i < buffers.length; i++) {
result.set(buffers[i], offset);
offset += buffers[i].length;
}
channels[n] = result;
}
return channels;
}
/**
* Combines parallel audio data into correct format,
* channels[Left[], Right[]] to float32Array[LRLRLRLR...]
*/
formatAudioData(channels) {
if (channels.length === 1) {