UNPKG

ttc-ai-client

Version:

TypeScript client SDK for TTC AI services with decorators and schema validation.

252 lines 9 kB
"use strict";
/**
 * Simple Voice Call Manager
 * A clean, minimal wrapper around ttc-sc for voice conversations.
 */
Object.defineProperty(exports, "__esModule", { value: true });
exports.VoiceCallManager = void 0;
const ttc_sc_1 = require("ttc-sc");
const core_1 = require("../core");
// NOTE(review): this logs the WebRTCClient constructor itself, not a version
// string — presumably a load-time sanity check that the import resolved; confirm.
console.log('Using ttc-sc version:', ttc_sc_1.WebRTCClient);
/**
 * Manages a single voice call: microphone capture, transcript handling,
 * TTS playback, and a simple processing lock that queues transcripts
 * arriving while a previous turn is still being processed or spoken.
 */
class VoiceCallManager {
    /**
     * @param {object} options - Connection settings and callbacks:
     *   serverUrl, conversationId, plus optional handlers onStatusChange,
     *   onCallStart, onCallEnd, onUserSpeech, onAssistantResponse, onError.
     */
    constructor(options) {
        this.client = null;          // underlying ttc-sc WebRTCClient (null when no call)
        this.isActive = false;       // true between startCall() and endCall()
        this.isListening = false;    // mic tracks enabled
        this.isSpeaking = false;     // TTS playback in progress
        this.isMuted = false;        // mic tracks disabled
        this.isProcessing = false;   // Lock to prevent concurrent processing
        this.transcriptQueue = [];   // Queue for incoming transcripts
        this.options = options;
    }
    /**
     * Start the voice call: connect the WebRTC client, request microphone
     * access, and begin recording for the lifetime of the call.
     * @returns {Promise<boolean>} true on success; false if already active
     *   or on failure (errors are reported via options.onError).
     */
    async startCall() {
        try {
            if (this.isActive) {
                console.warn('Call is already active');
                return false;
            }
            this.options.onStatusChange?.('Connecting...');
            // Initialize WebRTC Client with new API
            this.client = new ttc_sc_1.WebRTCClient({
                url: this.options.serverUrl || 'ws://localhost:3786',
                conversationId: this.options.conversationId,
                onTranscript: (transcript) => {
                    console.log('Transcript received:', transcript);
                    if (transcript.trim()) {
                        // Check if we're currently processing
                        if (this.isProcessing) {
                            // Queue the transcript for later
                            console.log('Processing locked - queuing transcript:', transcript.trim());
                            this.transcriptQueue.push(transcript.trim());
                        }
                        else {
                            // Process immediately
                            this.processTranscript(transcript.trim());
                        }
                    }
                },
                onPlaybackComplete: () => {
                    console.log('Playback complete - checking queue');
                    // Handle cached function responses
                    const cached = core_1.ttc.ai.internal.function_response_cache;
                    if (cached) {
                        core_1.ttc.ai.internal.function_response_cache = null;
                        core_1.ttc.ai.internal.sendFunctionResponse(this.options.conversationId, cached.response, cached.image_urls);
                    }
                    // Release the lock and process queued transcripts
                    this.onPlaybackComplete();
                }
            });
            // Initialize connection (requests microphone access and connects)
            await this.client.init();
            this.isActive = true;
            this.options.onCallStart?.();
            this.options.onStatusChange?.('Call active - speak now');
            // Start recording once - it stays active for the entire call
            this.client.startRecording();
            this.isListening = true;
            return true;
        }
        catch (error) {
            console.error('Failed to start call:', error);
            // FIX: tear down a partially-initialized client so a failed start
            // does not leak the connection (endCall() bails when !isActive).
            if (this.client) {
                try {
                    this.client.destroy();
                }
                catch (destroyError) {
                    console.error('Error destroying client after failed start:', destroyError);
                }
                this.client = null;
            }
            this.options.onError?.(error instanceof Error ? error.message : 'Failed to start call');
            return false;
        }
    }
    /**
     * End the voice call: reset all state flags, stop the media tracks,
     * stop recording, and destroy the client. No-op if no call is active.
     */
    endCall() {
        if (!this.isActive)
            return;
        this.isActive = false;
        this.isListening = false;
        this.isSpeaking = false;
        this.isMuted = false;
        this.isProcessing = false;
        // Clear any queued transcripts
        this.transcriptQueue = [];
        if (this.client) {
            // FIX: stream cleanup is now inside the client null-check (the
            // original dereferenced this.client.stream unconditionally and
            // would throw if the client had already been released).
            if (this.client.stream) {
                this.client.stream.getTracks().forEach(track => track.stop());
                this.client.stream = null;
            }
            // Now we actually stop recording when ending the call
            try {
                this.client.stopRecording();
            }
            catch (error) {
                console.error('Error stopping recording:', error);
            }
            this.client.destroy();
            this.client = null;
        }
        this.options.onCallEnd?.();
        this.options.onStatusChange?.('Call ended');
    }
    /**
     * Send assistant response text for TTS playback. Completion is
     * signalled asynchronously via the client's onPlaybackComplete.
     * @param {string} text - Text to synthesize and play.
     */
    async respondWithSpeech(text) {
        if (!this.client || !this.isActive) {
            console.warn('Cannot speak: client not active');
            return;
        }
        try {
            this.isSpeaking = true;
            this.options.onStatusChange?.('Speaking...');
            this.options.onAssistantResponse?.(text);
            // Use synthesize method from new API
            this.client.synthesize(text);
            // The audio will be played automatically by the WebRTCClient
            // onPlaybackComplete callback will handle unmuting after playback finishes
        }
        catch (error) {
            console.error('TTS synthesis failed:', error);
            this.options.onError?.(error instanceof Error ? error.message : 'Speech synthesis failed');
            this.onSpeechComplete();
        }
    }
    /**
     * Get a snapshot of the current call state.
     * @returns {object} state flags plus the number of queued transcripts.
     */
    getState() {
        return {
            isActive: this.isActive,
            isListening: this.isListening,
            isSpeaking: this.isSpeaking,
            isMuted: this.isMuted,
            isProcessing: this.isProcessing,
            queuedTranscripts: this.transcriptQueue.length
        };
    }
    // Private methods
    /**
     * Process a single transcript: take the processing lock and forward
     * the text to the consumer via options.onUserSpeech.
     */
    processTranscript(transcript) {
        console.log('Processing transcript:', transcript);
        // Set lock
        this.isProcessing = true;
        this.options.onStatusChange?.('Processing...');
        // Send to user speech handler
        this.options.onUserSpeech?.(transcript);
    }
    /**
     * Handle playback completion: release the processing lock, then either
     * process all queued transcripts (joined into one message) or resume
     * listening when the queue is empty.
     */
    onPlaybackComplete() {
        console.log('Playback complete - releasing lock');
        // Release the lock
        this.isProcessing = false;
        // Check if there are queued transcripts
        if (this.transcriptQueue.length > 0) {
            console.log(`Found ${this.transcriptQueue.length} queued transcript(s)`);
            // Combine all queued transcripts into one message
            const combinedTranscript = this.transcriptQueue.join(' ');
            // Clear the queue
            this.transcriptQueue = [];
            // Process the combined transcript
            this.processTranscript(combinedTranscript);
        }
        else {
            // No queued transcripts, resume listening
            this.onSpeechComplete();
        }
    }
    /**
     * Mute the microphone by disabling all audio tracks
     * (recording stays active; disabled tracks produce silence).
     */
    muteMicrophone() {
        // FIX: optional chaining also guards against a null client
        // (the original threw when this.client was null).
        if (!this.client?.stream)
            return;
        this.client.stream.getAudioTracks().forEach(track => {
            track.enabled = false;
        });
        this.isMuted = true;
        this.isListening = false;
        this.options.onStatusChange?.('Processing...');
        console.log('Microphone muted (tracks disabled)');
    }
    /**
     * Unmute the microphone by enabling all audio tracks.
     */
    unmuteMicrophone() {
        // FIX: optional chaining also guards against a null client.
        if (!this.client?.stream)
            return;
        this.client.stream.getAudioTracks().forEach(track => {
            track.enabled = true;
        });
        this.isMuted = false;
        this.isListening = true;
        this.options.onStatusChange?.('Listening...');
        console.log('Microphone unmuted (tracks enabled)');
    }
    /** Resume listening (unmute) unless already listening or speaking. */
    async startListening() {
        if (!this.client || !this.isActive || this.isListening || this.isSpeaking) {
            return;
        }
        try {
            // Unmute the microphone to resume listening
            this.unmuteMicrophone();
        }
        catch (error) {
            console.error('Failed to unmute:', error);
            this.options.onError?.(error instanceof Error ? error.message : 'Failed to unmute');
        }
    }
    /** Pause listening by muting the microphone (recording continues). */
    async stopListening() {
        if (!this.client || !this.isListening) {
            return;
        }
        try {
            // Mute the microphone instead of stopping recording
            this.muteMicrophone();
        }
        catch (error) {
            console.error('Failed to mute:', error);
        }
    }
    /**
     * Mark speech finished and, if the call is still active, resume
     * listening after a short delay to let the audio pipeline settle.
     */
    onSpeechComplete() {
        this.isSpeaking = false;
        // Resume listening after speech is done
        if (this.isActive) {
            console.log('Resuming listening after speech completion');
            setTimeout(() => {
                this.startListening();
            }, 500);
        }
    }
    /**
     * Destroy and clean up resources
     */
    destroy() {
        this.endCall();
    }
}
exports.VoiceCallManager = VoiceCallManager;
//# sourceMappingURL=index.js.map