@aituber-onair/core

Core library for AITuber OnAir providing voice synthesis and chat processing

import {
  MAX_TOKENS_BY_LENGTH,
  DEFAULT_VISION_PROMPT,
  textsToScreenplay,
} from '@aituber-onair/chat';
import { EventEmitter } from './EventEmitter';
/**
 * Core logic for chat processing
 * Combines ChatService and MemoryManager to execute
 * AITuber's main processing (text chat, vision chat)
 */
export class ChatProcessor extends EventEmitter {
  /**
   * Constructor
   * @param chatService Chat service
   * @param options Configuration options
   * @param memoryManager Memory manager (optional)
   * @param toolCallback Callback that executes tool calls (optional)
   */
  constructor(chatService, options, memoryManager, toolCallback) {
    super();
    this.chatLog = [];
    this.chatStartTime = 0;
    this.processingChat = false;
    this.chatService = chatService;
    this.options = options;
    this.memoryManager = memoryManager;
    this.toolCallback = toolCallback;
    // Initialize MAX_HOPS from options with default value of 6
    this.MAX_HOPS = options.maxHops ?? 6;
  }
  /**
   * Add message to chat log
   * @param message Message to add
   */
  addToChatLog(message) {
    this.chatLog.push(message);
    this.emit('chatLogUpdated', this.chatLog);
  }
  /**
   * Get chat log
   */
  getChatLog() {
    return [...this.chatLog];
  }
  /**
   * Clear chat log
   */
  clearChatLog() {
    this.chatLog = [];
    this.emit('chatLogUpdated', this.chatLog);
  }
  /**
   * Set chat start time
   * @param time Timestamp
   */
  setChatStartTime(time) {
    this.chatStartTime = time;
  }
  /**
   * Get chat start time
   */
  getChatStartTime() {
    return this.chatStartTime;
  }
  /**
   * Get processing status
   */
  isProcessing() {
    return this.processingChat;
  }
  /**
   * Update options
   * @param newOptions New options to merge with existing ones
   */
  updateOptions(newOptions) {
    this.options = { ...this.options, ...newOptions };
    // Update MAX_HOPS if maxHops is included in the new options
    if (newOptions.maxHops !== undefined) {
      this.MAX_HOPS = newOptions.maxHops;
    }
  }
  /**
   * Process text chat
   * @param text User input text
   * @param chatType Chat type
   */
  async processTextChat(text, chatType = 'chatForm') {
    if (this.processingChat) {
      console.warn('Another chat processing is in progress');
      return;
    }
    try {
      this.processingChat = true;
      this.emit('processingStart', { type: chatType, text });
      // Set chat start time (if first message)
      if (this.chatStartTime === 0) {
        this.chatStartTime = Date.now();
      }
      // Create user message
      const userMessage = {
        role: 'user',
        content: text,
        timestamp: Date.now(),
      };
      // Add to chat log
      this.addToChatLog(userMessage);
      // Create memory (if needed)
      if (this.options.useMemory && this.memoryManager) {
        await this.memoryManager.createMemoryIfNeeded(this.chatLog, this.chatStartTime);
      }
      const initialMsgs = await this.prepareMessagesForAI();
      // Set max tokens for text chat
      const maxTokens = this.getMaxTokensForChat();
      await this.runToolLoop(initialMsgs, (msgs, stream, cb) =>
        this.chatService.chatOnce(msgs, stream, cb, maxTokens));
    } catch (error) {
      console.error('Error in text chat processing:', error);
      this.emit('error', error);
    } finally {
      this.processingChat = false;
      this.emit('processingEnd');
    }
  }
  /**
   * Process vision chat
   * @param imageDataUrl Image data URL
   */
  async processVisionChat(imageDataUrl) {
    if (this.processingChat) {
      console.warn('Another chat processing is in progress');
      return;
    }
    try {
      this.processingChat = true;
      this.emit('processingStart', { type: 'vision', imageUrl: imageDataUrl });
      // Set chat start time (if first message)
      if (this.chatStartTime === 0) {
        this.chatStartTime = Date.now();
      }
      // Create memory (if needed)
      if (this.options.useMemory && this.memoryManager) {
        await this.memoryManager.createMemoryIfNeeded(this.chatLog, this.chatStartTime);
      }
      // Prepare messages to send to AI
      const baseMessages = await this.prepareMessagesForAI();
      // Add vision system prompt
      if (this.options.visionSystemPrompt) {
        baseMessages.push({
          role: 'system',
          content: this.options.visionSystemPrompt,
        });
      }
      // Create vision message
      const visionMessage = {
        role: 'user',
        content: [
          {
            type: 'text',
            text: this.options.visionPrompt || DEFAULT_VISION_PROMPT,
          },
          {
            type: 'image_url',
            image_url: {
              url: imageDataUrl,
              detail: 'low', // For token saving
            },
          },
        ],
      };
      // Set max tokens for vision chat
      const maxTokens = this.getMaxTokensForVision();
      await this.runToolLoop([...baseMessages, visionMessage], (msgs, stream, cb) =>
        this.chatService.visionChatOnce(msgs, stream, cb, maxTokens), imageDataUrl);
    } catch (error) {
      console.error('Error in vision chat processing:', error);
      this.emit('error', error);
    } finally {
      this.processingChat = false;
      this.emit('processingEnd');
    }
  }
  /**
   * Prepare messages to send to AI
   * Create an array of messages with system prompt and memory
   */
  async prepareMessagesForAI() {
    const messages = [];
    // Add system prompt
    if (this.options.systemPrompt) {
      messages.push({
        role: 'system',
        content: this.options.systemPrompt,
      });
    }
    // Add memory
    if (this.options.useMemory && this.memoryManager) {
      const memoryText = this.memoryManager.getMemoryForPrompt();
      if (memoryText) {
        const memoryContent = memoryText +
          (this.options.memoryNote ? `\n\n${this.options.memoryNote}` : '');
        messages.push({
          role: 'system',
          content: memoryContent,
        });
      }
    }
    // Add chat log (skip empty messages)
    messages.push(...this.chatLog.filter((m) =>
      !(typeof m.content === 'string' && m.content.trim() === '') &&
      !(Array.isArray(m.content) && m.content.length === 0)));
    return messages;
  }
  /**
   * Set chat log from external source
   * @param messages Message array to set as chat log
   */
  setChatLog(messages) {
    this.chatLog = Array.isArray(messages) ? [...messages] : [];
    this.emit('chatLogUpdated', this.chatLog);
  }
  /**
   * Get max tokens for chat responses
   * @returns Maximum tokens for chat
   */
  getMaxTokensForChat() {
    // Prioritize direct maxTokens setting
    if (this.options.maxTokens !== undefined) {
      return this.options.maxTokens;
    }
    // Use responseLength preset if specified
    if (this.options.responseLength) {
      return MAX_TOKENS_BY_LENGTH[this.options.responseLength];
    }
    // Return undefined for provider default
    return undefined;
  }
  /**
   * Get max tokens for vision responses
   * @returns Maximum tokens for vision
   */
  getMaxTokensForVision() {
    // Prioritize direct visionMaxTokens setting
    if (this.options.visionMaxTokens !== undefined) {
      return this.options.visionMaxTokens;
    }
    // Use visionResponseLength preset if specified
    if (this.options.visionResponseLength) {
      return MAX_TOKENS_BY_LENGTH[this.options.visionResponseLength];
    }
    // Fallback to regular chat settings
    return this.getMaxTokensForChat();
  }
  /**
   * Run the request/tool-call loop until the provider signals 'end'
   * or MAX_HOPS is exceeded
   * @param send Initial messages to send
   * @param once Function that performs a single chat request
   * @param visionSource Image data URL when called from vision chat (optional)
   */
  async runToolLoop(send, once, visionSource) {
    let toSend = send;
    let hops = 0;
    let first = true;
    // Check whether the chat service is Claude
    const isClaude = this.chatService.provider === 'claude';
    while (hops++ < this.MAX_HOPS) {
      const { blocks, stop_reason } = await once(toSend, first, (t) =>
        this.emit('assistantPartialResponse', t));
      first = false;
      blocks
        .filter((b) => b.type === 'tool_result')
        .forEach((b) => this.emit('assistantPartialResponse', b.content));
      if (stop_reason === 'end') {
        const full = blocks
          .map((b) => b.type === 'text' ? b.text : b.type === 'tool_result' ? b.content : '')
          .join('');
        const assistantMessage = {
          role: 'assistant',
          content: full,
          timestamp: Date.now(),
        };
        this.addToChatLog(assistantMessage);
        const screenplay = textsToScreenplay([full])[0];
        this.emit('assistantResponse', {
          message: assistantMessage,
          screenplay,
          visionSource,
        });
        if (this.memoryManager) this.memoryManager.cleanupOldMemories();
        return;
      }
      /* ---------- tool_use ---------- */
      if (!this.toolCallback) throw new Error('Tool callback missing');
      const toolUses = blocks.filter((b) => b.type === 'tool_use');
      const toolResults = await this.toolCallback(toolUses);
      const assistantToolCall = isClaude
        ? {
            role: 'assistant',
            content: toolUses.map((u) => ({
              type: 'tool_use',
              id: u.id,
              name: u.name,
              input: u.input ?? {},
            })),
          }
        : {
            role: 'assistant',
            content: [],
            tool_calls: toolUses.map((u) => ({
              id: u.id,
              type: 'function',
              function: {
                name: u.name,
                arguments: JSON.stringify(u.input || {}),
              },
            })),
          };
      const toolMsgs = toolResults.map((r) => {
        if (isClaude) {
          return {
            role: 'user',
            content: [
              {
                type: r.type,
                tool_use_id: r.tool_use_id,
                content: r.content,
              },
            ],
          };
        }
        return {
          role: 'tool',
          tool_call_id: r.tool_use_id,
          content: r.content,
        };
      });
      /* build messages for the next turn */
      const cleaned = toSend.filter((m) => {
        if (isClaude && m.role === 'assistant' &&
            Array.isArray(m.content) && m.content.length === 0) {
          return false;
        }
        return true;
      });
      if (!(isClaude && Array.isArray(assistantToolCall.content) &&
            assistantToolCall.content.length === 0)) {
        cleaned.push(assistantToolCall);
      }
      toolMsgs.forEach((m) => cleaned.push(m));
      toSend = cleaned;
    }
    // Reaching this point is rare; just log it.
    console.warn('Tool loop exceeded MAX_HOPS');
  }
}
//# sourceMappingURL=ChatProcessor.js.map
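
The short usage sketch below is not part of the shipped file; it only illustrates how the class above can be driven. The inline stubChatService, the option values, and the on() listener method are assumptions inferred from how ChatProcessor calls chatOnce and emit; a real application would supply a provider-backed ChatService and MemoryManager from the AITuber OnAir packages instead.

// Usage sketch (illustrative only, not shipped with the package).
// The stub mirrors the shape ChatProcessor expects from chatOnce:
// a { blocks, stop_reason } result with 'text' / 'tool_use' / 'tool_result' blocks.
import { ChatProcessor } from './ChatProcessor';

const stubChatService = {
  provider: 'openai', // any non-'claude' value selects the OpenAI-style tool-call format
  async chatOnce(messages, stream, onPartial, maxTokens) {
    const text = 'Hello from the stub service!';
    if (stream) onPartial(text); // partials are re-emitted as 'assistantPartialResponse'
    return { blocks: [{ type: 'text', text }], stop_reason: 'end' };
  },
  async visionChatOnce(messages, stream, onPartial, maxTokens) {
    return this.chatOnce(messages, stream, onPartial, maxTokens);
  },
};

const processor = new ChatProcessor(stubChatService, {
  systemPrompt: 'You are a cheerful AITuber.',
  useMemory: false,
  maxHops: 6,
});

// 'on' is assumed to be the subscribe method of the custom EventEmitter.
processor.on('assistantResponse', ({ message }) => {
  console.log('assistant said:', message.content);
});
processor.on('error', (e) => console.error(e));

await processor.processTextChat('Hi there!');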