
browser-use-typescript

A TypeScript-based browser automation framework

import { HumanMessage, ToolMessage, AIMessage } from "@langchain/core/messages";
import { AgentMessagePrompt } from "../prompt";

// Create a dummy no-op logger for when logging is disabled
const noopLogger = {
    error: () => { },
    log: () => { },
    debug: () => { },
    info: () => { },
    warn: () => { },
    trace: () => { }
};

/**
 * Settings for the MessageManager
 */
class MessageManagerSettings {
    maxTokens = 12000;
    numChatTurnsToKeep = 10;
    imageTokens = 800;
    includeAttributes = [];
    messageContext = undefined;
    sensitiveData;
    availableFilePath = [];
    constructor(init) {
        Object.assign(this, init);
    }
}

/**
 * Manages message history, settings, and handles interactions between agent state and outputs
 */
export class MessageManager {
    task;
    settings;
    state;
    systemPrompt;
    logger; // Logger instance

    constructor(task, systemPrompt, settings, state,
        customLogger = noopLogger // Default to no-op logger
    ) {
        this.task = task;
        this.settings = settings;
        this.state = state;
        this.systemPrompt = systemPrompt;
        this.logger = customLogger; // Use provided logger or no-op logger
        if (this.state.history.messages.length === 0) {
            this.initialize_messages();
        }
        this.logger.debug(`MessageManager initialized with task: "${task.substring(0, 50)}..."`);
    }

    initialize_messages() {
        this.logger.debug("Initializing messages in MessageManager");
        this._add_message_with_tokens(this.systemPrompt);
        if (this.settings.messageContext) {
            const context_message = new HumanMessage({ content: 'Context for the task: ' + this.settings.messageContext });
            this._add_message_with_tokens(context_message);
        }
        const task_message = new HumanMessage({
            content: `Your ultimate task is: """${this.task}""". If you achieved your ultimate task, stop everything and use the done action in the next step to complete the task. If not, continue as usual.`
        });
        this._add_message_with_tokens(task_message);
        this.logger.debug("Added task message to history");

        if (this.settings.sensitiveData) {
            this.logger.debug(`Adding sensitive data placeholders: ${Object.keys(this.settings.sensitiveData).length} items`);
            const info = `Here are placeholders for sensitive data: ${Object.keys(this.settings.sensitiveData)}`;
            const info_extended = info + '. To use them, write <secret>the placeholder name</secret>';
            const info_message = new HumanMessage({ content: info_extended });
            this._add_message_with_tokens(info_message);
        }

        const placeholder_message = new HumanMessage({ content: 'Example output:' });
        this._add_message_with_tokens(placeholder_message);
        this.logger.debug("Added placeholder message to history");

        const tool_call = [
            {
                'name': 'AgentOutput',
                'args': {
                    'current_state': {
                        'evaluation_previous_goal': 'Success - I opened the first page',
                        'memory': 'Starting with the new task. I have completed 1/10 steps',
                        'next_goal': 'Click on company a',
                    },
                    'action': [{ 'click_element': { 'index': 0 } }],
                },
                'id': String(this.state.tool_id),
                'type': 'tool_call',
            }
        ];
        const example_tool_call = new AIMessage({ content: "", tool_calls: tool_call });
        this._add_message_with_tokens(example_tool_call);
        this.logger.debug("Added example tool call to history");

        this.add_tool_message('Browser started');
        this.logger.debug("Added initial browser started tool message");

        const placeholder_memory_message = new HumanMessage({ content: '[Your task history memory starts here]' });
        this._add_message_with_tokens(placeholder_memory_message);
        this.logger.debug("Added placeholder memory message to history");

        if (this.settings.availableFilePath && this.settings.availableFilePath.length > 0) {
            this.logger.debug(`Adding ${this.settings.availableFilePath.length} available file paths to history`);
            const filepaths_msg = new HumanMessage({ content: `Here are file paths you can use: ${this.settings.availableFilePath}` });
            this._add_message_with_tokens(filepaths_msg);
        }
        this.logger.info("Message initialization complete");
    }

    add_new_task(new_task) {
        this.logger.debug(`Adding new task: "${new_task.substring(0, 50)}..."`);
        const content = `Your new ultimate task is: """${new_task}""". Take the previous context into account and finish your new ultimate task.`;
        const msg = new HumanMessage({ content });
        this._add_message_with_tokens(msg);
        this.task = new_task;
        this.logger.info("New task added and set as current task");
    }

    add_state_message(state, result = null, step_info = null, use_vision = true) {
        this.logger.debug(`Adding state message with URL: ${state.url}`);
        // If a result should be kept in memory, add it directly to history and add the state without the result
        if (result) {
            this.logger.debug(`Processing ${result.length} action results`);
            for (const r of result) {
                if (r.includeInMemory) {
                    this.logger.debug("Found result to include in memory");
                    if (r.extractedContent) {
                        this.logger.debug(`Adding extracted content to memory: ${String(r.extractedContent).substring(0, 50)}...`);
                        const msg = new HumanMessage({ content: 'Action result: ' + String(r.extractedContent) });
                        this._add_message_with_tokens(msg);
                    }
                    if (r.error) {
                        // If the error ends with \n, remove it
                        let errorMsg = r.error;
                        if (errorMsg.endsWith('\n')) {
                            errorMsg = errorMsg.slice(0, -1);
                        }
                        // Keep only the last line of the error
                        const last_line = errorMsg.split('\n').pop() || '';
                        this.logger.debug(`Adding error message to memory: ${last_line}`);
                        const msg = new HumanMessage({ content: 'Action error: ' + last_line });
                        this._add_message_with_tokens(msg);
                    }
                    result = null; // If the result is already in history, we don't want to add it again
                }
            }
        }
        // Otherwise add the state message and result to the next message (which will not stay in memory)
        this.logger.debug("Creating agent message prompt");
        const agent_message_prompt = new AgentMessagePrompt(state, result || undefined, this.settings.includeAttributes, step_info || undefined);
        const state_message = agent_message_prompt.getUserMessage(use_vision);
        this.logger.debug(`Adding state message with ${use_vision ? 'vision' : 'no vision'}`);
        this._add_message_with_tokens(state_message);
        this.logger.info("State message added successfully");
    }

    add_model_output(model_output) {
        this.logger.debug(`Adding model output with ${model_output.action?.length || 0} actions`);
        const tool_call = {
            name: 'AgentOutput',
            args: model_output.toJson(), // Assuming the model_dump equivalent is toJson
            id: String(this.state.tool_id),
            type: 'tool_call',
        };
        this.logger.debug("Tool call to be sent to the LLM", tool_call);
        const msg = new AIMessage({
            content: '',
            tool_calls: [tool_call],
        });
        this._add_message_with_tokens(msg);
        this.logger.debug("Added model output as tool call");
        // Empty tool response
        this.add_tool_message('');
        this.logger.info("Model output processing complete");
    }

    add_plan(plan, position = null) {
        if (plan) {
            this.logger.debug(`Adding plan at position ${position || 'end'}`);
            const msg = new AIMessage({ content: plan });
            this._add_message_with_tokens(msg, position);
            this.logger.info("Plan added to message history");
        }
        else {
            this.logger.debug("No plan to add (null plan provided)");
        }
    }

    get_messages() {
        const messages = this.state.history.messages.map(m => m.message);
        // Debug which messages are in history, with token counts
        let total_input_tokens = 0;
        this.logger.debug(`Messages in history: ${this.state.history.messages.length}:`);
        for (const m of this.state.history.messages) {
            total_input_tokens += m.metadata.tokens;
            this.logger.debug(`${m.message.constructor.name} - Token count: ${m.metadata.tokens}`);
        }
        this.logger.debug(`Total input tokens: ${total_input_tokens}`);
        return messages;
    }

    _add_message_with_tokens(message, position = null) {
        this.logger.debug(`Adding message with tokens (type: ${message.constructor.name})`);
        // Filter out sensitive data from the message
        if (this.settings.sensitiveData) {
            this.logger.debug("Filtering sensitive data from message");
            message = this._filter_sensitive_data(message);
        }
        const token_count = this._count_tokens(message);
        this.logger.debug(`Calculated token count: ${token_count}`);
        const metadata = { tokens: token_count };
        this.state.history.addMessage(message, metadata, position ?? undefined);
        this.logger.debug(`Message added to history ${position !== null ? `at position ${position}` : 'at end'}`);
    }

    _filter_sensitive_data(message) {
        this.logger.debug("Filtering sensitive data from message content");
        const replace_sensitive = (value) => {
            if (!this.settings.sensitiveData) {
                return value;
            }
            let replacements = 0;
            for (const [key, val] of Object.entries(this.settings.sensitiveData)) {
                if (!val) {
                    continue;
                }
                const oldValue = value;
                value = value.replace(val, `<secret>${key}</secret>`);
                if (oldValue !== value) {
                    replacements++;
                }
            }
            this.logger.debug(`Made ${replacements} sensitive data replacements`);
            return value;
        };
        if (typeof message.content === 'string') {
            this.logger.debug("Filtering string content");
            message.content = replace_sensitive(message.content);
        }
        else if (Array.isArray(message.content)) {
            this.logger.debug(`Filtering array content with ${message.content.length} items`);
            for (let i = 0; i < message.content.length; i++) {
                const item = message.content[i];
                if ('image_url' in item) {
                    this.logger.debug(`Found image at index ${i}, skipping`);
                }
                else if (typeof item === 'object' && item !== null && 'text' in item) {
                    this.logger.debug(`Filtering text in array item ${i}`);
                    item.text = replace_sensitive(item.text);
                    message.content[i] = item;
                }
            }
        }
        return message;
    }

    _count_tokens(message) {
        this.logger.debug(`Counting tokens for message type: ${message.constructor.name}`);
        let tokens = 0;
        if (Array.isArray(message.content)) {
            this.logger.debug(`Message content is an array with ${message.content.length} items`);
            for (const item of message.content) {
                if ('image_url' in item) {
                    this.logger.debug(`Found image, adding ${this.settings.imageTokens} tokens`);
                    tokens += this.settings.imageTokens;
                }
                else if (typeof item === 'object' && item !== null && 'text' in item) {
                    const textTokens = this._count_text_tokens(item.text);
                    this.logger.debug(`Text item, adding ${textTokens} tokens`);
                    tokens += textTokens;
                }
            }
        }
        else {
            let msg = String(message.content);
            if ('tool_calls' in message) {
                this.logger.debug("Message contains tool calls, including in token count");
                msg += String(message.tool_calls);
            }
            const textTokens = this._count_text_tokens(msg);
            this.logger.debug(`String content, adding ${textTokens} tokens`);
            tokens += textTokens;
        }
        this.logger.debug(`Total token count: ${tokens}`);
        return tokens;
    }

    _count_text_tokens(text) {
        // Rough character-based estimate when no tokenizer is available
        const tokens = Math.floor(text.length / this.settings.numChatTurnsToKeep);
        this.logger.debug(`Estimated ${tokens} tokens for text of length ${text.length}`);
        return tokens;
    }

    cut_messages() {
        this.logger.debug(`Checking if we need to cut messages (current: ${this.state.history.current_tokens}, max: ${this.settings.maxTokens})`);
        let diff = this.state.history.current_tokens - this.settings.maxTokens;
        if (diff <= 0) {
            this.logger.debug("No need to cut messages, under token limit");
            return;
        }
        this.logger.info(`Need to remove ${diff} tokens to meet max token limit`);
        const msg = this.state.history.messages[this.state.history.messages.length - 1];
        this.logger.debug(`Attempting to cut tokens from last message (type: ${msg.message.constructor.name})`);
        // If the content is a list containing an image, remove the image first
        if (Array.isArray(msg.message.content)) {
            this.logger.debug(`Last message has array content with ${msg.message.content.length} items`);
            let text = '';
            for (let i = 0; i < msg.message.content.length; i++) {
                const item = msg.message.content[i];
                if ('image_url' in item) {
                    this.logger.debug(`Found image at index ${i}, removing`);
                    msg.message.content.splice(i, 1);
                    i--; // Adjust index after removal
                    const imageTokens = this.settings.imageTokens;
                    diff -= imageTokens; // Keep the remaining deficit in sync so the check below works
                    msg.metadata.tokens -= imageTokens;
                    this.state.history.current_tokens -= imageTokens;
                    this.logger.debug(`Removed image with ${imageTokens} tokens - total tokens now: ${this.state.history.current_tokens}/${this.settings.maxTokens}`);
                }
                else if (typeof item === 'object' && item !== null && 'text' in item) {
                    this.logger.debug(`Adding text from item ${i} to consolidated text`);
                    text += item.text;
                }
            }
            this.logger.debug(`Converting array content to string (length: ${text.length})`);
            msg.message.content = text;
            this.state.history.messages[this.state.history.messages.length - 1] = msg;
        }
        if (diff <= 0) {
            this.logger.debug("After removing images, we're now under the token limit");
            return;
        }
        this.logger.debug(`Still need to remove ${diff} more tokens`);
        // If still over the limit, remove text from the state message proportionally to the number of tokens needed, with a buffer
        // Calculate the proportion of content to remove
        const proportion_to_remove = diff / msg.metadata.tokens;
        this.logger.debug(`Proportion of content to remove: ${proportion_to_remove.toFixed(2)}`);
        if (proportion_to_remove > 0.99) {
            this.logger.error(`Max token limit reached - proportion to remove: ${proportion_to_remove}`);
            throw new Error(`Max token limit reached - history is too long - reduce the system prompt or task. ` +
                `proportion_to_remove: ${proportion_to_remove}`);
        }
        // Remove the old, overly long state message
        this.state.history.removeLastStateMessage();
        // Re-add it with the trimmed content
        const newMsg = new HumanMessage({
            content: msg.message.content.slice(0, -Math.floor(msg.message.content.length * proportion_to_remove))
        });
        this._add_message_with_tokens(newMsg);
        const last_msg = this.state.history.messages[this.state.history.messages.length - 1];
        this.logger.debug(`Added message with ${last_msg.metadata.tokens} tokens - ` +
            `total tokens now: ${this.state.history.current_tokens}/${this.settings.maxTokens} - ` +
            `total messages: ${this.state.history.messages.length}`);
    }

    _remove_last_state_message() {
        this.logger.debug("Removing last state message from history");
        this.state.history.removeLastStateMessage();
        this.logger.debug(`After removal - total messages: ${this.state.history.messages.length}, total tokens: ${this.state.history.current_tokens}`);
    }

    add_tool_message(content) {
        this.logger.debug(`Adding tool message with ID: ${this.state.tool_id}, content length: ${content.length}`);
        const msg = new ToolMessage({ content: content, tool_call_id: String(this.state.tool_id) });
        this.logger.debug(`Incrementing tool ID from ${this.state.tool_id} to ${this.state.tool_id + 1}`);
        this.state.tool_id += 1;
        this._add_message_with_tokens(msg);
        this.logger.debug("Tool message added to history");
    }
}

export { MessageManagerSettings, };
//# sourceMappingURL=services.js.map
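
For orientation, here is a minimal usage sketch of the MessageManager and MessageManagerSettings exported above. The import path, the console logger, the SystemMessage prompt, and the shape of the in-memory agent state (a history with messages, current_tokens, addMessage and removeLastStateMessage, plus a tool_id counter) are assumptions inferred from how this module reads them, not an API documented by the package.

// Usage sketch (illustrative only, based on how this module consumes its inputs)
import { SystemMessage } from "@langchain/core/messages";
import { MessageManager, MessageManagerSettings } from "./services"; // path is an assumption

// Hypothetical in-memory state satisfying what MessageManager expects
const state = {
    tool_id: 1,
    history: {
        messages: [],
        current_tokens: 0,
        addMessage(message, metadata, position) {
            const entry = { message, metadata };
            if (position !== undefined) {
                this.messages.splice(position, 0, entry);
            } else {
                this.messages.push(entry);
            }
            this.current_tokens += metadata.tokens;
        },
        removeLastStateMessage() {
            const removed = this.messages.pop();
            if (removed) {
                this.current_tokens -= removed.metadata.tokens;
            }
        },
    },
};

const settings = new MessageManagerSettings({
    maxTokens: 8000,
    includeAttributes: ['title', 'aria-label'],
});
const systemPrompt = new SystemMessage({ content: 'You are a browser automation agent.' });

// console provides error/log/debug/info/warn/trace, so it satisfies the logger interface
const manager = new MessageManager(
    'Open example.com and read the headline',
    systemPrompt,
    settings,
    state,
    console
);

// Before each LLM call: trim history if over the token budget, then fetch the messages to send
manager.cut_messages();
const messages = manager.get_messages();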