/*
 * conversation-engine
 *
 * A powerful wrapper around the OpenAI API, providing additional features and
 * making it easier to interact with AI models. Seamlessly chat with your AI
 * assistant, include context strings, and manage conversation history.
 */
import { updateAndGetMessageHistory } from './manageMessageHistory.js';
import { createContextFromStrings } from './createContextMessage.js';
import { createSystemMessage } from './createSystemMessage.js';
import { sendMessage } from './sendMessage.js';
import { getChatConfig } from '../config.js';
/**
 * Processes a user message with the given context strings and optional system
 * message content. Retrieves the chat configuration and message history, adds
 * context and system messages to the chat log, sends the user message to the
 * AI, and updates the message history with the user message and AI response.
 *
 * @param {Message} userMessage - The user message to be processed.
 * @param {string[]} [contexts] - Optional array of context strings to be included in the chat.
 * @param {string} [systemMessageContent] - Optional system message content to be included in the chat.
 * @returns {Promise<Message>} A Promise that resolves to the AI-generated response message.
 */
export async function chat(userMessage, contexts = [], systemMessageContent = '') {
    const { modelSelection, historyLength, historySummarizationModel, openaiOptions } = getChatConfig();
    // Get the (possibly summarized) message history.
    const history = await updateAndGetMessageHistory(undefined, historyLength, historySummarizationModel);
    // Copy the history so pushes below do not mutate the shared history array
    // returned by updateAndGetMessageHistory.
    const chatLog = [...history];
    // Add contexts to the chat log (only when any context text was produced).
    const contextMessage = createContextFromStrings(contexts);
    if (contextMessage.content) {
        chatLog.push(contextMessage);
    }
    // Add system message to the chat log.
    if (systemMessageContent) {
        chatLog.push(createSystemMessage(systemMessageContent, modelSelection));
    }
    // Add user message to the chat log.
    chatLog.push(userMessage);
    // Send message and get response.
    const response = await sendMessage(chatLog, modelSelection, openaiOptions);
    // Update history in the background; deliberately not awaited, but handle
    // rejection so a storage failure cannot become an unhandled rejection.
    void updateAndGetMessageHistory([userMessage, response]).catch((err) => {
        console.error('Failed to update message history:', err);
    });
    return response;
}
//# sourceMappingURL=processUserMessage.js.map