@jackhua/mini-langchain
Version: (not captured in this extract)
A lightweight TypeScript implementation of LangChain with cost-optimization features
304 lines (295 loc) • 9.21 kB
JavaScript (compiled from TypeScript)
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.EntityMemory = exports.ConversationSummaryMemory = exports.ConversationBufferWindowMemory = exports.ConversationBufferMemory = exports.ChatMessageHistory = exports.BaseMemory = void 0;
/**
 * Base class for all memory implementations.
 *
 * The body is empty because this file is compiled TypeScript output:
 * the memory contract (`memoryVariables`, `loadMemoryVariables`,
 * `saveContext`, `clear`) is presumably declared abstract at the type
 * level and erased by compilation — TODO confirm against the .d.ts.
 * The concrete subclasses in this module (ConversationBufferMemory,
 * ConversationSummaryMemory, EntityMemory) supply the runtime behavior.
 */
class BaseMemory {
}
exports.BaseMemory = BaseMemory;
/**
 * Simple chat memory that stores the conversation as an ordered list of
 * `{ type, content }` message records.
 */
class ChatMessageHistory {
    /**
     * @param {Array<{type: string, content: string}>} [messages] - optional
     *   seed history; the array reference is used as-is (not copied).
     */
    constructor(messages) {
        this.chatHistory = [];
        if (messages) {
            this.chatHistory = messages;
        }
    }
    /** Append a human-authored message. */
    addUserMessage(message) {
        this.addMessage({ type: 'human', content: message });
    }
    /** Append an AI-authored message. */
    addAIMessage(message) {
        this.addMessage({ type: 'ai', content: message });
    }
    /** Append a system message. */
    addSystemMessage(message) {
        this.addMessage({ type: 'system', content: message });
    }
    /** Append an already-constructed message record. */
    addMessage(message) {
        this.chatHistory.push(message);
    }
    /** Discard all stored messages. */
    clear() {
        this.chatHistory = [];
    }
    /** @returns {Array<{type: string, content: string}>} the live history array. */
    getMessages() {
        return this.chatHistory;
    }
}
exports.ChatMessageHistory = ChatMessageHistory;
/**
 * Buffer memory that keeps the entire conversation and exposes it either
 * as raw message objects or as a single "Role: content" transcript string.
 */
class ConversationBufferMemory extends BaseMemory {
    /**
     * @param {object} [config]
     * @param {ChatMessageHistory} [config.chatMemory] - backing message store.
     * @param {boolean} [config.returnMessages=true] - return message objects
     *   instead of a formatted transcript string.
     * @param {string} [config.inputKey='input'] - key read from `inputs` in saveContext.
     * @param {string} [config.outputKey='output'] - key read from `outputs` in saveContext.
     * @param {string} [config.memoryKey='history'] - key under which memory is exposed.
     */
    constructor(config) {
        super();
        const opts = config ?? {};
        this.chatMemory = opts.chatMemory || new ChatMessageHistory();
        this.returnMessages = opts.returnMessages ?? true;
        this.inputKey = opts.inputKey || 'input';
        this.outputKey = opts.outputKey || 'output';
        this.memoryKey = opts.memoryKey || 'history';
    }
    /** @returns {string[]} the keys this memory contributes to prompts. */
    get memoryVariables() {
        return [this.memoryKey];
    }
    /** Load stored history, as messages or as a transcript string. */
    loadMemoryVariables(_values) {
        const value = this.returnMessages
            ? this.chatMemory.getMessages()
            : this.getBuffer();
        return { [this.memoryKey]: value };
    }
    /** Record one exchange; empty/missing sides are skipped. */
    saveContext(inputs, outputs) {
        const userText = inputs[this.inputKey];
        const aiText = outputs[this.outputKey];
        if (userText) {
            this.chatMemory.addUserMessage(userText);
        }
        if (aiText) {
            this.chatMemory.addAIMessage(aiText);
        }
    }
    /** Discard all stored messages. */
    clear() {
        this.chatMemory.clear();
    }
    /** @returns {string} the history as newline-joined "Role: content" lines. */
    getBuffer() {
        const roleLabels = new Map([
            ['human', 'Human'],
            ['ai', 'AI'],
            ['system', 'System'],
        ]);
        return this.chatMemory
            .getMessages()
            .map((msg) => `${roleLabels.get(msg.type) ?? 'Unknown'}: ${msg.content}`)
            .join('\n');
    }
}
exports.ConversationBufferMemory = ConversationBufferMemory;
/**
 * Window buffer memory that only keeps the last k interactions
 * (an interaction = one human message + one AI message).
 */
class ConversationBufferWindowMemory extends ConversationBufferMemory {
    /**
     * @param {object} [config] - ConversationBufferMemory config, plus:
     * @param {number} [config.k=5] - number of recent exchanges to retain.
     */
    constructor(config) {
        super(config);
        // `??` (not `||`): an explicit k of 0 must be honored, not silently
        // replaced by the default of 5.
        this.k = config?.k ?? 5;
    }
    /** Load only the most recent k exchanges, as messages or a transcript. */
    loadMemoryVariables(_values) {
        const messages = this.chatMemory.getMessages();
        // Keep last k exchanges (k * 2 messages). Guard k <= 0 explicitly:
        // slice(-0) === slice(0) would return the WHOLE array, not an empty window.
        const recentMessages = this.k > 0 ? messages.slice(-this.k * 2) : [];
        if (this.returnMessages) {
            return {
                [this.memoryKey]: recentMessages
            };
        }
        return {
            [this.memoryKey]: this.formatMessages(recentMessages)
        };
    }
    /** @returns {string} the given messages as newline-joined "Role: content" lines. */
    formatMessages(messages) {
        const strings = [];
        for (const message of messages) {
            let role;
            switch (message.type) {
                case 'human':
                    role = 'Human';
                    break;
                case 'ai':
                    role = 'AI';
                    break;
                case 'system':
                    role = 'System';
                    break;
                default:
                    role = 'Unknown';
            }
            strings.push(`${role}: ${message.content}`);
        }
        return strings.join('\n');
    }
}
exports.ConversationBufferWindowMemory = ConversationBufferWindowMemory;
/**
 * Summary memory that maintains a running LLM-generated summary of the
 * conversation instead of the full transcript.
 */
class ConversationSummaryMemory extends BaseMemory {
    /**
     * @param {object} config
     * @param {{call: (prompt: string) => Promise<string>}} config.llm - model used to produce summaries.
     * @param {ChatMessageHistory} [config.chatMemory] - backing message store.
     * @param {string} [config.summaryMessage=''] - seed summary to start from.
     * @param {*} [config.prompt] - custom summarization prompt (stored; not used by this implementation).
     * @param {string} [config.memoryKey='history'] - key under which the summary is exposed.
     * @param {string} [config.inputKey='input'] - key read from `inputs` in saveContext.
     * @param {string} [config.outputKey='output'] - key read from `outputs` in saveContext.
     */
    constructor(config) {
        super();
        this.llm = config.llm;
        this.chatMemory = config.chatMemory || new ChatMessageHistory();
        this.summaryMessage = config.summaryMessage || '';
        this.prompt = config.prompt;
        this.memoryKey = config.memoryKey || 'history';
        // Configurable keys for consistency with ConversationBufferMemory;
        // the defaults preserve the previous hard-coded 'input'/'output' behavior.
        this.inputKey = config.inputKey || 'input';
        this.outputKey = config.outputKey || 'output';
    }
    /** @returns {string[]} the keys this memory contributes to prompts. */
    get memoryVariables() {
        return [this.memoryKey];
    }
    /** Expose the current running summary string. */
    loadMemoryVariables(_values) {
        return {
            [this.memoryKey]: this.summaryMessage
        };
    }
    /**
     * Record one exchange and fold it into the running summary via the LLM.
     * No-op when both sides of the exchange are empty.
     */
    async saveContext(inputs, outputs) {
        const inputStr = inputs[this.inputKey];
        const outputStr = outputs[this.outputKey];
        // Track exactly which messages are new so the summary update sees only
        // this exchange. (Slicing the history by a fixed count of 2 could pull
        // in an older message when only one side of the exchange is present.)
        const newMessages = [];
        if (inputStr) {
            this.chatMemory.addUserMessage(inputStr);
            newMessages.push({ type: 'human', content: inputStr });
        }
        if (outputStr) {
            this.chatMemory.addAIMessage(outputStr);
            newMessages.push({ type: 'ai', content: outputStr });
        }
        // Nothing new was said: skip the LLM round-trip entirely.
        if (newMessages.length === 0) {
            return;
        }
        const newLines = this.formatMessages(newMessages);
        if (this.summaryMessage) {
            // Update existing summary
            const summaryPrompt = `Current summary: ${this.summaryMessage}
New lines of conversation:
${newLines}
New summary:`;
            this.summaryMessage = await this.llm.call(summaryPrompt);
        }
        else {
            // Create initial summary
            const summaryPrompt = `Summarize the following conversation:
${newLines}
Summary:`;
            this.summaryMessage = await this.llm.call(summaryPrompt);
        }
    }
    /** Discard both the stored messages and the running summary. */
    clear() {
        this.chatMemory.clear();
        this.summaryMessage = '';
    }
    /**
     * @returns {string} human/ai messages as "Role: content" lines;
     *   other message types are skipped.
     */
    formatMessages(messages) {
        const strings = [];
        for (const message of messages) {
            let role;
            switch (message.type) {
                case 'human':
                    role = 'Human';
                    break;
                case 'ai':
                    role = 'AI';
                    break;
                default:
                    continue;
            }
            strings.push(`${role}: ${message.content}`);
        }
        return strings.join('\n');
    }
}
exports.ConversationSummaryMemory = ConversationSummaryMemory;
/**
 * Entity memory that tracks entities mentioned in the conversation and
 * keeps an LLM-maintained summary string per entity.
 */
class EntityMemory extends BaseMemory {
    /**
     * @param {object} config
     * @param {{call: (prompt: string) => Promise<string>}} config.llm - model used for extraction/summarization.
     * @param {string} [config.memoryKey='entities'] - key under which entity info is exposed.
     * @param {string} [config.entityExtractionPrompt] - template with a {conversation} placeholder.
     * @param {string} [config.entitySummarizationPrompt] - template with {entity}, {summary},
     *   and {conversation} placeholders.
     */
    constructor(config) {
        super();
        // entity name -> latest summary string
        this.entities = new Map();
        this.llm = config.llm;
        this.memoryKey = config.memoryKey || 'entities';
        this.entityExtractionPrompt = config.entityExtractionPrompt ||
            `Extract all entities mentioned in the following conversation.
Return them as a comma-separated list.
Conversation:
{conversation}
Entities:`;
        this.entitySummarizationPrompt = config.entitySummarizationPrompt ||
            `Update the summary of "{entity}" based on the following conversation.
Current summary: {summary}
Conversation:
{conversation}
New summary:`;
    }
    /** @returns {string[]} the keys this memory contributes to prompts. */
    get memoryVariables() {
        return [this.memoryKey];
    }
    /** Expose all known entities as newline-joined "entity: summary" lines. */
    loadMemoryVariables(_values) {
        const entityInfo = [];
        for (const [entity, summary] of this.entities.entries()) {
            entityInfo.push(`${entity}: ${summary}`);
        }
        return {
            [this.memoryKey]: entityInfo.join('\n')
        };
    }
    /**
     * Extract entities from the exchange and refresh each entity's summary.
     */
    async saveContext(inputs, outputs) {
        const inputStr = inputs.input || '';
        const outputStr = outputs.output || '';
        const conversation = `Human: ${inputStr}\nAI: ${outputStr}`;
        // Extract entities. Use replaceAll with a function replacement so that
        // (a) every {conversation} occurrence in a custom template is filled,
        // not just the first, and (b) `$`-sequences in the conversation text
        // are not interpreted as special replacement patterns.
        const extractPrompt = this.entityExtractionPrompt.replaceAll('{conversation}', () => conversation);
        const entitiesStr = await this.llm.call(extractPrompt);
        // Dedupe so a repeated name does not trigger redundant, racing updates.
        const entities = [...new Set(entitiesStr.split(',').map((e) => e.trim()).filter(Boolean))];
        // Each entity's summary update is independent, so run them in parallel.
        await Promise.all(entities.map(async (entity) => {
            const currentSummary = this.entities.get(entity) || 'No information yet.';
            const updatePrompt = this.entitySummarizationPrompt
                .replaceAll('{entity}', () => entity)
                .replaceAll('{summary}', () => currentSummary)
                .replaceAll('{conversation}', () => conversation);
            const newSummary = await this.llm.call(updatePrompt);
            this.entities.set(entity, newSummary);
        }));
    }
    /** Forget all tracked entities. */
    clear() {
        this.entities.clear();
    }
    /** @returns {string|undefined} the stored summary for one entity. */
    getEntity(entity) {
        return this.entities.get(entity);
    }
    /** @returns {Map<string, string>} a defensive copy of the entity map. */
    getAllEntities() {
        return new Map(this.entities);
    }
}
exports.EntityMemory = EntityMemory;
//# sourceMappingURL=memory.js.map