n8n-nodes-graphiti
Graphiti temporal knowledge graph memory for n8n AI agents
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.GraphitiChatMemory = void 0;
const chat_memory_1 = require("@langchain/community/memory/chat_memory");
const axios_1 = __importDefault(require("axios"));
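/**
 * LangChain chat memory backed by a Graphiti temporal knowledge graph server.
 * Short-term turns live in the in-process `chatHistory`; long-term memory is
 * appended to and queried from the Graphiti HTTP API.
 *
 * Required fields: apiUrl, apiKey, userId, chatHistory.
 * Optional fields: returnMessages, inputKey, outputKey, memoryKey,
 * contextWindowLength (default 5), searchLimit (default 10).
 */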
class GraphitiChatMemory extends chat_memory_1.BaseChatMemory {
    constructor(fields) {
        super({
            returnMessages: fields.returnMessages ?? false,
            inputKey: fields.inputKey ?? 'input',
            outputKey: fields.outputKey ?? 'output',
            chatHistory: fields.chatHistory,
        });
        this.userId = fields.userId;
        this.contextWindowLength = fields.contextWindowLength ?? 5;
        this.searchLimit = fields.searchLimit ?? 10;
        this.memoryKey = fields.memoryKey ?? 'chat_history';
        // Initialize axios client with timeout and auth headers
        this.apiClient = axios_1.default.create({
            baseURL: fields.apiUrl,
            timeout: 180000, // 3 minutes - Graphiti can be very slow with complex processing
            headers: {
                'Content-Type': 'application/json',
                'X-API-KEY': fields.apiKey,
            },
        });
    }
    get memoryKeys() {
        return [this.memoryKey];
    }
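    // Sketch of the string this memory yields under `memoryKey` (the fact text
    // and score below are illustrative placeholders, not real output):
    //
    //   === Relevant Facts from Long-term Memory ===
    //   1. User works with n8n workflows (confidence: 0.87)
    //
    //   === Recent Conversation ===
    //   User: ...
    //   Assistant: ...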
    /**
     * Load memory variables by combining short-term memory with long-term facts from Graphiti
     */
    async loadMemoryVariables(values) {
        try {
            const userInput = values[this.inputKey || 'input'] || '';
            console.log(`[Graphiti] Loading memory for user: ${this.userId}, input: ${userInput}`);
            const longTermFacts = [];
            const recentEpisodes = [];
            // Query Graphiti for relevant long-term facts (semantic search)
            if (userInput && typeof userInput === 'string') {
                try {
                    const queryRequest = {
                        user_id: this.userId,
                        query: userInput,
                        limit: this.searchLimit,
                    };
                    console.log('[Graphiti] Querying semantic facts...');
                    const response = await this.apiClient.post('/memory/query', queryRequest);
                    if (response.data.hits && response.data.hits.length > 0) {
                        response.data.hits.forEach((hit, index) => {
                            longTermFacts.push(`${index + 1}. ${hit.fact} (confidence: ${hit.score.toFixed(2)})`);
                        });
                        console.log(`[Graphiti] Found ${response.data.hits.length} relevant facts`);
                    }
                }
                catch (error) {
                    console.error('[Graphiti] Error querying semantic facts:', error);
                    // Continue with empty long-term facts on error
                }
            }
            // Get recent conversation episodes from the Graphiti database
            try {
                console.log(`[Graphiti] Fetching last ${this.contextWindowLength} episodes...`);
                const episodesResponse = await this.apiClient.get(`/memory/users/${this.userId}/episodes`, {
                    params: {
                        limit: this.contextWindowLength,
                    },
                });
                if (episodesResponse.data.episodes && episodesResponse.data.episodes.length > 0) {
                    recentEpisodes.push(...episodesResponse.data.episodes);
                    console.log(`[Graphiti] Found ${recentEpisodes.length} recent episodes`);
                }
            }
            catch (error) {
                console.error('[Graphiti] Error fetching episodes:', error);
                // Fall back to in-memory chat history if the episodes endpoint fails
                console.log('[Graphiti] Falling back to chatHistory...');
                try {
                    const messages = await this.chatHistory.getMessages();
                    const recentMessages = messages.slice(-this.contextWindowLength);
                    recentMessages.forEach((msg) => {
                        recentEpisodes.push({
                            content: String(msg.content),
                            role: msg._getType() === 'human' ? 'user' : 'assistant',
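                            // NOTE: the fallback cannot recover the original
                            // message time, so these episodes are stamped "now".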
                            timestamp: new Date().toISOString(),
                        });
                    });
                }
                catch (fallbackError) {
                    console.error('[Graphiti] Fallback to chatHistory also failed:', fallbackError);
                }
            }
            // Format memory content
            let memoryContent = '';
            if (longTermFacts.length > 0) {
                memoryContent += '=== Relevant Facts from Long-term Memory ===\n';
                memoryContent += longTermFacts.join('\n');
                memoryContent += '\n\n';
            }
            if (recentEpisodes.length > 0) {
                memoryContent += '=== Recent Conversation ===\n';
                const conversationHistory = recentEpisodes
                    .map((episode) => {
                        const role = episode.role === 'user' ? 'User' : 'Assistant';
                        return `${role}: ${episode.content}`;
                    })
                    .join('\n');
                memoryContent += conversationHistory;
            }
            console.log('[Graphiti] Memory loaded successfully');
            return {
                [this.memoryKey]: memoryContent || 'No previous conversation history.',
            };
        }
        catch (error) {
            console.error('[Graphiti] Error loading memory variables:', error);
            // Return empty memory on error
            return {
                [this.memoryKey]: 'No previous conversation history.',
            };
        }
    }
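    // Persistence note: each turn below becomes two POST /memory/append calls
    // (role 'user', then role 'assistant'). metadata.session_id reuses userId,
    // so separate chats by the same user are not distinguished per-session.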
    /**
     * Save conversation context to both short-term memory and Graphiti long-term storage
     */
    async saveContext(inputValues, outputValues) {
        console.log('[Graphiti] ======= saveContext CALLED =======');
        console.log('[Graphiti] inputValues:', JSON.stringify(inputValues, null, 2));
        console.log('[Graphiti] outputValues:', JSON.stringify(outputValues, null, 2));
        try {
            // Extract messages first
            const userInput = inputValues[this.inputKey || 'input'] || '';
            const aiResponse = outputValues[this.outputKey || 'output'] || '';
            // Save to chat history (BaseChatMemory) - ONLY input and output, no metadata.
            // This prevents storing large system_message and formatting_instructions in memory.
            console.log('[Graphiti] Calling super.saveContext with filtered data...');
            await super.saveContext({ [this.inputKey || 'input']: userInput }, { [this.outputKey || 'output']: aiResponse });
            console.log('[Graphiti] super.saveContext completed successfully');
            console.log(`[Graphiti] userId: ${this.userId}`);
            console.log(`[Graphiti] userInput extracted: "${userInput}"`);
            console.log(`[Graphiti] aiResponse extracted: "${aiResponse}"`);
            // Save to Graphiti long-term storage
            const timestamp = new Date().toISOString();
            // Save user message
            if (userInput) {
                console.log('[Graphiti] User input detected, saving to Graphiti...');
                try {
                    const userRequest = {
                        user_id: this.userId,
                        text: String(userInput),
                        metadata: {
                            role: 'user',
                            source: 'n8n',
                            session_id: this.userId,
                            timestamp,
                        },
                    };
                    console.log('[Graphiti] Sending user message:', userRequest);
                    const userApiResponse = await this.apiClient.post('/memory/append', userRequest);
                    console.log('[Graphiti] ✓ User message saved! Status:', userApiResponse.status);
                }
                catch (error) {
                    console.error('[Graphiti] ✗ Error saving user message:', error);
                    if (axios_1.default.isAxiosError(error)) {
                        console.error('[Graphiti] Response data:', error.response?.data);
                        console.error('[Graphiti] Response status:', error.response?.status);
                    }
                }
            }
            else {
                console.log('[Graphiti] No user input to save');
            }
            // Save AI message
            if (aiResponse) {
                console.log('[Graphiti] AI response detected, saving to Graphiti...');
                try {
                    const aiRequest = {
                        user_id: this.userId,
                        text: String(aiResponse),
                        metadata: {
                            role: 'assistant',
                            source: 'n8n',
                            session_id: this.userId,
                            timestamp,
                        },
                    };
                    console.log('[Graphiti] Sending AI message:', aiRequest);
                    const aiApiResponse = await this.apiClient.post('/memory/append', aiRequest);
                    console.log('[Graphiti] ✓ AI message saved! Status:', aiApiResponse.status);
                }
                catch (error) {
                    console.error('[Graphiti] ✗ Error saving AI message:', error);
                    if (axios_1.default.isAxiosError(error)) {
                        console.error('[Graphiti] Response data:', error.response?.data);
                        console.error('[Graphiti] Response status:', error.response?.status);
                    }
                }
            }
            else {
                console.log('[Graphiti] No AI response to save');
            }
            console.log('[Graphiti] ======= saveContext COMPLETED =======');
        }
        catch (error) {
            console.error('[Graphiti] ✗✗✗ FATAL ERROR in saveContext:', error);
            // Don't throw - allow workflow to continue even if memory save fails
        }
    }
    /**
     * Clear short-term memory. Only the in-process chat history is cleared;
     * long-term Graphiti storage is left intact (no delete call is made).
     */
    async clear() {
        await this.chatHistory.clear();
    }
}
exports.GraphitiChatMemory = GraphitiChatMemory;
//# sourceMappingURL=GraphitiChatMemory.js.map
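// ---------------------------------------------------------------------------
// Usage sketch (not part of the module). Assumes a Graphiti server reachable
// at the placeholder URL below and @langchain/core's InMemoryChatMessageHistory
// as the short-term store; the URL, API key, and user id are placeholders.
//
//   const { GraphitiChatMemory } = require('./GraphitiChatMemory');
//   const { InMemoryChatMessageHistory } = require('@langchain/core/chat_history');
//
//   async function demo() {
//       const memory = new GraphitiChatMemory({
//           apiUrl: 'http://localhost:8000',    // placeholder Graphiti base URL
//           apiKey: 'changeme',                 // sent as the X-API-KEY header
//           userId: 'user-123',
//           chatHistory: new InMemoryChatMessageHistory(),
//           contextWindowLength: 5,             // recent episodes to load
//           searchLimit: 10,                    // max semantic-search hits
//       });
//       await memory.saveContext({ input: 'My name is Ada.' }, { output: 'Hi Ada!' });
//       const vars = await memory.loadMemoryVariables({ input: 'What is my name?' });
//       console.log(vars.chat_history); // long-term facts + recent conversation
//   }
// ---------------------------------------------------------------------------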