#!/usr/bin/env node
/*
 * cursor10x-mcp
 * Memory System for Cursor using MCP - Provides persistent context awareness for Claude
 * 1,546 lines (1,400 loc) • 190 kB • JavaScript
 */
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
import {
CallToolRequestSchema,
ListToolsRequestSchema,
} from "@modelcontextprotocol/sdk/types.js";
import { createClient } from "@libsql/client";
import { fileURLToPath } from "url";
import { dirname } from "path";
import * as fs from 'fs';
import * as path from 'path';
// Load environment variables if they don't exist in process.env
if (!process.env.TURSO_DATABASE_URL || !process.env.TURSO_AUTH_TOKEN) {
  try {
    // dotenv is optional: when it is not installed we simply skip file loading.
    const dotenv = await import('dotenv').catch(() => null);
    if (dotenv) {
      // Candidate env files, highest priority first.
      const candidates = [
        path.join(process.cwd(), '.env.local'),
        path.join(process.cwd(), '.env'),
        path.join(dirname(fileURLToPath(import.meta.url)), '.env')
      ];
      // Load only the first file that actually exists.
      const envFile = candidates.find((candidate) => fs.existsSync(candidate));
      if (envFile) {
        dotenv.config({ path: envFile });
        console.log(`Loaded environment variables from ${envFile}`);
      }
    }
  } catch (error) {
    // Best-effort only - never block startup on env-file problems.
    console.log(`Note: Could not load environment variables from file: ${error.message}`);
  }
}
// Set up proper paths for ESM (CommonJS-style __filename/__dirname equivalents,
// which do not exist natively in ES modules).
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
/**
 * Render a Unix timestamp as a friendly relative/absolute string.
 *
 * Buckets, checked in order: "<N> minute(s) ago" within the last hour,
 * "Today at H:MM", "Yesterday at H:MM", the weekday name within the last
 * week, otherwise a locale date plus "at H:MM" (minutes zero-padded).
 *
 * @param {number} timestamp - Unix timestamp (milliseconds) to format
 * @returns {string} Human readable timestamp
 */
function formatTimestamp(timestamp) {
  const then = new Date(timestamp);
  const now = new Date();
  const elapsedMs = now - then;

  // Clock-time suffix shared by every "at H:MM" branch.
  const clock = `${then.getHours()}:${then.getMinutes().toString().padStart(2, '0')}`;
  const sameCalendarDay = (a, b) =>
    a.getDate() === b.getDate() &&
    a.getMonth() === b.getMonth() &&
    a.getFullYear() === b.getFullYear();

  // Less than an hour old: relative minutes.
  if (elapsedMs < 60 * 60 * 1000) {
    const minutesAgo = Math.floor(elapsedMs / (60 * 1000));
    return `${minutesAgo} minute${minutesAgo !== 1 ? 's' : ''} ago`;
  }

  if (sameCalendarDay(then, now)) {
    return `Today at ${clock}`;
  }

  const yesterday = new Date(now);
  yesterday.setDate(yesterday.getDate() - 1);
  if (sameCalendarDay(then, yesterday)) {
    return `Yesterday at ${clock}`;
  }

  // Less than a week old: use the weekday name.
  if (elapsedMs < 7 * 24 * 60 * 60 * 1000) {
    const days = ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday'];
    return `${days[then.getDay()]} at ${clock}`;
  }

  // Default format for older dates.
  return `${then.toLocaleDateString()} at ${clock}`;
}
// Vector embedding and similarity search utilities
/**
 * Produce a deterministic pseudo-embedding for a piece of text.
 *
 * NOTE: this is a stand-in for a real embedding model/API. Each dimension is
 * a hash-like mix of the lowercased, trimmed text's character codes, squashed
 * with tanh and then scaled to unit length so cosine similarity behaves
 * sensibly.
 *
 * @param {string} text - Text to generate embedding for
 * @param {number} dimensions - Dimensionality of the vector (default: 128)
 * @returns {Promise<Float32Array>} Unit-length float32 vector (zero vector on error)
 */
async function createEmbedding(text, dimensions = 128) {
  try {
    const normalized = text.toLowerCase().trim();
    const embedding = new Float32Array(dimensions);

    // Each dimension mixes every character with a dimension-specific seed.
    for (let dim = 0; dim < dimensions; dim++) {
      let acc = 0;
      for (let pos = 0; pos < normalized.length; pos++) {
        const code = normalized.charCodeAt(pos);
        acc += Math.sin(code * (dim + 1) * 0.01) * Math.cos(pos * 0.01);
      }
      // Squash into [-1, 1].
      embedding[dim] = Math.tanh(acc);
    }

    // Normalize to unit length (best for cosine similarity); skip the all-zero case.
    const magnitude = Math.sqrt(embedding.reduce((sum, v) => sum + v * v, 0));
    if (magnitude > 0) {
      for (let dim = 0; dim < dimensions; dim++) {
        embedding[dim] = embedding[dim] / magnitude;
      }
    }

    logDebug(`Generated ${dimensions}-d embedding for text`);
    return embedding;
  } catch (error) {
    log(`Error creating embedding: ${error.message}`, "error");
    // Return zero vector as fallback
    return new Float32Array(dimensions);
  }
}
/**
 * Convert a Float32Array to a Buffer for database storage.
 *
 * The vector is first coerced to the configured dimensionality
 * (VECTOR_DIMENSIONS env var, default 128) by zero-padding or truncating.
 * If a db handle is available we try Turso's vector32() SQL function to
 * produce a native F32_BLOB; otherwise (or on any failure) the raw
 * little-endian float32 bytes of the array are returned.
 *
 * NOTE(review): db.prepare(...).get() is async in this adapter but is not
 * awaited here, so the vector32() branches can never observe `.vec` and the
 * function effectively always uses the direct-buffer fallback. Making this
 * function async would change its interface; flagged for follow-up.
 *
 * @param {Float32Array} vector - Vector to convert
 * @returns {Buffer} Buffer representation of the vector
 */
function vectorToBuffer(vector) {
  try {
    // Resolve the expected dimensionality from the environment.
    const DEFAULT_VECTOR_DIMS = 128;
    const configuredDims = process.env.VECTOR_DIMENSIONS ?
      parseInt(process.env.VECTOR_DIMENSIONS, 10) : DEFAULT_VECTOR_DIMS;

    // Coerce the vector to the configured dimensionality.
    if (vector.length !== configuredDims) {
      log(`VECTOR WARNING: Vector dimension mismatch. Expected ${configuredDims}, got ${vector.length}`, "error");
      if (vector.length < configuredDims) {
        // Pad with zeros if too short.
        const paddedVector = new Float32Array(configuredDims);
        paddedVector.set(vector);
        vector = paddedVector;
        log(`VECTOR DEBUG: Padded vector to ${configuredDims} dimensions`, "info");
      } else {
        // Truncate if too long (slice allocates a fresh, tightly-sized array).
        vector = vector.slice(0, configuredDims);
        log(`VECTOR DEBUG: Truncated vector to ${configuredDims} dimensions`, "info");
      }
    }

    // String form accepted by Turso's vector32() function.
    const vectorString = '[' + Array.from(vector).join(', ') + ']';

    // Try the Turso vector32() conversion first (see NOTE above).
    if (db) {
      try {
        const result = db.prepare(`SELECT vector32(?) AS vec`).get(vectorString);
        if (result && result.vec) {
          return result.vec;
        }
      } catch (vector32Error) {
        log(`VECTOR WARNING: vector32 function failed: ${vector32Error.message}`, "error");
        // Retry with an explicit dimensions argument.
        try {
          const resultWithDims = db.prepare(`SELECT vector32(?, ${configuredDims}) AS vec`).get(vectorString);
          if (resultWithDims && resultWithDims.vec) {
            return resultWithDims.vec;
          }
        } catch (dimError) {
          log(`VECTOR WARNING: vector32 with dimensions parameter failed: ${dimError.message}`, "error");
        }
      }
    }

    // Fallback: raw bytes of the Float32Array. Use byteOffset/byteLength so
    // that a view into a larger ArrayBuffer serializes only its own floats
    // (Buffer.from(vector.buffer) alone would capture the entire backing buffer).
    log(`VECTOR DEBUG: Falling back to direct buffer conversion`, "info");
    return Buffer.from(vector.buffer, vector.byteOffset, vector.byteLength);
  } catch (error) {
    log(`VECTOR ERROR: Error converting vector to buffer: ${error.message}`, "error");
    // Fallback to direct buffer conversion.
    return Buffer.from(vector.buffer, vector.byteOffset, vector.byteLength);
  }
}
/**
 * Convert a stored vector blob back into a Float32Array.
 *
 * Tries Turso's vector_to_json() first (for native F32_BLOB values), then
 * decodes the buffer as raw little-endian float32 bytes, padding or
 * truncating to the configured dimensionality (VECTOR_DIMENSIONS env var,
 * default 128) when the byte length does not match.
 *
 * NOTE(review): db.prepare(...).get() is async in this adapter but is not
 * awaited here, so the vector_to_json branch never yields a usable result
 * and decoding always falls through to the raw-byte path.
 *
 * @param {Buffer} buffer - Buffer from database
 * @returns {Float32Array} Vector representation (empty on null input or error)
 */
function bufferToVector(buffer) {
  try {
    if (!buffer) {
      log("VECTOR ERROR: Null buffer passed to bufferToVector", "error");
      return new Float32Array(0);
    }

    // Get the expected vector dimensions.
    const DEFAULT_VECTOR_DIMS = 128;
    const configuredDims = process.env.VECTOR_DIMENSIONS ?
      parseInt(process.env.VECTOR_DIMENSIONS, 10) : DEFAULT_VECTOR_DIMS;

    // Prefer Turso's vector_to_json() for F32_BLOB handling (see NOTE above).
    if (db) {
      try {
        const result = db.prepare(`SELECT vector_to_json(?) AS vec_json`).get(buffer);
        if (result && result.vec_json) {
          try {
            // Parse the JSON string to get the vector values.
            const vectorValues = JSON.parse(result.vec_json);
            if (Array.isArray(vectorValues)) {
              return new Float32Array(vectorValues);
            }
          } catch (jsonError) {
            log(`VECTOR WARNING: Failed to parse vector_to_json result: ${jsonError.message}`, "error");
          }
        }
      } catch (functionError) {
        log(`VECTOR DEBUG: vector_to_json function not available: ${functionError.message}`, "info");
      }
    }

    // Decode raw float32 bytes. Copy the relevant byte range instead of
    // viewing buffer.buffer directly: Node buffers are frequently views into
    // a shared pool at an offset that is not 4-byte aligned, and constructing
    // a Float32Array view at an unaligned byteOffset throws a RangeError.
    const decodeFloats = (buf) =>
      new Float32Array(buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.length - (buf.length % 4)));

    const expectedByteLength = 4 * configuredDims; // 4 bytes per float32
    if (buffer.length !== expectedByteLength) {
      log(`VECTOR WARNING: Buffer size mismatch. Expected ${expectedByteLength} bytes, got ${buffer.length}`, "error");
      const floatArray = decodeFloats(buffer);
      if (floatArray.length !== configuredDims) {
        log(`VECTOR WARNING: Converted vector has ${floatArray.length} dimensions, expected ${configuredDims}`, "error");
        // Copy into a properly sized vector, truncating or zero-padding.
        const properVector = new Float32Array(configuredDims);
        const copyLength = Math.min(floatArray.length, configuredDims);
        for (let i = 0; i < copyLength; i++) {
          properVector[i] = floatArray[i];
        }
        return properVector;
      }
      return floatArray;
    }

    // Normal case - buffer length matches the configured dimensionality.
    return decodeFloats(buffer);
  } catch (error) {
    log(`VECTOR ERROR: Error converting buffer to vector: ${error.message}`, "error");
    // Return an empty vector on error.
    return new Float32Array(0);
  }
}
/**
 * Store an embedding vector in the database.
 *
 * Inspects the live schema of the `vectors` table: if the `vector` column is
 * a native F32_BLOB the row is inserted via Turso's vector32() function,
 * otherwise the raw bytes produced by vectorToBuffer() are inserted. After
 * insertion the row is read back once as a sanity check (result is only
 * logged, never acted on).
 *
 * @param {number} contentId - ID of the content this vector represents
 * @param {string} contentType - Type of content (message, file, snippet, etc.)
 * @param {Float32Array} vector - The embedding vector
 * @param {Object} metadata - Additional info about the vector (optional)
 * @returns {Promise<Object>} Result of the insert operation
 * @throws {Error} When the database is not initialized or the insert fails
 */
async function storeEmbedding(contentId, contentType, vector, metadata = null) {
  try {
    if (!db) {
      log("ERROR: Database not initialized in storeEmbedding", "error");
      throw new Error("Database not initialized");
    }
    // Convert the Float32Array to a string representation for vector32().
    const vectorString = '[' + Array.from(vector).join(', ') + ']';
    const now = Date.now();
    // Detailed logging for debugging vector storage issues.
    log(`VECTOR DEBUG: Attempting to store vector for ${contentType} with ID ${contentId}`, "info");
    log(`VECTOR DEBUG: Vector dimensions: ${vector.length}, Vector string: ${vectorString.substring(0, 30)}...`, "info");
    // Store in the vectors table using the vector32() function when possible.
    try {
      // Schema check: the column's declared type decides which insert path to use.
      const tableInfo = await db.prepare("PRAGMA table_info(vectors)").all();
      const vectorColumn = tableInfo.find(col => col.name === 'vector');
      const isF32Blob = vectorColumn && vectorColumn.type.includes('F32_BLOB');
      let result;
      if (isF32Blob) {
        // Use vector32 function for F32_BLOB column.
        result = await db.prepare(`
          INSERT INTO vectors (content_id, content_type, vector, created_at, metadata)
          VALUES (?, ?, vector32(?), ?, ?)
        `).run(
          contentId,
          contentType,
          vectorString,
          now,
          metadata ? JSON.stringify(metadata) : null
        );
      } else {
        // Fall back to a plain BLOB insert for the old schema.
        const vectorBuffer = vectorToBuffer(vector);
        result = await db.prepare(`
          INSERT INTO vectors (content_id, content_type, vector, created_at, metadata)
          VALUES (?, ?, ?, ?, ?)
        `).run(
          contentId,
          contentType,
          vectorBuffer,
          now,
          metadata ? JSON.stringify(metadata) : null
        );
      }
      log(`VECTOR SUCCESS: Stored ${vector.length}-d vector for ${contentType} with ID ${contentId}`, "info");
      // Verify storage by reading the newest matching row back (diagnostic only).
      const verification = await db.prepare(`
        SELECT id FROM vectors
        WHERE content_id = ? AND content_type = ?
        ORDER BY created_at DESC LIMIT 1
      `).get(contentId, contentType);
      log(`VECTOR VERIFICATION: Read back vector with database ID ${verification?.id || 'not found'}`, "info");
      return result;
    } catch (dbError) {
      log(`VECTOR ERROR: Database error while storing vector: ${dbError.message}`, "error");
      throw dbError;
    }
  } catch (error) {
    log(`Error storing embedding: ${error.message}`, "error");
    throw error;
  }
}
/**
 * Find similar content using vector similarity.
 *
 * Three strategies are tried in order, each falling back to the next on
 * failure or empty results:
 *   1. vector_top_k() against the ANN index (fastest),
 *   2. a full scan ranked by vector_distance_cos(),
 *   3. cosine similarity computed manually in JS (last resort).
 *
 * @param {Float32Array} queryVector - Vector to search for
 * @param {string} contentType - Type of content to search (optional)
 * @param {number} limit - Maximum number of results (default: 10)
 * @param {number} threshold - Similarity threshold (default: 0.7)
 * @returns {Promise<Array>} Array of similar content with similarity scores
 */
async function findSimilarVectors(queryVector, contentType = null, limit = 10, threshold = 0.7) {
  try {
    if (!db) {
      throw new Error("Database not initialized");
    }
    // Text form of the query vector, accepted by vector32().
    const vectorString = '[' + Array.from(queryVector).join(', ') + ']';

    // Strategy 1: ANN index via vector_top_k.
    try {
      // Probe whether vector_top_k is available (LIMIT 0 keeps the probe cheap).
      let hasVectorTopK = false;
      try {
        await db.prepare("SELECT 1 FROM vector_top_k('idx_vectors_ann', vector32('[0.1, 0.2, 0.3]'), 1) LIMIT 0").all();
        hasVectorTopK = true;
        log(`VECTOR DEBUG: vector_top_k function available`, "info");
      } catch (topkError) {
        hasVectorTopK = false;
        log(`VECTOR DEBUG: vector_top_k function not available: ${topkError.message}`, "info");
      }
      if (hasVectorTopK) {
        log(`VECTOR DEBUG: Running ANN similarity search with vector_top_k`, "info");
        // Build the query based on whether contentType is specified.
        let sql;
        let params;
        if (contentType) {
          // Over-fetch (limit * 2) from the index because the content_type
          // filter is applied after the ANN lookup; cap the final rows at
          // `limit` so callers never receive more than they asked for.
          sql = `
            SELECT
              v.id,
              v.content_id,
              v.content_type,
              t.score AS similarity
            FROM vector_top_k('idx_vectors_ann', vector32(?), ?) t
            JOIN vectors v ON v.rowid = t.rowid
            WHERE v.content_type = ?
            AND t.score >= ?
            ORDER BY t.score DESC
            LIMIT ?
          `;
          params = [vectorString, limit * 2, contentType, threshold, limit];
        } else {
          sql = `
            SELECT
              v.id,
              v.content_id,
              v.content_type,
              t.score AS similarity
            FROM vector_top_k('idx_vectors_ann', vector32(?), ?) t
            JOIN vectors v ON v.rowid = t.rowid
            WHERE t.score >= ?
            ORDER BY t.score DESC
            LIMIT ?
          `;
          params = [vectorString, limit, threshold, limit];
        }
        const results = await db.prepare(sql).all(...params);
        // If we found results, return them.
        if (results && results.length > 0) {
          return results;
        }
        log(`VECTOR DEBUG: No results with vector_top_k, falling back to distance calculation`, "info");
      }
    } catch (annError) {
      log(`VECTOR WARNING: ANN search failed, falling back to cosine distance: ${annError.message}`, "error");
    }

    // Strategy 2: full scan ranked by vector_distance_cos (similarity = 1 - distance).
    try {
      let sql;
      let params;
      if (contentType) {
        sql = `
          SELECT
            id,
            content_id,
            content_type,
            (1 - vector_distance_cos(vector, vector32(?))) AS similarity
          FROM vectors
          WHERE content_type = ?
          AND (1 - vector_distance_cos(vector, vector32(?))) >= ?
          ORDER BY similarity DESC
          LIMIT ?
        `;
        params = [vectorString, contentType, vectorString, threshold, limit];
      } else {
        sql = `
          SELECT
            id,
            content_id,
            content_type,
            (1 - vector_distance_cos(vector, vector32(?))) AS similarity
          FROM vectors
          WHERE (1 - vector_distance_cos(vector, vector32(?))) >= ?
          ORDER BY similarity DESC
          LIMIT ?
        `;
        params = [vectorString, vectorString, threshold, limit];
      }
      log(`VECTOR DEBUG: Running vector similarity search with vector_distance_cos`, "info");
      const results = await db.prepare(sql).all(...params);
      return results;
    } catch (vectorError) {
      // Strategy 3: fetch all candidate vectors and score them in JS.
      log(`VECTOR WARNING: Vector function search failed, falling back to manual: ${vectorError.message}`, "error");
      let sql = 'SELECT id, content_id, content_type, vector FROM vectors';
      let params = [];
      if (contentType) {
        sql += ' WHERE content_type = ?';
        params.push(contentType);
      }
      const allVectors = await db.prepare(sql).all(...params);
      // Calculate similarities manually.
      const withSimilarity = allVectors.map(row => {
        const storedVector = bufferToVector(row.vector);
        const similarity = cosineSimilarity(queryVector, storedVector);
        return { ...row, similarity };
      });
      // Filter by threshold, sort by similarity, and limit results.
      return withSimilarity
        .filter(row => row.similarity >= threshold)
        .sort((a, b) => b.similarity - a.similarity)
        .slice(0, limit);
    }
  } catch (error) {
    log(`Error finding similar vectors: ${error.message}`, "error");
    return [];
  }
}
/**
 * Cosine similarity of two equal-length vectors.
 *
 * Returns 0 for mismatched lengths or when either vector has zero magnitude,
 * so callers never see NaN.
 *
 * @param {Float32Array} a - First vector
 * @param {Float32Array} b - Second vector
 * @returns {number} Cosine similarity (-1 to 1)
 */
function cosineSimilarity(a, b) {
  // Incomparable inputs are treated as "no similarity".
  if (a.length !== b.length) {
    return 0;
  }

  let dot = 0;
  let sumSqA = 0;
  let sumSqB = 0;
  for (let i = 0; i < a.length; i++) {
    dot += a[i] * b[i];
    sumSqA += a[i] * a[i];
    sumSqB += b[i] * b[i];
  }

  const magnitudeA = Math.sqrt(sumSqA);
  const magnitudeB = Math.sqrt(sumSqB);
  if (magnitudeA === 0 || magnitudeB === 0) {
    return 0;
  }
  return dot / (magnitudeA * magnitudeB);
}
/**
 * Create vector indexes for efficient similarity search.
 * Should be called after database schema changes.
 *
 * Creates the plain lookup indexes first (content_type, content_id), then
 * tries progressively weaker options for the vector column itself:
 * ANN index via libsql_vector_idx -> plain index on vector -> basic
 * (content_id, content_type) fallback. Only a failure of the basic lookup
 * indexes makes the function return false; vector-index failures are logged
 * and tolerated.
 *
 * @returns {Promise<boolean>} Success status
 */
async function createVectorIndexes() {
  try {
    if (!db) {
      log("ERROR: Database not initialized in createVectorIndexes", "error");
      throw new Error("Database not initialized");
    }
    log("VECTOR DEBUG: Starting vector index creation", "info");
    // Basic indexes for content lookup.
    const basicIndexes = [
      `CREATE INDEX IF NOT EXISTS idx_vectors_content_type ON vectors(content_type)`,
      `CREATE INDEX IF NOT EXISTS idx_vectors_content_id ON vectors(content_id)`,
    ];
    // Try to create the basic indexes.
    for (const indexSQL of basicIndexes) {
      try {
        await db.prepare(indexSQL).run();
        log(`VECTOR DEBUG: Created basic index with SQL: ${indexSQL}`, "info");
      } catch (basicIndexError) {
        log(`VECTOR ERROR: Failed to create basic index: ${basicIndexError.message}`, "error");
        throw basicIndexError; // Fail early if even basic indexes can't be created
      }
    }
    // Now try to create the vector index using libsql_vector_idx with proper F32_BLOB column.
    try {
      log("VECTOR DEBUG: Attempting to create Turso vector index", "info");
      // First check if the database supports vector indexing
      // (informational only - the result is just logged).
      try {
        const versionCheck = await db.prepare("SELECT sqlite_version() as version").get();
        log(`VECTOR DEBUG: SQLite version: ${versionCheck?.version || 'unknown'}`, "info");
      } catch (versionError) {
        log(`VECTOR DEBUG: Could not check SQLite version: ${versionError.message}`, "info");
      }
      // Check if libsql_vector_idx is available.
      let hasVectorIdxFunction = false;
      try {
        const functionCheck = await db.prepare("SELECT typeof(libsql_vector_idx('dummy')) as type").get();
        hasVectorIdxFunction = functionCheck && functionCheck.type !== 'null';
        log(`VECTOR DEBUG: libsql_vector_idx function available: ${hasVectorIdxFunction}`, "info");
      } catch (fnError) {
        log(`VECTOR DEBUG: libsql_vector_idx function not available: ${fnError.message}`, "info");
      }
      // Create optimized vector index using proper syntax based on Turso documentation.
      if (hasVectorIdxFunction) {
        const vectorIndexSQL = `
          CREATE INDEX IF NOT EXISTS idx_vectors_ann
          ON vectors(libsql_vector_idx(vector))
          WHERE vector IS NOT NULL
        `;
        await db.prepare(vectorIndexSQL).run();
        log('VECTOR SUCCESS: Turso ANN vector index created successfully', "info");
        // Set optimal vector index parameters for performance.
        try {
          // Set the number of neighbors parameter for the ANN index
          // (trade-off between accuracy and performance).
          await db.prepare("PRAGMA libsql_vector_neighbors = 20").run();
          log('VECTOR SUCCESS: Set optimal ANN neighbors parameter', "info");
        } catch (paramError) {
          log(`VECTOR WARNING: Could not set ANN parameters: ${paramError.message}`, "error");
        }
      } else {
        // Create a simple index on the vector column if ANN indexing is not available.
        const vectorIndexSQL = `
          CREATE INDEX IF NOT EXISTS idx_vectors_vector ON vectors(vector) WHERE vector IS NOT NULL
        `;
        await db.prepare(vectorIndexSQL).run();
        log('VECTOR SUCCESS: Standard vector index created successfully', "info");
      }
    } catch (vectorError) {
      log(`VECTOR WARNING: Could not create vector index: ${vectorError.message}`, "error");
      log('VECTOR WARNING: Vector search will use full table scans which may be slower', "error");
      // Try a more basic index as fallback.
      try {
        log("VECTOR DEBUG: Attempting to create basic vector index as fallback", "info");
        const fallbackIndexSQL = `
          CREATE INDEX IF NOT EXISTS idx_vectors_basic ON vectors(content_id, content_type) WHERE vector IS NOT NULL
        `;
        await db.prepare(fallbackIndexSQL).run();
        log('VECTOR DEBUG: Created basic fallback index successfully', "info");
      } catch (fallbackError) {
        log(`VECTOR ERROR: Could not create fallback index: ${fallbackError.message}`, "error");
      }
    }
    // Check if the vectors table has any rows (diagnostic only -
    // the count does not affect the return value).
    try {
      const countResult = await db.prepare('SELECT COUNT(*) as count FROM vectors').get();
      log(`VECTOR DEBUG: Current vector count in database: ${countResult?.count || 0}`, "info");
    } catch (countError) {
      log(`VECTOR ERROR: Could not count vectors: ${countError.message}`, "error");
    }
    return true;
  } catch (error) {
    log(`ERROR: Vector index creation failed: ${error.message}`, "error");
    return false;
  }
}
/**
 * Log a message with an ISO timestamp and severity level.
 * Writes to stderr via console.error — presumably to keep stdout free for
 * the MCP stdio transport (confirm against server wiring).
 *
 * @param {string} message - Message to log
 * @param {string} [level="info"] - Severity; "error" adds an ERROR: prefix
 */
function log(message, level = "info") {
  const stamp = new Date().toISOString();
  const tag = level === "error" ? "ERROR: " : "";
  console.error(`[${stamp}] ${tag}${message}`);
}
// Log environment information for debugging.
// Secrets are masked: only the first 15 chars of the URL and token presence.
log(`Environment variables:
NODE_ENV: ${process.env.NODE_ENV || 'not set'}
TURSO_DATABASE_URL: ${process.env.TURSO_DATABASE_URL ? (process.env.TURSO_DATABASE_URL.substring(0, 15) + "...") : 'not set'}
TURSO_AUTH_TOKEN: ${process.env.TURSO_AUTH_TOKEN ? "provided" : 'not set'}`);
// Database-related code - Turso Adapter implementation
// Debug logging is opt-in via LOG_LEVEL=debug (evaluated once at startup).
let debugLogging = process.env.LOG_LEVEL === "debug";
/**
 * Log database operations when in debug mode; no-op otherwise.
 * @param {string} message - The message to log
 */
function logDebug(message) {
  if (!debugLogging) {
    return;
  }
  console.log(`[DB] ${message}`);
}
/**
 * Build a libsql client from TURSO_DATABASE_URL / TURSO_AUTH_TOKEN.
 *
 * Two URL schemes are supported: `libsql://` (remote Turso, auth token
 * required) and `file:` (local SQLite; the path is made absolute and its
 * parent directory is created on demand). Anything else is rejected.
 *
 * @returns {Object} Turso client
 * @throws {Error} When configuration is missing or the URL scheme is unsupported
 */
function createTursoClient() {
  try {
    const dbUrl = process.env.TURSO_DATABASE_URL;
    const authToken = process.env.TURSO_AUTH_TOKEN;
    log(`Database URL: ${dbUrl ? dbUrl.substring(0, 15) + "..." : "not set"}`);
    log(`Auth token: ${authToken ? "provided" : "not set"}`);

    if (!dbUrl) {
      throw new Error("TURSO_DATABASE_URL environment variable is required");
    }

    const isRemote = dbUrl.startsWith("libsql://");
    const isLocal = dbUrl.startsWith("file:");

    // Reject unknown URL schemes up front.
    if (!isRemote && !isLocal) {
      log(`Invalid database URL protocol: ${dbUrl.split("://")[0]}://`, "error");
      log(`URL should start with libsql:// or file://`, "error");
      throw new Error("Invalid database URL protocol. Must start with libsql:// or file://");
    }

    if (isRemote) {
      // Remote Turso requires an auth token.
      if (!authToken) {
        log("Auth token is required for remote Turso database but not provided", "error");
        throw new Error("Auth token is required for remote Turso database");
      }
      log("Using remote Turso database");
      return createClient({
        url: dbUrl,
        authToken: authToken
      });
    }

    if (isLocal) {
      log("Using local SQLite database");
      // Resolve the path portion of the URL to an absolute filesystem path.
      let filePath = dbUrl.replace("file:", "");
      if (!path.isAbsolute(filePath)) {
        filePath = path.join(process.cwd(), filePath);
      }

      // Make sure the database directory exists before the client opens the file.
      const dirPath = path.dirname(filePath);
      if (!fs.existsSync(dirPath)) {
        log(`Creating database directory: ${dirPath}`);
        fs.mkdirSync(dirPath, { recursive: true });
      }

      log(`Local SQLite database path: ${filePath}`);
      return createClient({
        url: `file:${filePath}`,
      });
    }

    // Unreachable given the scheme checks above; kept as a safety net.
    throw new Error(`Unsupported database URL format: ${dbUrl}`);
  } catch (error) {
    log(`Database connection error: ${error.message}`, "error");
    throw error;
  }
}
/**
 * Statement class to emulate the better-sqlite3 interface on top of the
 * async libsql client. Positional `?` placeholders are rewritten to named
 * parameters once at construction time.
 */
class Statement {
  constructor(client, sql) {
    this.client = client;
    this.sql = sql;
    // libsql's positional binding is unreliable here, so rewrite each `?`
    // placeholder as a named parameter (:param1, :param2, ...).
    let count = 0;
    this.convertedSql = sql.replace(/\?/g, () => `:param${++count}`);
    this.paramCount = count;
  }

  /**
   * Map positional argument values onto the :paramN names produced
   * in the constructor.
   * @param {Array<any>} params - positional values
   * @returns {Object} named-parameter map
   */
  #bind(params) {
    const named = {};
    params.forEach((value, index) => {
      named[`param${index + 1}`] = value;
    });
    return named;
  }

  /**
   * Run a SQL statement with parameters.
   * @param {...any} params - Parameters for the statement
   * @returns {Promise<Object>} Result with `changes` and `lastInsertRowid`
   */
  async run(...params) {
    try {
      const namedParams = this.#bind(params);
      logDebug(`Running SQL: ${this.convertedSql} with params: ${JSON.stringify(namedParams)}`);
      const result = await this.client.execute({
        sql: this.convertedSql,
        args: namedParams,
      });
      return {
        changes: result.rowsAffected || 0,
        lastInsertRowid: result.lastInsertRowid,
      };
    } catch (error) {
      log(`Error running SQL: ${this.sql}`, "error");
      throw error;
    }
  }

  /**
   * Get a single row as an object.
   * @param {...any} params - Parameters for the statement
   * @returns {Promise<Object|undefined>} First row or undefined
   */
  async get(...params) {
    try {
      const namedParams = this.#bind(params);
      logDebug(`Getting row with SQL: ${this.convertedSql} with params: ${JSON.stringify(namedParams)}`);
      const result = await this.client.execute({
        sql: this.convertedSql,
        args: namedParams,
      });
      return result.rows[0] || undefined;
    } catch (error) {
      log(`Error getting row with SQL: ${this.sql}`, "error");
      throw error;
    }
  }

  /**
   * Get all rows as objects.
   * @param {...any} params - Parameters for the statement
   * @returns {Promise<Array<Object>>} Array of row objects
   */
  async all(...params) {
    try {
      const namedParams = this.#bind(params);
      logDebug(`Getting all rows with SQL: ${this.convertedSql} with params: ${JSON.stringify(namedParams)}`);
      const result = await this.client.execute({
        sql: this.convertedSql,
        args: namedParams,
      });
      return result.rows || [];
    } catch (error) {
      log(`Error getting all rows with SQL: ${this.sql}`, "error");
      throw error;
    }
  }
}
/**
 * Create a database adapter that emulates the better-sqlite3 interface
 * on top of a libsql client.
 * @returns {Object} Adapter exposing prepare/exec/close
 */
function createTursoAdapter() {
  const client = createTursoClient();

  return {
    /**
     * Prepare a SQL statement.
     * @param {string} sql - SQL statement
     * @returns {Statement} Statement object
     */
    prepare(sql) {
      return new Statement(client, sql);
    },

    /**
     * Execute one or more semicolon-separated SQL statements in order.
     * @param {string} sql - SQL statement(s)
     * @returns {Promise<void>}
     */
    async exec(sql) {
      logDebug(`Executing SQL: ${sql}`);
      try {
        // The client executes a single statement per call, so split on semicolons.
        const statements = sql.split(";").filter((stmt) => stmt.trim());
        for (const statement of statements) {
          const trimmed = statement.trim();
          if (!trimmed) {
            continue;
          }
          try {
            await client.execute({ sql: trimmed });
          } catch (stmtError) {
            log(`Error executing statement: ${trimmed}`, "error");
            throw stmtError;
          }
        }
      } catch (error) {
        log(`Error executing SQL: ${sql}`, "error");
        throw error;
      }
    },

    /**
     * Close the database connection. No-op: the Turso client has no close
     * method; kept for better-sqlite3 API compatibility.
     * @returns {Promise<void>}
     */
    async close() {
      log("Closing database connection");
    },
  };
}
// Shared database adapter handle (see createTursoAdapter); null until initialized.
let db = null;
// MCP Server instance; null until initialized.
let serverInstance = null;
// Define all memory tools
const MEMORY_TOOLS = {
// System tools
BANNER: {
name: "generateBanner",
description: "Generates a banner containing memory system statistics and status",
inputSchema: {
type: "object",
properties: {}
}
},
HEALTH: {
name: "checkHealth",
description: "Checks the health of the memory system and its database",
inputSchema: {
type: "object",
properties: {}
}
},
// Unified tool for beginning of conversation
INIT_CONVERSATION: {
name: "initConversation",
description: "Initializes a conversation by storing the user message, generating a banner, and retrieving context in one operation",
inputSchema: {
type: "object",
properties: {
content: {
type: "string",
description: "Content of the user message"
},
importance: {
type: "string",
description: "Importance level (low, medium, high)",
default: "low"
},
metadata: {
type: "object",
description: "Optional metadata for the message",
additionalProperties: true
}
},
required: ["content"]
}
},
// Unified tool for ending a conversation
END_CONVERSATION: {
name: "endConversation",
description: "Ends a conversation by storing the assistant message, recording a milestone, and logging an episode in one operation",
inputSchema: {
type: "object",
properties: {
content: {
type: "string",
description: "Content of the assistant's final message"
},
milestone_title: {
type: "string",
description: "Title of the milestone to record"
},
milestone_description: {
type: "string",
description: "Description of what was accomplished"
},
importance: {
type: "string",
description: "Importance level (low, medium, high)",
default: "medium"
},
metadata: {
type: "object",
description: "Optional metadata",
additionalProperties: true
}
},
required: ["content", "milestone_title", "milestone_description"]
}
},
// Short-term memory tools
STORE_USER_MESSAGE: {
name: "storeUserMessage",
description: "Stores a user message in the short-term memory",
inputSchema: {
type: "object",
properties: {
content: {
type: "string",
description: "Content of the message"
},
importance: {
type: "string",
description: "Importance level (low, medium, high)",
default: "low"
},
metadata: {
type: "object",
description: "Optional metadata for the message",
additionalProperties: true
}
},
required: ["content"]
}
},
STORE_ASSISTANT_MESSAGE: {
name: "storeAssistantMessage",
description: "Stores an assistant message in the short-term memory",
inputSchema: {
type: "object",
properties: {
content: {
type: "string",
description: "Content of the message"
},
importance: {
type: "string",
description: "Importance level (low, medium, high)",
default: "low"
},
metadata: {
type: "object",
description: "Optional metadata for the message",
additionalProperties: true
}
},
required: ["content"]
}
},
TRACK_ACTIVE_FILE: {
name: "trackActiveFile",
description: "Tracks an active file being accessed by the user",
inputSchema: {
type: "object",
properties: {
filename: {
type: "string",
description: "Path to the file being tracked"
},
action: {
type: "string",
description: "Action performed on the file (open, edit, close, etc.)"
},
metadata: {
type: "object",
description: "Optional metadata for the file",
additionalProperties: true
}
},
required: ["filename", "action"]
}
},
GET_RECENT_MESSAGES: {
name: "getRecentMessages",
description: "Retrieves recent messages from the short-term memory",
inputSchema: {
type: "object",
properties: {
limit: {
type: "number",
description: "Maximum number of messages to retrieve",
default: 10
},
importance: {
type: "string",
description: "Filter by importance level (low, medium, high)"
}
}
}
},
GET_ACTIVE_FILES: {
name: "getActiveFiles",
description: "Retrieves active files from the short-term memory",
inputSchema: {
type: "object",
properties: {
limit: {
type: "number",
description: "Maximum number of files to retrieve",
default: 10
}
}
}
},
// Long-term memory tools
// Tool: persists a project milestone. Long-term entries default to
// "medium" importance (short-term messages default to "low").
STORE_MILESTONE: {
  name: "storeMilestone",
  description: "Stores a project milestone in the long-term memory",
  inputSchema: {
    type: "object",
    properties: {
      title: {
        type: "string",
        description: "Title of the milestone"
      },
      description: {
        type: "string",
        description: "Description of the milestone"
      },
      importance: {
        type: "string",
        description: "Importance level (low, medium, high)",
        default: "medium"
      },
      metadata: {
        type: "object",
        description: "Optional metadata for the milestone",
        additionalProperties: true
      }
    },
    required: ["title", "description"]
  }
},
// Tool: persists a project decision. `reasoning` is optional — only
// `title` and `content` are required.
STORE_DECISION: {
  name: "storeDecision",
  description: "Stores a project decision in the long-term memory",
  inputSchema: {
    type: "object",
    properties: {
      title: {
        type: "string",
        description: "Title of the decision"
      },
      content: {
        type: "string",
        description: "Content of the decision"
      },
      reasoning: {
        type: "string",
        description: "Reasoning behind the decision"
      },
      importance: {
        type: "string",
        description: "Importance level (low, medium, high)",
        default: "medium"
      },
      metadata: {
        type: "object",
        description: "Optional metadata for the decision",
        additionalProperties: true
      }
    },
    required: ["title", "content"]
  }
},
// Tool: persists a project requirement (same shape as storeDecision
// minus the `reasoning` field).
STORE_REQUIREMENT: {
  name: "storeRequirement",
  description: "Stores a project requirement in the long-term memory",
  inputSchema: {
    type: "object",
    properties: {
      title: {
        type: "string",
        description: "Title of the requirement"
      },
      content: {
        type: "string",
        description: "Content of the requirement"
      },
      importance: {
        type: "string",
        description: "Importance level (low, medium, high)",
        default: "medium"
      },
      metadata: {
        type: "object",
        description: "Optional metadata for the requirement",
        additionalProperties: true
      }
    },
    required: ["title", "content"]
  }
},
// Episodic memory tools
// Tool: records a discrete action in episodic memory, attributed to an
// actor (user/assistant/system). `context` optionally tags the episode
// so getRecentEpisodes can filter on it.
RECORD_EPISODE: {
  name: "recordEpisode",
  description: "Records an episode (action) in the episodic memory",
  inputSchema: {
    type: "object",
    properties: {
      actor: {
        type: "string",
        description: "Actor performing the action (user, assistant, system)"
      },
      action: {
        type: "string",
        description: "Type of action performed"
      },
      content: {
        type: "string",
        description: "Content or details of the action"
      },
      importance: {
        type: "string",
        description: "Importance level (low, medium, high)",
        default: "low"
      },
      context: {
        type: "string",
        description: "Context for the episode"
      }
    },
    required: ["actor", "action", "content"]
  }
},
// Tool: reads back recent episodes; both arguments are optional.
GET_RECENT_EPISODES: {
  name: "getRecentEpisodes",
  description: "Retrieves recent episodes from the episodic memory",
  inputSchema: {
    type: "object",
    properties: {
      limit: {
        type: "number",
        description: "Maximum number of episodes to retrieve",
        default: 10
      },
      context: {
        type: "string",
        description: "Filter by context"
      }
    }
  }
},
// Context tools
// Tool: aggregates context across all memory systems; the optional
// `query` enables semantic search over that context.
GET_COMPREHENSIVE_CONTEXT: {
  name: "getComprehensiveContext",
  description: "Retrieves comprehensive context from all memory systems",
  inputSchema: {
    type: "object",
    properties: {
      query: {
        type: "string",
        description: "Optional query for semantic search to find relevant context"
      }
    }
  }
},
// Tool: returns memory-system statistics; takes no arguments.
GET_MEMORY_STATS: {
  name: "getMemoryStats",
  description: "Retrieves statistics about the memory system",
  inputSchema: {
    type: "object",
    properties: {}
  }
},
// Vector management tool
MANAGE_VECTOR: {
name: "manageVector",
description: "Unified tool for managing vector embeddings with operations for store, search, update, and delete",
inputSchema: {
type: "object",
properties: {
operation: {
type: "string",
description: "Operation to perform (store, search, update, delete)",
enum: ["store", "search", "update", "delete"]
},
contentId: {
type: "number",
description: "ID of the content this vector represents (for store, update, delete)"
},
contentType: {
type: "string",
description: "Type of content (message, file, snippet, etc.)"
},
vector: {
type: "array",
description: "Vector data as array of numbers (for store, update) or query vector (for search)"
},
metadata: {
type: "object",
description: "Additional info about the vector (optional)",
additionalProperties: true
},
vectorId: {
type: "number",
description: "ID of the vector to update or delete"
},
limit: {
type: "number",
description: "Maximum number of results for search operation",
default: 10
},
threshold: {
type: "number",
description: "Similarity threshold for search operation",
default: 0.7
}
},
required: ["operation"]
}
},
// Tool: runs vector-storage diagnostics; takes no arguments.
DIAGNOSE_VECTORS: {
  name: "diagnoseVectors",
  description: "Run diagnostics on the vector storage system to identify issues",
  inputSchema: {
    type: "object",
    properties: {}
  }
}
};
// In-memory store used as a fallback when database initialization fails
// (missing credentials or an unreachable Turso instance). One array per
// logical table, mirroring the SQL schema created in initializeDatabase.
const inMemoryStore = {
  messages: [],      // short-term: user/assistant messages
  activeFiles: [],   // short-term: recently tracked files
  milestones: [],    // long-term: project milestones
  decisions: [],     // long-term: project decisions
  requirements: [],  // long-term: project requirements
  episodes: []       // episodic: recorded actor/action events
};
// Set to true by initializeDatabase() when it falls back to the
// in-memory store instead of a real database connection.
let useInMemory = false;
// Initialize database
async function initializeDatabase() {
try {
// Check if environment variables are set (from either process.env or .env.local)
if (!process.env.TURSO_DATABASE_URL) {
log('TURSO_DATABASE_URL environment variable not found - using in-memory database', 'error');
useInMemory = true;
return null;
}
if (process.env.TURSO_DATABASE_URL.startsWith('libsql://') && !process.env.TURSO_AUTH_TOKEN) {
log('TURSO_AUTH_TOKEN environment variable required for remote Turso database but not found - using in-memory database', 'error');
useInMemory = true;
return null;
}
log('Initializing database with Turso');
db = createTursoAdapter();
// Test connection
try {
const testResult = await db.prepare('SELECT 1 as test').get();
log(`Database connection test successful: ${JSON.stringify(testResult)}`);
} catch (error) {
log(`Failed to connect to Turso database: ${error.message}`, "error");
log('Falling back to in-memory database', 'error');
useInMemory = true;
return null;
}
// Create tables if they don't exist
const tables = {
messages: `
CREATE TABLE IF NOT EXISTS messages (
id INTEGER PRIMARY KEY AUTOINCREMENT,
role TEXT NOT NULL,
content TEXT NOT NULL,
created_at INTEGER NOT NULL,
metadata TEXT,
importance TEXT DEFAULT 'low'
)
`,
active_files: `
CREATE TABLE IF NOT EXISTS active_files (
id INTEGER PRIMARY KEY AUTOINCREMENT,
filename TEXT UNIQUE,
last_accessed INTEGER,
metadata TEXT
)
`,
milestones: `
CREATE TABLE IF NOT EXISTS milestones (
id INTEGER PRIMARY KEY AUTOINCREMENT,
title TEXT,
description TEXT,
importance TEXT DEFAULT 'medium',
created_at INTEGER,
metadata TEXT
)
`,
decisions: `
CREATE TABLE IF NOT EXISTS decisions (
id INTEGER PRIMARY KEY AUTOINCREMENT,
title TEXT,
content TEXT,
reasoning TEXT,
importance TEXT DEFAULT 'medium',
created_at INTEGER,
metadata TEXT
)
`,
requirements: `
CREATE TABLE IF NOT EXISTS requirements (
id INTEGER PRIMARY KEY AUTOINCREMENT,
title TEXT,
content TEXT,
importance TEXT DEFAULT 'medium',
created_at INTEGER,
metadata TEXT
)
`,
episodes: `
CREATE TABLE IF NOT EXISTS episodes (
id INTEGER PRIMARY KEY AUTOINCREMENT,
actor TEXT,
action TEXT,
content TEXT,
timestamp INTEGER,
importance TEXT DEFAULT 'low',
context TEXT,
metadata TEXT
)
`,
// New vector-based tables for codebase indexing
vectors: `
CREATE TABLE IF NOT EXISTS vectors (
id INTEGER PRIMARY KEY AUTOINCREMENT,
content_id INTEGER NOT NULL,
content_type TEXT NOT NULL,
vector F32_BLOB(128) NOT NULL,
created_at INTEGER NOT NULL,
metadata TEXT
)
`,
code_files: `
CREATE TABLE IF NOT EXISTS code_files (
id INTEGER PRIMARY KEY AUTOINCREMENT,
file_path TEXT UNIQUE,
language TEXT,
last_indexed INTEGER,
size INTEGER,
metadata TEXT
)
`,
code_snippets: `
CREATE TABLE IF NOT EXISTS code_snippets (
id INTEGER PRIMARY KEY AUTOINCREMENT,
file_id INTEGER,
start_line INTEGER,
end_line INTEGER,
content TEXT,
symbol_type TEXT,
metadata TEXT,
FOREIGN KEY (file_id) REFERENCES code_files(id)
)
`
};
// Verify or create each table
for (const [name, createStatement] of Object.entries(tables)) {
try {
await db.prepare(createStatement).run();
log(`Table ${name} verified/created`);
// For vectors table, do an additional check
if (name === 'vectors') {
const tableInfo = await db.prepare("PRAGMA table_info(vectors)").all();
log(`VECTOR DEBUG: Vector table schema: ${JSON.stringify(tableInfo)}`, "info");
}
} catch (error) {
log(`Failed to create table ${name}: ${error.message}`, "error");
throw error;
}
}
// Create vector indexes for efficient similarity search
try {
log("VECTOR DEBUG: Initializing vector indexes", "info");
const indexResult = await createVectorIndexes();
if (indexResult) {
log('VECTOR SUCCESS: Vector indexes setup completed successfully', "info");
} else {
log('VECTOR WARNING: Vector indexes setup partially completed with issues', "error");
}
} catch (indexError) {
log(`VECTOR ERROR: Vector indexes creation failed: ${indexError.message}`, "error");
log('VECTOR WARNING: Vector operations may be slower or unavailable', "error");
}
// Create a test_connection table to verify write access
try {
await db.prepare(`
CREATE TABLE IF NOT EXISTS test_connection (
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT,
created_at TEXT
)
`).run();
const now = new Date().toISOString();
await db.prepare(`
INSERT INTO test_connection (name, created_at)
VALUES ('test', ?)
`).run(now);
const testResult = await db.prepare('SELECT * FROM test_connection ORDER BY id DESC LIMIT 1').get();
log(`Write test successful: ${JSON.stringify(testResult)}`);
} catch (error) {
log(`Failed to write to database: ${error.message}`, "error");
throw error;
}
// Perform a quick test of the vector storage
try {
// Generate a simple test vector
log("VECTOR DEBUG: Testing vector storage during initialization", "info");
const testVector = new Float32Array(16).fill(0.1); // Simple test vector