@stackmemoryai/stackmemory
Version:
Lossless, project-scoped memory for AI coding tools. Durable context across sessions with 56 MCP tools, FTS5 search, conductor orchestrator, loop/watch monitoring, snapshot capture, pre-flight overlap checks, Claude/Codex/OpenCode wrappers, Linear sync, and more.
349 lines (348 loc) • 10.9 kB
JavaScript
// CJS-compatibility shim injected by the bundler: reconstructs __filename and
// __dirname (which do not exist in ES modules) from import.meta.url.
// NOTE(review): neither value is referenced in the visible code below —
// presumably kept for code elsewhere in the original pre-bundle module.
import { fileURLToPath as __fileURLToPath } from 'url';
import { dirname as __pathDirname } from 'path';
const __filename = __fileURLToPath(import.meta.url);
const __dirname = __pathDirname(__filename);
import { Pool } from "pg";
import { logger } from "../monitoring/logger.js";
/**
 * PostgreSQL storage adapter: manages a pg connection pool, creates the base
 * schema (projects / sessions / traces / context_frames / decisions), and
 * optionally enables the TimescaleDB and pgvector extensions.
 */
class PostgresAdapter {
  pool;
  config;
  isInitialized = false;
  // Dedicated client pinned for the duration of an explicit transaction.
  // pool.query() hands each statement to an ARBITRARY pooled client, so a
  // BEGIN issued through the pool is not guaranteed to share a connection
  // with the statements that follow it; a pinned client is required for
  // transactions to actually work.
  txClient = null;
  /**
   * @param {object} config - node-postgres Pool options plus adapter flags:
   *   enableTimescale, enablePgvector, vectorDimensions (default 1536).
   */
  constructor(config) {
    this.config = {
      ...config,
      vectorDimensions: config.vectorDimensions || 1536
      // OpenAI ada-002 dimensions
    };
    this.pool = new Pool(this.config);
  }
  /**
   * Verifies connectivity, creates the base schema, and enables optional
   * extensions. Idempotent: schema creation uses IF NOT EXISTS throughout.
   * @throws if the database is unreachable or schema creation fails.
   */
  async connect() {
    try {
      // pool.connect() checks a client OUT of the pool; it must be released
      // or it leaks permanently (the original code never released it).
      const client = await this.pool.connect();
      client.release();
      await this.initialize();
      this.isInitialized = true;
      logger.info("PostgreSQL connected successfully");
    } catch (error) {
      logger.error(
        "Failed to connect to PostgreSQL",
        error instanceof Error ? error : void 0
      );
      throw error;
    }
  }
  /** Closes all pooled connections and marks the adapter disconnected. */
  async disconnect() {
    if (this.txClient) {
      // Don't strand an open transaction's client during shutdown.
      this.txClient.release();
      this.txClient = null;
    }
    await this.pool.end();
    this.isInitialized = false;
    logger.info("PostgreSQL disconnected");
  }
  /**
   * Runs a parameterized query. While a transaction is open (see
   * beginTransaction) the query is routed through the pinned transaction
   * client so it participates in that transaction.
   * @param {string} query - SQL text with $1.. placeholders.
   * @param {any[]} [params] - positional parameter values.
   * @returns {Promise<{rows: any[], rowCount: number, fields?: Array<{name: string, type: string}>}>}
   * @throws rethrows any driver error after logging it.
   */
  async execute(query, params) {
    try {
      const executor = this.txClient ?? this.pool;
      const result = await executor.query(query, params);
      return {
        rows: result.rows,
        // rowCount can be null for non-row-returning commands; normalize to 0.
        rowCount: result.rowCount || 0,
        fields: result.fields?.map((f) => ({
          name: f.name,
          type: f.dataTypeID.toString()
        }))
      };
    } catch (error) {
      logger.error(
        "Query execution failed",
        error instanceof Error ? error : new Error(String(error)),
        { query }
      );
      throw error;
    }
  }
  /**
   * Opens a transaction on a dedicated client. (Issuing BEGIN via
   * pool.query() would start the transaction on one connection while later
   * statements run on others.)
   * @throws if a transaction is already in progress.
   */
  async beginTransaction() {
    if (this.txClient) {
      throw new Error("Transaction already in progress");
    }
    this.txClient = await this.pool.connect();
    try {
      await this.txClient.query("BEGIN");
    } catch (error) {
      // Failed to even start — return the client so the pool isn't starved.
      this.txClient.release();
      this.txClient = null;
      throw error;
    }
  }
  /** Commits the open transaction and returns its client to the pool. No-op if none is open. */
  async commit() {
    if (!this.txClient) return;
    try {
      await this.txClient.query("COMMIT");
    } finally {
      this.txClient.release();
      this.txClient = null;
    }
  }
  /** Rolls back the open transaction and returns its client to the pool. No-op if none is open. */
  async rollback() {
    if (!this.txClient) return;
    try {
      await this.txClient.query("ROLLBACK");
    } finally {
      this.txClient.release();
      this.txClient = null;
    }
  }
  /** @returns {boolean} true after a successful connect(), false after disconnect(). */
  isConnected() {
    return this.isInitialized;
  }
  /** Creates the schema and enables whichever optional extensions are configured. */
  async initialize() {
    await this.createBaseSchema();
    if (this.config.enableTimescale) {
      await this.enableTimescale();
    }
    if (this.config.enablePgvector) {
      await this.enablePgvector();
    }
  }
  /** Creates the five core tables and their supporting indexes (all idempotent). */
  async createBaseSchema() {
    const queries = [
      // Projects table
      `CREATE TABLE IF NOT EXISTS projects (
        id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
        name VARCHAR(255) NOT NULL,
        path TEXT NOT NULL,
        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
        updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
        metadata JSONB
      )`,
      // Sessions table
      `CREATE TABLE IF NOT EXISTS sessions (
        id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
        project_id UUID REFERENCES projects(id) ON DELETE CASCADE,
        branch VARCHAR(255),
        started_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
        ended_at TIMESTAMP,
        metadata JSONB
      )`,
      // Traces table
      `CREATE TABLE IF NOT EXISTS traces (
        id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
        session_id UUID REFERENCES sessions(id) ON DELETE CASCADE,
        timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
        type VARCHAR(100) NOT NULL,
        data JSONB NOT NULL,
        metadata JSONB
      )`,
      // Context frames table
      `CREATE TABLE IF NOT EXISTS context_frames (
        id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
        project_id UUID REFERENCES projects(id) ON DELETE CASCADE,
        branch VARCHAR(255),
        content TEXT NOT NULL,
        summary TEXT,
        timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
        type VARCHAR(100) NOT NULL,
        metadata JSONB
      )`,
      // Decisions table
      `CREATE TABLE IF NOT EXISTS decisions (
        id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
        project_id UUID REFERENCES projects(id) ON DELETE CASCADE,
        session_id UUID REFERENCES sessions(id) ON DELETE CASCADE,
        decision TEXT NOT NULL,
        rationale TEXT,
        timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
        metadata JSONB
      )`
    ];
    for (const query of queries) {
      await this.execute(query);
    }
    const indexes = [
      `CREATE INDEX IF NOT EXISTS idx_traces_session_timestamp
        ON traces(session_id, timestamp)`,
      `CREATE INDEX IF NOT EXISTS idx_context_project_branch
        ON context_frames(project_id, branch)`,
      `CREATE INDEX IF NOT EXISTS idx_traces_type
        ON traces(type)`,
      `CREATE INDEX IF NOT EXISTS idx_context_frames_timestamp
        ON context_frames(timestamp)`,
      `CREATE INDEX IF NOT EXISTS idx_decisions_project_session
        ON decisions(project_id, session_id)`
    ];
    for (const index of indexes) {
      await this.execute(index);
    }
  }
  /**
   * Converts traces and context_frames into TimescaleDB hypertables.
   * Best-effort: failure (e.g. extension not installed on the server) is
   * logged as a warning, not thrown.
   */
  async enableTimescale() {
    try {
      await this.execute("CREATE EXTENSION IF NOT EXISTS timescaledb");
      await this.execute(`
        SELECT create_hypertable('traces', 'timestamp',
          if_not_exists => TRUE,
          chunk_time_interval => INTERVAL '1 day'
        )
      `);
      await this.execute(`
        SELECT create_hypertable('context_frames', 'timestamp',
          if_not_exists => TRUE,
          chunk_time_interval => INTERVAL '7 days'
        )
      `);
      logger.info("TimescaleDB extension enabled");
    } catch (error) {
      logger.warn(
        "Failed to enable TimescaleDB",
        error instanceof Error ? error : void 0
      );
    }
  }
  /**
   * Adds pgvector embedding columns (sized by config.vectorDimensions) and an
   * ivfflat cosine index on context_frames. Best-effort like enableTimescale.
   * vectorDimensions is interpolated into DDL (not parameterizable); it comes
   * from adapter config, not user input.
   */
  async enablePgvector() {
    try {
      await this.execute("CREATE EXTENSION IF NOT EXISTS vector");
      await this.execute(`
        ALTER TABLE context_frames
        ADD COLUMN IF NOT EXISTS embedding vector(${this.config.vectorDimensions})
      `);
      await this.execute(`
        ALTER TABLE traces
        ADD COLUMN IF NOT EXISTS embedding vector(${this.config.vectorDimensions})
      `);
      await this.execute(`
        CREATE INDEX IF NOT EXISTS idx_context_embedding
        ON context_frames
        USING ivfflat (embedding vector_cosine_ops)
        WITH (lists = 100)
      `);
      logger.info("pgvector extension enabled");
    } catch (error) {
      logger.warn(
        "Failed to enable pgvector",
        error instanceof Error ? error : void 0
      );
    }
  }
  // Data access methods
  /**
   * Inserts one trace row.
   * @param {{id, sessionId, timestamp, type, data, metadata?}} trace
   */
  async saveTrace(trace) {
    await this.execute(
      `INSERT INTO traces (id, session_id, timestamp, type, data, metadata)
       VALUES ($1, $2, $3, $4, $5, $6)`,
      [
        trace.id,
        trace.sessionId,
        trace.timestamp,
        trace.type,
        JSON.stringify(trace.data),
        trace.metadata ? JSON.stringify(trace.metadata) : null
      ]
    );
  }
  /**
   * Inserts one context frame row.
   * @param {{id, projectId, branch?, content, timestamp, type, metadata?}} context
   */
  async saveContext(context) {
    await this.execute(
      `INSERT INTO context_frames (id, project_id, branch, content, timestamp, type, metadata)
       VALUES ($1, $2, $3, $4, $5, $6, $7)`,
      [
        context.id,
        context.projectId,
        context.branch || null,
        context.content,
        context.timestamp,
        context.type,
        context.metadata ? JSON.stringify(context.metadata) : null
      ]
    );
  }
  /**
   * @param {string} sessionId
   * @param {number} [limit=100]
   * @returns newest-first traces for the session, mapped to camelCase.
   */
  async getRecentTraces(sessionId, limit = 100) {
    const result = await this.execute(
      `SELECT * FROM traces
       WHERE session_id = $1
       ORDER BY timestamp DESC
       LIMIT $2`,
      [sessionId, limit]
    );
    return result.rows.map((row) => ({
      id: row.id,
      sessionId: row.session_id,
      timestamp: row.timestamp,
      type: row.type,
      data: row.data,
      metadata: row.metadata
    }));
  }
  /**
   * @param {string} projectId
   * @param {string} [branch] - when given, restricts to that branch.
   * @param {number} [limit=50]
   * @returns newest-first context frames, mapped to camelCase.
   */
  async getRecentContext(projectId, branch, limit = 50) {
    const query = branch ? `SELECT * FROM context_frames
       WHERE project_id = $1 AND branch = $2
       ORDER BY timestamp DESC
       LIMIT $3` : `SELECT * FROM context_frames
       WHERE project_id = $1
       ORDER BY timestamp DESC
       LIMIT $2`;
    const params = branch ? [projectId, branch, limit] : [projectId, limit];
    const result = await this.execute(query, params);
    return result.rows.map((row) => ({
      id: row.id,
      projectId: row.project_id,
      branch: row.branch,
      content: row.content,
      timestamp: row.timestamp,
      type: row.type,
      metadata: row.metadata
    }));
  }
  // Hybrid SQLite/PostgreSQL migration helper
  /**
   * Copies projects, sessions, traces (batched), and context_frames from a
   * better-sqlite3 database handle into PostgreSQL inside one transaction.
   * Rolls back everything on any failure.
   * @param {object} sqliteDb - a better-sqlite3-style handle (prepare().all()/get()).
   */
  async migrateFromSQLite(sqliteDb) {
    logger.info("Starting migration from SQLite to PostgreSQL");
    try {
      await this.beginTransaction();
      const projects = sqliteDb.prepare("SELECT * FROM projects").all();
      for (const project of projects) {
        await this.execute(
          "INSERT INTO projects (id, name, path, created_at, updated_at, metadata) VALUES ($1, $2, $3, $4, $5, $6)",
          [
            project.id,
            project.name,
            project.path,
            project.created_at,
            project.updated_at,
            project.metadata
          ]
        );
      }
      const sessions = sqliteDb.prepare("SELECT * FROM sessions").all();
      for (const session of sessions) {
        await this.execute(
          "INSERT INTO sessions (id, project_id, branch, started_at, ended_at, metadata) VALUES ($1, $2, $3, $4, $5, $6)",
          [
            session.id,
            session.project_id,
            session.branch,
            session.started_at,
            session.ended_at,
            session.metadata
          ]
        );
      }
      const traceCount = sqliteDb.prepare("SELECT COUNT(*) as count FROM traces").get();
      const batchSize = 1e3;
      for (let offset = 0; offset < traceCount.count; offset += batchSize) {
        // ORDER BY makes LIMIT/OFFSET pagination deterministic; without it
        // SQLite may return rows in any order and batches can skip/duplicate.
        const traces = sqliteDb.prepare("SELECT * FROM traces ORDER BY id LIMIT ? OFFSET ?").all(batchSize, offset);
        for (const trace of traces) {
          await this.execute(
            "INSERT INTO traces (id, session_id, timestamp, type, data, metadata) VALUES ($1, $2, $3, $4, $5, $6)",
            [
              trace.id,
              trace.session_id,
              trace.timestamp,
              trace.type,
              trace.data,
              trace.metadata
            ]
          );
        }
        logger.info(
          `Migrated ${offset + traces.length}/${traceCount.count} traces`
        );
      }
      const contexts = sqliteDb.prepare("SELECT * FROM context_frames").all();
      for (const context of contexts) {
        await this.execute(
          "INSERT INTO context_frames (id, project_id, branch, content, summary, timestamp, type, metadata) VALUES ($1, $2, $3, $4, $5, $6, $7, $8)",
          [
            context.id,
            context.project_id,
            context.branch,
            context.content,
            context.summary,
            context.timestamp,
            context.type,
            context.metadata
          ]
        );
      }
      await this.commit();
      logger.info("Migration completed successfully");
    } catch (error) {
      await this.rollback();
      logger.error(
        "Migration failed",
        error instanceof Error ? error : void 0
      );
      throw error;
    }
  }
}
export {
PostgresAdapter
};