/**
 * @stackmemoryai/stackmemory
 *
 * Lossless, project-scoped memory for AI coding tools. Durable context across
 * sessions with 56 MCP tools, FTS5 search, conductor orchestrator, loop/watch
 * monitoring, snapshot capture, pre-flight overlap checks, Claude/Codex/OpenCode
 * wrappers, Linear sync, and more.
 */
// CommonJS-compatibility shim (typically injected by a bundler): recreate
// __filename/__dirname, which do not exist natively in ES modules.
// NOTE(review): neither binding is referenced in the visible code — presumably
// required by bundled dependencies; confirm before removing.
import { fileURLToPath as __fileURLToPath } from 'url';
import { dirname as __pathDirname } from 'path';
const __filename = __fileURLToPath(import.meta.url);
const __dirname = __pathDirname(__filename);
import { EventEmitter } from "events";
import { logger } from "../monitoring/logger.js";
import { DatabaseError, ErrorCode, wrapError } from "../errors/index.js";
/**
 * Orchestrates a batched, restartable data migration between two database
 * adapters (source -> target) for the fixed table set: frames, events, anchors.
 *
 * Emits: "progress", "completed", "failed", "paused", "resumed", "aborted".
 */
class MigrationManager extends EventEmitter {
  config;
  progress;
  isRunning = false;
  isPaused = false;
  abortController;

  /**
   * @param {object} config - Requires `sourceAdapter` and `targetAdapter`.
   *   Optional knobs (batchSize, retryAttempts, retryDelayMs, verifyData,
   *   enableDualWrite, progressCallback) are range-checked and defaulted.
   * @throws {DatabaseError} When the configuration is invalid.
   */
  constructor(config) {
    super();
    this.validateConfig(config);
    this.config = this.normalizeConfig(config);
    this.progress = this.initializeProgress();
  }

  /**
   * Validates user-supplied configuration ranges.
   *
   * Uses `!= null` rather than truthiness so that an explicit 0 is
   * range-checked instead of silently skipped: `batchSize: 0` would
   * otherwise pass validation, survive `??` normalization, and stall the
   * copy loop (offset would never advance).
   * @throws {DatabaseError}
   */
  validateConfig(config) {
    if (!config.sourceAdapter || !config.targetAdapter) {
      throw new DatabaseError(
        "Source and target adapters are required",
        ErrorCode.DB_MIGRATION_FAILED,
        { reason: "missing_adapters" }
      );
    }
    if (config.batchSize != null && (config.batchSize < 1 || config.batchSize > 10000)) {
      throw new DatabaseError(
        "Batch size must be between 1 and 10000",
        ErrorCode.DB_MIGRATION_FAILED,
        { batchSize: config.batchSize }
      );
    }
    if (config.retryAttempts != null && (config.retryAttempts < 0 || config.retryAttempts > 10)) {
      throw new DatabaseError(
        "Retry attempts must be between 0 and 10",
        ErrorCode.DB_MIGRATION_FAILED,
        { retryAttempts: config.retryAttempts }
      );
    }
    if (config.retryDelayMs != null && (config.retryDelayMs < 0 || config.retryDelayMs > 30000)) {
      throw new DatabaseError(
        "Retry delay must be between 0 and 30000ms",
        ErrorCode.DB_MIGRATION_FAILED,
        { retryDelayMs: config.retryDelayMs }
      );
    }
  }

  /** Fills in defaults for every optional config field (nullish-aware). */
  normalizeConfig(config) {
    return {
      ...config,
      batchSize: config.batchSize ?? 1000,
      retryAttempts: config.retryAttempts ?? 3,
      retryDelayMs: config.retryDelayMs ?? 1000,
      verifyData: config.verifyData ?? true,
      enableDualWrite: config.enableDualWrite ?? true,
      progressCallback: config.progressCallback ?? (() => {})
    };
  }

  /** Fresh progress record for a new migration run. */
  initializeProgress() {
    return {
      phase: "initializing",
      totalRecords: 0,
      processedRecords: 0,
      percentage: 0,
      startTime: new Date(),
      errors: [],
      warnings: []
    };
  }

  /**
   * Builds an ordered per-table migration plan and sets
   * `progress.totalRecords` from the source adapter's stats.
   * Tables whose row counts cannot be estimated are marked "skip".
   * @returns {Promise<Array>} Plan entries sorted by priority.
   */
  async planMigration() {
    logger.info("Planning migration strategy");
    const plan = [];
    const tables = ["frames", "events", "anchors"];
    // Fetch the stats snapshot once: every per-table estimate is derived
    // from the same getStats() result, so per-table calls were redundant.
    let stats;
    let statsError;
    try {
      stats = await this.config.sourceAdapter.getStats();
    } catch (error) {
      statsError = error;
    }
    for (const table of tables) {
      if (statsError !== undefined) {
        logger.warn(`Failed to estimate rows for table ${table}:`, statsError);
        plan.push({
          table,
          priority: this.getTablePriority(table),
          estimatedRows: 0,
          dependencies: this.getTableDependencies(table),
          strategy: "skip"
        });
      } else {
        plan.push({
          table,
          priority: this.getTablePriority(table),
          estimatedRows: this.estimateTableRows(table, stats),
          dependencies: this.getTableDependencies(table),
          strategy: "full"
        });
      }
    }
    plan.sort((a, b) => a.priority - b.priority);
    const totalRecords = plan.reduce((sum, p) => sum + p.estimatedRows, 0);
    this.progress.totalRecords = totalRecords;
    logger.info(
      `Migration plan: ${plan.length} tables, ~${totalRecords} records`
    );
    return plan;
  }

  /** Maps a table name to its row count in an adapter stats object. */
  estimateTableRows(table, stats) {
    switch (table) {
      case "frames":
        return stats.totalFrames || 0;
      case "events":
        return stats.totalEvents || 0;
      case "anchors":
        return stats.totalAnchors || 0;
      default:
        return 0;
    }
  }

  /** Lower number = migrated earlier; unknown tables sort last. */
  getTablePriority(table) {
    const priorities = { frames: 1, events: 2, anchors: 3 };
    return priorities[table] || 99;
  }

  /** Tables that must be migrated before this one (informational). */
  getTableDependencies(table) {
    const dependencies = {
      frames: [],
      events: ["frames"],
      anchors: ["frames"]
    };
    return dependencies[table] || [];
  }

  /**
   * Runs a full migration: validate adapters, plan, initialize target
   * schema, copy data, optionally verify, then finalize.
   * @param {object} strategy - type ("online"/"dual-write"/...), allowWrites,
   *   verifyIntegrity, fallbackOnError.
   * @throws {DatabaseError} Sanitized failure (original error attached as cause).
   */
  async migrate(strategy = {
    type: "online",
    allowWrites: true,
    verifyIntegrity: true,
    fallbackOnError: true
  }) {
    if (this.isRunning) {
      throw new DatabaseError(
        "Migration already in progress",
        ErrorCode.DB_MIGRATION_FAILED,
        { reason: "already_running" }
      );
    }
    this.isRunning = true;
    this.abortController = new AbortController();
    try {
      logger.info("Starting database migration", strategy);
      this.updateProgress({ phase: "initializing" });
      await this.validateAdapters();
      const plan = await this.planMigration();
      await this.initializeTargetSchema();
      if (strategy.type === "dual-write" && this.config.enableDualWrite) {
        await this.enableDualWrite();
      }
      this.updateProgress({ phase: "migrating" });
      await this.executeMigrationPlan(plan, strategy);
      if (strategy.verifyIntegrity) {
        this.updateProgress({ phase: "verifying" });
        await this.verifyDataIntegrity(plan);
      }
      this.updateProgress({ phase: "completing" });
      await this.completeMigration(strategy);
      this.updateProgress({ phase: "completed", percentage: 100 });
      logger.info("Migration completed successfully");
      this.emit("completed", this.progress);
    } catch (error) {
      // Capture the phase in which the failure occurred BEFORE overwriting
      // it, so the user-facing error reports where it failed, not "failed".
      const failedPhase = this.progress.phase;
      this.updateProgress({ phase: "failed" });
      const sanitizedError = this.sanitizeError(error);
      logger.error("Migration failed:", sanitizedError);
      if (strategy.fallbackOnError) {
        try {
          await this.rollbackMigration();
        } catch (rollbackError) {
          logger.error("Rollback failed:", this.sanitizeError(rollbackError));
        }
      }
      const userError = new DatabaseError(
        "Migration failed. Check logs for details.",
        ErrorCode.DB_MIGRATION_FAILED,
        { phase: failedPhase },
        error instanceof Error ? error : void 0
      );
      this.emit("failed", userError);
      throw userError;
    } finally {
      this.isRunning = false;
      this.abortController = void 0;
    }
  }

  /**
   * Connects (if needed) and pings both adapters; warns on schema-version
   * mismatch rather than failing, since the target is migrated later.
   * @throws {DatabaseError} When an adapter does not respond to ping.
   */
  async validateAdapters() {
    logger.debug("Validating database adapters");
    if (!this.config.sourceAdapter.isConnected()) {
      await this.config.sourceAdapter.connect();
    }
    if (!await this.config.sourceAdapter.ping()) {
      throw new DatabaseError(
        "Source adapter is not responding",
        ErrorCode.DB_CONNECTION_FAILED,
        { adapter: "source" }
      );
    }
    if (!this.config.targetAdapter.isConnected()) {
      await this.config.targetAdapter.connect();
    }
    if (!await this.config.targetAdapter.ping()) {
      throw new DatabaseError(
        "Target adapter is not responding",
        ErrorCode.DB_CONNECTION_FAILED,
        { adapter: "target" }
      );
    }
    const sourceVersion = await this.config.sourceAdapter.getSchemaVersion();
    const targetVersion = await this.config.targetAdapter.getSchemaVersion();
    if (sourceVersion !== targetVersion) {
      logger.warn(
        `Schema version mismatch: source=${sourceVersion}, target=${targetVersion}`
      );
      this.addWarning("Schema version mismatch detected");
    }
  }

  /** Creates the target schema; wraps adapter errors as DB_SCHEMA_ERROR. */
  async initializeTargetSchema() {
    logger.debug("Initializing target schema");
    try {
      await this.config.targetAdapter.initializeSchema();
    } catch (error) {
      logger.error("Failed to initialize target schema:", error);
      throw new DatabaseError(
        "Target schema initialization failed",
        ErrorCode.DB_SCHEMA_ERROR,
        { operation: "initializeSchema" },
        error instanceof Error ? error : void 0
      );
    }
  }

  /**
   * Records that dual-write mode is active. The actual write routing is
   * the application's responsibility; this only surfaces a warning.
   */
  async enableDualWrite() {
    logger.info("Enabling dual-write mode");
    this.addWarning(
      "Dual-write mode enabled - ensure application routes writes to both adapters"
    );
  }

  /** Migrates each non-skipped table in plan order; honors abort. */
  async executeMigrationPlan(plan, strategy) {
    for (const tablePlan of plan) {
      if (this.abortController?.signal.aborted) {
        throw new DatabaseError(
          "Migration aborted by user",
          ErrorCode.DB_MIGRATION_FAILED,
          { reason: "user_abort" }
        );
      }
      if (tablePlan.strategy === "skip") {
        logger.info(`Skipping table: ${tablePlan.table}`);
        continue;
      }
      this.updateProgress({ currentTable: tablePlan.table });
      await this.migrateTable(tablePlan, strategy);
    }
  }

  /**
   * Copies one table page-by-page from source to target.
   *
   * Fixes two defects in the previous implementation:
   * - pause() used to `break` out of this loop, permanently skipping the
   *   rest of the table; it now waits until resume() (or abort).
   * - after a successful retryBatch() the offset was not advanced, so the
   *   same page was fetched and inserted a second time (duplicates), and
   *   the transient failure was recorded as a hard error that later made
   *   verifyDataIntegrity() fail. Transient failures are now warnings and
   *   the retried page is skipped over.
   */
  async migrateTable(plan, _strategy) {
    logger.info(`Migrating table: ${plan.table} (~${plan.estimatedRows} rows)`);
    let offset = 0;
    let migratedRows = 0;
    while (true) {
      // Honor pause without abandoning the table: wait until resumed.
      while (this.isPaused && !this.abortController?.signal.aborted) {
        await this.sleep(100);
      }
      if (this.abortController?.signal.aborted) {
        break;
      }
      try {
        const batch = await this.getBatch(
          plan.table,
          offset,
          this.config.batchSize
        );
        if (batch.length === 0) {
          break;
        }
        await this.migrateBatch(plan.table, batch);
        migratedRows += batch.length;
        offset += this.config.batchSize;
        this.progress.processedRecords += batch.length;
        this.updateProgressPercentage();
        // Adaptive backpressure between batches.
        await this.sleep(this.calculateAdaptiveDelay());
      } catch (error) {
        if (this.config.retryAttempts > 0) {
          this.addWarning(`Batch migration failed, retrying: ${error}`, plan.table);
          const retriedRows = await this.retryBatch(
            plan.table,
            offset,
            this.config.batchSize
          );
          // Advance past the retried page so it is not migrated twice.
          migratedRows += retriedRows;
          this.progress.processedRecords += retriedRows;
          offset += this.config.batchSize;
          this.updateProgressPercentage();
          if (retriedRows === 0) {
            break;
          }
        } else {
          this.addError(plan.table, `Batch migration failed: ${error}`);
          throw wrapError(
            error,
            `Batch migration failed for table ${plan.table}`,
            ErrorCode.DB_MIGRATION_FAILED,
            { table: plan.table, offset }
          );
        }
      }
    }
    logger.info(
      `Completed migrating table ${plan.table}: ${migratedRows} rows`
    );
  }

  /**
   * Fetches one page of rows from the source, with the table name
   * allow-listed and offset/limit clamped to sane bounds.
   * @throws {DatabaseError} On an unknown table name.
   */
  async getBatch(table, offset, limit) {
    const allowedTables = ["frames", "events", "anchors"];
    if (!allowedTables.includes(table)) {
      throw new DatabaseError(
        `Invalid table name: ${table}`,
        ErrorCode.DB_QUERY_FAILED,
        { table, allowedTables }
      );
    }
    const safeLimit = Math.max(1, Math.min(limit, 10000));
    const safeOffset = Math.max(0, offset);
    return this.config.sourceAdapter.getTablePage(
      table,
      safeOffset,
      safeLimit
    );
  }

  /**
   * Inserts a validated batch into the target inside a single transaction,
   * so a mid-batch failure leaves no partial page behind.
   * @throws {DatabaseError} On an unknown table name or invalid row data.
   */
  async migrateBatch(table, batch) {
    const allowedTables = ["frames", "events", "anchors"];
    if (!allowedTables.includes(table)) {
      throw new DatabaseError(
        `Invalid table name: ${table}`,
        ErrorCode.DB_INSERT_FAILED,
        { table }
      );
    }
    await this.config.targetAdapter.inTransaction(async (adapter) => {
      const operations = batch.map((row) => ({
        type: "insert",
        table,
        data: this.validateRowData(table, row)
      }));
      await adapter.executeBulk(operations);
    });
  }

  /**
   * Dispatches per-table row validation; returns the row unchanged when
   * it passes.
   * @throws {DatabaseError} When the row is not an object or table unknown.
   */
  validateRowData(table, row) {
    if (!row || typeof row !== "object") {
      throw new DatabaseError(
        `Invalid row data for table ${table}`,
        ErrorCode.DB_INSERT_FAILED,
        { table, rowType: typeof row }
      );
    }
    switch (table) {
      case "frames":
        return this.validateFrameRow(row);
      case "events":
        return this.validateEventRow(row);
      case "anchors":
        return this.validateAnchorRow(row);
      default:
        throw new DatabaseError(
          `Unknown table: ${table}`,
          ErrorCode.DB_INSERT_FAILED,
          { table }
        );
    }
  }

  /** Checks required frame columns are present (values are not checked). */
  validateFrameRow(row) {
    return this.requireFields(row, "frames", [
      "frame_id",
      "project_id",
      "run_id",
      "type",
      "name",
      "state",
      "depth"
    ], "frame");
  }

  /** Checks required event columns are present (values are not checked). */
  validateEventRow(row) {
    return this.requireFields(
      row,
      "events",
      ["event_id", "frame_id", "seq", "type", "text"],
      "event"
    );
  }

  /** Checks required anchor columns are present (values are not checked). */
  validateAnchorRow(row) {
    return this.requireFields(
      row,
      "anchors",
      ["anchor_id", "frame_id", "type", "text", "priority"],
      "anchor"
    );
  }

  /**
   * Shared helper: asserts every field in `required` exists on `row`.
   * `label` is the singular noun used in the error message ("frame" etc.).
   * @throws {DatabaseError} DB_CONSTRAINT_VIOLATION on first missing field.
   */
  requireFields(row, table, required, label) {
    for (const field of required) {
      if (!(field in row)) {
        throw new DatabaseError(
          `Missing required field ${field} in ${label} row`,
          ErrorCode.DB_CONSTRAINT_VIOLATION,
          { table, missingField: field }
        );
      }
    }
    return row;
  }

  /**
   * Re-attempts one page with linear backoff (delay * attempt).
   * @returns {Promise<number>} Rows migrated by the successful retry
   *   (0 when the page was empty), so the caller can advance its counters.
   * @throws {DatabaseError} After the final attempt fails (also recorded
   *   via addError, since the failure is now permanent).
   */
  async retryBatch(table, offset, batchSize) {
    for (let attempt = 1; attempt <= this.config.retryAttempts; attempt++) {
      try {
        await this.sleep(this.config.retryDelayMs * attempt);
        const batch = await this.getBatch(table, offset, batchSize);
        if (batch.length > 0) {
          await this.migrateBatch(table, batch);
        }
        logger.info(`Retry successful for table ${table} at offset ${offset}`);
        return batch.length;
      } catch (error) {
        logger.warn(
          `Retry ${attempt}/${this.config.retryAttempts} failed:`,
          error
        );
        if (attempt === this.config.retryAttempts) {
          this.addError(table, `Batch migration failed: ${error}`);
          throw new DatabaseError(
            `Failed after ${this.config.retryAttempts} retries`,
            ErrorCode.DB_MIGRATION_FAILED,
            { table, offset, attempts: this.config.retryAttempts },
            error instanceof Error ? error : void 0
          );
        }
      }
    }
    return 0;
  }

  /**
   * Compares per-table row counts between source and target using one
   * stats snapshot per adapter (previously re-fetched for every table).
   * @throws {DatabaseError} When any errors were recorded.
   */
  async verifyDataIntegrity(plan) {
    logger.info("Verifying data integrity");
    let sourceStats;
    let targetStats;
    let statsError;
    try {
      [sourceStats, targetStats] = await Promise.all([
        this.config.sourceAdapter.getStats(),
        this.config.targetAdapter.getStats()
      ]);
    } catch (error) {
      statsError = error;
    }
    for (const tablePlan of plan) {
      if (tablePlan.strategy === "skip") continue;
      if (statsError !== undefined) {
        this.addError(tablePlan.table, `Verification failed: ${statsError}`);
        continue;
      }
      const sourceCount = this.estimateTableRows(tablePlan.table, sourceStats);
      const targetCount = this.estimateTableRows(tablePlan.table, targetStats);
      if (sourceCount !== targetCount) {
        this.addError(
          tablePlan.table,
          `Row count mismatch: source=${sourceCount}, target=${targetCount}`
        );
      } else {
        logger.debug(
          `Table ${tablePlan.table} verified: ${sourceCount} rows`
        );
      }
    }
    if (this.progress.errors.length > 0) {
      throw new DatabaseError(
        `Data integrity verification failed with ${this.progress.errors.length} errors`,
        ErrorCode.DB_MIGRATION_FAILED,
        {
          errorCount: this.progress.errors.length,
          errors: this.progress.errors
        }
      );
    }
  }

  /** Finalization: align target schema version with source, then ANALYZE. */
  async completeMigration(_strategy) {
    logger.info("Completing migration");
    const sourceVersion = await this.config.sourceAdapter.getSchemaVersion();
    await this.config.targetAdapter.migrateSchema(sourceVersion);
    await this.config.targetAdapter.analyze();
    logger.info("Migration completion tasks finished");
  }

  /**
   * Rollback placeholder — intentionally a no-op beyond logging.
   * NOTE(review): cleaning the target database is strategy-dependent and
   * has not been implemented yet.
   */
  async rollbackMigration() {
    logger.warn("Rolling back migration");
    try {
      logger.warn(
        "Rollback would clean target database - implement based on strategy"
      );
    } catch (error) {
      logger.error("Rollback failed:", error);
    }
  }

  /**
   * Applies partial progress updates, recomputes percentage and the ETA
   * (from observed throughput), then notifies the callback and listeners.
   */
  updateProgress(updates) {
    Object.assign(this.progress, updates);
    this.updateProgressPercentage();
    if (this.progress.totalRecords > 0) {
      const elapsed = Date.now() - this.progress.startTime.getTime();
      const rate = this.progress.processedRecords / (elapsed / 1000);
      const remaining = this.progress.totalRecords - this.progress.processedRecords;
      if (rate > 0) {
        this.progress.estimatedEndTime = new Date(
          Date.now() + remaining / rate * 1000
        );
      }
    }
    this.config.progressCallback(this.progress);
    this.emit("progress", this.progress);
  }

  /** Percentage is capped at 100 (totals are estimates and may undershoot). */
  updateProgressPercentage() {
    if (this.progress.totalRecords > 0) {
      this.progress.percentage = Math.min(
        100,
        this.progress.processedRecords / this.progress.totalRecords * 100
      );
    }
  }

  /** Records a permanent per-table error (fails later verification). */
  addError(table, error) {
    this.progress.errors.push({
      table,
      error,
      timestamp: new Date()
    });
    logger.error(`Migration error for table ${table}: ${error}`);
  }

  /** Records a non-fatal warning; table defaults to "general". */
  addWarning(warning, table) {
    this.progress.warnings.push({
      table: table || "general",
      warning,
      timestamp: new Date()
    });
    logger.warn(`Migration warning: ${warning}`);
  }

  /** Strips stack traces and non-Error payloads before logging/emitting. */
  sanitizeError(error) {
    if (error instanceof Error) {
      return {
        name: error.name,
        message: error.message
        // Exclude stack traces and sensitive data for security
      };
    }
    return { message: "Unknown error occurred" };
  }

  /**
   * Inter-batch delay (ms) scaled by current heap usage, as crude
   * backpressure: 10ms normally, 50ms above 300MB, 100ms above 400MB.
   */
  calculateAdaptiveDelay() {
    const memoryUsage = process.memoryUsage().heapUsed / 1024 / 1024;
    if (memoryUsage > 400) return 100;
    if (memoryUsage > 300) return 50;
    return 10;
  }

  /** Promise-based delay. */
  sleep(ms) {
    return new Promise((resolve) => setTimeout(resolve, ms));
  }

  /**
   * Pauses the copy loop after the current batch; resume() continues from
   * the same offset.
   * @throws {DatabaseError} When no migration is running.
   */
  pause() {
    if (!this.isRunning) {
      throw new DatabaseError(
        "No migration in progress",
        ErrorCode.DB_MIGRATION_FAILED,
        { reason: "not_running" }
      );
    }
    this.isPaused = true;
    logger.info("Migration paused");
    this.emit("paused");
  }

  /**
   * Resumes a paused migration.
   * @throws {DatabaseError} When no migration is running.
   */
  resume() {
    if (!this.isRunning) {
      throw new DatabaseError(
        "No migration in progress",
        ErrorCode.DB_MIGRATION_FAILED,
        { reason: "not_running" }
      );
    }
    this.isPaused = false;
    logger.info("Migration resumed");
    this.emit("resumed");
  }

  /**
   * Signals the running migration to stop at the next batch boundary.
   * @throws {DatabaseError} When no migration is running.
   */
  abort() {
    if (!this.isRunning) {
      throw new DatabaseError(
        "No migration in progress",
        ErrorCode.DB_MIGRATION_FAILED,
        { reason: "not_running" }
      );
    }
    this.abortController?.abort();
    logger.info("Migration aborted");
    this.emit("aborted");
  }

  /**
   * Snapshot of current progress. Copies the errors/warnings arrays so
   * callers cannot mutate internal state through the returned object.
   */
  getProgress() {
    return {
      ...this.progress,
      errors: [...this.progress.errors],
      warnings: [...this.progress.warnings]
    };
  }

  /** True while migrate() is executing. */
  isActive() {
    return this.isRunning;
  }

  /**
   * Rough duration estimate from the migration plan (assumes ~1000 rows/s).
   * Note: delegates to planMigration(), which updates progress.totalRecords
   * as a side effect.
   * @returns {Promise<{estimatedMinutes: number, confidence: string}>}
   */
  async estimateDuration() {
    const plan = await this.planMigration();
    const totalRecords = plan.reduce((sum, p) => sum + p.estimatedRows, 0);
    const estimatedSeconds = totalRecords / 1000;
    const estimatedMinutes = Math.ceil(estimatedSeconds / 60);
    let confidence = "medium";
    if (totalRecords < 10000) confidence = "high";
    if (totalRecords > 100000) confidence = "low";
    return { estimatedMinutes, confidence };
  }
}
export {
MigrationManager
};