@stackmemoryai/stackmemory
Version:
Project-scoped memory for AI coding tools. Durable context across sessions with MCP integration, frames, smart retrieval, Claude Code skills, and automatic hooks.
385 lines (384 loc) • 11.6 kB
JavaScript
import { fileURLToPath as __fileURLToPath } from 'url';
import { dirname as __pathDirname } from 'path';
const __filename = __fileURLToPath(import.meta.url);
const __dirname = __pathDirname(__filename);
import { logger } from "../monitoring/logger.js";
import { trace } from "../trace/index.js";
import { ErrorCode, wrapError } from "../errors/index.js";
class BatchOperationsManager {
  // Synchronous SQLite handle exposing prepare()/transaction()
  // (better-sqlite3 API shape — TODO confirm against callers).
  // Stays undefined until a database is attached.
  db;
  // Cache of frequently used prepared statements, keyed by logical name.
  preparedStatements = /* @__PURE__ */ new Map();
  // Operations queued by queueBatchOperation(), drained in processBatchQueue().
  batchQueue = [];
  // Re-entrancy guard: true while processBatchQueue() is draining.
  isProcessing = false;
  /**
   * @param {object} [db] - Optional database handle. When omitted, the manager
   *   can still queue operations; database-backed methods throw a descriptive
   *   error (see requireDb) until a handle is assigned.
   */
  constructor(db) {
    if (db) {
      this.db = db;
      this.initializePreparedStatements();
    } else {
      this.db = void 0;
    }
  }
  /**
   * Return the attached database handle, failing fast when there is none.
   * Without this guard, db-backed methods crashed with an opaque
   * "Cannot read properties of undefined (reading 'prepare')".
   * @returns {object} the database handle
   * @throws {Error} when the manager was constructed without a database
   */
  requireDb() {
    if (!this.db) {
      throw new Error(
        "BatchOperationsManager: no database attached; pass a db to the constructor or to getBatchManager(db)"
      );
    }
    return this.db;
  }
  /**
   * Quote an SQL identifier (table or column name) so reserved words or
   * unexpected characters cannot break — or inject into — generated SQL.
   * Values are always bound via placeholders; this covers identifiers only.
   * @param {string} name - raw identifier
   * @returns {string} double-quoted identifier with embedded quotes doubled
   */
  quoteIdentifier(name) {
    return `"${String(name).replace(/"/g, '""')}"`;
  }
  /**
   * Insert event rows in optimized batches.
   * Each event gets a generated event_id and its payload serialized to JSON.
   * @param {Array<object>} events - rows carrying frame_id/seq/payload fields
   * @param {object} [options] - batchSize (default 100), onConflict
   *   (default "ignore"), enableTransactions (default true)
   * @returns {Promise<object>} per-run statistics (see performBulkInsert)
   */
  async bulkInsertEvents(events, options = {}) {
    const {
      batchSize = 100,
      onConflict = "ignore",
      enableTransactions = true
    } = options;
    return this.performBulkInsert("events", events, {
      batchSize,
      onConflict,
      enableTransactions,
      preprocessor: (event) => ({
        ...event,
        // frame_id + seq make the id unique per event; Date.now() only
        // disambiguates re-inserts of the same logical event.
        event_id: `evt_${event.frame_id}_${event.seq}_${Date.now()}`,
        payload: JSON.stringify(event.payload)
      })
    });
  }
  /**
   * Insert anchor rows in bulk.
   * @param {Array<object>} anchors - rows carrying frame_id/metadata fields
   * @param {object} [options] - forwarded to performBulkInsert
   * @returns {Promise<object>} per-run statistics
   */
  async bulkInsertAnchors(anchors, options = {}) {
    return this.performBulkInsert("anchors", anchors, {
      ...options,
      preprocessor: (anchor) => ({
        ...anchor,
        // slice(2, 11) replaces the deprecated substr(2, 9): a 9-char
        // base-36 random suffix disambiguates same-millisecond inserts.
        anchor_id: `anc_${anchor.frame_id}_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`,
        metadata: JSON.stringify(anchor.metadata),
        created_at: Date.now()
      })
    });
  }
  /**
   * Bulk-update frame digests; a non-null closed_at also stamps closed_at
   * (via COALESCE) and flips state to 'closed'.
   * @param {Array<object>} updates - { frame_id, digest_text, digest_json, closed_at? }
   * @param {object} [options] - batchSize (default 50), enableTransactions (default true)
   * @returns {Promise<object>} statistics for the run
   * @throws {Error} when no database is attached and updates is non-empty
   */
  async bulkUpdateFrameDigests(updates, options = {}) {
    const { batchSize = 50, enableTransactions = true } = options;
    return trace.traceAsync(
      "function",
      "bulkUpdateFrameDigests",
      { count: updates.length },
      async () => {
        const startTime = performance.now();
        const stats = {
          totalRecords: updates.length,
          batchesProcessed: 0,
          successfulInserts: 0,
          failedInserts: 0,
          totalTimeMs: 0,
          avgBatchTimeMs: 0
        };
        // Empty input short-circuits before the db is touched, so a
        // db-less manager can still no-op successfully.
        if (updates.length === 0) return stats;
        const db = this.requireDb();
        const stmt = db.prepare(`
      UPDATE frames
      SET digest_text = ?,
          digest_json = ?,
          closed_at = COALESCE(?, closed_at),
          state = CASE WHEN ? IS NOT NULL THEN 'closed' ELSE state END
      WHERE frame_id = ?
    `);
        const updateFn = (batch) => {
          for (const update of batch) {
            try {
              const result = stmt.run(
                update.digest_text,
                JSON.stringify(update.digest_json),
                update.closed_at,
                update.closed_at,
                update.frame_id
              );
              stats.successfulInserts += result.changes;
            } catch (error) {
              // Per-row failures are logged and counted, never fatal.
              stats.failedInserts++;
              const wrappedError = wrapError(
                error,
                "Failed to update frame digest",
                ErrorCode.DB_UPDATE_FAILED,
                { frameId: update.frame_id }
              );
              logger.warn("Failed to update frame digest", {
                frameId: update.frame_id,
                error: wrappedError.message
              });
            }
          }
        };
        if (enableTransactions) {
          // One transaction per batch: atomicity plus far fewer fsyncs.
          const transaction = db.transaction(updateFn);
          await this.processBatches(updates, batchSize, transaction, stats);
        } else {
          await this.processBatches(updates, batchSize, updateFn, stats);
        }
        stats.totalTimeMs = performance.now() - startTime;
        stats.avgBatchTimeMs = stats.batchesProcessed > 0 ? stats.totalTimeMs / stats.batchesProcessed : 0;
        logger.info(
          "Bulk frame digest update completed",
          stats
        );
        return stats;
      }
    );
  }
  /**
   * Generic bulk insert with optional per-record preprocessing.
   * The column list is derived from the first (preprocessed) record, so all
   * records must share the same key set.
   * @param {string} table - target table name (identifier-quoted before use)
   * @param {Array<object>} records - rows to insert
   * @param {object} [options] - batchSize (default 100), onConflict
   *   (default "ignore"), enableTransactions (default true), preprocessor
   * @returns {Promise<object>} statistics for the run
   * @throws {Error} when no database is attached and records is non-empty
   */
  async performBulkInsert(table, records, options = {}) {
    const {
      batchSize = 100,
      onConflict = "ignore",
      enableTransactions = true,
      preprocessor
    } = options;
    return trace.traceAsync(
      "function",
      `bulkInsert${table}`,
      { count: records.length },
      async () => {
        const startTime = performance.now();
        const stats = {
          totalRecords: records.length,
          batchesProcessed: 0,
          successfulInserts: 0,
          failedInserts: 0,
          totalTimeMs: 0,
          avgBatchTimeMs: 0
        };
        // No-op on empty input without requiring a database.
        if (records.length === 0) return stats;
        const db = this.requireDb();
        const processedRecords = preprocessor ? records.map(preprocessor) : records;
        const firstRecord = processedRecords[0];
        const columns = Object.keys(firstRecord);
        const placeholders = columns.map(() => "?").join(", ");
        // FIXME: onConflict "update" yields "ON CONFLICT DO UPDATE SET",
        // which is invalid in this position (and lacks assignments);
        // db.prepare() below will throw for it.
        const conflictClause = this.getConflictClause(onConflict);
        // Identifiers are quoted because table/column names are interpolated
        // into the SQL text; values are bound via placeholders.
        const insertSql = `INSERT ${conflictClause} INTO ${this.quoteIdentifier(table)} (${columns.map((col) => this.quoteIdentifier(col)).join(", ")}) VALUES (${placeholders})`;
        const stmt = db.prepare(insertSql);
        const insertFn = (batch) => {
          for (const record of batch) {
            try {
              const values = columns.map((col) => record[col]);
              const result = stmt.run(...values);
              stats.successfulInserts += result.changes;
            } catch (error) {
              // Per-row failures are logged and counted, never fatal.
              stats.failedInserts++;
              const wrappedError = wrapError(
                error,
                `Failed to insert ${table} record`,
                ErrorCode.DB_INSERT_FAILED,
                { table, record }
              );
              logger.warn(`Failed to insert ${table} record`, {
                record,
                error: wrappedError.message
              });
            }
          }
        };
        if (enableTransactions) {
          const transaction = db.transaction(insertFn);
          await this.processBatches(
            processedRecords,
            batchSize,
            transaction,
            stats
          );
        } else {
          await this.processBatches(
            processedRecords,
            batchSize,
            insertFn,
            stats
          );
        }
        stats.totalTimeMs = performance.now() - startTime;
        stats.avgBatchTimeMs = stats.batchesProcessed > 0 ? stats.totalTimeMs / stats.batchesProcessed : 0;
        logger.info(
          `Bulk ${table} insert completed`,
          stats
        );
        return stats;
      }
    );
  }
  /**
   * Slice records into batches and run processFn on each, updating stats.
   * A failed batch is counted (failedInserts += batch size) and logged but
   * does not abort the remaining batches.
   * @param {Array<object>} records
   * @param {number} batchSize
   * @param {Function} processFn - synchronous batch processor (possibly a
   *   db.transaction wrapper)
   * @param {object} stats - mutated in place
   */
  async processBatches(records, batchSize, processFn, stats) {
    for (let i = 0; i < records.length; i += batchSize) {
      const batch = records.slice(i, i + batchSize);
      const batchStart = performance.now();
      try {
        processFn(batch);
        stats.batchesProcessed++;
        const batchTime = performance.now() - batchStart;
        logger.debug("Batch processed", {
          batchNumber: stats.batchesProcessed,
          records: batch.length,
          timeMs: batchTime.toFixed(2)
        });
        // Yield to the event loop every 10 batches so long bulk runs do
        // not starve other tasks.
        if (stats.batchesProcessed % 10 === 0) {
          await new Promise((resolve) => setImmediate(resolve));
        }
      } catch (error) {
        stats.failedInserts += batch.length;
        const wrappedError = wrapError(
          error,
          "Batch processing failed",
          ErrorCode.DB_TRANSACTION_FAILED,
          { batchNumber: stats.batchesProcessed + 1, batchSize: batch.length }
        );
        logger.error("Batch processing failed", wrappedError, {
          batchNumber: stats.batchesProcessed + 1,
          batchSize: batch.length
        });
      }
    }
  }
  /**
   * Queue a batch operation for deferred processing. Once 10 or more
   * operations accumulate (and no drain is running), a drain is scheduled
   * on the next event-loop turn.
   * @param {object} operation - { table, operation, data, onConflict? }
   */
  queueBatchOperation(operation) {
    this.batchQueue.push(operation);
    if (this.batchQueue.length >= 10 && !this.isProcessing) {
      setImmediate(() => this.processBatchQueue());
    }
  }
  /**
   * Drain the queue: group queued operations by table and process each
   * group. Guarded by isProcessing so only one drain runs at a time; the
   * queue is snapshotted and cleared up front so new arrivals wait for the
   * next drain.
   */
  async processBatchQueue() {
    if (this.isProcessing || this.batchQueue.length === 0) {
      return;
    }
    this.isProcessing = true;
    const operations = [...this.batchQueue];
    this.batchQueue = [];
    try {
      const groupedOps = this.groupOperationsByTable(operations);
      for (const [table, tableOps] of groupedOps) {
        await this.processTableOperations(table, tableOps);
      }
      logger.info("Batch queue processed", {
        operations: operations.length,
        tables: groupedOps.size
      });
    } catch (error) {
      const wrappedError = wrapError(
        error,
        "Batch queue processing failed",
        ErrorCode.DB_TRANSACTION_FAILED,
        { operationsCount: operations.length }
      );
      logger.error("Batch queue processing failed", wrappedError);
    } finally {
      this.isProcessing = false;
    }
  }
  /**
   * Flush any remaining queued operations immediately.
   */
  async flush() {
    if (this.batchQueue.length > 0) {
      await this.processBatchQueue();
    }
  }
  /**
   * Map an onConflict option to the SQL fragment spliced after INSERT.
   * @param {string} onConflict - "ignore" | "replace" | "update" | other
   * @returns {string} SQL fragment ("" for unknown values)
   */
  getConflictClause(onConflict) {
    switch (onConflict) {
      case "ignore":
        return "OR IGNORE";
      case "replace":
        return "OR REPLACE";
      case "update":
        // FIXME: this fragment is not valid between INSERT and INTO and
        // carries no assignment list; "update" is effectively unsupported.
        return "ON CONFLICT DO UPDATE SET";
      default:
        return "";
    }
  }
  /**
   * Group queued operations by their target table.
   * @param {Array<object>} operations
   * @returns {Map<string, Array<object>>} table name -> operations
   */
  groupOperationsByTable(operations) {
    const grouped = /* @__PURE__ */ new Map();
    for (const op of operations) {
      if (!grouped.has(op.table)) {
        grouped.set(op.table, []);
      }
      grouped.get(op.table).push(op);
    }
    return grouped;
  }
  /**
   * Execute all queued operations for one table. Only "insert" is
   * implemented; anything else is logged and skipped.
   * @param {string} table
   * @param {Array<object>} operations
   */
  async processTableOperations(table, operations) {
    for (const op of operations) {
      switch (op.operation) {
        case "insert":
          await this.performBulkInsert(table, op.data, {
            onConflict: op.onConflict
          });
          break;
        // Add update and delete operations as needed
        default:
          logger.warn("Unsupported batch operation", {
            table,
            operation: op.operation
          });
      }
    }
  }
  /**
   * Pre-compile the hot-path insert statements. Only called when a
   * database handle is present (see constructor).
   */
  initializePreparedStatements() {
    this.preparedStatements.set(
      "insert_event",
      this.db.prepare(`
      INSERT OR IGNORE INTO events
      (event_id, frame_id, run_id, seq, event_type, payload, ts)
      VALUES (?, ?, ?, ?, ?, ?, ?)
    `)
    );
    this.preparedStatements.set(
      "insert_anchor",
      this.db.prepare(`
      INSERT OR IGNORE INTO anchors
      (anchor_id, frame_id, type, text, priority, metadata, created_at)
      VALUES (?, ?, ?, ?, ?, ?, ?)
    `)
    );
    logger.info("Batch operations prepared statements initialized");
  }
  /**
   * Release cached prepared statements.
   */
  cleanup() {
    this.preparedStatements.clear();
  }
}
// Lazily created process-wide singleton manager.
let globalBatchManager = null;
/**
 * Get (or create) the singleton BatchOperationsManager.
 * Previously the db argument was silently dropped when the singleton
 * already existed, so a manager created without a database could never be
 * repaired; now a late-arriving handle is attached and its prepared
 * statements initialized.
 * @param {object} [db] - database handle to attach if none is set yet
 * @returns {BatchOperationsManager}
 */
function getBatchManager(db) {
  if (!globalBatchManager) {
    globalBatchManager = new BatchOperationsManager(db);
  } else if (db && !globalBatchManager.db) {
    globalBatchManager.db = db;
    globalBatchManager.initializePreparedStatements();
  }
  return globalBatchManager;
}
/**
 * Convenience wrapper: bulk-insert events via the singleton manager.
 * @param {Array<object>} events
 * @param {object} [options]
 * @returns {Promise<object>} insert statistics
 */
async function bulkInsertEvents(events, options) {
  return getBatchManager().bulkInsertEvents(events, options);
}
/**
 * Convenience wrapper: bulk-insert anchors via the singleton manager.
 * @param {Array<object>} anchors
 * @param {object} [options]
 * @returns {Promise<object>} insert statistics
 */
async function bulkInsertAnchors(anchors, options) {
  return getBatchManager().bulkInsertAnchors(anchors, options);
}
// Public module surface: the manager class, the bulk-insert convenience
// wrappers, and the lazy singleton accessor.
export {
  BatchOperationsManager,
  bulkInsertAnchors,
  bulkInsertEvents,
  getBatchManager
};
//# sourceMappingURL=batch-operations.js.map