@stackmemoryai/stackmemory
Version:
Project-scoped memory for AI coding tools. Durable context across sessions with MCP integration, frames, smart retrieval, Claude Code skills, and automatic hooks.
173 lines (172 loc) • 4.51 kB
JavaScript
// CommonJS-style __filename/__dirname shims for this ES module
// (presumably injected by the bundler — NOTE(review): neither constant
// is referenced anywhere in this file).
import { fileURLToPath as __fileURLToPath } from 'url';
import { dirname as __pathDirname } from 'path';
const __filename = __fileURLToPath(import.meta.url);
const __dirname = __pathDirname(__filename);
import { EventEmitter } from "events";
import * as fs from "fs";
import * as path from "path";
import { logger } from "./logger.js";
/**
 * Reads a required environment variable.
 *
 * @param {string} key - Name of the environment variable.
 * @param {string} [defaultValue] - Value to use when the variable is unset.
 * @returns {string} The variable's value (which may be an empty string),
 *   or `defaultValue` when the variable is unset.
 * @throws {Error} When the variable is unset and no default was provided.
 */
function getEnv(key, defaultValue) {
  const raw = process.env[key];
  if (raw !== undefined) {
    return raw;
  }
  if (defaultValue === undefined) {
    throw new Error(`Environment variable ${key} is required`);
  }
  return defaultValue;
}
/**
 * Reads an environment variable, yielding `undefined` when it is unset.
 *
 * @param {string} key - Name of the environment variable.
 * @returns {string | undefined} The variable's value, or `undefined`.
 */
function getOptionalEnv(key) {
  const { [key]: value } = process.env;
  return value;
}
/**
 * Buffers application metrics (gauges, counters, timings) in memory,
 * maintains lifetime aggregates per metric name, and — when the
 * STACKMEMORY_METRICS_ENABLED env var is "true" — periodically persists
 * entries as JSON Lines under $HOME/.stackmemory/metrics/.
 *
 * Events:
 *  - "metric": emitted with each recorded entry
 *  - "flush":  emitted with the batch of entries that was drained
 */
class MetricsCollector extends EventEmitter {
  // Entries buffered since the last flush.
  metrics = [];
  // Daily JSONL target; undefined when persistence is disabled.
  metricsFile;
  // Periodic flush timer; null when persistence is disabled or destroyed.
  flushInterval = null;
  // metric name -> { sum, count, min, max } over the collector's lifetime.
  aggregates = new Map();
  constructor() {
    super();
    if (process.env["STACKMEMORY_METRICS_ENABLED"] === "true") {
      const metricsDir = path.join(
        process.env["HOME"] || ".",
        ".stackmemory",
        "metrics"
      );
      if (!fs.existsSync(metricsDir)) {
        fs.mkdirSync(metricsDir, { recursive: true });
      }
      // One file per day, e.g. metrics-2024-01-31.jsonl
      this.metricsFile = path.join(
        metricsDir,
        `metrics-${new Date().toISOString().split("T")[0]}.jsonl`
      );
      // Flush every 30s. unref() the timer so it does not keep the
      // process alive: without it the event loop never drains,
      // "beforeExit" never fires, and the final destroy()/flush()
      // would never run.
      this.flushInterval = setInterval(() => this.flush(), 30000);
      this.flushInterval.unref?.();
    }
  }
  /**
   * Builds a metric entry, buffers it, updates aggregates, and emits
   * "metric". Shared by record/increment/timing.
   */
  #push(metric, value, type, tags) {
    const entry = {
      timestamp: new Date(),
      metric,
      value,
      type,
      tags
    };
    this.metrics.push(entry);
    this.updateAggregates(metric, value);
    this.emit("metric", entry);
  }
  /**
   * Records a gauge (point-in-time) value.
   * @param {string} metric - Metric name.
   * @param {number} value - Observed value.
   * @param {Record<string, string>} [tags] - Optional key/value tags.
   */
  async record(metric, value, tags) {
    this.#push(metric, value, "gauge", tags);
    // Bound memory usage: persist once the buffer grows past 1000 entries.
    if (this.metrics.length > 1000) {
      await this.flush();
    }
  }
  /**
   * Increments a counter.
   * @param {string} metric - Metric name.
   * @param {Record<string, string>} [tags] - Optional key/value tags.
   * @param {number} [value=1] - Amount to add.
   */
  async increment(metric, tags, value = 1) {
    this.#push(metric, value, "counter", tags);
  }
  /**
   * Records a duration.
   * @param {string} metric - Metric name.
   * @param {number} duration - Elapsed time (units chosen by the caller).
   * @param {Record<string, string>} [tags] - Optional key/value tags.
   */
  async timing(metric, duration, tags) {
    this.#push(metric, duration, "timing", tags);
  }
  /** Folds a value into the lifetime {sum, count, min, max} for `metric`. */
  updateAggregates(metric, value) {
    const existing = this.aggregates.get(metric) || {
      sum: 0,
      count: 0,
      min: Infinity,
      max: -Infinity
    };
    this.aggregates.set(metric, {
      sum: existing.sum + value,
      count: existing.count + 1,
      min: Math.min(existing.min, value),
      max: Math.max(existing.max, value)
    });
  }
  /**
   * Drains the buffer: appends entries as JSON Lines to the metrics file
   * (when persistence is enabled) and emits "flush" with the batch.
   * Aggregates are NOT cleared — they track lifetime totals.
   */
  async flush() {
    if (this.metrics.length === 0) return;
    // Swap the buffer out first so entries recorded during the async
    // write land in a fresh batch instead of being lost or duplicated.
    const toFlush = this.metrics;
    this.metrics = [];
    if (this.metricsFile) {
      try {
        const lines = toFlush.map((m) => JSON.stringify(m)).join("\n") + "\n";
        await fs.promises.appendFile(this.metricsFile, lines);
      } catch (error) {
        // Persistence is best-effort: log and keep running.
        logger.error(
          "Failed to write metrics",
          error instanceof Error ? error : new Error(String(error))
        );
      }
    }
    this.emit("flush", toFlush);
  }
  /**
   * Returns {sum, count, min, max, avg} keyed by metric name — for a
   * single metric when `metric` is given (empty object if unknown),
   * otherwise for every metric seen so far.
   */
  getStats(metric) {
    const withAvg = (stats) => ({
      ...stats,
      avg: stats.count > 0 ? stats.sum / stats.count : 0
    });
    if (metric) {
      const stats = this.aggregates.get(metric);
      return stats ? { [metric]: withAvg(stats) } : {};
    }
    const result = {};
    for (const [key, stats] of this.aggregates.entries()) {
      result[key] = withAvg(stats);
    }
    return result;
  }
  /** Discards buffered entries and all lifetime aggregates. */
  reset() {
    this.metrics = [];
    this.aggregates.clear();
  }
  /**
   * Stops the periodic flush timer and performs a final flush.
   * Returns the flush promise so callers may await completion
   * (previously the final flush was fire-and-forget).
   */
  destroy() {
    if (this.flushInterval) {
      clearInterval(this.flushInterval);
      this.flushInterval = null;
    }
    return this.flush();
  }
}
// Process-wide singleton backing the static `Metrics` facade below.
const collector = new MetricsCollector();
// Best-effort final flush on exit; any async work inside destroy() is
// not awaited ("beforeExit" handlers cannot block exit deterministically).
process.on("beforeExit", () => collector.destroy());
/**
 * Static facade over the module-level MetricsCollector singleton, so
 * callers can record metrics without holding a collector reference.
 */
class Metrics {
  /** Records a gauge value. @see MetricsCollector#record */
  static async record(metric, value, tags) {
    await collector.record(metric, value, tags);
  }
  /**
   * Increments a counter.
   * @param {number} [value=1] - Amount to add. Forwarded to the collector
   *   (previously this facade dropped the amount and always added 1).
   */
  static async increment(metric, tags, value = 1) {
    await collector.increment(metric, tags, value);
  }
  /** Records a duration. @see MetricsCollector#timing */
  static async timing(metric, duration, tags) {
    await collector.timing(metric, duration, tags);
  }
  /** Returns aggregate stats for one metric, or all metrics when omitted. */
  static getStats(metric) {
    return collector.getStats(metric);
  }
  /** Clears the collector's buffered entries and aggregates. */
  static reset() {
    collector.reset();
  }
  /** Subscribes to collector events ("metric", "flush"). */
  static on(event, listener) {
    collector.on(event, listener);
  }
}
// Lowercase alias of the `Metrics` facade; both exports refer to the
// same class object.
const metrics = Metrics;
export {
  Metrics,
  metrics
};
//# sourceMappingURL=metrics.js.map