/**
 * @stackmemoryai/stackmemory
 * Project-scoped memory for AI coding tools. Durable context across sessions
 * with MCP integration, frames, smart retrieval, Claude Code skills, and
 * automatic hooks.
 */
import { fileURLToPath as __fileURLToPath } from 'url';
import { dirname as __pathDirname } from 'path';
const __filename = __fileURLToPath(import.meta.url);
const __dirname = __pathDirname(__filename);
import Database from "better-sqlite3";
import {
DatabaseError,
ErrorCode,
createErrorHandler
} from "../../../core/errors/index.js";
/**
 * SQLite-backed persistence layer for task analytics.
 *
 * Owns a better-sqlite3 connection, creates the `task_analytics` schema on
 * construction, and exposes aggregate metrics, recent-task listings, and
 * upserts. Every failure is wrapped in a DatabaseError carrying the failing
 * operation's context and the original error as cause.
 *
 * Storage conventions: timestamps are unix epoch **seconds**; `labels` and
 * `blocking_issues` are JSON-encoded arrays (default `'[]'`).
 */
class MetricsQueries {
  db;

  /**
   * Open (or create) the metrics database and ensure the schema exists.
   * @param {string} dbPath - Filesystem path to the SQLite database file.
   * @throws {DatabaseError} DB_CONNECTION_FAILED when the DB cannot be opened
   *   or the schema cannot be initialized.
   */
  constructor(dbPath) {
    try {
      this.db = new Database(dbPath, { readonly: false });
      this.initializeTables();
    } catch (error) {
      throw new DatabaseError(
        "Failed to initialize metrics database",
        ErrorCode.DB_CONNECTION_FAILED,
        {
          dbPath,
          operation: "constructor"
        },
        error instanceof Error ? error : void 0
      );
    }
  }

  /**
   * Create the `task_analytics` table and its indexes if they do not exist.
   * Idempotent; safe to call on every construction.
   * @throws {DatabaseError} DB_MIGRATION_FAILED on schema-creation failure.
   */
  initializeTables() {
    try {
      this.db.exec(`
        CREATE TABLE IF NOT EXISTS task_analytics (
          id TEXT PRIMARY KEY,
          title TEXT NOT NULL,
          state TEXT NOT NULL,
          created_at INTEGER NOT NULL,
          completed_at INTEGER,
          estimated_effort INTEGER,
          actual_effort INTEGER,
          assignee_id TEXT,
          priority TEXT DEFAULT 'medium',
          labels TEXT DEFAULT '[]',
          blocking_issues TEXT DEFAULT '[]',
          updated_at INTEGER DEFAULT (strftime('%s', 'now'))
        );
        CREATE INDEX IF NOT EXISTS idx_task_state ON task_analytics(state);
        CREATE INDEX IF NOT EXISTS idx_task_created ON task_analytics(created_at);
        CREATE INDEX IF NOT EXISTS idx_task_assignee ON task_analytics(assignee_id);
      `);
    } catch (error) {
      // NOTE: previously built an intermediate error via createErrorHandler
      // that was never used (dead code); we wrap and throw directly instead.
      throw new DatabaseError(
        "Failed to initialize analytics tables",
        ErrorCode.DB_MIGRATION_FAILED,
        {
          operation: "initializeTables",
          schema: "task_analytics"
        },
        error instanceof Error ? error : void 0
      );
    }
  }

  /**
   * Compute aggregate task metrics, optionally filtered.
   * @param {object} [query] - Optional filters.
   * @param {{start: Date, end: Date}} [query.timeRange] - Inclusive bounds on `created_at`.
   * @param {string[]} [query.userIds] - Restrict to these assignees.
   * @param {string[]} [query.states] - Restrict to these task states.
   * @param {string[]} [query.priorities] - Restrict to these priorities.
   * @returns {object} Totals, completion rate (%), average completion time (ms),
   *   effort accuracy (%; actual/estimated * 100), blocking-issue count, and a
   *   chronological `velocityTrend` of completions/day (up to the last 30 days
   *   that had completions).
   * @throws {DatabaseError} DB_QUERY_FAILED on any query error.
   */
  getTaskMetrics(query = {}) {
    try {
      const { timeRange, userIds, states, priorities } = query;
      const whereConditions = ["1=1"];
      const params = {};
      if (timeRange) {
        whereConditions.push(
          "created_at >= @startTime AND created_at <= @endTime"
        );
        // Dates are ms; the DB stores epoch seconds.
        params.startTime = Math.floor(timeRange.start.getTime() / 1e3);
        params.endTime = Math.floor(timeRange.end.getTime() / 1e3);
      }
      if (userIds && userIds.length > 0) {
        // Named placeholders (@user0, @user1, ...) keep the query parameterized.
        whereConditions.push(
          `assignee_id IN (${userIds.map((_, i) => `@user${i}`).join(",")})`
        );
        userIds.forEach((id, i) => params[`user${i}`] = id);
      }
      if (states && states.length > 0) {
        whereConditions.push(
          `state IN (${states.map((_, i) => `@state${i}`).join(",")})`
        );
        states.forEach((s, i) => params[`state${i}`] = s);
      }
      if (priorities && priorities.length > 0) {
        whereConditions.push(
          `priority IN (${priorities.map((_, i) => `@priority${i}`).join(",")})`
        );
        priorities.forEach((p, i) => params[`priority${i}`] = p);
      }
      const whereClause = whereConditions.join(" AND ");
      const metricsQuery = this.db.prepare(`
        SELECT
          COUNT(*) as total_tasks,
          SUM(CASE WHEN state = 'completed' THEN 1 ELSE 0 END) as completed_tasks,
          SUM(CASE WHEN state = 'in_progress' THEN 1 ELSE 0 END) as in_progress_tasks,
          SUM(CASE WHEN state = 'blocked' THEN 1 ELSE 0 END) as blocked_tasks,
          AVG(CASE
            WHEN state = 'completed' AND completed_at IS NOT NULL
            THEN (completed_at - created_at) * 1000
            ELSE NULL
          END) as avg_time_to_complete,
          AVG(CASE
            WHEN actual_effort IS NOT NULL AND estimated_effort IS NOT NULL AND estimated_effort > 0
            THEN (CAST(actual_effort AS REAL) / estimated_effort) * 100
            ELSE NULL
          END) as effort_accuracy,
          COALESCE(SUM(json_array_length(blocking_issues)), 0) as blocking_issues_count
        FROM task_analytics
        WHERE ${whereClause}
      `);
      const result = metricsQuery.get(params);
      // FIX: group velocity by completion date, not creation date — the trend
      // is "tasks completed per day", so bucketing on created_at was wrong.
      const velocityQuery = this.db.prepare(`
        SELECT
          DATE(completed_at, 'unixepoch') as day,
          COUNT(*) as completed_count
        FROM task_analytics
        WHERE state = 'completed'
          AND completed_at IS NOT NULL
          AND ${whereClause}
        GROUP BY day
        ORDER BY day DESC
        LIMIT 30
      `);
      const velocityData = velocityQuery.all(params);
      // Rows come newest-first; reverse so the trend reads oldest -> newest.
      const velocityTrend = velocityData.map((v) => v.completed_count).reverse();
      return {
        // `??` (not `||`): SUM/AVG are NULL on empty sets, but a real 0 must
        // survive — e.g. an effort accuracy of 0% previously became 100%.
        totalTasks: result.total_tasks ?? 0,
        completedTasks: result.completed_tasks ?? 0,
        inProgressTasks: result.in_progress_tasks ?? 0,
        blockedTasks: result.blocked_tasks ?? 0,
        completionRate: result.total_tasks > 0 ? result.completed_tasks / result.total_tasks * 100 : 0,
        averageTimeToComplete: result.avg_time_to_complete ?? 0,
        effortAccuracy: result.effort_accuracy ?? 100,
        blockingIssuesCount: result.blocking_issues_count ?? 0,
        velocityTrend
      };
    } catch (error) {
      throw new DatabaseError(
        "Failed to get task metrics",
        ErrorCode.DB_QUERY_FAILED,
        {
          query,
          operation: "getTaskMetrics"
        },
        error instanceof Error ? error : void 0
      );
    }
  }

  /**
   * List tasks ordered by most recent update, paginated.
   * @param {{limit?: number, offset?: number}} [query] - Page controls
   *   (defaults: limit 100, offset 0).
   * @returns {object[]} Hydrated task records: epoch-second columns become
   *   `Date`s, JSON columns become arrays.
   * @throws {DatabaseError} DB_QUERY_FAILED on query or JSON-parse failure.
   */
  getRecentTasks(query = {}) {
    try {
      const { limit = 100, offset = 0 } = query;
      const tasksQuery = this.db.prepare(`
        SELECT
          id,
          title,
          state,
          created_at,
          completed_at,
          estimated_effort,
          actual_effort,
          assignee_id,
          priority,
          labels,
          blocking_issues
        FROM task_analytics
        ORDER BY updated_at DESC
        LIMIT ? OFFSET ?
      `);
      const rows = tasksQuery.all(limit, offset);
      return rows.map((row) => ({
        id: row.id,
        title: row.title,
        state: row.state,
        createdAt: new Date(row.created_at * 1e3),
        completedAt: row.completed_at ? new Date(row.completed_at * 1e3) : void 0,
        estimatedEffort: row.estimated_effort,
        actualEffort: row.actual_effort,
        assigneeId: row.assignee_id,
        priority: row.priority,
        labels: JSON.parse(row.labels),
        blockingIssues: JSON.parse(row.blocking_issues)
      }));
    } catch (error) {
      throw new DatabaseError(
        "Failed to get recent tasks",
        ErrorCode.DB_QUERY_FAILED,
        {
          limit: query.limit,
          offset: query.offset,
          operation: "getRecentTasks"
        },
        error instanceof Error ? error : void 0
      );
    }
  }

  /**
   * Insert a task analytics row, or update it if the id already exists.
   * `created_at` is preserved on update; `updated_at` is always refreshed.
   * @param {object} task - Task with id, title, state, createdAt (Date),
   *   optional completedAt/estimatedEffort/actualEffort/assigneeId,
   *   priority, labels (array), blockingIssues (array).
   * @throws {DatabaseError} DB_QUERY_FAILED on write failure.
   */
  upsertTask(task) {
    try {
      const stmt = this.db.prepare(`
        INSERT INTO task_analytics (
          id, title, state, created_at, completed_at,
          estimated_effort, actual_effort, assignee_id,
          priority, labels, blocking_issues
        ) VALUES (
          @id, @title, @state, @created_at, @completed_at,
          @estimated_effort, @actual_effort, @assignee_id,
          @priority, @labels, @blocking_issues
        )
        ON CONFLICT(id) DO UPDATE SET
          title = @title,
          state = @state,
          completed_at = @completed_at,
          estimated_effort = @estimated_effort,
          actual_effort = @actual_effort,
          assignee_id = @assignee_id,
          priority = @priority,
          labels = @labels,
          blocking_issues = @blocking_issues,
          updated_at = strftime('%s', 'now')
      `);
      stmt.run({
        id: task.id,
        title: task.title,
        state: task.state,
        created_at: Math.floor(task.createdAt.getTime() / 1e3),
        completed_at: task.completedAt ? Math.floor(task.completedAt.getTime() / 1e3) : null,
        // `?? null` (not `|| null`): a legitimate effort of 0 must be stored,
        // not coerced to NULL.
        estimated_effort: task.estimatedEffort ?? null,
        actual_effort: task.actualEffort ?? null,
        assignee_id: task.assigneeId ?? null,
        priority: task.priority,
        labels: JSON.stringify(task.labels),
        blocking_issues: JSON.stringify(task.blockingIssues)
      });
    } catch (error) {
      throw new DatabaseError(
        `Failed to upsert task analytics: ${task.id}`,
        ErrorCode.DB_QUERY_FAILED,
        {
          taskId: task.id,
          taskState: task.state,
          operation: "upsertTask"
        },
        error instanceof Error ? error : void 0
      );
    }
  }

  /**
   * Close the underlying SQLite connection. Call once when done; further
   * method calls on this instance will fail.
   * @throws {DatabaseError} DB_CONNECTION_FAILED if the close fails.
   */
  close() {
    try {
      this.db.close();
    } catch (error) {
      throw new DatabaseError(
        "Failed to close analytics database",
        ErrorCode.DB_CONNECTION_FAILED,
        {
          operation: "close"
        },
        error instanceof Error ? error : void 0
      );
    }
  }
}
// Public API: the SQLite-backed task-analytics persistence layer.
export {
  MetricsQueries
};
//# sourceMappingURL=metrics-queries.js.map