@stackmemoryai/stackmemory
Version:
Lossless, project-scoped memory for AI coding tools. Durable context across sessions with 56 MCP tools, FTS5 search, conductor orchestrator, loop/watch monitoring, snapshot capture, pre-flight overlap checks, Claude/Codex/OpenCode wrappers, Linear sync, a
1,494 lines • 65.4 kB
JavaScript
import { fileURLToPath as __fileURLToPath } from 'url';
import { dirname as __pathDirname } from 'path';
const __filename = __fileURLToPath(import.meta.url);
const __dirname = __pathDirname(__filename);
import { execFileSync, execSync, spawn } from "child_process";
import {
appendFileSync,
existsSync,
mkdirSync,
readFileSync,
rmSync,
writeFileSync,
readdirSync,
createWriteStream
} from "fs";
import { join, dirname } from "path";
import { tmpdir, homedir } from "os";
import { createReadStream } from "fs";
import { createInterface } from "readline";
import { fileURLToPath } from "url";
import { Transform } from "stream";
import { logger } from "../../core/monitoring/logger.js";
import { isProcessAlive } from "../../utils/process-cleanup.js";
import {
LinearClient
} from "../../integrations/linear/client.js";
import { LinearAuthManager } from "../../integrations/linear/auth.js";
import {
PreflightChecker
} from "../../core/worktree/preflight.js";
import { ContextCapture } from "../../core/worktree/capture.js";
import { extractKeywords } from "../../core/utils/text.js";
import {
TraceCollector,
stringifyEventTruncated
} from "./conductor-traces.js";
/** Absolute path of the conductor outcomes JSONL log under the user's home. */
function getOutcomesLogPath() {
  const conductorDir = join(homedir(), ".stackmemory", "conductor");
  return join(conductorDir, "outcomes.jsonl");
}
/**
 * Append one outcome record (one JSON object per line) to the conductor
 * outcomes log, creating the log directory on first use.
 */
function logAgentOutcome(entry) {
  const conductorDir = join(homedir(), ".stackmemory", "conductor");
  if (!existsSync(conductorDir)) {
    mkdirSync(conductorDir, { recursive: true });
  }
  appendFileSync(getOutcomesLogPath(), `${JSON.stringify(entry)}\n`);
}
/**
 * Best-effort: push the agent branch and open a GitHub PR via the `gh` CLI.
 * Returns the PR URL on success, or null on any failure (logged, not thrown).
 *
 * Security fix: arguments are passed via execFileSync argument arrays so the
 * branch name, issue title, and PR body are never interpreted by a shell.
 * The previous string-built command escaped only double quotes, leaving
 * backticks, `$(...)`, and backslashes in issue titles exploitable.
 */
function createPullRequest(opts) {
  try {
    execFileSync("git", ["push", "-u", "origin", opts.branch], {
      cwd: opts.workspacePath,
      stdio: "pipe",
      timeout: 6e4
    });
    const prTitle = `feat(conductor): ${opts.issueId} \u2014 ${opts.title}`;
    const prBody = [
      "## Summary",
      "",
      `Automated PR from conductor agent for **${opts.issueId}**.`,
      "",
      `- **Files modified:** ${opts.filesModified}`,
      `- **Tool calls:** ${opts.toolCalls}`,
      "",
      "_This PR was auto-created by StackMemory Conductor._"
    ].join("\n");
    const result = execFileSync(
      "gh",
      [
        "pr",
        "create",
        "--base",
        opts.baseBranch,
        "--head",
        opts.branch,
        "--title",
        prTitle,
        "--body",
        prBody
      ],
      {
        cwd: opts.workspacePath,
        encoding: "utf-8",
        stdio: ["pipe", "pipe", "pipe"],
        timeout: 3e4
      }
    );
    const prUrl = result.trim();
    logger.info("Created PR", { issueId: opts.issueId, prUrl });
    return prUrl;
  } catch (err) {
    // Best-effort by design: a failed push or missing `gh` must not fail the run.
    logger.warn("Failed to create PR (best-effort)", {
      issueId: opts.issueId,
      error: err.message
    });
    return null;
  }
}
/**
 * Decide whether a failed issue should be retried, and with which prompt
 * adjustments, based on recorded outcome history.
 *
 * @param issue    Issue identifier (e.g. "ENG-123").
 * @param outcomes Optional pre-loaded outcome entries; when omitted, the
 *                 outcomes JSONL log is read from disk (best-effort).
 * @returns { shouldRetry, reason?, adjustments } — adjustments are extra
 *          prompt hints derived from the last failure's error tail.
 */
function getRetryStrategy(issue, outcomes) {
  if (!outcomes) {
    const logPath = getOutcomesLogPath();
    if (!existsSync(logPath)) {
      return { shouldRetry: true, adjustments: [] };
    }
    try {
      outcomes = readFileSync(logPath, "utf-8")
        .trim()
        .split("\n")
        .filter(Boolean)
        .map((line) => JSON.parse(line));
    } catch {
      // Unreadable/corrupt log: default to retrying.
      return { shouldRetry: true, adjustments: [] };
    }
  }
  const isFailure = (o) => o.outcome === "failure" || o.outcome === "partial";
  const issueOutcomes = outcomes.filter((o) => o.issue === issue);
  // Prefer this issue's own history; fall back to global history otherwise.
  const relevant = issueOutcomes.length > 0 ? issueOutcomes : outcomes;
  const failures = relevant.filter(isFailure);
  if (failures.length === 0) {
    return { shouldRetry: true, adjustments: [] };
  }
  const lastFailure = failures[failures.length - 1];
  const rateLimitRe = /429|rate.?limit|too many requests|usage.?limit|overloaded/i;
  if (lastFailure.errorTail && rateLimitRe.test(lastFailure.errorTail)) {
    return {
      shouldRetry: false,
      reason: "Last failure was a rate limit (429) \u2014 retrying immediately will not help",
      adjustments: []
    };
  }
  // Two or more failures in the same phase suggest a structural problem.
  if (issueOutcomes.length > 0) {
    const phaseFailCounts = new Map();
    for (const o of issueOutcomes) {
      if (!isFailure(o)) continue;
      phaseFailCounts.set(o.phase, (phaseFailCounts.get(o.phase) ?? 0) + 1);
    }
    for (const [phase, count] of phaseFailCounts) {
      if (count >= 2) {
        return {
          shouldRetry: false,
          reason: `Issue has failed ${count} times in '${phase}' phase \u2014 likely a structural problem`,
          adjustments: []
        };
      }
    }
  }
  // Derive targeted prompt hints from the last failure's error tail.
  const adjustments = [];
  const tail = lastFailure.errorTail;
  if (tail) {
    const hintTable = [
      [
        /timeout|timed?.?out|ETIMEDOUT|ESOCKETTIMEDOUT/i,
        "Previous attempt timed out. Work in smaller increments and commit partial progress early."
      ],
      [
        /lint|eslint|prettier|formatting|style.?error/i,
        "Previous attempt failed on linting. Run `npm run lint:fix` after each file change and fix all lint errors before committing."
      ],
      [
        /test.?fail|assertion|expect|vitest|jest|FAIL/i,
        "Previous attempt failed tests. Run tests incrementally after each change. Check existing test expectations before modifying code."
      ],
      [
        /build.?fail|tsc|type.?error|TS\d{4}|compile/i,
        "Previous attempt had build/type errors. Run `npm run build` after changes and fix type errors before committing."
      ],
      [
        /cannot find module|ERR_MODULE_NOT_FOUND|import/i,
        "Previous attempt had module resolution errors. Ensure all imports use .js extensions for relative paths (ESM)."
      ]
    ];
    for (const [pattern, hint] of hintTable) {
      if (pattern.test(tail)) adjustments.push(hint);
    }
  }
  return { shouldRetry: true, adjustments };
}
/**
 * Walk upward from this module's directory (at most 6 levels) looking for a
 * package.json; falls back to this module's own directory when none is found.
 */
function findPackageRoot() {
  const startDir = dirname(fileURLToPath(import.meta.url));
  let candidate = startDir;
  for (let depth = 0; depth < 6; depth++) {
    if (existsSync(join(candidate, "package.json"))) return candidate;
    candidate = dirname(candidate);
  }
  return startDir;
}
/** Per-agent status directory: ~/.stackmemory/conductor/agents/<identifier>. */
function getAgentStatusDir(issueIdentifier) {
  const agentsRoot = join(homedir(), ".stackmemory", "conductor", "agents");
  return join(agentsRoot, issueIdentifier);
}
/** Create (if missing) and return the agent's status directory. */
function ensureAgentStatusDir(issueIdentifier) {
  const statusDir = getAgentStatusDir(issueIdentifier);
  if (!existsSync(statusDir)) mkdirSync(statusDir, { recursive: true });
  return statusDir;
}
/**
 * Pass-through stream that mirrors every chunk into a log write stream while
 * forwarding it downstream; the log stream is closed when the source ends.
 */
class TeeTransform extends Transform {
  constructor(logStream) {
    super();
    this.logStream = logStream;
  }
  _transform(chunk, _encoding, callback) {
    // Mirror to the log first, then forward the same chunk downstream.
    this.logStream.write(chunk);
    callback(null, chunk);
  }
  _flush(callback) {
    this.logStream.end();
    callback();
  }
}
/**
 * Infer the agent's current work phase from a Claude Code stream-json event.
 * Only assistant-message tool_use blocks carry phase information; returns
 * null when no phase can be determined.
 */
function inferPhaseFromStreamJson(event) {
  if (event.type !== "assistant") return null;
  const blocks = event.message?.content || [];
  for (const block of blocks) {
    if (block.type !== "tool_use") continue;
    const name = (block.name || "").toLowerCase();
    const matches = (...needles) => needles.some((n) => name.includes(n));
    if (matches("read", "glob", "grep", "search")) return "reading";
    if (matches("todowrite", "todo")) return "planning";
    if (matches("edit", "write", "bash")) {
      // Bash commands that stage or commit indicate the commit phase.
      if (name === "bash") {
        const command = (block.input?.command ?? "").toLowerCase();
        if (command.includes("git commit") || command.includes("git add")) {
          return "committing";
        }
      }
      return "implementing";
    }
    if (matches("test")) return "testing";
  }
  return null;
}
/**
 * Infer the agent's current work phase from a JSON-RPC message (adapter mode).
 *
 * Fix: the original duplicated the `item/commandExecution/started` check and
 * placed commit detection after the generic tool mapping, so any bash tool
 * returned "implementing" before "committing" could ever be reported. The
 * command is now inspected first, mirroring inferPhaseFromStreamJson where
 * `git commit` / `git add` take precedence.
 */
function inferPhase(msg) {
  const method = msg.method;
  const params = msg.params;
  if (method !== "item/commandExecution/started") return null;
  // Commit detection first — otherwise the bash => "implementing" mapping
  // below would shadow it.
  const args = params?.arguments;
  const command = (args?.command ?? params?.command) || "";
  if (command.includes("git commit") || command.includes("git add")) {
    return "committing";
  }
  const tool = params?.tool || params?.name || "";
  const toolLower = tool.toLowerCase();
  if (toolLower.includes("read") || toolLower.includes("glob") || toolLower.includes("grep") || toolLower.includes("search")) {
    return "reading";
  }
  if (toolLower.includes("todowrite") || toolLower.includes("todo")) {
    return "planning";
  }
  if (toolLower.includes("edit") || toolLower.includes("write") || toolLower.includes("bash")) {
    return "implementing";
  }
  if (toolLower.includes("test")) {
    return "testing";
  }
  return null;
}
// Label/title keywords that usually indicate a small, low-risk change...
const SIMPLE_LABELS = ["bug", "fix", "typo", "chore", "docs", "hotfix"];
// ...versus substantial, cross-cutting work.
const COMPLEX_LABELS = [
  "feature",
  "refactor",
  "architecture",
  "migration",
  "security",
  "performance"
];
/**
 * Heuristically classify an issue as "simple" | "moderate" | "complex" from
 * description length, labels, title keywords, priority, estimate, and the
 * retry attempt number (retries bump the score).
 */
function estimateIssueComplexity(issue, attempt) {
  let score = 0;
  // Longer descriptions tend to mean more scope.
  const descLen = (issue.description || "").length;
  if (descLen > 800) score += 2;
  else if (descLen > 400) score += 1;
  else if (descLen < 200) score -= 1;
  for (const labelName of (issue.labels || []).map((l) => l.name.toLowerCase())) {
    if (SIMPLE_LABELS.some((s) => labelName.includes(s))) score -= 1;
    if (COMPLEX_LABELS.some((c) => labelName.includes(c))) score += 2;
  }
  // Title keywords count once each (not per-keyword).
  const titleLower = issue.title.toLowerCase();
  if (SIMPLE_LABELS.some((s) => titleLower.includes(s))) score -= 1;
  if (COMPLEX_LABELS.some((c) => titleLower.includes(c))) score += 1;
  // Lower priority number = more urgent (1 and 2 add weight).
  if (issue.priority === 1) score += 2;
  else if (issue.priority === 2) score += 1;
  if (issue.estimate) {
    if (issue.estimate >= 5) score += 2;
    else if (issue.estimate >= 3) score += 1;
    else if (issue.estimate <= 1) score -= 1;
  }
  // A retry suggests the issue is harder than it first appeared.
  if (attempt && attempt > 1) score += 2;
  if (score >= 3) return "complex";
  return score >= 1 ? "moderate" : "simple";
}
/**
 * Map issue complexity to a Claude model id. An explicit config.model
 * (anything other than "auto") always wins. Unknown complexity values
 * yield undefined, matching the original switch fall-through.
 */
function selectModelForIssue(complexity, config) {
  const configured = config.model || "auto";
  if (configured !== "auto") return configured;
  switch (complexity) {
    case "complex":
      return "claude-opus-4-20250514";
    case "simple":
    case "moderate":
      return "claude-sonnet-4-20250514";
  }
}
// Baseline orchestrator configuration; caller overrides are shallow-merged
// over these values in the Conductor constructor.
const DEFAULT_CONFIG = {
  // Linear workflow states: which issues are eligible for dispatch, which
  // count as finished, and the states issues are moved to during/after a run.
  activeStates: ["Todo"],
  terminalStates: ["Done", "Cancelled", "Canceled", "Closed"],
  inProgressState: "In Progress",
  inReviewState: "In Review",
  pollIntervalMs: 3e4, // 30s between Linear polls
  maxConcurrent: 5, // max simultaneous agents
  workspaceRoot: join(tmpdir(), "conductor_workspaces"),
  repoRoot: process.cwd(),
  baseBranch: "main",
  // Adapter-mode app-server script shipped with the package.
  appServerPath: join(
    findPackageRoot(),
    "scripts",
    "conductor",
    "claude-app-server.cjs"
  ),
  turnTimeoutMs: 36e5, // 1 hour per agent turn
  maxRetries: 1,
  hookTimeoutMs: 6e4, // 60s per lifecycle hook
  agentMode: "cli"
};
class Conductor {
config;
client = null;
running = /* @__PURE__ */ new Map();
claimed = /* @__PURE__ */ new Set();
completed = /* @__PURE__ */ new Set();
pollTimer = null;
startedAt = 0;
totalAttempts = 0;
failCount = 0;
completeCount = 0;
stopping = false;
stateCache = /* @__PURE__ */ new Map();
activeStatesLower;
terminalStatesLower;
/** Global rate limit backoff state */
rateLimit = {
inBackoff: false,
backoffUntil: 0,
totalHits: 0,
consecutiveHits: 0,
lastHitAt: 0
};
/** Aggregated usage stats */
usage = {
inputTokens: 0,
outputTokens: 0,
cacheCreationTokens: 0,
cacheReadTokens: 0,
estimatedMessages: 0,
perAgent: /* @__PURE__ */ new Map()
};
constructor(config = {}) {
this.config = { ...DEFAULT_CONFIG, ...config };
this.activeStatesLower = this.config.activeStates.map(
(s) => s.trim().toLowerCase()
);
this.terminalStatesLower = this.config.terminalStates.map(
(s) => s.trim().toLowerCase()
);
}
/**
 * Start the orchestrator loop.
 * Resolves when stopped via stop() or SIGINT/SIGTERM.
 *
 * Sequence: locate the app-server script, ensure the workspace root exists,
 * connect to Linear (auto-detecting the team if unset), cache workflow
 * states, install signal handlers, run one immediate poll, then enter the
 * periodic poll loop.
 */
async start() {
  this.startedAt = Date.now();
  this.stopping = false;
  // The configured app-server path may not exist in every checkout layout;
  // probe the known fallback locations before giving up.
  if (!existsSync(this.config.appServerPath)) {
    const candidates = [
      join(
        this.config.repoRoot,
        "scripts",
        "conductor",
        "claude-app-server.cjs"
      ),
      join(
        this.config.repoRoot,
        "scripts",
        "symphony",
        "claude-app-server.cjs"
      )
    ];
    const found = candidates.find((p) => existsSync(p));
    if (found) {
      this.config.appServerPath = found;
    } else {
      throw new Error(
        `claude-app-server.cjs not found at ${this.config.appServerPath}`
      );
    }
  }
  if (!existsSync(this.config.workspaceRoot)) {
    mkdirSync(this.config.workspaceRoot, { recursive: true });
  }
  this.client = await this.createLinearClient();
  // Best-effort team auto-detection when no teamId was configured.
  if (!this.config.teamId && this.client) {
    try {
      const team = await this.client.getTeam();
      this.config.teamId = team.id;
      logger.info("Auto-detected Linear team", {
        id: team.id,
        name: team.name,
        key: team.key
      });
    } catch (err) {
      logger.warn("Failed to auto-detect team", {
        error: err.message
      });
    }
  }
  await this.cacheWorkflowStates();
  logger.info("Orchestrator started", {
    activeStates: this.config.activeStates,
    maxConcurrent: this.config.maxConcurrent,
    pollIntervalMs: this.config.pollIntervalMs,
    workspaceRoot: this.config.workspaceRoot
  });
  console.log(
    `Orchestrator started \u2014 polling every ${this.config.pollIntervalMs / 1e3}s, max ${this.config.maxConcurrent} concurrent`
  );
  this.writeStatusFile();
  // Graceful shutdown on Ctrl-C / kill.
  const shutdown = () => this.stop();
  process.on("SIGINT", shutdown);
  process.on("SIGTERM", shutdown);
  // Poll once immediately so work starts without waiting a full interval.
  try {
    await this.poll();
  } catch (err) {
    logger.error("Initial poll failed", { error: err.message });
  }
  await this.schedulePoll();
}
/**
 * Gracefully stop the orchestrator.
 * Sends SIGTERM to all running agents, waits up to 10s for them to exit,
 * then force-kills any survivors with SIGKILL.
 */
async stop() {
  if (this.stopping) return; // idempotent — safe under double SIGINT
  this.stopping = true;
  console.log("\nOrchestrator stopping...");
  logger.info("Orchestrator stopping", {
    runningCount: this.running.size
  });
  // Cancel the pending poll sleep so schedulePoll() can exit.
  if (this.pollTimer) {
    clearTimeout(this.pollTimer);
    this.pollTimer = null;
  }
  for (const [issueId, run] of this.running) {
    if (run.process && !run.process.killed) {
      logger.info("Killing agent process", {
        issueId,
        identifier: run.issue.identifier
      });
      run.process.kill("SIGTERM");
    }
  }
  // Give agents up to 10s to wind down before the hard kill.
  const deadline = Date.now() + 1e4;
  while (this.running.size > 0 && Date.now() < deadline) {
    await new Promise((r) => setTimeout(r, 500));
  }
  for (const [_issueId, run] of this.running) {
    if (run.process && !run.process.killed) {
      run.process.kill("SIGKILL");
    }
  }
  this.running.clear();
  this.claimed.clear();
  this.clearStatusFile();
  console.log(
    `Orchestrator stopped. Completed: ${this.completeCount}, Failed: ${this.failCount}`
  );
}
/**
* Get current orchestrator stats.
*/
getStats() {
const issues = Array.from(this.running.values()).map((r) => ({
identifier: r.issue.identifier,
status: r.status,
attempt: r.attempt,
runtime: Date.now() - r.startedAt
}));
return {
running: this.running.size,
completed: this.completeCount,
failed: this.failCount,
totalAttempts: this.totalAttempts,
uptime: Date.now() - this.startedAt,
issues,
rateLimit: { ...this.rateLimit },
usage: { ...this.usage, perAgent: new Map(this.usage.perAgent) }
};
}
// ── Status File ──
/**
 * Write current conductor state to .stackmemory/conductor-status.json
 * for consumption by `stackmemory dashboard` and other tools.
 * Silently no-ops when the .stackmemory directory does not exist, and
 * swallows write errors (the status file is advisory only).
 */
writeStatusFile() {
  const statusDir = join(this.config.repoRoot, ".stackmemory");
  if (!existsSync(statusDir)) return;
  const status = {
    pid: process.pid,
    startedAt: this.startedAt,
    updatedAt: Date.now(),
    running: Array.from(this.running.values()).map((r) => ({
      identifier: r.issue.identifier,
      title: r.issue.title,
      status: r.status,
      attempt: r.attempt,
      startedAt: r.startedAt,
      runtime: Date.now() - r.startedAt
    })),
    // Claimed but neither running nor completed = waiting to be picked up.
    queued: Array.from(this.claimed).filter(
      (id) => !this.running.has(id) && !this.completed.has(id)
    ).length,
    completed: this.completeCount,
    failed: this.failCount,
    totalAttempts: this.totalAttempts,
    maxConcurrent: this.config.maxConcurrent,
    stopping: this.stopping,
    rateLimit: {
      inBackoff: this.rateLimit.inBackoff,
      backoffUntil: this.rateLimit.backoffUntil,
      backoffRemainingSec: this.rateLimit.inBackoff ? Math.max(
        0,
        Math.ceil((this.rateLimit.backoffUntil - Date.now()) / 1e3)
      ) : 0,
      totalHits: this.rateLimit.totalHits
    },
    // Project a dashboard-friendly subset of the usage summary.
    usage: (() => {
      const summary = this.getUsageSummary();
      return {
        inputTokens: summary.inputTokens,
        outputTokens: summary.outputTokens,
        totalTokens: summary.totalTokens,
        estimatedMessages: summary.estimatedMessages,
        tokensPerMin: summary.tokensPerMin,
        budgetPct5x: summary.budgetPct5x,
        budgetPct20x: summary.budgetPct20x,
        minutesRemaining5x: summary.minutesRemaining5x,
        minutesRemaining20x: summary.minutesRemaining20x,
        cacheHitRate: summary.cacheHitRate
      };
    })()
  };
  try {
    writeFileSync(
      join(statusDir, "conductor-status.json"),
      JSON.stringify(status, null, 2)
    );
  } catch {
    // best-effort: status reporting must never crash the orchestrator
  }
}
clearStatusFile() {
const statusPath = join(
this.config.repoRoot,
".stackmemory",
"conductor-status.json"
);
try {
if (existsSync(statusPath)) rmSync(statusPath);
} catch {
}
}
// ── Agent Status Files ──
/**
* Write per-agent status to ~/.stackmemory/conductor/agents/<issue-id>/status.json
*/
writeAgentStatus(issueIdentifier, run) {
try {
const dir = ensureAgentStatusDir(issueIdentifier);
const status = {
issue: issueIdentifier,
pid: run.process?.pid || process.pid,
started: new Date(run.startedAt).toISOString(),
lastUpdate: (/* @__PURE__ */ new Date()).toISOString(),
phase: run.phase,
filesModified: run.filesModified,
toolCalls: run.toolCalls,
tokensUsed: run.tokensUsed,
workspacePath: run.workspacePath || void 0
};
writeFileSync(join(dir, "status.json"), JSON.stringify(status, null, 2));
} catch {
}
}
/**
* Open a log file write stream for an agent.
*/
openAgentLogStream(issueIdentifier) {
const dir = ensureAgentStatusDir(issueIdentifier);
return createWriteStream(join(dir, "output.log"), { flags: "a" });
}
// ── Polling ──
/**
 * Periodic poll loop: sleep pollIntervalMs, poll, refresh the status file,
 * repeat until stopping. The timeout handle is stored in pollTimer so
 * stop() can cancel a pending sleep.
 */
async schedulePoll() {
  while (!this.stopping) {
    await new Promise((resolve) => {
      this.pollTimer = setTimeout(resolve, this.config.pollIntervalMs);
    });
    if (this.stopping) break;
    try {
      await this.poll();
    } catch (err) {
      // A failed cycle is logged and the loop continues on schedule.
      logger.error("Poll cycle failed", { error: err.message });
    }
    this.writeStatusFile();
  }
}
/**
 * One poll cycle: honor rate-limit backoff, reconcile running agents,
 * fetch candidate issues, filter out claimed/completed/conflicting ones,
 * report usage, and dispatch up to the remaining concurrency budget.
 */
async poll() {
  if (!this.client || this.stopping) return;
  // Global rate-limit backoff: skip the whole cycle until it expires.
  if (this.rateLimit.inBackoff) {
    const remaining = this.rateLimit.backoffUntil - Date.now();
    if (remaining > 0) {
      logger.info("Rate limit backoff active, skipping poll", {
        remainingMs: remaining,
        remainingSec: Math.ceil(remaining / 1e3),
        totalHits: this.rateLimit.totalHits
      });
      return;
    }
    this.rateLimit.inBackoff = false;
    this.rateLimit.backoffUntil = 0;
    logger.info("Rate limit backoff expired, resuming dispatch");
    console.log("[rate-limit] Backoff expired, resuming dispatch");
  }
  await this.checkStaleAgents();
  await this.reconcile();
  const available = this.config.maxConcurrent - this.running.size;
  if (available <= 0) {
    logger.debug("At capacity, skipping dispatch", {
      running: this.running.size,
      max: this.config.maxConcurrent
    });
    return;
  }
  const candidates = await this.fetchCandidates();
  if (candidates.length === 0) return;
  // Drop issues already claimed this session or already completed.
  const eligible = candidates.filter(
    (issue) => !this.claimed.has(issue.id) && !this.completed.has(issue.id)
  );
  if (eligible.length === 0) return;
  // Lower priority number sorts first; missing priority sorts last (4).
  const sorted = eligible.sort((a, b) => (a.priority || 4) - (b.priority || 4)).slice(0, available);
  const toDispatch = this.preflightFilter(sorted);
  // Usage/budget report, once any tokens have been observed.
  if (this.usage.inputTokens > 0) {
    const summary = this.getUsageSummary();
    logger.info("Usage summary", {
      totalTokens: summary.totalTokens,
      estimatedMessages: summary.estimatedMessages,
      tokensPerMin: summary.tokensPerMin,
      budgetPct5x: `${summary.budgetPct5x}%`,
      budgetPct20x: `${summary.budgetPct20x}%`,
      minutesRemaining5x: summary.minutesRemaining5x,
      minutesRemaining20x: summary.minutesRemaining20x,
      rateLimitHits: this.rateLimit.totalHits
    });
    if (summary.budgetPct5x >= 75 && summary.budgetPct5x < 100) {
      console.log(
        `[usage] \u26A0 ${summary.budgetPct5x}% of Max 5x budget used \u2014 ~${summary.minutesRemaining5x}min remaining at ${summary.tokensPerMin} tok/min`
      );
    }
    if (summary.budgetPct5x >= 100) {
      console.log(
        `[usage] \u{1F6D1} Max 5x budget likely exhausted (${summary.estimatedMessages} est. messages / 225 limit). Expect 429s.`
      );
    }
  }
  logger.info("Dispatching issues", {
    count: toDispatch.length,
    identifiers: toDispatch.map((i) => i.identifier),
    skipped: sorted.length - toDispatch.length
  });
  // Fire-and-forget: dispatch() handles its own errors and cleanup.
  for (const issue of toDispatch) {
    this.dispatch(issue).catch((err) => {
      logger.error("Dispatch failed", {
        identifier: issue.identifier,
        error: err.message
      });
    });
  }
}
async fetchCandidates() {
if (!this.client) return [];
const allCandidates = [];
const issues = await this.client.getIssues({
teamId: this.config.teamId,
limit: 50
});
for (const issue of issues) {
const stateName = issue.state.name.trim().toLowerCase();
if (this.activeStatesLower.includes(stateName)) {
allCandidates.push(issue);
}
}
return allCandidates;
}
// ── Pre-flight ──
/**
* Filter candidates against running issues using file overlap prediction.
* Returns only issues that are parallel-safe with currently running work.
*/
preflightFilter(candidates) {
if (candidates.length === 0 || this.running.size === 0) {
return candidates;
}
try {
const checker = new PreflightChecker(this.config.repoRoot);
const runningFileSets = [];
const runningNames = [];
for (const run of this.running.values()) {
const task = {
name: run.issue.identifier,
description: run.issue.title,
keywords: this.extractIssueKeywords(run.issue)
};
runningFileSets.push(checker.predictFiles(task));
runningNames.push(run.issue.identifier);
}
const safe = [];
for (const candidate of candidates) {
const candidateTask = {
name: candidate.identifier,
description: candidate.title,
keywords: this.extractIssueKeywords(candidate)
};
const candidateFiles = checker.predictFiles(candidateTask);
const conflictFiles = [];
const conflictTasks = [];
for (let i = 0; i < runningFileSets.length; i++) {
const shared = [...candidateFiles].filter(
(f) => runningFileSets[i].has(f)
);
if (shared.length > 0) {
conflictFiles.push(...shared);
conflictTasks.push(runningNames[i]);
}
}
if (conflictFiles.length > 0) {
const uniqueFiles = [...new Set(conflictFiles)].slice(0, 3);
logger.info("Preflight: skipping conflicting issue", {
identifier: candidate.identifier,
conflictsWith: conflictTasks,
files: uniqueFiles
});
console.log(
`[${candidate.identifier}] Deferred \u2014 file overlap with running work (${uniqueFiles.join(", ")})`
);
} else {
safe.push(candidate);
}
}
return safe;
} catch (err) {
logger.warn("Preflight check failed, dispatching all", {
error: err.message
});
return candidates;
}
}
extractIssueKeywords(issue) {
const labelText = issue.labels.map((l) => l.name).join(" ");
return extractKeywords(`${issue.title} ${labelText}`, { maxCount: 8 });
}
// ── Dispatch ──
/**
 * Claim an issue and run it end-to-end: create a worktree workspace, move
 * the issue to the in-progress state, run the after-create hook, then
 * execute the agent with retries via attemptRun(). Always removes the
 * issue from `running` and refreshes the status file when done.
 */
async dispatch(issue) {
  const issueId = issue.id;
  this.claimed.add(issueId);
  // Fresh run record; phase starts at "reading" until events say otherwise.
  const run = {
    issue,
    workspacePath: "",
    process: null,
    attempt: 1,
    startedAt: Date.now(),
    status: "starting",
    phase: "reading",
    toolCalls: 0,
    filesModified: 0,
    tokensUsed: 0
  };
  this.writeAgentStatus(issue.identifier, run);
  this.running.set(issueId, run);
  this.totalAttempts++;
  console.log(`[${issue.identifier}] Dispatching: ${issue.title}`);
  try {
    const workspacePath = await this.createWorkspace(issue);
    run.workspacePath = workspacePath;
    await this.transitionIssue(issue, this.config.inProgressState);
    await this.runHook("after-create", workspacePath, issue);
    await this.attemptRun(issue, run);
  } catch (err) {
    // attemptRun throws only after all retries are exhausted.
    this.failCount++;
    console.log(`[${issue.identifier}] Failed: ${err.message}`);
  } finally {
    this.running.delete(issueId);
    this.writeStatusFile();
  }
}
/**
 * Run the agent with retry logic. Throws on final failure.
 *
 * On success: optionally opens a PR (unless config.autoPR === false), logs
 * the outcome, runs the after-run hook, snapshots the workspace, and moves
 * the issue to the in-review state. On failure: logs the outcome, consults
 * getRetryStrategy() before retrying with exponential backoff (capped at
 * 5 min), and aborts immediately on rate-limit errors.
 */
async attemptRun(issue, run) {
  const maxAttempts = this.config.maxRetries + 1;
  while (run.attempt <= maxAttempts) {
    try {
      run.status = "running";
      await this.runAgent(issue, run);
      run.status = "completed";
      this.completeCount++;
      let prUrl;
      // Auto-PR is opt-out: only an explicit `false` disables it.
      if (this.config.autoPR !== false) {
        const wsKey = this.sanitizeIdentifier(issue.identifier);
        const branchName = `conductor/${wsKey}`;
        const url = createPullRequest({
          branch: branchName,
          baseBranch: this.config.baseBranch,
          issueId: issue.identifier,
          title: issue.title,
          filesModified: run.filesModified,
          toolCalls: run.toolCalls,
          workspacePath: run.workspacePath
        });
        if (url) {
          prUrl = url;
          console.log(`[${issue.identifier}] PR created: ${url}`);
        }
      }
      logAgentOutcome({
        timestamp: (/* @__PURE__ */ new Date()).toISOString(),
        issue: issue.identifier,
        attempt: run.attempt,
        outcome: "success",
        phase: run.phase,
        toolCalls: run.toolCalls,
        filesModified: run.filesModified,
        tokensUsed: run.tokensUsed,
        durationMs: Date.now() - run.startedAt,
        hasCommits: true,
        labels: issue.labels.map((l) => l.name),
        prUrl
      });
      // Hook failures after a successful run are deliberately ignored.
      await this.runHook(
        "after-run",
        run.workspacePath,
        issue,
        run.attempt
      ).catch(() => {
      });
      this.takeSnapshot(run.workspacePath, issue);
      await this.transitionIssue(issue, this.config.inReviewState);
      console.log(
        run.attempt === 1 ? `[${issue.identifier}] Completed successfully` : `[${issue.identifier}] Completed on retry ${run.attempt}`
      );
      return;
    } catch (err) {
      run.status = "failed";
      run.error = err.message;
      logger.error("Agent run failed", {
        identifier: issue.identifier,
        error: run.error,
        attempt: run.attempt
      });
      logAgentOutcome({
        timestamp: (/* @__PURE__ */ new Date()).toISOString(),
        issue: issue.identifier,
        attempt: run.attempt,
        outcome: "failure",
        phase: run.phase,
        toolCalls: run.toolCalls,
        filesModified: run.filesModified,
        tokensUsed: run.tokensUsed,
        durationMs: Date.now() - run.startedAt,
        hasCommits: false,
        labels: issue.labels.map((l) => l.name),
        // Keep only the tail of the error so log lines stay bounded.
        errorTail: run.error?.slice(-500)
      });
      // Rate-limit errors trigger global backoff; no point retrying now.
      if (this.handleRateLimitError(run.error, issue.identifier)) {
        throw err;
      }
      if (run.workspacePath) {
        await this.runHook(
          "after-run",
          run.workspacePath,
          issue,
          run.attempt
        ).catch(() => {
        });
      }
      if (run.attempt < maxAttempts && !this.stopping) {
        // Consult outcome history before retrying blindly.
        const strategy = getRetryStrategy(issue.identifier);
        if (!strategy.shouldRetry) {
          console.log(
            `[${issue.identifier}] Skipping retry: ${strategy.reason}`
          );
          logger.info("Retry skipped by intelligence", {
            identifier: issue.identifier,
            reason: strategy.reason
          });
          throw err;
        }
        if (strategy.adjustments.length > 0) {
          // Presumably consumed by runAgent when building the retry prompt
          // (runAgent's body is not fully visible here — verify).
          run.retryAdjustments = strategy.adjustments;
          console.log(
            `[${issue.identifier}] Retry with adjustments: ${strategy.adjustments.length} hint(s)`
          );
        }
        console.log(
          `[${issue.identifier}] Failed (attempt ${run.attempt}), retrying...`
        );
        run.attempt++;
        this.totalAttempts++;
        // Exponential backoff: 2s, 4s, ... capped at 5 minutes.
        const backoffMs = Math.min(
          1e3 * Math.pow(2, run.attempt - 1),
          3e5
        );
        await new Promise((r) => setTimeout(r, backoffMs));
      } else {
        throw err;
      }
    }
  }
}
// ── Rate Limit Detection ──
/**
 * Check if an error indicates a rate limit (429) or usage cap.
 * Triggers global backoff so no new agents are dispatched.
 *
 * Claude Max limits (approximate, shared between claude.ai + Claude Code):
 * - Max 5x: ~225 messages per 5h window
 * - Max 20x: ~900 messages per 5h window
 * Messages are token-weighted: heavy agent usage ≈ 5-10x a casual message.
 *
 * Fix: the gap since the previous hit is now computed BEFORE lastHitAt is
 * overwritten. The original assigned lastHitAt = Date.now() first, so
 * timeSinceLast was always ~0, consecutiveHits never reset (dead else
 * branch), and the backoff window ratcheted up permanently.
 */
handleRateLimitError(error, identifier) {
  const rateLimitPatterns = [
    "usage limits",
    "rate_limit",
    "rate limit",
    "overloaded",
    "429",
    "too many requests",
    "quota exceeded",
    "capacity"
  ];
  const isRateLimit = rateLimitPatterns.some(
    (p) => error.toLowerCase().includes(p)
  );
  if (!isRateLimit) return false;
  const now = Date.now();
  // Measure against the previous hit before recording this one.
  const timeSinceLast = now - this.rateLimit.lastHitAt;
  this.rateLimit.totalHits++;
  if (this.rateLimit.lastHitAt > 0 && timeSinceLast < 6e5) {
    // Another hit within 10 minutes: escalate the backoff.
    this.rateLimit.consecutiveHits++;
  } else {
    this.rateLimit.consecutiveHits = 1;
  }
  this.rateLimit.lastHitAt = now;
  // Exponential backoff: 60s, 120s, ... capped at 15 minutes.
  const backoffSec = Math.min(
    60 * Math.pow(2, this.rateLimit.consecutiveHits - 1),
    900
  );
  this.rateLimit.inBackoff = true;
  this.rateLimit.backoffUntil = now + backoffSec * 1e3;
  logger.warn("Rate limit hit \u2014 global backoff", {
    identifier,
    backoffSec,
    consecutiveHits: this.rateLimit.consecutiveHits,
    totalHits: this.rateLimit.totalHits,
    error: error.slice(0, 200)
  });
  console.log(
    `[rate-limit] Hit rate limit on ${identifier} \u2014 backing off ${backoffSec}s (hit #${this.rateLimit.totalHits})`
  );
  return true;
}
// ── Usage Tracking ──
/**
* Track token usage from a stream-json event (CLI mode) or JSON-RPC message (adapter mode).
* Updates both global and per-agent counters.
*/
trackUsage(identifier, usage) {
const input = usage.input_tokens || 0;
const output = usage.output_tokens || 0;
const cacheCreate = usage.cache_creation_input_tokens || 0;
const cacheRead = usage.cache_read_input_tokens || 0;
this.usage.inputTokens += input;
this.usage.outputTokens += output;
this.usage.cacheCreationTokens += cacheCreate;
this.usage.cacheReadTokens += cacheRead;
this.usage.estimatedMessages = Math.ceil(
(this.usage.inputTokens + this.usage.outputTokens) / 1e4
);
const agent = this.usage.perAgent.get(identifier) || {
inputTokens: 0,
outputTokens: 0
};
agent.inputTokens += input;
agent.outputTokens += output;
this.usage.perAgent.set(identifier, agent);
}
/**
* Scan Claude Code JSONL logs for token usage from conductor-spawned sessions.
* Reads logs from ~/.claude/projects/ matching conductor workspace paths.
*/
async scanUsageLogs() {
const logDirs = [
join(homedir(), ".claude", "projects"),
join(homedir(), ".config", "claude", "projects")
];
for (const logDir of logDirs) {
if (!existsSync(logDir)) continue;
try {
const entries = readdirSync(logDir);
const conductorDirs = entries.filter(
(e) => e.includes("conductor_workspaces") || e.includes("conductor-")
);
for (const dir of conductorDirs) {
const projectDir = join(logDir, dir);
if (!existsSync(projectDir)) continue;
const jsonlFiles = readdirSync(projectDir).filter(
(f) => f.endsWith(".jsonl")
);
for (const file of jsonlFiles) {
await this.parseUsageFromJsonl(join(projectDir, file));
}
}
} catch (err) {
logger.debug("Usage log scan failed", {
error: err.message
});
}
}
return this.usage;
}
async parseUsageFromJsonl(filePath) {
try {
const rl = createInterface({
input: createReadStream(filePath),
crlfDelay: Infinity
});
for await (const line of rl) {
if (!line.trim()) continue;
try {
const entry = JSON.parse(line);
if (entry.type === "assistant" && entry.message?.usage) {
const usage = entry.message.usage;
const identifier = entry.sessionId?.slice(0, 8) || "unknown";
this.trackUsage(identifier, usage);
}
} catch {
}
}
} catch {
}
}
/**
* Get current usage summary with time-to-exhaustion estimate.
*
* Claude Max limits (approximate, shared between claude.ai + Claude Code):
* - Max 5x: ~225 messages per 5h window (~10k tokens per "message")
* - Max 20x: ~900 messages per 5h window
*
* Heavy agent usage burns 5-10x faster than casual chat.
*/
getUsageSummary() {
const totalCache = this.usage.cacheCreationTokens + this.usage.cacheReadTokens;
const cacheHitRate = totalCache > 0 ? this.usage.cacheReadTokens / totalCache : 0;
const uptimeMin = Math.max(1, (Date.now() - this.startedAt) / 6e4);
const totalTokens = this.usage.inputTokens + this.usage.outputTokens;
const tokensPerMin = Math.round(totalTokens / uptimeMin);
const estMessages = this.usage.estimatedMessages;
const MAX_5X_MESSAGES = 225;
const MAX_20X_MESSAGES = 900;
const budgetPct5x = Math.round(estMessages / MAX_5X_MESSAGES * 100);
const budgetPct20x = Math.round(estMessages / MAX_20X_MESSAGES * 100);
const msgsPerMin = tokensPerMin > 0 ? tokensPerMin / 1e4 : 0;
const minutesRemaining5x = msgsPerMin > 0 ? Math.round((MAX_5X_MESSAGES - estMessages) / msgsPerMin) : -1;
const minutesRemaining20x = msgsPerMin > 0 ? Math.round((MAX_20X_MESSAGES - estMessages) / msgsPerMin) : -1;
return {
totalTokens,
inputTokens: this.usage.inputTokens,
outputTokens: this.usage.outputTokens,
estimatedMessages: estMessages,
cacheHitRate: Math.round(cacheHitRate * 100),
tokensPerMin,
budgetPct5x,
budgetPct20x,
minutesRemaining5x: minutesRemaining5x < 0 ? -1 : minutesRemaining5x,
minutesRemaining20x: minutesRemaining20x < 0 ? -1 : minutesRemaining20x,
perAgent: Array.from(this.usage.perAgent.entries()).map(
([id, stats]) => ({
id,
...stats
})
)
};
}
// ── Workspace Management ──
/**
 * Create (or reuse) a git worktree for the issue at
 * <workspaceRoot>/<sanitized-identifier> on branch conductor/<key>.
 *
 * Primary path: fetch origin and create a new branch off
 * origin/<baseBranch>. Fallback: re-attach an already-existing branch
 * (e.g. left over from a previous attempt). Throws only when both fail.
 */
async createWorkspace(issue) {
  const wsKey = this.sanitizeIdentifier(issue.identifier);
  const wsPath = join(this.config.workspaceRoot, wsKey);
  // An existing directory means a prior attempt's worktree: reuse as-is.
  if (existsSync(wsPath)) {
    logger.info("Reusing existing workspace", {
      identifier: issue.identifier,
      path: wsPath
    });
    return wsPath;
  }
  const branchName = `conductor/${wsKey}`;
  try {
    execSync("git fetch origin", {
      cwd: this.config.repoRoot,
      stdio: "pipe",
      timeout: 3e4
    });
    execSync(
      `git worktree add "${wsPath}" -b "${branchName}" "origin/${this.config.baseBranch}"`,
      {
        cwd: this.config.repoRoot,
        stdio: "pipe",
        timeout: 3e4
      }
    );
    logger.info("Created workspace", {
      identifier: issue.identifier,
      path: wsPath,
      branch: branchName
    });
  } catch (err) {
    // -b fails when the branch already exists; try attaching it instead.
    try {
      execSync(`git worktree add "${wsPath}" "${branchName}"`, {
        cwd: this.config.repoRoot,
        stdio: "pipe",
        timeout: 3e4
      });
    } catch {
      // Report the original (primary-path) error — it is the informative one.
      throw new Error(
        `Failed to create workspace for ${issue.identifier}: ${err.message}`
      );
    }
  }
  return wsPath;
}
async removeWorkspace(issue) {
const wsKey = this.sanitizeIdentifier(issue.identifier);
const wsPath = join(this.config.workspaceRoot, wsKey);
if (!existsSync(wsPath)) return;
await this.runHook("before-remove", wsPath, issue).catch(() => {
});
try {
execSync(`git worktree remove "${wsPath}" --force`, {
cwd: this.config.repoRoot,
stdio: "pipe",
timeout: 3e4
});
} catch {
try {
rmSync(wsPath, { recursive: true, force: true });
execSync("git worktree prune", {
cwd: this.config.repoRoot,
stdio: "pipe",
timeout: 1e4
});
} catch {
logger.warn("Failed to clean workspace", {
identifier: issue.identifier,
path: wsPath
});
}
}
}
sanitizeIdentifier(identifier) {
return identifier.replace(/[^A-Za-z0-9._-]/g, "_");
}
/**
* Find the real claude binary, skipping shell wrappers (cmux, claude-smd).
* Wrappers inject --settings with hooks that block headless -p mode.
*/
findClaudeBinary() {
const candidates = [
join(homedir(), ".local", "bin", "claude"),
"/usr/local/bin/claude",
"/opt/homebrew/bin/claude"
];
for (const c of candidates) {
if (existsSync(c)) return c;
}
return "claude";
}
// ── Agent Execution ──
/**
* Build environment variables for an agent process.
* Injects PORTLESS_URL if portless is available, giving each worktree
* a stable named localhost URL for service discovery.
*/
buildAgentEnv(issue, run) {
const env = { ...process.env };
delete env["CLAUDECODE"];
delete env["ANTHROPIC_API_KEY"];
const wsKey = this.sanitizeIdentifier(issue.identifier);
return {
...env,
SYMPHONY_WORKSPACE_DIR: run.workspacePath,
SYMPHONY_ISSUE_ID: issue.id,
SYMPHONY_ISSUE_IDENTIFIER: issue.identifier,
SYMPHONY_ATTEMPT: String(run.attempt),
PORTLESS_URL: `http://${wsKey}.localhost:1355`,
PORTLESS_NAME: wsKey
};
}
async runAgent(issue, run) {
if (this.config.agentMode === "cli") {
try {
return await this.runAgentCLI(issue, run);
} catch (err) {
const msg = err.message || "";
if (this.handleRateLimitError(msg, issue.identifier)) {
throw err;
}
if (msg.includes("usage limits") || msg.includes("overloaded")) {
logger.warn("CLI mode hit limits, falling back to adapter", {
identifier: issue.identifier,
error: msg.slice(0, 200)
});
run.toolCalls = 0;
run.filesModified = 0;
run.tokensUsed = 0;
run.phase = "reading";
return this.runAgentAdapter(issue, run);
}
throw err;
}
}
return this.runAgentAdapter(issue, run);
}
/**
 * CLI mode: spawn `claude -p --output-format stream-json` directly.
 * Uses whatever auth the environment provides (session quota, API key, etc).
 *
 * Parses the newline-delimited JSON event stream from stdout to update
 * `run` counters (toolCalls, filesModified, tokensUsed, phase), track
 * per-agent token usage, and feed a best-effort TraceCollector. Output
 * is also teed into a per-issue log file.
 *
 * Resolves when the process exits with code 0; rejects on spawn error,
 * non-zero exit, or when the turn exceeds `config.turnTimeoutMs`.
 *
 * @param {object} issue - Issue being worked (uses `identifier`).
 * @param {object} run - Mutable run state (process handle, counters, phase, logStream).
 * @returns {Promise<void>}
 */
runAgentCLI(issue, run) {
return new Promise((resolve, reject) => {
const prompt = this.buildPrompt(issue, run.attempt, run.retryAdjustments);
// Model selection: explicit config override wins, otherwise derived from
// the estimated issue complexity.
const complexity = estimateIssueComplexity(issue, run.attempt);
const selectedModel = selectModelForIssue(complexity, this.config);
logger.info("Agent model selected", {
identifier: issue.identifier,
complexity,
model: selectedModel || "(default)",
reason: this.config.model && this.config.model !== "auto" ? "config override" : `auto: ${complexity} issue`
});
// Use the real binary: shell wrappers inject hooks that break headless -p mode.
const claudeBin = this.findClaudeBinary();
const args = [
"-p",
"--bare",
"--output-format",
"stream-json",
"--dangerously-skip-permissions"
];
if (selectedModel) {
args.push("--model", selectedModel);
}
args.push(prompt);
const proc = spawn(claudeBin, args, {
cwd: run.workspacePath,
env: this.buildAgentEnv(issue, run),
stdio: ["pipe", "pipe", "pipe"]
});
run.process = proc;
// No interactive input: the prompt was passed as a CLI argument.
proc.stdin.end();
// Tee stdout into the per-issue log file while parsing it below.
const logStream = this.openAgentLogStream(issue.identifier);
run.logStream = logStream;
const tee = new TeeTransform(logStream);
proc.stdout.pipe(tee);
// Tracing is best-effort; a collector failure must never block the run.
let traceCollector;
try {
traceCollector = new TraceCollector({
issueId: issue.identifier,
attempt: run.attempt
});
} catch {
logger.warn("Failed to initialize trace collector", {
identifier: issue.identifier
});
}
this.writeAgentStatus(issue.identifier, run);
let stderr = "";
let lastResultText = "";
// Hard cap on a single agent turn: kill the process and reject on expiry.
// The later 'close' handler's reject is a no-op once the promise settles.
const timer = setTimeout(() => {
logger.warn("Agent turn timeout (cli)", {
identifier: issue.identifier,
timeoutMs: this.config.turnTimeoutMs
});
proc.kill("SIGTERM");
reject(new Error(`Agent timeout after ${this.config.turnTimeoutMs}ms`));
}, this.config.turnTimeoutMs);
// Accumulate chunks and parse complete NDJSON lines, keeping the partial tail.
let lineBuffer = "";
tee.on("data", (chunk) => {
lineBuffer += chunk.toString();
const lines = lineBuffer.split("\n");
lineBuffer = lines.pop() || "";
for (const line of lines) {
if (!line.trim()) continue;
try {
const event = JSON.parse(line);
const phase = inferPhaseFromStreamJson(event);
if (phase) {
run.phase = phase;
}
if (event.type === "assistant") {
const message = event.message;
const msgUsage = message?.usage;
if (msgUsage) {
// Real token usage as reported in the stream event.
this.trackUsage(issue.identifier, msgUsage);
}
const content = message?.content || [];
const turnToolNames = [];
let turnFilesModified = 0;
const turnTextParts = [];
for (const block of content) {
if (block.type === "tool_use") {
run.toolCalls++;
const name = block.name || "";
turnToolNames.push(name);
const toolLower = name.toLowerCase();
// Heuristic: tool names containing edit/write count as file modifications.
if (toolLower.includes("edit") || toolLower.includes("write")) {
run.filesModified++;
turnFilesModified++;
}
}
if (block.type === "text" && block.text) {
const text = block.text;
// Rough estimate: ~4 characters per token.
run.tokensUsed += Math.ceil(text.length / 4);
turnTextParts.push(text);
}
}
try {
if (traceCollector) {
const turnData = {
toolNames: turnToolNames,
toolCount: turnToolNames.length,
filesModified: turnFilesModified,
textPreview: turnTextParts.length > 0 ? turnTextParts.join("\n").slice(0, 500) : null,
inputTokens: msgUsage?.input_tokens ?? 0,
outputTokens: msgUsage?.output_tokens ?? 0,
cacheCreationTokens: msgUsage?.cache_creation_input_tokens ?? 0,
cacheReadTokens: msgUsage?.cache_read_input_tokens ?? 0
};
traceCollector.recordTurn(
turnData,
phase,
stringifyEventTruncated(event)
);
}
} catch {
// Trace recording is best-effort; swallow failures.
}
}
if (event.type === "result" && event.result) {
lastResultText = typeof event.result === "string" ? event.result : JSON.stringify(event.result);
try {
traceCollector?.recordResult(event);
} catch {
// Best-effort trace; ignore recording errors.
}
}
// Throttle status-file writes: every 5th tool call or on any phase signal.
if (run.toolCalls % 5 === 0 || phase) {
this.writeAgentStatus(issue.identifier, run);
}
} catch {
// Line was not a valid JSON event — skip it.
}
}
});
proc.stderr.on("data", (data) => {
// Keep full stderr for the failure message; mirror lines to debug log.
stderr += data.toString();
const lines = data.toString().split("\n").filter((l) => l.trim());
for (const line of lines) {
logger.debug(`[${issue.identifier}] ${line}`);
}
});
proc.on("error", (err) => {
clearTimeout(timer);
traceCollector?.close();
reject(new Error(`Failed to spawn claude: ${err.message}`));
});
proc.on("close", (code) => {
// Final cleanup: stop the timer, flush trace + log, persist status.
clearTimeout(timer);
traceCollector?.close();
run.process = null;
if (run.logStream && !run.logStream.destroyed) {
run.logStream.end();
}
this.writeAgentStatus(issue.identifier, run);
if (code === 0) {
logger.info("Agent completed (cli)", {
identifier: issue.identifier,
toolCalls: run.toolCalls,
resultLength: lastResultText.length
});
resolve();
} else {
reject(
new Error(
`Claude exited with code ${code}: ${stderr.slice(0, 500)}`
)
);
}
});
});
}
/**
* Adapter mode: spawn claude-app-server.cjs via JSON-RPC protocol.
* Uses ANTHROPIC_API_KEY for auth.
*/
runAgentAdapter(issue, run) {
return new Promise((resolve, reject) => {
const prompt = this.buildPrompt(issue, run.attempt, run.retryAdjustments);
const proc = spawn("node", [this.config.appServerPath], {
cwd: run.workspacePath,
env: this.buildAgentEnv(issue, run),
stdio: ["pipe", "pipe", "pipe"]
});
run.process = proc;
const logStream = this.openAgentLogStream(issue.identifier);
run.logStream = logStream;
const tee = new TeeTransform(logStream);
proc.stdout.pipe(tee);
this.writeAgentStatus(issue.identifier, run);
let stderr = "";
let turnCompleted = false;
const timer = setTimeout(() => {
if (!turnCompleted) {
logger.warn("Agent turn timeout", {
identifier: issue.identifier,
timeoutMs: this.config.turnTimeoutMs
});
proc.kill("SIGTERM");
reject(
new Error(`Agent timeout after ${this.config.turnTimeoutMs}ms`)
);
}
}, this.config.turnTimeoutMs);
const send = (msg) => {
proc.stdin.write(JSON.stringify(msg) + "\n");
};
let lineBuffer = "";
tee.on("data", (chunk) => {
lineBuffer += chunk.toString();
const lines = lineBuffer.split("\n");
lineBuffer = lines.pop() || "";
for (const line of lines) {
if (!line.trim()) continue;
try {
const msg = JSON.parse(line);
this.handleAgentMessage(msg, issue, run);
const phase = inferPhase(msg);
if (phase) {
run.phase = phase;
}
if (msg.method === "item/commandExecution/started" || msg.method === "item/toolUse/started") {
run.toolCalls++;
}
const params = msg.params;
if (msg.method === "item/commandExecution/started" && params) {
const tool = (params.tool || params.name || "").toLowerCase();
if (tool.includes("edit") || tool.includes("write")) {
run.filesModified++;
}
}
if (msg.method === "item/text" && params?.text) {
run.tokensUsed += Math.ceil(params.text.length / 4);
}
if (run.toolCalls % 5 === 0 || phase) {
this.writeAgentStatus(issue.identifier, run);
}
if (msg.method === "turn/completed") {
turnCompleted = true;
this.writeAgentStatus(issue.identifier, run);
}
if (msg.method === "turn/failed") {
turnCompleted = true;
this.writeAgentStatus(issue.identifier, run);
const errMsg = msg.params?.error?.message || "Agent turn failed";
clearTimeout(timer);
reject(new Error(errMsg));
return;
}
} catch {
}
}
});
proc.stderr.on("data", (data) => {
stderr += dat