vibe-coder-mcp
Version:
Production-ready MCP server with complete agent integration, multi-transport support, and comprehensive development automation tools for AI-assisted workflows.
import { EventEmitter } from 'events';
import { WorkflowStateManager, WorkflowPhase } from './workflow-state-manager.js';
import { DecompositionService } from './decomposition-service.js';
import { createErrorContext, ValidationError } from '../utils/enhanced-errors.js';
import { InitializationMonitor } from '../../../utils/initialization-monitor.js';
import logger from '../../../logger.js';
const DEFAULT_WORKFLOW_TIMEOUT_CONFIG = {
baseHeartbeatInterval: 30000,
activityTimeoutMultipliers: {
idle: 2,
decomposition: 20,
orchestration: 10,
task_execution: 6,
research: 15,
context_enrichment: 8,
dependency_analysis: 12
},
maxGracePeriods: 3,
gracePeriodDuration: 60000,
progressUpdateInterval: 120000,
workflowCriticalExtension: 300000,
enableAdaptiveTimeouts: true
};
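// Worked example (derived from the defaults above, not additional configuration):
// effective timeout for an activity = baseHeartbeatInterval * activityTimeoutMultipliers[activity].
//   idle:           30000 * 2  =  60000 ms  (1 minute)
//   task_execution: 30000 * 6  = 180000 ms  (3 minutes)
//   decomposition:  30000 * 20 = 600000 ms (10 minutes)
// An agent is only marked offline after this timeout plus all grace periods have elapsed
// (see shouldMarkAgentOffline below).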
export class WorkflowAwareAgentManager extends EventEmitter {
static instance = null;
config;
agentStates = new Map();
agentOrchestrator = null;
workflowStateManager = null;
decompositionService = null;
monitoringInterval = null;
isMonitoring = false;
startTime = Date.now();
constructor(config = {}) {
super();
this.config = { ...DEFAULT_WORKFLOW_TIMEOUT_CONFIG, ...config };
try {
this.workflowStateManager = WorkflowStateManager.getInstance();
}
catch (error) {
logger.warn({ err: error }, 'WorkflowStateManager getInstance not available, using null fallback');
this.workflowStateManager = null;
}
this.scheduleAsyncInitialization();
logger.info({ config: this.config }, 'Workflow-aware agent manager initialized');
}
scheduleAsyncInitialization() {
process.nextTick(() => {
this.initializeDecompositionService().then(() => {
this.setupEventListeners();
}).catch(error => {
logger.warn({ err: error }, 'DecompositionService initialization failed, setting up event listeners with fallback');
this.setupEventListeners();
});
});
}
async initializeDecompositionService() {
try {
const { getVibeTaskManagerConfig } = await import('../utils/config-loader.js');
const config = await getVibeTaskManagerConfig();
if (!config) {
throw new Error('Failed to load task manager configuration');
}
const openRouterConfig = {
baseUrl: 'https://openrouter.ai/api/v1',
apiKey: process.env.OPENROUTER_API_KEY || '',
model: 'anthropic/claude-3-sonnet',
geminiModel: 'gemini-pro',
perplexityModel: 'llama-3.1-sonar-small-128k-online'
};
this.decompositionService = DecompositionService.getInstance(openRouterConfig);
}
catch (error) {
logger.warn({ err: error }, 'DecompositionService initialization failed, using fallback');
this.decompositionService = null;
}
}
async getAgentOrchestrator() {
if (!this.agentOrchestrator) {
const { AgentOrchestrator } = await import('./agent-orchestrator.js');
this.agentOrchestrator = AgentOrchestrator.getInstance();
}
return this.agentOrchestrator;
}
static getInstance(config) {
if (!WorkflowAwareAgentManager.instance) {
const monitor = InitializationMonitor.getInstance();
monitor.startServiceInitialization('WorkflowAwareAgentManager', [
'WorkflowStateManager',
'DecompositionService'
], { config });
try {
monitor.startPhase('WorkflowAwareAgentManager', 'constructor');
WorkflowAwareAgentManager.instance = new WorkflowAwareAgentManager(config);
monitor.endPhase('WorkflowAwareAgentManager', 'constructor');
monitor.endServiceInitialization('WorkflowAwareAgentManager');
}
catch (error) {
monitor.endPhase('WorkflowAwareAgentManager', 'constructor', error);
monitor.endServiceInitialization('WorkflowAwareAgentManager', error);
throw error;
}
}
return WorkflowAwareAgentManager.instance;
}
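// Usage sketch (assumed caller, not part of this module): obtain the process-wide singleton,
// optionally overriding defaults from DEFAULT_WORKFLOW_TIMEOUT_CONFIG on first construction,
// then start monitoring. The 45000 value is illustrative only.
//
//   const manager = WorkflowAwareAgentManager.getInstance({ baseHeartbeatInterval: 45000 });
//   await manager.startMonitoring();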
async startMonitoring() {
if (this.isMonitoring) {
logger.warn('Workflow-aware agent monitoring already active');
return;
}
try {
this.isMonitoring = true;
this.startTime = Date.now();
this.monitoringInterval = setInterval(() => {
this.performWorkflowAwareHealthCheck().catch(error => {
logger.error({ err: error }, 'Error in workflow-aware health check');
});
}, this.config.baseHeartbeatInterval);
logger.info({
interval: this.config.baseHeartbeatInterval,
enableAdaptiveTimeouts: this.config.enableAdaptiveTimeouts
}, 'Workflow-aware agent monitoring started');
}
catch (error) {
this.isMonitoring = false;
const context = createErrorContext('WorkflowAwareAgentManager', 'startMonitoring')
.metadata({ config: this.config })
.build();
logger.error({ err: error, context }, 'Failed to start workflow-aware monitoring');
throw new ValidationError('Failed to start workflow-aware monitoring', context);
}
}
async stopMonitoring() {
if (!this.isMonitoring) {
return;
}
try {
this.isMonitoring = false;
if (this.monitoringInterval) {
clearInterval(this.monitoringInterval);
this.monitoringInterval = null;
}
logger.info('Workflow-aware agent monitoring stopped');
}
catch (error) {
logger.error({ err: error }, 'Error stopping workflow-aware monitoring');
}
}
async registerAgentActivity(agentId, activity, options = {}) {
const now = new Date();
const agentState = {
agentId,
currentActivity: activity,
activityStartTime: now,
lastHeartbeat: now,
lastProgressUpdate: now,
workflowId: options.workflowId,
sessionId: options.sessionId,
expectedDuration: options.expectedDuration,
progressPercentage: 0,
isWorkflowCritical: options.isWorkflowCritical || false,
gracePeriodCount: 0,
metadata: {
...options.metadata,
lastActivityUpdate: now
}
};
if (activity !== 'idle' && options.isWorkflowCritical) {
const baseTimeout = this.calculateActivityTimeout(activity);
agentState.extendedTimeoutUntil = new Date(now.getTime() + baseTimeout + this.config.workflowCriticalExtension);
}
this.agentStates.set(agentId, agentState);
logger.info({
agentId,
activity,
workflowId: options.workflowId,
sessionId: options.sessionId,
isWorkflowCritical: options.isWorkflowCritical,
extendedTimeoutUntil: agentState.extendedTimeoutUntil
}, 'Agent activity registered');
this.emit('agent_activity_changed', {
agentId,
activity,
timestamp: now,
metadata: agentState.metadata
});
}
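// Usage sketch for registerAgentActivity above (identifiers are illustrative): registering a
// long-running, workflow-critical activity makes the health check apply the extended timeout.
//
//   await manager.registerAgentActivity('agent-1', 'decomposition', {
//     workflowId: 'wf-123',
//     sessionId: 'session-abc',
//     isWorkflowCritical: true,
//     expectedDuration: 10 * 60 * 1000
//   });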
async updateAgentProgress(agentId, progressPercentage, metadata) {
const agentState = this.agentStates.get(agentId);
if (!agentState) {
logger.warn({ agentId }, 'Cannot update progress for unregistered agent');
return;
}
const now = new Date();
agentState.progressPercentage = Math.max(0, Math.min(100, progressPercentage));
agentState.lastProgressUpdate = now;
agentState.lastHeartbeat = now;
if (metadata) {
agentState.metadata = { ...agentState.metadata, ...metadata, lastActivityUpdate: now };
}
agentState.gracePeriodCount = 0;
if (this.config.enableAdaptiveTimeouts && agentState.expectedDuration) {
this.adjustTimeoutBasedOnProgress(agentState);
}
logger.debug({
agentId,
progressPercentage,
activity: agentState.currentActivity,
workflowId: agentState.workflowId
}, 'Agent progress updated');
this.emit('agent_progress_updated', {
agentId,
progressPercentage,
activity: agentState.currentActivity,
timestamp: now,
metadata: agentState.metadata
});
this.getAgentOrchestrator().then(orchestrator => {
if (orchestrator && typeof orchestrator.updateAgentHeartbeat === 'function') {
orchestrator.updateAgentHeartbeat(agentId, 'available');
}
}).catch(error => {
logger.warn({ err: error, agentId }, 'Failed to update agent heartbeat');
});
}
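// Usage sketch for updateAgentProgress above: periodic progress reports refresh the heartbeat,
// reset the grace-period counter, and (with adaptive timeouts enabled) extend the deadline.
// The agent id, percentage, and metadata key are illustrative.
//
//   await manager.updateAgentProgress('agent-1', 40, { currentStep: 'epic_generation' });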
async completeAgentActivity(agentId, success = true, metadata) {
const agentState = this.agentStates.get(agentId);
if (!agentState) {
logger.warn({ agentId }, 'Cannot complete activity for unregistered agent');
return;
}
const now = new Date();
const duration = now.getTime() - agentState.activityStartTime.getTime();
logger.info({
agentId,
activity: agentState.currentActivity,
duration: Math.round(duration / 1000),
success,
workflowId: agentState.workflowId
}, 'Agent activity completed');
this.emit('agent_activity_completed', {
agentId,
activity: agentState.currentActivity,
duration,
success,
timestamp: now,
metadata: { ...agentState.metadata, ...metadata }
});
await this.registerAgentActivity(agentId, 'idle', {
workflowId: agentState.workflowId,
sessionId: agentState.sessionId
});
}
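// Usage sketch for completeAgentActivity above: completion emits 'agent_activity_completed'
// and then re-registers the agent as 'idle' on the same workflow/session. Values are illustrative.
//
//   await manager.completeAgentActivity('agent-1', true, { result: 'tasks persisted' });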
getAgentState(agentId) {
return this.agentStates.get(agentId);
}
getAllAgentStates() {
return Array.from(this.agentStates.values());
}
getWorkflowAwareStats() {
const states = Array.from(this.agentStates.values());
const agentsByActivity = {
idle: 0,
decomposition: 0,
orchestration: 0,
task_execution: 0,
research: 0,
context_enrichment: 0,
dependency_analysis: 0
};
let criticalAgents = 0;
let agentsInGracePeriod = 0;
let totalProgress = 0;
const activeWorkflows = new Set();
for (const state of states) {
agentsByActivity[state.currentActivity]++;
if (state.isWorkflowCritical)
criticalAgents++;
if (state.gracePeriodCount > 0)
agentsInGracePeriod++;
if (state.workflowId)
activeWorkflows.add(state.workflowId);
totalProgress += state.progressPercentage;
}
return {
totalAgents: states.length,
activeWorkflows: activeWorkflows.size,
agentsByActivity,
criticalAgents,
agentsInGracePeriod,
averageProgress: states.length > 0 ? totalProgress / states.length : 0
};
}
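// Shape of the object returned by getWorkflowAwareStats above (values are illustrative):
//
//   {
//     totalAgents: 3,
//     activeWorkflows: 1,
//     agentsByActivity: { idle: 1, decomposition: 1, orchestration: 0, task_execution: 1,
//                         research: 0, context_enrichment: 0, dependency_analysis: 0 },
//     criticalAgents: 1,
//     agentsInGracePeriod: 0,
//     averageProgress: 45
//   }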
setupEventListeners() {
try {
const workflowStateManagerAny = this.workflowStateManager;
if (workflowStateManagerAny && typeof workflowStateManagerAny.on === 'function') {
workflowStateManagerAny.on('workflow_phase_changed', (data) => {
this.handleWorkflowPhaseChange(data).catch(error => {
logger.error({ err: error, data }, 'Error handling workflow phase change');
});
});
workflowStateManagerAny.on('workflow_progress_updated', (data) => {
this.handleWorkflowProgressUpdate(data).catch(error => {
logger.error({ err: error, data }, 'Error handling workflow progress update');
});
});
}
else {
logger.debug('WorkflowStateManager does not support event listeners, using fallback mode');
}
}
catch (error) {
logger.warn({ err: error }, 'Failed to setup workflow state manager event listeners');
}
try {
const decompositionServiceAny = this.decompositionService;
if (decompositionServiceAny && typeof decompositionServiceAny.on === 'function') {
decompositionServiceAny.on('decomposition_started', (data) => {
this.handleDecompositionStarted(data).catch(error => {
logger.error({ err: error, data }, 'Error handling decomposition started');
});
});
decompositionServiceAny.on('decomposition_progress', (data) => {
this.handleDecompositionProgress(data).catch(error => {
logger.error({ err: error, data }, 'Error handling decomposition progress');
});
});
decompositionServiceAny.on('decomposition_completed', (data) => {
this.handleDecompositionCompleted(data).catch(error => {
logger.error({ err: error, data }, 'Error handling decomposition completed');
});
});
decompositionServiceAny.on('decomposition_failed', (data) => {
this.handleDecompositionFailed(data).catch(error => {
logger.error({ err: error, data }, 'Error handling decomposition failed');
});
});
decompositionServiceAny.on('epic_generation_started', (data) => {
this.handleEpicGenerationStarted(data).catch(error => {
logger.error({ err: error, data }, 'Error handling epic generation started');
});
});
decompositionServiceAny.on('epic_generation_completed', (data) => {
this.handleEpicGenerationCompleted(data).catch(error => {
logger.error({ err: error, data }, 'Error handling epic generation completed');
});
});
decompositionServiceAny.on('task_list_started', (data) => {
this.handleTaskListStarted(data).catch(error => {
logger.error({ err: error, data }, 'Error handling task list started');
});
});
decompositionServiceAny.on('task_list_completed', (data) => {
this.handleTaskListCompleted(data).catch(error => {
logger.error({ err: error, data }, 'Error handling task list completed');
});
});
}
else {
logger.debug('DecompositionService does not support event listeners, using fallback mode');
}
}
catch (error) {
logger.warn({ err: error }, 'Failed to setup decomposition service event listeners');
}
logger.debug('Event listeners setup for workflow-aware agent management');
}
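// Consumer sketch (assumed caller): the manager is itself an EventEmitter, so dashboards or
// orchestrators can subscribe to the events emitted in this class, e.g. 'agent_timeout',
// 'agent_grace_period', 'agent_progress_reminder', and 'agent_activity_changed'.
//
//   manager.on('agent_timeout', ({ agentId, activity, workflowId }) => {
//     // reassign the agent's in-flight work; fields are destructured from the emitted payload
//   });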
async performWorkflowAwareHealthCheck() {
const now = new Date();
for (const [agentId, agentState] of this.agentStates.entries()) {
try {
const shouldMarkOffline = await this.shouldMarkAgentOffline(agentState, now);
if (shouldMarkOffline) {
await this.handleAgentTimeout(agentState, now);
}
else {
const timeSinceProgress = now.getTime() - agentState.lastProgressUpdate.getTime();
if (timeSinceProgress > this.config.progressUpdateInterval && agentState.currentActivity !== 'idle') {
this.emit('agent_progress_reminder', {
agentId,
activity: agentState.currentActivity,
timeSinceProgress,
timestamp: now
});
}
}
}
catch (error) {
logger.error({ err: error, agentId }, 'Error in workflow-aware health check for agent');
}
}
}
async shouldMarkAgentOffline(agentState, now) {
const timeSinceHeartbeat = now.getTime() - agentState.lastHeartbeat.getTime();
const activityTimeout = this.calculateActivityTimeout(agentState.currentActivity);
if (agentState.extendedTimeoutUntil && now < agentState.extendedTimeoutUntil) {
return false;
}
if (agentState.gracePeriodCount < this.config.maxGracePeriods) {
if (timeSinceHeartbeat > activityTimeout) {
agentState.gracePeriodCount++;
const gracePeriodEnd = new Date(now.getTime() + this.config.gracePeriodDuration);
logger.warn({
agentId: agentState.agentId,
activity: agentState.currentActivity,
gracePeriod: agentState.gracePeriodCount,
maxGracePeriods: this.config.maxGracePeriods,
gracePeriodEnd
}, 'Agent entered grace period');
this.emit('agent_grace_period', {
agentId: agentState.agentId,
gracePeriod: agentState.gracePeriodCount,
gracePeriodEnd,
timestamp: now
});
return false;
}
}
return timeSinceHeartbeat > activityTimeout + (this.config.gracePeriodDuration * this.config.maxGracePeriods);
}
calculateActivityTimeout(activity) {
const multiplier = this.config.activityTimeoutMultipliers[activity] || 2;
return this.config.baseHeartbeatInterval * multiplier;
}
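// Worked example for calculateActivityTimeout and shouldMarkAgentOffline above, using the
// default configuration: an agent doing 'decomposition' gets 30000 * 20 = 600000 ms (10 minutes)
// before its first missed heartbeat triggers a grace period. With maxGracePeriods = 3 and
// gracePeriodDuration = 60000 ms, the hard offline threshold is
// 600000 + (60000 * 3) = 780000 ms (13 minutes) since the last heartbeat, unless an
// extendedTimeoutUntil deadline is still in the future.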
adjustTimeoutBasedOnProgress(agentState) {
if (!agentState.expectedDuration || agentState.progressPercentage === 0) {
return;
}
const progressRatio = agentState.progressPercentage / 100;
const elapsedTime = Date.now() - agentState.activityStartTime.getTime();
const estimatedTotalTime = elapsedTime / progressRatio;
const estimatedRemainingTime = estimatedTotalTime - elapsedTime;
if (progressRatio > 0.1 && estimatedRemainingTime > 0) {
const bufferTime = estimatedRemainingTime * 0.5;
agentState.extendedTimeoutUntil = new Date(Date.now() + estimatedRemainingTime + bufferTime);
logger.debug({
agentId: agentState.agentId,
progressRatio,
estimatedRemainingTime,
extendedTimeoutUntil: agentState.extendedTimeoutUntil
}, 'Adjusted timeout based on progress');
}
}
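// Worked example for adjustTimeoutBasedOnProgress above: if an agent reports 25% progress after
// 2 minutes, estimatedTotalTime = 120000 / 0.25 = 480000 ms, estimatedRemainingTime = 360000 ms,
// and the buffer is 180000 ms, so extendedTimeoutUntil is pushed roughly 9 minutes past "now".
// Progress at or below 10% never extends the deadline.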
async handleAgentTimeout(agentState, now) {
logger.warn({
agentId: agentState.agentId,
activity: agentState.currentActivity,
workflowId: agentState.workflowId,
gracePeriodCount: agentState.gracePeriodCount,
isWorkflowCritical: agentState.isWorkflowCritical
}, 'Agent timeout detected - marking offline');
this.emit('agent_timeout', {
agentId: agentState.agentId,
activity: agentState.currentActivity,
workflowId: agentState.workflowId,
gracePeriodCount: agentState.gracePeriodCount,
timestamp: now
});
this.getAgentOrchestrator().then(orchestrator => {
if (orchestrator && typeof orchestrator.updateAgentHeartbeat === 'function') {
orchestrator.updateAgentHeartbeat(agentState.agentId, 'offline');
}
}).catch(error => {
logger.warn({ err: error, agentId: agentState.agentId }, 'Failed to update agent heartbeat to offline');
});
this.agentStates.delete(agentState.agentId);
}
async handleWorkflowPhaseChange(data) {
const { workflowId, sessionId, fromPhase, toPhase, agentId } = data;
if (!agentId || typeof agentId !== 'string')
return;
const agentState = this.agentStates.get(agentId);
if (!agentState)
return;
let newActivity = 'idle';
let isWorkflowCritical = false;
switch (toPhase) {
case WorkflowPhase.DECOMPOSITION:
newActivity = 'decomposition';
isWorkflowCritical = true;
break;
case WorkflowPhase.ORCHESTRATION:
newActivity = 'orchestration';
isWorkflowCritical = true;
break;
case WorkflowPhase.EXECUTION:
newActivity = 'task_execution';
isWorkflowCritical = false;
break;
default:
newActivity = 'idle';
isWorkflowCritical = false;
}
await this.registerAgentActivity(agentId, newActivity, {
workflowId: workflowId,
sessionId: sessionId,
isWorkflowCritical,
metadata: {
workflowPhase: toPhase,
previousPhase: fromPhase
}
});
}
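// Illustrative payload for the 'workflow_phase_changed' event handled above (identifiers are
// assumptions; only phases referenced in the switch are shown):
//
//   { workflowId: 'wf-123', sessionId: 'session-abc', toPhase: WorkflowPhase.EXECUTION, agentId: 'agent-1' }
//
// This re-registers 'agent-1' as a non-critical 'task_execution' activity; transitions into
// DECOMPOSITION or ORCHESTRATION are treated as workflow-critical.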
async handleWorkflowProgressUpdate(data) {
const { workflowId, sessionId, progress, agentId } = data;
if (!agentId || typeof agentId !== 'string' || typeof progress !== 'number')
return;
await this.updateAgentProgress(agentId, progress, {
workflowId: workflowId,
sessionId: sessionId,
lastWorkflowUpdate: new Date()
});
}
async handleDecompositionStarted(data) {
const { sessionId, agentId, taskId, projectId, originalSessionId, jobId } = data;
if (!agentId || typeof agentId !== 'string')
return;
await this.registerAgentActivity(agentId, 'decomposition', {
sessionId: sessionId,
workflowId: sessionId,
isWorkflowCritical: true,
expectedDuration: 10 * 60 * 1000,
metadata: {
taskId,
projectId,
decompositionStarted: new Date()
}
});
if (originalSessionId && jobId) {
const bridgeStartTime = Date.now();
try {
const { sseNotifier } = await import('../../../services/sse-notifier/index.js');
const { JobStatus } = await import('../../../services/job-manager/index.js');
sseNotifier.sendProgress(originalSessionId, jobId, JobStatus.RUNNING, 'Decomposition started - analyzing task complexity', 0);
const bridgeExecutionTime = Date.now() - bridgeStartTime;
logger.info({
originalSessionId,
jobId,
step: 'decomposition_started',
bridgeExecutionTime,
performance: {
bridgeLatency: bridgeExecutionTime,
timestamp: new Date().toISOString()
}
}, 'Progress bridge: decomposition started forwarded to stdio client');
}
catch (error) {
const bridgeExecutionTime = Date.now() - bridgeStartTime;
logger.error({
err: error,
originalSessionId,
jobId,
bridgeExecutionTime,
errorType: error instanceof Error ? error.constructor.name : 'Unknown',
errorMessage: error instanceof Error ? error.message : String(error)
}, 'Progress bridge: failed to send decomposition started to stdio client');
}
}
}
async handleDecompositionProgress(data) {
const { sessionId, agentId, progress, originalSessionId, jobId, step } = data;
if (!agentId || typeof agentId !== 'string' || typeof progress !== 'number')
return;
await this.updateAgentProgress(agentId, progress, {
sessionId: sessionId,
lastDecompositionUpdate: new Date()
});
if (originalSessionId && jobId) {
const bridgeStartTime = Date.now();
try {
const { sseNotifier } = await import('../../../services/sse-notifier/index.js');
const { JobStatus } = await import('../../../services/job-manager/index.js');
let progressMessage = `Decomposition ${progress}% complete`;
const metadata = data.metadata || {};
if (step) {
switch (step) {
case 'decomposition_started':
progressMessage = 'Decomposition started - analyzing task complexity';
break;
case 'context_enrichment_completed':
progressMessage = 'Context analysis complete - beginning decomposition';
break;
case 'epic_generation_started':
progressMessage = `Epic identification started - analyzing ${metadata.taskCount || 0} tasks`;
break;
case 'epic_generation_completed':
progressMessage = `Epic identification completed - functional areas identified`;
break;
case 'task_persistence_started':
progressMessage = `Task persistence started - saving ${metadata.taskCount || 0} tasks`;
break;
case 'task_persisted':
progressMessage = metadata.message || `Task ${metadata.persistedCount}/${metadata.totalTasks} persisted`;
break;
case 'dependency_analysis_started':
progressMessage = `Dependency analysis started - mapping task relationships`;
break;
case 'dependency_analysis_completed':
progressMessage = 'Dependency analysis completed - task graph generated';
break;
case 'decomposition_completed':
progressMessage = `Decomposition completed - ${metadata.persistedTasks || 0} tasks ready`;
break;
case 'task_processing':
progressMessage = `Processing ${metadata.currentTaskTitle || 'task'} - ${metadata.processedTasks}/${metadata.totalTasks}`;
break;
default:
progressMessage = `Decomposition ${progress}% complete - ${step}`;
}
}
sseNotifier.sendProgress(originalSessionId, jobId, JobStatus.RUNNING, progressMessage, progress);
const bridgeExecutionTime = Date.now() - bridgeStartTime;
logger.info({
originalSessionId,
jobId,
progress,
step,
progressMessage,
bridgeExecutionTime,
performance: {
bridgeLatency: bridgeExecutionTime,
progressValue: progress,
timestamp: new Date().toISOString()
}
}, 'Progress bridge: decomposition progress forwarded to stdio client');
}
catch (error) {
const bridgeExecutionTime = Date.now() - bridgeStartTime;
logger.error({
err: error,
originalSessionId,
jobId,
progress,
step,
bridgeExecutionTime,
errorType: error instanceof Error ? error.constructor.name : 'Unknown',
errorMessage: error instanceof Error ? error.message : String(error)
}, 'Progress bridge: failed to send decomposition progress to stdio client');
}
}
}
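// Illustrative 'decomposition_progress' event handled above (identifier values are assumptions):
//
//   {
//     sessionId: 'session-abc',
//     agentId: 'agent-1',
//     progress: 60,
//     originalSessionId: 'stdio-session-1',
//     jobId: 'job-42',
//     step: 'task_persistence_started',
//     metadata: { taskCount: 12 }
//   }
//
// would forward "Task persistence started - saving 12 tasks" to the stdio client at 60%.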
async handleDecompositionCompleted(data) {
const { sessionId, agentId, status, originalSessionId, jobId, totalTasks, persistedTasks } = data;
const success = status === 'completed';
if (!agentId || typeof agentId !== 'string')
return;
await this.completeAgentActivity(agentId, success, {
sessionId: sessionId,
decompositionCompleted: new Date()
});
if (originalSessionId && jobId) {
try {
const { sseNotifier } = await import('../../../services/sse-notifier/index.js');
const { JobStatus } = await import('../../../services/job-manager/index.js');
const taskCount = persistedTasks || totalTasks || 0;
const completionMessage = success
? `Decomposition completed successfully - ${taskCount} tasks generated`
: 'Decomposition completed with issues - check results for details';
sseNotifier.sendProgress(originalSessionId, jobId, success ? JobStatus.COMPLETED : JobStatus.FAILED, completionMessage, 100);
logger.debug({
originalSessionId,
jobId,
success,
taskCount,
completionMessage
}, 'Progress bridge: forwarded decomposition completion to stdio client');
}
catch (error) {
logger.warn({
err: error,
originalSessionId,
jobId,
success
}, 'Failed to send decomposition completion to stdio client');
}
}
}
async handleDecompositionFailed(data) {
const { sessionId, agentId, error, originalSessionId, jobId } = data;
if (!agentId || typeof agentId !== 'string')
return;
await this.completeAgentActivity(agentId, false, {
sessionId: sessionId,
decompositionFailed: new Date(),
error: error
});
if (originalSessionId && jobId) {
try {
const { sseNotifier } = await import('../../../services/sse-notifier/index.js');
const { JobStatus } = await import('../../../services/job-manager/index.js');
const errorMessage = error && typeof error === 'object' && 'message' in error
? `Decomposition failed: ${error.message}`
: 'Decomposition failed - check logs for details';
sseNotifier.sendProgress(originalSessionId, jobId, JobStatus.FAILED, errorMessage, 0);
logger.debug({
originalSessionId,
jobId,
error
}, 'Progress bridge: forwarded decomposition failure to stdio client');
}
catch (bridgeError) {
logger.warn({
err: bridgeError,
originalSessionId,
jobId,
error
}, 'Failed to send decomposition failure to stdio client');
}
}
}
async handleEpicGenerationStarted(data) {
const { agentId, originalSessionId, jobId, metadata } = data;
if (!agentId || typeof agentId !== 'string')
return;
if (originalSessionId && jobId) {
try {
const { sseNotifier } = await import('../../../services/sse-notifier/index.js');
const { JobStatus } = await import('../../../services/job-manager/index.js');
const taskCount = metadata && typeof metadata === 'object' && 'taskCount' in metadata
? metadata.taskCount
: 'multiple';
sseNotifier.sendProgress(originalSessionId, jobId, JobStatus.RUNNING, `Epic generation started - organizing ${taskCount} tasks`, 82);
logger.debug({
originalSessionId,
jobId,
taskCount
}, 'Progress bridge: forwarded epic generation started to stdio client');
}
catch (error) {
logger.warn({
err: error,
originalSessionId,
jobId
}, 'Failed to send epic generation started to stdio client');
}
}
}
async handleEpicGenerationCompleted(data) {
const { agentId, status, originalSessionId, jobId, metadata } = data;
if (!agentId || typeof agentId !== 'string')
return;
const success = status === 'completed';
if (originalSessionId && jobId) {
try {
const { sseNotifier } = await import('../../../services/sse-notifier/index.js');
const { JobStatus } = await import('../../../services/job-manager/index.js');
const taskCount = metadata && typeof metadata === 'object' && 'taskCount' in metadata
? metadata.taskCount
: 'multiple';
const completionMessage = success
? `Epic generation completed - ${taskCount} tasks organized into epics`
: 'Epic generation completed with issues - tasks may use default epic';
sseNotifier.sendProgress(originalSessionId, jobId, JobStatus.RUNNING, completionMessage, 85);
logger.debug({
originalSessionId,
jobId,
success,
taskCount,
completionMessage
}, 'Progress bridge: forwarded epic generation completion to stdio client');
}
catch (error) {
logger.warn({
err: error,
originalSessionId,
jobId,
success
}, 'Failed to send epic generation completion to stdio client');
}
}
}
async handleTaskListStarted(data) {
const { agentId, originalSessionId, jobId } = data;
if (!agentId || typeof agentId !== 'string')
return;
if (originalSessionId && jobId) {
try {
const { sseNotifier } = await import('../../../services/sse-notifier/index.js');
const { JobStatus } = await import('../../../services/job-manager/index.js');
sseNotifier.sendProgress(originalSessionId, jobId, JobStatus.RUNNING, 'Task list processing started - persisting tasks to storage', 86);
logger.debug({
originalSessionId,
jobId
}, 'Progress bridge: forwarded task list started to stdio client');
}
catch (error) {
logger.warn({
err: error,
originalSessionId,
jobId
}, 'Failed to send task list started to stdio client');
}
}
}
async handleTaskListCompleted(data) {
const { agentId, originalSessionId, jobId, totalTasks, persistedTasks } = data;
if (!agentId || typeof agentId !== 'string')
return;
if (originalSessionId && jobId) {
try {
const { sseNotifier } = await import('../../../services/sse-notifier/index.js');
const { JobStatus } = await import('../../../services/job-manager/index.js');
const taskCount = persistedTasks || totalTasks || 0;
const completionMessage = `Task list processing completed - ${taskCount} tasks persisted to storage`;
sseNotifier.sendProgress(originalSessionId, jobId, JobStatus.RUNNING, completionMessage, 90);
logger.debug({
originalSessionId,
jobId,
taskCount,
completionMessage
}, 'Progress bridge: forwarded task list completion to stdio client');
}
catch (error) {
logger.warn({
err: error,
originalSessionId,
jobId,
taskCount: persistedTasks || totalTasks || 0
}, 'Failed to send task list completion to stdio client');
}
}
}
dispose() {
this.stopMonitoring();
this.removeAllListeners();
this.agentStates.clear();
logger.info('Workflow-aware agent manager disposed');
}
}
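// End-to-end lifecycle sketch (assumed caller, not part of this module; identifiers are illustrative):
//
//   const manager = WorkflowAwareAgentManager.getInstance();
//   await manager.startMonitoring();
//   await manager.registerAgentActivity('agent-1', 'task_execution', { workflowId: 'wf-123' });
//   await manager.updateAgentProgress('agent-1', 50);
//   await manager.completeAgentActivity('agent-1', true);
//   console.log(manager.getWorkflowAwareStats());
//   await manager.stopMonitoring();
//   manager.dispose();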