vibe-coder-mcp
Production-ready MCP server with complete agent integration, multi-transport support, and comprehensive development automation tools for AI-assisted workflows.
1,004 lines • 152 kB
JavaScript
import path from 'path';
import { EventEmitter } from 'events';
import { RDDEngine } from '../core/rdd-engine.js';
import { getVibeTaskManagerOutputDir } from '../utils/config-loader.js';
import { ContextEnrichmentService } from './context-enrichment-service.js';
import { AutoResearchDetector } from './auto-research-detector.js';
import { ResearchIntegration } from '../integrations/research-integration.js';
import { CodeMapIntegrationService } from '../integrations/code-map-integration.js';
import { getTaskOperations, TaskOperations } from '../core/operations/task-operations.js';
import { EnhancedError, TaskExecutionError, ValidationError, createErrorContext } from '../utils/enhanced-errors.js';
import logger from '../../../logger.js';
import { WorkflowStateManager, WorkflowPhase, WorkflowState, resolveWorkflowIdWithMapping } from './workflow-state-manager.js';
import { DecompositionSummaryGenerator } from './decomposition-summary-generator.js';
import { getDependencyGraph } from '../core/dependency-graph.js';
import { ProgressTracker } from './progress-tracker.js';
import { JobManagerIntegrationService } from '../integrations/job-manager-integration.js';
import { ProgressJobBridge } from '../integrations/progress-job-bridge.js';
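/**
 * DecompositionService: singleton EventEmitter that turns a high-level task
 * into atomic sub-tasks via the RDD engine, persists them, wires up their
 * dependencies, and reports progress through the workflow-state and
 * job-management layers.
 *
 * Illustrative usage (a sketch only; `config` and `request` stand for a
 * caller-provided engine configuration and decomposition request, neither is
 * defined in this file):
 *
 *   const service = DecompositionService.getInstance(config);
 *   const session = await service.startDecomposition(request);
 *   console.log(service.getSession(session.id)?.status);
 */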
export class DecompositionService extends EventEmitter {
static instance = null;
engine;
sessions = new Map();
config;
contextService;
autoResearchDetector;
researchIntegrationService;
codeMapIntegrationService;
workflowStateManager;
summaryGenerator;
dependencyGraphs = new Map();
jobManagerIntegration;
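/**
 * Builds the service around a fresh RDDEngine and wires up the supporting
 * singletons (context enrichment, auto-research detection, research and
 * code-map integrations, workflow state management, job management), plus
 * the ProgressJobBridge and the ProgressTracker listeners.
 */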
constructor(config, summaryConfig) {
super();
this.config = config;
this.engine = new RDDEngine(config);
this.contextService = ContextEnrichmentService.getInstance();
this.autoResearchDetector = AutoResearchDetector.getInstance();
this.researchIntegrationService = ResearchIntegration.getInstance();
this.codeMapIntegrationService = CodeMapIntegrationService.getInstance();
const outputDir = getVibeTaskManagerOutputDir();
const workflowStatesDir = outputDir ? path.join(outputDir, 'workflow-states') : '/tmp/test-workflow-states';
this.workflowStateManager = WorkflowStateManager.getInstance(workflowStatesDir);
this.summaryGenerator = new DecompositionSummaryGenerator(summaryConfig);
this.jobManagerIntegration = JobManagerIntegrationService.getInstance();
ProgressJobBridge.getInstance({
enableProgressMapping: true,
enableResourceTracking: true,
progressUpdateThreshold: 5,
debounceMs: 500
});
this.initializeProgressTrackerIntegration();
}
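/**
 * Creates a job-manager job for a decomposition request, or reuses the
 * caller-supplied originalJobId so progress keeps flowing to the job the
 * client is already watching. Returns the job ID; rethrows on failure.
 */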
async createDecompositionJob(request, toolName = 'vibe-task-manager') {
try {
if (request.originalJobId) {
logger.info({
originalJobId: request.originalJobId,
taskId: request.task.id,
projectId: request.task.projectId,
sessionId: request.sessionId
}, 'Using original job ID for decomposition progress tracking');
return request.originalJobId;
}
const jobId = await this.jobManagerIntegration.createTaskJob(toolName, {
taskId: request.task.id,
projectId: request.task.projectId,
sessionId: request.sessionId,
operation: 'decomposition'
}, {
taskId: request.task.id,
projectId: request.task.projectId || request.context.projectId,
operationType: 'decomposition',
priority: request.task.priority === 'critical' ? 'critical' :
request.task.priority === 'high' ? 'high' :
request.task.priority === 'low' ? 'low' : 'medium',
estimatedDuration: (request.task.estimatedHours || 1) * 60 * 60 * 1000,
resourceRequirements: {
memoryMB: 512,
cpuWeight: 2
},
metadata: {
sessionId: request.sessionId,
batchId: request.sessionId,
retryCount: 0,
maxRetries: 3
}
});
logger.info({
jobId,
taskId: request.task.id,
projectId: request.task.projectId,
sessionId: request.sessionId
}, 'Enhanced decomposition job created');
return jobId;
}
catch (error) {
logger.error({
err: error,
taskId: request.task.id,
sessionId: request.sessionId
}, 'Failed to create enhanced decomposition job');
throw error;
}
}
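/**
 * Best-effort progress update for a decomposition job. Failures are logged
 * at debug level and swallowed so they never interrupt the decomposition.
 */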
async updateJobProgress(jobId, progress, message, resourceUsage) {
try {
await this.jobManagerIntegration.updateJobProgress(jobId, progress, message, resourceUsage);
}
catch (error) {
logger.debug({
err: error,
jobId,
progress
}, 'Failed to update job progress - continuing with operation');
}
}
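/**
 * Subscribes to ProgressTracker events and maps each one onto a
 * DECOMPOSITION sub-phase (research, context_gathering, decomposition,
 * validation, dependency_detection) in the WorkflowStateManager.
 */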
initializeProgressTrackerIntegration() {
const progressTracker = ProgressTracker.getInstance();
const eventToSubPhaseMap = {
'research_triggered': { phase: WorkflowPhase.DECOMPOSITION, subPhase: 'research' },
'research_completed': { phase: WorkflowPhase.DECOMPOSITION, subPhase: 'research' },
'context_gathering_started': { phase: WorkflowPhase.DECOMPOSITION, subPhase: 'context_gathering' },
'context_gathering_completed': { phase: WorkflowPhase.DECOMPOSITION, subPhase: 'context_gathering' },
'decomposition_progress': { phase: WorkflowPhase.DECOMPOSITION, subPhase: 'decomposition' },
'validation_started': { phase: WorkflowPhase.DECOMPOSITION, subPhase: 'validation' },
'validation_completed': { phase: WorkflowPhase.DECOMPOSITION, subPhase: 'validation' },
'dependency_detection_started': { phase: WorkflowPhase.DECOMPOSITION, subPhase: 'dependency_detection' },
'dependency_detection_completed': { phase: WorkflowPhase.DECOMPOSITION, subPhase: 'dependency_detection' }
};
Object.entries(eventToSubPhaseMap).forEach(([eventType, mapping]) => {
progressTracker.addEventListener(eventType, (data) => {
this.handleProgressEventForSubPhase(data, mapping.phase, mapping.subPhase, eventType);
});
});
logger.debug('ProgressTracker integration initialized for workflow sub-phases');
}
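/**
 * Translates a single ProgressTracker event into a sub-phase update.
 * Silently skips events whose workflow ID cannot be resolved, whose
 * workflow does not exist yet, or whose phase has not been initialized;
 * otherwise derives a state/progress pair from the event type and applies
 * it asynchronously.
 */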
handleProgressEventForSubPhase(data, phase, subPhase, eventType) {
try {
const workflowIdResult = resolveWorkflowIdWithMapping(data);
if (!workflowIdResult.success) {
logger.debug({
eventType,
phase,
subPhase,
error: workflowIdResult.error
}, 'Skipping progress event - workflow ID resolution failed');
return;
}
const workflowId = workflowIdResult.data;
const workflowExists = this.workflowStateManager.hasWorkflow(workflowId);
if (!workflowExists) {
logger.debug({
workflowId,
eventType,
phase,
subPhase
}, 'Skipping progress event - workflow not found');
return;
}
const phaseExists = this.workflowStateManager.hasPhase(workflowId, phase);
if (!phaseExists) {
logger.debug({
workflowId,
eventType,
phase,
subPhase
}, 'Skipping progress event - workflow phase not found, may be in initialization');
return;
}
let progress = data.progressPercentage || 0;
let state;
if (eventType.includes('started') || eventType.includes('triggered')) {
state = WorkflowState.IN_PROGRESS;
progress = Math.max(progress, 10);
}
else if (eventType.includes('completed')) {
state = WorkflowState.COMPLETED;
progress = 100;
}
else if (eventType.includes('progress')) {
state = WorkflowState.IN_PROGRESS;
progress = Math.max(progress, 20);
}
this.workflowStateManager.updateSubPhaseProgress(workflowId, phase, subPhase, progress, state, {
lastProgressEvent: eventType,
eventTimestamp: data.timestamp,
componentName: data.componentName,
message: data.message
}).then(result => {
if (!result.success) {
logger.debug({
error: result.error,
workflowId,
phase,
subPhase,
eventType
}, 'Failed to update workflow sub-phase from progress event');
}
}).catch(error => {
logger.debug({
err: error,
workflowId,
phase,
subPhase,
eventType
}, 'Unexpected error updating workflow sub-phase from progress event');
});
}
catch (error) {
logger.debug({
err: error,
eventType,
phase,
subPhase
}, 'Error handling progress event for sub-phase');
}
}
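/**
 * Returns the shared DecompositionService instance, creating it on first
 * use. A config is required only for that first call.
 */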
static getInstance(config, summaryConfig) {
if (!DecompositionService.instance) {
if (!config) {
throw new Error('DecompositionService requires config for first initialization');
}
DecompositionService.instance = new DecompositionService(config, summaryConfig);
}
return DecompositionService.instance;
}
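/**
 * Sanity-checks that the EventEmitter surface is intact, reports which
 * lifecycle events this service emits, and returns the aggregate listener
 * count across the core decomposition events.
 */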
verifyEventEmitterIntegration() {
const supportedEvents = [
'decomposition_started',
'decomposition_progress',
'decomposition_completed',
'decomposition_failed',
'task_list_started',
'task_list_completed',
'epic_generation_started',
'epic_generation_completed'
];
const hasEventEmitter = typeof this.emit === 'function' && typeof this.on === 'function';
const listenerCount = this.listenerCount('decomposition_started') +
this.listenerCount('decomposition_progress') +
this.listenerCount('decomposition_completed') +
this.listenerCount('decomposition_failed');
const isWorkflowAwareCompatible = hasEventEmitter && supportedEvents.length > 0;
logger.info({
hasEventEmitter,
supportedEvents,
listenerCount,
isWorkflowAwareCompatible,
eventEmitterMethods: {
emit: typeof this.emit,
on: typeof this.on,
removeListener: typeof this.removeListener,
listenerCount: typeof this.listenerCount
}
}, 'DecompositionService EventEmitter integration verification');
return {
hasEventEmitter,
supportedEvents,
listenerCount,
isWorkflowAwareCompatible
};
}
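/**
 * Validates the request, registers a new decomposition session, initializes
 * workflow state (including a parent-task workflow when the task ID differs
 * from the session ID), emits 'decomposition_started', and kicks off
 * executeDecomposition asynchronously via setImmediate. Returns the session
 * immediately; failures in the async run are recorded on the session and
 * surfaced through emitFailedEvent.
 */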
async startDecomposition(request) {
const sessionId = request.sessionId || this.generateSessionId();
const context = createErrorContext('DecompositionService', 'startDecomposition')
.taskId(request.task.id)
.projectId(request.context.projectId)
.sessionId(sessionId)
.metadata({
maxDepth: request.config?.maxDepth || 5,
hasCustomConfig: !!request.config
})
.build();
try {
if (!request.task) {
throw new ValidationError('Task is required for decomposition', context, {
field: 'request.task',
expectedFormat: 'AtomicTask object'
});
}
if (!request.task.id || request.task.id.trim() === '') {
throw new ValidationError('Task ID is required for decomposition', context, {
field: 'request.task.id',
expectedFormat: 'Non-empty string',
actualValue: request.task.id
});
}
if (!request.context) {
throw new ValidationError('Project context is required for decomposition', context, {
field: 'request.context',
expectedFormat: 'ProjectContext object'
});
}
if (!request.context.projectId || request.context.projectId.trim() === '') {
throw new ValidationError('Project ID is required in context for decomposition', context, {
field: 'request.context.projectId',
expectedFormat: 'Non-empty string',
actualValue: request.context.projectId
});
}
logger.info({
sessionId,
taskId: request.task.id,
projectId: request.context.projectId
}, 'Starting decomposition session');
const session = {
id: sessionId,
taskId: request.task.id,
projectId: request.context.projectId,
agentId: request.agentId || 'unknown',
status: 'pending',
startTime: new Date(),
progress: 0,
currentDepth: 0,
maxDepth: request.config?.maxDepth || 5,
totalTasks: 1,
processedTasks: 0,
results: []
};
this.sessions.set(sessionId, session);
await this.workflowStateManager.initializeWorkflow(sessionId, sessionId, request.context.projectId, {
taskId: request.task.id,
taskTitle: request.task.title,
maxDepth: request.config?.maxDepth || 5
});
if (request.task.id !== sessionId) {
await this.workflowStateManager.initializeWorkflow(request.task.id, sessionId, request.context.projectId, {
taskId: request.task.id,
taskTitle: `Parent workflow for ${request.task.title}`,
maxDepth: request.config?.maxDepth || 5,
isParentTaskWorkflow: true,
originalSessionId: sessionId
});
}
await this.workflowStateManager.transitionWorkflow(sessionId, WorkflowPhase.INITIALIZATION, WorkflowState.IN_PROGRESS, {
reason: 'Starting initialization process',
progress: 50,
triggeredBy: 'DecompositionService'
});
await this.workflowStateManager.transitionWorkflow(sessionId, WorkflowPhase.DECOMPOSITION, WorkflowState.PENDING, {
reason: 'Initialization completed, starting decomposition',
progress: 0,
triggeredBy: 'DecompositionService'
});
const startedEvent = {
sessionId,
projectId: request.context.projectId,
taskId: request.task.id,
agentId: request.agentId || 'unknown',
timestamp: new Date(),
maxDepth: request.config?.maxDepth || 5,
hasCustomConfig: !!request.config,
originalSessionId: request.sessionId,
jobId: sessionId,
metadata: {
taskTitle: request.task.title,
taskType: request.task.type,
jobId: request.originalJobId || sessionId,
sessionId: request.sessionId
}
};
this.emit('decomposition_started', startedEvent);
logger.info({
event: 'decomposition_started',
sessionId,
taskId: request.task.id,
projectId: request.context.projectId,
agentId: request.agentId,
maxDepth: startedEvent.maxDepth,
hasCustomConfig: startedEvent.hasCustomConfig,
taskTitle: startedEvent.metadata?.taskTitle || 'Unknown',
taskType: startedEvent.metadata?.taskType || 'development',
timestamp: startedEvent.timestamp.toISOString()
}, 'EventEmitter: decomposition_started event emitted');
setImmediate(() => {
this.executeDecomposition(session, request).catch(error => {
const errorMessage = error instanceof EnhancedError
? error.message
: error instanceof Error
? error.message
: 'Unknown error';
logger.error({
err: error,
sessionId,
errorType: error.constructor.name,
retryable: error instanceof EnhancedError ? error.retryable : false
}, 'Decomposition session failed');
session.status = 'failed';
session.error = errorMessage;
session.endTime = new Date();
this.emitFailedEvent(session, request, error, {
phase: 'async_execution',
step: 'execution_failed'
});
});
});
return session;
}
catch (error) {
if (error instanceof EnhancedError) {
throw error;
}
throw new TaskExecutionError(`Failed to start decomposition session: ${error instanceof Error ? error.message : String(error)}`, context, {
cause: error instanceof Error ? error : undefined,
retryable: true
});
}
}
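/** Returns the session with the given ID, or null if it is unknown. */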
getSession(sessionId) {
return this.sessions.get(sessionId) || null;
}
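/** Returns all sessions that are still pending or in progress. */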
getActiveSessions() {
return Array.from(this.sessions.values()).filter(session => session.status === 'pending' || session.status === 'in_progress');
}
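/**
 * Marks a pending or in-progress session as failed ("Cancelled by user").
 * Returns false when the session is unknown or already finished.
 */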
cancelSession(sessionId) {
const session = this.sessions.get(sessionId);
if (!session || session.status === 'completed' || session.status === 'failed') {
return false;
}
session.status = 'failed';
session.error = 'Cancelled by user';
session.endTime = new Date();
logger.info({ sessionId }, 'Decomposition session cancelled');
return true;
}
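/**
 * Removes finished sessions whose endTime is older than maxAge
 * (default 24 hours). Returns the number of sessions removed.
 */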
cleanupSessions(maxAge = 24 * 60 * 60 * 1000) {
const cutoff = new Date(Date.now() - maxAge);
let cleaned = 0;
for (const [sessionId, session] of this.sessions.entries()) {
if (session.endTime && session.endTime < cutoff) {
this.sessions.delete(sessionId);
cleaned++;
}
}
if (cleaned > 0) {
logger.info({ cleaned }, 'Cleaned up old decomposition sessions');
}
return cleaned;
}
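/**
 * Runs the full decomposition pipeline for a session: context enrichment,
 * epic-first decomposition via RDDEngine.decomposeTaskWithEpics, epic
 * generation (with a fallback main-epic assignment on failure), task
 * persistence, dependency creation and graph generation, dependency
 * analysis, summary generation, and finally the orchestration trigger and
 * the 'decomposition_completed' event. Any error marks the session failed
 * and transitions the workflow to FAILED before rethrowing.
 */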
async executeDecomposition(session, request) {
try {
session.status = 'in_progress';
session.progress = 10;
await this.workflowStateManager.transitionWorkflow(session.id, WorkflowPhase.DECOMPOSITION, WorkflowState.IN_PROGRESS, {
reason: 'Starting decomposition execution',
progress: 10,
triggeredBy: 'DecompositionService'
});
this.emitProgressEvent(session, request, 10, 'decomposition_started', 'DECOMPOSITION', {
message: `Starting decomposition of task: ${request.task.title}`,
taskType: request.task.type,
taskId: request.task.id
});
if (request.config) {
this.engine = new RDDEngine(this.config, request.config);
}
const enrichedContext = await this.enrichContext(request.context, request.task);
session.progress = 20;
await this.workflowStateManager.updatePhaseProgress(session.id, WorkflowPhase.DECOMPOSITION, 20, { step: 'context_enrichment_completed' });
this.emitProgressEvent(session, request, 20, 'context_enrichment_completed', 'DECOMPOSITION', {
message: 'Context enrichment completed - project structure and dependencies analyzed',
languages: enrichedContext.languages,
frameworks: enrichedContext.frameworks
});
logger.info({
sessionId: session.id,
taskId: request.task.id,
projectId: request.task.projectId,
method: 'decomposeTaskWithEpics'
}, 'Using epic-first decomposition approach');
const result = await this.engine.decomposeTaskWithEpics(request.task, enrichedContext, 0);
session.progress = 80;
await this.workflowStateManager.updatePhaseProgress(session.id, WorkflowPhase.DECOMPOSITION, 80, {
step: 'decomposition_completed',
decomposedTaskCount: result.subTasks?.length || 0,
isAtomic: result.isAtomic
});
this.emitProgressEvent(session, request, 80, 'decomposition_completed', 'DECOMPOSITION', {
decomposedTaskCount: result.subTasks?.length || 0,
isAtomic: result.isAtomic,
message: result.isAtomic
? 'Task is already atomic - no decomposition needed'
: `Successfully decomposed into ${result.subTasks?.length || 0} atomic tasks`
});
session.results = [result];
session.processedTasks = 1;
session.currentDepth = result.depth;
if (result.subTasks && result.subTasks.length > 0) {
session.progress = 82;
logger.info({
sessionId: session.id,
projectId: session.projectId,
taskCount: result.subTasks.length
}, 'Starting epic generation phase');
this.emitProgressEvent(session, request, 82, 'epic_generation_started', 'DECOMPOSITION', {
taskCount: result.subTasks.length,
message: `Starting epic identification for ${result.subTasks.length} tasks`
});
const epicStartedEvent = {
sessionId: session.id,
projectId: session.projectId,
taskId: session.taskId,
agentId: request.agentId || 'unknown',
timestamp: new Date(),
originalSessionId: request.sessionId,
jobId: session.id,
metadata: {
taskCount: result.subTasks.length,
phase: 'epic_generation'
}
};
this.emit('epic_generation_started', epicStartedEvent);
logger.info({
event: 'epic_generation_started',
sessionId: session.id,
projectId: session.projectId,
taskId: session.taskId,
agentId: request.agentId || 'unknown',
taskCount: epicStartedEvent.metadata.taskCount,
phase: epicStartedEvent.metadata.phase,
timestamp: epicStartedEvent.timestamp.toISOString()
}, 'EventEmitter: epic_generation_started event emitted');
try {
await this.generateProjectEpics(session, result.subTasks);
const epicCompletedEvent = {
sessionId: session.id,
projectId: session.projectId,
taskId: session.taskId,
agentId: request.agentId || 'unknown',
timestamp: new Date(),
status: 'completed',
originalSessionId: request.sessionId,
jobId: session.id,
metadata: {
taskCount: result.subTasks.length,
phase: 'epic_generation',
success: true
}
};
this.emit('epic_generation_completed', epicCompletedEvent);
logger.info({
event: 'epic_generation_completed',
sessionId: session.id,
projectId: session.projectId,
taskId: session.taskId,
agentId: request.agentId || 'unknown',
status: epicCompletedEvent.status,
taskCount: epicCompletedEvent.metadata.taskCount,
phase: epicCompletedEvent.metadata.phase,
success: epicCompletedEvent.metadata.success,
timestamp: epicCompletedEvent.timestamp.toISOString()
}, 'EventEmitter: epic_generation_completed event emitted (success)');
}
catch (error) {
const epicFailedEvent = {
sessionId: session.id,
projectId: session.projectId,
taskId: session.taskId,
agentId: request.agentId || 'unknown',
timestamp: new Date(),
status: 'failed',
originalSessionId: request.sessionId,
jobId: session.id,
metadata: {
taskCount: result.subTasks.length,
phase: 'epic_generation',
success: false,
error: error instanceof Error ? error.message : String(error)
}
};
this.emit('epic_generation_completed', epicFailedEvent);
logger.warn({
event: 'epic_generation_completed',
err: error,
sessionId: session.id,
projectId: session.projectId,
taskId: session.taskId,
agentId: request.agentId || 'unknown',
status: epicFailedEvent.status,
taskCount: epicFailedEvent.metadata.taskCount,
phase: epicFailedEvent.metadata.phase,
success: epicFailedEvent.metadata.success,
error: epicFailedEvent.metadata.error,
timestamp: epicFailedEvent.timestamp.toISOString()
}, 'EventEmitter: epic_generation_completed event emitted (failure) - tasks will use fallback epic IDs');
logger.info({
sessionId: session.id,
projectId: session.projectId,
taskCount: result.subTasks.length
}, 'Applying fallback epic assignment after epic generation failure');
const fallbackMainEpicId = `${session.projectId}-main-epic`;
for (const task of result.subTasks) {
task.epicId = fallbackMainEpicId;
logger.debug({
taskId: task.id,
taskTitle: task.title,
assignedEpicId: fallbackMainEpicId
}, 'Task assigned to fallback main epic after generation failure');
}
logger.info({
sessionId: session.id,
fallbackEpicId: fallbackMainEpicId,
tasksAssigned: result.subTasks.length
}, 'Fallback epic assignment completed - all tasks assigned to main epic');
}
session.progress = 85;
this.emitProgressEvent(session, request, 85, 'task_persistence_started', 'DECOMPOSITION', {
taskCount: result.subTasks.length,
message: `Starting to persist ${result.subTasks.length} decomposed tasks`
});
const taskOps = getTaskOperations();
const persistedTasks = [];
const taskFiles = [];
const taskIdMapping = new Map();
for (const subTask of result.subTasks) {
try {
const createResult = await taskOps.createTask({
title: subTask.title,
description: subTask.description,
type: subTask.type || 'development',
priority: subTask.priority || 'medium',
projectId: session.projectId,
epicId: subTask.epicId,
estimatedHours: subTask.estimatedHours || 1,
acceptanceCriteria: subTask.acceptanceCriteria || [],
tags: subTask.tags || []
}, session.id);
if (createResult.success && createResult.data) {
persistedTasks.push(createResult.data);
taskIdMapping.set(subTask.id, createResult.data.id);
logger.info({
sessionId: session.id,
originalTaskId: subTask.id,
persistedTaskId: createResult.data.id,
taskTitle: subTask.title,
epicId: subTask.epicId,
projectId: session.projectId,
persistedCount: persistedTasks.length,
totalTasks: result.subTasks.length
}, 'Task successfully persisted to storage');
const taskProgress = 85 + (persistedTasks.length / result.subTasks.length) * 5;
this.emitProgressEvent(session, request, taskProgress, 'task_persisted', 'DECOMPOSITION', {
persistedCount: persistedTasks.length,
totalTasks: result.subTasks.length,
currentTask: subTask.title,
message: `Persisted task ${persistedTasks.length}/${result.subTasks.length}: ${subTask.title}`
});
if (createResult.data.filePaths && createResult.data.filePaths.length > 0) {
taskFiles.push(...createResult.data.filePaths);
}
}
else {
logger.error({
sessionId: session.id,
taskTitle: subTask.title,
originalTaskId: subTask.id,
error: createResult.error
}, 'Failed to persist task - createResult was not successful');
}
}
catch (error) {
logger.warn({
err: error,
taskTitle: subTask.title,
sessionId: session.id
}, 'Failed to persist individual task');
}
}
const { getDependencyOperations } = await import('../core/operations/dependency-operations.js');
const dependencyOps = getDependencyOperations();
let dependenciesCreated = 0;
for (const subTask of result.subTasks) {
if (subTask.dependencies && subTask.dependencies.length > 0) {
const newTaskId = taskIdMapping.get(subTask.id);
if (newTaskId) {
for (const depId of subTask.dependencies) {
const newDepId = taskIdMapping.get(depId);
if (newDepId) {
try {
const depResult = await dependencyOps.createDependency({
fromTaskId: newTaskId,
toTaskId: newDepId,
type: 'requires',
description: `${subTask.title} depends on ${depId}`,
critical: false
}, session.id);
if (depResult.success) {
dependenciesCreated++;
logger.debug({
fromTask: newTaskId,
toTask: newDepId,
sessionId: session.id
}, 'Dependency created successfully');
}
}
catch (error) {
logger.warn({
err: error,
fromTask: newTaskId,
toTask: newDepId,
sessionId: session.id
}, 'Failed to create dependency');
}
}
}
}
}
}
if (dependenciesCreated > 0) {
try {
const graphResult = await dependencyOps.generateDependencyGraph(session.projectId);
if (graphResult.success) {
logger.info({
projectId: session.projectId,
dependenciesCreated,
sessionId: session.id
}, 'Dependency graph generated successfully');
}
else {
logger.warn({
projectId: session.projectId,
error: graphResult.error,
sessionId: session.id
}, 'Failed to generate dependency graph');
}
}
catch (error) {
logger.warn({
err: error,
projectId: session.projectId,
sessionId: session.id
}, 'Error generating dependency graph');
}
}
session.persistedTasks = persistedTasks;
session.taskFiles = taskFiles;
logger.info({
sessionId: session.id,
executionPath: 'executeDecomposition',
persistedTasksArrayLength: persistedTasks.length,
sessionPersistedTasksLength: session.persistedTasks?.length || 0,
totalDecomposedTasksGenerated: result.subTasks.length,
persistedTaskIds: persistedTasks.map(t => t.id),
sessionPersistedTaskIds: session.persistedTasks?.map(t => t.id) || []
}, 'DEBUG: Session persistence tracking in executeDecomposition');
session.richResults = {
tasks: persistedTasks,
files: taskFiles,
summary: {
totalTasks: persistedTasks.length,
totalHours: persistedTasks.reduce((sum, task) => sum + (task?.estimatedHours || 0), 0),
projectId: session.projectId,
successfullyPersisted: persistedTasks.length,
totalGenerated: result.subTasks.length
}
};
logger.info({
sessionId: session.id,
totalGenerated: result.subTasks.length,
successfullyPersisted: persistedTasks.length,
taskFiles: taskFiles.length
}, 'Tasks persisted to storage successfully');
}
if (result.subTasks && result.subTasks.length > 1) {
session.progress = 90;
logger.info({
sessionId: session.id,
taskCount: result.subTasks.length
}, 'Starting dependency analysis for decomposed tasks');
try {
this.emitProgressEvent(session, request, 90, 'dependency_analysis_started', 'DECOMPOSITION', {
taskCount: session.persistedTasks?.length || 0,
message: `Analyzing dependencies for ${session.persistedTasks?.length || 0} tasks`
});
await this.performDependencyAnalysis(session, session.persistedTasks || []);
this.emitProgressEvent(session, request, 95, 'dependency_analysis_completed', 'DECOMPOSITION', {
taskCount: session.persistedTasks?.length || 0,
message: 'Dependency analysis completed successfully'
});
logger.info({
sessionId: session.id
}, 'Dependency analysis completed successfully');
}
catch (error) {
logger.warn({
err: error,
sessionId: session.id
}, 'Dependency analysis failed, continuing without dependencies');
}
}
this.calculateSessionStats(session);
session.progress = 100;
session.status = 'completed';
session.endTime = new Date();
this.emitProgressEvent(session, request, 100, 'decomposition_completed', 'DECOMPOSITION', {
totalSubTasks: result.subTasks?.length || 0,
isAtomic: result.isAtomic,
depth: result.depth,
persistedTasks: session.persistedTasks?.length || 0
});
await this.workflowStateManager.transitionWorkflow(session.id, WorkflowPhase.DECOMPOSITION, WorkflowState.COMPLETED, {
reason: 'Decomposition completed successfully',
progress: 100,
triggeredBy: 'DecompositionService',
metadata: {
totalSubTasks: result.subTasks?.length || 0,
isAtomic: result.isAtomic,
depth: result.depth,
persistedTasks: session.persistedTasks?.length || 0
}
});
logger.info({
sessionId: session.id,
totalSubTasks: result.subTasks.length,
isAtomic: result.isAtomic,
depth: result.depth
}, 'Decomposition session completed');
try {
const summaryResult = await this.summaryGenerator.generateSessionSummary(session);
if (summaryResult.success) {
logger.info({
sessionId: session.id,
outputDirectory: summaryResult.outputDirectory,
filesGenerated: summaryResult.generatedFiles.length,
generationTime: summaryResult.metadata.generationTime
}, 'Decomposition session summary generated successfully');
}
else {
logger.warn({
sessionId: session.id,
error: summaryResult.error
}, 'Failed to generate decomposition session summary');
}
}
catch (summaryError) {
logger.warn({
err: summaryError,
sessionId: session.id
}, 'Error generating decomposition session summary');
}
logger.info({
sessionId: session.id,
projectId: session.projectId,
finalSessionPersistedTasksLength: session.persistedTasks?.length || 0,
finalSessionPersistedTasksIds: session.persistedTasks?.map(t => t.id) || [],
sessionStatus: session.status,
sessionProgress: session.progress
}, 'DEBUG: Final session state before orchestration trigger');
await this.triggerOrchestrationWorkflow(session);
const completedEvent = {
sessionId: session.id,
projectId: session.projectId,
taskId: session.taskId,
agentId: request.agentId || 'unknown',
timestamp: new Date(),
results: {
totalTasks: session.persistedTasks?.length || session.totalTasks,
isAtomic: session.results[0]?.isAtomic || false,
depth: session.results[0]?.depth || session.currentDepth,
persistedTasks: session.persistedTasks?.length || 0
},
duration: session.endTime.getTime() - session.startTime.getTime(),
status: 'completed',
originalSessionId: request.sessionId,
jobId: session.id,
metadata: {
hasResearchContext: !!session.researchInsights,
summaryGenerated: true,
orchestrationTriggered: true,
jobId: request.originalJobId || session.id,
sessionId: request.sessionId
}
};
this.emit('decomposition_completed', completedEvent);
logger.info({
event: 'decomposition_completed',
sessionId: session.id,
taskId: session.taskId,
projectId: session.projectId,
agentId: request.agentId,
totalTasks: completedEvent.results.totalTasks,
isAtomic: completedEvent.results.isAtomic,
depth: completedEvent.results.depth,
persistedTasks: completedEvent.results.persistedTasks,
duration: completedEvent.duration,
status: completedEvent.status,
hasResearchContext: completedEvent.metadata?.hasResearchContext || false,
summaryGenerated: completedEvent.metadata?.summaryGenerated || false,
orchestrationTriggered: completedEvent.metadata?.orchestrationTriggered || false,
timestamp: completedEvent.timestamp.toISOString()
}, 'EventEmitter: decomposition_completed event emitted');
}
catch (error) {
logger.error({ err: error, sessionId: session.id }, 'Decomposition execution failed');
session.status = 'failed';
session.error = error instanceof Error ? error.message : String(error);
session.endTime = new Date();
this.emitFailedEvent(session, request, error, {
phase: 'decomposition',
step: 'execution_failed'
});
try {
await this.workflowStateManager.transitionWorkflow(session.id, WorkflowPhase.DECOMPOSITION, WorkflowState.FAILED, {
reason: `Decomposition failed: ${error instanceof Error ? error.message : String(error)}`,
triggeredBy: 'DecompositionService',
metadata: { error: error instanceof Error ? error.message : String(error) }
});
}
catch (workflowError) {
logger.warn({ err: workflowError, sessionId: session.id }, 'Failed to update workflow state on error');
}
throw error;
}
}
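/**
 * Enriches the project context for a task: skips trivial or already
 * enriched contexts, gathers relevant codebase files via the
 * ContextEnrichmentService, refreshes the code map, evaluates whether
 * auto-research should run, and, when triggered, augments the context with
 * research results.
 */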
async enrichContext(context, task) {
try {
logger.info({ projectId: context.projectId }, 'Enriching context with codebase information and auto-research');
if (!task) {
logger.debug('No task provided for context enrichment, using original context');
return context;
}
if (task.estimatedHours <= 0.5 && !task.description.toLowerCase().includes('complex')) {
logger.debug({
taskId: task.id,
estimatedHours: task.estimatedHours
}, 'Skipping context enrichment for simple task');
return context;
}
if (context.codebaseContext && context.codebaseContext.relevantFiles.length > 0) {
logger.debug({
taskId: task.id,
existingFiles: context.codebaseContext.relevantFiles.length
}, 'Context already enriched, skipping re-enrichment');
return context;
}
const projectPath = this.getProjectPath(context);
const contextRequest = {
taskDescription: task.description || task.title,
projectPath,
maxFiles: this.determineMaxFiles(task),
maxContentSize: this.determineMaxContentSize(task),
searchPatterns: this.extractSearchPatterns(task),
priorityFileTypes: this.determineFileTypes(context),
excludeDirs: ['node_modules', '.git', 'dist', 'build', '.next', 'coverage'],
contentKeywords: this.extractContentKeywords(task)
};
logger.debug({
taskId: task.id,
searchPatterns: contextRequest.searchPatterns,
maxFiles: contextRequest.maxFiles
}, 'Gathering context for task decomposition');
const contextResult = await this.contextService.gatherContext(contextRequest);
let codeMapResult = null;
try {
logger.debug({ projectPath, taskId: task.id }, 'Refreshing code map for enhanced context');
codeMapResult = await this.codeMapIntegrationService.refreshCodeMap(projectPath);
if (codeMapResult.success) {
logger.info({
taskId: task.id,
codeMapPath: codeMapResult.filePath,
generationTime: codeMapResult.generationTime
}, 'Code map generated successfully for context enrichment');
}
else {
logger.warn({
taskId: task.id,
error: codeMapResult.error
}, 'Code map generation failed, continuing without code map context');
}
}
catch (codeMapError) {
logger.warn({
err: codeMapError,
taskId: task.id
}, 'Code map generation error, continuing without code map context');
}
const researchTriggerContext = {
task,
projectContext: context,
contextResult,
projectPath,
sessionId: `research_${task.id}_${Date.now()}`
};
logger.debug({
taskId: task.id,
projectId: context.projectId
}, 'Evaluating auto-research need');
const researchEvaluation = await this.autoResearchDetector.evaluateResearchNeed(researchTriggerContext);
let enhancedContext = context;
if (researchEvaluation.decision.shouldTriggerResearch) {
logger.info({
taskId: task.id,
primaryReason: researchEvaluation.decision.primaryReason,
confidence: researchEvaluation.decision.confidence,
estimatedQueries: researchEvaluation.decision.recommendedScope.estimatedQueries
}, 'Auto-research triggered - enhancing decomposition with research');
try {
const researchResult = await this.researchIntegrationService.enhanceDecompositionWithResearch({
taskDescription: task.description || task.title,
projectPath,
domain: this.extractDomain(context),
context: context
});
const re