vibe-coder-mcp

Production-ready MCP server with complete agent integration, multi-transport support, and comprehensive development automation tools for AI-assisted workflows.

decomposition-service.d.ts (235 lines, 7.88 kB):
import { EventEmitter } from 'events';
import { DecompositionResult, RDDConfig } from '../core/rdd-engine.js';
import { ProjectContext } from '../types/project-context.js';
import type { AtomicTask } from '../types/task.js';
import { OpenRouterConfig } from '../../../types/workflow.js';
import type { ParsedTaskList } from '../types/artifact-types.js';
import { SummaryConfig } from './decomposition-summary-generator.js';
import { ProgressEventData } from './progress-tracker.js';

export interface DecompositionEventData {
    sessionId: string;
    projectId: string;
    taskId: string;
    agentId: string;
    timestamp: Date;
    metadata?: Record<string, unknown>;
}

export interface DecompositionStartedEvent extends DecompositionEventData {
    maxDepth: number;
    hasCustomConfig: boolean;
    originalSessionId?: string;
    jobId?: string;
}

export interface DecompositionProgressEvent extends DecompositionEventData {
    progress: number;
    step: string;
    phase: string;
    originalSessionId?: string;
    jobId?: string;
}

export interface DecompositionCompletedEvent extends DecompositionEventData {
    results: {
        totalTasks: number;
        isAtomic: boolean;
        depth: number;
        persistedTasks: number;
    };
    duration: number;
    status: 'completed';
    originalSessionId?: string;
    jobId?: string;
}

export interface DecompositionFailedEvent extends DecompositionEventData {
    error: {
        message: string;
        type: string;
        retryable: boolean;
    };
    duration: number;
    status: 'failed';
}

export interface TaskListStartedEvent extends DecompositionEventData {
    metadata: {
        taskListPath: string;
        totalTasks: number;
        phaseCount: number;
        projectName: string;
    };
}

export interface TaskListCompletedEvent extends DecompositionEventData {
    results: {
        totalTasks: number;
        totalHours: number;
        successfullyPersisted: number;
        totalGenerated: number;
    };
    duration: number;
    status: 'completed';
    metadata: {
        taskListPath: string;
        projectName: string;
        phaseCount: number;
        summaryGenerated: boolean;
        orchestrationTriggered: boolean;
    };
}

export interface EpicGenerationStartedEvent extends DecompositionEventData {
    metadata: {
        taskCount: number;
        phase: 'epic_generation';
    };
}

export interface EpicGenerationCompletedEvent extends DecompositionEventData {
    status: 'completed' | 'failed';
    metadata: {
        taskCount: number;
        phase: 'epic_generation';
        success: boolean;
        error?: string;
    };
}

export interface DecompositionSession {
    id: string;
    taskId: string;
    projectId: string;
    agentId: string;
    status: 'pending' | 'in_progress' | 'completed' | 'failed';
    startTime: Date;
    endTime?: Date;
    progress: number;
    currentDepth: number;
    maxDepth: number;
    totalTasks: number;
    processedTasks: number;
    results: DecompositionResult[];
    error?: string;
    persistedTasks?: AtomicTask[];
    taskFiles?: string[];
    richResults?: {
        tasks: AtomicTask[];
        files: string[];
        summary: {
            totalTasks: number;
            totalHours: number;
            projectId: string;
            successfullyPersisted: number;
            totalGenerated: number;
        };
    };
}

export interface DecompositionRequest {
    task: AtomicTask;
    context: ProjectContext;
    config?: Partial<RDDConfig>;
    sessionId?: string;
    agentId?: string;
    originalJobId?: string;
}

export declare class DecompositionService extends EventEmitter {
    private static instance;
    private engine;
    private sessions;
    private config;
    private contextService;
    private autoResearchDetector;
    private researchIntegrationService;
    private codeMapIntegrationService;
    private workflowStateManager;
    private summaryGenerator;
    private dependencyGraphs;
    private jobManagerIntegration;
    constructor(config: OpenRouterConfig, summaryConfig?: Partial<SummaryConfig>);
    createDecompositionJob(request: DecompositionRequest, toolName?: string): Promise<string>;
    private updateJobProgress;
    private initializeProgressTrackerIntegration;
    private handleProgressEventForSubPhase;
    static getInstance(config?: OpenRouterConfig, summaryConfig?: Partial<SummaryConfig>): DecompositionService;
    verifyEventEmitterIntegration(): {
        hasEventEmitter: boolean;
        supportedEvents: string[];
        listenerCount: number;
        isWorkflowAwareCompatible: boolean;
    };
    startDecomposition(request: DecompositionRequest): Promise<DecompositionSession>;
    getSession(sessionId: string): DecompositionSession | null;
    getActiveSessions(): DecompositionSession[];
    cancelSession(sessionId: string): boolean;
    cleanupSessions(maxAge?: number): number;
    private executeDecomposition;
    private enrichContext;
    private emitProgressEvent;
    private emitFailedEvent;
    private calculateSessionStats;
    private generateSessionId;
    retryDecomposition(sessionId: string, _newConfig?: Partial<RDDConfig>): Promise<DecompositionSession | null>;
    getStatistics(): {
        totalSessions: number;
        activeSessions: number;
        completedSessions: number;
        failedSessions: number;
        averageProcessingTime: number;
    };
    decomposeMultipleTasks(requests: DecompositionRequest[]): Promise<DecompositionSession[]>;
    decomposeTask(task: AtomicTask, projectContext: ProjectContext, onProgress?: (progress: ProgressEventData) => void): Promise<{
        success: boolean;
        data?: AtomicTask[];
        error?: string;
    }>;
    decomposeFromTaskList(taskList: ParsedTaskList, projectId: string, epicId?: string, options?: {
        maxDepth?: number;
        minHours?: number;
        maxHours?: number;
        forceDecomposition?: boolean;
    }): Promise<DecompositionSession>;
    getResults(sessionId: string): AtomicTask[];
    exportSession(sessionId: string): {
        session: Record<string, unknown>;
        results: Array<Record<string, unknown>>;
    } | null;
    private executeTaskListDecomposition;
    private performDependencyAnalysis;
    private buildDependencyAnalysisPrompt;
    private parseDependencyAnalysisResponse;
    private applyDependencyRelationships;
    private generateAndSaveVisualDependencyGraphs;
    private generateMermaidDependencyDiagram;
    private generateTextDependencySummary;
    private verifyDependencyPersistence;
    private triggerOrchestrationWorkflow;
    private mapDependencyType;
    private getProjectPath;
    private determineMaxFiles;
    private determineMaxContentSize;
    private extractSearchPatterns;
    private extractContentKeywords;
    private determineFileTypes;
    private extractDomain;
    private createResearchSummary;
    private determineTaskType;
    private parseEstimatedHours;
    private extractTaskListDependencies;
    private generateProjectEpics;
    private resolveEpicId;
    private extractTaskContext;
    private finalizeWorkflow;
    private applyDependencyDetection;
    private getDependencyGraphForProject;
    getExecutionPlan(projectId: string): Promise<{
        topologicalOrder: string[];
        parallelBatches: Array<{
            batchNumber: number;
            tasks: string[];
            estimatedDuration: number;
        }>;
        criticalPath: string[];
        estimatedDuration: number;
    } | null>;
    exportDependencyAnalysis(projectId: string): Promise<Record<string, unknown> | null>;
    clearProjectDependencyGraph(projectId: string): void;
    getDependencyStatistics(): {
        activeProjects: number;
        totalGraphs: number;
        cacheMemoryUsage: string;
    };
}
//# sourceMappingURL=decomposition-service.d.ts.map
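
Only the compiled type declarations are published here, so the following is a hypothetical consumer sketch derived from them: the import specifiers copy this file's own relative paths (they assume the caller sits next to the declaration file), and the OpenRouterConfig, AtomicTask, and ProjectContext values are placeholders assumed to be constructed elsewhere.

// Hypothetical usage sketch based only on the declarations above.
// Import specifiers are illustrative; config, task, and context are assumed
// to be built elsewhere in the application.
import { DecompositionService } from './decomposition-service.js';
import type { OpenRouterConfig } from '../../../types/workflow.js';
import type { AtomicTask } from '../types/task.js';
import type { ProjectContext } from '../types/project-context.js';

export async function runDecomposition(
  config: OpenRouterConfig,
  task: AtomicTask,
  context: ProjectContext
): Promise<void> {
  // getInstance() constructs the singleton on first use and reuses it afterwards.
  const service = DecompositionService.getInstance(config);

  // startDecomposition resolves with a session handle; progress and results
  // are read back through the session id.
  const session = await service.startDecomposition({
    task,
    context,
    agentId: 'example-agent', // optional field on DecompositionRequest
  });

  console.log(`session ${session.id}: ${session.status}, progress ${session.progress}`);

  // Once the session reports 'completed', the persisted atomic tasks are
  // available via getResults().
  const finished = service.getSession(session.id);
  if (finished?.status === 'completed') {
    const tasks: AtomicTask[] = service.getResults(session.id);
    console.log(`decomposed into ${tasks.length} atomic tasks`);
  }
}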