shipdeck
Ship MVPs in 48 hours. Fix bugs in 30 seconds. The command deck for developers who ship.
/**
* Parallel Runner Agent for Shipdeck Ultimate
* Orchestrates parallel execution of multiple agents with thread pool management
* Supports true concurrency through Node.js worker_threads
*/
const { BaseAgent } = require('./base-agent');
const { Worker } = require('worker_threads');
const { EventEmitter } = require('events');
const os = require('os');
const path = require('path');
const fs = require('fs');
/**
* Task execution states
*/
const TASK_STATES = {
PENDING: 'pending',
RUNNING: 'running',
COMPLETED: 'completed',
FAILED: 'failed',
CANCELLED: 'cancelled',
TIMEOUT: 'timeout'
};
/**
* Resource lock types for conflict detection
*/
const LOCK_TYPES = {
FILE: 'file',
DIRECTORY: 'directory',
PORT: 'port',
DATABASE: 'database',
NETWORK: 'network'
};
/**
* Worker execution context for isolated agent execution
*/
const WORKER_SCRIPT = `
const { parentPort, workerData } = require('worker_threads');
// Import agent class dynamically
async function executeAgent() {
try {
const { agentPath, agentConfig, task, context } = workerData;
// Load agent class (handle both `module.exports = Class` and named exports such as `{ BaseAgent }`)
const loaded = require(agentPath);
const AgentClass = typeof loaded === 'function' ? loaded : (loaded.default || Object.values(loaded).find((v) => typeof v === 'function'));
const agent = new AgentClass(agentConfig);
// Execute task
const result = await agent.executeWithRetry(task, context);
parentPort.postMessage({
type: 'success',
result,
taskId: context.taskId
});
} catch (error) {
parentPort.postMessage({
type: 'error',
error: {
message: error.message,
stack: error.stack,
name: error.constructor.name
},
taskId: workerData.context.taskId
});
}
}
executeAgent();
`;
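// The script above is written to disk by createWorkerScript() because each Worker below is
// constructed from a file path. (worker_threads can also evaluate a code string directly via
// `new Worker(code, { eval: true })`, but that approach is not used here.)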
/**
* Parallel Runner Agent
* Manages concurrent execution of multiple agents with advanced scheduling
*/
class ParallelRunnerAgent extends BaseAgent {
constructor(options = {}) {
super({
name: 'ParallelRunner',
description: 'Orchestrates parallel execution of multiple agents with thread pool management',
version: '1.0.0',
...options
});
// Thread pool configuration
this.config = {
...this.config,
maxWorkers: options.maxWorkers || Math.min(os.cpus().length, 8),
minWorkers: options.minWorkers || 2,
workerTimeout: options.workerTimeout || 300000, // 5 minutes
taskTimeout: options.taskTimeout || 600000, // 10 minutes
retryAttempts: options.retryAttempts || 2,
resourceCheckInterval: options.resourceCheckInterval || 1000,
deadlockDetectionInterval: options.deadlockDetectionInterval || 5000,
maxQueueSize: options.maxQueueSize || 100,
enableResourceMonitoring: options.enableResourceMonitoring !== false
};
// Initialize state
this.workers = new Map();
this.taskQueue = [];
this.activeTasks = new Map();
this.completedTasks = new Map();
this.resourceLocks = new Map(); // resource identifier -> { type, tasks, acquiredAt }
this.taskLocks = new Map(); // task ID -> [resource identifiers held by that task]
this.eventEmitter = new EventEmitter();
// Performance tracking
this.stats = {
tasksCompleted: 0,
tasksFailed: 0,
averageExecutionTime: 0,
peakConcurrency: 0,
resourceContentions: 0,
deadlocksDetected: 0
};
// Initialize worker script
this.workerScriptPath = this.createWorkerScript();
// Start monitoring
this.startResourceMonitoring();
this.startDeadlockDetection();
}
/**
* Get agent capabilities
* @returns {Array<string>} Agent capabilities
*/
getCapabilities() {
return [
'parallel-execution',
'concurrency',
'thread-pool',
'batch-processing',
'resource-management',
'conflict-detection',
'deadlock-prevention'
];
}
/**
* Create worker script file for dynamic agent execution
* @returns {string} Path to worker script
*/
createWorkerScript() {
const scriptPath = path.join(__dirname, 'parallel-worker.js');
if (!fs.existsSync(scriptPath)) {
fs.writeFileSync(scriptPath, WORKER_SCRIPT);
}
return scriptPath;
}
/**
* Initialize worker pool
* @param {number} size - Initial pool size
*/
async initializeWorkerPool(size = this.config.minWorkers) {
this.log('info', `Initializing worker pool with ${size} workers`);
for (let i = 0; i < size; i++) {
await this.createWorker();
}
}
/**
* Create a new worker
* @returns {Promise<string>} Worker ID
*/
async createWorker() {
const workerId = `worker-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
try {
const worker = new Worker(this.workerScriptPath);
const workerContext = {
id: workerId,
worker,
busy: false,
currentTask: null,
createdAt: Date.now(),
tasksCompleted: 0,
lastActivity: Date.now()
};
// Setup worker event handlers
worker.on('message', (message) => this.handleWorkerMessage(workerId, message));
worker.on('error', (error) => this.handleWorkerError(workerId, error));
worker.on('exit', (code) => this.handleWorkerExit(workerId, code));
this.workers.set(workerId, workerContext);
this.log('info', `Created worker ${workerId}`);
return workerId;
} catch (error) {
this.log('error', `Failed to create worker: ${error.message}`, { workerId, error });
throw error;
}
}
/**
* Handle worker messages
* @param {string} workerId - Worker ID
* @param {Object} message - Message from worker
*/
handleWorkerMessage(workerId, message) {
const workerContext = this.workers.get(workerId);
if (!workerContext) return;
const { taskId } = message;
const task = this.activeTasks.get(taskId);
if (!task) {
this.log('warn', `Received message for unknown task: ${taskId}`, { workerId });
return;
}
switch (message.type) {
case 'success':
this.handleTaskSuccess(taskId, message.result, workerId);
break;
case 'error':
this.handleTaskError(taskId, message.error, workerId);
break;
default:
this.log('warn', `Unknown message type: ${message.type}`, { workerId, taskId });
}
}
/**
* Handle worker errors
* @param {string} workerId - Worker ID
* @param {Error} error - Worker error
*/
handleWorkerError(workerId, error) {
this.log('error', `Worker error: ${error.message}`, { workerId, error });
const workerContext = this.workers.get(workerId);
if (workerContext && workerContext.currentTask) {
this.handleTaskError(workerContext.currentTask, error, workerId);
}
// Restart worker
this.restartWorker(workerId);
}
/**
* Handle worker exit
* @param {string} workerId - Worker ID
* @param {number} code - Exit code
*/
handleWorkerExit(workerId, code) {
this.log('info', `Worker exited: ${workerId} (code: ${code})`);
const workerContext = this.workers.get(workerId);
if (workerContext && workerContext.currentTask) {
const error = new Error(`Worker exited unexpectedly (code: ${code})`);
this.handleTaskError(workerContext.currentTask, error, workerId);
}
this.workers.delete(workerId);
// Maintain minimum worker count
if (this.workers.size < this.config.minWorkers) {
this.createWorker().catch((err) =>
this.log('error', `Failed to replace exited worker: ${err.message}`)
);
}
}
/**
* Restart a failed worker
* @param {string} workerId - Worker ID to restart
*/
async restartWorker(workerId) {
try {
const workerContext = this.workers.get(workerId);
if (workerContext) {
await workerContext.worker.terminate();
this.workers.delete(workerId);
}
await this.createWorker();
} catch (error) {
this.log('error', `Failed to restart worker: ${error.message}`, { workerId, error });
}
}
/**
* Execute multiple tasks in parallel
* @param {Object} task - Task definition containing a `tasks` array plus optional `maxConcurrency` and `timeout`
* @param {Object} context - Execution context
* @returns {Promise<Object>} Execution results
*/
async execute(task, context = {}) {
this.validateTask(task);
const { tasks = [], maxConcurrency, timeout } = task;
if (!Array.isArray(tasks) || tasks.length === 0) {
throw new Error('ParallelRunner requires an array of tasks to execute');
}
this.log('info', `Starting parallel execution of ${tasks.length} tasks`);
// Initialize worker pool if needed
if (this.workers.size === 0) {
await this.initializeWorkerPool();
}
// Validate task resources and detect conflicts
const resourceConflicts = this.detectResourceConflicts(tasks);
if (resourceConflicts.length > 0) {
this.log('warn', 'Resource conflicts detected', { conflicts: resourceConflicts });
this.stats.resourceContentions++;
}
// Schedule tasks for execution
const executionPlan = this.createExecutionPlan(tasks, {
maxConcurrency: maxConcurrency || this.config.maxWorkers,
timeout: timeout || this.config.taskTimeout,
conflicts: resourceConflicts
});
try {
const results = await this.executeTaskBatch(executionPlan, context);
return {
success: true,
results,
stats: this.getExecutionStats(),
conflicts: resourceConflicts
};
} catch (error) {
this.log('error', `Parallel execution failed: ${error.message}`, { error });
return {
success: false,
error: error.message,
stats: this.getExecutionStats(),
conflicts: resourceConflicts
};
}
}
/**
* Detect resource conflicts between tasks
* @param {Array<Object>} tasks - Tasks to analyze
* @returns {Array<Object>} Detected conflicts
*/
detectResourceConflicts(tasks) {
const conflicts = [];
const resourceMap = new Map();
for (let i = 0; i < tasks.length; i++) {
const task = tasks[i];
const resources = this.extractTaskResources(task);
for (const resource of resources) {
const conflictingTasks = resourceMap.get(resource.identifier) || [];
// Record a conflict only when another task already uses this resource and at least one access is a write
if (conflictingTasks.length > 0 &&
(resource.type === 'write' || conflictingTasks.some(t => t.resource.type === 'write'))) {
conflicts.push({
resource: resource.identifier,
type: resource.lockType,
tasks: [task.id, ...conflictingTasks.map(t => t.task.id)]
});
}
conflictingTasks.push({ task, resource });
resourceMap.set(resource.identifier, conflictingTasks);
}
}
return conflicts;
}
/**
* Extract resources required by a task
* @param {Object} task - Task definition
* @returns {Array<Object>} Required resources
*/
extractTaskResources(task) {
const resources = [];
// File system resources
if (task.files) {
task.files.forEach(file => {
resources.push({
identifier: path.resolve(file.path),
lockType: LOCK_TYPES.FILE,
type: file.mode || 'read'
});
});
}
// Directory resources
if (task.directories) {
task.directories.forEach(dir => {
resources.push({
identifier: path.resolve(dir.path),
lockType: LOCK_TYPES.DIRECTORY,
type: dir.mode || 'read'
});
});
}
// Port resources
if (task.ports) {
task.ports.forEach(port => {
resources.push({
identifier: `port:${port}`,
lockType: LOCK_TYPES.PORT,
type: 'write'
});
});
}
// Database resources
if (task.database) {
resources.push({
identifier: `db:${task.database.connection}:${task.database.schema}`,
lockType: LOCK_TYPES.DATABASE,
type: task.database.mode || 'read'
});
}
return resources;
}
/**
* Create execution plan with optimal scheduling
* @param {Array<Object>} tasks - Tasks to schedule
* @param {Object} options - Execution options
* @returns {Object} Execution plan
*/
createExecutionPlan(tasks, options) {
const { maxConcurrency, conflicts } = options;
// Group tasks by dependencies and conflicts
const groups = this.groupTasksByDependencies(tasks, conflicts);
return {
groups,
maxConcurrency,
totalTasks: tasks.length,
estimatedDuration: this.estimateExecutionDuration(groups),
options
};
}
/**
* Group tasks by dependencies to optimize parallel execution
* @param {Array<Object>} tasks - Tasks to group
* @param {Array<Object>} conflicts - Resource conflicts
* @returns {Array<Array<Object>>} Grouped tasks
*/
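// Strategy: tasks with no recorded conflicts form the first group; the remaining tasks are packed
// greedily so that no two members of a group share a conflicting resource. Groups run sequentially,
// while tasks inside a group run in parallel.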
groupTasksByDependencies(tasks, conflicts) {
const groups = [];
const processedTasks = new Set();
const conflictMap = new Map();
// Build conflict mapping
conflicts.forEach(conflict => {
conflict.tasks.forEach(taskId => {
if (!conflictMap.has(taskId)) {
conflictMap.set(taskId, new Set());
}
conflict.tasks.forEach(otherTaskId => {
if (taskId !== otherTaskId) {
conflictMap.get(taskId).add(otherTaskId);
}
});
});
});
// Group tasks without conflicts first
const independentTasks = tasks.filter(task =>
!conflictMap.has(task.id) && !processedTasks.has(task.id)
);
if (independentTasks.length > 0) {
groups.push(independentTasks);
independentTasks.forEach(task => processedTasks.add(task.id));
}
// Group remaining tasks by conflict resolution
const remainingTasks = tasks.filter(task => !processedTasks.has(task.id));
while (remainingTasks.length > 0) {
const group = [];
const groupConflicts = new Set();
for (const task of remainingTasks) {
const taskConflicts = conflictMap.get(task.id) || new Set();
// Check if task conflicts with current group
let hasConflict = false;
for (const conflictTaskId of taskConflicts) {
if (group.some(groupTask => groupTask.id === conflictTaskId)) {
hasConflict = true;
break;
}
}
if (!hasConflict) {
group.push(task);
processedTasks.add(task.id);
taskConflicts.forEach(id => groupConflicts.add(id));
}
}
if (group.length > 0) {
groups.push(group);
// Remove processed tasks from remaining
remainingTasks.splice(0, remainingTasks.length,
...remainingTasks.filter(task => !processedTasks.has(task.id))
);
} else {
// Fallback: add one task per group to prevent infinite loop
const task = remainingTasks.shift();
groups.push([task]);
processedTasks.add(task.id);
}
}
return groups;
}
/**
* Execute a batch of task groups
* @param {Object} executionPlan - Execution plan
* @param {Object} context - Execution context
* @returns {Promise<Array<Object>>} Execution results
*/
async executeTaskBatch(executionPlan, context) {
const { groups, maxConcurrency } = executionPlan;
const allResults = [];
this.log('info', `Executing ${groups.length} task groups`, {
totalTasks: executionPlan.totalTasks,
maxConcurrency
});
for (let groupIndex = 0; groupIndex < groups.length; groupIndex++) {
const group = groups[groupIndex];
this.log('info', `Executing group ${groupIndex + 1}/${groups.length} (${group.length} tasks)`);
// Execute group tasks in parallel
const groupResults = await this.executeTaskGroup(group, {
...context,
groupIndex,
maxConcurrency
});
allResults.push(...groupResults);
// Update statistics
this.updateStats(groupResults);
}
return allResults;
}
/**
* Execute a group of tasks in parallel
* @param {Array<Object>} tasks - Tasks to execute
* @param {Object} context - Execution context
* @returns {Promise<Array<Object>>} Group results
*/
async executeTaskGroup(tasks, context) {
const concurrency = Math.min(tasks.length, context.maxConcurrency, this.workers.size);
const results = [];
// Create execution promises
const executeTask = async (task) => {
const taskId = task.id || `task-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
try {
// Acquire resource locks
await this.acquireResourceLocks(task, taskId);
// Execute task
const result = await this.executeTaskOnWorker(task, {
...context,
taskId
});
return {
taskId,
success: true,
result,
duration: result.duration || 0
};
} catch (error) {
return {
taskId,
success: false,
error: error.message,
duration: 0
};
} finally {
// Release resource locks
this.releaseResourceLocks(taskId);
}
};
// Start all tasks; effective concurrency is bounded by worker availability in executeTaskOnWorker
const taskPromises = tasks.map(executeTask);
const groupResults = await Promise.all(taskPromises);
// Update peak concurrency tracking
this.stats.peakConcurrency = Math.max(this.stats.peakConcurrency, concurrency);
return groupResults;
}
/**
* Execute a single task on an available worker
* @param {Object} task - Task to execute
* @param {Object} context - Execution context
* @returns {Promise<Object>} Task result
*/
async executeTaskOnWorker(task, context) {
const worker = await this.getAvailableWorker();
const { taskId } = context;
if (!worker) {
throw new Error('No workers available for task execution');
}
try {
// Mark worker as busy
const workerContext = this.workers.get(worker.id);
workerContext.busy = true;
workerContext.currentTask = taskId;
workerContext.lastActivity = Date.now();
// Add to active tasks
this.activeTasks.set(taskId, {
task,
context,
workerId: worker.id,
startTime: Date.now(),
status: TASK_STATES.RUNNING
});
// Prepare worker data
const workerData = {
agentPath: task.agentPath || path.join(__dirname, 'base-agent.js'),
agentConfig: task.agentConfig || {},
task: task.taskDefinition || task,
context
};
// Execute task with timeout
const result = await this.executeWithTimeout(
() => this.sendTaskToWorker(worker.worker, workerData, taskId),
this.config.taskTimeout
);
return result;
} finally {
// Mark worker as available
const workerContext = this.workers.get(worker.id);
if (workerContext) {
workerContext.busy = false;
workerContext.currentTask = null;
workerContext.tasksCompleted++;
workerContext.lastActivity = Date.now();
}
// Remove from active tasks
this.activeTasks.delete(taskId);
}
}
/**
* Send task to worker and wait for result
* @param {Worker} worker - Worker instance
* @param {Object} workerData - Data to send to worker
* @param {string} taskId - Task ID
* @returns {Promise<Object>} Task result
*/
async sendTaskToWorker(worker, workerData, taskId) {
return new Promise((resolve, reject) => {
const timeout = setTimeout(() => {
reject(new Error(`Task ${taskId} timed out`));
}, this.config.workerTimeout);
const handleMessage = (message) => {
if (message.taskId === taskId) {
clearTimeout(timeout);
worker.off('message', handleMessage);
worker.off('error', handleError);
if (message.type === 'success') {
resolve(message.result);
} else {
reject(new Error(message.error.message));
}
}
};
const handleError = (error) => {
clearTimeout(timeout);
worker.off('message', handleMessage);
worker.off('error', handleError);
reject(error);
};
worker.on('message', handleMessage);
worker.on('error', handleError);
// Send task to worker
worker.postMessage(workerData);
});
}
/**
* Get an available worker
* @returns {Promise<Object>} Available worker
*/
async getAvailableWorker() {
// Claim an idle worker as soon as it is found so concurrent callers are never handed the same one
const claimIdle = () => {
for (const [workerId, workerContext] of this.workers) {
if (!workerContext.busy) {
workerContext.busy = true;
return { id: workerId, ...workerContext };
}
}
return null;
};
const idle = claimIdle();
if (idle) return idle;
// Create a new worker if under the limit, then claim it
if (this.workers.size < this.config.maxWorkers) {
await this.createWorker();
const created = claimIdle();
if (created) return created;
}
// Otherwise wait for a worker to become available
return new Promise((resolve) => {
const checkInterval = setInterval(() => {
const worker = claimIdle();
if (worker) {
clearInterval(checkInterval);
resolve(worker);
}
}, 100);
});
}
/**
* Acquire resource locks for task execution
* @param {Object} task - Task requiring resources
* @param {string} taskId - Task ID
* @returns {Promise<void>}
*/
async acquireResourceLocks(task, taskId) {
const resources = this.extractTaskResources(task);
const acquiredLocks = [];
try {
for (const resource of resources) {
await this.acquireResourceLock(resource, taskId);
acquiredLocks.push(resource.identifier);
}
// Store acquired locks for cleanup, keyed by task (kept separate from the per-resource lock map)
if (acquiredLocks.length > 0) {
this.taskLocks.set(taskId, acquiredLocks);
}
} catch (error) {
// Release any acquired locks on failure
for (const lockId of acquiredLocks) {
this.releaseResourceLock(lockId, taskId);
}
throw error;
}
}
/**
* Acquire a single resource lock
* @param {Object} resource - Resource to lock
* @param {string} taskId - Task ID requesting lock
* @returns {Promise<void>}
*/
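// Lock semantics: any number of concurrent readers may share an identifier, while a write lock is
// exclusive. Acquisition polls every `resourceCheckInterval` ms rather than queueing waiters, so
// ordering between waiting tasks is not guaranteed.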
async acquireResourceLock(resource, taskId) {
const { identifier, type } = resource;
return new Promise((resolve) => {
const tryAcquire = () => {
const currentLock = this.resourceLocks.get(identifier);
if (!currentLock || (type === 'read' && currentLock.type === 'read')) {
// Can acquire lock
this.resourceLocks.set(identifier, {
type,
tasks: currentLock ? [...currentLock.tasks, taskId] : [taskId],
acquiredAt: Date.now()
});
resolve();
} else {
// Wait and retry
setTimeout(tryAcquire, this.config.resourceCheckInterval);
}
};
tryAcquire();
});
}
/**
* Release resource locks for a task
* @param {string} taskId - Task ID to release locks for
*/
releaseResourceLocks(taskId) {
const locks = this.taskLocks.get(taskId);
if (!locks) return;
for (const lockId of locks) {
this.releaseResourceLock(lockId, taskId);
}
this.taskLocks.delete(taskId);
}
/**
* Release a single resource lock
* @param {string} lockId - Lock identifier
* @param {string} taskId - Task ID releasing lock
*/
releaseResourceLock(lockId, taskId) {
const lock = this.resourceLocks.get(lockId);
if (!lock) return;
const updatedTasks = lock.tasks.filter(id => id !== taskId);
if (updatedTasks.length === 0) {
this.resourceLocks.delete(lockId);
} else {
this.resourceLocks.set(lockId, {
...lock,
tasks: updatedTasks
});
}
}
/**
* Handle successful task completion
* @param {string} taskId - Completed task ID
* @param {Object} result - Task result
* @param {string} workerId - Worker ID
*/
handleTaskSuccess(taskId, result, workerId) {
const task = this.activeTasks.get(taskId);
if (!task) return;
const duration = Date.now() - task.startTime;
this.completedTasks.set(taskId, {
...task,
result,
duration,
status: TASK_STATES.COMPLETED,
completedAt: Date.now()
});
this.stats.tasksCompleted++;
this.updateAverageExecutionTime(duration);
this.log('info', `Task completed: ${taskId}`, { duration, workerId });
this.eventEmitter.emit('task:completed', { taskId, result, duration });
}
/**
* Handle task execution error
* @param {string} taskId - Failed task ID
* @param {Object} error - Error details
* @param {string} workerId - Worker ID
*/
handleTaskError(taskId, error, workerId) {
const task = this.activeTasks.get(taskId);
if (!task) return;
const duration = Date.now() - task.startTime;
this.completedTasks.set(taskId, {
...task,
error,
duration,
status: TASK_STATES.FAILED,
completedAt: Date.now()
});
this.stats.tasksFailed++;
this.log('error', `Task failed: ${taskId}`, { error: error.message, duration, workerId });
this.eventEmitter.emit('task:failed', { taskId, error, duration });
}
/**
* Start resource monitoring
*/
startResourceMonitoring() {
if (!this.config.enableResourceMonitoring) return;
this.monitorInterval = setInterval(() => {
this.monitorSystemResources();
}, this.config.resourceCheckInterval);
}
/**
* Start deadlock detection
*/
startDeadlockDetection() {
this.deadlockInterval = setInterval(() => {
this.detectDeadlocks();
}, this.config.deadlockDetectionInterval);
}
/**
* Monitor system resources and adjust worker pool
*/
monitorSystemResources() {
const memUsage = process.memoryUsage();
// Adjust worker pool based on resource usage
const memoryPressure = memUsage.heapUsed / memUsage.heapTotal;
if (memoryPressure > 0.9 && this.workers.size > this.config.minWorkers) {
// Reduce worker count under memory pressure
this.scaleDownWorkers();
} else if (memoryPressure < 0.7 && this.taskQueue.length > 0 &&
this.workers.size < this.config.maxWorkers) {
// Scale up workers when resources are available
this.createWorker();
}
}
/**
* Detect potential deadlocks in resource locking
*/
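// Builds a wait-for graph (task -> tasks currently holding a lock it needs) and runs a
// recursion-stack DFS over it; any cycle indicates a potential deadlock.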
detectDeadlocks() {
const lockGraph = new Map();
// Build dependency graph
for (const [taskId, task] of this.activeTasks) {
const waitingFor = [];
const resources = this.extractTaskResources(task.task);
for (const resource of resources) {
const lock = this.resourceLocks.get(resource.identifier);
if (lock && !lock.tasks.includes(taskId)) {
waitingFor.push(...lock.tasks);
}
}
if (waitingFor.length > 0) {
lockGraph.set(taskId, waitingFor);
}
}
// Detect cycles using DFS
const visited = new Set();
const recStack = new Set();
const hasCycle = (node) => {
visited.add(node);
recStack.add(node);
const neighbors = lockGraph.get(node) || [];
for (const neighbor of neighbors) {
if (!visited.has(neighbor)) {
if (hasCycle(neighbor)) return true;
} else if (recStack.has(neighbor)) {
return true;
}
}
recStack.delete(node);
return false;
};
for (const node of lockGraph.keys()) {
if (!visited.has(node) && hasCycle(node)) {
this.handleDeadlock(lockGraph);
break;
}
}
}
/**
* Handle detected deadlock
* @param {Map} lockGraph - Lock dependency graph
*/
handleDeadlock(lockGraph) {
this.stats.deadlocksDetected++;
this.log('warn', 'Deadlock detected, attempting resolution');
// Simple resolution: cancel the youngest task that is waiting on locks held by other tasks
const waitingTasks = Array.from(this.activeTasks.entries())
.filter(([taskId]) => lockGraph.has(taskId))
.sort(([, a], [, b]) => b.startTime - a.startTime);
if (waitingTasks.length > 0) {
this.cancelTask(waitingTasks[0][0]);
}
}
/**
* Cancel a running task
* @param {string} taskId - Task ID to cancel
*/
cancelTask(taskId) {
const task = this.activeTasks.get(taskId);
if (!task) return;
// Release resource locks
this.releaseResourceLocks(taskId);
// Mark as cancelled
this.completedTasks.set(taskId, {
...task,
status: TASK_STATES.CANCELLED,
cancelledAt: Date.now()
});
this.activeTasks.delete(taskId);
this.log('info', `Task cancelled: ${taskId}`);
this.eventEmitter.emit('task:cancelled', { taskId });
}
/**
* Scale down worker pool
*/
scaleDownWorkers() {
const availableWorkers = Array.from(this.workers.values())
.filter(w => !w.busy)
.sort((a, b) => a.lastActivity - b.lastActivity);
if (availableWorkers.length > 0) {
const worker = availableWorkers[0];
worker.worker.terminate();
this.workers.delete(worker.id);
this.log('info', `Scaled down worker pool: ${worker.id}`);
}
}
/**
* Update execution statistics
* @param {Array<Object>} results - Task results
*/
updateStats(results) {
for (const result of results) {
// Tasks recorded by handleTaskSuccess/handleTaskError are already counted; skip to avoid double counting
if (this.completedTasks.has(result.taskId)) continue;
if (result.success) {
this.stats.tasksCompleted++;
this.updateAverageExecutionTime(result.duration);
} else {
this.stats.tasksFailed++;
}
}
}
/**
* Update average execution time
* @param {number} duration - Task duration
*/
updateAverageExecutionTime(duration) {
const totalTasks = this.stats.tasksCompleted + this.stats.tasksFailed;
this.stats.averageExecutionTime =
(this.stats.averageExecutionTime * (totalTasks - 1) + duration) / totalTasks;
}
/**
* Get execution statistics
* @returns {Object} Current statistics
*/
getExecutionStats() {
return {
...this.stats,
activeWorkers: this.workers.size,
busyWorkers: Array.from(this.workers.values()).filter(w => w.busy).length,
activeTasks: this.activeTasks.size,
queuedTasks: this.taskQueue.length,
resourceLocks: this.resourceLocks.size,
totalTasks: this.stats.tasksCompleted + this.stats.tasksFailed
};
}
/**
* Estimate execution duration for task groups
* @param {Array<Array<Object>>} groups - Task groups
* @returns {number} Estimated duration in milliseconds
*/
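// Groups run one after another, so the estimate is the sum over groups of each group's slowest task
// (per-task `estimatedDuration`, falling back to the running average or 30 seconds).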
estimateExecutionDuration(groups) {
let totalDuration = 0;
for (const group of groups) {
const groupDuration = Math.max(
...group.map(task => task.estimatedDuration || this.stats.averageExecutionTime || 30000)
);
totalDuration += groupDuration;
}
return totalDuration;
}
/**
* Execute with timeout wrapper
* @param {Function} fn - Function returning the promise to execute
* @param {number} timeoutMs - Timeout in milliseconds
* @returns {Promise} Result of the function, or a rejection if the timeout elapses first
*/
async executeWithTimeout(fn, timeoutMs) {
let timer;
const timeout = new Promise((_, reject) => {
timer = setTimeout(() => reject(new Error(`Operation timed out after ${timeoutMs}ms`)), timeoutMs);
});
try {
return await Promise.race([fn(), timeout]);
} finally {
// Clear the pending timer so it cannot fire (or keep the event loop alive) after the race settles
clearTimeout(timer);
}
}
/**
* Get system prompt for the agent
* @returns {string} System prompt
*/
getSystemPrompt() {
return `You are the Parallel Runner Agent for Shipdeck Ultimate.
Your role is to orchestrate the parallel execution of multiple agents with sophisticated thread pool management and resource conflict detection.
Key capabilities:
- True parallel execution using Node.js worker_threads
- Advanced thread pool management with automatic scaling
- Resource lock management and deadlock prevention
- Intelligent task scheduling and conflict resolution
- Real-time progress monitoring and performance tracking
You excel at:
- Analyzing task dependencies and resource requirements
- Optimizing concurrent execution strategies
- Preventing resource contention and deadlocks
- Monitoring system resources and adjusting execution plans
- Providing detailed execution statistics and insights
When executing parallel tasks, you automatically:
1. Analyze resource requirements and detect conflicts
2. Create optimal execution plans with proper scheduling
3. Manage worker threads with intelligent load balancing
4. Monitor execution progress and handle failures gracefully
5. Provide comprehensive execution reports and statistics
Always prioritize system stability, resource efficiency, and execution reliability.`;
}
/**
* Cleanup resources on shutdown
*/
async cleanup() {
this.log('info', 'Starting parallel runner cleanup');
// Cancel all active tasks
for (const taskId of this.activeTasks.keys()) {
this.cancelTask(taskId);
}
// Terminate all workers
const terminationPromises = Array.from(this.workers.values())
.map(workerContext => workerContext.worker.terminate());
await Promise.all(terminationPromises);
// Stop monitoring timers
clearInterval(this.monitorInterval);
clearInterval(this.deadlockInterval);
// Clear state
this.workers.clear();
this.activeTasks.clear();
this.resourceLocks.clear();
this.taskLocks.clear();
this.taskQueue.length = 0;
// Remove worker script file
if (fs.existsSync(this.workerScriptPath)) {
fs.unlinkSync(this.workerScriptPath);
}
this.log('info', 'Parallel runner cleanup completed');
await super.cleanup();
}
}
module.exports = { ParallelRunnerAgent, TASK_STATES, LOCK_TYPES };
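/*
* Usage sketch (illustrative only, not part of the module): assumes this file is saved as
* `parallel-runner.js` and that `./my-agent.js` exports a BaseAgent subclass implementing
* `executeWithRetry(task, context)`; the task shapes below are examples.
*
*   const { ParallelRunnerAgent } = require('./parallel-runner');
*
*   const runner = new ParallelRunnerAgent({ maxWorkers: 4 });
*   runner.execute({
*     tasks: [
*       { id: 'lint', agentPath: require.resolve('./my-agent'), files: [{ path: 'src/index.js', mode: 'read' }] },
*       { id: 'build', agentPath: require.resolve('./my-agent'), files: [{ path: 'dist', mode: 'write' }] }
*     ]
*   }).then(({ success, results, stats }) => {
*     console.log(success, stats.tasksCompleted, results.length);
*     return runner.cleanup();
*   });
*/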