shipdeck
Version:
Ship MVPs in 48 hours. Fix bugs in 30 seconds. The command deck for developers who ship.
463 lines (372 loc) • 12.2 kB
JavaScript
/**
* Workflow Runner for Shipdeck Ultimate
* Executes workflows with state management and persistence
*/
const fs = require('fs');
const path = require('path');
const crypto = require('crypto');
class WorkflowRunner {
constructor() {
this.workflowsPath = path.join(process.cwd(), '.shipdeck', 'workflows');
this.activeWorkflows = new Map();
this.completedWorkflows = new Set();
// Ensure workflows directory exists
this.ensureWorkflowsDirectory();
}
/**
* Ensure workflows directory exists
*/
ensureWorkflowsDirectory() {
if (!fs.existsSync(this.workflowsPath)) {
fs.mkdirSync(this.workflowsPath, { recursive: true });
}
}
/**
* Execute a workflow
* @param {Object} workflow - Workflow to execute
* @returns {Object} Execution result
*/
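// Expected workflow shape (illustrative sketch inferred from how fields are
// used below — these names are conventions in this file, not a validated schema):
// {
//   id: 'wf-...',                                  // e.g. from generateId()
//   steps: [
//     { name: 'scaffold', description: 'Scaffold project' },
//     { name: 'checks', parallel: true, agents: ['lint', 'test'],
//       dependsOn: ['scaffold'], files: ['src/index.js'] }
//   ]
// }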
async execute(workflow) {
console.log(`\n🚀 Starting workflow: ${workflow.id}`);
// Initialize workflow properties
workflow.status = 'running';
workflow.startedAt = new Date().toISOString();
workflow.results = workflow.results || {};
// Preserve currentStep so a resumed workflow does not restart from scratch
workflow.currentStep = workflow.currentStep || 0;
// Store in active workflows
this.activeWorkflows.set(workflow.id, workflow);
// Save initial state
await this.saveState(workflow);
try {
// Execute each step
for (let i = workflow.currentStep; i < workflow.steps.length; i++) {
const step = workflow.steps[i];
workflow.currentStep = i;
// Check if step can run in parallel
if (step.parallel && this.canRunParallel(step, workflow)) {
await this.executeParallelStep(step, workflow);
} else {
await this.executeSequentialStep(step, workflow);
}
// Save progress after each step
await this.saveState(workflow);
// Check for interruption
if (workflow.status === 'paused' || workflow.status === 'cancelled') {
console.log(`⏸ Workflow ${workflow.status}: ${workflow.id}`);
break;
}
}
// Mark as completed if all steps done
if (workflow.status === 'running' && workflow.currentStep >= workflow.steps.length - 1) {
workflow.status = 'completed';
workflow.completedAt = new Date().toISOString();
console.log(`✅ Workflow completed: ${workflow.id}`);
}
} catch (error) {
workflow.status = 'failed';
workflow.error = error.message;
workflow.failedAt = new Date().toISOString();
console.error(`❌ Workflow failed: ${error.message}`);
throw error;
} finally {
// Save final state
await this.saveState(workflow);
// Move from active to completed
this.activeWorkflows.delete(workflow.id);
this.completedWorkflows.add(workflow.id);
}
return workflow;
}
/**
* Execute a sequential step
*/
async executeSequentialStep(step, workflow) {
console.log(`\n📋 Step ${workflow.currentStep + 1}: ${step.name || step.description}`);
step.status = 'running';
step.startedAt = new Date().toISOString();
try {
// Simulate step execution (in real implementation, this would call agents)
const result = await this.runStep(step, workflow);
step.result = result;
step.status = 'completed';
step.completedAt = new Date().toISOString();
// Store result in workflow context
if (result && step.name) {
workflow.results[step.name] = result;
}
console.log(`✅ Step completed: ${step.name || step.description}`);
} catch (error) {
step.status = 'failed';
step.error = error.message;
step.failedAt = new Date().toISOString();
throw error;
}
}
/**
* Execute parallel steps
*/
async executeParallelStep(step, workflow) {
console.log(`\n⚡ Parallel execution: ${step.name || step.description}`);
const parallelTasks = step.agents || step.tasks || [];
if (parallelTasks.length === 0) {
// Treat as sequential if no parallel tasks defined
return this.executeSequentialStep(step, workflow);
}
step.status = 'running';
step.startedAt = new Date().toISOString();
try {
// Execute all tasks in parallel
const promises = parallelTasks.map(task =>
this.runTask(task, workflow).catch(error => ({
error: error.message,
task: task
}))
);
const results = await Promise.all(promises);
// Check for failures
const failures = results.filter(r => r.error);
if (failures.length > 0) {
step.status = 'partial';
step.failures = failures;
console.warn(`⚠️ ${failures.length} tasks failed in parallel execution`);
} else {
step.status = 'completed';
}
step.result = results;
step.completedAt = new Date().toISOString();
// Store results
if (step.name) {
workflow.results[step.name] = results;
}
console.log(`✅ Parallel step completed: ${results.length} tasks`);
} catch (error) {
step.status = 'failed';
step.error = error.message;
step.failedAt = new Date().toISOString();
throw error;
}
}
/**
* Check if step can run in parallel
*/
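// A step may run in parallel only if (a) none of its declared files overlap
// with files touched by earlier steps, and (b) every step named in dependsOn
// has already completed. Both checks are best-effort: they rely on each step
// declaring its files/dependsOn metadata.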
canRunParallel(step, workflow) {
// Check for file conflicts
if (step.files) {
const previousFiles = new Set();
for (let i = 0; i < workflow.currentStep; i++) {
const prevStep = workflow.steps[i];
if (prevStep.files) {
prevStep.files.forEach(f => previousFiles.add(f));
}
}
// Check for overlaps
for (const file of step.files) {
if (previousFiles.has(file)) {
console.log(`⚠️ File conflict detected: ${file}`);
return false;
}
}
}
// Check for explicit dependencies
if (step.dependsOn) {
for (const dep of step.dependsOn) {
const depStep = workflow.steps.find(s => s.name === dep);
if (depStep && depStep.status !== 'completed') {
console.log(`⚠️ Dependency not met: ${dep}`);
return false;
}
}
}
return true;
}
/**
* Run a single step (mock implementation)
*/
async runStep(step, workflow) {
// Simulate work
await this.delay(500);
// In real implementation, this would:
// 1. Load the appropriate agent
// 2. Pass the step configuration
// 3. Execute the agent
// 4. Return the result
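//
// A real implementation might look roughly like this (hypothetical agent
// loader and API, not part of this file):
//   const agent = loadAgent(step.agent);        // assumed helper
//   return agent.run(step, workflow.results);   // assumed agent interface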
return {
success: true,
output: `Executed: ${step.name || step.description}`,
timestamp: new Date().toISOString()
};
}
/**
* Run a single task in parallel
*/
async runTask(task, workflow) {
// Simulate work with random delay
await this.delay(Math.random() * 1000 + 500);
return {
task: task.name || task,
success: true,
output: `Completed: ${task.name || task}`,
timestamp: new Date().toISOString()
};
}
/**
* Save workflow state to disk
*/
async saveState(workflow) {
const statePath = path.join(this.workflowsPath, `${workflow.id}.json`);
const state = {
...workflow,
savedAt: new Date().toISOString()
};
fs.writeFileSync(statePath, JSON.stringify(state, null, 2));
}
/**
* Load workflow state from disk
*/
async loadState(workflowId) {
const statePath = path.join(this.workflowsPath, `${workflowId}.json`);
if (!fs.existsSync(statePath)) {
throw new Error(`Workflow not found: ${workflowId}`);
}
const content = fs.readFileSync(statePath, 'utf8');
return JSON.parse(content);
}
/**
* Resume a workflow from where it left off
*/
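// Note: resume() reloads the persisted state from .shipdeck/workflows/<id>.json
// and re-enters execute(), which picks the loop up at workflow.currentStep, so
// the interrupted (or failed) step is retried rather than the whole run.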
async resume(workflowId) {
console.log(`🔄 Resuming workflow: ${workflowId}`);
const workflow = await this.loadState(workflowId);
if (workflow.status === 'completed') {
console.log('ℹ️ Workflow already completed');
return workflow;
}
if (workflow.status === 'failed') {
console.log('🔄 Retrying failed workflow...');
workflow.status = 'running';
workflow.retryCount = (workflow.retryCount || 0) + 1;
}
// Resume from current step
console.log(`📍 Resuming from step ${workflow.currentStep + 1}/${workflow.steps.length}`);
return this.execute(workflow);
}
/**
* Get workflow status
*/
async getStatus(workflowId) {
// Check active workflows first
if (this.activeWorkflows.has(workflowId)) {
return this.activeWorkflows.get(workflowId);
}
// Load from disk
try {
return await this.loadState(workflowId);
} catch (error) {
return null;
}
}
/**
* List all workflows
*/
async listWorkflows() {
const files = fs.readdirSync(this.workflowsPath)
.filter(f => f.endsWith('.json'));
const workflows = [];
for (const file of files) {
const id = path.basename(file, '.json');
const workflow = await this.loadState(id);
workflows.push({
id: workflow.id,
name: workflow.name || workflow.templateId,
status: workflow.status,
createdAt: workflow.createdAt,
currentStep: workflow.currentStep,
totalSteps: workflow.steps.length
});
}
return workflows.sort((a, b) =>
new Date(b.createdAt) - new Date(a.createdAt)
);
}
/**
* Cancel a running workflow
*/
async cancel(workflowId) {
const workflow = this.activeWorkflows.get(workflowId);
if (!workflow) {
throw new Error(`Workflow not active: ${workflowId}`);
}
workflow.status = 'cancelled';
workflow.cancelledAt = new Date().toISOString();
await this.saveState(workflow);
this.activeWorkflows.delete(workflowId);
this.completedWorkflows.add(workflowId);
console.log(`🛑 Workflow cancelled: ${workflowId}`);
return workflow;
}
/**
* Pause a running workflow
*/
async pause(workflowId) {
const workflow = this.activeWorkflows.get(workflowId);
if (!workflow) {
throw new Error(`Workflow not active: ${workflowId}`);
}
workflow.status = 'paused';
workflow.pausedAt = new Date().toISOString();
await this.saveState(workflow);
console.log(`⏸ Workflow paused: ${workflowId}`);
return workflow;
}
/**
* Clean up old workflows
*/
async cleanup(daysToKeep = 30) {
const cutoffDate = new Date();
cutoffDate.setDate(cutoffDate.getDate() - daysToKeep);
const files = fs.readdirSync(this.workflowsPath)
.filter(f => f.endsWith('.json'));
let deleted = 0;
for (const file of files) {
const filePath = path.join(this.workflowsPath, file);
const stat = fs.statSync(filePath);
if (stat.mtime < cutoffDate) {
fs.unlinkSync(filePath);
deleted++;
}
}
console.log(`🧹 Cleaned up ${deleted} old workflows`);
return deleted;
}
/**
* Get workflow metrics
*/
async getMetrics() {
const workflows = await this.listWorkflows();
const metrics = {
total: workflows.length,
completed: workflows.filter(w => w.status === 'completed').length,
failed: workflows.filter(w => w.status === 'failed').length,
running: workflows.filter(w => w.status === 'running').length,
paused: workflows.filter(w => w.status === 'paused').length,
averageSteps: 0,
successRate: 0
};
if (workflows.length > 0) {
metrics.averageSteps = workflows.reduce((sum, w) => sum + w.totalSteps, 0) / workflows.length;
const finished = metrics.completed + metrics.failed;
metrics.successRate = finished > 0 ? (metrics.completed / finished) * 100 : 0;
}
return metrics;
}
/**
* Helper: delay function
*/
delay(ms) {
return new Promise(resolve => setTimeout(resolve, ms));
}
/**
* Generate workflow ID
*/
generateId() {
return `wf-${Date.now()}-${crypto.randomBytes(4).toString('hex')}`;
}
}
module.exports = WorkflowRunner;
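/*
 * Usage sketch (illustrative only — the require path and step contents are
 * assumptions, not taken from the package docs):
 *
 *   const WorkflowRunner = require('./workflow-runner');
 *   const runner = new WorkflowRunner();
 *   const workflow = {
 *     id: runner.generateId(),
 *     createdAt: new Date().toISOString(),
 *     steps: [
 *       { name: 'scaffold', description: 'Scaffold the project' },
 *       { name: 'checks', parallel: true, agents: ['lint', 'test'],
 *         dependsOn: ['scaffold'] }
 *     ]
 *   };
 *   runner.execute(workflow)
 *     .then(w => console.log(`Finished with status: ${w.status}`))
 *     .catch(() => runner.resume(workflow.id));
 */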