dryrun-ci

DryRun CI - Local GitLab CI/CD pipeline testing tool with Docker execution, performance monitoring, and security sandboxing

"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.PipelineExecutor = void 0; const events_1 = require("events"); const performanceMonitor_1 = require("./performanceMonitor"); const securitySandbox_1 = require("./securitySandbox"); const dockerExecutor_1 = require("./dockerExecutor"); const projectScanner_1 = require("./projectScanner"); const realTimeUiManager_1 = require("./realTimeUiManager"); class PipelineExecutor extends events_1.EventEmitter { constructor(pipeline, config) { super(); this.execution = null; this.pipeline = pipeline; this.config = config; this.performanceMonitor = new performanceMonitor_1.PerformanceMonitor(); this.securitySandbox = new securitySandbox_1.SecuritySandbox(config.security.level, config.security.allowNetwork, config.security.allowedPaths, config.security.deniedPaths); this.dockerExecutor = new dockerExecutor_1.DockerExecutor(); this.projectScanner = new projectScanner_1.ProjectScanner(); this.uiManager = new realTimeUiManager_1.RealTimeUiManager(); this.securitySandbox.on('security-alert', (alert) => { this.emit('security-alert', alert); }); this.performanceMonitor.on('metrics-update', (metrics) => { this.emit('performance-update', metrics); }); } async executeJob(job) { const jobExecution = { name: job.name, stage: job.stage, status: 'pending', startTime: new Date(), output: [] }; try { this.uiManager.startJob(job); this.emit('job-started', { name: job.name, containerId: null }); this.performanceMonitor.start(); if (job.artifacts?.paths || job.cache?.paths) { const scanResult = await this.projectScanner.scan(job.stage); jobExecution.output.push(...scanResult.logs); } const containerId = await this.dockerExecutor.createContainer(job); jobExecution.containerId = containerId; this.emit('job-started', { name: job.name, containerId }); const { exitCode, output } = await this.dockerExecutor.executeJob(containerId, job); jobExecution.output.push(output); jobExecution.exitCode = exitCode; jobExecution.status = exitCode === 0 ? 'success' : 'failed'; jobExecution.endTime = new Date(); const metrics = this.performanceMonitor.getMetrics(); if (metrics) { jobExecution.metrics = metrics; } this.performanceMonitor.stop(); try { await this.dockerExecutor.removeContainer(containerId); } catch (cleanupError) { console.error(`Failed to cleanup container ${containerId}:`, cleanupError); jobExecution.output.push(`Warning: Failed to cleanup container: ${cleanupError instanceof Error ? cleanupError.message : 'Unknown error'}`); } this.uiManager.completeJob(jobExecution); this.emit('job-completed', { name: job.name, containerId, metrics }); } catch (error) { jobExecution.status = 'failed'; jobExecution.endTime = new Date(); jobExecution.output.push(`Error: ${error instanceof Error ? 
error.message : 'Unknown error'}`); this.uiManager.completeJob(jobExecution); this.emit('job-failed', { name: job.name, error }); } return jobExecution; } async execute() { this.execution = { id: `pipeline-${Date.now()}`, status: 'pending', startTime: new Date(), jobs: [] }; try { await this.validatePipelineConfiguration(); this.uiManager.startPipeline(); this.emit('pipeline-started', { id: this.execution.id }); for (const stage of this.pipeline.stages) { const stageJobs = Object.values(this.pipeline.jobs).filter(job => job.stage === stage); if (stageJobs.length === 0) { console.log(`⚠️ Stage '${stage}' has no jobs - skipping`); continue; } console.log(`\n📦 Stage: ${stage}`); this.uiManager.startStage(stage, stageJobs.length); for (const job of stageJobs) { const jobExecution = await this.executeJob(job); this.execution.jobs.push(jobExecution); if (jobExecution.status === 'failed') { this.execution.status = 'failed'; } } this.uiManager.completeStage(stage); } if (this.execution.status !== 'failed') { this.execution.status = 'success'; } this.execution.endTime = new Date(); this.uiManager.completePipeline(this.execution); this.emit('pipeline-completed', { id: this.execution.id, status: this.execution.status, duration: this.execution.endTime.getTime() - this.execution.startTime.getTime() }); } catch (error) { this.execution.status = 'failed'; this.execution.endTime = new Date(); console.error('Pipeline execution failed:', error); this.emit('pipeline-failed', { id: this.execution.id, error: error instanceof Error ? error.message : 'Unknown error' }); } return this.execution; } async validatePipelineConfiguration() { console.log('🔍 Validating pipeline configuration...'); const issues = []; const suggestions = []; for (const job of Object.values(this.pipeline.jobs)) { if (job.image === 'docker:latest' && job.before_script?.some((cmd) => cmd.includes('pip install'))) { issues.push(`Job '${job.name}': Using docker:latest with pip install commands`); suggestions.push(`Consider using python:3.11-slim for Python operations`); } if (!job.script || job.script.length === 0) { issues.push(`Job '${job.name}': No script defined`); suggestions.push(`Add a script section to job '${job.name}'`); } if (job.image?.includes('latest')) { suggestions.push(`Job '${job.name}': Consider using specific version tag instead of 'latest'`); } if (job.before_script?.some((cmd) => cmd.includes('curl') && cmd.includes('| bash'))) { issues.push(`Job '${job.name}': Using curl | bash pattern (security risk)`); suggestions.push(`Download and verify scripts before execution`); } } const emptyStages = this.pipeline.stages.filter(stage => !Object.values(this.pipeline.jobs).some(job => job.stage === stage)); if (emptyStages.length > 0) { issues.push(`Empty stages: ${emptyStages.join(', ')}`); suggestions.push(`Remove unused stages or add jobs to them`); } if (issues.length > 0) { console.log('\n⚠️ Configuration Issues Found:'); issues.forEach(issue => console.log(` - ${issue}`)); } if (suggestions.length > 0) { console.log('\n💡 Suggestions:'); suggestions.forEach(suggestion => console.log(` - ${suggestion}`)); } if (issues.length === 0 && suggestions.length === 0) { console.log('✅ Pipeline configuration looks good!'); } else if (issues.length > 0) { console.log('\n❓ Continue with execution? 
(Issues found but not blocking)'); } } async stop() { if (!this.execution) return; try { for (const job of this.execution.jobs) { if (job.status === 'running' && job.containerId) { try { await this.dockerExecutor.stopContainer(job.containerId); } catch (error) { console.error(`Failed to stop container ${job.containerId}:`, error); } } } this.execution.status = 'canceled'; this.execution.endTime = new Date(); this.performanceMonitor.stop(); this.emit('pipeline-canceled', this.execution); } catch (error) { console.error('Failed to stop pipeline:', error); throw error; } } async cleanup() { try { this.performanceMonitor.stop(); if (this.execution) { for (const job of this.execution.jobs) { if (job.containerId) { try { await this.dockerExecutor.removeContainer(job.containerId); } catch (error) { console.error(`Failed to remove container ${job.containerId}:`, error); } } } } this.emit('cleanup-completed'); } catch (error) { console.error('Failed to cleanup pipeline:', error); throw error; } } getJobMetrics(jobId) { const metrics = this.performanceMonitor.getMetrics(); if (!metrics) return undefined; return { jobId, startTime: Date.now(), endTime: undefined, cpu: metrics.cpu, memory: metrics.memory, gc: metrics.gc }; } getMetricsReport(jobId) { const metrics = this.getJobMetrics(jobId); if (!metrics) return 'No metrics available'; return ` Job Metrics - ${jobId} ========================================== Duration: ${metrics.endTime ? (metrics.endTime - metrics.startTime) / 1000 : 'N/A'} seconds CPU Usage: System: ${metrics.cpu.system.toFixed(2)}s User: ${metrics.cpu.user.toFixed(2)}s Total: ${metrics.cpu.usage.toFixed(2)}s Memory Usage: Heap Used: ${(metrics.memory.heapUsed / 1024 / 1024).toFixed(2)} MB Heap Total: ${(metrics.memory.heapTotal / 1024 / 1024).toFixed(2)} MB External: ${(metrics.memory.external / 1024 / 1024).toFixed(2)} MB RSS: ${(metrics.memory.rss / 1024 / 1024).toFixed(2)} MB ${metrics.gc ? `Garbage Collection: Type: ${metrics.gc.type} Duration: ${metrics.gc.duration.toFixed(2)}ms` : ''} ========================================== `.trim(); } } exports.PipelineExecutor = PipelineExecutor;
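
A minimal usage sketch of this class, inferred only from what the constructor and execute() read: a pipeline object with a stages array and a jobs map, and a config object with a security block. The require path, the job fields shown, and the 'standard' security level are assumptions for illustration; the package's actual entry point and type definitions are not part of this file.

// Hypothetical usage sketch - require path, job fields, and security level are assumed.
const { PipelineExecutor } = require('./pipelineExecutor');

// Pipeline shape mirrors what execute() and validatePipelineConfiguration() access:
// pipeline.stages (array) and pipeline.jobs (map of job definitions with name, stage, image, script).
const pipeline = {
    stages: ['build', 'test'],
    jobs: {
        build: { name: 'build', stage: 'build', image: 'node:20', script: ['npm ci', 'npm run build'] },
        test: { name: 'test', stage: 'test', image: 'node:20', script: ['npm test'] }
    }
};

// Config shape mirrors the constructor's config.security reads.
const config = {
    security: { level: 'standard', allowNetwork: true, allowedPaths: [], deniedPaths: [] }
};

const executor = new PipelineExecutor(pipeline, config);
executor.on('job-completed', (event) => console.log(`Job ${event.name} finished`));

// execute() resolves with the pipeline execution record; cleanup() removes any leftover containers.
executor.execute()
    .then((result) => console.log(`Pipeline ${result.id} ended with status: ${result.status}`))
    .catch((err) => console.error(err))
    .finally(() => executor.cleanup());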