dryrun-ci
DryRun CI - Local GitLab CI/CD pipeline testing tool with Docker execution, performance monitoring, and security sandboxing
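A minimal usage sketch (not from the package documentation): the config fields, job shape, and pipeline shape are inferred from the code below, while the require path and all concrete values are assumptions; a reachable Docker daemon is expected because the constructor pings it.

// Hypothetical usage; the 'dryrun-ci' entry point and the values here are assumptions.
const { GitLabRunnerCompatibilityLayer } = require('dryrun-ci');
const runner = new GitLabRunnerCompatibilityLayer({
    builds_dir: '/tmp/dryrun/builds', // defaults to '/builds'
    cache_dir: '/tmp/dryrun/cache',   // defaults to '/cache'
    executorType: 'shell'             // 'docker' additionally cleans up the generated temp scripts
});
runner.on('script-output', ({ output }) => process.stdout.write(output));
runner.executeJob(
    { name: 'unit-tests', stage: 'test', script: ['echo "hello from dryrun"'] },          // job definition
    { id: 1, sha: 'abc123', ref: 'main', project: { name: 'demo', path: 'group/demo' } }, // pipeline context
    {}                                                                                     // executor config (unused on the shell path)
).then((job) => console.log(job.status), (err) => console.error(err));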
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.GitLabRunnerCompatibilityLayer = void 0;
const events_1 = require("events");
const fs = __importStar(require("fs/promises"));
const path = __importStar(require("path"));
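// Emulates a GitLab Runner locally: builds the predefined CI_* variable set, prepares a
// working directory, runs before_script/script/after_script via Docker or the local shell,
// and handles cache and artifact phases, emitting progress events along the way.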
class GitLabRunnerCompatibilityLayer extends events_1.EventEmitter {
constructor(config) {
super();
this.activeJobs = new Map();
this.config = config;
this.buildsDir = config.builds_dir || '/builds';
this.cacheDir = config.cache_dir || '/cache';
        // Async initialization; catch the rejections so a missing Docker daemon or a
        // failed mkdir does not surface as an unhandled promise rejection.
        this.initializeDocker().catch(() => { });
        this.setupDirectories().catch(() => { });
}
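    // Connects to the local Docker daemon via dockerode and pings it; emits
    // 'runner-ready' on success or 'runner-error' (and throws) on failure.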
async initializeDocker() {
try {
const Docker = require('dockerode');
this.docker = new Docker();
await this.docker.ping();
this.emit('runner-ready');
}
catch (error) {
this.emit('runner-error', { error: error instanceof Error ? error.message : 'Unknown error' });
throw new Error('Failed to initialize GitLab Runner compatibility layer');
}
}
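    // Creates the builds and cache directories (recursively) shared by all jobs.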
async setupDirectories() {
try {
await fs.mkdir(this.buildsDir, { recursive: true });
await fs.mkdir(this.cacheDir, { recursive: true });
this.emit('directories-ready', { buildsDir: this.buildsDir, cacheDir: this.cacheDir });
}
catch (error) {
            throw new Error(`Failed to setup runner directories: ${error instanceof Error ? error.message : 'Unknown error'}`);
}
}
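    // Runs a single job through the GitLab Runner phases: prepare environment, fetch
    // sources, restore cache, before_script/script/after_script, archive cache, upload
    // artifacts, then cleanup. Resolves with the job execution record; on failure it
    // still uploads artifacts when `artifacts.when` is 'on_failure' or 'always'.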
async executeJob(job, pipeline, config) {
const jobExecution = this.createJobExecution(job, pipeline);
this.activeJobs.set(jobExecution.id, jobExecution);
this.emit('job-started', { jobId: jobExecution.jobId, name: jobExecution.name });
try {
jobExecution.status = 'running';
jobExecution.startedAt = new Date();
await this.prepareJobEnvironment(jobExecution, config);
await this.downloadArtifactsAndCache(jobExecution);
await this.cloneRepository(jobExecution, pipeline);
await this.downloadJobArtifacts(jobExecution, pipeline);
await this.restoreCache(jobExecution);
await this.downloadUserArtifacts(jobExecution);
if (job.before_script) {
await this.executeScript(jobExecution, job.before_script, 'before_script');
}
if (job.script) {
await this.executeScript(jobExecution, job.script, 'script');
}
if (job.after_script) {
await this.executeScript(jobExecution, job.after_script, 'after_script');
}
await this.archiveCache(jobExecution);
await this.uploadArtifacts(jobExecution);
jobExecution.status = 'success';
jobExecution.finishedAt = new Date();
this.emit('job-completed', {
jobId: jobExecution.jobId,
status: 'success',
duration: jobExecution.finishedAt.getTime() - jobExecution.startedAt.getTime()
});
}
catch (error) {
jobExecution.status = 'failed';
jobExecution.finishedAt = new Date();
jobExecution.failureReason = error instanceof Error ? error.message : 'Unknown error';
this.emit('job-failed', {
jobId: jobExecution.jobId,
error: jobExecution.failureReason
});
if (job.artifacts?.when === 'on_failure' || job.artifacts?.when === 'always') {
try {
await this.uploadArtifacts(jobExecution);
}
catch (artifactError) {
this.emit('artifact-upload-failed', {
jobId: jobExecution.jobId,
error: artifactError instanceof Error ? artifactError.message : 'Unknown error'
});
}
}
throw error;
}
finally {
await this.cleanupJobResources(jobExecution);
}
return jobExecution;
}
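    // Builds the in-memory execution record: numeric job id, job token, predefined and
    // user variables, feature flags, and normalized artifact/cache/service configuration.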
createJobExecution(job, pipeline) {
const jobId = Math.floor(Math.random() * 1000000);
const token = this.generateJobToken();
const executionId = `job-${jobId}-${Date.now()}`;
const predefinedVariables = this.createPredefinedVariables(job, pipeline, jobId, token);
const environment = {
predefinedVariables,
userVariables: Object.entries(job.variables || {}).reduce((acc, [key, value]) => {
acc[key] = typeof value === 'string' ? value : value.value;
return acc;
}, {}),
jobToken: token,
artifactsUrl: `http://localhost:8080/api/v4/jobs/${jobId}/artifacts`,
cacheUrl: `http://localhost:8080/api/v4/jobs/${jobId}/cache`,
featuresFlags: {
FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY: false,
FF_USE_DIRECT_DOWNLOAD: true,
FF_SKIP_NOOP_BUILD_STAGES: true,
FF_USE_FASTZIP: true,
FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR: true,
FF_ENABLE_BASH_EXIT_CODE_CHECK: true,
FF_USE_WINDOWS_LEGACY_PROCESS_STRATEGY: false,
FF_USE_NEW_BASH_EVAL_STRATEGY: true,
FF_USE_POWERSHELL_PATH_RESOLVER: true
}
};
return {
id: executionId,
jobId,
token,
stage: job.stage || 'test',
name: job.name,
workingDirectory: path.join(this.buildsDir, pipeline.project?.name || 'project'),
environment,
artifacts: this.normalizeArtifactConfig(job.artifacts),
cache: this.normalizeCacheConfig(job.cache),
services: this.normalizeServices(job.services),
status: 'pending'
};
}
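    // Returns the set of GitLab predefined CI/CD variables, filled with local
    // placeholder values (localhost server URLs, fake commit metadata, etc.).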
createPredefinedVariables(job, pipeline, jobId, token) {
const projectName = pipeline.project?.name || 'project';
const projectPath = pipeline.project?.path || 'group/project';
const commitSha = pipeline.sha || 'abc123';
const refName = pipeline.ref || 'main';
const pipelineId = pipeline.id || Math.floor(Math.random() * 1000000);
return {
CI: 'true',
GITLAB_CI: 'true',
CI_SERVER: 'yes',
CI_SERVER_NAME: 'GitLab',
CI_SERVER_VERSION: '15.11.0',
CI_SERVER_VERSION_MAJOR: '15',
CI_SERVER_VERSION_MINOR: '11',
CI_SERVER_VERSION_PATCH: '0',
CI_SERVER_REVISION: 'gitlab-15.11.0',
CI_SERVER_URL: 'http://localhost:8080',
CI_SERVER_HOST: 'localhost',
CI_SERVER_PORT: '8080',
CI_SERVER_PROTOCOL: 'http',
CI_API_V4_URL: 'http://localhost:8080/api/v4',
CI_PIPELINE_URL: `http://localhost:8080/${projectPath}/-/pipelines/${pipelineId}`,
CI_JOB_ID: jobId.toString(),
CI_JOB_TOKEN: token,
CI_JOB_NAME: job.name,
CI_JOB_STAGE: job.stage || 'test',
CI_JOB_URL: `http://localhost:8080/${projectPath}/-/jobs/${jobId}`,
CI_JOB_STARTED_AT: new Date().toISOString(),
CI_PIPELINE_ID: pipelineId.toString(),
CI_PIPELINE_IID: pipelineId.toString(),
CI_PIPELINE_SOURCE: 'push',
CI_PIPELINE_CREATED_AT: new Date().toISOString(),
CI_PROJECT_ID: '1',
CI_PROJECT_NAME: projectName,
CI_PROJECT_TITLE: projectName,
CI_PROJECT_PATH: projectPath,
            CI_PROJECT_PATH_SLUG: projectPath.toLowerCase().replace(/[^a-z0-9]/g, '-'),
CI_PROJECT_NAMESPACE: projectPath.split('/')[0] || 'group',
CI_PROJECT_ROOT_NAMESPACE: projectPath.split('/')[0] || 'group',
CI_PROJECT_URL: `http://localhost:8080/${projectPath}`,
CI_PROJECT_VISIBILITY: 'private',
CI_PROJECT_REPOSITORY_LANGUAGES: 'javascript,typescript',
CI_REPOSITORY_URL: `http://localhost:8080/${projectPath}.git`,
CI_DEFAULT_BRANCH: 'main',
CI_COMMIT_SHA: commitSha,
CI_COMMIT_SHORT_SHA: commitSha.substring(0, 8),
CI_COMMIT_REF_NAME: refName,
            CI_COMMIT_REF_SLUG: refName.toLowerCase().replace(/[^a-z0-9]/g, '-'),
            CI_COMMIT_BRANCH: refName,
CI_COMMIT_MESSAGE: 'Test commit',
CI_COMMIT_TITLE: 'Test commit',
CI_COMMIT_DESCRIPTION: '',
CI_COMMIT_AUTHOR: 'DryRun Test <test@dryrun.local>',
CI_COMMIT_TIMESTAMP: new Date().toISOString(),
CI_RUNNER_ID: '1',
CI_RUNNER_DESCRIPTION: 'DryRun Compatibility Runner',
CI_RUNNER_TAGS: 'dryrun,docker,test',
CI_RUNNER_VERSION: '15.11.0',
CI_RUNNER_REVISION: 'dryrun-compat',
CI_RUNNER_EXECUTABLE_ARCH: 'linux/amd64',
CI_BUILDS_DIR: this.buildsDir,
CI_CONCURRENT_ID: '1',
CI_CONCURRENT_PROJECT_ID: '1',
CI_CONFIG_PATH: '.gitlab-ci.yml',
CI_ENVIRONMENT_NAME: job.environment?.name || '',
            CI_ENVIRONMENT_SLUG: job.environment?.name?.toLowerCase().replace(/[^a-z0-9]/g, '-') || '',
CI_ENVIRONMENT_URL: job.environment?.url || '',
CI_REGISTRY: 'registry.gitlab.com',
CI_REGISTRY_IMAGE: `registry.gitlab.com/${projectPath}`,
CI_REGISTRY_USER: 'gitlab-ci-token',
CI_REGISTRY_PASSWORD: token,
CI_DEPENDENCY_PROXY_GROUP_IMAGE_PREFIX: `localhost:8080/group/dependency_proxy/containers`,
CI_DEPENDENCY_PROXY_DIRECT_GROUP_IMAGE_PREFIX: `localhost:8080/group/dependency_proxy/containers`,
CI_DEPENDENCY_PROXY_SERVER: 'localhost:8080',
CI_DEPENDENCY_PROXY_USER: 'gitlab-ci-token',
CI_DEPENDENCY_PROXY_PASSWORD: token,
CI_PAGES_DOMAIN: 'gitlab.io',
CI_PAGES_URL: `http://${projectPath.replace('/', '.')}.gitlab.io`,
CI_HAS_OPEN_REQUIREMENTS: 'false',
CI_KUBERNETES_ACTIVE: 'false',
CI_DEPLOY_FREEZE: 'false',
GITLAB_USER_ID: '1',
GITLAB_USER_EMAIL: 'test@dryrun.local',
GITLAB_USER_LOGIN: 'dryrun-test',
GITLAB_USER_NAME: 'DryRun Test User',
GIT_STRATEGY: job.git?.strategy || 'fetch',
GIT_CHECKOUT: 'true',
GIT_CLEAN_FLAGS: '-ffdx',
GIT_FETCH_EXTRA_FLAGS: '--prune',
GIT_DEPTH: job.git?.depth?.toString() || '50',
GIT_SUBMODULE_STRATEGY: job.git?.submodule_strategy || 'none',
CUSTOM_ENV_VAR: 'dryrun'
};
}
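    // Normalizes the job's artifacts block, defaulting to no paths and when: 'on_success'.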
normalizeArtifactConfig(artifacts) {
if (!artifacts) {
return {
paths: [],
when: 'on_success'
};
}
return {
name: artifacts.name,
paths: Array.isArray(artifacts.paths) ? artifacts.paths : [artifacts.paths].filter(Boolean),
excludePaths: artifacts.exclude,
expireIn: artifacts.expire_in || '30 days',
when: artifacts.when || 'on_success',
reports: artifacts.reports
};
}
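    // Normalizes the job's cache block, defaulting to key 'default' and policy 'pull-push'.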
normalizeCacheConfig(cache) {
if (!cache) {
return {
key: 'default',
paths: [],
policy: 'pull-push',
when: 'on_success'
};
}
return {
key: cache.key || 'default',
paths: Array.isArray(cache.paths) ? cache.paths : [cache.paths].filter(Boolean),
policy: cache.policy || 'pull-push',
when: cache.when || 'on_success',
fallbackKeys: cache.fallback_keys
};
}
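    // Accepts services given as strings or objects and returns a uniform object list.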
normalizeServices(services) {
if (!Array.isArray(services)) {
return [];
}
return services.map(service => {
if (typeof service === 'string') {
return { name: service };
}
return {
name: service.name,
alias: service.alias,
command: service.command,
entrypoint: service.entrypoint,
variables: service.variables
};
});
}
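    // Creates the job working directory and writes a .env file exporting every
    // predefined and user variable for later sourcing by the generated job scripts.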
async prepareJobEnvironment(jobExecution, config) {
this.emit('job-phase', { jobId: jobExecution.jobId, phase: 'prepare_environment' });
try {
await fs.mkdir(jobExecution.workingDirectory, { recursive: true });
const envFile = path.join(jobExecution.workingDirectory, '.env');
const envContent = [
...Object.entries(jobExecution.environment.predefinedVariables),
...Object.entries(jobExecution.environment.userVariables)
].map(([key, value]) => `export ${key}="${value}"`).join('\n');
await fs.writeFile(envFile, envContent);
this.emit('environment-prepared', {
jobId: jobExecution.jobId,
workingDirectory: jobExecution.workingDirectory
});
}
catch (error) {
throw new Error(`Failed to prepare job environment: ${error}`);
}
}
async downloadArtifactsAndCache(jobExecution) {
this.emit('job-phase', { jobId: jobExecution.jobId, phase: 'download_artifacts' });
}
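    // Simulates the get_sources phase by creating a stub .git directory rather than
    // performing a real clone.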
async cloneRepository(jobExecution, pipeline) {
this.emit('job-phase', { jobId: jobExecution.jobId, phase: 'get_sources' });
try {
const gitDir = path.join(jobExecution.workingDirectory, '.git');
await fs.mkdir(gitDir, { recursive: true });
await fs.writeFile(path.join(gitDir, 'HEAD'), 'ref: refs/heads/main\n');
await fs.writeFile(path.join(gitDir, 'config'), `[core]\n\trepositoryformatversion = 0\n\tfilemode = true\n\tbare = false\n\tlogallrefupdates = true\n`);
this.emit('sources-fetched', {
jobId: jobExecution.jobId,
repository: jobExecution.environment.predefinedVariables.CI_REPOSITORY_URL
});
}
catch (error) {
throw new Error(`Failed to clone repository: ${error}`);
}
}
async downloadJobArtifacts(jobExecution, pipeline) {
this.emit('job-phase', { jobId: jobExecution.jobId, phase: 'download_job_artifacts' });
}
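    // Extracts a previously archived cache tarball (if one exists for the interpolated
    // cache key) into the working directory; missing caches only emit 'cache-not-found'.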
async restoreCache(jobExecution) {
this.emit('job-phase', { jobId: jobExecution.jobId, phase: 'restore_cache' });
if (jobExecution.cache.paths.length === 0) {
return;
}
try {
const cacheKey = this.interpolateVariables(jobExecution.cache.key, jobExecution.environment);
const cacheFile = path.join(this.cacheDir, `${cacheKey}.tar.gz`);
if (await fs.access(cacheFile).then(() => true, () => false)) {
const tar = require('tar');
await tar.extract({
file: cacheFile,
cwd: jobExecution.workingDirectory
});
this.emit('cache-restored', {
jobId: jobExecution.jobId,
cacheKey,
paths: jobExecution.cache.paths
});
}
else {
this.emit('cache-not-found', {
jobId: jobExecution.jobId,
cacheKey
});
}
}
catch (error) {
this.emit('cache-restore-failed', {
jobId: jobExecution.jobId,
error: error instanceof Error ? error.message : 'Unknown error'
});
}
}
async downloadUserArtifacts(jobExecution) {
this.emit('job-phase', { jobId: jobExecution.jobId, phase: 'download_user_artifacts' });
}
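    // Writes the commands for one phase (before_script/script/after_script) to a bash
    // script that sources the .env file, then runs it in Docker when CI_JOB_IMAGE is
    // set, otherwise in the local shell.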
async executeScript(jobExecution, script, phase) {
this.emit('job-phase', { jobId: jobExecution.jobId, phase });
try {
const scriptContent = [
'#!/bin/bash',
'set -eo pipefail',
'',
                `source "${path.join(jobExecution.workingDirectory, '.env')}"`,
                '',
                `cd "${jobExecution.workingDirectory}"`,
'',
...script.map(cmd => this.interpolateVariables(cmd, jobExecution.environment))
].join('\n');
const scriptFile = path.join(jobExecution.workingDirectory, `${phase}.sh`);
await fs.writeFile(scriptFile, scriptContent, { mode: 0o755 });
if (jobExecution.environment.predefinedVariables.CI_JOB_IMAGE) {
await this.executeInDocker(jobExecution, scriptFile, phase);
}
else {
await this.executeInShell(jobExecution, scriptFile, phase);
}
this.emit('script-completed', {
jobId: jobExecution.jobId,
phase,
commands: script.length
});
}
catch (error) {
this.emit('script-failed', {
jobId: jobExecution.jobId,
phase,
error: error instanceof Error ? error.message : 'Unknown error'
});
throw error;
}
}
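    // Runs the phase script inside a container. Note that the attached stream is
    // Docker's multiplexed stdout/stderr format (Tty is not enabled) and the image
    // must provide /bin/bash.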
async executeInDocker(jobExecution, scriptFile, phase) {
const image = jobExecution.environment.predefinedVariables.CI_JOB_IMAGE || 'alpine:latest';
await this.pullImageIfNeeded(image);
const containerConfig = {
Image: image,
            // Mount the working directory at its host path (see Binds below) so the
            // absolute paths in the generated script resolve inside the container too.
            WorkingDir: jobExecution.workingDirectory,
Cmd: ['/bin/bash', path.basename(scriptFile)],
Env: [
...Object.entries(jobExecution.environment.predefinedVariables),
...Object.entries(jobExecution.environment.userVariables)
].map(([key, value]) => `${key}=${value}`),
HostConfig: {
Binds: [
                    `${jobExecution.workingDirectory}:${jobExecution.workingDirectory}:rw`,
`${this.cacheDir}:/cache:rw`
],
AutoRemove: true
}
};
const container = await this.docker.createContainer(containerConfig);
const stream = await container.attach({
stream: true,
stdout: true,
stderr: true
});
stream.on('data', (chunk) => {
this.emit('script-output', {
jobId: jobExecution.jobId,
phase,
output: chunk.toString()
});
});
await container.start();
const result = await container.wait();
if (result.StatusCode !== 0) {
throw new Error(`Script execution failed with exit code ${result.StatusCode}`);
}
}
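    // Runs the phase script with the local bash, streaming stdout/stderr as
    // 'script-output' events and rejecting on a non-zero exit code.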
async executeInShell(jobExecution, scriptFile, phase) {
const { spawn } = require('child_process');
return new Promise((resolve, reject) => {
const childProcess = spawn('/bin/bash', [scriptFile], {
cwd: jobExecution.workingDirectory,
env: {
...process.env,
...jobExecution.environment.predefinedVariables,
...jobExecution.environment.userVariables
}
});
childProcess.stdout.on('data', (data) => {
this.emit('script-output', {
jobId: jobExecution.jobId,
phase,
output: data.toString()
});
});
childProcess.stderr.on('data', (data) => {
this.emit('script-output', {
jobId: jobExecution.jobId,
phase,
output: data.toString()
});
});
childProcess.on('close', (code) => {
if (code === 0) {
resolve();
}
else {
reject(new Error(`Script execution failed with exit code ${code}`));
}
});
childProcess.on('error', (error) => {
reject(error);
});
});
}
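    // Archives the configured cache paths from the working directory into a
    // per-key tarball under the cache directory.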
async archiveCache(jobExecution) {
this.emit('job-phase', { jobId: jobExecution.jobId, phase: 'archive_cache' });
if (jobExecution.cache.paths.length === 0) {
return;
}
try {
const cacheKey = this.interpolateVariables(jobExecution.cache.key, jobExecution.environment);
const cacheFile = path.join(this.cacheDir, `${cacheKey}.tar.gz`);
const tar = require('tar');
await tar.create({
gzip: true,
file: cacheFile,
cwd: jobExecution.workingDirectory
}, jobExecution.cache.paths);
this.emit('cache-archived', {
jobId: jobExecution.jobId,
cacheKey,
paths: jobExecution.cache.paths
});
}
catch (error) {
this.emit('cache-archive-failed', {
jobId: jobExecution.jobId,
error: error instanceof Error ? error.message : 'Unknown error'
});
}
}
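    // Copies each declared artifact file into a per-job directory under the builds
    // directory; individual copy failures emit 'artifact-copy-failed' without
    // failing the job.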
async uploadArtifacts(jobExecution) {
this.emit('job-phase', { jobId: jobExecution.jobId, phase: 'upload_artifacts' });
if (jobExecution.artifacts.paths.length === 0) {
return;
}
try {
const artifactsDir = path.join(this.buildsDir, 'artifacts', jobExecution.id);
await fs.mkdir(artifactsDir, { recursive: true });
for (const artifactPath of jobExecution.artifacts.paths) {
const sourcePath = path.join(jobExecution.workingDirectory, artifactPath);
const targetPath = path.join(artifactsDir, artifactPath);
try {
await fs.mkdir(path.dirname(targetPath), { recursive: true });
await fs.copyFile(sourcePath, targetPath);
}
catch (error) {
this.emit('artifact-copy-failed', {
jobId: jobExecution.jobId,
path: artifactPath,
error: error instanceof Error ? error.message : 'Unknown error'
});
}
}
this.emit('artifacts-uploaded', {
jobId: jobExecution.jobId,
paths: jobExecution.artifacts.paths,
destination: artifactsDir
});
}
catch (error) {
this.emit('artifact-upload-failed', {
jobId: jobExecution.jobId,
error: error instanceof Error ? error.message : 'Unknown error'
});
}
}
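    // Deletes the generated .env and phase scripts (Docker executor only) and
    // removes the job from the active-jobs map.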
async cleanupJobResources(jobExecution) {
this.emit('job-phase', { jobId: jobExecution.jobId, phase: 'cleanup' });
try {
if (this.config.executorType === 'docker') {
const tempFiles = ['.env', 'before_script.sh', 'script.sh', 'after_script.sh'];
for (const file of tempFiles) {
const filePath = path.join(jobExecution.workingDirectory, file);
await fs.unlink(filePath).catch(() => { });
}
}
this.activeJobs.delete(jobExecution.id);
this.emit('job-cleaned', { jobId: jobExecution.jobId });
}
catch (error) {
this.emit('cleanup-failed', {
jobId: jobExecution.jobId,
error: error instanceof Error ? error.message : 'Unknown error'
});
}
}
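    // Replaces ${VAR} and $VAR references in a string using the job's predefined
    // and user variables.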
interpolateVariables(text, environment) {
let result = text;
const allVariables = {
...environment.predefinedVariables,
...environment.userVariables
};
for (const [key, value] of Object.entries(allVariables)) {
            // Use replacer functions so "$" sequences in variable values are not
            // treated as special replacement patterns by String.replace.
            result = result.replace(new RegExp(`\\$\\{${key}\\}`, 'g'), () => value);
            result = result.replace(new RegExp(`\\$${key}\\b`, 'g'), () => value);
}
return result;
}
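    // Inspects the image locally and pulls it via dockerode only when it is missing,
    // following pull progress before resolving.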
async pullImageIfNeeded(image) {
try {
await this.docker.getImage(image).inspect();
}
catch (error) {
this.emit('image-pulling', { image });
await new Promise((resolve, reject) => {
this.docker.pull(image, (err, stream) => {
if (err)
return reject(err);
this.docker.modem.followProgress(stream, (err) => {
if (err)
return reject(err);
resolve();
});
});
});
this.emit('image-pulled', { image });
}
}
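    // Produces a 64-character pseudo-random job token (Math.random is sufficient for
    // local simulation, not for real credentials).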
generateJobToken() {
const chars = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789';
let result = '';
for (let i = 0; i < 64; i++) {
result += chars.charAt(Math.floor(Math.random() * chars.length));
}
return result;
}
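    // Marks an active job (keyed by its execution id) as canceled and releases its resources.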
async cancelJob(jobId) {
const job = this.activeJobs.get(jobId);
if (job) {
job.status = 'canceled';
job.finishedAt = new Date();
job.failureReason = 'Job was canceled';
await this.cleanupJobResources(job);
this.emit('job-canceled', { jobId: job.jobId });
}
}
async getJobStatus(jobId) {
return this.activeJobs.get(jobId) || null;
}
async listActiveJobs() {
return Array.from(this.activeJobs.values());
}
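    // Returns the runner config plus the advertised GitLab Runner version and feature list.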
getRunnerInfo() {
return {
...this.config,
version: '15.11.0',
features: [
'multi_build_steps',
'artifacts',
'cache',
'shared_cache',
'upload_multiple_artifacts',
'upload_raw_artifacts',
'terminal',
'refspecs',
'masking',
'session',
'set_permissions_before_cleanup'
]
};
}
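    // Best-effort cancellation of all remaining jobs (not awaited), then removes all listeners.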
cleanup() {
for (const jobId of Array.from(this.activeJobs.keys())) {
this.cancelJob(jobId).catch(() => { });
}
this.removeAllListeners();
}
}
exports.GitLabRunnerCompatibilityLayer = GitLabRunnerCompatibilityLayer;