// dryrun-ci — DryRun CI: local GitLab CI/CD pipeline testing tool with Docker
// execution, performance monitoring, and security sandboxing.
// (package listing metadata: 536 lines (535 loc) • 22 kB • JavaScript)
"use strict";
// --- TypeScript emit helpers (generated by tsc; equivalent to tslib) ---
// __createBinding(o, m, k, k2): re-export property `k` of module `m` onto
// object `o` under the name `k2` (defaults to `k`). When Object.create is
// available a live accessor is installed so the re-export tracks later
// mutations of the source module; the legacy fallback copies the value once.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
// Use a forwarding getter unless the source property can be re-used as-is
// (plain writable/configurable data property on a non-ES-module namespace).
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
// __setModuleDefault(o, v): attach the CommonJS-interop `default` property
// (pointing at the whole module `v`) on namespace object `o`; defined as
// non-writable where defineProperty exists, plain assignment otherwise.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
// __importStar(mod): emulate `import * as ns` over a CommonJS module. Real ES
// modules pass through untouched; otherwise every own non-"default" key is
// re-bound onto a fresh namespace object whose `default` is the module itself.
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.ArtifactManager = void 0;
const events_1 = require("events");
const fs = __importStar(require("fs/promises"));
const path = __importStar(require("path"));
const tar = __importStar(require("tar-stream"));
const zlib = __importStar(require("zlib"));
const crypto = __importStar(require("crypto"));
const fs_1 = require("fs");
/**
 * Collects, compresses, catalogs, extracts and expires CI job artifacts.
 *
 * Artifacts come either from a Docker container (dockerode `getArchive` tar
 * streams) or from the local filesystem. Each one is gzip-compressed into
 * `config.outputPath`, SHA-256 hashed, and described by a JSON metadata file
 * in `<outputPath>/.metadata` so the catalog survives restarts.
 *
 * Progress and failures are reported through EventEmitter events
 * ('artifact-collected', 'collection-error', 'storage-error', ...).
 *
 * `config` is expected to provide (not validated here — TODO confirm against
 * callers): outputPath, compressionLevel, retentionDays, maxSize (e.g. '500m').
 */
class ArtifactManager extends events_1.EventEmitter {
    constructor(config) {
        super();
        this.artifacts = new Map(); // artifactId -> artifact record
        this.storageUsage = 0; // compressed bytes currently on disk
        this.cleanupTimer = null; // hourly sweep handle; cancelled by cleanup()
        this.config = config;
        // Fire-and-forget by design: readiness/failure is signalled via the
        // 'storage-initialized' / 'storage-error' events. The .catch() prevents
        // an unhandled promise rejection, since initializeStorage rethrows.
        this.initializeStorage().catch(() => { });
    }
    /**
     * Creates the artifact and metadata directories, reloads metadata left by
     * previous runs, and starts the hourly cleanup timer.
     * Emits 'storage-initialized' on success; emits 'storage-error' and
     * rethrows on failure.
     */
    async initializeStorage() {
        try {
            await fs.mkdir(this.config.outputPath, { recursive: true });
            await fs.mkdir(path.join(this.config.outputPath, '.metadata'), { recursive: true });
            await this.loadExistingArtifacts();
            this.scheduleCleanup();
            this.emit('storage-initialized', { outputPath: this.config.outputPath });
        }
        catch (error) {
            this.emit('storage-error', { error: error instanceof Error ? error.message : 'Unknown error' });
            throw new Error(`Failed to initialize artifact storage: ${error}`);
        }
    }
    /**
     * Collects every path declared in `job.artifacts.paths` from a container.
     * Per-path failures are recorded in `result.errors` (collection continues);
     * only a failure to reach Docker itself rejects.
     *
     * @returns {Promise<object>} { artifacts, skipped, errors, totalSize, compressionRatio }
     */
    async collectArtifactsFromContainer(containerId, job, executionId) {
        const result = {
            artifacts: [],
            skipped: [],
            errors: [],
            totalSize: 0,
            compressionRatio: 0
        };
        if (!job.artifacts?.paths || job.artifacts.paths.length === 0) {
            return result;
        }
        this.emit('collection-started', {
            containerId,
            jobName: job.name,
            paths: job.artifacts.paths
        });
        try {
            // Lazy require keeps dockerode optional for purely local collection.
            const Docker = require('dockerode');
            const docker = new Docker();
            const container = docker.getContainer(containerId);
            for (const artifactPath of job.artifacts.paths) {
                try {
                    const artifactResult = await this.collectSingleArtifact(container, artifactPath, job.name, executionId);
                    if (artifactResult) {
                        result.artifacts.push(artifactResult);
                        result.totalSize += artifactResult.size;
                        this.storageUsage += artifactResult.size;
                    }
                }
                catch (error) {
                    const errorMsg = error instanceof Error ? error.message : 'Unknown error';
                    result.errors.push({
                        path: artifactPath,
                        error: errorMsg,
                        severity: 'error'
                    });
                    this.emit('collection-error', {
                        path: artifactPath,
                        error: errorMsg,
                        jobName: job.name
                    });
                }
            }
            if (result.artifacts.length > 0) {
                // Ratio of stored (compressed) bytes to original bytes; < 1 means savings.
                const originalSize = result.artifacts.reduce((sum, a) => sum + (a.metadata.totalSize || a.size), 0);
                result.compressionRatio = originalSize > 0 ? result.totalSize / originalSize : 1;
            }
            await this.checkStorageLimits();
            this.emit('collection-completed', {
                jobName: job.name,
                artifactCount: result.artifacts.length,
                totalSize: result.totalSize,
                errors: result.errors.length
            });
        }
        catch (error) {
            const errorMsg = error instanceof Error ? error.message : 'Unknown error';
            this.emit('collection-failed', { jobName: job.name, error: errorMsg });
            throw error;
        }
        return result;
    }
    /**
     * Pulls one path out of the container as a tar stream, gzips it to disk,
     * and registers metadata. Returns the artifact record, or null on failure
     * (the failure is reported via 'artifact-collection-error', never thrown).
     */
    async collectSingleArtifact(container, artifactPath, jobName, executionId) {
        try {
            const stream = await container.getArchive({ path: artifactPath });
            const artifactId = this.generateArtifactId(artifactPath, jobName, executionId);
            const artifactFileName = `${artifactId}.tar.gz`;
            const outputPath = path.join(this.config.outputPath, artifactFileName);
            const gzipStream = zlib.createGzip({ level: this.config.compressionLevel });
            const writeStream = (0, fs_1.createWriteStream)(outputPath);
            // Count the uncompressed tar bytes so metadata.totalSize reflects the
            // original payload (the caller derives the compression ratio from it).
            let rawSize = 0;
            stream.on('data', (chunk) => { rawSize += chunk.length; });
            stream.pipe(gzipStream).pipe(writeStream);
            await new Promise((resolve, reject) => {
                writeStream.on('finish', resolve);
                writeStream.on('error', reject);
                // Previously missing: a gzip failure would hang this promise forever.
                gzipStream.on('error', reject);
                stream.on('error', reject);
            });
            const stats = await fs.stat(outputPath);
            const hash = await this.calculateFileHash(outputPath);
            const artifactInfo = {
                id: artifactId,
                name: path.basename(artifactPath),
                path: outputPath,
                size: stats.size,
                hash,
                createdAt: new Date(),
                expiresAt: new Date(Date.now() + (this.config.retentionDays * 24 * 60 * 60 * 1000)),
                jobName,
                executionId,
                metadata: {
                    originalPaths: [artifactPath],
                    fileCount: 1,
                    totalSize: rawSize > 0 ? rawSize : stats.size,
                    compressionLevel: this.config.compressionLevel,
                    mimeType: 'application/gzip',
                    source: 'docker'
                }
            };
            await this.storeArtifactMetadata(artifactInfo);
            this.artifacts.set(artifactId, artifactInfo);
            this.emit('artifact-collected', {
                artifactId,
                path: artifactPath,
                size: stats.size,
                jobName
            });
            return artifactInfo;
        }
        catch (error) {
            this.emit('artifact-collection-error', {
                path: artifactPath,
                error: error instanceof Error ? error.message : 'Unknown error',
                jobName
            });
            return null;
        }
    }
    /**
     * Collects a list of local files/directories. Per-path failures are
     * recorded in `result.errors`; the loop keeps going.
     */
    async collectArtifactsFromLocal(localPaths, jobName, executionId) {
        const result = {
            artifacts: [],
            skipped: [],
            errors: [],
            totalSize: 0,
            compressionRatio: 0
        };
        for (const localPath of localPaths) {
            try {
                const resolvedPath = path.resolve(localPath);
                const stats = await fs.stat(resolvedPath);
                const artifact = stats.isDirectory()
                    ? await this.collectDirectoryArtifact(resolvedPath, jobName, executionId)
                    : await this.collectFileArtifact(resolvedPath, jobName, executionId);
                if (artifact) {
                    result.artifacts.push(artifact);
                    result.totalSize += artifact.size;
                }
            }
            catch (error) {
                result.errors.push({
                    path: localPath,
                    error: error instanceof Error ? error.message : 'Unknown error',
                    severity: 'error'
                });
            }
        }
        return result;
    }
    /**
     * Packs a local directory into a gzipped tar and registers metadata.
     * Returns the artifact record, or null on failure (reported via event).
     */
    async collectDirectoryArtifact(dirPath, jobName, executionId) {
        try {
            const artifactId = this.generateArtifactId(dirPath, jobName, executionId);
            const artifactFileName = `${artifactId}.tar.gz`;
            const outputPath = path.join(this.config.outputPath, artifactFileName);
            const pack = tar.pack();
            const gzip = zlib.createGzip({ level: this.config.compressionLevel });
            const writeStream = (0, fs_1.createWriteStream)(outputPath);
            pack.pipe(gzip).pipe(writeStream);
            let fileCount = 0;
            let totalSize = 0;
            await this.addDirectoryToTar(pack, dirPath, '', (size) => {
                fileCount++;
                totalSize += size;
            });
            pack.finalize();
            await new Promise((resolve, reject) => {
                writeStream.on('finish', resolve);
                writeStream.on('error', reject);
                // Previously missing: pack/gzip failures would hang this promise.
                gzip.on('error', reject);
                pack.on('error', reject);
            });
            const stats = await fs.stat(outputPath);
            const hash = await this.calculateFileHash(outputPath);
            const artifactInfo = {
                id: artifactId,
                name: path.basename(dirPath),
                path: outputPath,
                size: stats.size,
                hash,
                createdAt: new Date(),
                expiresAt: new Date(Date.now() + (this.config.retentionDays * 24 * 60 * 60 * 1000)),
                jobName,
                executionId,
                compressionRatio: totalSize > 0 ? stats.size / totalSize : 1,
                metadata: {
                    originalPaths: [dirPath],
                    fileCount,
                    totalSize,
                    compressionLevel: this.config.compressionLevel,
                    mimeType: 'application/gzip',
                    source: 'local'
                }
            };
            await this.storeArtifactMetadata(artifactInfo);
            this.artifacts.set(artifactId, artifactInfo);
            return artifactInfo;
        }
        catch (error) {
            this.emit('artifact-collection-error', {
                path: dirPath,
                error: error instanceof Error ? error.message : 'Unknown error'
            });
            return null;
        }
    }
    /**
     * Gzips a single local file and registers metadata.
     * Returns the artifact record, or null on failure (reported via event).
     */
    async collectFileArtifact(filePath, jobName, executionId) {
        try {
            const artifactId = this.generateArtifactId(filePath, jobName, executionId);
            const artifactFileName = `${artifactId}.gz`;
            const outputPath = path.join(this.config.outputPath, artifactFileName);
            const readStream = (0, fs_1.createReadStream)(filePath);
            const gzipStream = zlib.createGzip({ level: this.config.compressionLevel });
            const writeStream = (0, fs_1.createWriteStream)(outputPath);
            readStream.pipe(gzipStream).pipe(writeStream);
            await new Promise((resolve, reject) => {
                writeStream.on('finish', resolve);
                writeStream.on('error', reject);
                // Previously missing: a gzip failure would hang this promise.
                gzipStream.on('error', reject);
                readStream.on('error', reject);
            });
            const originalStats = await fs.stat(filePath);
            const compressedStats = await fs.stat(outputPath);
            const hash = await this.calculateFileHash(outputPath);
            const artifactInfo = {
                id: artifactId,
                name: path.basename(filePath),
                path: outputPath,
                size: compressedStats.size,
                hash,
                createdAt: new Date(),
                expiresAt: new Date(Date.now() + (this.config.retentionDays * 24 * 60 * 60 * 1000)),
                jobName,
                executionId,
                // Guard the empty-file case: size 0 would otherwise yield NaN.
                compressionRatio: originalStats.size > 0 ? compressedStats.size / originalStats.size : 1,
                metadata: {
                    originalPaths: [filePath],
                    fileCount: 1,
                    totalSize: originalStats.size,
                    compressionLevel: this.config.compressionLevel,
                    mimeType: 'application/gzip',
                    source: 'local'
                }
            };
            await this.storeArtifactMetadata(artifactInfo);
            this.artifacts.set(artifactId, artifactInfo);
            return artifactInfo;
        }
        catch (error) {
            this.emit('artifact-collection-error', {
                path: filePath,
                error: error instanceof Error ? error.message : 'Unknown error'
            });
            return null;
        }
    }
    /**
     * Recursively adds a directory tree to a tar-stream pack.
     * NOTE: each file is fully read into memory before being added; fine for
     * typical CI artifacts, pathological for multi-GB files.
     *
     * @param {Function} onFile - called with each file's byte size (for stats).
     */
    async addDirectoryToTar(pack, dirPath, prefix, onFile) {
        const entries = await fs.readdir(dirPath, { withFileTypes: true });
        for (const entry of entries) {
            const fullPath = path.join(dirPath, entry.name);
            // Tar entry names always use '/' separators regardless of platform.
            const entryPath = prefix ? `${prefix}/${entry.name}` : entry.name;
            if (entry.isDirectory()) {
                await this.addDirectoryToTar(pack, fullPath, entryPath, onFile);
            }
            else {
                const stats = await fs.stat(fullPath);
                onFile(stats.size);
                pack.entry({ name: entryPath, size: stats.size }, await fs.readFile(fullPath));
            }
        }
    }
    /**
     * Extracts a stored artifact into `extractPath`. Tarballs are unpacked
     * entry-by-entry; single-file `.gz` artifacts are simply gunzipped.
     * Rejects (after emitting 'extraction-error') on any stream failure.
     * @throws {Error} if the artifact id is unknown.
     */
    async extractArtifact(artifactId, extractPath) {
        const artifact = this.artifacts.get(artifactId);
        if (!artifact) {
            throw new Error(`Artifact ${artifactId} not found`);
        }
        try {
            await fs.mkdir(extractPath, { recursive: true });
            if (artifact.metadata.source === 'docker' || artifact.path.endsWith('.tar.gz')) {
                const destRoot = path.resolve(extractPath);
                const readStream = (0, fs_1.createReadStream)(artifact.path);
                const gunzipStream = zlib.createGunzip();
                const extract = tar.extract();
                extract.on('entry', async (header, stream, next) => {
                    // Guard against "zip-slip": refuse tar entries whose name
                    // (e.g. containing '..') would escape the extraction root.
                    const outputPath = path.resolve(destRoot, header.name);
                    if (outputPath !== destRoot && !outputPath.startsWith(destRoot + path.sep)) {
                        stream.resume();
                        next();
                        return;
                    }
                    if (header.type === 'directory') {
                        await fs.mkdir(outputPath, { recursive: true });
                        stream.resume();
                        next();
                    }
                    else {
                        await fs.mkdir(path.dirname(outputPath), { recursive: true });
                        const writeStream = (0, fs_1.createWriteStream)(outputPath);
                        stream.pipe(writeStream);
                        writeStream.on('finish', next);
                        // Previously ignored: surface write failures via the extractor.
                        writeStream.on('error', (err) => extract.destroy(err));
                    }
                });
                readStream.pipe(gunzipStream).pipe(extract);
                await new Promise((resolve, reject) => {
                    extract.on('finish', resolve);
                    extract.on('error', reject);
                    // Previously missing: read/gunzip failures would hang this promise.
                    gunzipStream.on('error', reject);
                    readStream.on('error', reject);
                });
            }
            else {
                const outputPath = path.join(extractPath, artifact.name);
                const readStream = (0, fs_1.createReadStream)(artifact.path);
                const gunzipStream = zlib.createGunzip();
                const writeStream = (0, fs_1.createWriteStream)(outputPath);
                readStream.pipe(gunzipStream).pipe(writeStream);
                await new Promise((resolve, reject) => {
                    writeStream.on('finish', resolve);
                    writeStream.on('error', reject);
                    gunzipStream.on('error', reject);
                    readStream.on('error', reject);
                });
            }
            this.emit('artifact-extracted', { artifactId, extractPath });
        }
        catch (error) {
            this.emit('extraction-error', {
                artifactId,
                error: error instanceof Error ? error.message : 'Unknown error'
            });
            throw error;
        }
    }
    /**
     * Lists known artifacts, optionally filtered by execution id and/or job
     * name, newest first.
     */
    async listArtifacts(executionId, jobName) {
        let artifacts = Array.from(this.artifacts.values());
        if (executionId) {
            artifacts = artifacts.filter(a => a.executionId === executionId);
        }
        if (jobName) {
            artifacts = artifacts.filter(a => a.jobName === jobName);
        }
        return artifacts.sort((a, b) => b.createdAt.getTime() - a.createdAt.getTime());
    }
    /**
     * Deletes one artifact (data file + metadata) and updates usage counters.
     * Emits 'artifact-deleted' on success; emits 'deletion-error' and rethrows
     * on failure.
     * @throws {Error} if the artifact id is unknown.
     */
    async deleteArtifact(artifactId) {
        const artifact = this.artifacts.get(artifactId);
        if (!artifact) {
            throw new Error(`Artifact ${artifactId} not found`);
        }
        try {
            await fs.unlink(artifact.path);
            const metadataPath = path.join(this.config.outputPath, '.metadata', `${artifactId}.json`);
            // Metadata removal is best-effort; orphans are pruned on next load.
            await fs.unlink(metadataPath).catch(() => { });
            this.storageUsage -= artifact.size;
            this.artifacts.delete(artifactId);
            this.emit('artifact-deleted', { artifactId });
        }
        catch (error) {
            this.emit('deletion-error', {
                artifactId,
                error: error instanceof Error ? error.message : 'Unknown error'
            });
            throw error;
        }
    }
    /**
     * Summarizes storage state: counts, bytes used, configured limit, usage
     * fraction, and the oldest/newest artifact creation dates.
     */
    async getStorageInfo() {
        const artifacts = Array.from(this.artifacts.values());
        // An explicit comparator is required: the default Array#sort compares
        // Dates as strings, which orders them incorrectly.
        const dates = artifacts
            .map(a => a.createdAt)
            .sort((a, b) => a.getTime() - b.getTime());
        const limit = this.parseSize(this.config.maxSize);
        return {
            totalArtifacts: artifacts.length,
            totalSize: this.storageUsage,
            storageLimit: limit,
            // Guard: an unparseable maxSize yields limit 0; report 0 usage
            // instead of Infinity/NaN.
            usage: limit > 0 ? this.storageUsage / limit : 0,
            oldestArtifact: dates[0],
            newestArtifact: dates[dates.length - 1]
        };
    }
    /**
     * Builds a unique-enough artifact id from execution, job, a hash of the
     * source path, and the current timestamp.
     */
    generateArtifactId(sourcePath, jobName, executionId) {
        const timestamp = Date.now();
        const pathHash = this.simpleHash(sourcePath);
        return `${executionId}-${jobName}-${pathHash}-${timestamp}`;
    }
    /**
     * Streams a file through SHA-256 and resolves with the hex digest,
     * avoiding loading the whole file into memory.
     */
    async calculateFileHash(filePath) {
        return new Promise((resolve, reject) => {
            const hash = crypto.createHash('sha256');
            const stream = (0, fs_1.createReadStream)(filePath);
            stream.on('data', (chunk) => hash.update(chunk));
            stream.on('end', () => resolve(hash.digest('hex')));
            stream.on('error', reject);
        });
    }
    /**
     * Cheap, non-cryptographic string hash (Java-style 31x rolling hash),
     * rendered in base 36. Used only for filename uniqueness, not integrity.
     */
    simpleHash(str) {
        let hash = 0;
        for (let i = 0; i < str.length; i++) {
            const char = str.charCodeAt(i);
            hash = ((hash << 5) - hash) + char;
            hash = hash & hash; // force into 32-bit integer range
        }
        return Math.abs(hash).toString(36);
    }
    /** Persists one artifact's metadata as pretty-printed JSON. */
    async storeArtifactMetadata(artifact) {
        const metadataPath = path.join(this.config.outputPath, '.metadata', `${artifact.id}.json`);
        await fs.writeFile(metadataPath, JSON.stringify(artifact, null, 2));
    }
    /**
     * Rebuilds the in-memory catalog from `.metadata` JSON files left by
     * previous runs; metadata whose artifact file is gone gets pruned.
     * Best-effort: individual parse failures are skipped.
     */
    async loadExistingArtifacts() {
        try {
            const metadataDir = path.join(this.config.outputPath, '.metadata');
            const files = await fs.readdir(metadataDir).catch(() => []);
            for (const file of files) {
                if (!file.endsWith('.json'))
                    continue;
                try {
                    const metadataPath = path.join(metadataDir, file);
                    const content = await fs.readFile(metadataPath, 'utf-8');
                    const artifact = JSON.parse(content);
                    // JSON round-tripping turns Dates into ISO strings; revive
                    // them so getTime()/expiry comparisons keep working.
                    artifact.createdAt = new Date(artifact.createdAt);
                    artifact.expiresAt = new Date(artifact.expiresAt);
                    if (await fs.stat(artifact.path).catch(() => null)) {
                        this.artifacts.set(artifact.id, artifact);
                        this.storageUsage += artifact.size;
                    }
                    else {
                        // Orphaned metadata: the artifact file no longer exists.
                        await fs.unlink(metadataPath).catch(() => { });
                    }
                }
                catch (error) {
                    // Corrupt metadata file: skip it, keep loading the rest.
                }
            }
            this.emit('artifacts-loaded', { count: this.artifacts.size, totalSize: this.storageUsage });
        }
        catch (error) {
            // Storage directory unreadable: start with an empty catalog.
        }
    }
    /**
     * If usage exceeds the configured limit, emits 'storage-limit-exceeded'
     * and runs an expiry sweep (which may or may not bring usage back under).
     */
    async checkStorageLimits() {
        const maxSize = this.parseSize(this.config.maxSize);
        if (this.storageUsage > maxSize) {
            this.emit('storage-limit-exceeded', {
                usage: this.storageUsage,
                limit: maxSize
            });
            await this.cleanupExpiredArtifacts();
        }
    }
    /**
     * Deletes every artifact whose expiresAt is in the past. Individual
     * deletion failures are ignored (best-effort sweep).
     */
    async cleanupExpiredArtifacts() {
        const now = new Date();
        const toDelete = [];
        for (const [id, artifact] of Array.from(this.artifacts.entries())) {
            if (artifact.expiresAt < now) {
                toDelete.push(id);
            }
        }
        if (toDelete.length > 0) {
            this.emit('cleanup-started', { count: toDelete.length });
            for (const id of toDelete) {
                try {
                    await this.deleteArtifact(id);
                }
                catch (error) {
                    // Best-effort: a failed delete is retried on the next sweep.
                }
            }
            this.emit('cleanup-completed', { deleted: toDelete.length });
        }
    }
    /**
     * Starts the hourly expiry sweep. The handle is kept so cleanup() can
     * cancel it — previously the interval was never cleared, leaking the
     * timer (and this instance) for the lifetime of the process.
     */
    scheduleCleanup() {
        this.cleanupTimer = setInterval(() => {
            this.cleanupExpiredArtifacts().catch(() => { /* best-effort sweep */ });
        }, 60 * 60 * 1000);
    }
    /**
     * Parses a human-readable size ('1024', '10k', '500m', '2gb') into bytes.
     * Returns 0 for unrecognized input.
     */
    parseSize(sizeStr) {
        const units = {
            'b': 1,
            'k': 1024,
            'm': 1024 * 1024,
            'g': 1024 * 1024 * 1024
        };
        const match = sizeStr.toLowerCase().match(/^(\d+)([kmg]?)b?$/);
        if (!match)
            return 0;
        const [, size, unit] = match;
        return parseInt(size, 10) * (units[unit] || 1);
    }
    /** Cancels the cleanup timer and detaches all event listeners. */
    cleanup() {
        if (this.cleanupTimer) {
            clearInterval(this.cleanupTimer);
            this.cleanupTimer = null;
        }
        this.removeAllListeners();
    }
}
exports.ArtifactManager = ArtifactManager;