dryrun-ci
DryRun CI - Local GitLab CI/CD pipeline testing tool with Docker execution, performance monitoring, and security sandboxing
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.CacheManager = void 0;
const fs_1 = require("fs");
const path_1 = __importDefault(require("path"));
const crypto_1 = __importDefault(require("crypto"));
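// CacheManager persists per-job cache directories under .dryrun/cache,
// tracks their total size in memory, and evicts the oldest entries once
// the configured maximum size is exceeded.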
class CacheManager {
constructor(performanceMonitor) {
this.performanceMonitor = performanceMonitor;
this.cacheEntries = new Map();
this.currentCacheSize = 0;
this.cacheDir = path_1.default.normalize(path_1.default.join(process.cwd(), '.dryrun', 'cache'));
        this.maxCacheSize = 5 * 1024 * 1024 * 1024; // 5 GiB cap, enforced by enforceMaxSize()
}
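    // Normalize to forward slashes so cache keys and paths compare
    // consistently across Windows and POSIX hosts.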
normalizePath(p) {
return path_1.default.normalize(p).replace(/\\/g, '/');
}
joinPaths(...paths) {
return this.normalizePath(path_1.default.join(...paths));
}
async initialize() {
await fs_1.promises.mkdir(this.cacheDir, { recursive: true });
await this.loadExistingCache();
}
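    // Rebuild the in-memory index from whatever survives on disk, pruning
    // entries whose directory holds no content or whose metadata is unreadable.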
async loadExistingCache() {
try {
this.cacheEntries.clear();
this.currentCacheSize = 0;
const entries = await fs_1.promises.readdir(this.cacheDir);
for (const entry of entries) {
try {
const metadata = await this.loadCacheMetadata(entry);
if (metadata) {
const cacheFiles = await fs_1.promises.readdir(this.joinPaths(this.cacheDir, entry));
                        // metadata.json always exists once an entry is saved,
                        // so require actual cached content beyond it.
                        if (cacheFiles.some(file => file !== 'metadata.json')) {
this.cacheEntries.set(entry, metadata);
this.currentCacheSize += metadata.size;
}
else {
await fs_1.promises.rm(this.joinPaths(this.cacheDir, entry), { recursive: true, force: true });
}
}
else {
await fs_1.promises.rm(this.joinPaths(this.cacheDir, entry), { recursive: true, force: true });
}
}
catch (error) {
console.warn(`Failed to load cache entry ${entry}:`, error);
await fs_1.promises.rm(this.joinPaths(this.cacheDir, entry), { recursive: true, force: true }).catch(() => { });
}
}
await this.enforceMaxSize();
}
catch (error) {
console.error('Failed to load existing cache:', error);
}
}
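    // Copy the job's declared cache paths into a keyed cache directory and
    // record metadata for later restoration and eviction.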
async saveCache(job, jobId) {
if (!job.cache?.paths?.length)
return;
const cacheKey = job.cache.key || await this.generateCacheKey(job);
const cacheDir = this.joinPaths(this.cacheDir, cacheKey);
await fs_1.promises.mkdir(cacheDir, { recursive: true });
let totalSize = 0;
for (const cachePath of job.cache.paths) {
const sourcePath = this.joinPaths(process.cwd(), cachePath);
const targetPath = this.joinPaths(cacheDir, cachePath);
try {
await fs_1.promises.mkdir(path_1.default.dirname(targetPath), { recursive: true });
                if (await this.pathExists(sourcePath)) {
                    // copyRecursive returns the bytes it copied, so directory
                    // trees are sized by their contents; stat() on a directory
                    // does not report the size of what it holds.
                    totalSize += await this.copyRecursive(sourcePath, targetPath);
                }
}
catch (error) {
console.error(`Failed to save cache for path ${cachePath}:`, error);
}
}
const entry = {
key: cacheKey,
paths: job.cache.paths.map(p => this.normalizePath(p)),
size: totalSize,
createdAt: Date.now(),
jobId,
expiresAt: job.cache.expire_in ? this.calculateExpiration(job.cache.expire_in) : undefined
};
await this.saveCacheMetadata(cacheKey, entry);
this.performanceMonitor.updateCacheMetrics(jobId, true, totalSize);
}
    async copyRecursive(src, dest) {
        src = this.normalizePath(src);
        dest = this.normalizePath(dest);
        const stats = await fs_1.promises.stat(src);
        if (stats.isDirectory()) {
            await fs_1.promises.mkdir(dest, { recursive: true });
            const files = await fs_1.promises.readdir(src);
            const sizes = await Promise.all(files.map(file => this.copyRecursive(this.joinPaths(src, file), this.joinPaths(dest, file))));
            return sizes.reduce((sum, size) => sum + size, 0);
        }
        await fs_1.promises.mkdir(path_1.default.dirname(dest), { recursive: true });
        // copyFile avoids buffering whole files in memory, which matters for
        // large cache artifacts; the returned byte count feeds size accounting.
        await fs_1.promises.copyFile(src, dest);
        return stats.size;
    }
async pathExists(p) {
try {
await fs_1.promises.access(this.normalizePath(p));
return true;
}
catch {
return false;
}
}
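    // Restore cached paths into the working tree if a non-expired entry
    // exists for the job's cache key; returns true only on a cache hit.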
async restoreCache(job, jobId) {
if (!job.cache?.paths?.length)
return false;
const cacheKey = job.cache.key || await this.generateCacheKey(job);
const cacheDir = this.joinPaths(this.cacheDir, cacheKey);
try {
const metadata = await this.loadCacheMetadata(cacheKey);
if (!metadata)
return false;
            if (metadata.expiresAt && metadata.expiresAt < Date.now()) {
                // Expired entries are removed from disk and from the
                // in-memory accounting before reporting a miss.
                await fs_1.promises.rm(cacheDir, { recursive: true, force: true });
                if (this.cacheEntries.delete(cacheKey)) {
                    this.currentCacheSize = Math.max(0, this.currentCacheSize - metadata.size);
                }
                return false;
            }
for (const cachePath of job.cache.paths) {
const sourcePath = this.joinPaths(cacheDir, cachePath);
const targetPath = this.joinPaths(process.cwd(), cachePath);
try {
if (await this.pathExists(sourcePath)) {
await fs_1.promises.mkdir(path_1.default.dirname(targetPath), { recursive: true });
await this.copyRecursive(sourcePath, targetPath);
}
}
catch (error) {
console.error(`Failed to restore cache for path ${cachePath}:`, error);
}
}
this.performanceMonitor.updateCacheMetrics(jobId, true, metadata.size);
return true;
}
catch (error) {
console.error('Failed to restore cache:', error);
return false;
}
}
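    // Derive a stable key from the job's name, stage, and cache paths so
    // that jobs with identical cache configuration share an entry.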
async generateCacheKey(job) {
const key = job.cache?.key || job.name;
const hash = crypto_1.default.createHash('sha256')
.update(key)
.update(job.stage || '')
.update(job.cache?.paths?.join(',') || '')
.digest('hex');
return hash;
}
    // Parse compact durations such as "30m" or "7d"; unrecognized input
    // falls back to a seven-day expiration.
    parseExpiration(expireIn) {
        const units = {
            s: 1000,
            m: 60 * 1000,
            h: 60 * 60 * 1000,
            d: 24 * 60 * 60 * 1000,
            w: 7 * 24 * 60 * 60 * 1000
        };
        const match = expireIn.match(/^(\d+)([smhdw])$/);
        if (!match)
            return 7 * 24 * 60 * 60 * 1000;
        const [, value, unit] = match;
        return parseInt(value, 10) * units[unit];
    }
calculateExpiration(expireIn) {
const duration = this.parseExpiration(expireIn);
return Date.now() + duration;
}
    async saveCacheMetadata(cacheKey, entry) {
        const metadataPath = this.joinPaths(this.cacheDir, cacheKey, 'metadata.json');
        await fs_1.promises.mkdir(path_1.default.dirname(metadataPath), { recursive: true });
        await fs_1.promises.writeFile(metadataPath, JSON.stringify(entry, null, 2));
        // Saving over an existing key must not double-count its size.
        const previous = this.cacheEntries.get(cacheKey);
        if (previous) {
            this.currentCacheSize -= previous.size;
        }
        this.cacheEntries.set(cacheKey, entry);
        this.currentCacheSize += entry.size;
        await this.enforceMaxSize();
    }
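    // Evict entries oldest-first (by creation time) until the cache fits
    // under maxCacheSize again.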
async enforceMaxSize() {
if (this.currentCacheSize <= this.maxCacheSize)
return;
const entries = Array.from(this.cacheEntries.entries())
.sort(([, a], [, b]) => a.createdAt - b.createdAt);
const sizeToRemove = this.currentCacheSize - this.maxCacheSize;
let removedSize = 0;
for (const [key, entry] of entries) {
const entryPath = this.joinPaths(this.cacheDir, key);
try {
await fs_1.promises.rm(entryPath, { recursive: true, force: true });
this.cacheEntries.delete(key);
removedSize += entry.size;
if (removedSize >= sizeToRemove)
break;
}
catch (error) {
console.error(`Failed to remove cache entry ${key}:`, error);
}
}
this.currentCacheSize = Math.max(0, this.currentCacheSize - removedSize);
}
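    // Remove everything on disk and reset in-memory state, leaving an
    // empty cache directory behind.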
async cleanup() {
try {
this.cacheEntries.clear();
this.currentCacheSize = 0;
if (await this.pathExists(this.cacheDir)) {
await fs_1.promises.rm(this.cacheDir, { recursive: true, force: true });
await fs_1.promises.mkdir(this.cacheDir, { recursive: true });
}
}
catch (error) {
console.error('Failed to cleanup cache:', error);
}
}
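    // Read an entry's metadata.json; any read or parse failure is treated
    // as a missing entry.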
async loadCacheMetadata(cacheKey) {
try {
const metadataPath = this.joinPaths(this.cacheDir, cacheKey, 'metadata.json');
const data = await fs_1.promises.readFile(metadataPath, 'utf-8');
return JSON.parse(data);
}
catch {
return null;
}
}
}
exports.CacheManager = CacheManager;
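// Example usage (a minimal sketch: the performance monitor below is a
// stand-in satisfying the only method this class calls,
// updateCacheMetrics(jobId, hit, bytes), and the job object mirrors the
// fields saveCache/restoreCache actually read):
//
//   const monitor = { updateCacheMetrics: (jobId, hit, bytes) => {} };
//   const cache = new CacheManager(monitor);
//   await cache.initialize();
//   const job = { name: 'build', stage: 'build',
//       cache: { key: 'deps', paths: ['node_modules'], expire_in: '7d' } };
//   await cache.restoreCache(job, 'job-1'); // before the job runs
//   await cache.saveCache(job, 'job-1');    // after the job runs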