dryrun-ci
DryRun CI - Local GitLab CI/CD pipeline testing tool with Docker execution, performance monitoring, and security sandboxing
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.Logger = void 0;
const fs_1 = require("fs");
const path_1 = __importDefault(require("path"));
const winston_1 = require("winston");
const winston_daily_rotate_file_1 = __importDefault(require("winston-daily-rotate-file"));
class Logger {
    constructor(config) {
        // All log output goes under <cwd>/.dryrun/logs.
        this.logDir = path_1.default.join(process.cwd(), '.dryrun', 'logs');
        // Create the log directory up front; failures are reported but non-fatal.
        fs_1.promises.mkdir(this.logDir, { recursive: true }).catch(err => {
            console.error('Failed to create log directory:', err);
        });
        // Shared line format: "[timestamp] LEVEL: message", plus a stack trace when present.
        const logFormat = winston_1.format.combine(
            winston_1.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }),
            winston_1.format.errors({ stack: true }),
            winston_1.format.printf(({ timestamp, level, message, stack }) => {
                return `[${timestamp}] ${level.toUpperCase()}: ${message}${stack ? '\n' + stack : ''}`;
            })
        );
        const loggerTransports = [];
        if (config.console) {
            loggerTransports.push(new winston_1.transports.Console({
                format: winston_1.format.combine(winston_1.format.colorize(), logFormat)
            }));
        }
        if (config.file) {
            // Daily-rotated file transport; %DATE% expands per the datePattern below.
            loggerTransports.push(new winston_daily_rotate_file_1.default({
                dirname: this.logDir,
                filename: 'dryrun-%DATE%.log',
                datePattern: 'YYYY-MM-DD',
                maxSize: config.maxSize,
                maxFiles: config.maxFiles,
                format: logFormat
            }));
        }
        this.logger = (0, winston_1.createLogger)({
            level: config.level,
            transports: loggerTransports,
            // Uncaught exceptions and unhandled promise rejections get dedicated files.
            exceptionHandlers: [
                new winston_1.transports.File({ filename: path_1.default.join(this.logDir, 'exceptions.log') })
            ],
            rejectionHandlers: [
                new winston_1.transports.File({ filename: path_1.default.join(this.logDir, 'rejections.log') })
            ]
        });
    }
    static getInstance(config) {
        // Lazily create a singleton; defaults apply when no config is provided.
        if (!Logger.instance) {
            Logger.instance = new Logger(config || {
                level: 'info',
                file: true,
                console: true,
                maxSize: '10m',
                maxFiles: '7d'
            });
        }
        return Logger.instance;
    }
    info(message, meta) {
        this.logger.info(message, meta);
    }
    error(message, error, meta) {
        this.logger.error(message, {
            error: error?.stack || error?.message,
            ...meta
        });
    }
    warn(message, meta) {
        this.logger.warn(message, meta);
    }
    debug(message, meta) {
        this.logger.debug(message, meta);
    }
    async logJobExecution(jobId, stage, status, duration, meta) {
        this.logger.info('Job execution completed', {
            jobId,
            stage,
            status,
            durationMs: duration,
            ...meta
        });
    }
    async logPipelineExecution(pipelineId, status, duration, meta) {
        this.logger.info('Pipeline execution completed', {
            pipelineId,
            status,
            durationMs: duration,
            ...meta
        });
    }
    async logSecurityEvent(type, severity, details) {
        this.logger.warn('Security event detected', {
            type,
            severity,
            details
        });
    }
    async logPerformanceMetrics(jobId, metrics) {
        this.logger.info('Performance metrics collected', {
            jobId,
            metrics
        });
    }
    maskSensitiveData(message, patterns) {
        // Replace every match of each pattern (regex source string) with a placeholder.
        let maskedMessage = message;
        for (const pattern of patterns) {
            maskedMessage = maskedMessage.replace(new RegExp(pattern, 'g'), '[MASKED]');
        }
        return maskedMessage;
    }
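    // Illustrative call (the patterns here are assumptions, not values shipped with dryrun-ci):
    //   logger.maskSensitiveData(
    //       'Using token glpat-abc123 for registry login',
    //       ['glpat-[A-Za-z0-9_-]+', 'AKIA[0-9A-Z]{16}']
    //   );
    //   // => 'Using token [MASKED] for registry login'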
    async cleanup(olderThan) {
        // Delete log files whose last-modified time is older than `olderThan` milliseconds.
        try {
            const files = await fs_1.promises.readdir(this.logDir);
            const now = Date.now();
            for (const file of files) {
                const filePath = path_1.default.join(this.logDir, file);
                const stats = await fs_1.promises.stat(filePath);
                if (olderThan && now - stats.mtimeMs > olderThan) {
                    await fs_1.promises.unlink(filePath);
                }
            }
        }
        catch (error) {
            console.error('Failed to cleanup logs:', error);
        }
    }
}
exports.Logger = Logger;
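// --- Usage sketch (illustrative, not part of the compiled module) ---
// A minimal example of how this Logger might be consumed from CommonJS code.
// The require path './logger' and the cleanup age are assumptions; adjust them
// to wherever this file lives inside the dryrun-ci package.
//
//   const { Logger } = require('./logger');
//   const logger = Logger.getInstance({
//       level: 'debug',
//       console: true,
//       file: true,
//       maxSize: '10m',
//       maxFiles: '7d'
//   });
//   logger.info('Pipeline parsed', { jobCount: 3 });
//   logger.error('Job failed', new Error('exit code 1'), { jobId: 'build-1' });
//   logger.logJobExecution('build-1', 'build', 'success', 4210).catch(() => {});
//   // Remove rotated logs older than 7 days (value in milliseconds).
//   logger.cleanup(7 * 24 * 60 * 60 * 1000).catch(() => {});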