vibe-coder-mcp
Production-ready MCP server with complete agent integration, multi-transport support, and comprehensive development automation tools for AI-assisted workflows.
import fs from 'fs/promises';
import path from 'path';
import logger from '../../../logger.js';
import { getOutputDirectory, getCacheDirectory } from '../directoryUtils.js';
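/**
 * OutputCleaner removes stale generated output directories and temporary
 * cache files, based on age and count limits taken from the server config.
 *
 * Hypothetical usage sketch (assumes `config` is the loaded server
 * configuration object, with an optional `output` section that overrides
 * the defaults below):
 *
 *   const cleaner = new OutputCleaner(config);
 *   const result = await cleaner.cleanup();
 *   logger.info(`Reclaimed ${result.totalSizeRemoved} bytes`);
 */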
export class OutputCleaner {
    options;
    config;
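    // Defaults: prune output directories older than 7 days, keep at most 10
    // output directories, and remove temp files older than 24 hours.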
    static DEFAULT_OPTIONS = {
        maxAge: 7 * 24 * 60 * 60 * 1000,
        maxOutputDirs: 10,
        cleanupOldOutputs: true,
        cleanupTempFiles: true,
        tempFilesMaxAge: 24 * 60 * 60 * 1000
    };
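    // Merge any user-supplied `config.output` options over the defaults.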
    constructor(config) {
        this.config = config;
        this.options = {
            ...OutputCleaner.DEFAULT_OPTIONS,
            ...config.output
        };
        logger.debug(`OutputCleaner created with options: ${JSON.stringify(this.options)}`);
    }
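    /**
     * Runs all enabled cleanup passes and returns an aggregate result:
     * counts of removed directories/files, total bytes reclaimed, the
     * removed paths, and any non-fatal errors encountered along the way.
     */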
    async cleanup() {
        const result = {
            directoriesRemoved: 0,
            filesRemoved: 0,
            totalSizeRemoved: 0,
            removedPaths: [],
            errors: []
        };
        try {
            if (this.options.cleanupOldOutputs) {
                await this.cleanupOldOutputs(result);
            }
            if (this.options.cleanupTempFiles) {
                await this.cleanupTempFiles(result);
            }
            logger.info(`Cleanup completed: ${result.directoriesRemoved} directories and ${result.filesRemoved} files removed (${this.formatBytes(result.totalSizeRemoved)})`);
        }
        catch (error) {
            logger.error({ err: error }, 'Error during cleanup');
            result.errors.push(error instanceof Error ? error : new Error(String(error)));
        }
        return result;
    }
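    /**
     * Removes output directories older than `maxAge`, then removes the
     * oldest remaining directories until at most `maxOutputDirs` are left.
     */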
    async cleanupOldOutputs(result) {
        try {
            const outputDir = this.config.output?.outputDir || getOutputDirectory(this.config);
            try {
                await fs.access(outputDir);
            }
            catch {
                return;
            }
            const items = await this.getDirectoryContents(outputDir);
            const directories = items.filter(item => item.isDirectory);
            directories.sort((a, b) => a.creationTime.getTime() - b.creationTime.getTime());
            const now = Date.now();
            const maxAge = this.options.maxAge;
            const oldDirectories = directories.filter(dir => now - dir.creationTime.getTime() > maxAge);
            let directoriesToRemove = [...oldDirectories];
            if (directories.length > this.options.maxOutputDirs) {
                const additionalCount = directories.length - this.options.maxOutputDirs - oldDirectories.length;
                if (additionalCount > 0) {
                    const remainingDirs = directories.filter(dir => !oldDirectories.includes(dir));
                    directoriesToRemove = [...directoriesToRemove, ...remainingDirs.slice(0, additionalCount)];
                }
            }
            for (const dir of directoriesToRemove) {
                try {
                    const dirSize = await this.getDirectorySize(dir.path);
                    await fs.rm(dir.path, { recursive: true, force: true });
                    result.directoriesRemoved++;
                    result.totalSizeRemoved += dirSize;
                    result.removedPaths.push(dir.path);
                    logger.debug(`Removed old output directory: ${dir.path} (${this.formatBytes(dirSize)})`);
                }
                catch (error) {
                    logger.warn({ err: error, path: dir.path }, 'Failed to remove output directory');
                    result.errors.push(error instanceof Error ? error : new Error(`Failed to remove ${dir.path}: ${String(error)}`));
                }
            }
            logger.info(`Cleaned up ${directoriesToRemove.length} old output directories`);
        }
        catch (error) {
            logger.error({ err: error }, 'Error cleaning up old outputs');
            result.errors.push(error instanceof Error ? error : new Error(`Error cleaning up old outputs: ${String(error)}`));
        }
    }
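    /**
     * Removes entries under `<cacheDir>/temp` whose modification time is
     * older than `tempFilesMaxAge`. Both files and directories are removed.
     */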
    async cleanupTempFiles(result) {
        try {
            const cacheDir = getCacheDirectory(this.config);
            try {
                await fs.access(cacheDir);
            }
            catch {
                return;
            }
            const tempDir = path.join(cacheDir, 'temp');
            try {
                await fs.access(tempDir);
            }
            catch {
                return;
            }
            const items = await this.getDirectoryContents(tempDir);
            const tempItems = [...items];
            const now = Date.now();
            const maxAge = this.options.tempFilesMaxAge;
            const oldItems = tempItems.filter(item => now - item.modificationTime.getTime() > maxAge);
            for (const item of oldItems) {
                try {
                    await fs.rm(item.path, { recursive: true, force: true });
                    if (item.isDirectory) {
                        result.directoriesRemoved++;
                    }
                    else {
                        result.filesRemoved++;
                    }
                    result.totalSizeRemoved += item.size;
                    result.removedPaths.push(item.path);
                    logger.debug(`Removed old temporary ${item.isDirectory ? 'directory' : 'file'}: ${item.path} (${this.formatBytes(item.size)})`);
                }
                catch (error) {
                    logger.warn({ err: error, path: item.path }, `Failed to remove temporary ${item.isDirectory ? 'directory' : 'file'}`);
                    result.errors.push(error instanceof Error ? error : new Error(`Failed to remove ${item.path}: ${String(error)}`));
                }
            }
            logger.info(`Cleaned up ${oldItems.length} old temporary items`);
        }
        catch (error) {
            logger.error({ err: error }, 'Error cleaning up temporary files');
            result.errors.push(error instanceof Error ? error : new Error(`Error cleaning up temporary files: ${String(error)}`));
        }
    }
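    /**
     * Lists the immediate children of `dirPath` with their stats (creation
     * time, modification time, size); entries that cannot be stat'ed are
     * logged and skipped.
     */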
    async getDirectoryContents(dirPath) {
        try {
            const entries = await fs.readdir(dirPath, { withFileTypes: true });
            const items = [];
            for (const entry of entries) {
                const entryPath = path.join(dirPath, entry.name);
                try {
                    const stats = await fs.stat(entryPath);
                    items.push({
                        path: entryPath,
                        name: entry.name,
                        isDirectory: entry.isDirectory(),
                        creationTime: new Date(stats.birthtime),
                        modificationTime: new Date(stats.mtime),
                        size: stats.size
                    });
                }
                catch (error) {
                    logger.warn({ err: error, path: entryPath }, 'Failed to get stats for directory entry');
                }
            }
            return items;
        }
        catch (error) {
            logger.error({ err: error, path: dirPath }, 'Failed to read directory');
            return [];
        }
    }
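    /**
     * Recursively sums the size of all files under `dirPath`; entries that
     * cannot be read are logged and counted as zero.
     */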
    async getDirectorySize(dirPath) {
        try {
            const entries = await fs.readdir(dirPath, { withFileTypes: true });
            let size = 0;
            for (const entry of entries) {
                const entryPath = path.join(dirPath, entry.name);
                try {
                    if (entry.isDirectory()) {
                        size += await this.getDirectorySize(entryPath);
                    }
                    else {
                        const stats = await fs.stat(entryPath);
                        size += stats.size;
                    }
                }
                catch (error) {
                    logger.warn({ err: error, path: entryPath }, 'Failed to get size for directory entry');
                }
            }
            return size;
        }
        catch (error) {
            logger.error({ err: error, path: dirPath }, 'Failed to get directory size');
            return 0;
        }
    }
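    /**
     * Formats a byte count as a human-readable string, e.g. 1536 -> "1.5 KB".
     */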
    formatBytes(bytes, decimals = 2) {
        if (bytes === 0)
            return '0 Bytes';
        if (!bytes || isNaN(bytes))
            return 'Unknown';
        const k = 1024;
        const dm = decimals < 0 ? 0 : decimals;
        const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB'];
        const i = Math.floor(Math.log(Math.abs(bytes)) / Math.log(k));
        if (i < 0 || i >= sizes.length)
            return `${bytes} Bytes`;
        return parseFloat((bytes / Math.pow(k, i)).toFixed(dm)) + ' ' + sizes[i];
    }
}