logstack-zee
Complete Node.js logging solution with 6 integration methods, S3 bidirectional operations, advanced analytics, and multi-cloud storage support for enterprise-scale applications.
/**
* 🗑️ Log Retention Service
*
* Handles automatic cleanup of database logs and cloud storage files
* based on configured retention policies.
*/
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.RetentionService = void 0;
exports.initRetention = initRetention;
const job_1 = require("../models/job");
const log_1 = require("../models/log");
const apiLog_1 = require("../models/apiLog");
const aws_sdk_1 = __importDefault(require("aws-sdk"));
const node_cron_1 = __importDefault(require("node-cron"));
class RetentionService {
constructor(config, db) {
this.config = config;
this.db = db;
// Initialize S3 if using S3 storage
if (config.uploadProvider === 's3' && config.s3) {
this.s3 = new aws_sdk_1.default.S3({
accessKeyId: config.s3.accessKeyId,
secretAccessKey: config.s3.secretAccessKey,
region: config.s3.region,
endpoint: config.s3.endpoint
});
}
}
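/**
* Example constructor input (a minimal sketch; the shape is inferred from
* the fields read above, and the credential values are placeholders):
*
*   const service = new RetentionService({
*     uploadProvider: 's3',
*     s3: {
*       accessKeyId: process.env.AWS_ACCESS_KEY_ID,
*       secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
*       region: 'us-east-1',
*       bucket: 'my-log-archive',
*       endpoint: undefined // only needed for S3-compatible stores
*     }
*   }, db);
*/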
/**
* 🧹 Start automatic retention cleanup
*/
startAutomaticCleanup() {
const retention = this.config.retention;
if (!retention) {
console.log('⚠️ No retention configuration found, skipping automatic cleanup');
return;
}
// Database cleanup cron
if (retention.database?.autoCleanup) {
const dbCron = retention.database.cleanupCron || '0 2 * * *'; // 2 AM daily
console.log(`🗑️ Scheduling database cleanup: ${dbCron}`);
node_cron_1.default.schedule(dbCron, async () => {
console.log('🧹 Starting automatic database cleanup...');
// Catch failures so a bad run does not surface as an unhandled rejection
await this.cleanupDatabase().catch(err => console.error('❌ Automatic database cleanup failed:', err));
});
}
// Storage cleanup cron
if (retention.storage?.autoCleanup) {
const storageCron = retention.storage.cleanupCron || '0 3 * * *'; // 3 AM daily
console.log(`🗑️ Scheduling storage cleanup: ${storageCron}`);
node_cron_1.default.schedule(storageCron, async () => {
console.log('🧹 Starting automatic storage cleanup...');
// Catch failures so a bad run does not surface as an unhandled rejection
await this.cleanupStorage().catch(err => console.error('❌ Automatic storage cleanup failed:', err));
});
}
console.log('✅ Automatic retention cleanup scheduled');
}
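/**
* Example retention block consumed by startAutomaticCleanup (a sketch;
* the field names come from the reads above, and the cron strings shown
* are the hard-coded defaults):
*
*   retention: {
*     database: { autoCleanup: true, cleanupCron: '0 2 * * *' }, // 2 AM daily
*     storage: { autoCleanup: true, cleanupCron: '0 3 * * *' } // 3 AM daily
*   }
*/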
/**
* 🗑️ Clean up database collections based on retention policy
*/
async cleanupDatabase() {
const retention = this.config.retention?.database;
if (!retention) {
throw new Error('No database retention configuration found');
}
const results = {
apiLogs: 0,
jobs: 0,
logs: 0
};
console.log('🗑️ Starting database cleanup...');
try {
// Clean API logs
if (retention.apiLogs) {
const cutoffDate = new Date();
cutoffDate.setDate(cutoffDate.getDate() - retention.apiLogs);
const ApiLogModel = (0, apiLog_1.getApiLogModel)(this.config.collections?.apiLogsCollectionName || 'apilogs');
const result = await ApiLogModel.deleteMany({
request_time: { $lt: cutoffDate }
});
results.apiLogs = result.deletedCount || 0;
console.log(`🗑️ Deleted ${results.apiLogs} API logs older than ${retention.apiLogs} days`);
}
// Clean job records
if (retention.jobs) {
const cutoffDate = new Date();
cutoffDate.setDate(cutoffDate.getDate() - retention.jobs);
const JobModel = (0, job_1.getJobModel)(this.config.collections?.jobsCollectionName || 'jobs');
const result = await JobModel.deleteMany({
createdAt: { $lt: cutoffDate }
});
results.jobs = result.deletedCount || 0;
console.log(`🗑️ Deleted ${results.jobs} job records older than ${retention.jobs} days`);
}
// Clean processing logs
if (retention.logs) {
const cutoffDate = new Date();
cutoffDate.setDate(cutoffDate.getDate() - retention.logs);
const LogModel = (0, log_1.getLogModel)(this.config.collections?.logsCollectionName || 'logs');
const result = await LogModel.deleteMany({
timestamp: { $lt: cutoffDate }
});
results.logs = result.deletedCount || 0;
console.log(`🗑️ Deleted ${results.logs} processing logs older than ${retention.logs} days`);
}
console.log('✅ Database cleanup completed successfully');
return results;
}
catch (error) {
console.error('❌ Database cleanup failed:', error);
throw error;
}
}
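/**
* Example direct call (hypothetical counts; the result shape matches the
* `results` object built above):
*
*   const counts = await service.cleanupDatabase();
*   // => { apiLogs: 1200, jobs: 45, logs: 310 }
*/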
/**
* ☁️ Clean up cloud storage files based on retention policy
*/
async cleanupStorage() {
const retention = this.config.retention?.storage;
if (!retention?.files) {
throw new Error('No storage retention configuration found');
}
console.log('🗑️ Starting storage cleanup...');
switch (this.config.uploadProvider) {
case 's3':
return await this.cleanupS3Storage(retention.files);
case 'local':
return await this.cleanupLocalStorage(retention.files);
default:
console.log(`⚠️ Storage cleanup not implemented for ${this.config.uploadProvider}`);
return { deletedFiles: 0, deletedSize: 0 };
}
}
/**
* 🌩️ Clean up S3 storage files
*/
async cleanupS3Storage(retentionDays) {
if (!this.s3 || !this.config.s3) {
throw new Error('S3 not configured');
}
const cutoffDate = new Date();
cutoffDate.setDate(cutoffDate.getDate() - retentionDays);
const bucket = this.config.s3.bucket;
const prefix = this.config.outputDirectory || 'uploads';
let deletedFiles = 0;
let deletedSize = 0;
try {
// List all objects in the bucket with prefix
const listParams = {
Bucket: bucket,
Prefix: prefix
};
let continuationToken;
do {
if (continuationToken) {
listParams.ContinuationToken = continuationToken;
}
const listResult = await this.s3.listObjectsV2(listParams).promise();
if (listResult.Contents) {
// Filter objects older than cutoff date
const objectsToDelete = listResult.Contents.filter(obj => obj.LastModified && obj.LastModified < cutoffDate);
if (objectsToDelete.length > 0) {
// Delete objects in batches of 1000 (S3 limit)
const batchSize = 1000;
for (let i = 0; i < objectsToDelete.length; i += batchSize) {
const batch = objectsToDelete.slice(i, i + batchSize);
const deleteParams = {
Bucket: bucket,
Delete: {
Objects: batch.map(obj => ({ Key: obj.Key }))
}
};
const deleteResult = await this.s3.deleteObjects(deleteParams).promise();
if (deleteResult.Deleted) {
deletedFiles += deleteResult.Deleted.length;
// Calculate deleted size
batch.forEach(obj => {
if (obj.Size)
deletedSize += obj.Size;
});
}
}
}
}
continuationToken = listResult.NextContinuationToken;
} while (continuationToken);
console.log(`🗑️ S3 cleanup: Deleted ${deletedFiles} files (${this.formatBytes(deletedSize)}) older than ${retentionDays} days`);
return { deletedFiles, deletedSize };
}
catch (error) {
console.error('❌ S3 cleanup failed:', error);
throw error;
}
}
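/**
* Note: deleteObjects accepts at most 1000 keys per request, which is why
* the loop above deletes in batches. Example call (hypothetical numbers;
* deletedSize is in bytes):
*
*   const { deletedFiles, deletedSize } = await service.cleanupS3Storage(90);
*   // => { deletedFiles: 5230, deletedSize: 734003200 }
*/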
/**
* 📁 Clean up local storage files
*/
async cleanupLocalStorage(retentionDays) {
const fs = require('fs').promises;
const path = require('path');
const cutoffDate = new Date();
cutoffDate.setDate(cutoffDate.getDate() - retentionDays);
const uploadsDir = this.config.outputDirectory || 'uploads';
let deletedFiles = 0;
let deletedSize = 0;
try {
const cleanupDirectory = async (dirPath) => {
try {
const items = await fs.readdir(dirPath);
for (const item of items) {
const itemPath = path.join(dirPath, item);
const stats = await fs.stat(itemPath);
if (stats.isDirectory()) {
await cleanupDirectory(itemPath);
// Check if directory is empty and remove it
const remainingItems = await fs.readdir(itemPath);
if (remainingItems.length === 0) {
await fs.rmdir(itemPath);
console.log(`🗑️ Removed empty directory: ${itemPath}`);
}
}
else if (stats.isFile() && stats.mtime < cutoffDate) {
deletedSize += stats.size;
await fs.unlink(itemPath);
deletedFiles++;
}
}
}
catch (error) {
// Directory might not exist or be accessible
console.log(`⚠️ Could not access directory: ${dirPath}`);
}
};
await cleanupDirectory(uploadsDir);
console.log(`🗑️ Local cleanup: Deleted ${deletedFiles} files (${this.formatBytes(deletedSize)}) older than ${retentionDays} days`);
return { deletedFiles, deletedSize };
}
catch (error) {
console.error('❌ Local storage cleanup failed:', error);
throw error;
}
}
/**
* 🗄️ Set up S3 lifecycle policies for automatic archival and deletion
*/
async setupS3LifecyclePolicies() {
if (!this.s3 || !this.config.s3) {
throw new Error('S3 not configured');
}
const lifecycle = this.config.retention?.storage?.s3Lifecycle;
if (!lifecycle) {
console.log('⚠️ No S3 lifecycle configuration found');
return;
}
const bucket = this.config.s3.bucket;
const prefix = this.config.outputDirectory || 'uploads';
const lifecycleRules = [];
// Create lifecycle rule
const rule = {
ID: 'cron-log-service-retention',
Status: 'Enabled',
Filter: {
Prefix: prefix
},
Transitions: [],
Expiration: undefined
};
// Add transitions
if (lifecycle.transitionToIA) {
rule.Transitions.push({
Days: lifecycle.transitionToIA,
StorageClass: 'STANDARD_IA'
});
}
if (lifecycle.transitionToGlacier) {
rule.Transitions.push({
Days: lifecycle.transitionToGlacier,
StorageClass: 'GLACIER'
});
}
if (lifecycle.transitionToDeepArchive) {
rule.Transitions.push({
Days: lifecycle.transitionToDeepArchive,
StorageClass: 'DEEP_ARCHIVE'
});
}
// Add expiration
if (lifecycle.expiration) {
rule.Expiration = {
Days: lifecycle.expiration
};
}
lifecycleRules.push(rule);
try {
const params = {
Bucket: bucket,
LifecycleConfiguration: {
Rules: lifecycleRules
}
};
await this.s3.putBucketLifecycleConfiguration(params).promise();
console.log('✅ S3 lifecycle policies configured successfully');
console.log(`📁 Prefix: ${prefix}`);
if (lifecycle.transitionToIA)
console.log(`📊 Transition to IA: ${lifecycle.transitionToIA} days`);
if (lifecycle.transitionToGlacier)
console.log(`🧊 Transition to Glacier: ${lifecycle.transitionToGlacier} days`);
if (lifecycle.transitionToDeepArchive)
console.log(`🗄️ Transition to Deep Archive: ${lifecycle.transitionToDeepArchive} days`);
if (lifecycle.expiration)
console.log(`🗑️ Expiration: ${lifecycle.expiration} days`);
}
catch (error) {
console.error('❌ Failed to setup S3 lifecycle policies:', error);
throw error;
}
}
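/**
* Example s3Lifecycle block consumed above (a sketch inferred from the
* reads in this method; every value is a number of days):
*
*   retention: {
*     storage: {
*       s3Lifecycle: {
*         transitionToIA: 30,
*         transitionToGlacier: 90,
*         transitionToDeepArchive: 180,
*         expiration: 365
*       }
*     }
*   }
*/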
/**
* 📊 Get retention statistics
*/
async getRetentionStats() {
const retention = this.config.retention;
if (!retention) {
throw new Error('No retention configuration found');
}
const stats = {
database: {
apiLogs: { total: 0, oldRecords: 0 },
jobs: { total: 0, oldRecords: 0 },
logs: { total: 0, oldRecords: 0 }
},
storage: {
totalFiles: 0,
totalSize: 0,
oldFiles: 0,
oldSize: 0
}
};
// Database stats
if (retention.database) {
if (retention.database.apiLogs) {
const cutoffDate = new Date();
cutoffDate.setDate(cutoffDate.getDate() - retention.database.apiLogs);
const ApiLogModel = (0, apiLog_1.getApiLogModel)(this.config.collections?.apiLogsCollectionName || 'apilogs');
stats.database.apiLogs.total = await ApiLogModel.countDocuments();
stats.database.apiLogs.oldRecords = await ApiLogModel.countDocuments({
request_time: { $lt: cutoffDate }
});
}
if (retention.database.jobs) {
const cutoffDate = new Date();
cutoffDate.setDate(cutoffDate.getDate() - retention.database.jobs);
const JobModel = (0, job_1.getJobModel)(this.config.collections?.jobsCollectionName || 'jobs');
stats.database.jobs.total = await JobModel.countDocuments();
stats.database.jobs.oldRecords = await JobModel.countDocuments({
createdAt: { $lt: cutoffDate }
});
}
if (retention.database.logs) {
const cutoffDate = new Date();
cutoffDate.setDate(cutoffDate.getDate() - retention.database.logs);
const LogModel = (0, log_1.getLogModel)(this.config.collections?.logsCollectionName || 'logs');
stats.database.logs.total = await LogModel.countDocuments();
stats.database.logs.oldRecords = await LogModel.countDocuments({
timestamp: { $lt: cutoffDate }
});
}
}
// Storage stats (S3 only for now)
if (retention.storage?.files && this.config.uploadProvider === 's3' && this.s3) {
const cutoffDate = new Date();
cutoffDate.setDate(cutoffDate.getDate() - retention.storage.files);
const bucket = this.config.s3.bucket;
const prefix = this.config.outputDirectory || 'uploads';
const listParams = {
Bucket: bucket,
Prefix: prefix
};
let continuationToken;
do {
if (continuationToken) {
listParams.ContinuationToken = continuationToken;
}
const listResult = await this.s3.listObjectsV2(listParams).promise();
if (listResult.Contents) {
for (const obj of listResult.Contents) {
stats.storage.totalFiles++;
stats.storage.totalSize += obj.Size || 0;
if (obj.LastModified && obj.LastModified < cutoffDate) {
stats.storage.oldFiles++;
stats.storage.oldSize += obj.Size || 0;
}
}
}
continuationToken = listResult.NextContinuationToken;
} while (continuationToken);
}
return stats;
}
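/**
* Example output (hypothetical numbers; the shape mirrors the `stats`
* object initialized above):
*
*   const stats = await service.getRetentionStats();
*   // stats.database.apiLogs => { total: 50000, oldRecords: 12000 }
*   // stats.storage => { totalFiles: 800, totalSize: 1073741824, oldFiles: 120, oldSize: 94371840 }
*/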
/**
* 🔧 Manual cleanup trigger
*/
async runManualCleanup(options = {}) {
const results = {};
if (options.dryRun) {
console.log('🔍 DRY RUN MODE - No actual deletions will be performed');
const stats = await this.getRetentionStats();
console.log('📊 Retention Statistics:');
console.log('Database:', stats.database);
console.log('Storage:', stats.storage);
return results;
}
if (options.database !== false) {
results.database = await this.cleanupDatabase();
}
if (options.storage !== false) {
results.storage = await this.cleanupStorage();
}
return results;
}
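/**
* Example: preview what would be removed, then clean only the database
* (a sketch; the options are exactly the flags read above, and dryRun
* only prints retention statistics):
*
*   await service.runManualCleanup({ dryRun: true });
*   await service.runManualCleanup({ storage: false }); // database only
*/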
/**
* 🔧 Helper: Format bytes to human readable
*/
formatBytes(bytes) {
if (bytes === 0)
return '0 Bytes';
const k = 1024;
const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
const i = Math.floor(Math.log(bytes) / Math.log(k));
return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
}
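/**
* Examples: formatBytes(0) => '0 Bytes', formatBytes(1536) => '1.5 KB',
* formatBytes(10 * 1024 * 1024) => '10 MB'.
*/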
}
exports.RetentionService = RetentionService;
/**
* 🚀 Initialize retention service
*/
async function initRetention(config, db) {
const service = new RetentionService(config, db);
// Setup S3 lifecycle policies if configured
if (config.uploadProvider === 's3' && config.retention?.storage?.s3Lifecycle) {
try {
await service.setupS3LifecyclePolicies();
}
catch (error) {
console.warn('⚠️ Could not setup S3 lifecycle policies:', error.message);
}
}
// Start automatic cleanup if enabled
service.startAutomaticCleanup();
return service;
}
exports.default = RetentionService;
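// Example wiring (a minimal sketch; assumes a connected database handle `db`
// and the config fields referenced throughout this file; the require path is
// a guess, so import from wherever this module is exposed in your build):
//
//   const { initRetention } = require('logstack-zee');
//
//   // inside an async function:
//   const service = await initRetention({
//     uploadProvider: 'local',
//     outputDirectory: 'uploads',
//     retention: {
//       database: { apiLogs: 14, jobs: 30, logs: 30, autoCleanup: true },
//       storage: { files: 90, autoCleanup: true }
//     }
//   }, db);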