fortify2-js
The most powerful JavaScript security library: military-grade cryptography, 19 enhanced object methods, quantum-resistant algorithms, and perfect TypeScript support. More powerful than Lodash, with built-in security.
533 lines (529 loc) • 21.9 kB
JavaScript
'use strict';
var events = require('events');
var perf_hooks = require('perf_hooks');
var pidusage = require('pidusage');
var errorHandler = require('../../../../utils/errorHandler.js');
var index = require('../../../../components/fortified-function/index.js');
var Logger = require('../../server/utils/Logger.js');
var cluster = require('cluster');
var os = require('os');
/**
* FortifyJS Auto Scaler
* Intelligent auto-scaling with predictive analytics and resource optimization
*/
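/*
 * Usage sketch (illustrative, not taken from the package docs): the config shape and the
 * event name below are the ones this class actually reads and emits; the threshold numbers
 * and the minimal errorLogger stub are assumptions.
 *
 *   const { AutoScaler } = require("./AutoScaler.js");
 *   const scaler = new AutoScaler({
 *       autoScaling: {
 *           enabled: true,
 *           minWorkers: 2,
 *           maxWorkers: 8,
 *           cooldownPeriod: 300000, // 5 minutes between scaling actions
 *           scaleStep: 1,           // workers added/removed per action
 *           scaleUpThreshold: { cpu: 75, memory: 80, responseTime: 500, queueLength: 20 },
 *           scaleDownThreshold: { cpu: 20, memory: 30, idleTime: 10 },
 *       },
 *   }, { logError: (err) => console.error(err) }); // errorLogger only needs a logError() method
 *   scaler.on("scaling:completed", (event) => console.log(event));
 */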
/**
 * Advanced auto-scaler with heuristic decision making: weighted threshold scoring across
 * CPU, memory, response time, and queue length, with confidence adjusted from recent scaling history
 */
class AutoScaler extends events.EventEmitter {
constructor(config, errorLogger) {
super();
this.scalingHistory = [];
this.lastScalingAction = perf_hooks.performance.now();
this.isScaling = false;
this.currentWorkerCount = 0;
this.scalingTimings = new Map();
this.config = config;
this.errorLogger = errorLogger;
this.autoScaler = {
enabled: config.autoScaling?.enabled !== false,
minWorkers: config.autoScaling?.minWorkers || 1,
maxWorkers: config.autoScaling?.maxWorkers || os.cpus().length,
cooldownPeriod: config.autoScaling?.cooldownPeriod || 300000,
lastScalingAction: new Date(),
pendingActions: [],
};
this.setupAutoScaling();
}
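// Start the periodic evaluation loop when auto-scaling is enabled and emit
// "autoscaler:initialized" with the effective worker limits and cooldown.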
setupAutoScaling() {
if (!this.autoScaler.enabled)
return;
this.startScalingEvaluation();
this.emit("autoscaler:initialized", {
minWorkers: this.autoScaler.minWorkers,
maxWorkers: this.autoScaler.maxWorkers,
cooldownPeriod: this.autoScaler.cooldownPeriod,
});
}
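// Run an evaluation pass every 30s inside a fortified function wrapper (10s timeout);
// evaluation failures are logged through the error logger instead of being thrown.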
startScalingEvaluation() {
const evaluationInterval = 30000;
const fortifiedEvaluator = index.func(async () => {
await this.evaluateScaling();
}, {
ultraFast: "maximum",
auditLog: true,
timeout: 10000,
errorHandling: "graceful",
});
this.scalingInterval = setInterval(() => {
fortifiedEvaluator().catch((error) => {
const securityError = errorHandler.createSecurityError(`Auto-scaling evaluation failed: ${error.message}`, errorHandler.ErrorType.INTERNAL, errorHandler.ErrorSeverity.MEDIUM, "AUTOSCALING_ERROR", { operation: "auto_scaling_evaluation" });
this.errorLogger.logError(securityError);
});
}, evaluationInterval);
}
stopScaling() {
if (this.scalingInterval) {
clearInterval(this.scalingInterval);
this.scalingInterval = undefined;
}
this.emit("autoscaler:stopped");
}
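// One evaluation pass: skip while a scaling action is in flight, while disabled, or during the
// cooldown window; otherwise collect metrics, make a decision, and execute anything other than "no-action".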
async evaluateScaling() {
if (this.isScaling || !this.autoScaler.enabled)
return;
if (this.isInCooldownPeriod())
return;
try {
const metrics = await this.getCurrentMetrics();
const decision = this.makeScalingDecision(metrics);
if (decision.action !== "no-action") {
await this.executeScaling(decision);
}
}
catch (error) {
const securityError = errorHandler.createSecurityError(`Scaling evaluation error: ${error.message}`, errorHandler.ErrorType.INTERNAL, errorHandler.ErrorSeverity.MEDIUM, "SCALING_EVALUATION_ERROR", { operation: "scaling_evaluation" });
this.errorLogger.logError(securityError);
}
}
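// Weighted threshold scoring: each breached scale-up metric adds 25-40 points (scale up at >= 50
// if below maxWorkers); each satisfied scale-down metric adds 15-30 points (scale down at >= 40
// if above minWorkers). Confidence is then adjusted against recent scaling history.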
makeScalingDecision(metrics) {
const scaleUpThreshold = this.config.autoScaling?.scaleUpThreshold;
const scaleDownThreshold = this.config.autoScaling?.scaleDownThreshold;
let action = "no-action";
let reason = "No scaling needed";
let confidence = 0;
let targetWorkers = this.currentWorkerCount;
const scaleUpReasons = [];
let scaleUpScore = 0;
if (scaleUpThreshold?.cpu && metrics.cpu > scaleUpThreshold.cpu) {
scaleUpReasons.push(`CPU usage (${metrics.cpu}%) > threshold (${scaleUpThreshold.cpu}%)`);
scaleUpScore += 30;
}
if (scaleUpThreshold?.memory &&
metrics.memory > scaleUpThreshold.memory) {
scaleUpReasons.push(`Memory usage (${metrics.memory}%) > threshold (${scaleUpThreshold.memory}%)`);
scaleUpScore += 25;
}
if (scaleUpThreshold?.responseTime &&
metrics.responseTime > scaleUpThreshold.responseTime) {
scaleUpReasons.push(`Response time (${metrics.responseTime}ms) > threshold (${scaleUpThreshold.responseTime}ms)`);
scaleUpScore += 35;
}
if (scaleUpThreshold?.queueLength &&
metrics.queueLength > scaleUpThreshold.queueLength) {
scaleUpReasons.push(`Queue length (${metrics.queueLength}) > threshold (${scaleUpThreshold.queueLength})`);
scaleUpScore += 40;
}
const scaleDownReasons = [];
let scaleDownScore = 0;
if (scaleDownThreshold?.cpu && metrics.cpu < scaleDownThreshold.cpu) {
scaleDownReasons.push(`CPU usage (${metrics.cpu}%) < threshold (${scaleDownThreshold.cpu}%)`);
scaleDownScore += 20;
}
if (scaleDownThreshold?.memory &&
metrics.memory < scaleDownThreshold.memory) {
scaleDownReasons.push(`Memory usage (${metrics.memory}%) < threshold (${scaleDownThreshold.memory}%)`);
scaleDownScore += 15;
}
if (scaleDownThreshold?.idleTime &&
metrics.idleTime > scaleDownThreshold.idleTime) {
scaleDownReasons.push(`Idle time (${metrics.idleTime}min) > threshold (${scaleDownThreshold.idleTime}min)`);
scaleDownScore += 30;
}
if (scaleUpScore >= 50 &&
this.currentWorkerCount < this.autoScaler.maxWorkers) {
action = "scale-up";
reason = scaleUpReasons.join(", ");
confidence = Math.min(100, scaleUpScore);
targetWorkers = Math.min(this.autoScaler.maxWorkers, this.currentWorkerCount +
(this.config.autoScaling?.scaleStep || 1));
}
else if (scaleDownScore >= 40 &&
this.currentWorkerCount > this.autoScaler.minWorkers) {
action = "scale-down";
reason = scaleDownReasons.join(", ");
confidence = Math.min(100, scaleDownScore);
targetWorkers = Math.max(this.autoScaler.minWorkers, this.currentWorkerCount -
(this.config.autoScaling?.scaleStep || 1));
}
confidence = this.adjustConfidenceBasedOnHistory(action, confidence);
return {
action,
targetWorkers,
reason,
confidence,
metrics: {
cpu: metrics.cpu,
memory: metrics.memory,
responseTime: metrics.responseTime,
queueLength: metrics.queueLength,
},
};
}
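// Confidence feedback from the last hour of history: boost by 10% when the same action succeeded
// more than 80% of the time, cut by 20% when it succeeded less than half the time.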
adjustConfidenceBasedOnHistory(action, baseConfidence) {
const recentHistory = this.scalingHistory
.filter((h) => h.timestamp.getTime() > Date.now() - 3600000)
.filter((h) => h.action === action);
if (recentHistory.length === 0)
return baseConfidence;
const successRate = recentHistory.filter((h) => h.success).length /
recentHistory.length;
return successRate > 0.8
? Math.min(100, baseConfidence * 1.1)
: successRate < 0.5
? Math.max(0, baseConfidence * 0.8)
: baseConfidence;
}
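// Execute a decision only when confidence is at least 60; emits progress events, times the
// operation, and always records the attempt (success or failure) in the scaling history.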
async executeScaling(decision) {
if (decision.confidence < 60) {
this.emit("scaling:skipped", {
reason: "Low confidence",
confidence: decision.confidence,
decision: decision.action,
});
return;
}
this.isScaling = true;
const startWorkers = this.currentWorkerCount;
const scalingId = `${decision.action}_${Date.now()}`;
const scalingStartTime = perf_hooks.performance.now();
let success = false;
try {
this.emit("scaling:executing", {
action: decision.action,
fromWorkers: startWorkers,
toWorkers: decision.targetWorkers,
reason: decision.reason,
confidence: decision.confidence,
});
if (decision.action === "scale-up") {
await this.scaleUp(decision.targetWorkers - startWorkers);
}
else {
await this.scaleDown(startWorkers - decision.targetWorkers);
}
success = true;
this.currentWorkerCount = decision.targetWorkers;
this.lastScalingAction = perf_hooks.performance.now();
this.autoScaler.lastScalingAction = new Date();
const scalingDuration = perf_hooks.performance.now() - scalingStartTime;
this.scalingTimings.set(scalingId, scalingDuration);
this.emit("scaling:completed", {
action: decision.action,
fromWorkers: startWorkers,
toWorkers: decision.targetWorkers,
success: true,
duration: scalingDuration,
});
}
catch (error) {
const securityError = errorHandler.createSecurityError(`Scaling execution failed: ${error.message}`, errorHandler.ErrorType.INTERNAL, errorHandler.ErrorSeverity.HIGH, "SCALING_EXECUTION_ERROR", { operation: "scaling_execution" });
this.errorLogger.logError(securityError);
}
finally {
this.recordScalingHistory({
timestamp: new Date(),
action: decision.action,
fromWorkers: startWorkers,
toWorkers: decision.targetWorkers,
reason: decision.reason,
success,
});
this.isScaling = false;
}
}
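// Fork additional workers up to maxWorkers, staggered by 200ms, waiting up to 8s for each to come online.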
async scaleUp(count = 1) {
const targetCount = Math.min(this.autoScaler.maxWorkers, this.currentWorkerCount + count);
const actualCount = targetCount - this.currentWorkerCount;
if (actualCount <= 0) {
throw new Error("Cannot scale up: already at maximum workers");
}
this.emit("scaling:starting", {
action: "scale-up",
count: actualCount,
targetCount,
});
try {
const startPromises = [];
for (let i = 0; i < actualCount; i++) {
startPromises.push(new Promise((resolve, reject) => {
const worker = cluster.fork();
const timeout = setTimeout(() => {
worker.kill();
reject(new Error(`Worker startup timeout after 8s`));
}, 8000);
worker.once("online", () => {
clearTimeout(timeout);
this.emit("worker:online", {
workerId: worker.id,
pid: worker.process.pid,
});
resolve();
});
worker.once("error", (error) => {
clearTimeout(timeout);
reject(error);
});
}));
if (i < actualCount - 1) {
await new Promise((resolve) => setTimeout(resolve, 200));
}
}
await Promise.all(startPromises);
this.currentWorkerCount = targetCount;
this.emit("cluster:scaled", "scale-up", targetCount);
this.emit("scaling:success", {
action: "scale-up",
targetCount,
message: `Scaled up to ${targetCount} workers`,
});
}
catch (error) {
throw new Error(`Failed to scale up: ${error.message}`);
}
}
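// Gracefully disconnect surplus workers down to minWorkers, force-killing any that have not
// disconnected within 8s.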
async scaleDown(count = 1) {
const targetCount = Math.max(this.autoScaler.minWorkers, this.currentWorkerCount - count);
const actualCount = this.currentWorkerCount - targetCount;
if (actualCount <= 0) {
throw new Error("Cannot scale down: already at minimum workers");
}
this.emit("scaling:starting", {
action: "scale-down",
count: actualCount,
targetCount,
});
try {
const workers = Object.values(cluster.workers || {}).slice(0, actualCount);
const stopPromises = [];
for (const worker of workers) {
if (worker &&
typeof worker === "object" &&
"disconnect" in worker) {
stopPromises.push(new Promise((resolve) => {
const timeout = setTimeout(() => {
worker.kill();
resolve();
}, 8000);
worker.once("disconnect", () => {
clearTimeout(timeout);
this.emit("worker:disconnected", {
workerId: worker.id,
graceful: true,
});
resolve();
});
worker.once("error", () => {
clearTimeout(timeout);
resolve();
});
worker.disconnect();
}));
}
}
await Promise.all(stopPromises);
this.currentWorkerCount = targetCount;
this.emit("cluster:scaled", "scale-down", targetCount);
this.emit("scaling:success", {
action: "scale-down",
targetCount,
message: `Scaled down to ${targetCount} workers`,
});
}
catch (error) {
throw new Error(`Failed to scale down: ${error.message}`);
}
}
async autoScale() {
await this.evaluateScaling();
}
async getOptimalWorkerCount() {
const metrics = await this.getCurrentMetrics();
return this.makeScalingDecision(metrics).targetWorkers;
}
isInCooldownPeriod() {
return (perf_hooks.performance.now() - this.lastScalingAction <
this.autoScaler.cooldownPeriod);
}
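// Metrics resolution order: aggregated metrics from the metrics collector, then per-worker stats
// from the worker manager, then per-PID sampling via pidusage, and finally neutral defaults
// (50% CPU/memory, 100ms response time).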
async getCurrentMetrics() {
try {
if (this.metricsCollector) {
const { cpu, memory, responseTime, activeWorkers, totalRequests, errorRate, } = this.metricsCollector.getAggregatedMetrics();
return {
cpu,
memory,
responseTime,
queueLength: this.estimateQueueLength(cpu, activeWorkers),
idleTime: Math.max(0, (100 - cpu) / 10),
systemLoad: cpu,
systemMemory: memory,
workerCount: activeWorkers,
totalRequests,
errorRate,
};
}
if (this.workerManager) {
const workers = this.workerManager.getActiveWorkers();
const workerCount = workers.length;
if (workerCount > 0) {
let totalWorkerCpu = 0;
let totalWorkerMemory = 0;
let totalResponseTime = 0;
let totalQueueLength = 0;
for (const worker of workers) {
totalWorkerCpu += worker.cpu?.usage || 0;
totalWorkerMemory += worker.memory?.percentage || 0;
totalResponseTime +=
worker.requests?.averageResponseTime || 0;
totalQueueLength +=
worker.requests?.queuedRequests || 0;
}
return {
cpu: Math.min(100, totalWorkerCpu / workerCount),
memory: Math.min(100, totalWorkerMemory / workerCount),
responseTime: totalResponseTime / workerCount,
queueLength: totalQueueLength / workerCount,
idleTime: Math.max(0, (100 - totalWorkerCpu / workerCount) / 10),
systemLoad: totalWorkerCpu / workerCount,
systemMemory: totalWorkerMemory / workerCount,
workerCount,
};
}
}
const workerPromises = Object.values(cluster.workers || {})
.filter((worker) => !!worker && !worker.isDead())
.map(async (worker) => {
try {
if (worker.process?.pid) {
const stats = await pidusage(worker.process.pid);
return {
cpu: stats.cpu,
memory: stats.memory / (1024 * 1024),
};
}
return { cpu: 0, memory: 0 };
}
catch {
return { cpu: 0, memory: 0 };
}
});
const workerStats = await Promise.all(workerPromises);
const validStats = workerStats.filter((stats) => stats.cpu > 0 || stats.memory > 0);
if (validStats.length > 0) {
const avgWorkerCpu = validStats.reduce((sum, stats) => sum + stats.cpu, 0) /
validStats.length;
const avgWorkerMemory = validStats.reduce((sum, stats) => sum + stats.memory, 0) /
validStats.length;
return {
cpu: Math.min(100, avgWorkerCpu),
memory: Math.min(100, avgWorkerMemory),
responseTime: this.estimateResponseTime(avgWorkerCpu, validStats.length),
queueLength: this.estimateQueueLength(avgWorkerCpu, validStats.length),
idleTime: Math.max(0, (100 - avgWorkerCpu) / 10),
systemLoad: avgWorkerCpu,
systemMemory: avgWorkerMemory,
workerCount: validStats.length,
};
}
return {
cpu: 50,
memory: 50,
responseTime: 100,
queueLength: 5,
idleTime: 5,
systemLoad: 50,
systemMemory: 50,
workerCount: this.currentWorkerCount,
};
}
catch (error) {
Logger.logger.error("other", `Metrics collection failed: ${error.message}`);
return {
cpu: 50,
memory: 50,
responseTime: 100,
queueLength: 5,
idleTime: 5,
systemLoad: 50,
systemMemory: 50,
workerCount: this.currentWorkerCount,
};
}
}
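// Heuristic response-time estimate: 50ms baseline, rising with CPU above 60% (sharply above 80%)
// and when fewer workers than CPU cores are running; capped at 5000ms.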
estimateResponseTime(cpuUsage, workerCount) {
let baseTime = 50;
if (cpuUsage > 80)
baseTime += (cpuUsage - 80) * 15;
else if (cpuUsage > 60)
baseTime += (cpuUsage - 60) * 4;
if (workerCount > 0) {
const optimalWorkers = os.cpus().length;
if (workerCount < optimalWorkers)
baseTime *= optimalWorkers / workerCount;
}
return Math.min(5000, baseTime);
}
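// Heuristic queue-length estimate: zero below 50% CPU, then roughly one queued request per
// 8 points of CPU, inflated when fewer workers than CPU cores are running; capped at 100.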
estimateQueueLength(cpuUsage, workerCount) {
if (cpuUsage < 50)
return 0;
let queueLength = Math.floor((cpuUsage - 50) / 8);
if (workerCount > 0) {
const optimalWorkers = os.cpus().length;
if (workerCount < optimalWorkers)
queueLength *= optimalWorkers / workerCount;
}
return Math.min(100, queueLength);
}
recordScalingHistory(entry) {
this.scalingHistory.push(entry);
if (this.scalingHistory.length > 100)
this.scalingHistory.shift();
}
updateWorkerCount(count) {
this.currentWorkerCount = count;
}
getConfiguration() {
return { ...this.autoScaler };
}
getScalingHistory() {
return [...this.scalingHistory];
}
getScalingStats() {
const total = this.scalingHistory.length;
const successful = this.scalingHistory.filter((h) => h.success).length;
return {
totalScalingActions: total,
successfulActions: successful,
failedActions: total - successful,
successRate: total > 0 ? (successful / total) * 100 : 0,
averageScalingTime: this.calculateAverageScalingTime(),
lastScalingAction: this.autoScaler.lastScalingAction,
};
}
enable() {
this.autoScaler.enabled = true;
this.startScalingEvaluation();
Logger.logger.debug("other", "Auto-scaling enabled");
}
disable() {
this.autoScaler.enabled = false;
this.stopScaling();
Logger.logger.debug("other", "Auto-scaling disabled");
}
isEnabled() {
return this.autoScaler.enabled;
}
setWorkerManager(workerManager) {
this.workerManager = workerManager;
}
setMetricsCollector(metricsCollector) {
this.metricsCollector = metricsCollector;
}
calculateAverageScalingTime() {
if (this.scalingTimings.size === 0)
return 0;
const timings = Array.from(this.scalingTimings.values());
return timings.reduce((sum, time) => sum + time, 0) / timings.length;
}
}
exports.AutoScaler = AutoScaler;
//# sourceMappingURL=AutoScaler.js.map