@bonginkan/maria
Version:
MARIA OS v5.9.5 – Self-Evolving Organizational Intelligence OS | Speed Improvement Phase 3: LLM Optimization + Command Refactoring | Performance Measurement + Run Evidence System | Zero ESLint/TypeScript Errors | 人とAIが役割を持ち、学び、進化し続けるための仕事のOS | GraphRAG ×
1,401 lines (1,396 loc) • 431 kB
JavaScript
'use strict';
var util = require('util');
var promClient = require('prom-client');
var express = require('express');
var events = require('events');
var tf2 = require('@tensorflow/tfjs-node');
var chalk2 = require('chalk');
var fs = require('fs');
var path = require('path');
var child_process = require('child_process');
var fs4 = require('fs/promises');
var os = require('os');
var crypto = require('crypto');
/**
 * Wraps a required module for default-import interop.
 * ES modules (flagged with `__esModule`) are returned as-is; CommonJS
 * modules are wrapped so the whole export object sits under `.default`.
 */
function _interopDefault(e) {
  const isEsModule = Boolean(e && e.__esModule);
  return isEsModule ? e : { default: e };
}
/**
 * Builds a frozen namespace object for namespace-import interop.
 * ES modules are returned unchanged. For CommonJS modules, every own key
 * except "default" is re-exposed (getter descriptors are copied verbatim,
 * plain values get a live getter), and the original module object becomes
 * the `default` export of the frozen namespace.
 */
function _interopNamespace(e) {
  if (e && e.__esModule) return e;
  const n = Object.create(null);
  if (e) {
    for (const key of Object.keys(e)) {
      if (key === 'default') continue;
      const desc = Object.getOwnPropertyDescriptor(e, key);
      // Preserve accessor descriptors; wrap data properties in a live getter.
      Object.defineProperty(n, key, desc.get ? desc : {
        enumerable: true,
        get: () => e[key]
      });
    }
  }
  n.default = e;
  return Object.freeze(n);
}
// Interop wrappers so this CommonJS bundle can consume the required modules
// uniformly: `__default` wrappers expose the module under `.default`;
// `__namespace` wrappers produce a frozen namespace object with live getters.
// The /*#__PURE__*/ annotations let minifiers drop unused wrappers.
var express__default = /*#__PURE__*/_interopDefault(express);
var tf2__namespace = /*#__PURE__*/_interopNamespace(tf2);
var chalk2__default = /*#__PURE__*/_interopDefault(chalk2);
var fs__default = /*#__PURE__*/_interopDefault(fs);
var path__namespace = /*#__PURE__*/_interopNamespace(path);
var fs4__namespace = /*#__PURE__*/_interopNamespace(fs4);
var os__default = /*#__PURE__*/_interopDefault(os);
var crypto__namespace = /*#__PURE__*/_interopNamespace(crypto);
var __defProp = Object.defineProperty;
var __getOwnPropNames = Object.getOwnPropertyNames;
/**
 * Lazy module initializer (bundler helper). `fn` is an object whose single
 * own property is the module's init function. The first call runs it (with
 * `this` undefined and argument 0, matching the original comma-expression
 * form), caches its return value in `res`, and clears `fn` so later calls
 * return the cached value without re-running the initializer.
 */
var __esm = (fn, res) => function __init() {
  if (fn) {
    const initializer = fn[__getOwnPropNames(fn)[0]];
    res = initializer(fn = 0);
  }
  return res;
};
/**
 * Re-exports every entry of `all` on `target` as an enumerable live getter
 * (bundler helper for `export { ... }` blocks).
 */
var __export = (target, all) => {
  for (var name in all) {
    __defProp(target, name, { get: all[name], enumerable: true });
  }
};
/**
 * Detects whether the process is running under a test runner: NODE_ENV=test,
 * Vitest (VITEST flag or worker id), or a Jest worker.
 */
function isTestEnv() {
  const env = process.env;
  if (env.NODE_ENV === "test") return true;
  if (env.VITEST === "true") return true;
  return env.VITEST_WORKER_ID != null || env.JEST_WORKER_ID != null;
}
/**
 * Returns true when `fn` looks like a Jest/Vitest mock function, i.e. a
 * function carrying a `mock.calls` array.
 */
function isMockFn(fn) {
  if (typeof fn !== "function") {
    return false;
  }
  return Array.isArray(fn.mock?.calls);
}
/**
 * Writes a formatted line to stdout. Under a test runner, additionally
 * forwards the text to console.log when console.log is a mock, so tests
 * spying on console output still observe it.
 */
function stdoutLine(...args) {
  const text = format(...args);
  process.stdout.write(`${text}\n`);
  if (isTestEnv() && isMockFn(console.log)) {
    console.log(text);
  }
}
/**
 * Writes a formatted line to stderr. Under a test runner, additionally
 * forwards the text to console.error when console.error is a mock, so tests
 * spying on console output still observe it.
 */
function stderrLine(...args) {
  const text = format(...args);
  process.stderr.write(`${text}\n`);
  if (isTestEnv() && isMockFn(console.error)) {
    console.error(text);
  }
}
// `format` is bound lazily by init_tty() (to util.format); stdoutLine and
// stderrLine must not be invoked before the tty module initializer has run.
var format;
var init_tty = __esm({
  "src/shared/utils/tty.ts"() {
    format = util.format;
  }
});
/**
 * Returns the process-wide PrometheusExporter singleton, constructing it on
 * first use with `config`. Note that `config` is ignored on later calls.
 */
function getPrometheusExporter(config) {
  if (exporterInstance) {
    return exporterInstance;
  }
  exporterInstance = new PrometheusExporter(config);
  return exporterInstance;
}
// Module state for the Prometheus exporter singleton; assigned by the lazy
// initializer below (see getPrometheusExporter()).
var PrometheusExporter, exporterInstance;
var init_prometheus_exporter = __esm({
  "src/services/telemetry/prometheus-exporter.ts"() {
    init_tty();
    /**
     * Prometheus metrics exporter for MARIA telemetry.
     * Owns a prom-client Registry and an Express app serving the scrape
     * endpoint plus a /health probe. Emits "started", "metricsUpdated",
     * "error", and "customMetricRegistered" events.
     */
    PrometheusExporter = class extends events.EventEmitter {
      registry;      // prom-client Registry backing all metrics below
      app;           // Express application serving metricsPath and /health
      port;          // HTTP listen port (default 9090)
      metricsPath;   // scrape path (default "/metrics")
      // Core metrics
      responseCounter;
      responseHistogram;
      intentAccuracyGauge;
      satisfactionGauge;
      errorRateGauge;
      activeProvidersGauge;
      tokenUsageCounter;
      fallbackRateGauge;
      // config: { port?, metricsPath?, defaultLabels?, collectDefaultMetrics? }
      constructor(config = {}) {
        super();
        this.registry = new promClient.Registry();
        this.port = config.port || 9090;
        this.metricsPath = config.metricsPath || "/metrics";
        this.app = express__default.default();
        if (config.defaultLabels) {
          this.registry.setDefaultLabels(config.defaultLabels);
        }
        // Default Node process metrics are collected unless explicitly disabled.
        if (config.collectDefaultMetrics !== false) {
          promClient.collectDefaultMetrics({ register: this.registry });
        }
        this.initializeMetrics();
        this.setupRoutes();
      }
      // Registers the fixed set of MARIA counters, gauges and histograms
      // against this exporter's registry.
      initializeMetrics() {
        this.responseCounter = new promClient.Counter({
          name: "maria_ai_responses_total",
          help: "Total number of AI responses",
          labelNames: ["provider", "intent", "language", "status"],
          registers: [this.registry]
        });
        this.responseHistogram = new promClient.Histogram({
          name: "maria_response_duration_seconds",
          help: "Response latency in seconds",
          labelNames: ["provider", "intent"],
          buckets: [0.1, 0.25, 0.5, 1, 2.5, 5, 10],
          registers: [this.registry]
        });
        this.intentAccuracyGauge = new promClient.Gauge({
          name: "maria_intent_accuracy_ratio",
          help: "Intent detection accuracy ratio",
          labelNames: ["intent_type"],
          registers: [this.registry]
        });
        this.satisfactionGauge = new promClient.Gauge({
          name: "maria_user_satisfaction_ratio",
          help: "User satisfaction ratio",
          registers: [this.registry]
        });
        this.errorRateGauge = new promClient.Gauge({
          name: "maria_error_rate_ratio",
          help: "Error rate ratio",
          labelNames: ["provider"],
          registers: [this.registry]
        });
        this.activeProvidersGauge = new promClient.Gauge({
          name: "maria_active_providers_count",
          help: "Number of active AI providers",
          registers: [this.registry]
        });
        this.tokenUsageCounter = new promClient.Counter({
          name: "maria_tokens_total",
          help: "Total tokens consumed",
          labelNames: ["provider", "type"],
          // type: input|output
          registers: [this.registry]
        });
        this.fallbackRateGauge = new promClient.Gauge({
          name: "maria_fallback_rate_ratio",
          help: "Template fallback usage ratio",
          registers: [this.registry]
        });
      }
      // Wires the metrics scrape endpoint and a minimal liveness probe.
      setupRoutes() {
        this.app.get(this.metricsPath, async (_req, res) => {
          try {
            res.set("Content-Type", this.registry.contentType);
            res.end(await this.registry.metrics());
          } catch (error) {
            res.status(500).end(error instanceof Error ? error.message : "Unknown error");
          }
        });
        this.app.get("/health", (_req, res) => {
          res.json({ status: "healthy", uptime: process.uptime() });
        });
      }
      // Applies one telemetry sample to whichever metrics it carries; fields
      // that are absent are skipped. Failures emit "error" instead of throwing.
      updateMetrics(telemetryData) {
        try {
          const data = telemetryData;
          if (data.provider && data.intent) {
            this.responseCounter.inc({
              provider: data.provider,
              intent: data.intent,
              language: data.language || "unknown",
              status: data.success ? "success" : "failure"
            });
          }
          if (data.latency) {
            this.responseHistogram.observe(
              {
                provider: data.provider || "unknown",
                intent: data.intent || "unknown"
              },
              data.latency / 1e3
              // Convert to seconds
            );
          }
          if (data.intentAccuracy !== void 0) {
            this.intentAccuracyGauge.set(
              { intent_type: data.intent || "unknown" },
              data.intentAccuracy
            );
          }
          if (data.satisfaction !== void 0) {
            this.satisfactionGauge.set(data.satisfaction);
          }
          if (data.errorRate !== void 0) {
            this.errorRateGauge.set(
              { provider: data.provider || "unknown" },
              data.errorRate
            );
          }
          if (data.activeProviders !== void 0) {
            this.activeProvidersGauge.set(data.activeProviders);
          }
          if (data.fallbackRate !== void 0) {
            this.fallbackRateGauge.set(data.fallbackRate);
          }
          if (data.tokens) {
            if (data.tokens.input) {
              this.tokenUsageCounter.inc(
                {
                  provider: data.provider || "unknown",
                  type: "input"
                },
                data.tokens.input
              );
            }
            if (data.tokens.output) {
              this.tokenUsageCounter.inc(
                {
                  provider: data.provider || "unknown",
                  type: "output"
                },
                data.tokens.output
              );
            }
          }
          this.emit("metricsUpdated", data);
        } catch (innerError) {
          this.emit("error", innerError);
        }
      }
      // Starts the HTTP server; resolves once listening, rejects on listen
      // errors. Also installs a SIGTERM handler that closes the server.
      // NOTE(review): each start() call adds another SIGTERM listener —
      // repeated starts would accumulate handlers; verify callers start once.
      async start() {
        return new Promise((resolvePromise, reject) => {
          const server = this.app.listen(this.port, () => {
            stdoutLine(`\u{1F3AF} Prometheus metrics available at http://localhost:${this.port}${this.metricsPath}`);
            this.emit("started", { port: this.port, _path: this.metricsPath });
            resolvePromise();
          }).on("error", reject);
          process.on("SIGTERM", () => {
            server.close(() => {
              stdoutLine("Prometheus exporter shut down gracefully");
            });
          });
        });
      }
      // Exposes the underlying registry (e.g. for merging or direct scraping).
      getRegistry() {
        return this.registry;
      }
      // Custom metric registration
      // NOTE(review): only emits an event; the metric is not actually added to
      // the registry here — confirm listeners perform the registration.
      registerCustomMetric(metric) {
        this.emit("customMetricRegistered", metric);
      }
    };
    exporterInstance = null;
  }
});
/**
 * Returns the process-wide AnomalyDetector singleton, constructing it on
 * first use with `config`. Note that `config` is ignored on later calls.
 */
function getAnomalyDetector(config) {
  if (detectorInstance) {
    return detectorInstance;
  }
  detectorInstance = new AnomalyDetector(config);
  return detectorInstance;
}
// Module state for the anomaly-detector singleton; assigned by the lazy
// initializer below (see getAnomalyDetector()).
var tfCompat, AnomalyDetector, detectorInstance;
var init_anomaly_detector = __esm({
  "src/services/telemetry/anomaly-detector.ts"() {
    tfCompat = tf2__namespace;
    /**
     * Streaming anomaly detector over named metric time series.
     * Keeps a sliding window of points per metric, combines a model score
     * (autoencoder / LSTM / isolation forest) with a z-score, and emits
     * "anomaly", "modelUpdated" and "error" events.
     */
    AnomalyDetector = class extends events.EventEmitter {
      windowSize;     // sliding-window length per metric (default 100)
      threshold;      // anomaly threshold on the combined score (default 3)
      minDataPoints;  // minimum points before detection runs (default 30)
      modelType;      // "autoencoder" | "lstm" | "isolation-forest"
      models;         // metric name -> trained tf model
      dataBuffers;    // metric name -> recent data points
      statistics;     // metric name -> { mean, std, min, max }
      updateInterval; // model retrain period in ms (default 60s)
      updateTimer;
      constructor(config = {}) {
        super();
        this.windowSize = config.windowSize || 100;
        this.threshold = config.threshold || 3;
        this.minDataPoints = config.minDataPoints || 30;
        this.modelType = config.modelType || "autoencoder";
        this.updateInterval = config.updateInterval || 6e4;
        this.models = /* @__PURE__ */ new Map();
        this.dataBuffers = /* @__PURE__ */ new Map();
        this.statistics = /* @__PURE__ */ new Map();
        this.startUpdateTimer();
      }
      // Periodically retrains models from the buffered data.
      // NOTE(review): the interval is not unref'd, so it keeps the process
      // alive until dispose() is called.
      startUpdateTimer() {
        this.updateTimer = setInterval(() => {
          this.updateModels();
        }, this.updateInterval);
      }
      // Appends a point to the metric's buffer (trimming to the window) and,
      // once enough data exists, runs detection. Returns the detection result
      // or null while warming up; emits "anomaly" when one is found.
      async addDataPoint(metric, point) {
        if (!this.dataBuffers.has(metric)) {
          this.dataBuffers.set(metric, []);
        }
        const buffer = this.dataBuffers.get(metric);
        buffer.push(point);
        if (buffer.length > this.windowSize * 2) {
          buffer.splice(0, buffer.length - this.windowSize);
        }
        if (buffer.length < this.minDataPoints) {
          return null;
        }
        const result = await this.detectAnomaly(metric, point);
        if (result.isAnomaly) {
          this.emit("anomaly", result);
        }
        return result;
      }
      // Scores one point: model-specific score combined (max) with a z-score
      // against the buffer statistics; severity is tiered by multiples of the
      // threshold. NOTE(review): if std is 0 the z-score is NaN/Infinity —
      // constant series are not guarded against.
      async detectAnomaly(metric, point) {
        const buffer = this.dataBuffers.get(metric);
        const stats = this.calculateStatistics(buffer);
        this.statistics.set(metric, stats);
        let anomalyScore;
        switch (this.modelType) {
          case "isolation-forest":
            anomalyScore = await this.isolationForestDetection(buffer, point);
            break;
          case "lstm":
            anomalyScore = await this.lstmDetection(metric, buffer, point);
            break;
          case "autoencoder":
          default:
            anomalyScore = await this.autoencoderDetection(metric, buffer, point);
            break;
        }
        const zScore = Math.abs((point.value - stats.mean) / stats.std);
        const combinedScore = Math.max(anomalyScore, zScore);
        const isAnomaly = combinedScore > this.threshold;
        const expectedRange = [
          stats.mean - this.threshold * stats.std,
          stats.mean + this.threshold * stats.std
        ];
        let severity;
        if (combinedScore > this.threshold * 3) {
          severity = "critical";
        } else if (combinedScore > this.threshold * 2) {
          severity = "high";
        } else if (combinedScore > this.threshold * 1.5) {
          severity = "medium";
        } else {
          severity = "low";
        }
        return {
          isAnomaly,
          score: combinedScore,
          expectedRange,
          actualValue: point.value,
          timestamp: point.timestamp,
          metric,
          severity
        };
      }
      // Population mean/std/min/max over the buffered values.
      calculateStatistics(data) {
        const values = data.map((p) => p.value);
        const mean = values.reduce((a, b) => a + b, 0) / values.length;
        const variance = values.reduce((a, b) => a + Math.pow(b - mean, 2), 0) / values.length;
        const std = Math.sqrt(variance);
        const min = Math.min(...values);
        const max = Math.max(...values);
        return { mean, std, min, max };
      }
      // Isolation-forest style score: builds 100 random trees over samples of
      // the buffer and converts the average path length of `point.value` into
      // an anomaly score (scaled by 10 to be comparable with the z-score).
      async isolationForestDetection(buffer, point) {
        const values = buffer.map((p) => p.value);
        const trees = [];
        const numTrees = 100;
        const sampleSize = Math.min(256, buffer.length);
        for (let i = 0; i < numTrees; i++) {
          const sample = this.randomSample(values, sampleSize);
          const tree = this.buildIsolationTree(sample);
          trees.push(tree);
        }
        const pathLength = trees.reduce((sum, tree) => {
          return sum + this.getPathLength(tree, point.value);
        }, 0) / numTrees;
        const c = this.averagePathLength(sampleSize);
        const anomalyScore = Math.pow(2, -pathLength / c);
        return anomalyScore * 10;
      }
      // Recursively partitions data at a random split, flattening the tree
      // into an array via concatenation. NOTE(review): this flat layout does
      // not match the heap-style (2i+1 / 2i+2) indexing that getPathLength
      // uses to traverse it — the computed path lengths look unreliable;
      // verify against a reference isolation-forest implementation.
      buildIsolationTree(data, depth = 0, maxDepth = 10) {
        if (data.length <= 1 || depth >= maxDepth) {
          return [depth];
        }
        const min = Math.min(...data);
        const max = Math.max(...data);
        const split = min + Math.random() * (max - min);
        const left = data.filter((v) => v < split);
        const right = data.filter((v) => v >= split);
        return [
          split,
          ...this.buildIsolationTree(left, depth + 1, maxDepth),
          ...this.buildIsolationTree(right, depth + 1, maxDepth)
        ];
      }
      // Walks the flattened tree with binary-heap indexing to estimate the
      // isolation depth of `value` (see NOTE on buildIsolationTree above).
      getPathLength(tree, value) {
        let depth = 0;
        let idx = 0;
        while (idx < tree.length) {
          if (tree[idx] === depth) {
            return depth;
          }
          if (value < tree[idx]) {
            idx = idx * 2 + 1;
          } else {
            idx = idx * 2 + 2;
          }
          depth++;
        }
        return depth;
      }
      // Expected average path length c(n) for an isolation tree over n points
      // (0.5772156649 is the Euler–Mascheroni constant).
      averagePathLength(n) {
        if (n <= 1) return 0;
        if (n === 2) return 1;
        return 2 * (Math.log(n - 1) + 0.5772156649) - 2 * (n - 1) / n;
      }
      // Uniform random sample without replacement (by index).
      randomSample(array, size) {
        const sample = [];
        const indices = /* @__PURE__ */ new Set();
        while (sample.length < size && sample.length < array.length) {
          const idx = Math.floor(Math.random() * array.length);
          if (!indices.has(idx)) {
            indices.add(idx);
            sample.push(array[idx]);
          }
        }
        return sample;
      }
      // Scores a point by reconstruction error of a per-metric autoencoder
      // over the z-normalized value; trains the model lazily on first use.
      async autoencoderDetection(metric, buffer, point) {
        let model2 = this.models.get(metric);
        if (!model2) {
          model2 = await this.createAutoencoderModel(buffer);
          this.models.set(metric, model2);
        }
        const stats = this.statistics.get(metric);
        const normalizedValue = (point.value - stats.mean) / stats.std;
        const input2 = tf2__namespace.tensor2d([[normalizedValue]]);
        const reconstruction = model2.predict(input2);
        const reconstructedValue = await reconstruction.data();
        const error = Math.abs(normalizedValue - reconstructedValue[0]);
        input2.dispose();
        reconstruction.dispose();
        return error;
      }
      // Builds and fits a tiny 1 -> 3 -> 1 dense autoencoder on the
      // z-normalized buffer values (50 epochs, 20% validation split).
      async createAutoencoderModel(buffer) {
        const inputDim = 1;
        const encodingDim = 3;
        const encoder = tfCompat.sequential({
          layers: [
            tf2__namespace.layers.dense({
              inputShape: [inputDim],
              units: encodingDim,
              activation: "relu"
            })
          ]
        });
        const decoder = tfCompat.sequential({
          layers: [
            tf2__namespace.layers.dense({
              inputShape: [encodingDim],
              units: inputDim,
              activation: "linear"
            })
          ]
        });
        const autoencoder = tfCompat.sequential({
          layers: [encoder, decoder]
        });
        autoencoder.compile({
          optimizer: "adam",
          loss: "meanSquaredError"
        });
        const stats = this.calculateStatistics(buffer);
        const values = buffer.map((p) => (p.value - stats.mean) / stats.std);
        const xs = tf2__namespace.tensor2d(values, [values.length, 1]);
        await autoencoder.fit(xs, xs, {
          epochs: 50,
          batchSize: 32,
          validationSplit: 0.2,
          verbose: 0
        });
        xs.dispose();
        return autoencoder;
      }
      // Scores a point by one-step-ahead LSTM prediction error over the last
      // 10 z-normalized values (left-padded with zeros); error is scaled by 3
      // to be comparable with the z-score.
      async lstmDetection(metric, buffer, point) {
        let model2 = this.models.get(metric);
        if (!model2) {
          model2 = await this.createLSTMModel(buffer);
          this.models.set(metric, model2);
        }
        const sequenceLength = 10;
        const recentData = buffer.slice(-sequenceLength);
        const stats = this.statistics.get(metric);
        const sequence = recentData.map((p) => (p.value - stats.mean) / stats.std);
        while (sequence.length < sequenceLength) {
          sequence.unshift(0);
        }
        const input2 = tfCompat.tensor3d([sequence.map((v) => [v])]);
        const prediction = model2.predict(input2);
        const predictedValue = await prediction.data();
        const normalizedActual = (point.value - stats.mean) / stats.std;
        const error = Math.abs(normalizedActual - predictedValue[0]);
        input2.dispose();
        prediction.dispose();
        return error * 3;
      }
      // Builds and fits a 2-layer LSTM (50 units each, dropout 0.2) that
      // predicts the next z-normalized value from a 10-step window.
      async createLSTMModel(buffer) {
        const sequenceLength = 10;
        const features = 1;
        const model2 = tfCompat.sequential({
          layers: [
            tf2__namespace.layers.lstm({
              inputShape: [sequenceLength, features],
              units: 50,
              returnSequences: true
            }),
            tf2__namespace.layers.dropout({ rate: 0.2 }),
            tf2__namespace.layers.lstm({
              units: 50,
              returnSequences: false
            }),
            tf2__namespace.layers.dropout({ rate: 0.2 }),
            tf2__namespace.layers.dense({
              units: 1
            })
          ]
        });
        model2.compile({
          optimizer: tf2__namespace.train.adam(1e-3),
          loss: "meanSquaredError"
        });
        const stats = this.calculateStatistics(buffer);
        const normalizedValues = buffer.map(
          (p) => (p.value - stats.mean) / stats.std
        );
        const sequences = [];
        const targets = [];
        for (let i = sequenceLength; i < normalizedValues.length; i++) {
          const sequence = normalizedValues.slice(i - sequenceLength, i);
          sequences.push(sequence.map((v) => [v]));
          targets.push(normalizedValues[i]);
        }
        if (sequences.length > 0) {
          const xs = tfCompat.tensor3d(sequences);
          const ys = tf2__namespace.tensor2d(targets, [targets.length, 1]);
          await model2.fit(xs, ys, {
            epochs: 30,
            batchSize: 32,
            validationSplit: 0.2,
            verbose: 0
          });
          xs.dispose();
          ys.dispose();
        }
        return model2;
      }
      // Periodic retraining of all metrics with enough data.
      // NOTE(review): always rebuilds an autoencoder even when modelType is
      // "lstm", silently replacing the LSTM model — confirm this is intended.
      async updateModels() {
        for (const [metric, buffer] of this.dataBuffers.entries()) {
          if (buffer.length >= this.minDataPoints) {
            try {
              const model2 = await this.createAutoencoderModel(buffer);
              this.models.set(metric, model2);
              this.emit("modelUpdated", { metric, timestamp: Date.now() });
            } catch (error) {
              this.emit("error", { metric, error });
            }
          }
        }
      }
      // Returns the most recent buffered points for a metric (not only the
      // anomalous ones, despite the name).
      getAnomalyHistory(metric, limit = 100) {
        const buffer = this.dataBuffers.get(metric);
        return buffer ? buffer.slice(-limit) : [];
      }
      getStatistics(metric) {
        return this.statistics.get(metric);
      }
      // Re-scores the last 10 points of every metric; returns a map of metric
      // name -> anomalous results only.
      async evaluateMetrics() {
        const results = /* @__PURE__ */ new Map();
        for (const [metric, buffer] of this.dataBuffers.entries()) {
          const metricResults = [];
          for (const point of buffer.slice(-10)) {
            const result = await this.detectAnomaly(metric, point);
            if (result.isAnomaly) {
              metricResults.push(result);
            }
          }
          if (metricResults.length > 0) {
            results.set(metric, metricResults);
          }
        }
        return results;
      }
      // Stops the retrain timer and frees all tf models and buffers.
      dispose() {
        if (this.updateTimer) {
          clearInterval(this.updateTimer);
        }
        for (const model2 of this.models.values()) {
          model2.dispose();
        }
        this.models.clear();
        this.dataBuffers.clear();
        this.statistics.clear();
      }
    };
    detectorInstance = null;
  }
});
/**
 * Returns the process-wide PredictiveAnalytics singleton, constructing it on
 * first use with `config`. Note that `config` is ignored on later calls.
 */
function getPredictiveAnalytics(config) {
  if (analyticsInstance) {
    return analyticsInstance;
  }
  analyticsInstance = new PredictiveAnalytics(config);
  return analyticsInstance;
}
// Module state for the predictive-analytics singleton; assigned by the lazy
// initializer below (see getPredictiveAnalytics()).
var tfCompat2, PredictiveAnalytics, analyticsInstance;
var init_predictive_analytics = __esm({
  "src/services/telemetry/predictive-analytics.ts"() {
    tfCompat2 = tf2__namespace;
    /**
     * Time-series forecasting over named metrics. Buffers history per metric,
     * tracks linear trend + FFT-based seasonality, and produces horizon
     * forecasts via LSTM / "transformer" / ARIMA / prophet-like models.
     * Emits "trendUpdate", "forecastGenerated", "modelTraining" and "error".
     */
    PredictiveAnalytics = class extends events.EventEmitter {
      horizonSteps;     // forecast steps ahead (default 24)
      confidenceLevel;  // interval confidence (default 0.95)
      modelType;        // "lstm" | "transformer" | "arima" | "prophet"
      seasonality;      // "auto" | "none" (only "none" is checked explicitly)
      updateFrequency;  // forecast refresh period in ms (default 5 min)
      models;           // metric name -> trained tf model
      dataHistory;      // metric name -> buffered {timestamp, value} points
      forecasts;        // metric name -> latest forecast array
      trends;           // metric name -> latest trend analysis
      updateTimer;
      constructor(config = {}) {
        super();
        this.horizonSteps = config.horizonSteps || 24;
        this.confidenceLevel = config.confidenceLevel || 0.95;
        this.modelType = config.modelType || "lstm";
        this.seasonality = config.seasonality || "auto";
        this.updateFrequency = config.updateFrequency || 3e5;
        this.models = /* @__PURE__ */ new Map();
        this.dataHistory = /* @__PURE__ */ new Map();
        this.forecasts = /* @__PURE__ */ new Map();
        this.trends = /* @__PURE__ */ new Map();
        this.startUpdateTimer();
      }
      // Periodically regenerates forecasts for every tracked metric.
      // NOTE(review): the interval is not unref'd; it keeps the process alive
      // until dispose() is called.
      startUpdateTimer() {
        this.updateTimer = setInterval(() => {
          this.updateForecasts();
        }, this.updateFrequency);
      }
      // Appends a point (capping history at 10k, trimming to 5k) and, once 30
      // points exist, refreshes the trend analysis and emits "trendUpdate".
      async addDataPoint(metric, data) {
        if (!this.dataHistory.has(metric)) {
          this.dataHistory.set(metric, []);
        }
        const history = this.dataHistory.get(metric);
        history.push(data);
        if (history.length > 1e4) {
          history.splice(0, history.length - 5e3);
        }
        if (history.length >= 30) {
          const trend = this.analyzeTrend(history);
          this.trends.set(metric, trend);
          this.emit("trendUpdate", { metric, trend });
        }
      }
      // Produces (and caches) a forecast for one metric using the configured
      // model. Throws if fewer than 50 points have been recorded.
      async generateForecast(metric) {
        const history = this.dataHistory.get(metric);
        if (!history || history.length < 50) {
          throw new Error(`Insufficient data for metric ${metric}`);
        }
        let forecasts;
        switch (this.modelType) {
          case "arima":
            forecasts = await this.arimaForecast(history);
            break;
          case "prophet":
            forecasts = await this.prophetForecast(history);
            break;
          case "transformer":
            forecasts = await this.transformerForecast(metric, history);
            break;
          case "lstm":
          default:
            forecasts = await this.lstmForecast(metric, history);
            break;
        }
        this.forecasts.set(metric, forecasts);
        this.emit("forecastGenerated", { metric, forecasts });
        return forecasts;
      }
      // Ordinary least-squares fit over (index, value): direction from slope
      // (|slope| < 0.01 counts as stable), strength from R², plus optional
      // FFT-detected seasonality.
      analyzeTrend(history) {
        const values = history.map((d) => d.value);
        const _n = values.length;
        const x = Array.from({ length: _n }, (_, _i) => _i);
        const xMean = x.reduce((a, b) => a + b, 0) / _n;
        const yMean = values.reduce((a, b) => a + b, 0) / _n;
        let numerator = 0;
        let denominator = 0;
        for (let i = 0; i < _n; i++) {
          numerator += (x[i] - xMean) * (values[i] - yMean);
          denominator += Math.pow(x[i] - xMean, 2);
        }
        const slope = numerator / denominator;
        const intercept = yMean - slope * xMean;
        let ssRes = 0;
        let ssTot = 0;
        for (let i = 0; i < _n; i++) {
          const predicted = slope * x[i] + intercept;
          ssRes += Math.pow(values[i] - predicted, 2);
          ssTot += Math.pow(values[i] - yMean, 2);
        }
        const rSquared = 1 - ssRes / ssTot;
        let direction;
        if (Math.abs(slope) < 0.01) {
          direction = "stable";
        } else if (slope > 0) {
          direction = "increasing";
        } else {
          direction = "decreasing";
        }
        const seasonalPattern = this.detectSeasonality(values);
        return {
          direction,
          strength: rSquared,
          changeRate: slope,
          seasonalPattern
        };
      }
      // Picks the dominant non-DC frequency from the DFT; returns a
      // { period, amplitude, phase } pattern only when the amplitude exceeds
      // 10% of the series' std, otherwise undefined.
      detectSeasonality(values) {
        if (this.seasonality === "none") {
          return void 0;
        }
        const fft = this.computeFFT(values);
        const magnitudes = fft.map((c) => Math.sqrt(c.real * c.real + c.imag * c.imag));
        let maxMag = 0;
        let maxIdx = 0;
        for (let i = 1; i < magnitudes.length / 2; i++) {
          if (magnitudes[i] > maxMag) {
            maxMag = magnitudes[i];
            maxIdx = i;
          }
        }
        const period = values.length / maxIdx;
        const amplitude = maxMag * 2 / values.length;
        const phase = Math.atan2(fft[maxIdx].imag, fft[maxIdx].real);
        if (amplitude > 0.1 * this.calculateStd(values)) {
          return { period, amplitude, phase };
        }
        return void 0;
      }
      // Naive O(n²) discrete Fourier transform (not an actual FFT); fine for
      // the small windows used here.
      computeFFT(values) {
        const _n = values.length;
        const result = [];
        for (let k = 0; k < _n; k++) {
          let real = 0;
          let imag = 0;
          for (let t = 0; t < _n; t++) {
            const angle = -2 * Math.PI * k * t / _n;
            real += values[t] * Math.cos(angle);
            imag += values[t] * Math.sin(angle);
          }
          result.push({ real, imag });
        }
        return result;
      }
      // Population standard deviation.
      calculateStd(values) {
        const mean = values.reduce((a, b) => a + b, 0) / values.length;
        const variance = values.reduce((a, b) => a + Math.pow(b - mean, 2), 0) / values.length;
        return Math.sqrt(variance);
      }
      // Iterative multi-step LSTM forecast: feeds each prediction back into
      // the input window, then denormalizes and widens the confidence band by
      // sqrt(step) using the empirical one-step prediction error.
      async lstmForecast(metric, history) {
        let model2 = this.models.get(metric);
        if (!model2 || this.shouldUpdateModel(metric)) {
          model2 = await this.createLSTMModel(history);
          this.models.set(metric, model2);
        }
        const values = history.map((d) => d.value);
        const mean = values.reduce((a, b) => a + b, 0) / values.length;
        const std = this.calculateStd(values);
        const normalizedValues = values.map((v) => (v - mean) / std);
        const sequenceLength = Math.min(50, history.length - 1);
        const inputSequence = normalizedValues.slice(-sequenceLength);
        const forecasts = [];
        const predictions = [];
        for (let i = 0; i < this.horizonSteps; i++) {
          const input2 = tfCompat2.tensor3d([inputSequence.slice(-sequenceLength).map((v) => [v])]);
          const prediction = model2.predict(input2);
          const predictedValue = await prediction.data();
          predictions.push(predictedValue[0]);
          inputSequence.push(predictedValue[0]);
          input2.dispose();
          prediction.dispose();
        }
        const predictionErrors = this.calculatePredictionErrors(history, model2, sequenceLength);
        const errorStd = this.calculateStd(predictionErrors);
        const zScore = this.getZScore(this.confidenceLevel);
        const lastTimestamp = history[history.length - 1].timestamp;
        const avgInterval = this.calculateAverageInterval(history);
        for (let i = 0; i < predictions.length; i++) {
          const denormalizedPrediction = predictions[i] * std + mean;
          const margin = zScore * errorStd * std * Math.sqrt(i + 1);
          forecasts.push({
            timestamp: lastTimestamp + (i + 1) * avgInterval,
            predictedValue: denormalizedPrediction,
            lowerBound: denormalizedPrediction - margin,
            upperBound: denormalizedPrediction + margin,
            confidence: this.confidenceLevel
          });
        }
        return forecasts;
      }
      // Builds and fits a 64/32-unit stacked LSTM on z-normalized windows;
      // emits "modelTraining" progress every 10 epochs.
      async createLSTMModel(history) {
        const sequenceLength = Math.min(50, history.length - 1);
        const model2 = tfCompat2.sequential({
          layers: [
            tf2__namespace.layers.lstm({
              inputShape: [sequenceLength, 1],
              units: 64,
              returnSequences: true,
              activation: "tanh"
            }),
            tf2__namespace.layers.dropout({ rate: 0.2 }),
            tf2__namespace.layers.lstm({
              units: 32,
              returnSequences: false,
              activation: "tanh"
            }),
            tf2__namespace.layers.dropout({ rate: 0.2 }),
            tf2__namespace.layers.dense({
              units: 1,
              activation: "linear"
            })
          ]
        });
        model2.compile({
          optimizer: tf2__namespace.train.adam(1e-3),
          loss: "meanSquaredError",
          metrics: ["mae"]
        });
        const values = history.map((d) => d.value);
        const mean = values.reduce((a, b) => a + b, 0) / values.length;
        const std = this.calculateStd(values);
        const normalizedValues = values.map((v) => (v - mean) / std);
        const sequences = [];
        const targets = [];
        for (let i = sequenceLength; i < normalizedValues.length; i++) {
          const sequence = normalizedValues.slice(i - sequenceLength, i);
          sequences.push(sequence.map((v) => [v]));
          targets.push(normalizedValues[i]);
        }
        if (sequences.length > 0) {
          const xs = tfCompat2.tensor3d(sequences);
          const ys = tf2__namespace.tensor2d(targets, [targets.length, 1]);
          await model2.fit(xs, ys, {
            epochs: 50,
            batchSize: 32,
            validationSplit: 0.2,
            verbose: 0,
            callbacks: {
              onEpochEnd: (epoch, logs) => {
                if (epoch % 10 === 0) {
                  const lossRaw = logs?.loss;
                  const loss = typeof lossRaw === "number" ? lossRaw : void 0;
                  this.emit("modelTraining", { epoch, loss });
                }
              }
            }
          });
          xs.dispose();
          ys.dispose();
        }
        return model2;
      }
      // NOTE(review): placeholder implementation — a model is built and
      // cached, but the forecast values are random noise around the mean
      // (the model is never trained or used for prediction). Confirm whether
      // this path is intentionally stubbed.
      async transformerForecast(metric, history) {
        const model2 = await this.createTransformerModel(history);
        this.models.set(metric, model2);
        const values = history.map((d) => d.value);
        const mean = values.reduce((a, b) => a + b, 0) / values.length;
        const std = this.calculateStd(values);
        const forecasts = [];
        const lastTimestamp = history[history.length - 1].timestamp;
        const avgInterval = this.calculateAverageInterval(history);
        for (let i = 0; i < this.horizonSteps; i++) {
          const predictedValue = mean + (Math.random() - 0.5) * std;
          const margin = std * this.getZScore(this.confidenceLevel) * Math.sqrt(i + 1);
          forecasts.push({
            timestamp: lastTimestamp + (i + 1) * avgInterval,
            predictedValue,
            lowerBound: predictedValue - margin,
            upperBound: predictedValue + margin,
            confidence: this.confidenceLevel
          });
        }
        return forecasts;
      }
      // Simplified "transformer": dense embedding + feed-forward block +
      // global average pooling (no attention layers).
      async createTransformerModel(history) {
        const sequenceLength = Math.min(50, history.length - 1);
        const dModel = 64;
        const inputs = tf2__namespace.input({ shape: [sequenceLength, 1] });
        const embedded = tf2__namespace.layers.dense({
          units: dModel,
          activation: "linear"
        }).apply(inputs);
        const ff = tf2__namespace.layers.dense({
          units: dModel * 4,
          activation: "relu"
        }).apply(embedded);
        const ff2 = tf2__namespace.layers.dense({
          units: dModel,
          activation: "linear"
        }).apply(ff);
        const pooled = tf2__namespace.layers.globalAveragePooling1d().apply(ff2);
        const outputs = tf2__namespace.layers.dense({
          units: 1,
          activation: "linear"
        }).apply(pooled);
        const model2 = tf2__namespace.model({ inputs, outputs });
        model2.compile({
          optimizer: tf2__namespace.train.adam(1e-3),
          loss: "meanSquaredError"
        });
        return model2;
      }
      // Simplified ARIMA(2,0,1): AR coefficients via lag correlation, fixed
      // MA coefficients, iterated forward over the horizon.
      async arimaForecast(history) {
        const values = history.map((d) => d.value);
        values.length;
        // NOTE(review): the line above is a no-op expression statement —
        // likely a leftover from a removed variable.
        const p = 2;
        const arCoeffs = this.calculateARCoefficients(values, p);
        const q = 1;
        const maCoeffs = this.calculateMACoefficients(values, q);
        const forecasts = [];
        const lastTimestamp = history[history.length - 1].timestamp;
        const avgInterval = this.calculateAverageInterval(history);
        const std = this.calculateStd(values);
        const predictions = [...values];
        const errors = new Array(values.length).fill(0);
        for (let i = 0; i < this.horizonSteps; i++) {
          let prediction = 0;
          for (let j = 0; j < p; j++) {
            if (predictions.length > j) {
              prediction += arCoeffs[j] * predictions[predictions.length - 1 - j];
            }
          }
          for (let j = 0; j < q; j++) {
            if (errors.length > j) {
              prediction += maCoeffs[j] * errors[errors.length - 1 - j];
            }
          }
          predictions.push(prediction);
          errors.push(0);
          const margin = std * this.getZScore(this.confidenceLevel) * Math.sqrt(i + 1);
          forecasts.push({
            timestamp: lastTimestamp + (i + 1) * avgInterval,
            predictedValue: prediction,
            lowerBound: prediction - margin,
            upperBound: prediction + margin,
            confidence: this.confidenceLevel
          });
        }
        return forecasts;
      }
      // Lag-k autoregression coefficients via normalized lag products on the
      // mean-centered series.
      calculateARCoefficients(values, p) {
        const _n = values.length;
        const mean = values.reduce((a, b) => a + b, 0) / _n;
        const centered = values.map((v) => v - mean);
        const coeffs = [];
        for (let k = 1; k <= p; k++) {
          let numerator = 0;
          let denominator = 0;
          for (let i = k; i < _n; i++) {
            numerator += centered[i] * centered[i - k];
            denominator += centered[i - k] * centered[i - k];
          }
          coeffs.push(numerator / denominator);
        }
        return coeffs;
      }
      // Fixed, decaying MA coefficients (0.5, 0.25, ...); `_values` is unused.
      calculateMACoefficients(_values, q) {
        const coeffs = [];
        for (let i = 0; i < q; i++) {
          coeffs.push(0.5 / (i + 1));
        }
        return coeffs;
      }
      // Prophet-style decomposition: moving-average trend extrapolated
      // linearly plus a repeating seasonal component.
      async prophetForecast(history) {
        const values = history.map((d) => d.value);
        const timestamps = history.map((d) => d.timestamp);
        const trend = this.extractTrend(values);
        const seasonal = this.extractSeasonal(values, trend);
        const forecasts = [];
        const lastTimestamp = timestamps[timestamps.length - 1];
        const avgInterval = this.calculateAverageInterval(history);
        const std = this.calculateStd(values);
        for (let i = 0; i < this.horizonSteps; i++) {
          const trendValue = trend[trend.length - 1] + (trend[trend.length - 1] - trend[trend.length - 2]);
          const seasonalIdx = i % seasonal.length;
          const predictedValue = trendValue + seasonal[seasonalIdx];
          const margin = std * this.getZScore(this.confidenceLevel) * Math.sqrt(i + 1);
          forecasts.push({
            timestamp: lastTimestamp + (i + 1) * avgInterval,
            predictedValue,
            lowerBound: predictedValue - margin,
            upperBound: predictedValue + margin,
            confidence: this.confidenceLevel
          });
          trend.push(trendValue);
        }
        return forecasts;
      }
      // Centered moving average (window up to 7) as the trend component.
      extractTrend(values) {
        const windowSize = Math.min(7, Math.floor(values.length / 4));
        const trend = [];
        for (let i = 0; i < values.length; i++) {
          const start = Math.max(0, i - Math.floor(windowSize / 2));
          const end = Math.min(values.length, i + Math.floor(windowSize / 2) + 1);
          const window = values.slice(start, end);
          const avg = window.reduce((a, b) => a + b, 0) / window.length;
          trend.push(avg);
        }
        return trend;
      }
      // Averages the detrended series per seasonal position; falls back to a
      // zero 24-slot pattern when no seasonality is detected.
      extractSeasonal(values, trend) {
        const detrended = values.map((v, i) => v - trend[i]);
        const seasonalPattern = this.detectSeasonality(detrended);
        if (seasonalPattern) {
          const seasonal = [];
          const period = Math.round(seasonalPattern.period);
          for (let i = 0; i < period; i++) {
            const indices = [];
            for (let j = i; j < detrended.length; j += period) {
              indices.push(j);
            }
            const seasonalValues = indices.map((idx) => detrended[idx]);
            const avg = seasonalValues.reduce((a, b) => a + b, 0) / seasonalValues.length;
            seasonal.push(avg);
          }
          return seasonal;
        }
        return new Array(24).fill(0);
      }
      // One-step-ahead residuals of the model over the normalized history
      // (uses synchronous dataSync; runs one predict per step).
      calculatePredictionErrors(history, model2, sequenceLength) {
        const values = history.map((d) => d.value);
        const mean = values.reduce((a, b) => a + b, 0) / values.length;
        const std = this.calculateStd(values);
        const normalizedValues = values.map((v) => (v - mean) / std);
        const errors = [];
        for (let i = sequenceLength; i < normalizedValues.length - 1; i++) {
          const sequence = normalizedValues.slice(i - sequenceLength, i);
          const input2 = tfCompat2.tensor3d([sequence.map((v) => [v])]);
          const prediction = model2.predict(input2);
          const predictedValue = prediction.dataSync()[0];
          errors.push(normalizedValues[i] - predictedValue);
          input2.dispose();
          prediction.dispose();
        }
        return errors;
      }
      // Mean inter-sample gap in ms; defaults to 60s with fewer than 2 points.
      calculateAverageInterval(history) {
        if (history.length < 2) return 6e4;
        let totalInterval = 0;
        for (let i = 1; i < history.length; i++) {
          totalInterval += history[i].timestamp - history[i - 1].timestamp;
        }
        return totalInterval / (history.length - 1);
      }
      // Two-sided normal z-scores for common confidence levels; any other
      // level silently falls back to 1.96 (95%).
      getZScore(confidenceLevel) {
        const zScores = {
          0.9: 1.645,
          0.95: 1.96,
          0.99: 2.576
        };
        return zScores[confidenceLevel] || 1.96;
      }
      // Retrain once history has doubled since the assumed last training size.
      shouldUpdateModel(metric) {
        const history = this.dataHistory.get(metric);
        if (!history) return false;
        const lastTrainingSize = this.getLastTrainingSize(metric);
        return history.length > lastTrainingSize * 2;
      }
      // Hard-coded stand-in; real per-metric training sizes are not tracked.
      getLastTrainingSize(_metric) {
        return 100;
      }
      // Refreshes forecasts for all metrics, emitting "error" per failure
      // (including the expected insufficient-data error for short histories).
      async updateForecasts() {
        for (const metric of this.dataHistory.keys()) {
          try {
            await this.generateForecast(metric);
          } catch (error) {
            this.emit("error", { metric, error });
          }
        }
      }
      getForecast(metric) {
        return this.forecasts.get(metric);
      }
      getTrend(metric) {
        return this.trends.get(metric);
      }
      // Compares past forecasts against actuals matched within 60s of the
      // forecast timestamp; returns MAE, RMSE and MAPE (percent).
      async evaluateAccuracy(metric) {
        const history = this.dataHistory.get(metric);
        const forecasts = this.forecasts.get(metric);
        if (!history || !forecasts) {
          throw new Error(`No data available for metric ${metric}`);
        }
        const actual = [];
        const predicted = [];
        for (const forecast of forecasts) {
          const actualPoint = history.find(
            (h) => Math.abs(h.timestamp - forecast.timestamp) < 6e4
          );
          if (actualPoint) {
            actual.push(actualPoint.value);
            predicted.push(forecast.predictedValue);
          }
        }
        if (actual.length === 0) {
          return { mae: 0, rmse: 0, mape: 0 };
        }
        let mae = 0;
        let mse = 0;
        let mape = 0;
        for (let i = 0; i < actual.length; i++) {
          const error = Math.abs(actual[i] - predicted[i]);
          mae += error;
          mse += error * error;
          if (actual[i] !== 0) {
            mape += error / Math.abs(actual[i]);
          }
        }
        mae /= actual.length;
        const rmse = Math.sqrt(mse / actual.length);
        mape = mape / actual.length * 100;
        return { mae, rmse, mape };
      }
      // Stops the refresh timer and frees all tf models and buffered state.
      dispose() {
        if (this.updateTimer) {
          clearInterval(this.updateTimer);
        }
        for (const model2 of this.models.values()) {
          model2.dispose();
        }
        this.models.clear();
        this.dataHistory.clear();
        this.forecasts.clear();
        this.trends.clear();
      }
    };
    analyticsInstance = null;
  }
});
// Lazily construct the singleton CustomMetricsFramework and seed it
// with the built-in MARIA custom metrics on first access. Subsequent
// calls return the same instance.
function getCustomMetricsFramework() {
  if (frameworkInstance) {
    return frameworkInstance;
  }
  frameworkInstance = new CustomMetricsFramework();
  // Built-in primitive metrics registered once at creation time.
  const builtinDefinitions = [
    {
      name: "maria_custom_command_usage",
      type: "counter",
      help: "Custom command usage counter",
      labels: ["command", "user"]
    },
    {
      name: "maria_custom_memory_usage",
      type: "gauge",
      help: "Custom memory usage gauge",
      labels: ["type"]
    },
    {
      name: "maria_custom_processing_time",
      type: "histogram",
      help: "Custom processing time histogram",
      labels: ["operation"],
      buckets: [0.01, 0.05, 0.1, 0.5, 1, 2, 5, 10]
    }
  ];
  for (const definition of builtinDefinitions) {
    frameworkInstance.registerMetric(definition);
  }
  // Derived metric computed from two source counters; total defaults to
  // 1 so the ratio never divides by zero.
  frameworkInstance.createDerivedMetric({
    name: "maria_custom_efficiency_ratio",
    help: "Efficiency ratio (success / total)",
    sourceMetrics: ["maria_custom_success_count", "maria_custom_total_count"],
    calculation: (values) => {
      const success = values["maria_custom_success_count"] || 0;
      const total = values["maria_custom_total_count"] || 1;
      return success / total;
    }
  });
  return frameworkInstance;
}
var CustomMetricsFramework, frameworkInstance;
var init_custom_metrics = __esm({
"src/services/telemetry/custom-metrics.ts"() {
CustomMetricsFramework = class extends events.EventEmitter {
metrics;
definitions;
snapshots;
aggregators;
flushInterval;
flushTimer;
maxSnapshotSize;
constructor(config = {}) {
super();
this.metrics = /* @__PURE__ */ new Map();
this.definitions = /* @__PURE__ */ new Map();
this.snapshots = /* @__PURE__ */ new Map();
this.aggregators = /* @__PURE__ */ new Map();
this.flushInterval = config.flushInterval || 6e4;
this.maxSnapshotSize = config.maxSnapshotSize || 1e4;
this.initializeAggregators();
this.startFlushTimer();
}
initializeAggregators() {
this.aggregators.set("_sum", (values) => values.reduce((a, b) => a + b, 0));
this.aggregators.set(
"avg",
(values) => values.reduce((a, b) => a + b, 0) / values.length
);
this.aggregators.set("max", (values) => Math.max(...values));
this.aggregators.set("min", (values) => Math.min(...values));
this.aggregators.set("last", (values) => values[values.length - 1]);
}
startFlushTimer() {
this.flushTimer = setInterval(() => {
this.flush();
}, this.flushInterval);
}
registerMetric(definition) {
if (this.definitions.has(definition.name)) {
throw new Error(`Metric ${definition.name} already registered`);
}
this.definitions.set(definition.name, definition);
this.snapshots.set(definition.name, []);
let metric;
switch (definition.type) {
case "counter":
metric = new promClient.Counter({
name: definition.name,
help: definition.help,
labelNames: definition.labels || []
});
break;
case "gauge":
metric = new promClient.Gauge({
name: definition.name,
help: definition.help,
labelNames: definition.labels || []
});
break;
case "histogram":
metric = new promClient.Histogram({
name: definition.name,
help: definition.help,
labelNames: definition.labels || [],
buckets: definition.buckets || [0.1, 0.5, 1, 2, 5, 10]
});
break;
default:
metric = {
type: definition.type,
observe: (value, labels) => {
this.recordValue(definition.name, { value, labels });
}
};
}
this.metrics.set(definition.name, metric);
this.emit("metricRegistered", definition);
}
recordValue(metricName, value) {
const definition = this.definitions.get(metricName);
if (!definition) {
throw new Error(`Metric ${metricName} not registered`);
}
const metric = this.metrics.get(metricName);
const snapshot = this.snapshots.get(metricName);
if (!value.timestamp) {
value.timestamp = Date.now();
}
if (metric && typeof metric.observe === "function") {
if (value.labels) {
metric.observe(value.labels, value.value);
} else {
metric.observe(value.value);
}
} else if (metric && typeof metric.inc === "function") {
if (value.labels) {
metric.inc(value.labels, value.value);
} else {
metric.inc(value.value);
}
} else if (metric && typeof metric.set === "function") {
if (value.labels) {
metric.set(value.labels, value.value);
} else {
metric.set(value.value);
}
}
snapshot.push(value);
if (snapshot.length > this.maxSnapshotSize) {
snapshot.splice(0, snapshot.length - this.maxSnapshotSize);
}
this.emit("valueRecorded", { metric: metricName, value });
}
increment(metricName, amount = 1, labels) {
const definition = this.definitions.get(metricName);
if (!definition) {
throw new Error(`Metric ${metricName} not registered`);
}
if (definition.type !== "counter") {
throw new Error(`Metric ${metricName} is not a counter`);
}
this.recordValue(metricName, { value: amount, labels });
}
gauge(metricName, value, labels) {
const definition = this.definitions.get(metricName);
if (!definition) {
throw new Error(`Metric ${metricName} not registered`);
}
if (definition.type !== "gauge") {
throw new Error(`Metric ${metricName} is not a gauge`);
}
this.recordValue(metricName, { value, labels });
}
observe(metricName, value, labels) {
const definition = this.definitions.get(metricName);
if (!definition) {
throw new Error(`Metric ${metricName} not registered`);
}
if (definition.type !== "histogram" && definition.type !== "summary") {
throw new Error(`Metric ${metricName} is not a histogram or summary`);
}
this.recordValue(metricName, { value, labels });
}
getSnapshot(metricName) {
const definition = this.definitions.get(metricName);
const snapshot = this.snapshots.get(metricName);
if (!definition || !snapshot) {
return void 0;
}
const values = [...snapshot];
const statistics = this.calculateStatistics(values);
return {
name: metricName,
type: definition.type,
values,
statistics
};
}
calculateStatistics(values) {
if (values.length === 0) {
return {
count: 0,
_sum: 0,
mean: 0,
min: 0,
max: 0
};
}
const numbers = values.map((v) => v.value).sort((a, b) => a - b);
const count = numbers.length;
const _sum = numbers.reduce((a, b) => a + b, 0);
const mean = _sum / count;
const min = numbers[0];
const max = numbers[count - 1];
const p50 = this.percentile(numbers, 0.5);
const p95 = this.percentile(numbers, 0.95);
const p99 = this.percentile(numbers, 0.99);
return {
count,
_sum,
mean,
min,
max,
p50,
p95,
p99
};
}
percentile(sortedValues, p) {
const index = Math.ceil(p * sortedValues.length) - 1;
return sortedValues[Math.max(0, Math.min(index, sortedValues.length - 1))];
}
aggregate(metricName, windowMs = 6e4) {
const definition = this.defini