auto-publishing-mcp-server
Version:
Enterprise-grade MCP Server for Auto-Publishing with pre-publish validation, multi-cloud deployment, and monitoring
652 lines (566 loc) • 23.3 kB
JavaScript
import { exec } from 'child_process';
import { promisify } from 'util';
import { promises as fs } from 'fs';
import path from 'path';
const execAsync = promisify(exec);
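/**
 * ABTestingTool exposes MCP tools for running A/B tests between two Docker
 * container variants: it splits traffic with Nginx, collects container and
 * access-log metrics, checks statistical significance, and can promote a winner.
 *
 * Minimal usage sketch (the container IDs below are hypothetical and must
 * already be running):
 *
 *   const abTesting = new ABTestingTool();
 *   const result = await abTesting.createABTest({
 *     testName: 'checkout-flow',
 *     dockerIdA: 'container-a-id',
 *     dockerIdB: 'container-b-id',
 *     trafficSplit: { A: 70, B: 30 },
 *     duration: 60 * 60 * 1000 // 1 hour
 *   });
 *   console.log(result.message);
 */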
class ABTestingTool {
constructor() {
this.activeTests = new Map();
this.testResults = new Map();
this.configPath = '/tmp/ab-testing-config.json';
this.loadConfiguration();
}
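  /** Restore persisted test state from disk; start fresh if no config file exists. */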
async loadConfiguration() {
try {
const config = await fs.readFile(this.configPath, 'utf8');
const parsedConfig = JSON.parse(config);
this.activeTests = new Map(Object.entries(parsedConfig.activeTests || {}));
this.testResults = new Map(Object.entries(parsedConfig.testResults || {}));
} catch (error) {
console.log('No existing A/B test configuration found, starting fresh');
}
}
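  /** Persist active tests and accumulated results to the JSON config file. */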
async saveConfiguration() {
const config = {
activeTests: Object.fromEntries(this.activeTests),
testResults: Object.fromEntries(this.testResults),
lastUpdated: new Date().toISOString()
};
await fs.writeFile(this.configPath, JSON.stringify(config, null, 2));
}
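  /**
   * Create and start an A/B test: validates both containers, configures Nginx
   * traffic splitting, starts metrics collection, and persists the test config.
   */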
async createABTest(args) {
const {
testName,
dockerIdA,
dockerIdB,
trafficSplit = { A: 50, B: 50 },
duration = 3600000, // 1 hour default
successMetrics = ['response_time', 'error_rate'],
environment = 'prod'
} = args;
console.log(`Creating A/B test: ${testName}`);
// Validate containers exist
await this.validateContainers([dockerIdA, dockerIdB]);
// Create test configuration
const testConfig = {
testName,
dockerIdA,
dockerIdB,
trafficSplit,
duration,
successMetrics,
environment,
startTime: new Date().toISOString(),
endTime: new Date(Date.now() + duration).toISOString(),
status: 'starting',
metrics: {
A: { requests: 0, errors: 0, responseTime: [] },
B: { requests: 0, errors: 0, responseTime: [] }
}
};
// Configure Nginx for traffic splitting
await this.configureTrafficSplitting(testConfig);
// Start metrics collection
await this.startMetricsCollection(testConfig);
    // Routing and metrics collection are in place; mark the test as running
    testConfig.status = 'running';
    this.activeTests.set(testName, testConfig);
await this.saveConfiguration();
return {
success: true,
message: `A/B test '${testName}' created successfully`,
testConfig: testConfig
};
}
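  /** Verify that each given container ID exists and is currently running. */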
async validateContainers(containerIds) {
for (const containerId of containerIds) {
try {
const { stdout } = await execAsync(`docker inspect ${containerId}`);
const containerInfo = JSON.parse(stdout)[0];
if (containerInfo.State.Status !== 'running') {
throw new Error(`Container ${containerId} is not running`);
}
} catch (error) {
throw new Error(`Container validation failed for ${containerId}: ${error.message}`);
}
}
}
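  /**
   * Generate a weighted Nginx upstream/server configuration for the two variants
   * and hot-reload it into an Nginx container assumed to be named "nginx".
   */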
async configureTrafficSplitting(testConfig) {
const { testName, dockerIdA, dockerIdB, trafficSplit, environment } = testConfig;
// Get container IPs
const ipA = await this.getContainerIP(dockerIdA);
const ipB = await this.getContainerIP(dockerIdB);
// Create Nginx upstream configuration
    // The custom log format records the upstream address and request time so
    // parseAccessLogs() can attribute each request to a variant
    const upstreamConfig = `
log_format ${testName}_ab '$remote_addr "$request" $status $body_bytes_sent '
    '"$http_referer" "$http_user_agent" "$upstream_addr" $request_time';
upstream ${testName}_upstream {
server ${ipA}:80 weight=${trafficSplit.A};
server ${ipB}:80 weight=${trafficSplit.B};
}
server {
listen 80;
server_name ${testName}.${environment}.local;
location / {
proxy_pass http://${testName}_upstream;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-AB-Test-Name "${testName}";
proxy_set_header X-AB-Test-Variant $upstream_addr;
# Per-variant request and response-time logging (custom format defined above)
access_log /var/log/nginx/${testName}_access.log ${testName}_ab;
error_log /var/log/nginx/${testName}_error.log;
}
# Health check endpoint
location /health {
proxy_pass http://${testName}_upstream/health;
access_log off;
}
}`;
// Write Nginx configuration
const configFile = `/tmp/nginx_${testName}.conf`;
await fs.writeFile(configFile, upstreamConfig);
// Apply configuration to Nginx (assuming it's running in a container)
try {
await execAsync(`docker cp ${configFile} nginx:/etc/nginx/conf.d/${testName}.conf`);
await execAsync(`docker exec nginx nginx -s reload`);
} catch (error) {
console.warn('Failed to configure Nginx, continuing with test setup:', error.message);
}
}
async getContainerIP(containerId) {
try {
const { stdout } = await execAsync(`docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' ${containerId}`);
return stdout.trim();
} catch (error) {
throw new Error(`Failed to get IP for container ${containerId}: ${error.message}`);
}
}
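  /**
   * Write a per-test Prometheus scrape configuration to /tmp; the targets assume
   * each variant exposes /metrics on port 8080.
   */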
async startMetricsCollection(testConfig) {
const { testName, dockerIdA, dockerIdB } = testConfig;
// Create Prometheus configuration for this test
const prometheusConfig = `
- job_name: '${testName}_variant_a'
static_configs:
- targets: ['${await this.getContainerIP(dockerIdA)}:8080']
metrics_path: /metrics
scrape_interval: 10s
- job_name: '${testName}_variant_b'
static_configs:
- targets: ['${await this.getContainerIP(dockerIdB)}:8080']
metrics_path: /metrics
scrape_interval: 10s`;
    // Write a scrape-config snippet to /tmp; merging it into a running Prometheus
    // instance and reloading it is left to the operator (simplified approach)
const configFile = `/tmp/prometheus_${testName}.yml`;
await fs.writeFile(configFile, prometheusConfig);
console.log(`Metrics collection started for A/B test: ${testName}`);
}
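  /**
   * Report a test's current state: finalize it if its time window has elapsed,
   * otherwise refresh metrics and compute statistical significance so far.
   */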
async getTestStatus(args) {
const { testName } = args;
if (!this.activeTests.has(testName)) {
return {
success: false,
message: `A/B test '${testName}' not found`
};
}
const testConfig = this.activeTests.get(testName);
const currentTime = new Date();
const endTime = new Date(testConfig.endTime);
    // If the test window has elapsed, finalize it and return the stored results
    // instead of re-adding the finished test to the active set
    if (currentTime > endTime && testConfig.status !== 'completed') {
      testConfig.status = 'completed';
      await this.finalizeTest(testName);
      const finalResults = this.testResults.get(testName);
      return {
        success: true,
        testConfig: finalResults,
        timeRemaining: 0,
        statisticalSignificance: finalResults.finalStatistics
      };
    }
    // Otherwise refresh metrics for the still-running test
    const currentMetrics = await this.collectCurrentMetrics(testConfig);
    testConfig.metrics = { ...testConfig.metrics, ...currentMetrics };
    this.activeTests.set(testName, testConfig);
    await this.saveConfiguration();
return {
success: true,
testConfig: testConfig,
timeRemaining: Math.max(0, endTime - currentTime),
statisticalSignificance: await this.calculateSignificance(testConfig)
};
}
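  /** Combine live container stats with metrics parsed from the Nginx access log. */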
async collectCurrentMetrics(testConfig) {
const { dockerIdA, dockerIdB, testName } = testConfig;
try {
// Collect metrics from Prometheus or direct container stats
const metricsA = await this.getContainerMetrics(dockerIdA);
const metricsB = await this.getContainerMetrics(dockerIdB);
// Parse Nginx access logs for request counts and response times
      const logMetrics = await this.parseAccessLogs(testConfig);
return {
A: { ...metricsA, ...logMetrics.A },
B: { ...metricsB, ...logMetrics.B }
};
} catch (error) {
console.error('Failed to collect metrics:', error.message);
return testConfig.metrics;
}
}
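  /** Snapshot CPU, memory, and network I/O for a container via `docker stats`. */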
async getContainerMetrics(containerId) {
try {
const { stdout } = await execAsync(`docker stats ${containerId} --no-stream --format "table {{.CPUPerc}},{{.MemUsage}},{{.NetIO}}"`);
const lines = stdout.trim().split('\n');
if (lines.length > 1) {
const data = lines[1].split(',');
return {
cpuPercent: parseFloat(data[0].replace('%', '')),
memoryUsage: data[1],
networkIO: data[2]
};
}
} catch (error) {
console.warn(`Failed to get metrics for ${containerId}:`, error.message);
}
return {};
}
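  /**
   * Tail the last 1000 lines of the test's Nginx access log and aggregate
   * per-variant request counts, error counts, and response times.
   */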
  async parseAccessLogs(testConfig) {
    const { testName, dockerIdA } = testConfig;
    try {
      const logFile = `/var/log/nginx/${testName}_access.log`;
      const { stdout } = await execAsync(`docker exec nginx tail -n 1000 ${logFile}`);
      // Variant A's container IP lets us attribute each request via $upstream_addr
      const ipA = await this.getContainerIP(dockerIdA);
      const requests = { A: 0, B: 0 };
      const responseTimes = { A: [], B: [] };
      const errors = { A: 0, B: 0 };
      const lines = stdout.split('\n').filter(line => line.trim());
      for (const line of lines) {
        // Parse the custom log format: client IP, request, status, bytes,
        // referer, user agent, upstream address, request time
        const match = line.match(/(\d+\.\d+\.\d+\.\d+).*"[^"]*" (\d+) \d+ ".*" ".*" "([^"]*)" (\d+\.\d+)/);
        if (match) {
          const [, , status, upstreamAddr, responseTime] = match;
          // Attribute the request to whichever variant's container served it
          const variant = upstreamAddr.startsWith(`${ipA}:`) ? 'A' : 'B';
          requests[variant]++;
          responseTimes[variant].push(parseFloat(responseTime));
          if (parseInt(status, 10) >= 400) {
            errors[variant]++;
          }
        }
      }
return {
A: {
requests: requests.A,
errors: errors.A,
responseTime: responseTimes.A,
averageResponseTime: responseTimes.A.length > 0
? responseTimes.A.reduce((a, b) => a + b, 0) / responseTimes.A.length
: 0
},
B: {
requests: requests.B,
errors: errors.B,
responseTime: responseTimes.B,
averageResponseTime: responseTimes.B.length > 0
? responseTimes.B.reduce((a, b) => a + b, 0) / responseTimes.B.length
: 0
}
};
} catch (error) {
console.warn('Failed to parse access logs:', error.message);
return { A: {}, B: {} };
}
}
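  /**
   * Two-proportion z-test on success rate (1 - error rate):
   * z = |pA - pB| / sqrt(p * (1 - p) * (1/nA + 1/nB)), where p is the pooled rate.
   * Requires at least 30 requests per variant before reporting significance.
   */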
async calculateSignificance(testConfig) {
const { metrics } = testConfig;
const { A, B } = metrics;
if (!A.requests || !B.requests || A.requests < 30 || B.requests < 30) {
return {
significant: false,
reason: 'Insufficient sample size (minimum 30 requests per variant)',
confidence: 0
};
}
// Calculate conversion rates (simplified - using error rate as example)
const conversionA = 1 - (A.errors / A.requests);
const conversionB = 1 - (B.errors / B.requests);
// Simple statistical significance test (Z-test approximation)
const pooledRate = (A.requests * conversionA + B.requests * conversionB) / (A.requests + B.requests);
const standardError = Math.sqrt(pooledRate * (1 - pooledRate) * (1/A.requests + 1/B.requests));
const zScore = Math.abs(conversionA - conversionB) / standardError;
// 95% confidence level (z-score > 1.96)
const isSignificant = zScore > 1.96;
const confidence = this.zScoreToConfidence(zScore);
return {
significant: isSignificant,
zScore: zScore,
confidence: confidence,
conversionRateA: conversionA,
conversionRateB: conversionB,
      // Guard against division by zero when variant A had a 100% error rate
      improvement: conversionA > 0
        ? ((conversionB - conversionA) / conversionA * 100).toFixed(2) + '%'
        : 'n/a'
};
}
zScoreToConfidence(zScore) {
// Convert z-score to confidence percentage (approximation)
if (zScore < 1.28) return 80;
if (zScore < 1.645) return 90;
if (zScore < 1.96) return 95;
if (zScore < 2.58) return 99;
return 99.9;
}
async stopTest(args) {
const { testName, reason = 'Manual stop' } = args;
if (!this.activeTests.has(testName)) {
return {
success: false,
message: `A/B test '${testName}' not found`
};
}
const testConfig = this.activeTests.get(testName);
testConfig.status = 'stopped';
testConfig.stopReason = reason;
testConfig.stopTime = new Date().toISOString();
await this.finalizeTest(testName);
return {
success: true,
message: `A/B test '${testName}' stopped successfully`,
finalResults: this.testResults.get(testName)
};
}
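  /**
   * Collect final metrics and statistics, move the test from the active set to
   * stored results, and remove its Nginx configuration.
   */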
async finalizeTest(testName) {
const testConfig = this.activeTests.get(testName);
// Collect final metrics
const finalMetrics = await this.collectCurrentMetrics(testConfig);
testConfig.metrics = { ...testConfig.metrics, ...finalMetrics };
// Calculate final statistics
const significance = await this.calculateSignificance(testConfig);
const finalResults = {
...testConfig,
finalStatistics: significance,
completedAt: new Date().toISOString()
};
// Store results and remove from active tests
this.testResults.set(testName, finalResults);
this.activeTests.delete(testName);
// Clean up Nginx configuration
try {
await execAsync(`docker exec nginx rm -f /etc/nginx/conf.d/${testName}.conf`);
await execAsync(`docker exec nginx nginx -s reload`);
} catch (error) {
console.warn('Failed to clean up Nginx configuration:', error.message);
}
await this.saveConfiguration();
console.log(`A/B test '${testName}' finalized with results:`, significance);
}
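  /** List active and/or completed tests, optionally filtered by status. */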
async listTests(args) {
const { status = 'all' } = args;
let tests = [];
if (status === 'all' || status === 'active') {
for (const [name, config] of this.activeTests) {
tests.push({
name,
status: config.status,
type: 'active',
startTime: config.startTime,
endTime: config.endTime,
trafficSplit: config.trafficSplit
});
}
}
if (status === 'all' || status === 'completed') {
for (const [name, results] of this.testResults) {
tests.push({
name,
status: results.status,
type: 'completed',
startTime: results.startTime,
completedAt: results.completedAt,
significant: results.finalStatistics?.significant || false
});
}
}
return {
success: true,
tests: tests,
totalActive: this.activeTests.size,
totalCompleted: this.testResults.size
};
}
async getTestResults(args) {
const { testName } = args;
if (this.testResults.has(testName)) {
return {
success: true,
results: this.testResults.get(testName)
};
}
if (this.activeTests.has(testName)) {
const currentStatus = await this.getTestStatus({ testName });
return {
success: true,
results: currentStatus.testConfig,
isActive: true
};
}
return {
success: false,
message: `Test '${testName}' not found`
};
}
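  /**
   * Promote the winning variant of a completed test: scale the winner up,
   * stop the losing container, and return a promotion record.
   */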
async promoteWinner(args) {
const { testName, winner } = args;
if (!this.testResults.has(testName)) {
return {
success: false,
message: `Completed test '${testName}' not found`
};
}
const testResults = this.testResults.get(testName);
const winnerContainerId = winner === 'A' ? testResults.dockerIdA : testResults.dockerIdB;
const loserContainerId = winner === 'A' ? testResults.dockerIdB : testResults.dockerIdA;
    try {
      // Give the winner full capacity. Note: `docker scale` is not a valid Docker
      // CLI command; this assumes the variants run as Swarm services (with Compose,
      // use `docker compose up -d --scale <service>=3` instead).
      await execAsync(`docker service scale ${winnerContainerId}=3`);
      // Gracefully stop the losing variant
      await execAsync(`docker stop ${loserContainerId}`);
// Update production configuration
const promotionRecord = {
testName,
winner,
winnerContainerId,
promotedAt: new Date().toISOString(),
testResults: testResults.finalStatistics
};
return {
success: true,
message: `Variant ${winner} promoted to production`,
promotionRecord
};
} catch (error) {
return {
success: false,
message: `Failed to promote winner: ${error.message}`
};
}
}
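  /** MCP tool definitions (names, descriptions, and JSON Schema inputs) exposed by this class. */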
getToolDefinitions() {
return [
{
name: 'abtest/create',
description: 'Create a new A/B test with traffic splitting between two container variants',
inputSchema: {
type: 'object',
properties: {
testName: {
type: 'string',
description: 'Unique name for the A/B test'
},
dockerIdA: {
type: 'string',
description: 'Container ID for variant A'
},
dockerIdB: {
type: 'string',
description: 'Container ID for variant B'
},
trafficSplit: {
type: 'object',
description: 'Traffic distribution percentage',
properties: {
A: { type: 'number', minimum: 1, maximum: 99 },
B: { type: 'number', minimum: 1, maximum: 99 }
},
default: { A: 50, B: 50 }
},
duration: {
type: 'number',
description: 'Test duration in milliseconds',
default: 3600000
},
successMetrics: {
type: 'array',
items: { type: 'string' },
description: 'Metrics to track for success',
default: ['response_time', 'error_rate']
},
environment: {
type: 'string',
description: 'Environment to run test in',
default: 'prod'
}
},
required: ['testName', 'dockerIdA', 'dockerIdB']
}
},
{
name: 'abtest/status',
description: 'Get status and current metrics of an A/B test',
inputSchema: {
type: 'object',
properties: {
testName: {
type: 'string',
description: 'Name of the A/B test'
}
},
required: ['testName']
}
},
{
name: 'abtest/stop',
description: 'Stop an active A/B test',
inputSchema: {
type: 'object',
properties: {
testName: {
type: 'string',
description: 'Name of the A/B test to stop'
},
reason: {
type: 'string',
description: 'Reason for stopping the test',
default: 'Manual stop'
}
},
required: ['testName']
}
},
{
name: 'abtest/list',
description: 'List all A/B tests (active and completed)',
inputSchema: {
type: 'object',
properties: {
status: {
type: 'string',
enum: ['all', 'active', 'completed'],
description: 'Filter tests by status',
default: 'all'
}
}
}
},
{
name: 'abtest/results',
description: 'Get detailed results of an A/B test',
inputSchema: {
type: 'object',
properties: {
testName: {
type: 'string',
description: 'Name of the A/B test'
}
},
required: ['testName']
}
},
{
name: 'abtest/promote',
description: 'Promote the winning variant to production',
inputSchema: {
type: 'object',
properties: {
testName: {
type: 'string',
description: 'Name of the completed A/B test'
},
winner: {
type: 'string',
enum: ['A', 'B'],
description: 'Which variant to promote'
}
},
required: ['testName', 'winner']
}
}
];
}
}
export default ABTestingTool;