survey-mcp-server
Version:
Survey management server that handles survey creation, response collection, analysis, and reporting, with database access for data management
303 lines • 10.8 kB
JavaScript
import { logger } from '../utils/logger.js';
import { BaseError, TimeoutError, ExternalApiError } from '../middleware/error-handling.js';
export var CircuitBreakerState;
(function (CircuitBreakerState) {
CircuitBreakerState["CLOSED"] = "CLOSED";
CircuitBreakerState["OPEN"] = "OPEN";
CircuitBreakerState["HALF_OPEN"] = "HALF_OPEN";
})(CircuitBreakerState || (CircuitBreakerState = {}));
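// This IIFE is the typical compiled output of a TypeScript string enum:
// CircuitBreakerState.CLOSED, .OPEN and .HALF_OPEN evaluate to the strings
// "CLOSED", "OPEN" and "HALF_OPEN".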
export class CircuitBreaker {
constructor(config) {
this.state = CircuitBreakerState.CLOSED;
this.failureCount = 0;
this.lastFailureTime = null;
this.nextAttemptTime = null;
this.config = {
...config,
failureThreshold: config.failureThreshold ?? 5,
resetTimeout: config.resetTimeout ?? 60000, // 1 minute
monitoringPeriod: config.monitoringPeriod ?? 300000 // 5 minutes
};
}
async execute(operation) {
if (this.state === CircuitBreakerState.OPEN) {
if (this.shouldAttemptReset()) {
this.state = CircuitBreakerState.HALF_OPEN;
logger.info('Circuit breaker transitioning to HALF_OPEN state');
}
else {
throw new ExternalApiError('Circuit breaker is OPEN - service unavailable');
}
}
try {
const result = await operation();
this.onSuccess();
return result;
}
catch (error) {
this.onFailure(error);
throw error;
}
}
onSuccess() {
this.failureCount = 0;
this.lastFailureTime = null;
if (this.state === CircuitBreakerState.HALF_OPEN) {
this.state = CircuitBreakerState.CLOSED;
logger.info('Circuit breaker transitioned to CLOSED state');
}
}
onFailure(error) {
this.failureCount++;
this.lastFailureTime = new Date();
if (this.shouldTripCircuit()) {
this.state = CircuitBreakerState.OPEN;
this.nextAttemptTime = new Date(Date.now() + this.config.resetTimeout);
logger.warn(`Circuit breaker tripped - transitioning to OPEN state. Failure count: ${this.failureCount}`);
}
}
shouldTripCircuit() {
return this.failureCount >= this.config.failureThreshold;
}
shouldAttemptReset() {
return this.nextAttemptTime !== null && new Date() >= this.nextAttemptTime;
}
getState() {
return this.state;
}
getFailureCount() {
return this.failureCount;
}
reset() {
this.state = CircuitBreakerState.CLOSED;
this.failureCount = 0;
this.lastFailureTime = null;
this.nextAttemptTime = null;
logger.info('Circuit breaker manually reset');
}
}
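// Usage sketch (illustrative only, not part of the original module). The
// fetchSurveyFromApi() helper and its argument are hypothetical stand-ins
// for any async call you want to protect.
//
// const breaker = new CircuitBreaker({ failureThreshold: 3, resetTimeout: 30000 });
// try {
//     const survey = await breaker.execute(() => fetchSurveyFromApi('survey-123'));
// } catch (err) {
//     // After 3 consecutive failures the breaker opens and execute() rejects
//     // immediately with ExternalApiError until resetTimeout (30s) elapses;
//     // the next call after that runs in HALF_OPEN as a single trial request.
// }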
export class RetryHandler {
constructor(config) {
this.config = {
...config,
maxAttempts: config.maxAttempts ?? 3,
baseDelay: config.baseDelay ?? 1000,
maxDelay: config.maxDelay ?? 30000,
backoffMultiplier: config.backoffMultiplier ?? 2
};
}
async execute(operation, operationName) {
let lastError;
for (let attempt = 1; attempt <= this.config.maxAttempts; attempt++) {
try {
const result = await operation();
if (attempt > 1) {
logger.info(`Operation succeeded on attempt ${attempt}/${this.config.maxAttempts}`, {
operationName
});
}
return result;
}
catch (error) {
lastError = error;
if (!this.shouldRetry(error, attempt)) {
logger.warn(`Operation failed on attempt ${attempt}/${this.config.maxAttempts} - not retrying`, {
operationName,
error: error instanceof Error ? error.message : String(error)
});
throw error;
}
if (attempt < this.config.maxAttempts) {
const delay = this.calculateDelay(attempt);
logger.warn(`Operation failed on attempt ${attempt}/${this.config.maxAttempts} - retrying in ${delay}ms`, {
operationName,
error: error instanceof Error ? error.message : String(error)
});
await this.delay(delay);
}
}
}
logger.error(`Operation failed after all ${this.config.maxAttempts} attempts`, {
operationName,
error: lastError instanceof Error ? lastError.message : String(lastError)
});
throw lastError;
}
shouldRetry(error, attempt) {
if (attempt >= this.config.maxAttempts) {
return false;
}
// Check if error is retryable based on configuration
if (this.config.retryableErrors) {
const errorMatches = this.config.retryableErrors.some(retryableError => {
if (typeof retryableError === 'string') {
return error.name === retryableError || error.code === retryableError;
}
else {
return error instanceof retryableError;
}
});
if (!errorMatches) {
return false;
}
}
// Default retryable conditions
if (error instanceof BaseError) {
return error.retryable;
}
// Network errors are generally retryable
const retryableNetworkCodes = ['ECONNREFUSED', 'ENOTFOUND', 'ETIMEDOUT', 'ECONNRESET'];
if (retryableNetworkCodes.includes(error.code)) {
return true;
}
// HTTP status codes that are retryable
if (error.statusCode) {
const retryableStatusCodes = [429, 502, 503, 504];
return retryableStatusCodes.includes(error.statusCode);
}
return false;
}
calculateDelay(attempt) {
let delay = this.config.baseDelay * Math.pow(this.config.backoffMultiplier, attempt - 1);
// Add jitter to prevent thundering herd
delay = delay * (0.5 + Math.random() * 0.5);
return Math.min(delay, this.config.maxDelay);
}
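// With the defaults (baseDelay 1000ms, backoffMultiplier 2, maxDelay 30000ms)
// the nominal delays are 1000ms, 2000ms, 4000ms, ...; the jitter factor above
// scales each delay to a random value between 50% and 100% of that nominal
// figure, e.g. attempt 2 waits somewhere between 1000ms and 2000ms.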
async delay(ms) {
return new Promise(resolve => setTimeout(resolve, ms));
}
}
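// Usage sketch (illustrative only, not part of the original module). The
// saveResponse() helper and its payload are hypothetical stand-ins for a
// retryable operation.
//
// const retry = new RetryHandler({ maxAttempts: 3, baseDelay: 1000 });
// const saved = await retry.execute(
//     () => saveResponse({ surveyId: 'survey-123', answers }),
//     'saveResponse'
// );
// // Retries only fire for errors that shouldRetry() deems retryable: BaseError
// // instances flagged retryable, common network error codes, or HTTP 429/502/503/504.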
export class BaseService {
constructor(serviceName, circuitBreakerConfig, retryConfig) {
this.healthCheckInterval = null;
this.serviceName = serviceName;
this.circuitBreaker = new CircuitBreaker({
failureThreshold: 5,
resetTimeout: 60000,
monitoringPeriod: 300000,
...circuitBreakerConfig
});
this.retryHandler = new RetryHandler({
maxAttempts: 3,
baseDelay: 1000,
maxDelay: 30000,
backoffMultiplier: 2,
...retryConfig
});
this.healthStatus = {
isHealthy: true,
lastCheck: new Date()
};
// Start health monitoring
this.startHealthMonitoring();
}
async executeWithResilience(operation, operationName, options = {}) {
const { useCircuitBreaker = true, useRetry = true, timeout = 30000 } = options;
let wrappedOperation = operation;
// Add timeout
if (timeout > 0) {
wrappedOperation = () => this.withTimeout(operation(), timeout);
}
// Add retry logic
if (useRetry) {
const currentOperation = wrappedOperation;
wrappedOperation = () => this.retryHandler.execute(currentOperation, `${this.serviceName}:${operationName}`);
}
// Add circuit breaker
if (useCircuitBreaker) {
const currentOperation = wrappedOperation;
wrappedOperation = () => this.circuitBreaker.execute(currentOperation);
}
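// Final composition (outermost first): circuit breaker -> retry -> per-attempt
// timeout. Each retry attempt is therefore timed out individually, while an
// exhausted retry sequence counts as a single failure against the circuit breaker.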
try {
const result = await wrappedOperation();
this.updateHealthStatus(true);
return result;
}
catch (error) {
this.updateHealthStatus(false, error);
throw error;
}
}
async withTimeout(promise, timeout) {
let timer;
const timeoutPromise = new Promise((_, reject) => {
timer = setTimeout(() => {
reject(new TimeoutError(`Operation timed out after ${timeout}ms`));
}, timeout);
});
try {
return await Promise.race([promise, timeoutPromise]);
}
finally {
// Clear the pending timer once the race settles so it cannot keep the event loop alive.
clearTimeout(timer);
}
}
startHealthMonitoring() {
const healthCheckInterval = 60000; // 1 minute
this.healthCheckInterval = setInterval(async () => {
try {
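// performHealthCheck() is not defined on BaseService itself; concrete
// subclasses are expected to implement it (presumably declared abstract
// in the original TypeScript source).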
const healthResult = await this.performHealthCheck();
this.healthStatus = {
...healthResult,
lastCheck: new Date()
};
if (!healthResult.isHealthy) {
logger.warn(`Health check failed for service ${this.serviceName}`, {
error: healthResult.error,
details: healthResult.details
});
}
}
catch (error) {
this.healthStatus = {
isHealthy: false,
lastCheck: new Date(),
error: error.message
};
logger.error(`Health check error for service ${this.serviceName}:`, error);
}
}, healthCheckInterval);
}
updateHealthStatus(isHealthy, error) {
this.healthStatus = {
isHealthy,
lastCheck: new Date(),
error: error?.message,
details: error?.details
};
}
getHealthStatus() {
return { ...this.healthStatus };
}
getServiceMetrics() {
return {
circuitBreakerState: this.circuitBreaker.getState(),
failureCount: this.circuitBreaker.getFailureCount(),
healthStatus: this.getHealthStatus()
};
}
resetCircuitBreaker() {
this.circuitBreaker.reset();
}
async shutdown() {
if (this.healthCheckInterval) {
clearInterval(this.healthCheckInterval);
this.healthCheckInterval = null;
}
logger.info(`Service ${this.serviceName} shutdown complete`);
}
}
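// Usage sketch (illustrative only, not part of the original module): a
// hypothetical concrete service built on BaseService. Every name here other
// than the BaseService members is an assumption for demonstration.
//
// class SurveyApiService extends BaseService {
//     constructor() {
//         super('survey-api');
//     }
//     // Required by the health monitor started in the BaseService constructor.
//     async performHealthCheck() {
//         return { isHealthy: true };
//     }
//     async getSurvey(id) {
//         return this.executeWithResilience(
//             () => this.httpGet(`/surveys/${id}`), // httpGet is a hypothetical helper
//             'getSurvey',
//             { timeout: 10000 }
//         );
//     }
// }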
export const defaultServiceConfig = {
enabled: true,
circuitBreaker: {
failureThreshold: 5,
resetTimeout: 60000,
monitoringPeriod: 300000
},
retry: {
maxAttempts: 3,
baseDelay: 1000,
maxDelay: 30000,
backoffMultiplier: 2
},
healthCheck: {
enabled: true,
interval: 60000,
timeout: 5000
}
};
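// Illustrative note (not part of the original module): the circuitBreaker and
// retry blocks above correspond to the circuitBreakerConfig and retryConfig
// parameters accepted by the BaseService constructor, while healthCheck.interval
// is not currently read by startHealthMonitoring(), which hard-codes a 60s interval.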
//# sourceMappingURL=base.js.map