claude-flow-novice
Claude Flow Novice - Advanced orchestration platform for multi-agent AI workflows with CFN Loop architecture. Includes the Local RuVector Accelerator and all CFN skills for complete functionality.
JavaScript
/**
* Enhanced Retry Manager with Circuit Breaker Integration
*
* Provides advanced retry logic with:
* - Exponential backoff with jitter
* - Circuit breaker integration
* - Correlation ID tracking
* - Configurable retry policies
* - Detailed retry attempt logging
* - Retryable error classification
*
* Part of HIGH-PRIORITY retry logic implementation
*
* Usage:
* const manager = new RetryManager({ correlationId: 'req-123' });
* const result = await manager.executeWithRetry(
* async () => await databaseQuery(),
* { maxAttempts: 3, baseDelayMs: 1000 }
* );
 */
import { withRetry } from './retry.js';
import { StandardError, ErrorCode, isRetryableError } from './errors.js';
import { createLogger } from './logging.js';
const logger = createLogger('retry-manager');
/**
* Circuit breaker state
 */
export var CircuitState = /*#__PURE__*/ function(CircuitState) {
CircuitState["CLOSED"] = "CLOSED";
CircuitState["OPEN"] = "OPEN";
CircuitState["HALF_OPEN"] = "HALF_OPEN";
return CircuitState;
}({});
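/*
 * Illustrative sketch: callers can compare the manager's reported state
 * against these enum values instead of raw strings (getCircuitState is
 * defined on RetryManager below).
 *
 *   if (manager.getCircuitState() === CircuitState.OPEN) {
 *     // Back off instead of issuing new requests
 *   }
 */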
/**
* Predefined retry policies
 */
export const RetryPolicies = {
    /** Quick retry for fast operations (3 attempts, 500ms base, 5s max) */
    QUICK: {
name: 'QUICK',
maxAttempts: 3,
baseDelayMs: 500,
maxDelayMs: 5000,
exponential: true,
jitter: true
},
    /** Standard retry for typical operations (3 attempts, 1s base, 30s max) */
    STANDARD: {
name: 'STANDARD',
maxAttempts: 3,
baseDelayMs: 1000,
maxDelayMs: 30000,
exponential: true,
jitter: true
},
    /** Aggressive retry for critical operations (5 attempts, 2s base, 60s max) */
    AGGRESSIVE: {
name: 'AGGRESSIVE',
maxAttempts: 5,
baseDelayMs: 2000,
maxDelayMs: 60000,
exponential: true,
jitter: true
},
    /** Database-specific retry (3 attempts, 1s base, 30s max, only retryable errors) */
    DATABASE: {
name: 'DATABASE',
maxAttempts: 3,
baseDelayMs: 1000,
maxDelayMs: 30000,
exponential: true,
jitter: true,
shouldRetry: (error)=>{
// Only retry specific database errors
if (error instanceof StandardError) {
return error.isRetryable;
}
return isRetryableError(error);
}
},
    /** Network-specific retry (4 attempts, 2s base, 45s max) */
    NETWORK: {
name: 'NETWORK',
maxAttempts: 4,
baseDelayMs: 2000,
maxDelayMs: 45000,
exponential: true,
jitter: true,
shouldRetry: (error)=>{
if (error instanceof StandardError) {
return error.code === ErrorCode.NETWORK_ERROR || error.isRetryable;
}
return isRetryableError(error);
}
},
    /** File system retry (2 attempts, 500ms base, 5s max) */
    FILE_SYSTEM: {
name: 'FILE_SYSTEM',
maxAttempts: 2,
baseDelayMs: 500,
maxDelayMs: 5000,
exponential: false,
jitter: false
}
};
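/*
 * Illustrative sketch: policies are plain objects, so they can be passed
 * directly to executeWithRetry or spread into a one-off variant.
 * fetchUserRecord is a hypothetical data-access call, not part of this module.
 *
 *   const manager = new RetryManager({ correlationId: 'req-123' });
 *   const user = await manager.executeWithRetry(
 *     () => fetchUserRecord(userId),
 *     RetryPolicies.DATABASE
 *   );
 *
 *   // Derive a custom policy without mutating the shared preset
 *   const patientDatabase = { ...RetryPolicies.DATABASE, maxAttempts: 5 };
 */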
/**
* Enhanced Retry Manager with circuit breaker and correlation tracking
 */
export class RetryManager {
config;
circuitState = "CLOSED";
failureCount = 0;
successCount = 0;
lastFailureTime;
retryAttempts = [];
constructor(config = {}){
this.config = {
correlationId: config.correlationId,
circuitBreaker: {
failureThreshold: 5,
successThreshold: 2,
openTimeoutMs: 60000,
enabled: false,
...config.circuitBreaker
},
defaultPolicy: config.defaultPolicy || RetryPolicies.STANDARD,
enableLogging: config.enableLogging ?? true
};
}
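    /*
     * Illustrative sketch: enabling the circuit breaker with a custom
     * threshold. Omitted circuitBreaker fields keep the defaults set above
     * (5 failures to open, 2 successes to close, 60s open timeout).
     *
     *   const manager = new RetryManager({
     *     correlationId: 'req-123',
     *     circuitBreaker: { enabled: true, failureThreshold: 3 },
     *     defaultPolicy: RetryPolicies.QUICK
     *   });
     */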
/**
* Execute operation with retry logic
*
* @param fn - Async function to execute
* @param policyOrOptions - Retry policy or custom options
* @returns Result of the function
* @throws Error if all retry attempts fail or circuit is open
     */
    async executeWithRetry(fn, policyOrOptions) {
// Check circuit breaker
if (this.config.circuitBreaker.enabled) {
this.checkCircuitBreaker();
}
// Determine retry options
const options = this.buildRetryOptions(policyOrOptions);
const policyName = policyOrOptions?.name;
// Clear retry attempts for new operation
this.retryAttempts = [];
// Log operation start
if (this.config.enableLogging) {
logger.debug('Starting operation with retry', {
correlationId: this.config.correlationId,
policy: policyName || 'custom',
maxAttempts: options.maxAttempts,
circuitState: this.circuitState
});
}
try {
// Execute with retry
const result = await withRetry(fn, {
...options,
onRetry: (attempt, error, delayMs)=>{
// Track retry attempt
this.retryAttempts.push({
attemptNumber: attempt,
correlationId: this.config.correlationId,
error,
delayMs,
timestamp: new Date(),
policyName
});
// Log retry attempt
if (this.config.enableLogging) {
logger.warn('Retry attempt', {
correlationId: this.config.correlationId,
attempt,
maxAttempts: options.maxAttempts,
error: error.message,
delayMs,
policyName,
isRetryable: error instanceof StandardError ? error.isRetryable : isRetryableError(error)
});
}
// Call custom onRetry if provided
if (options.onRetry) {
options.onRetry(attempt, error, delayMs);
}
}
});
// Record success for circuit breaker
if (this.config.circuitBreaker.enabled) {
this.recordSuccess();
}
// Log success
if (this.config.enableLogging) {
logger.debug('Operation succeeded', {
correlationId: this.config.correlationId,
retryAttempts: this.retryAttempts.length,
circuitState: this.circuitState
});
}
return result;
} catch (error) {
// Record failure for circuit breaker
if (this.config.circuitBreaker.enabled) {
this.recordFailure();
}
// Log failure
if (this.config.enableLogging) {
logger.error('Operation failed after retries', {
correlationId: this.config.correlationId,
retryAttempts: this.retryAttempts.length,
finalError: error instanceof Error ? error.message : String(error),
circuitState: this.circuitState
});
}
throw error;
}
}
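    /*
     * Illustrative sketch: passing custom options instead of a named policy.
     * The onRetry hook runs in addition to the manager's own tracking;
     * flakyCall is hypothetical.
     *
     *   const result = await manager.executeWithRetry(() => flakyCall(), {
     *     maxAttempts: 4,
     *     baseDelayMs: 250,
     *     exponential: true,
     *     onRetry: (attempt, error, delayMs) =>
     *       console.warn(`attempt ${attempt} failed, next try in ${delayMs}ms`)
     *   });
     */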
/**
* Execute operation with retry and return statistics
*
* @param fn - Async function to execute
* @param policyOrOptions - Retry policy or custom options
* @returns Result and retry statistics
     */
    async executeWithRetryStats(fn, policyOrOptions) {
const startTime = Date.now();
let result;
try {
result = await this.executeWithRetry(fn, policyOrOptions);
        } catch (error) {
            // Failure path: attach the statistics to the rethrown error so the
            // caller can still inspect them (retryStats is a property added
            // here; it is not a StandardError field)
            const stats = {
                totalAttempts: this.retryAttempts.length + 1,
                succeeded: false,
                totalTimeMs: Date.now() - startTime,
                delays: this.retryAttempts.map((a)=>a.delayMs),
                errors: this.retryAttempts.map((a)=>a.error)
            };
            if (error instanceof Error) {
                error.retryStats = stats;
            }
            throw error;
        }
const stats = {
totalAttempts: this.retryAttempts.length + 1,
succeeded: true,
totalTimeMs: Date.now() - startTime,
delays: this.retryAttempts.map((a)=>a.delayMs),
errors: this.retryAttempts.map((a)=>a.error)
};
return {
result,
stats,
attempts: [
...this.retryAttempts
]
};
}
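    /*
     * Illustrative sketch: reading the statistics returned alongside the
     * result. totalAttempts counts the first try plus every retry, and
     * delays/errors are per-retry. flakyCall is hypothetical.
     *
     *   const { result, stats, attempts } = await manager.executeWithRetryStats(
     *     () => flakyCall(),
     *     RetryPolicies.QUICK
     *   );
     *   console.log(stats.totalAttempts, stats.totalTimeMs, stats.delays);
     */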
/**
* Get current circuit breaker state
     */
    getCircuitState() {
return this.circuitState;
}
/**
* Get circuit breaker statistics
     */
    getCircuitStats() {
return {
state: this.circuitState,
failureCount: this.failureCount,
successCount: this.successCount,
lastFailureTime: this.lastFailureTime
};
}
/**
* Manually reset circuit breaker
     */
    resetCircuit() {
this.circuitState = "CLOSED";
this.failureCount = 0;
this.successCount = 0;
this.lastFailureTime = undefined;
if (this.config.enableLogging) {
logger.info('Circuit breaker manually reset', {
correlationId: this.config.correlationId
});
}
}
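    /*
     * Illustrative sketch: surfacing breaker health and forcing recovery,
     * e.g. from an operator endpoint after the downstream service is fixed.
     *
     *   const { state, failureCount } = manager.getCircuitStats();
     *   if (state === CircuitState.OPEN) {
     *     manager.resetCircuit(); // returns the breaker to CLOSED
     *   }
     */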
/**
* Build retry options from policy or custom options
     */
    buildRetryOptions(policyOrOptions) {
if (!policyOrOptions) {
// Use default policy
return {
maxAttempts: this.config.defaultPolicy.maxAttempts,
baseDelayMs: this.config.defaultPolicy.baseDelayMs,
maxDelayMs: this.config.defaultPolicy.maxDelayMs,
exponential: this.config.defaultPolicy.exponential,
jitter: this.config.defaultPolicy.jitter,
shouldRetry: this.config.defaultPolicy.shouldRetry
};
}
// Check if it's a retry policy
if ('name' in policyOrOptions) {
const policy = policyOrOptions;
return {
maxAttempts: policy.maxAttempts,
baseDelayMs: policy.baseDelayMs,
maxDelayMs: policy.maxDelayMs,
exponential: policy.exponential,
jitter: policy.jitter,
shouldRetry: policy.shouldRetry
};
}
// It's custom retry options
return policyOrOptions;
}
/**
* Check circuit breaker state and throw if open
     */
    checkCircuitBreaker() {
const now = Date.now();
switch(this.circuitState){
case "OPEN":
// Check if timeout has elapsed
if (this.lastFailureTime && now - this.lastFailureTime.getTime() >= this.config.circuitBreaker.openTimeoutMs) {
// Transition to half-open
this.circuitState = "HALF_OPEN";
this.successCount = 0;
if (this.config.enableLogging) {
logger.info('Circuit breaker transitioning to half-open', {
correlationId: this.config.correlationId,
timeSinceOpen: now - this.lastFailureTime.getTime()
});
}
} else {
// Circuit is still open, reject request
throw new StandardError(ErrorCode.OPERATION_TIMEOUT, 'Circuit breaker is open - service unavailable', {
circuitState: this.circuitState,
failureCount: this.failureCount,
timeSinceOpen: this.lastFailureTime ? now - this.lastFailureTime.getTime() : 0
}, undefined, false // Not retryable
);
}
break;
case "HALF_OPEN":
// Allow request through for testing
if (this.config.enableLogging) {
logger.debug('Allowing request through half-open circuit', {
correlationId: this.config.correlationId,
successCount: this.successCount,
successThreshold: this.config.circuitBreaker.successThreshold
});
}
break;
case "CLOSED":
break;
}
}
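    /*
     * Illustrative sketch: while the circuit is open, executeWithRetry rejects
     * immediately with a non-retryable StandardError (this class reuses
     * ErrorCode.OPERATION_TIMEOUT for that case, as above), so callers can
     * fall back instead of waiting out a backoff schedule. callService and
     * cachedResponse are hypothetical.
     *
     *   try {
     *     return await manager.executeWithRetry(() => callService());
     *   } catch (error) {
     *     if (error instanceof StandardError && error.code === ErrorCode.OPERATION_TIMEOUT) {
     *       return cachedResponse;
     *     }
     *     throw error;
     *   }
     */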
/**
* Record successful operation for circuit breaker
     */
    recordSuccess() {
switch(this.circuitState){
case "HALF_OPEN":
this.successCount++;
if (this.successCount >= this.config.circuitBreaker.successThreshold) {
// Close the circuit
this.circuitState = "CLOSED";
this.failureCount = 0;
this.successCount = 0;
if (this.config.enableLogging) {
logger.info('Circuit breaker closed after recovery', {
correlationId: this.config.correlationId
});
}
}
break;
case "CLOSED":
// Reset failure count on success
if (this.failureCount > 0) {
this.failureCount = 0;
}
break;
}
}
/**
* Record failed operation for circuit breaker
     */
    recordFailure() {
this.lastFailureTime = new Date();
switch(this.circuitState){
case "HALF_OPEN":
// Failed during recovery, reopen circuit
this.circuitState = "OPEN";
this.successCount = 0;
if (this.config.enableLogging) {
logger.warn('Circuit breaker reopened after failed recovery', {
correlationId: this.config.correlationId
});
}
break;
case "CLOSED":
this.failureCount++;
if (this.failureCount >= this.config.circuitBreaker.failureThreshold) {
// Open the circuit
this.circuitState = "OPEN";
if (this.config.enableLogging) {
logger.error('Circuit breaker opened due to failures', {
correlationId: this.config.correlationId,
failureCount: this.failureCount,
threshold: this.config.circuitBreaker.failureThreshold
});
}
}
break;
}
}
}
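/*
 * Illustrative sketch of the breaker lifecycle: with failureThreshold 2, two
 * failed operations open the circuit; once openTimeoutMs elapses the next
 * call runs half-open, and successThreshold consecutive successes close it
 * again. alwaysFails is a hypothetical always-rejecting function.
 *
 *   const manager = new RetryManager({
 *     circuitBreaker: { enabled: true, failureThreshold: 2, openTimeoutMs: 1000 }
 *   });
 *   for (let i = 0; i < 2; i++) {
 *     await manager.executeWithRetry(alwaysFails, RetryPolicies.QUICK).catch(() => {});
 *   }
 *   console.log(manager.getCircuitState()); // "OPEN"
 */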
/**
* Create a retry manager instance with correlation ID
*
* @param correlationId - Correlation ID for tracking
* @param config - Additional configuration
* @returns RetryManager instance
 */
export function createRetryManager(correlationId, config) {
return new RetryManager({
...config,
correlationId
});
}
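/*
 * Illustrative sketch: threading one correlation ID through all retries of a
 * request so its log lines can be grouped. request.id and queryRows are
 * hypothetical.
 *
 *   const manager = createRetryManager(request.id);
 *   const rows = await manager.executeWithRetry(() => queryRows(), RetryPolicies.DATABASE);
 */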
/**
* Execute operation with standard retry policy
*
* @param fn - Async function to execute
* @param correlationId - Optional correlation ID
* @returns Result of the function
 */
export async function withStandardRetry(fn, correlationId) {
const manager = createRetryManager(correlationId);
return manager.executeWithRetry(fn, RetryPolicies.STANDARD);
}
/**
* Execute database operation with retry policy
*
* @param fn - Async function to execute
* @param correlationId - Optional correlation ID
* @returns Result of the function
 */
export async function withDatabaseRetry(fn, correlationId) {
const manager = createRetryManager(correlationId);
return manager.executeWithRetry(fn, RetryPolicies.DATABASE);
}
/**
* Execute network operation with retry policy
*
* @param fn - Async function to execute
* @param correlationId - Optional correlation ID
* @returns Result of the function
 */
export async function withNetworkRetry(fn, correlationId) {
const manager = createRetryManager(correlationId);
return manager.executeWithRetry(fn, RetryPolicies.NETWORK);
}
/**
* Execute file system operation with retry policy
*
* @param fn - Async function to execute
* @param correlationId - Optional correlation ID
* @returns Result of the function
 */
export async function withFileSystemRetry(fn, correlationId) {
const manager = createRetryManager(correlationId);
return manager.executeWithRetry(fn, RetryPolicies.FILE_SYSTEM);
}
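/*
 * Illustrative sketch: the convenience wrappers cover common cases in a
 * single call. This example assumes a Node.js runtime for node:fs/promises.
 *
 *   import { readFile } from 'node:fs/promises';
 *
 *   const raw = await withFileSystemRetry(
 *     () => readFile('./config.json', 'utf8'),
 *     'req-123'
 *   );
 */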
//# sourceMappingURL=retry-manager.js.map