mcp-adr-analysis-server
MCP server for analyzing Architectural Decision Records and project architecture
TypeScript
/**
* AI Executor Service for OpenRouter.ai Integration
*
* This service handles the execution of prompts using OpenRouter.ai,
* transforming the MCP server from returning prompts to returning actual results.
*
* @deprecated This module is part of the legacy OpenRouter execution path.
* As of CE-MCP Phase 4.4, tools should return OrchestrationDirectives instead
* of using this executor. This module is retained for:
* - Hybrid mode support (CE-MCP with OpenRouter fallback)
* - LLM-specific tools (llm_web_search, llm_cloud_management, llm_database_management)
*
* For new tools, use the directive-based approach defined in ADR-014.
* @see src/types/ce-mcp.ts for OrchestrationDirective types
* @see docs/adrs/adr-014-ce-mcp-architecture.md
*/
import { AIConfig } from '../config/ai-config.js';
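/**
 * Result returned for a completed prompt execution.
 *
 * @example
 * ```typescript
 * // Illustrative shape only; the field values below are placeholders, not real output.
 * const result: AIExecutionResult = {
 *   content: '...AI-generated analysis...',
 *   model: 'anthropic/claude-3-sonnet',
 *   usage: { promptTokens: 1200, completionTokens: 400, totalTokens: 1600 },
 *   metadata: {
 *     executionTime: 2150, // assumed to be milliseconds
 *     cached: false,
 *     retryCount: 0,
 *     timestamp: new Date().toISOString()
 *   }
 * };
 * ```
 */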
export interface AIExecutionResult {
/** The AI-generated response content */
content: string;
/** Model used for generation */
model: string;
/** Token usage information */
usage?: {
promptTokens: number;
completionTokens: number;
totalTokens: number;
};
/** Execution metadata */
metadata: {
executionTime: number;
cached: boolean;
retryCount: number;
timestamp: string;
};
}
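/**
 * Error thrown when prompt execution fails.
 *
 * @example
 * ```typescript
 * // Handling sketch: the concrete `code` values are not part of this declaration,
 * // so only the declared `retryable` flag is inspected here.
 * try {
 *   await getAIExecutor().executePrompt('Analyze this ADR...');
 * } catch (error) {
 *   const aiError = error as AIExecutionError;
 *   if (!aiError.retryable) {
 *     console.error(`AI execution failed (${aiError.code}): ${aiError.message}`);
 *   }
 * }
 * ```
 */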
export interface AIExecutionError extends Error {
code: string;
retryable: boolean;
originalError?: unknown;
}
/**
* AI Executor Service Class
*
* @description Core service for executing AI prompts through OpenRouter.ai integration.
* Transforms the MCP server from returning prompts to returning actual AI-generated results.
* Includes caching, retry logic, and comprehensive error handling.
*
* @example
* ```typescript
* // Initialize with custom configuration
* const executor = new AIExecutor({
* apiKey: 'your-api-key',
* model: 'anthropic/claude-3-sonnet',
* maxTokens: 4000
* });
*
* // Execute a prompt
 * const result = await executor.executePrompt('Analyze this ADR...', {
 *   systemPrompt: 'You are reviewing the project at /path/to/project'
 * });
*
* console.log(result.content); // AI-generated analysis
* ```
*
* @example
* ```typescript
* // Use singleton instance
* const executor = getAIExecutor();
 * const result = await executor.executePrompt('Generate ADR suggestions', {
 *   maxTokens: 2000
 * });
* ```
*
* @since 2.0.0
* @category AI
* @category Core
*/
export declare class AIExecutor {
private client;
private config;
private cache;
constructor(config?: AIConfig);
/**
* Initialize OpenAI client for OpenRouter
*/
private initializeClient;
/**
* Check if AI execution is available
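 *
 * @example
 * ```typescript
 * // Sketch: skip AI execution when the executor is not configured
 * // (for example, when no API key is set; that trigger is an assumption).
 * const executor = getAIExecutor();
 * if (executor.isAvailable()) {
 *   const result = await executor.executePrompt('Summarize open architectural decisions');
 *   console.log(result.content);
 * }
 * ```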
*/
isAvailable(): boolean;
/**
* Reload configuration if environment variables have changed
*/
private reloadConfigIfNeeded;
/**
* Execute a prompt and return the AI response
*/
executePrompt(prompt: string, options?: {
model?: string;
temperature?: number;
maxTokens?: number;
systemPrompt?: string;
}): Promise<AIExecutionResult>;
/**
* Execute a structured prompt that expects JSON response
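 *
 * @example
 * ```typescript
 * // Sketch only: the expected shape of the optional `schema` argument is not defined
 * // in this declaration, so it is omitted here and the response type is assumed.
 * interface AdrSuggestion { title: string; rationale: string; }
 * const { data, raw } = await getAIExecutor().executeStructuredPrompt<AdrSuggestion[]>(
 *   'Suggest ADRs as a JSON array of { title, rationale } objects',
 *   undefined,
 *   { maxTokens: 2000 }
 * );
 * console.log(data.length, raw.usage?.totalTokens);
 * ```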
*/
executeStructuredPrompt<T = any>(prompt: string, schema?: any, options?: {
model?: string;
temperature?: number;
maxTokens?: number;
systemPrompt?: string;
}): Promise<{
data: T;
raw: AIExecutionResult;
}>;
/**
* Extract JSON content from AI response, handling markdown code blocks
*/
private extractJsonFromResponse;
/**
* Generate cache key for a prompt execution
*/
private generateCacheKey;
/**
* Get cached result if available and not expired
*/
private getCachedResult;
/**
* Cache a result
*/
private setCachedResult;
/**
* Clean up expired cache entries
*/
private cleanupCache;
/**
* Create a standardized AI execution error
*/
private createError;
/**
* Get current configuration
*/
getConfig(): AIConfig;
/**
* Update configuration
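 *
 * @example
 * ```typescript
 * // Sketch: the `model` field is assumed from the class-level example above;
 * // see AIConfig for the actual set of configurable fields.
 * getAIExecutor().updateConfig({ model: 'anthropic/claude-3-sonnet' });
 * ```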
*/
updateConfig(newConfig: Partial<AIConfig>): void;
/**
* Clear cache
*/
clearCache(): void;
/**
* Get cache statistics
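 *
 * @example
 * ```typescript
 * // Sketch: report cache usage; hitRate is assumed to be a fraction between 0 and 1.
 * const { size, hitRate } = getAIExecutor().getCacheStats();
 * console.log(`cache entries: ${size}, hit rate: ${(hitRate * 100).toFixed(1)}%`);
 * ```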
*/
getCacheStats(): {
size: number;
hitRate: number;
};
}
/**
* Get or create the global AI executor instance
*/
export declare function getAIExecutor(): AIExecutor;
/**
* Reset the global AI executor (useful for testing)
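 *
 * @example
 * ```typescript
 * // Sketch: reset the singleton between tests so each test builds a fresh executor
 * // from the current environment (the afterEach hook is a test-runner assumption).
 * afterEach(() => {
 *   resetAIExecutor();
 * });
 * ```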
*/
export declare function resetAIExecutor(): void;
//# sourceMappingURL=ai-executor.d.ts.map