// Package: mcp-adr-analysis-server
// Version: (unspecified)
// MCP server for analyzing Architectural Decision Records and project architecture
// Generated declaration file (TypeScript .d.ts)
/**
* AI Configuration for OpenRouter.ai Integration
*
* This module handles configuration for AI execution capabilities,
* allowing the MCP server to execute prompts internally and return
* actual results instead of prompts.
*/
/**
 * Runtime configuration for AI prompt execution via OpenRouter.ai.
 *
 * Produced by {@link loadAIConfig} and checked by {@link validateAIConfig}.
 * All fields are required except the optional site-attribution fields.
 */
export interface AIConfig {
  /** OpenRouter API key for authentication */
  apiKey: string;
  /** Base URL for OpenRouter API */
  baseURL: string;
  /** Default AI model to use for prompt execution */
  defaultModel: string;
  /** Execution mode: 'full' executes prompts, 'prompt-only' returns prompts */
  executionMode: 'full' | 'prompt-only';
  /** Site URL for OpenRouter rankings (optional) */
  siteUrl?: string;
  /** Site name for OpenRouter rankings (optional) */
  siteName?: string;
  /** Request timeout in milliseconds */
  timeout: number;
  /** Maximum retries for failed requests */
  maxRetries: number;
  /** Temperature for AI responses (0-1) */
  temperature: number;
  /** Maximum tokens for AI responses */
  maxTokens: number;
  /** Enable response caching */
  cacheEnabled: boolean;
  /** Cache TTL in seconds (note: `timeout` above is milliseconds — units differ) */
  cacheTTL: number;
}
/**
 * Metadata describing a single OpenRouter-hosted model.
 *
 * Entries of {@link AVAILABLE_MODELS} conform to this shape; look one up
 * by its OpenRouter id via {@link getModelConfig}.
 */
export interface ModelConfig {
  /** Model identifier for OpenRouter */
  id: string;
  /** Human-readable model name */
  name: string;
  /** Model provider (openai, anthropic, etc.) */
  provider: string;
  /** Cost per 1K tokens (input) */
  inputCost: number;
  /** Cost per 1K tokens (output) */
  outputCost: number;
  /** Maximum context length */
  contextLength: number;
  /** Recommended use cases (free-form tags matched by getRecommendedModel) */
  useCases: string[];
}
/**
 * Available AI models for different use cases.
 *
 * Registry keyed by model identifier; values describe cost, context length
 * and recommended use cases. NOTE(review): key format (OpenRouter id vs.
 * short alias) is defined in the implementation — confirm before relying on it.
 */
export declare const AVAILABLE_MODELS: Record<string, ModelConfig>;
/**
 * Default AI configuration.
 *
 * The `Omit<...> & { siteUrl: string; siteName: string }` type narrows the
 * two optional AIConfig fields to *required* strings here, guaranteeing the
 * defaults always carry site-attribution values.
 */
export declare const DEFAULT_AI_CONFIG: Omit<AIConfig, 'siteUrl' | 'siteName'> & {
  siteUrl: string;
  siteName: string;
};
/**
 * Load AI configuration from environment variables.
 *
 * @returns A fully-populated {@link AIConfig}; unset variables presumably
 *   fall back to {@link DEFAULT_AI_CONFIG} — confirm exact variable names
 *   in the implementation.
 */
export declare function loadAIConfig(): AIConfig;
/**
 * Validate AI configuration.
 *
 * @param config - The configuration to check.
 * @throws NOTE(review): the `void` return implies invalid configs are
 *   reported by throwing — confirm the error type in the implementation.
 */
export declare function validateAIConfig(config: AIConfig): void;
/**
 * Get model configuration by ID.
 *
 * @param modelId - OpenRouter model identifier.
 * @returns The matching {@link ModelConfig}, or `undefined` when the id is
 *   not present in {@link AVAILABLE_MODELS} — callers must handle the miss.
 */
export declare function getModelConfig(modelId: string): ModelConfig | undefined;
/**
 * Check if AI execution is enabled.
 *
 * @param config - Configuration to inspect.
 * @returns `true` when prompts will be executed rather than returned;
 *   presumably requires `executionMode === 'full'` (and likely a non-empty
 *   `apiKey`) — confirm in the implementation.
 */
export declare function isAIExecutionEnabled(config: AIConfig): boolean;
/**
 * Get recommended model for a specific use case.
 *
 * @param useCase - Use-case tag, matched against {@link ModelConfig.useCases}.
 * @param costSensitive - Optional; when `true`, presumably biases the choice
 *   toward cheaper models (lower input/output cost) — confirm in implementation.
 * @returns An OpenRouter model id string.
 */
export declare function getRecommendedModel(useCase: string, costSensitive?: boolean): string;
//# sourceMappingURL=ai-config.d.ts.map