@chainlink/mcp-server

"use strict"; /** * @fileoverview AI Service Factory for dynamic service creation * * Factory class that creates AI service instances based on configuration * and environment variables. Supports multiple AI providers with automatic * API key detection, model configuration, and service availability checking. */ Object.defineProperty(exports, "__esModule", { value: true }); exports.AIServiceFactory = exports.DEFAULT_AI_SERVICE = void 0; const anthropic_service_1 = require("./anthropic-service"); const openai_service_1 = require("./openai-service"); const gemini_service_1 = require("./gemini-service"); const ollama_service_1 = require("./ollama-service"); const logger_1 = require("../utils/logger"); /** * Default AI service used when no explicit service is configured */ exports.DEFAULT_AI_SERVICE = "openai"; /** * Factory class for creating AI service instances * * Provides centralized service creation with automatic configuration detection * from environment variables, API key validation, and support for multiple * AI providers including cloud-based and local services. * * Supported services: * - Anthropic Claude (cloud) * - OpenAI GPT (cloud) * - Google Gemini (cloud) * - Ollama (local) * * @class AIServiceFactory */ class AIServiceFactory { /** * Get API key for a specific service from environment variables * * @private * @static * @param service - The service to get API key for * @returns API key if available, undefined otherwise */ static getApiKey(service) { switch (service) { case exports.DEFAULT_AI_SERVICE: return process.env.OPENAI_API_KEY; case "anthropic": return process.env.ANTHROPIC_API_KEY; case "gemini": return process.env.GEMINI_API_KEY; case "ollama": return "ollama"; // Ollama doesn't need an API key, but we provide a dummy value default: return undefined; } } /** * Get default model for a specific service * * @private * @static * @param service - The service to get default model for * @returns Default model identifier for the service * @throws {Error} When service is not supported */ static getDefaultModel(service) { switch (service) { case exports.DEFAULT_AI_SERVICE: return "gpt-4o"; case "anthropic": return "claude-3-5-sonnet-latest"; case "gemini": return "gemini-1.5-pro"; case "ollama": return process.env.OLLAMA_MODEL || "llama3.2:3b"; default: throw new Error(`Unsupported service: ${service}`); } } /** * Create an AI service instance with automatic configuration * * Detects service configuration from environment variables or provided * config, validates API keys, and creates the appropriate service instance. * Only falls back to default (${DEFAULT_AI_SERVICE}) when NO service is explicitly requested. 
* * Environment variables used: * - MCP_AI_SERVICE: Service to use (default: ${DEFAULT_AI_SERVICE} ONLY if not set) * - MCP_AI_MODEL: Model to use (falls back to service defaults) * - MCP_MAX_TOKENS: Maximum tokens (default: 2000) * - *_API_KEY: API keys for each service (ANTHROPIC_API_KEY, OPENAI_API_KEY, GEMINI_API_KEY) * - OLLAMA_URL: Ollama server URL (default: localhost:11434) * * @static * @param config - Optional partial configuration to override defaults * @returns Configured AI service instance * @throws {Error} When explicitly requested service is not properly configured */ static async createService(config) { // Get explicitly requested service const requestedService = config?.service || process.env.MCP_AI_SERVICE?.toLowerCase(); // Only use default if NO service is explicitly requested const service = (requestedService || exports.DEFAULT_AI_SERVICE); const isExplicitlyRequested = !!requestedService; // Log service selection for debugging if (isExplicitlyRequested) { logger_1.Logger.log("info", `AI service explicitly configured: ${service}`); } else { logger_1.Logger.log("info", `AI service defaulted to: ${service} (no MCP_AI_SERVICE set)`); } // Get model from config or environment variable, or use default const model = config?.model || process.env.MCP_AI_MODEL || this.getDefaultModel(service); // Strict validation: if service is explicitly requested, it must be properly configured if (isExplicitlyRequested) { // For cloud services, check API key availability if (service !== "ollama") { const apiKey = this.getApiKey(service); if (!apiKey) { throw new Error(`❌ AI service '${service}' was explicitly requested but is not properly configured.\n` + ` Missing API key. Please set ${service.toUpperCase()}_API_KEY environment variable.\n` + ` Or remove MCP_AI_SERVICE to use the default service.`); } } // For Ollama, we'll test connectivity during service creation below } else { // For default service, ensure it has proper configuration const apiKey = this.getApiKey(service); if (!apiKey && service !== "ollama") { throw new Error(`API key not found for default service ${service}. Please set ${service.toUpperCase()}_API_KEY environment variable.`); } } const maxTokens = config?.maxTokens || parseInt(process.env.MCP_MAX_TOKENS || "2000"); // Get API key after validation const apiKey = this.getApiKey(service); logger_1.Logger.log("info", `Creating ${service} service with model: ${model}`); switch (service) { case exports.DEFAULT_AI_SERVICE: return new openai_service_1.OpenAIService({ apiKey: apiKey, model, maxTokens, }); case "anthropic": return new anthropic_service_1.AnthropicService({ apiKey: apiKey, model, maxTokens, }); case "gemini": return new gemini_service_1.GeminiService({ apiKey: apiKey, model, maxTokens, }); case "ollama": const ollamaConfig = { apiKey: "ollama", // Dummy value for interface compatibility model, maxTokens, baseURL: config?.baseURL || process.env.OLLAMA_URL || "http://localhost:11434", }; const ollamaService = new ollama_service_1.OllamaService(ollamaConfig); // Test Ollama connectivity immediately during service creation logger_1.Logger.log("info", `Testing Ollama connectivity at ${ollamaConfig.baseURL}...`); try { await ollamaService.testConnection(); logger_1.Logger.log("info", "✅ Ollama connectivity test successful"); } catch (error) { const baseErrorMsg = `❌ Ollama connectivity test failed: ${error}`; const helpMsg = isExplicitlyRequested ? 
`\n Ollama service was explicitly requested but is not running.\n Please start Ollama with: ollama serve\n Or remove MCP_AI_SERVICE=ollama to use a different service.` : `\n Please ensure Ollama is running at ${ollamaConfig.baseURL}`; const fullErrorMsg = baseErrorMsg + helpMsg; logger_1.Logger.log("error", fullErrorMsg); throw new Error(fullErrorMsg); } return ollamaService; default: throw new Error(`❌ Unsupported AI service: '${service}'.\n` + ` Supported services: anthropic, openai, gemini, ollama\n` + ` Please check your MCP_AI_SERVICE environment variable.`); } } /** * Get list of all supported AI services * * @static * @returns Array of supported service identifiers */ static getSupportedServices() { return [exports.DEFAULT_AI_SERVICE, "anthropic", "gemini", "ollama"]; } /** * Check if a specific service is properly configured * * Validates that the required API key is available for cloud services * or that the service is accessible for local services like Ollama. * * @static * @param service - Service to check configuration for * @returns True if service is configured and ready to use */ static isServiceConfigured(service) { if (service === "ollama") { // For Ollama, we'll do runtime checks in the service itself return true; } return !!this.getApiKey(service); } } exports.AIServiceFactory = AIServiceFactory; //# sourceMappingURL=service-factory.js.map
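For reference, a minimal consumer sketch. This is not part of the package's published examples; the relative require path and the hypothetical main() wrapper are assumptions, and only the static methods defined above are used:

// Hypothetical consumer of AIServiceFactory (path assumed from the
// sourceMappingURL above; adjust to wherever the dist file lives).
const { AIServiceFactory } = require("./service-factory");

async function main() {
    // isServiceConfigured() only checks API key presence for cloud services;
    // Ollama defers its connectivity check to createService().
    if (!AIServiceFactory.isServiceConfigured("openai")) {
        console.error("OPENAI_API_KEY not set; set it or choose another MCP_AI_SERVICE.");
        return;
    }
    console.log("Supported services:", AIServiceFactory.getSupportedServices());
    // All config fields are optional overrides; anything omitted falls back
    // to MCP_AI_SERVICE / MCP_AI_MODEL / MCP_MAX_TOKENS or the built-in defaults.
    const service = await AIServiceFactory.createService({ maxTokens: 1000 });
}

main().catch((err) => console.error(err.message));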