context-optimizer-mcp-server
Version:
Context optimization tools MCP server for AI coding assistants - compatible with GitHub Copilot, Cursor AI, and other MCP-supporting assistants
35 lines • 1.32 kB
JavaScript
/**
* LLM provider factory and abstractions
*
* Provides unified interface for different LLM providers (Gemini, Claude, OpenAI)
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.LLMProviderFactory = void 0;
const gemini_1 = require("./gemini");
const claude_1 = require("./claude");
const openai_1 = require("./openai");
/**
 * Factory that hands out per-name singleton LLM provider instances.
 *
 * Supported names: 'gemini', 'claude', 'openai'.
 */
class LLMProviderFactory {
    // Cache of constructed providers, keyed by provider name, so each
    // provider is built at most once per process.
    static providers = new Map();
    /**
     * Return the provider instance for `providerName`, constructing it on
     * first request and reusing the cached instance afterwards.
     *
     * @param {string} providerName - One of 'gemini', 'claude', 'openai'.
     * @returns {object} The cached provider instance for that name.
     * @throws {Error} If `providerName` is not a recognized provider.
     */
    static createProvider(providerName) {
        // Fast path: already constructed — hand back the cached singleton.
        if (this.providers.has(providerName)) {
            return this.providers.get(providerName);
        }
        let provider;
        if (providerName === 'gemini') {
            provider = new gemini_1.GeminiProvider();
        } else if (providerName === 'claude') {
            provider = new claude_1.ClaudeProvider();
        } else if (providerName === 'openai') {
            provider = new openai_1.OpenAIProvider();
        } else {
            throw new Error(`Unknown provider: ${providerName}`);
        }
        this.providers.set(providerName, provider);
        return provider;
    }
}
exports.LLMProviderFactory = LLMProviderFactory;
//# sourceMappingURL=factory.js.map
;