context-optimizer-mcp-server
Version:
Context optimization tools MCP server for AI coding assistants - compatible with GitHub Copilot, Cursor AI, and other MCP-supporting assistants
14 lines • 470 B
TypeScript
/**
* LLM provider factory and abstractions
*
* Provides unified interface for different LLM providers (Gemini, Claude, OpenAI)
*/
import { LLMResponse } from './base';
/**
 * Unified contract implemented by each concrete LLM provider
 * (per the file header: Gemini, Claude, OpenAI).
 */
export interface LLMProvider {
/**
 * Forwards a prompt to the underlying LLM service and resolves with its reply.
 *
 * @param prompt - The text prompt to send to the model.
 * @param model - Optional model identifier; presumably the provider picks its
 *   own default when omitted — TODO confirm against implementations.
 * @param apiKey - Optional API key; NOTE(review): likely falls back to
 *   environment/config credentials when omitted — verify in provider code.
 * @returns The provider's reply wrapped in an {@link LLMResponse}.
 */
processRequest(prompt: string, model?: string, apiKey?: string): Promise<LLMResponse>;
}
/**
 * Factory that maps a provider name to an {@link LLMProvider} instance.
 * Declaration only — construction/caching logic lives in the implementation
 * file (factory.ts), not visible here.
 */
export declare class LLMProviderFactory {
// Internal provider registry. NOTE(review): its type is erased in this
// .d.ts (`private` members emit untyped); confirm shape in factory.ts.
private static providers;
/**
 * Returns the {@link LLMProvider} registered under the given name.
 *
 * @param providerName - Provider identifier — presumably "gemini",
 *   "claude", or "openai" given the file header; confirm accepted values
 *   and the behavior for unknown names (likely throws) in factory.ts.
 * @returns The matching provider instance.
 */
static createProvider(providerName: string): LLMProvider;
}
//# sourceMappingURL=factory.d.ts.map