context-optimizer-mcp-server
Version: (not captured)
Context optimization tools MCP server for AI coding assistants - compatible with GitHub Copilot, Cursor AI, and other MCP-supporting assistants
29 lines • 954 B
TypeScript
/**
* Abstract base class for LLM providers
*
* Provides common functionality for all LLM provider implementations
*/
/**
 * Normalized result of a single LLM request, shared by all providers.
 *
 * Produced by `BaseLLMProvider.createSuccessResponse` /
 * `createErrorResponse` (declared below).
 */
export interface LLMResponse {
  /** Whether the request completed successfully. */
  success: boolean;
  /** Text returned by the model. NOTE(review): contents on failure not visible from this declaration — presumably empty; confirm in the implementation. */
  content: string;
  /** Error description; optional — presumably only set when `success` is false. */
  error?: string;
}
/**
 * Abstract base class for LLM providers.
 *
 * Concrete per-vendor providers extend this class: they supply the
 * metadata declared by the abstract readonly fields and implement
 * `processRequest`. The protected helpers give subclasses a common way
 * to build a request payload and to produce uniform
 * success/error `LLMResponse` values.
 */
export declare abstract class BaseLLMProvider {
  /** Provider identifier (e.g. shown to users / used for lookup — exact use not visible here). */
  abstract readonly name: string;
  /** Model used when the caller does not pass an explicit `model` to `processRequest`. */
  abstract readonly defaultModel: string;
  /** URL where a user can obtain an API key for this provider. */
  abstract readonly apiKeyUrl: string;
  /** Expected prefix of this provider's API keys (presumably for key validation), or `undefined` when keys have no fixed prefix. */
  abstract readonly apiKeyPrefix: string | undefined;
  /**
   * Send `prompt` to the provider and return a normalized response.
   *
   * @param prompt - The user prompt to submit.
   * @param model - Optional model override; presumably falls back to `defaultModel` when omitted — confirm in implementations.
   * @param apiKey - Optional API key; sourcing when omitted is not visible from this declaration.
   * @returns A settled `LLMResponse`; failures are expected to be reported via `success: false` rather than rejection (NOTE(review): confirm — the declaration cannot express this).
   */
  abstract processRequest(prompt: string, model?: string, apiKey?: string): Promise<LLMResponse>;
  /**
   * Build the common chat-completion-style request body: a single
   * `"user"` message wrapping `prompt`, plus `model`, `temperature`,
   * and `max_tokens`. The concrete temperature/max_tokens values are
   * set in the implementation, not visible in this declaration.
   */
  protected createStandardRequest(prompt: string, model: string): {
    model: string;
    temperature: number;
    max_tokens: number;
    messages: {
      role: "user";
      content: string;
    }[];
  };
  /** Wrap `content` in an `LLMResponse` — presumably with `success: true`; implementation not visible here. */
  protected createSuccessResponse(content: string): LLMResponse;
  /** Wrap `error` in a failed `LLMResponse` — presumably `success: false` with `error` set; implementation not visible here. */
  protected createErrorResponse(error: string): LLMResponse;
}
//# sourceMappingURL=base.d.ts.map