context-optimizer-mcp-server
Version:
Context optimization tools MCP server for AI coding assistants - compatible with GitHub Copilot, Cursor AI, and other MCP-supporting assistants
12 lines • 461 B
TypeScript
/**
* OpenAI provider implementation
*/
import { BaseLLMProvider, LLMResponse } from './base';
/**
 * LLM provider backed by the OpenAI API.
 *
 * Ambient declaration (compiled `.d.ts`) — the implementation lives in the
 * corresponding `.js` file. Extends {@link BaseLLMProvider}, overriding the
 * provider-identity fields with OpenAI-specific literal values.
 */
export declare class OpenAIProvider extends BaseLLMProvider {
/** Display name identifying this provider. Literal type `"OpenAI"`. */
readonly name = "OpenAI";
/** Model used when the caller does not specify one. Literal type `"gpt-4o-mini"`. */
readonly defaultModel = "gpt-4o-mini";
/** URL where users can create/manage an OpenAI API key (for setup guidance/errors). */
readonly apiKeyUrl = "https://platform.openai.com/api-keys";
/** Expected prefix of a valid OpenAI API key, used for lightweight key validation. */
readonly apiKeyPrefix = "sk-";
/**
 * Process a prompt through the provider and resolve with the LLM's response.
 *
 * NOTE(review): implementation is not visible from this declaration file —
 * presumably it calls the OpenAI chat/completions API; confirm against the
 * emitted JS. Behavior when `apiKey` is omitted (e.g. env-var fallback or
 * rejection) is likewise defined by the implementation/base class.
 *
 * @param prompt - The prompt text to send to the model.
 * @param model - Optional model identifier; falls back to {@link defaultModel} per the field above — TODO confirm in implementation.
 * @param apiKey - Optional OpenAI API key (expected to start with {@link apiKeyPrefix}).
 * @returns A promise resolving to the provider's {@link LLMResponse}.
 */
processRequest(prompt: string, model?: string, apiKey?: string): Promise<LLMResponse>;
}
//# sourceMappingURL=openai.d.ts.map