/**
 * ultimate-mcp-server — models.d.ts
 * The definitive all-in-one Model Context Protocol server for AI-assisted
 * coding across 30+ platforms.
 *
 * (Registry-page metadata from the scrape: Version unspecified;
 * 136 lines, 4.11 kB, TypeScript.)
 */
/**
 * Registry of known model identifiers, keyed by CONSTANT_CASE alias.
 * All values are strings; judging by the keys of MODEL_CONTEXT_LIMITS in
 * this file, they are presumably "provider/model-slug" IDs (e.g.
 * "openai/gpt-4o") — TODO confirm against the emitting models source.
 * Generated declaration (see sourceMappingURL) — do not edit by hand.
 */
export declare const MODELS: {
// OpenAI chat models
GPT_4O: string;
GPT_4O_MINI: string;
GPT_4_TURBO: string;
GPT_4: string;
GPT_3_5_TURBO: string;
// Anthropic Claude models
CLAUDE_3_OPUS: string;
CLAUDE_3_SONNET: string;
CLAUDE_3_HAIKU: string;
CLAUDE_3_5_SONNET: string;
CLAUDE_3_5_SONNET_LATEST: string;
// Google Gemini models
GEMINI_2_PRO: string;
GEMINI_2_FLASH: string;
GEMINI_PRO: string;
GEMINI_PRO_VISION: string;
GEMINI_2_FLASH_THINKING: string;
// xAI Grok models
GROK_2: string;
GROK_2_VISION: string;
GROK_3_BETA: string;
GROK_4: string;
// Meta Llama models
LLAMA_3_3_70B: string;
LLAMA_3_1_405B: string;
LLAMA_3_1_70B: string;
LLAMA_3_1_8B: string;
LLAMA_3_2_90B_VISION: string;
LLAMA_3_2_11B_VISION: string;
LLAMA_3_2_3B: string;
LLAMA_3_2_1B: string;
// Mistral AI models
MISTRAL_LARGE: string;
MISTRAL_MEDIUM: string;
MISTRAL_SMALL: string;
MIXTRAL_8X7B: string;
MIXTRAL_8X22B: string;
CODESTRAL: string;
PIXTRAL_LARGE: string;
// DeepSeek models
DEEPSEEK_V3: string;
DEEPSEEK_CHAT: string;
DEEPSEEK_CODER_V2: string;
DEEPSEEK_R1: string;
DEEPSEEK_R1_LITE: string;
// Alibaba Qwen models
QWEN_2_5_CODER_32B: string;
QWEN_2_5_72B: string;
QWEN_2_5_14B: string;
QWEN_2_5_7B: string;
QWQ_32B_PREVIEW: string;
// Moonshot AI Kimi models
KIMI_K2: string;
KIMI_K1: string;
QWEN_VL_MAX: string;
MARCO_O1: string;
// Cohere Command models
COMMAND_R_PLUS: string;
COMMAND_R: string;
// AI21 Jamba models
JAMBA_1_5_LARGE: string;
JAMBA_1_5_MINI: string;
// Databricks / NVIDIA / Perplexity
DBRX_INSTRUCT: string;
LLAMA_3_1_NEMOTRON_70B: string;
PERPLEXITY_ONLINE: string;
PERPLEXITY_CHAT: string;
// OpenAI o1 reasoning models
O1_PREVIEW: string;
O1_MINI: string;
// Role-based aliases — presumably each resolves to one of the concrete
// model IDs above (which one is decided in the implementation; confirm
// against the emitting models source before relying on a specific model).
ANALYSIS_MODEL: string;
SYNTHESIS_MODEL: string;
CODE_MODEL: string;
DEBATE_MODEL: string;
REASONING_MODEL: string;
VISION_MODEL: string;
};
/**
 * Per-model context-window limits, keyed by "provider/model-slug" ID.
 * Values are numbers; units are presumably tokens — TODO confirm in the
 * emitting source. Models absent from this map get whatever default the
 * implementation applies (not visible from this declaration).
 */
export declare const MODEL_CONTEXT_LIMITS: {
// Google
"google/gemini-2.5-pro": number;
"google/gemini-2.5-flash": number;
"google/gemini-pro": number;
"google/gemini-2.0-flash-thinking-exp": number;
// OpenAI
"openai/gpt-4o": number;
"openai/gpt-4o-mini": number;
"openai/gpt-4-turbo": number;
"openai/gpt-4": number;
"openai/o1-preview": number;
"openai/o1-mini": number;
// Anthropic
"anthropic/claude-3-opus": number;
"anthropic/claude-3-sonnet": number;
"anthropic/claude-3-haiku": number;
"anthropic/claude-3.5-sonnet": number;
"anthropic/claude-3.5-sonnet-20241022": number;
// xAI
"x-ai/grok-2": number;
"x-ai/grok-2-vision": number;
"x-ai/grok-3-beta": number;
// NOTE(review): key says "grok-4-beta" while MODELS exposes GROK_4 —
// verify both refer to the same model ID in the implementation.
"x-ai/grok-4-beta": number;
// Meta
"meta-llama/llama-3.3-70b-instruct": number;
"meta-llama/llama-3.1-405b-instruct": number;
"meta-llama/llama-3.1-70b-instruct": number;
"meta-llama/llama-3.2-90b-vision-instruct": number;
// Moonshot AI
"moonshotai/kimi-k2-1t": number;
"moonshotai/kimi-k1": number;
// DeepSeek
"deepseek/deepseek-v3": number;
"deepseek/deepseek-r1": number;
"deepseek/deepseek-coder-v2-instruct": number;
// Qwen
"qwen/qwen-2.5-coder-32b-instruct": number;
"qwen/qwen-2.5-72b-instruct": number;
"qwen/qwq-32b-preview": number;
// Mistral AI
"mistralai/mistral-large-2411": number;
"mistralai/codestral-2501": number;
"mistralai/mixtral-8x22b-instruct": number;
};
/**
 * Capability buckets: each key names a specialization and holds an array of
 * strings — presumably model IDs best suited to that capability (membership
 * is defined in the emitting source, not visible here).
 */
export declare const MODEL_SPECIALIZATIONS: {
coding: string[];
reasoning: string[];
vision: string[];
// Models suited to large inputs ("longContext") — limits per model are in
// MODEL_CONTEXT_LIMITS in this file.
longContext: string[];
efficiency: string[];
// "online" presumably marks models with live web access (e.g. the
// PERPLEXITY_ONLINE alias in MODELS) — TODO confirm.
online: string[];
};
/**
 * Maps a model ID to a single replacement model ID (string). Presumably
 * consulted when the keyed model is unavailable or deprecated — confirm the
 * lookup semantics (and whether fallbacks chain) in the implementation.
 * Several keys here (e.g. "google/gemini-2.0-flash-exp") do not appear in
 * MODEL_CONTEXT_LIMITS above.
 */
export declare const FALLBACK_MODELS: {
"google/gemini-pro": string;
"google/gemini-pro-vision": string;
"google/gemini-2.0-flash-exp": string;
"google/gemini-2.5-flash-exp": string;
"x-ai/grok-4-beta": string;
"x-ai/grok-3-beta": string;
"deepseek/deepseek-r1": string;
"deepseek/deepseek-r1-lite-preview": string;
"anthropic/claude-3.5-sonnet-20241022": string;
"moonshotai/kimi-k2-1t": string;
};
/**
 * Named category constants (string values — presumably category labels used
 * as keys/tags elsewhere; exact values live in the emitting source).
 */
export declare const MODEL_CATEGORIES: {
GENERAL: string;
CODING: string;
VISION: string;
REASONING: string;
CREATIVE: string;
ANALYSIS: string;
};
/**
 * Pricing buckets: each tier holds an array of strings — presumably model
 * IDs grouped by relative cost (premium > standard > economy). Membership
 * and the cost thresholds are defined in the emitting source.
 */
export declare const MODEL_PRICING_TIERS: {
premium: string[];
standard: string[];
economy: string[];
};
//# sourceMappingURL=models.d.ts.map