ultimate-mcp-server
The definitive all-in-one Model Context Protocol server for AI-assisted coding across 30+ platforms
JavaScript
/**
* Large Context Analysis Types (Inspired by Consult7)
*
* Enables analysis of massive codebases and file collections
* beyond typical context window limitations
*/
export const LARGE_CONTEXT_MODELS = [
  {
    name: 'google/gemini-2.5-flash',
    provider: 'google',
    contextWindow: 1048576, // 1M tokens
    costPer1kTokens: {
      input: 0.00015,
      output: 0.0006
    },
    features: {
      reasoning: true,
      vision: true,
      functionCalling: true,
      streaming: true
    }
  },
  {
    name: 'google/gemini-2.5-pro',
    provider: 'google',
    contextWindow: 2097152, // 2M tokens
    costPer1kTokens: {
      input: 0.0015,
      output: 0.006
    },
    features: {
      reasoning: true,
      vision: true,
      functionCalling: true,
      streaming: true
    }
  },
  {
    name: 'anthropic/claude-3-opus',
    provider: 'anthropic',
    contextWindow: 200000,
    costPer1kTokens: {
      input: 0.015,
      output: 0.075
    },
    features: {
      reasoning: true,
      vision: true,
      functionCalling: true,
      streaming: true
    }
  },
  {
    name: 'openai/gpt-4-128k',
    provider: 'openai',
    contextWindow: 128000,
    costPer1kTokens: {
      input: 0.01,
      output: 0.03
    },
    features: {
      reasoning: true,
      vision: true,
      functionCalling: true,
      streaming: true
    }
  },
  {
    name: 'qwen/qwen-2.5-72b-turbo',
    provider: 'openrouter',
    contextWindow: 128000,
    costPer1kTokens: {
      input: 0.0005,
      output: 0.0015
    },
    features: {
      reasoning: true,
      functionCalling: true,
      streaming: true
    }
  }
];
//# sourceMappingURL=types.js.map
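
A minimal usage sketch of how this registry might be consumed: pick the cheapest model whose context window fits an estimated token count, then estimate the request cost from costPer1kTokens. The import path './types.js', the helper names, and the ~4 characters-per-token heuristic are illustrative assumptions, not part of the package's documented API.

// Sketch only: helper names and import path are assumptions for illustration.
import { LARGE_CONTEXT_MODELS } from './types.js';

// Rough token estimate: ~4 characters per token for English text and code.
function estimateTokens(text) {
  return Math.ceil(text.length / 4);
}

// Cheapest model (by input price) whose context window fits the token count.
function selectModelForContext(tokenCount) {
  return LARGE_CONTEXT_MODELS
    .filter((m) => m.contextWindow >= tokenCount)
    .sort((a, b) => a.costPer1kTokens.input - b.costPer1kTokens.input)[0] ?? null;
}

// Estimated cost for a single request, assuming costPer1kTokens is per 1,000 tokens.
function estimateCost(model, inputTokens, outputTokens) {
  return (
    (inputTokens / 1000) * model.costPer1kTokens.input +
    (outputTokens / 1000) * model.costPer1kTokens.output
  );
}

// Example: a ~3M-character codebase dump (~750k tokens) with a 4k-token answer.
const inputTokens = estimateTokens('x'.repeat(3_000_000));
const model = selectModelForContext(inputTokens);
if (model) {
  console.log(model.name, estimateCost(model, inputTokens, 4000).toFixed(4));
}

With the table above, the example selects google/gemini-2.5-flash, since it is the lowest-cost entry whose contextWindow covers roughly 750k input tokens.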