// giga-code: a personal AI CLI assistant powered by Grok for local development.
// dynamic-model-fetcher.js (compiled JavaScript output)
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.fetchModelsWithFallback = exports.fetchProviderModels = void 0;
const provider_models_1 = require("./provider-models");
const ollama_models_1 = require("./ollama-models");
// Fetch models from OpenRouter API
async function fetchOpenRouterModels(apiKey) {
    try {
        const response = await fetch('https://openrouter.ai/api/v1/models', {
            headers: {
                'Authorization': `Bearer ${apiKey}`,
            },
            signal: AbortSignal.timeout(10000),
        });
        if (!response.ok) {
            return { success: false, models: [], error: `HTTP ${response.status}` };
        }
        const data = await response.json();
        const models = data.data || data;
        const modelInfos = Array.isArray(models)
            ? models.map((model) => ({
                id: model.id,
                name: model.name,
                description: model.description,
                provider: model.id.split('/')[0],
                context_length: model.context_length
            }))
            : [];
        const modelIds = modelInfos.map(m => m.id).filter(Boolean);
        return { success: true, models: modelIds, allModels: modelInfos };
    }
    catch (error) {
        return { success: false, models: [], error: error.message };
    }
}
// Fetch models from Google AI API
async function fetchGoogleModels(apiKey) {
    try {
        const response = await fetch(`https://generativelanguage.googleapis.com/v1beta/models?key=${apiKey}`, {
            signal: AbortSignal.timeout(10000),
        });
        if (!response.ok) {
            return { success: false, models: [], error: `HTTP ${response.status}` };
        }
        const data = await response.json();
        const models = data.models || [];
        const modelIds = models
            .map((model) => model.name?.replace('models/', '') || model.id)
            .filter(Boolean);
        return { success: true, models: modelIds };
    }
    catch (error) {
        return { success: false, models: [], error: error.message };
    }
}
// Fetch models from xAI API
async function fetchXaiModels(apiKey) {
    try {
        const response = await fetch('https://api.x.ai/v1/models', {
            headers: {
                'Authorization': `Bearer ${apiKey}`,
            },
            signal: AbortSignal.timeout(10000),
        });
        if (!response.ok) {
            return { success: false, models: [], error: `HTTP ${response.status}` };
        }
        const data = await response.json();
        const models = data.data || data;
        const modelIds = Array.isArray(models)
            ? models.map((model) => model.id).filter(Boolean)
            : [];
        return { success: true, models: modelIds };
    }
    catch (error) {
        return { success: false, models: [], error: error.message };
    }
}
// Fetch models from Groq API
async function fetchGroqModels(apiKey) {
    try {
        const response = await fetch('https://api.groq.com/openai/v1/models', {
            headers: {
                'Authorization': `Bearer ${apiKey}`,
            },
            signal: AbortSignal.timeout(10000),
        });
        if (!response.ok) {
            return { success: false, models: [], error: `HTTP ${response.status}` };
        }
        const data = await response.json();
        const models = data.data || [];
        const modelIds = models.map((model) => model.id).filter(Boolean);
        return { success: true, models: modelIds };
    }
    catch (error) {
        return { success: false, models: [], error: error.message };
    }
}
// Fetch models from Cerebras API (currently unused: fetchProviderModels returns the static Cerebras list instead)
async function fetchCerebrasModels(apiKey) {
    try {
        const response = await fetch('https://api.cerebras.ai/v1/models', {
            headers: {
                'Authorization': `Bearer ${apiKey}`,
            },
            signal: AbortSignal.timeout(10000),
        });
        if (!response.ok) {
            return { success: false, models: [], error: `HTTP ${response.status}` };
        }
        const data = await response.json();
        const models = data.data || [];
        const modelIds = models.map((model) => model.id).filter(Boolean);
        return { success: true, models: modelIds };
    }
    catch (error) {
        return { success: false, models: [], error: error.message };
    }
}
// Fetch models from OpenAI API
async function fetchOpenaiModels(apiKey) {
    try {
        const response = await fetch('https://api.openai.com/v1/models', {
            headers: {
                'Authorization': `Bearer ${apiKey}`,
            },
            signal: AbortSignal.timeout(10000),
        });
        if (!response.ok) {
            return { success: false, models: [], error: `HTTP ${response.status}` };
        }
        const data = await response.json();
        const models = data.data || [];
        const modelIds = models.map((model) => model.id).filter(Boolean);
        return { success: true, models: modelIds };
    }
    catch (error) {
        return { success: false, models: [], error: error.message };
    }
}
// Fetch models from Ollama API
async function fetchOllamaModelsFromApi(baseUrl) {
    try {
        const models = await (0, ollama_models_1.fetchOllamaModels)(baseUrl);
        return { success: true, models };
    }
    catch (error) {
        return { success: false, models: [], error: error.message };
    }
}
// Main function to fetch models for a provider
async function fetchProviderModels(provider, apiKey) {
    // For Ollama, the apiKey parameter is actually the server base URL
    if (provider.toLowerCase() === 'ollama') {
        const baseUrl = apiKey || 'http://localhost:11434';
        return await fetchOllamaModelsFromApi(baseUrl);
    }
    if (!apiKey || !apiKey.trim()) {
        return { success: false, models: [], error: 'API key is required' };
    }
    try {
        switch (provider.toLowerCase()) {
            case 'openrouter':
                return await fetchOpenRouterModels(apiKey);
            case 'google':
                return await fetchGoogleModels(apiKey);
            case 'xai':
                return await fetchXaiModels(apiKey);
            case 'groq':
                return await fetchGroqModels(apiKey);
            case 'cerebras':
                // Cerebras uses the static list because its API doesn't return correct model names
                return { success: true, models: provider_models_1.PROVIDER_MODELS.cerebras };
            case 'openai':
                return await fetchOpenaiModels(apiKey);
            case 'anthropic':
                // Anthropic doesn't have a public models endpoint, so use the static list
                return { success: true, models: provider_models_1.PROVIDER_MODELS.anthropic };
            default:
                return { success: false, models: [], error: 'Unknown provider' };
        }
    }
    catch (error) {
        return { success: false, models: [], error: error.message };
    }
}
exports.fetchProviderModels = fetchProviderModels;
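// Illustrative calling convention (a sketch added for clarity, not part of the
// original source): the second argument is an API key for hosted providers, but
// for Ollama it is the server base URL. The Groq env-var name below is an
// assumption used for demonstration only.
//
//   const local = await fetchProviderModels('ollama', 'http://localhost:11434');
//   const hosted = await fetchProviderModels('groq', process.env.GROQ_API_KEY);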
// Fetch models with fallback to static list
async function fetchModelsWithFallback(provider, apiKey) {
    const result = await fetchProviderModels(provider, apiKey);
    if (result.success && result.models.length > 0) {
        return { models: result.models.sort(), allModels: result.allModels };
    }
    // Fall back to the static list; normalize the key and copy before sorting
    // so the shared PROVIDER_MODELS array isn't mutated
    const staticModels = provider_models_1.PROVIDER_MODELS[provider.toLowerCase()] || [];
    return { models: [...staticModels].sort() };
}
exports.fetchModelsWithFallback = fetchModelsWithFallback;
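// Illustrative usage sketch (added for clarity, not part of the original build):
// when this file is executed directly, fetch the model list for one provider and
// print it. The provider name and the OPENROUTER_API_KEY variable are assumptions
// chosen for demonstration; fetchModelsWithFallback falls back to the static
// PROVIDER_MODELS list if the live request fails or returns nothing.
if (require.main === module) {
    (async () => {
        const { models } = await fetchModelsWithFallback('openrouter', process.env.OPENROUTER_API_KEY);
        console.log(`Loaded ${models.length} models`);
        for (const id of models.slice(0, 10)) {
            console.log(`  ${id}`);
        }
    })();
}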
//# sourceMappingURL=dynamic-model-fetcher.js.map