capsule-ai-cli
The AI Model Orchestrator - Intelligent multi-model workflows with device-locked licensing
JavaScript
import { OpenRouterProvider } from './openrouter.js';
import { LocalProvider } from './local.js';
import { configManager } from '../core/config.js';
import { localModelsService } from '../services/local-models.js';
export class ProviderFactory {
    static async create(providerName) {
        const config = configManager.getConfig();
        // Local servers (Ollama, LM Studio, llama.cpp) are constructed
        // directly and never need an API key.
        if (this.isLocalProvider(providerName)) {
            return this.createLocalModelProvider(providerName, config);
        }
        // Remote requests route through OpenRouter for every auth state:
        // unauthenticated, base-tier, and paid-tier sessions alike.
        return this.createOpenRouterProvider(providerName, config);
    }
    static isLocalProvider(providerName) {
        return ['local', 'ollama', 'lmstudio', 'llamacpp'].includes(providerName.toLowerCase());
    }
    static async createLocalModelProvider(providerName, config) {
        const localConfig = config.providers[providerName] || {};
        const provider = new LocalProvider(localConfig.baseUrl);
        // Default base URLs for the known local servers.
        const defaultUrls = {
            ollama: 'http://localhost:11434',
            lmstudio: 'http://localhost:1234',
            llamacpp: 'http://localhost:8080',
        };
        if (providerName in defaultUrls) {
            provider.name = providerName;
            if (!localConfig.baseUrl) {
                provider.baseUrl = defaultUrls[providerName];
            }
        }
        else {
            // Generic 'local' provider: ask the local-models service for
            // whichever known server is currently reachable.
            provider.name = 'local';
            if (!localConfig.baseUrl) {
                const availableUrl = await localModelsService.getAvailableServerUrl();
                if (availableUrl) {
                    provider.baseUrl = availableUrl;
                }
            }
        }
        const isAvailable = await provider.isAvailable();
        if (!isAvailable) {
            throw new Error(`Local model server not available at ${provider.baseUrl}\n` +
                `Please ensure your local model server is running:\n` +
                `- For Ollama: ollama serve\n` +
                `- For LM Studio: Start the server from the app\n` +
                `- For llama.cpp: ./llama-server --port 8080`);
        }
        return provider;
    }
    static createOpenRouterProvider(providerName, config) {
        // Keyed by the requested provider name, but all remote traffic is
        // served by OpenRouter and therefore needs the OpenRouter API key.
        const openrouterConfig = config.providers.openrouter;
        if (!openrouterConfig?.apiKey) {
            throw new Error(`No OpenRouter API key found.\n` +
                `Please set it with: capsule config set providers.openrouter.apiKey YOUR_KEY`);
        }
        const provider = new OpenRouterProvider(openrouterConfig.apiKey);
        provider.name = providerName;
        return provider;
    }
    static async validateProvider(providerName) {
        try {
            await this.create(providerName);
            return { valid: true };
        }
        catch (error) {
            return { valid: false, error: error instanceof Error ? error.message : String(error) };
        }
    }
}
//# sourceMappingURL=factory.js.map
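Usage sketch (illustrative only; the import path and the surrounding wiring are assumptions, not part of this file):

// Hypothetical consumer of ProviderFactory; the relative import path is
// an assumption about the package layout.
import { ProviderFactory } from './providers/factory.js';

async function resolveProvider(name) {
    // validateProvider never throws: it wraps create() and reports any
    // failure as { valid: false, error }.
    const check = await ProviderFactory.validateProvider(name);
    if (!check.valid) {
        console.error(`Provider '${name}' unavailable: ${check.error}`);
        return null;
    }
    // create() returns a LocalProvider for ollama/lmstudio/llamacpp/local,
    // otherwise an OpenRouterProvider.
    return ProviderFactory.create(name);
}

const provider = await resolveProvider('ollama');
if (provider) {
    // name and baseUrl are set by the factory for local providers.
    console.log(`Using ${provider.name} at ${provider.baseUrl}`);
}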