@oliverpople/agency-x
Version:
🚀 **Transform feature requests into production-ready code in seconds**
42 lines (34 loc) • 1.36 kB
text/typescript
import { getClaudeClient } from './claudeClient';
import { getOpenAIClient } from './openaiClient';
// Provider preferred by getLlmClient(). Annotated with the union type so the
// value stays narrowed to 'claude' | 'openai' at every use site (the original
// `let` inferred plain `string`, which forced a type assertion downstream).
let defaultProvider: 'claude' | 'openai' = 'claude'; // Prefer Anthropic as documented

/**
 * Overrides which provider getLlmClient() tries first.
 * @param provider - The provider to prefer: 'claude' or 'openai'.
 */
export const setDefaultProvider = (provider: 'claude' | 'openai') => {
  defaultProvider = provider;
};
// Reports whether the given provider has an API key configured in the
// environment. Claude accepts either ANTHROPIC_API_KEY or CLAUDE_API_KEY;
// OpenAI requires OPENAI_API_KEY. Empty strings count as missing.
const isProviderAvailable = (provider: 'claude' | 'openai'): boolean => {
  const candidateKeys =
    provider === 'claude'
      ? [process.env.ANTHROPIC_API_KEY, process.env.CLAUDE_API_KEY]
      : [process.env.OPENAI_API_KEY];
  return candidateKeys.some(Boolean);
};
/**
 * Returns a client for the preferred LLM provider, falling back to whichever
 * other provider has an API key configured.
 *
 * Order: the module's default provider is tried first; if its key is missing,
 * the other provider is tried with a console warning. Fallback preference
 * matches the original logic: anything other than 'openai' falls back through
 * Claude first.
 *
 * @returns The Claude or OpenAI client from the respective factory.
 * @throws Error when neither ANTHROPIC_API_KEY/CLAUDE_API_KEY nor
 *   OPENAI_API_KEY is set.
 */
export const getLlmClient = () => {
  // Runtime narrowing instead of an `as 'claude' | 'openai'` assertion: any
  // value other than 'openai' is treated as 'claude', which mirrors the
  // original's fallback ordering for unexpected values.
  const primary: 'claude' | 'openai' =
    defaultProvider === 'openai' ? 'openai' : 'claude';

  if (isProviderAvailable(primary)) {
    return primary === 'claude' ? getClaudeClient() : getOpenAIClient();
  }

  // Primary is unavailable — try the other provider.
  const fallback: 'claude' | 'openai' = primary === 'claude' ? 'openai' : 'claude';
  if (isProviderAvailable(fallback)) {
    // Warning text reproduced exactly from the original branches.
    console.warn(
      fallback === 'claude'
        ? 'Primary LLM provider not available, falling back to Claude'
        : 'Primary LLM provider not available, falling back to OpenAI'
    );
    return fallback === 'claude' ? getClaudeClient() : getOpenAIClient();
  }

  throw new Error('No LLM provider available. Please set ANTHROPIC_API_KEY or OPENAI_API_KEY environment variables.');
};