behemoth-cli
Version:
🌍 BEHEMOTH CLI v3.760.4 - Level 50+ POST-SINGULARITY Intelligence Trading AI
289 lines (248 loc) • 7.85 kB
text/typescript
import Groq from 'groq-sdk';
import OpenAI from 'openai';
import { MultiProviderConfigManager, PROVIDERS } from '../utils/multi-provider-config.js';
/**
 * One message in a chat conversation, in the OpenAI-compatible shape that
 * every provider in this module accepts.
 */
export interface ChatCompletionMessage {
role: 'system' | 'user' | 'assistant' | 'tool';
content: string;
// Tool invocations attached to an assistant message (provider-specific shape).
tool_calls?: any[];
// NOTE(review): presumably links a 'tool'-role message back to the tool_call
// it answers — confirm against the providers' API docs.
tool_call_id?: string;
}
/**
 * Provider-neutral chat completion request. Field names mirror the
 * OpenAI-style wire format so they can be passed to each SDK directly.
 */
export interface ChatCompletionRequest {
model: string;
messages: ChatCompletionMessage[];
temperature?: number;
max_tokens?: number;
// Tool/function definitions and selection policy, forwarded verbatim to the SDK.
tools?: any[];
tool_choice?: any;
// When true, callers should use createStreamingChatCompletion instead.
stream?: boolean;
}
/**
 * Non-streaming chat completion result, matching the OpenAI-style response
 * body returned by all three SDK clients.
 */
export interface ChatCompletionResponse {
id: string;
object: string;
created: number;
model: string;
choices: {
index: number;
// content is null when the model responded with tool_calls only.
message: {
role: string;
content: string | null;
tool_calls?: any[];
};
finish_reason: string;
}[];
// Token accounting; optional because not every provider reports it.
usage?: {
prompt_tokens: number;
completion_tokens: number;
total_tokens: number;
};
}
/**
 * One incremental chunk of a streaming chat completion. `delta` carries only
 * the fields that changed since the previous chunk.
 */
export interface StreamingChatCompletionChunk {
id: string;
object: string;
created: number;
model: string;
choices: {
index: number;
delta: {
role?: string;
content?: string;
tool_calls?: any[];
};
// Present only on the terminating chunk of a choice.
finish_reason?: string;
}[];
// Usually only populated on the final chunk, if the provider reports usage.
usage?: {
prompt_tokens: number;
completion_tokens: number;
total_tokens: number;
};
}
/**
 * Contract implemented by every backend (Groq, OpenRouter, DeepSeek):
 * a name plus blocking and streaming chat completion entry points.
 */
export interface AIProvider {
name: string;
createChatCompletion(request: ChatCompletionRequest): Promise<ChatCompletionResponse>;
createStreamingChatCompletion(request: ChatCompletionRequest): AsyncIterable<StreamingChatCompletionChunk>;
}
/**
 * AIProvider implementation backed by the official Groq SDK.
 */
class GroqProvider implements AIProvider {
  public name = 'groq';
  private client: Groq;

  constructor(apiKey: string) {
    this.client = new Groq({ apiKey });
  }

  /** Maps our provider-neutral request onto the Groq SDK parameter object. */
  private requestParams(request: ChatCompletionRequest) {
    const { model, messages, temperature, max_tokens, tools, tool_choice } = request;
    return {
      model,
      messages: messages as any,
      temperature,
      max_tokens,
      tools,
      tool_choice,
    };
  }

  /** Single blocking chat completion call. */
  async createChatCompletion(request: ChatCompletionRequest): Promise<ChatCompletionResponse> {
    const result = await this.client.chat.completions.create({
      ...this.requestParams(request),
      stream: false,
    });
    return result as ChatCompletionResponse;
  }

  /** Streaming chat completion; yields chunks as the SDK produces them. */
  async* createStreamingChatCompletion(request: ChatCompletionRequest): AsyncIterable<StreamingChatCompletionChunk> {
    const stream = await this.client.chat.completions.create({
      ...this.requestParams(request),
      stream: true,
    });
    for await (const piece of stream) {
      yield piece as StreamingChatCompletionChunk;
    }
  }
}
/**
 * AIProvider implementation that talks to OpenRouter through the OpenAI SDK,
 * pointed at OpenRouter's OpenAI-compatible endpoint.
 */
class OpenRouterProvider implements AIProvider {
  public name = 'openrouter';
  private client: OpenAI;

  constructor(apiKey: string) {
    // OpenRouter asks clients to identify themselves via these headers.
    this.client = new OpenAI({
      apiKey,
      baseURL: 'https://openrouter.ai/api/v1',
      defaultHeaders: {
        'HTTP-Referer': 'https://github.com/fr3k/behemoth-cli',
        'X-Title': 'BEHEMOTH CLI v2'
      }
    });
  }

  /** Translates our request shape into the SDK's create() parameters. */
  private requestParams(request: ChatCompletionRequest) {
    const { model, messages, temperature, max_tokens, tools, tool_choice } = request;
    return {
      model,
      messages: messages as any,
      temperature,
      max_tokens,
      tools,
      tool_choice,
    };
  }

  /** Single blocking chat completion call. */
  async createChatCompletion(request: ChatCompletionRequest): Promise<ChatCompletionResponse> {
    const result = await this.client.chat.completions.create({
      ...this.requestParams(request),
      stream: false,
    });
    return result as ChatCompletionResponse;
  }

  /** Streaming chat completion; yields chunks as the SDK produces them. */
  async* createStreamingChatCompletion(request: ChatCompletionRequest): AsyncIterable<StreamingChatCompletionChunk> {
    const stream = await this.client.chat.completions.create({
      ...this.requestParams(request),
      stream: true,
    });
    for await (const piece of stream) {
      yield piece as StreamingChatCompletionChunk;
    }
  }
}
/**
 * AIProvider implementation for DeepSeek, reached via the OpenAI SDK with a
 * custom base URL (DeepSeek exposes an OpenAI-compatible API).
 */
class DeepSeekProvider implements AIProvider {
  public name = 'deepseek';
  private client: OpenAI;

  constructor(apiKey: string) {
    this.client = new OpenAI({
      apiKey,
      baseURL: 'https://api.deepseek.com',
    });
  }

  /** Translates our request shape into the SDK's create() parameters. */
  private requestParams(request: ChatCompletionRequest) {
    const { model, messages, temperature, max_tokens, tools, tool_choice } = request;
    return {
      model,
      messages: messages as any,
      temperature,
      max_tokens,
      tools,
      tool_choice,
    };
  }

  /** Single blocking chat completion call. */
  async createChatCompletion(request: ChatCompletionRequest): Promise<ChatCompletionResponse> {
    const result = await this.client.chat.completions.create({
      ...this.requestParams(request),
      stream: false,
    });
    return result as ChatCompletionResponse;
  }

  /** Streaming chat completion; yields chunks as the SDK produces them. */
  async* createStreamingChatCompletion(request: ChatCompletionRequest): AsyncIterable<StreamingChatCompletionChunk> {
    const stream = await this.client.chat.completions.create({
      ...this.requestParams(request),
      stream: true,
    });
    for await (const piece of stream) {
      yield piece as StreamingChatCompletionChunk;
    }
  }
}
/**
 * Builds and caches one AIProvider client per configured provider.
 * Cached clients are reused until the cache is cleared or a key is rotated.
 */
export class ProviderClientFactory {
  private configManager: MultiProviderConfigManager;
  private clientCache = new Map<string, AIProvider>();

  constructor() {
    this.configManager = new MultiProviderConfigManager();
  }

  /**
   * Returns a (possibly cached) client for `providerName`.
   * @throws Error when no API key is configured, or the name is unsupported.
   */
  createProvider(providerName: string): AIProvider {
    // Reuse a previously built client when one exists.
    const cached = this.clientCache.get(providerName);
    if (cached !== undefined) {
      return cached;
    }

    const apiKey = this.configManager.getProviderApiKey(providerName);
    if (!apiKey) {
      throw new Error(`No API key configured for provider: ${providerName}`);
    }

    // Dispatch table instead of a switch; unknown names hit the throw below.
    const constructors: Record<string, (key: string) => AIProvider> = {
      groq: (key) => new GroqProvider(key),
      openrouter: (key) => new OpenRouterProvider(key),
      deepseek: (key) => new DeepSeekProvider(key),
    };
    const build = constructors[providerName];
    if (!build) {
      throw new Error(`Unsupported provider: ${providerName}`);
    }

    const provider = build(apiKey);
    this.clientCache.set(providerName, provider);
    return provider;
  }

  /**
   * Rotates the stored API key for a provider and rebuilds its client.
   * @returns the fresh client, or null when rotation produced no key.
   */
  rotateApiKey(providerName: string): AIProvider | null {
    // Evict the stale client so createProvider picks up the rotated key.
    this.clientCache.delete(providerName);
    const rotated = this.configManager.rotateProviderApiKey(providerName);
    return rotated ? this.createProvider(providerName) : null;
  }

  /** True when the provider has configuration available. */
  isProviderAvailable(providerName: string): boolean {
    return this.configManager.isProviderConfigured(providerName);
  }

  /** Enabled providers that are also fully configured. */
  getAvailableProviders(): string[] {
    const enabled = this.configManager.getEnabledProviders();
    return enabled.filter((name) => this.configManager.isProviderConfigured(name));
  }

  /** Checks that a provider is both known and configured. */
  validateProvider(providerName: string): { valid: boolean; error?: string } {
    if (!PROVIDERS[providerName]) {
      return { valid: false, error: `Unknown provider: ${providerName}` };
    }
    if (!this.configManager.isProviderConfigured(providerName)) {
      return { valid: false, error: `Provider ${providerName} is not configured` };
    }
    return { valid: true };
  }

  /** Discards every cached client; subsequent calls recreate them. */
  clearCache(): void {
    this.clientCache.clear();
  }

  /** Snapshot of which providers are currently cached, and how many. */
  getCacheStats(): { providers: string[], size: number } {
    return {
      providers: [...this.clientCache.keys()],
      size: this.clientCache.size
    };
  }
}
// Module-level singleton: importers share this instance so the provider
// client cache inside ProviderClientFactory is reused across the process.
export const providerClientFactory = new ProviderClientFactory();