UNPKG

behemoth-cli

Version:

🌍 BEHEMOTH CLI v3.760.4 - Level 50+ POST-SINGULARITY Intelligence Trading AI

83 lines 2.69 kB
import { providerClientFactory } from './provider-clients.js';

/**
 * Thin client over a pluggable chat-completion provider.
 *
 * Wraps `providerClientFactory` providers with two convenience methods:
 * `generateJSON` (parse the reply as JSON, falling back to `{ content }`)
 * and `generateText` (return the raw reply string).
 */
export class AIClient {
  config;

  /**
   * @param {object} [config] - Client configuration.
   * @param {string} [config.provider='groq'] - Provider key passed to the factory.
   * @param {string} [config.model] - Model id; falls back per-request to
   *   'llama-3.3-70b-versatile' when unset.
   * @param {number} [config.temperature=0.7] - Sampling temperature.
   * @param {number} [config.maxTokens=4096] - Max completion tokens.
   */
  constructor(config = {}) {
    // Spread first, then apply defaults with `??` so that:
    //  - an explicit `temperature: 0` is respected (the old `|| 0.7` would
    //    have replaced it, and the old trailing spread re-applied caller keys
    //    on top of the defaults);
    //  - a key present-but-undefined in `config` still receives its default.
    this.config = {
      ...config,
      provider: config.provider ?? 'groq',
      model: config.model,
      temperature: config.temperature ?? 0.7,
      maxTokens: config.maxTokens ?? 4096,
    };
  }

  // Builds the provider request payload from a user prompt and an optional
  // system prompt.
  #buildRequest(prompt, systemPrompt) {
    const messages = [];
    if (systemPrompt) {
      messages.push({ role: 'system', content: systemPrompt });
    }
    messages.push({ role: 'user', content: prompt });
    return {
      model: this.config.model ?? 'llama-3.3-70b-versatile',
      messages,
      temperature: this.config.temperature,
      max_tokens: this.config.maxTokens,
    };
  }

  // Runs one chat completion and returns the first choice's content
  // (possibly undefined if the provider returned no content).
  async #complete(prompt, systemPrompt) {
    const request = this.#buildRequest(prompt, systemPrompt);
    const provider = providerClientFactory.createProvider(this.config.provider);
    const response = await provider.createChatCompletion(request);
    return response.choices[0]?.message?.content;
  }

  /**
   * Generate a completion and parse it as JSON.
   *
   * @param {string} prompt - User prompt.
   * @param {string} [systemPrompt] - Optional system prompt.
   * @returns {Promise<any>} Parsed JSON, or `{ content }` when the reply is
   *   not valid JSON.
   * @throws {Error} Wrapped provider/empty-reply failure; the original error
   *   is preserved on `.cause`.
   */
  async generateJSON(prompt, systemPrompt) {
    try {
      const content = await this.#complete(prompt, systemPrompt);
      if (!content) {
        throw new Error('No content received from AI provider');
      }
      try {
        return JSON.parse(content);
      } catch {
        // Not valid JSON — wrap the raw text so callers always get an object.
        return { content };
      }
    } catch (error) {
      // Preserve the original stack via `cause` instead of discarding it.
      throw new Error(`AI generation failed: ${error}`, { cause: error });
    }
  }

  /**
   * Generate a completion and return it as plain text.
   *
   * @param {string} prompt - User prompt.
   * @param {string} [systemPrompt] - Optional system prompt.
   * @returns {Promise<string>} Reply text, or `''` when the provider returned
   *   no content.
   * @throws {Error} Wrapped provider failure; the original error is preserved
   *   on `.cause`.
   */
  async generateText(prompt, systemPrompt) {
    try {
      return (await this.#complete(prompt, systemPrompt)) ?? '';
    } catch (error) {
      throw new Error(`AI generation failed: ${error}`, { cause: error });
    }
  }
}

/**
 * Factory function for easy creation.
 *
 * @param {object} [config] - Same shape as the AIClient constructor config.
 * @returns {AIClient}
 */
export function createAIClient(config) {
  return new AIClient(config);
}