erosolar-cli
Version:
Unified AI agent framework for the command line - Multi-provider support with schema-driven tools, code intelligence, and transparent reasoning
43 lines • 1.6 kB
TypeScript
import type { ConversationMessage, LLMProvider, ProviderId, ProviderResponse, ProviderToolDefinition, StreamChunk } from '../core/types.js';
/**
* Custom error class for provider-specific failures
*/
/**
 * Custom error class for provider-specific failures.
 *
 * Wraps a failure from a concrete LLM provider, preserving the underlying
 * cause and whether the operation may be retried.
 */
export declare class ProviderStreamError extends Error {
/** Whether the failure is transient and the request may be retried. */
readonly isRetryable: boolean;
/** The underlying error that triggered this failure, when available. */
readonly originalError?: Error;
/** Identifier of the provider that produced the failure. */
readonly providerId: string;
/**
 * @param message       - Human-readable description of the failure.
 * @param providerId    - Identifier of the provider that failed.
 * @param originalError - Optional underlying cause.
 * @param isRetryable   - Optional retryability flag (default not visible here — see implementation).
 */
constructor(message: string, providerId: string, originalError?: Error, isRetryable?: boolean);
}
/**
 * Construction options for {@link OpenAIChatCompletionsProvider}.
 */
interface OpenAIChatCompletionsOptions {
/** API key used to authenticate requests. */
apiKey: string;
/** Model identifier to request (e.g. passed through to the chat completions API). */
model: string;
/** Provider identifier; presumably defaults to an OpenAI id when omitted — see implementation. */
providerId?: ProviderId;
/** Optional base URL override, e.g. for OpenAI-compatible endpoints. */
baseURL?: string;
/** Request timeout in milliseconds (default: 120000) */
timeout?: number;
/** Maximum retries for transient errors (default: 3) */
maxRetries?: number;
}
/**
 * LLM provider backed by an OpenAI-style chat completions API.
 *
 * Implements the {@link LLMProvider} contract with built-in retry handling
 * for transient errors (see {@link OpenAIChatCompletionsOptions.maxRetries}).
 */
export declare class OpenAIChatCompletionsProvider implements LLMProvider {
/** Provider identifier this instance reports (from options or a default). */
readonly id: ProviderId;
/** Model identifier used for all requests. */
readonly model: string;
/** Underlying API client; constructed from the options — type not visible in this declaration. */
private readonly client;
/** Effective retry limit for transient errors (options default: 3). */
private readonly maxRetries;
/**
 * @param options - API key, model, and optional endpoint/timeout/retry settings.
 */
constructor(options: OpenAIChatCompletionsOptions);
/**
 * Sleep for a given number of milliseconds
 */
private sleep;
/**
 * Calculate exponential backoff delay
 */
private getBackoffDelay;
/**
 * Execute request with retry logic for transient errors
 */
private executeWithRetry;
/**
 * Runs a single (non-streaming) completion request.
 *
 * @param messages - Conversation history to send.
 * @param tools    - Tool definitions the model may invoke.
 * @returns The provider's complete response.
 * @throws ProviderStreamError - presumably on non-retryable or exhausted-retry failures; confirm against implementation.
 */
generate(messages: ConversationMessage[], tools: ProviderToolDefinition[]): Promise<ProviderResponse>;
/**
 * Runs a streaming completion request.
 *
 * @param messages - Conversation history to send.
 * @param tools    - Tool definitions the model may invoke.
 * @returns Async iterator yielding incremental {@link StreamChunk}s.
 * @throws ProviderStreamError - presumably on stream failures; confirm against implementation.
 */
generateStream(messages: ConversationMessage[], tools: ProviderToolDefinition[]): AsyncIterableIterator<StreamChunk>;
}
export {};
//# sourceMappingURL=openaiChatCompletionsProvider.d.ts.map