erosolar-cli
Version:
Unified AI agent framework for the command line - Multi-provider support with schema-driven tools, code intelligence, and transparent reasoning
38 lines • 1.46 kB
TypeScript
import type { ConversationMessage, LLMProvider, ProviderId, ProviderResponse, ProviderToolDefinition, ReasoningEffortLevel, TextVerbosityLevel, StreamChunk } from '../core/types.js';
/**
 * Construction options for {@link OpenAIResponsesProvider}.
 */
interface OpenAIProviderOptions {
/** API key used to authenticate requests against the OpenAI-compatible endpoint. */
apiKey: string;
/** Model identifier to send with each request. */
model: string;
/** Provider identity to report as this instance's `id`; NOTE(review): default when omitted is not visible from this declaration — confirm in the implementation. */
providerId?: ProviderId;
/** Override for the API base URL (e.g. an OpenAI-compatible proxy); presumably defaults to the official endpoint — verify against the implementation. */
baseURL?: string;
/** Reasoning effort level forwarded with requests, when the model supports it. */
reasoningEffort?: ReasoningEffortLevel;
/** Text verbosity level forwarded with requests, when the model supports it. */
textVerbosity?: TextVerbosityLevel;
/** Maximum retries for transient errors (default: 3) */
maxRetries?: number;
/** Request timeout in milliseconds (default: 120000) */
timeout?: number;
}
/**
 * {@link LLMProvider} implementation backed by the OpenAI Responses API,
 * with built-in retry/backoff for transient errors and both one-shot
 * ({@link generate}) and streaming ({@link generateStream}) entry points.
 *
 * NOTE(review): the `timeout` option has no stored field here, so it is
 * presumably passed through to the underlying client at construction —
 * confirm in the implementation source.
 */
export declare class OpenAIResponsesProvider implements LLMProvider {
/** Provider identity this instance reports (taken from `options.providerId` or a default — see constructor implementation). */
readonly id: ProviderId;
/** Model identifier sent with every request. */
readonly model: string;
// Underlying API client; type erased in this declaration.
private readonly client;
// Optional per-request reasoning-effort setting captured from options.
private readonly reasoningEffort?;
// Optional per-request text-verbosity setting captured from options.
private readonly textVerbosity?;
// Retry budget for transient failures (options.maxRetries, default 3 per OpenAIProviderOptions docs).
private readonly maxRetries;
/** Creates a provider from the given {@link OpenAIProviderOptions}. */
constructor(options: OpenAIProviderOptions);
/**
 * Sleep for a given number of milliseconds
 */
private sleep;
/**
 * Calculate exponential backoff delay with jitter
 */
private getBackoffDelay;
/**
 * Execute request with retry logic for transient errors
 */
private executeWithRetry;
/**
 * Sends the conversation (plus tool definitions) and resolves with the
 * complete model response.
 */
generate(messages: ConversationMessage[], tools: ProviderToolDefinition[]): Promise<ProviderResponse>;
/**
 * Streaming variant of {@link generate}: yields incremental
 * {@link StreamChunk} values as the model produces them.
 */
generateStream(messages: ConversationMessage[], tools: ProviderToolDefinition[]): AsyncIterableIterator<StreamChunk>;
}
export {};
//# sourceMappingURL=openaiResponsesProvider.d.ts.map