erosolar-cli
Version:
Unified AI agent framework for the command line - Multi-provider support with schema-driven tools, code intelligence, and transparent reasoning
41 lines • 1.58 kB
TypeScript
import type { ConversationMessage, LLMProvider, ProviderResponse, ProviderToolDefinition, StreamChunk } from '../core/types.js';
/**
 * Constructor options for {@link AnthropicMessagesProvider}.
 *
 * Retry behavior is split into two knobs: rate-limit retries
 * (`rateLimitMaxRetries` / `rateLimitInitialDelayMs`) and transient-error
 * retries (`transientMaxRetries`) — see the corresponding private fields on
 * the provider class.
 */
interface AnthropicProviderOptions {
    /** Anthropic API key used to authenticate requests. */
    apiKey: string;
    /** Model identifier sent with each request (e.g. a Claude model name). */
    model: string;
    /** Optional cap on tokens generated per response — default set by the implementation; confirm there. */
    maxTokens?: number;
    /** Optional sampling temperature forwarded to the API — default set by the implementation; confirm there. */
    temperature?: number;
    /** Maximum number of retries after a rate-limit response — NOTE(review): default not visible here, confirm in implementation. */
    rateLimitMaxRetries?: number;
    /** Initial backoff delay in milliseconds for rate-limit retries; presumably grows per {@link AnthropicMessagesProvider} backoff logic. */
    rateLimitInitialDelayMs?: number;
    /** When true, enables Anthropic prompt caching — exact caching behavior is defined in the implementation. */
    enablePromptCaching?: boolean;
    /** Maximum retries for transient errors (default: 3) */
    transientMaxRetries?: number;
}
/**
 * LLM provider backed by the Anthropic Messages API.
 *
 * Implements the framework-wide {@link LLMProvider} contract: synchronous
 * generation ({@link generate}), streaming generation ({@link generateStream}),
 * and capability introspection ({@link getCapabilities}). Rate-limited calls
 * are retried with exponential backoff plus jitter (see the private
 * `getBackoffDelay` / `executeWithRateLimitRetries` helpers).
 *
 * This is an ambient declaration (`.d.ts`); the behavior described here is
 * implemented in the corresponding `anthropicProvider` module.
 */
export declare class AnthropicMessagesProvider implements LLMProvider {
    /** Stable provider identifier used by the framework to select this provider. */
    readonly id = "anthropic";
    /** Model identifier this instance was constructed with (from {@link AnthropicProviderOptions.model}). */
    readonly model: string;
    /** Underlying API client — presumably the Anthropic SDK client; type is erased in this declaration, confirm in the implementation. */
    private readonly client;
    /** Per-response token cap derived from {@link AnthropicProviderOptions.maxTokens}. */
    private readonly maxTokens;
    /** Sampling temperature derived from {@link AnthropicProviderOptions.temperature}. */
    private readonly temperature;
    /** Retry budget for rate-limited requests. */
    private readonly rateLimitMaxRetries;
    /** Initial delay (ms) for the rate-limit backoff schedule. */
    private readonly rateLimitInitialDelayMs;
    /** Whether Anthropic prompt caching is enabled for outgoing requests. */
    private readonly enablePromptCaching;
    /** Retry budget for transient (non-rate-limit) errors; see AnthropicProviderOptions.transientMaxRetries (default 3). */
    private readonly _transientMaxRetries;
    /**
     * @param options - Connection, model, and retry configuration; see {@link AnthropicProviderOptions}.
     */
    constructor(options: AnthropicProviderOptions);
    /**
     * Send the conversation to the Messages API and resolve with the complete response.
     *
     * @param messages - Conversation history to send.
     * @param tools - Tool definitions made available to the model.
     * @returns The provider's full (non-streaming) response.
     */
    generate(messages: ConversationMessage[], tools: ProviderToolDefinition[]): Promise<ProviderResponse>;
    /**
     * Stream the response incrementally as {@link StreamChunk}s.
     *
     * @param messages - Conversation history to send.
     * @param tools - Tool definitions made available to the model.
     * @returns An async iterator yielding chunks as they arrive.
     */
    generateStream(messages: ConversationMessage[], tools: ProviderToolDefinition[]): AsyncIterableIterator<StreamChunk>;
    /**
     * Describe what this provider supports (streaming, tool/function calling,
     * vision, token limits, and modalities) so callers can feature-detect.
     */
    getCapabilities(): {
        streaming: boolean;
        toolCalling: boolean;
        vision: boolean;
        functionCalling: boolean;
        maxTokens: number;
        supportedModalities: ("text" | "image" | "audio")[];
    };
    /**
     * Calculate exponential backoff delay with jitter
     */
    private getBackoffDelay;
    /** Run a request with automatic retries on rate-limit errors, using the backoff schedule above. */
    private executeWithRateLimitRetries;
}
export {};
//# sourceMappingURL=anthropicProvider.d.ts.map