llama-flow
The TypeScript-first prompt engineering toolkit for working with chat-based LLMs.
config.d.ts (TypeScript)
/** Interval (ms) to wait before retrying after a rate-limit error. */
export declare const RateLimitRetryIntervalMs = 30000;
/** Default number of retry attempts for a completion request. */
export declare const CompletionDefaultRetries = 3;
/** Default timeout (ms) for a completion request. */
export declare const CompletionDefaultTimeout = 60000;
/** Minimum number of tokens reserved for the model's response. */
export declare const MinimumResponseTokens = 200;
/** Default number of retry attempts for a prompt. */
export declare const PromptDefaultRetries = 3;
//# sourceMappingURL=config.d.ts.map
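These constants define llama-flow's default retry and timeout behavior. As a minimal usage sketch (assuming the constants are importable from the package root rather than an internal config module), they can be read at runtime, for example to log the effective defaults before deciding which to override:

// Hypothetical usage sketch: the import path 'llama-flow' is an assumption;
// the constants may instead be exposed via a config submodule.
import {
  RateLimitRetryIntervalMs,
  CompletionDefaultRetries,
  CompletionDefaultTimeout,
  MinimumResponseTokens,
  PromptDefaultRetries,
} from 'llama-flow';

// Summarize the published defaults for completions and prompts.
console.log(
  `completions: ${CompletionDefaultRetries} retries, ` +
    `${CompletionDefaultTimeout} ms timeout, ` +
    `${RateLimitRetryIntervalMs} ms back-off after a rate limit; ` +
    `prompts: ${PromptDefaultRetries} retries, ` +
    `at least ${MinimumResponseTokens} tokens reserved for the response`,
);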