llama-flow
The TypeScript-first prompt engineering toolkit for working with chat-based LLMs.
JavaScript
Object.defineProperty(exports, "__esModule", { value: true });
exports.PromptDefaultRetries = exports.MinimumResponseTokens = exports.CompletionDefaultTimeout = exports.CompletionDefaultRetries = exports.RateLimitRetryIntervalMs = void 0;
// Wait 30 seconds before retrying a request that was rate limited.
exports.RateLimitRetryIntervalMs = 30_000;
// Retry a failed completion request up to 3 times by default.
exports.CompletionDefaultRetries = 3;
// Time out a completion request after 60 seconds by default.
exports.CompletionDefaultTimeout = 60_000;
// Reserve at least 200 tokens for the model's response.
exports.MinimumResponseTokens = 200;
// Retry a failed prompt up to 3 times by default.
exports.PromptDefaultRetries = 3;
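
These constants are the compiled CommonJS output of a plain TypeScript constants module. A minimal sketch of the source it corresponds to (the file name and the comments are assumptions, not taken from the package; the names and values come from the compiled output above):

TypeScript
// config.ts (assumed name): default tuning constants for retries, timeouts, and token budgeting.
export const RateLimitRetryIntervalMs = 30_000; // wait 30 s before retrying after a rate limit
export const CompletionDefaultRetries = 3;      // retry a failed completion up to 3 times
export const CompletionDefaultTimeout = 60_000; // abort a completion after 60 s
export const MinimumResponseTokens = 200;       // keep at least 200 tokens free for the response
export const PromptDefaultRetries = 3;          // retry a failed prompt up to 3 times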