llama-flow
The TypeScript-first prompt engineering toolkit for working with chat-based LLMs.
JavaScript
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.TokenError = void 0;
// Error type carrying an `overflowTokens` count alongside the standard Error message.
class TokenError extends Error {
    overflowTokens;
    constructor(message, overflowTokens) {
        super(message);
        this.name = 'TokenError';
        this.overflowTokens = overflowTokens;
    }
}
exports.TokenError = TokenError;
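
A minimal sketch of how this error might be caught and inspected. The import path and the `checkBudget` helper are assumptions for illustration; this file only shows that `TokenError` exposes a `message` and an `overflowTokens` value, not how llama-flow itself raises it:

// TypeScript usage sketch (hypothetical; not llama-flow's actual prompt API)
import { TokenError } from 'llama-flow'; // assumed re-export from the package root

// Hypothetical stand-in for a call that rejects oversized prompts.
function checkBudget(promptTokens: number, maxTokens: number): void {
  if (promptTokens > maxTokens) {
    throw new TokenError(
      `Prompt exceeds the ${maxTokens}-token limit`,
      promptTokens - maxTokens,
    );
  }
}

try {
  checkBudget(4200, 4096);
} catch (err) {
  if (err instanceof TokenError) {
    // Trim the prompt by `err.overflowTokens` tokens and retry, or surface the error.
    console.error(`Over budget by ${err.overflowTokens} tokens: ${err.message}`);
  } else {
    throw err;
  }
}

Checking `instanceof TokenError` before reading `overflowTokens` keeps the handler from misinterpreting unrelated errors that lack that field.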