// ai-utils.js — compiled CommonJS output from the ai-utils.js library
// ("Build AI applications, chatbots, and agents with JavaScript and TypeScript").
;
// TypeScript-emitted interop helper: wraps a CommonJS module so that
// `mod.default` is always defined, letting default-import syntax work
// against both ES modules and plain CJS exports.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.LlamaCppTokenizer = void 0;
const zod_1 = __importDefault(require("zod"));
const callWithRetryAndThrottle_js_1 = require("../../util/api/callWithRetryAndThrottle.cjs");
const postToApi_js_1 = require("../../util/api/postToApi.cjs");
const LlamaCppError_js_1 = require("./LlamaCppError.cjs");
/**
* Tokenizer for LlamaCpp.
* @example
* const tokenizer = new LlamaCppTokenizer();
*
* const text = "At first, Nox didn't know what to do with the pup.";
*
* const tokenCount = await countTokens(tokenizer, text);
* const tokens = await tokenizer.tokenize(text);
* const tokensAndTokenTexts = await tokenizer.tokenizeWithTexts(text);
* const reconstructedText = await tokenizer.detokenize(tokens);
*/
class LlamaCppTokenizer {
    /**
     * Creates a tokenizer backed by a llama.cpp server.
     *
     * @param {object} [settings={}] - Optional API settings (e.g. `baseUrl`,
     *   `retry`, `throttle`) forwarded to the tokenize call.
     */
    constructor(settings = {}) {
        // Define `settings` as an enumerable, writable, configurable own
        // property in a single step (same final descriptor as the compiled
        // define-then-assign pattern).
        Object.defineProperty(this, "settings", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: settings,
        });
    }
    /**
     * Performs the raw tokenize API call with retry/throttle behavior taken
     * from the instance settings.
     *
     * @param {string} text - Text to tokenize.
     * @param {{ abortSignal?: AbortSignal }} [context] - Optional call context.
     * @returns {Promise<{ tokens: number[] }>} The parsed API response.
     */
    async callTokenizeAPI(text, context) {
        const { retry, throttle } = this.settings;
        return (0, callWithRetryAndThrottle_js_1.callWithRetryAndThrottle)({
            retry,
            throttle,
            // Spread settings last so configured values (e.g. baseUrl)
            // reach the API call, matching the original precedence.
            call: async () => callLlamaCppTokenizeAPI({
                abortSignal: context?.abortSignal,
                text,
                ...this.settings,
            }),
        });
    }
    /**
     * Tokenizes `text` and returns only the token ids.
     *
     * @param {string} text - Text to tokenize.
     * @returns {Promise<number[]>} Token ids produced by the server.
     */
    async tokenize(text) {
        const { tokens } = await this.callTokenizeAPI(text);
        return tokens;
    }
}
exports.LlamaCppTokenizer = LlamaCppTokenizer;
// Zod schema validating the llama.cpp `/tokenize` response body:
// `{ tokens: number[] }`.
const llamaCppTokenizationResponseSchema = zod_1.default.object({
    tokens: zod_1.default.array(zod_1.default.number()),
});
/**
 * POSTs `text` to the llama.cpp server's `/tokenize` endpoint and returns the
 * response parsed against `llamaCppTokenizationResponseSchema`.
 *
 * @param {object} options
 * @param {string} [options.baseUrl="http://127.0.0.1:8080"] - Server base URL.
 * @param {AbortSignal} [options.abortSignal] - Optional cancellation signal.
 * @param {string} options.text - Text to tokenize.
 * @returns {Promise<{ tokens: number[] }>} Parsed tokenize response.
 */
async function callLlamaCppTokenizeAPI({ baseUrl = "http://127.0.0.1:8080", abortSignal, text, }) {
    const url = `${baseUrl}/tokenize`;
    const body = { content: text };
    return (0, postToApi_js_1.postJsonToApi)({
        url,
        body,
        failedResponseHandler: LlamaCppError_js_1.failedLlamaCppCallResponseHandler,
        successfulResponseHandler: (0, postToApi_js_1.createJsonResponseHandler)(llamaCppTokenizationResponseSchema),
        abortSignal,
    });
}