// ai-utils.js
// Build AI applications, chatbots, and agents with JavaScript and TypeScript.
/**
 * Decorator around a text generation model that runs every incoming prompt
 * through a `promptMapping` (e.g. chat → instruction formatting) before
 * delegating to the wrapped model. All metadata accessors pass straight
 * through to the underlying model.
 */
export class PromptMappingTextGenerationModel {
    /**
     * @param {{ model: object, promptMapping: { map: Function, stopTokens: any } }} args
     *   `model` is the underlying text generation model; `promptMapping.map`
     *   converts a prompt into the wrapped model's prompt format, and
     *   `promptMapping.stopTokens` is applied when stacking mappings.
     */
    constructor({ model, promptMapping }) {
        // Plain assignments create the same enumerable/configurable/writable
        // own properties as the original compiler-emitted defineProperty +
        // assignment pairs, without the boilerplate.
        this.model = model;
        this.promptMapping = promptMapping;
    }
    /** Underlying model's information (pass-through). */
    get modelInformation() {
        return this.model.modelInformation;
    }
    /** Underlying model's settings (pass-through). */
    get settings() {
        return this.model.settings;
    }
    /** Underlying model's tokenizer (pass-through). */
    get tokenizer() {
        return this.model.tokenizer;
    }
    /** Underlying model's context window size (pass-through). */
    get contextWindowSize() {
        return this.model.contextWindowSize;
    }
    /**
     * A token-counting function that maps the prompt before counting, or
     * `undefined` when the wrapped model does not support token counting.
     */
    get countPromptTokens() {
        // Bind so the delegate keeps the wrapped model as its `this`.
        const count = this.model.countPromptTokens?.bind(this.model);
        if (count === undefined) {
            return undefined;
        }
        return (prompt) => count(this.promptMapping.map(prompt));
    }
    /** Generates a text response after mapping the prompt. */
    generateTextResponse(prompt, options) {
        return this.model.generateTextResponse(this.promptMapping.map(prompt), options);
    }
    /** Extracts plain text from a model response (pass-through). */
    extractText(response) {
        return this.model.extractText(response);
    }
    /**
     * A delta-stream function that maps the prompt first, or `undefined`
     * when the wrapped model does not support delta streaming.
     */
    get generateDeltaStreamResponse() {
        const stream = this.model.generateDeltaStreamResponse?.bind(this.model);
        if (stream === undefined) {
            return undefined;
        }
        return (prompt, options) => stream(this.promptMapping.map(prompt), options);
    }
    /** Underlying model's delta extractor (pass-through, unbound). */
    get extractTextDelta() {
        return this.model.extractTextDelta;
    }
    /**
     * Stacks another prompt mapping on top of this one. The new mapping's
     * stop tokens are first applied to the underlying model.
     */
    mapPrompt(promptMapping) {
        return new PromptMappingTextGenerationModel({
            model: this.withStopTokens(promptMapping.stopTokens),
            promptMapping,
        });
    }
    /** Returns a copy whose underlying model uses the additional settings. */
    withSettings(additionalSettings) {
        return new PromptMappingTextGenerationModel({
            model: this.model.withSettings(additionalSettings),
            promptMapping: this.promptMapping,
        });
    }
    /** Underlying model's completion-token limit (pass-through). */
    get maxCompletionTokens() {
        return this.model.maxCompletionTokens;
    }
    /** Returns a copy with an updated completion-token limit. */
    withMaxCompletionTokens(maxCompletionTokens) {
        return new PromptMappingTextGenerationModel({
            model: this.model.withMaxCompletionTokens(maxCompletionTokens),
            promptMapping: this.promptMapping,
        });
    }
    /** Returns a copy with updated stop tokens on the underlying model. */
    withStopTokens(stopTokens) {
        return new PromptMappingTextGenerationModel({
            model: this.model.withStopTokens(stopTokens),
            promptMapping: this.promptMapping,
        });
    }
}