node-llama-cpp
Version:
Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.
18 lines (17 loc) • 749 B
TypeScript
import { GeneralChatWrapper } from "./GeneralChatWrapper.js";
/**
 * Chat wrapper for the Alpaca-style prompt format, specializing
 * {@link GeneralChatWrapper} with configurable section titles.
 *
 * This chat wrapper is not safe against chat syntax injection attacks
 * ([learn more](https://node-llama-cpp.withcat.ai/guide/llama-text#input-safety-in-node-llama-cpp)).
 */
export declare class AlpacaChatWrapper extends GeneralChatWrapper {
    /** Name identifying this chat wrapper. */
    readonly wrapperName: string;
    /**
     * All options are optional; when omitted, the implementation's defaults
     * apply (the default values are not visible in this declaration file).
     *
     * @param options.userMessageTitle - Title text for user message sections.
     * @param options.modelResponseTitle - Title text for model response sections.
     * @param options.middleSystemMessageTitle - Title text for system messages
     *   occurring mid-conversation (presumably, from the name — confirm against
     *   the `GeneralChatWrapper` implementation).
     * @param options.allowSpecialTokensInTitles - Whether special tokens may
     *   appear inside the titles above.
     */
    constructor({ userMessageTitle, modelResponseTitle, middleSystemMessageTitle, allowSpecialTokensInTitles }?: {
        userMessageTitle?: string;
        modelResponseTitle?: string;
        middleSystemMessageTitle?: string;
        allowSpecialTokensInTitles?: boolean;
    });
    /** The title currently used for user messages. */
    get userMessageTitle(): string;
    /** The title currently used for model responses. */
    get modelResponseTitle(): string;
    /** The title currently used for mid-conversation system messages. */
    get middleSystemMessageTitle(): string;
}