node-llama-cpp
Version:
Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.
23 lines (22 loc) • 690 B
TypeScript
/**
 * Raw terminal key sequences that can be matched by `ConsoleInteraction.onKey`.
 * Each member's value is the literal byte sequence a terminal emits for that key.
 */
export declare const enum ConsoleInteractionKey {
    /** ETX control character (`\u0003`), emitted when the user presses Ctrl+C. */
    ctrlC = "\u0003",
    /** ANSI escape sequence (`ESC [ A`) for the up-arrow key. */
    upArrow = "\u001B[A",
    /** ANSI escape sequence (`ESC [ B`) for the down-arrow key. */
    downArrow = "\u001B[B",
    /** Carriage return, emitted by the Enter/Return key. */
    enter = "\r"
}
/**
 * Listens for key presses on a readable stream and dispatches registered
 * callbacks when matching keys arrive.
 *
 * NOTE(review): the `stdin` option presumably defaults to `process.stdin`
 * when omitted — confirm against the implementation.
 */
export declare class ConsoleInteraction {
    constructor({ stdin }?: {
        /** Stream to read key input from. */
        stdin?: NodeJS.ReadStream;
    });
    /** Whether this interaction is currently listening for input. */
    get isActive(): boolean;
    /** Start listening for key presses. */
    start(): void;
    /** Stop listening for key presses. */
    stop(): void;
    /**
     * Register `callback` to run when `key` (or any key in the given array)
     * is pressed.
     * @returns a handle that can be disposed to unregister the callback.
     */
    onKey(key: string | ConsoleInteractionKey | (string | ConsoleInteractionKey)[], callback: () => void): ConsoleInteractionOnKeyHandle;
    /**
     * Ask a yes/no question and resolve with the user's answer as a boolean.
     * NOTE(review): exact prompt format and accepted inputs are not visible
     * here — verify against the implementation.
     */
    static yesNoQuestion(question: string): Promise<boolean>;
}
/**
 * Handle returned by `ConsoleInteraction.onKey`. Disposing it unregisters
 * the associated callback. The constructor is private, so instances can only
 * be obtained from `onKey`.
 */
export declare class ConsoleInteractionOnKeyHandle {
    private constructor();
    /** Dispose of the handle, unregistering the associated key callback. */
    dispose(): void;
    /**
     * Explicit-resource-management support (`using` declarations, TS 5.2+);
     * presumably equivalent to calling `dispose()` — confirm in implementation.
     */
    [Symbol.dispose](): void;
    /** Whether this handle has already been disposed. */
    get disposed(): boolean;
}