/**
 * node-llama-cpp — run AI models locally on your machine with Node.js
 * bindings for llama.cpp; supports enforcing a JSON schema on the model
 * output at the generation level.
 */
import { LlamaChatSession } from "../evaluator/LlamaChatSession/LlamaChatSession.js";
import { LlamaChat } from "../evaluator/LlamaChat/LlamaChat.js";
import { LlamaCompletion } from "../evaluator/LlamaCompletion.js";
/**
 * Bundle of the library's evaluator classes, exposed together as a single
 * read-only object.
 *
 * Each property is the class constructor itself (`typeof Class`), so the
 * bundle can be used both to instantiate the classes and for `instanceof`
 * checks. Obtain an instance of this shape via {@link getLlamaClasses}.
 */
export type LlamaClasses = {
    /** Chat session evaluator — manages a conversation with a model. */
    readonly LlamaChatSession: typeof LlamaChatSession;
    /** Lower-level chat evaluator underlying `LlamaChatSession`. */
    readonly LlamaChat: typeof LlamaChat;
    /** Plain text-completion evaluator (no chat wrapping). */
    readonly LlamaCompletion: typeof LlamaCompletion;
};
/**
 * Returns the {@link LlamaClasses} bundle containing the library's evaluator
 * class constructors (`LlamaChatSession`, `LlamaChat`, `LlamaCompletion`).
 *
 * Ambient declaration only — the implementation lives in the corresponding
 * `.js` module. NOTE(review): whether the returned object is cached/singleton
 * or freshly built per call is not visible from this declaration — confirm
 * against the implementation before relying on identity comparisons.
 *
 * @returns A read-only object mapping class names to their constructors.
 */
export declare function getLlamaClasses(): LlamaClasses;