node-llama-cpp
Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.
JavaScript
import { LlamaChatSession } from "../evaluator/LlamaChatSession/LlamaChatSession.js";
import { LlamaChat } from "../evaluator/LlamaChat/LlamaChat.js";
import { LlamaCompletion } from "../evaluator/LlamaCompletion.js";
// Cache the sealed class map so repeated calls return the same object
let cachedClasses = undefined;

export function getLlamaClasses() {
    if (cachedClasses == null)
        cachedClasses = Object.seal({
            LlamaChatSession,
            LlamaChat,
            LlamaCompletion
        });

    return cachedClasses;
}
//# sourceMappingURL=getLlamaClasses.js.map
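For context, a minimal usage sketch is shown below. It assumes that getLlamaClasses is re-exported from the package root (the import path here is illustrative, not confirmed by this file). The returned object is sealed, so callers get a stable, non-extensible map of the three evaluator classes, and every call yields the same cached object.
JavaScript
// Usage sketch — assumes getLlamaClasses is re-exported from the "node-llama-cpp" package root
import { getLlamaClasses } from "node-llama-cpp";

const classes = getLlamaClasses();

// The sealed object exposes exactly these three evaluator classes
console.log(Object.keys(classes)); // ["LlamaChatSession", "LlamaChat", "LlamaCompletion"]

// The same cached object is returned on every call
console.log(getLlamaClasses() === classes); // true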