node-llama-cpp
Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model's output at the generation level.
Source (TypeScript):
// CLI commands for downloading, building, and clearing a local llama.cpp source build.
import { BuildLlamaCppCommand } from "./cli/commands/source/commands/BuildCommand.js";
import { DownloadLlamaCppCommand } from "./cli/commands/source/commands/DownloadCommand.js";
import { ClearLlamaCppBuildCommand } from "./cli/commands/source/commands/ClearCommand.js";

// Internal CLI entry point (underscore-prefixed; not included in the export list below).
import { _startCreateCli } from "./cli/startCreateCli.js";

// Defaults used when building llama.cpp from source.
import { getBuildDefaults } from "./utils/getBuildDefaults.js";

export { BuildLlamaCppCommand, DownloadLlamaCppCommand, ClearLlamaCppBuildCommand, getBuildDefaults };
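
The description above mentions enforcing a JSON schema on the model's output at the generation level. As a rough illustration, here is a minimal sketch of that feature using node-llama-cpp's documented v3 API (getLlama, LlamaChatSession, createGrammarForJsonSchema); the model path, prompt, and schema are placeholders and are not part of the file shown above.

import { getLlama, LlamaChatSession } from "node-llama-cpp";

// Load the native llama.cpp bindings and a local GGUF model (the path is a placeholder).
const llama = await getLlama();
const model = await llama.loadModel({ modelPath: "path/to/model.gguf" });
const context = await model.createContext();
const session = new LlamaChatSession({ contextSequence: context.getSequence() });

// Compile a JSON schema into a grammar; token sampling is then constrained to match it.
const grammar = await llama.createGrammarForJsonSchema({
    type: "object",
    properties: {
        title: { type: "string" },
        rating: { type: "number" }
    }
} as const);

const response = await session.prompt("Suggest a movie and rate it from 1 to 10.", { grammar });

// The raw text already conforms to the schema; parse() returns it as a typed object.
const result = grammar.parse(response);
console.log(result.title, result.rating);

Because the grammar is applied while tokens are sampled, the output is constrained to the schema during generation rather than validated after the fact.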