UNPKG

node-llama-cpp

Version:

Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level

11 lines 526 B
import chalk from "chalk";
import { getReadablePath } from "./getReadablePath.js";

/**
 * Print a human-readable description of a model's source to the console.
 *
 * The destination is discriminated by its `type` field:
 * - `"url"`  — prints the `url` property with a yellow "URL:" label
 * - `"uri"`  — prints the `uri` property with a yellow "URI:" label
 * - anything else — treated as a local file; prints `path` (made readable
 *   via `getReadablePath`) with a yellow "File:" label
 *
 * @param modelDestination - object with a `type` discriminator and the
 *   matching `url` / `uri` / `path` property
 */
export function printModelDestination(modelDestination) {
    switch (modelDestination.type) {
        case "url":
            console.info(`${chalk.yellow("URL:")} ${modelDestination.url}`);
            break;
        case "uri":
            console.info(`${chalk.yellow("URI:")} ${modelDestination.uri}`);
            break;
        default:
            console.info(`${chalk.yellow("File:")} ${getReadablePath(modelDestination.path)}`);
            break;
    }
}
//# sourceMappingURL=printModelDestination.js.map