node-llama-cpp

Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.
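
The module below formats a syntax-highlighted example of the library's basic initialization flow. For context, a minimal sketch of that flow, assuming the getLlama export and the loadModel method of node-llama-cpp (the model path is a placeholder):

import { getLlama } from "node-llama-cpp";

// Initialize the native llama.cpp bindings.
const llama = await getLlama();

// Load a local GGUF model file (path is hypothetical).
const model = await llama.loadModel({
    modelPath: "path/to/model.gguf"
});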

getExampleUsageCodeOfGetLlama.js (21 lines, 1.26 kB)
import chalk from "chalk";
import stripAnsi from "strip-ansi";
import { prettyPrintObject } from "../../utils/prettyPrintObject.js";
import { getLlamaFunctionName } from "../getLlama.js";

// Builds a syntax-highlighted example snippet showing how to import and call getLlama().
export function getExampleUsageCodeOfGetLlama(getLlamaOptions, prefix = "", wrapWithSeparators = true) {
    let res = prefix + [
        chalk.magenta.italic("import "), chalk.whiteBright("{"), chalk.yellow(getLlamaFunctionName), chalk.whiteBright("} "),
        chalk.magenta.italic("from "), chalk.green("\"node-llama-cpp\""), chalk.whiteBright(";"),
        "\n\n",
        chalk.magenta.italic("const "), chalk.whiteBright("llama "), chalk.whiteBright("= "),
        chalk.magenta.italic("await "), chalk.yellow(getLlamaFunctionName), chalk.whiteBright("("),
        getLlamaOptions === undefined
            ? ""
            : prettyPrintObject(getLlamaOptions),
        chalk.whiteBright(")"), chalk.whiteBright(";")
    ].join(prefix);

    if (wrapWithSeparators) {
        // Wrap the snippet with horizontal rules as wide as its longest visible line
        // (ANSI color codes are stripped before measuring).
        const longestLineLength = res.split("\n")
            .reduce((max, line) => Math.max(max, stripAnsi(line).length), 0);

        res = chalk.blue("-".repeat(longestLineLength)) + "\n" +
            res + "\n" +
            chalk.blue("-".repeat(longestLineLength));
    }

    return res;
}
//# sourceMappingURL=getExampleUsageCodeOfGetLlama.js.map
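
A hedged usage sketch of this helper from another module in the package (the relative import path is assumed, and { gpu: false } is only an illustrative getLlama option):

import { getExampleUsageCodeOfGetLlama } from "./getExampleUsageCodeOfGetLlama.js";

// Print a separator-wrapped, syntax-highlighted getLlama() example,
// e.g. as part of a CLI hint message.
console.log(getExampleUsageCodeOfGetLlama({ gpu: false }));

// Print the plain two-line example without the surrounding separators.
console.log(getExampleUsageCodeOfGetLlama(undefined, "", false));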