node-llama-cpp
Version:
Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.
14 lines • 520 B
JavaScript
import os from "os";
import path from "path";
/**
 * Produces a human-friendly representation of a file system path,
 * abbreviating the user's home directory prefix to `~` on POSIX systems.
 *
 * @param {string} fsPath - Path to display; relative paths are resolved
 *     against the current working directory.
 * @returns {string} The resolved absolute path, with the home directory
 *     shown as `~` where applicable. On Windows and Cygwin the resolved
 *     path is returned unchanged.
 */
export function getReadablePath(fsPath) {
    const absolutePath = path.resolve(process.cwd(), fsPath);

    // The "~" abbreviation is a POSIX shell convention; skip it on
    // Windows-like platforms where it would be misleading.
    const isWindowsLike = process.platform === "win32" || process.platform === "cygwin";
    if (isWindowsLike)
        return absolutePath;

    const home = os.homedir();
    const homeWithSlash = home.endsWith("/")
        ? home
        : home + "/";

    if (!absolutePath.startsWith(homeWithSlash))
        return absolutePath;

    // Keep the trailing "/" of the home prefix so the result reads "~/rest".
    return "~" + absolutePath.slice(homeWithSlash.length - 1);
}
//# sourceMappingURL=getReadablePath.js.map