node-llama-cpp
Version:
Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.
13 lines • 489 B
JavaScript
import path from "path";
import { fileURLToPath } from "url";
import fs from "fs-extra";
const __dirname = path.dirname(fileURLToPath(import.meta.url));

// Lazily-populated cache for the version string, so package.json is read at most once.
let moduleVersion = null;

/**
 * Resolves the module's version by reading the `version` field of the
 * package.json located two directories above this file.
 * The value is cached after the first successful read; subsequent calls
 * return the cached string without touching the filesystem.
 * @returns {Promise<string>} the package version (e.g. "3.1.0")
 */
export async function getModuleVersion() {
    if (moduleVersion == null) {
        const packageJsonPath = path.join(__dirname, "..", "..", "package.json");
        const { version } = await fs.readJson(packageJsonPath);
        moduleVersion = version;
    }

    return moduleVersion;
}
//# sourceMappingURL=getModuleVersion.js.map