node-llama-cpp
Version:
Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.
16 lines • 522 B
JavaScript
import process from "process";
import fs from "fs-extra";
import { tempDownloadDirectory } from "../config.js";
/**
 * Deletes the temporary download directory.
 *
 * On Windows the removal is best-effort: deletion occasionally fails there
 * (e.g. due to lingering file locks), and since the directory only holds
 * temporary downloads, such failures are intentionally ignored. On every
 * other platform a removal failure propagates to the caller.
 *
 * @returns {Promise<void>} resolves once removal has been attempted
 */
export async function clearTempFolder() {
    const isBestEffort = process.platform === "win32";
    try {
        await fs.remove(tempDownloadDirectory);
    }
    catch (err) {
        // Only Windows gets the lenient treatment; rethrow everywhere else.
        if (!isBestEffort)
            throw err;
    }
}
//# sourceMappingURL=clearTempFolder.js.map