node-llama-cpp
Version:
Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.
10 lines (9 loc) • 343 B
TypeScript
/**
 * Resolved shape of the object returned by {@link getLinuxDistroInfo}.
 *
 * Derived via `Awaited<ReturnType<...>>` so it automatically stays in sync
 * with the function's return type.
 */
export type LinuxDistroInfo = Awaited<ReturnType<typeof getLinuxDistroInfo>>;
/**
 * Retrieves identifying information about the current Linux distribution.
 *
 * NOTE(review): the field names mirror the standard keys of
 * `/etc/os-release` (`NAME`, `ID`, `VERSION_ID`, `VERSION_CODENAME`,
 * `PRETTY_NAME`) — presumably that file is the data source, but the
 * implementation is not visible here; confirm against the implementation.
 *
 * @returns A promise resolving to the distro's name, id, version,
 *          version codename, and pretty (human-readable) name.
 */
export declare function getLinuxDistroInfo(): Promise<{
    name: string;
    id: string;
    version: string;
    versionCodename: string;
    prettyName: string;
}>;
/**
 * Determines whether the given distro info describes Alpine Linux.
 *
 * NOTE(review): the check is async (returns a `Promise`), which suggests it
 * may consult the filesystem rather than only inspecting the passed object —
 * the implementation is not visible here; confirm before relying on purity.
 *
 * @param linuxDistroInfo - Distro info previously obtained from
 *                          {@link getLinuxDistroInfo}.
 * @returns A promise resolving to `true` if the distro is Alpine Linux.
 */
export declare function isDistroAlpineLinux(linuxDistroInfo: LinuxDistroInfo): Promise<boolean>;