node-llama-cpp
Version:
Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.
15 lines (14 loc) • 472 B
TypeScript
/**
 * Package names to install, grouped by platform and package manager.
 *
 * Every group and list is optional; an absent group means there is
 * nothing to install via that platform/manager.
 */
type DistroPackages = {
linuxPackages?: {
// Package names for apt-based distros (e.g. Debian/Ubuntu).
apt?: string[];
// Package names for apk-based distros (e.g. Alpine).
apk?: string[];
};
macOsPackages?: {
// Package names for Homebrew on macOS.
brew?: string[];
};
};
/**
 * Logs an installation instruction for the given packages.
 *
 * NOTE(review): declaration only — the implementation is not visible here.
 * Presumably it selects the instruction matching the current OS/distro and
 * writes it to the log; confirm against the implementation.
 *
 * @param prefixText - Text placed before the instruction in the log output.
 * @param distroPackages - Per-platform package lists to build the instruction from.
 * @param options - `forceLogPrefix` presumably forces `prefixText` to be
 * logged even when it otherwise would not be — confirm in the implementation.
 */
export declare function logDistroInstallInstruction(prefixText: string, distroPackages: DistroPackages, { forceLogPrefix }?: {
forceLogPrefix?: boolean;
}): Promise<void>;
/**
 * Builds an installation instruction string from the given package lists.
 *
 * NOTE(review): declaration only — implementation not visible. The
 * `string | null` return suggests `null` is returned when no instruction
 * applies (e.g. no packages for the current platform); confirm in the
 * implementation.
 *
 * @param distroPackages - Per-platform package lists (see {@link DistroPackages}).
 * @returns The instruction text, or `null` when none applies.
 */
export declare function getDistroInstallInstruction({ linuxPackages, macOsPackages }: DistroPackages): Promise<string | null>;
export {};