node-llama-cpp
Version:
Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.
16 lines (15 loc) • 611 B
TypeScript
import { Llama } from "../../bindings/Llama.js";
/**
 * Resolves a user-supplied GGUF model path (or model URI) into a concrete local file path,
 * downloading/locating the model as needed for CLI commands.
 *
 * @param ggufPath - Path or URI to a GGUF model; may be `undefined`
 *   (presumably triggering an interactive or default model selection — TODO confirm against implementation)
 * @param llama - The `Llama` bindings instance used during resolution
 * @param fetchHeaders - Optional HTTP headers, presumably applied when fetching remote models — verify against implementation
 * @param options - Optional resolution settings:
 *   - `targetDirectory`: directory in which to place/look for the model file
 *   - `flashAttention`: flash-attention preference (likely affects model compatibility checks — TODO confirm)
 *   - `useMmap`: memory-mapping preference
 *   - `consoleTitle`: title text, presumably shown in console UI during resolution — verify against caller
 * @returns A promise resolving to the local filesystem path of the GGUF model file
 */
export declare function resolveCommandGgufPath(ggufPath: string | undefined, llama: Llama, fetchHeaders?: Record<string, string>, { targetDirectory, flashAttention, useMmap, consoleTitle }?: {
    targetDirectory?: string;
    flashAttention?: boolean;
    useMmap?: boolean;
    consoleTitle?: string;
}): Promise<string>;
/**
 * Attempts to coerce a given model path into a canonical model URI.
 *
 * @param ggufPath - The raw path/URI string to coerce
 * @returns On success, the coerced `uri` together with `modifiedRegion` —
 *   `start`/`end` indices (presumably character offsets into the returned `uri`
 *   marking the rewritten span — TODO confirm against implementation);
 *   `undefined` when no coercion applies.
 */
export declare function tryCoercingModelUri(ggufPath: string): {
    uri: string;
    modifiedRegion: {
        start: number;
        end: number;
    };
} | undefined;
/**
 * Prints a "did you mean" suggestion for the given model path
 * (presumably using `tryCoercingModelUri` to produce the suggested URI — TODO confirm).
 * Side effect only: writes to the console; returns nothing.
 *
 * @param ggufPath - The path/URI the user originally provided
 */
export declare function printDidYouMeanUri(ggufPath: string): void;