node-llama-cpp
Version:
Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.
4 lines (3 loc) • 402 B
TypeScript
import { GgufArchitectureType, GgufMetadata } from "../types/GgufMetadataTypes.js";
import { MergeOptionalUnionTypes } from "../../utils/mergeUnionTypes.js";
/**
 * Extracts the architecture-specific metadata section from parsed GGUF metadata.
 *
 * The return type is conditional on how narrow `T` is:
 * - When `T` is not narrowed (i.e. it is still the full `GgufArchitectureType`
 *   union, so `GgufArchitectureType extends T` holds), the result is the merge
 *   of all possible architecture metadata shapes with their fields made
 *   optional (`MergeOptionalUnionTypes`), since the concrete architecture is
 *   unknown at the type level.
 * - When `T` is a specific architecture, the result is exactly that
 *   architecture's metadata entry, `GgufMetadata<T>[T]`.
 *
 * NOTE(review): presumably this indexes `ggufMetadata` by its architecture
 * key at runtime — the implementation is not visible in this declaration
 * file, so confirm against the corresponding .js source.
 *
 * @param ggufMetadata - Parsed GGUF metadata, parameterized by architecture type `T`.
 * @returns The metadata sub-object for the model's architecture (see the
 *          conditional return type above).
 */
export declare function getGgufMetadataArchitectureData<const T extends GgufArchitectureType>(ggufMetadata: GgufMetadata<T>): (GgufArchitectureType extends T ? MergeOptionalUnionTypes<Exclude<GgufMetadata[T], undefined>> : GgufMetadata<T>[T]);