node-llama-cpp

Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.
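
For context, a minimal sketch of what that JSON-schema enforcement can look like in practice, based on the library's documented getLlama / LlamaChatSession / createGrammarForJsonSchema API; the model path and the schema used here are placeholders, not part of this file.

import {fileURLToPath} from "url";
import path from "path";
import {getLlama, LlamaChatSession} from "node-llama-cpp";

const __dirname = path.dirname(fileURLToPath(import.meta.url));

// Load a local GGUF model and open a chat session.
const llama = await getLlama();
const model = await llama.loadModel({
    modelPath: path.join(__dirname, "models", "model.gguf") // placeholder path
});
const context = await model.createContext();
const session = new LlamaChatSession({
    contextSequence: context.getSequence()
});

// Build a grammar from a JSON schema; generation is then constrained to match it.
const grammar = await llama.createGrammarForJsonSchema({
    type: "object",
    properties: {
        summary: {type: "string"},
        sentiment: {enum: ["positive", "neutral", "negative"]}
    }
});

const response = await session.prompt("Summarize this review: ...", {grammar});
const parsed = grammar.parse(response); // object matching the schema
console.log(parsed.summary, parsed.sentiment);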

// Removes the `readonly` modifier from every property of T.
export type Writable<T> = {
    -readonly [P in keyof T]: T[P];
};

// Picks from Value only the properties whose key is set to `true` in the Options map.
export type PickOptions<Value extends Readonly<Record<string, any>>, Options extends {
    readonly [key: string]: boolean | undefined;
}> = Pick<Value, {
    [Key in keyof Value]: Key extends keyof Options
        ? Options[Key] extends true
            ? Key
            : never
        : never;
}[keyof Value]>;
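
For illustration, here is how these utilities resolve against a hypothetical GenerationConfig type; the type name and its properties are made up for this example and do not come from the package.

// Hypothetical input type, used only to illustrate the utilities above.
interface GenerationConfig {
    readonly temperature: number;
    readonly topP: number;
    readonly seed: number;
}

// Writable strips the `readonly` modifier from every property:
// { temperature: number; topP: number; seed: number; }
type MutableGenerationConfig = Writable<GenerationConfig>;

// PickOptions keeps only the keys flagged `true` in the options map
// (readonly modifiers are preserved by Pick):
// { readonly temperature: number; readonly topP: number; }
type SamplingOptions = PickOptions<GenerationConfig, {temperature: true, topP: true, seed: false}>;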