
node-llama-cpp

Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.

export declare class LruCache<Key, Value> {
    /** Maximum number of entries the cache holds before evicting. */
    readonly maxSize: number;
    /** Optional callback invoked when an entry is removed from the cache. */
    constructor(maxSize: number, { onDelete }?: {
        onDelete?(key: Key, value: Value): void;
    });
    /** Returns the cached value for `key`, or `undefined` if it is not present. */
    get(key: Key): NonNullable<Value> | undefined;
    /** Stores `value` under `key` and returns the cache for chaining. */
    set(key: Key, value: Value): this;
    /** The first key in the cache's iteration order, or `undefined` when the cache is empty. */
    get firstKey(): Key | undefined;
    clear(): void;
    keys(): MapIterator<Key>;
    /** Removes the entry for `key`, if present. */
    delete(key: Key): void;
}
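
This file is a type declaration only; the implementation ships separately inside the package. As a rough illustration of how a class with this shape could work, here is a minimal sketch (not node-llama-cpp's actual code) that keeps entries in a Map and relies on the Map's insertion order: reading a key re-inserts it, so the least recently used entry is always first and is the one evicted once maxSize is exceeded. The class name LruCacheSketch and the assumption that onDelete fires only on eviction are illustrative choices, not taken from the library.

    // Minimal LRU cache sketch matching the declared interface above (assumptions noted in the comments).
    class LruCacheSketch<Key, Value> {
        public readonly maxSize: number;
        private readonly _map = new Map<Key, Value>();
        private readonly _onDelete?: (key: Key, value: Value) => void;

        public constructor(maxSize: number, { onDelete }: { onDelete?(key: Key, value: Value): void } = {}) {
            this.maxSize = maxSize;
            this._onDelete = onDelete;
        }

        public get(key: Key): NonNullable<Value> | undefined {
            if (!this._map.has(key))
                return undefined;

            const value = this._map.get(key)!;
            // Re-insert the entry so it becomes the most recently used one
            this._map.delete(key);
            this._map.set(key, value);
            return value as NonNullable<Value>;
        }

        public set(key: Key, value: Value): this {
            if (this._map.has(key))
                this._map.delete(key);
            else if (this._map.size > 0 && this._map.size >= this.maxSize) {
                // Evict the least recently used entry and notify via onDelete (assumed behavior)
                const oldestKey = this._map.keys().next().value as Key;
                const oldestValue = this._map.get(oldestKey)!;
                this._map.delete(oldestKey);
                this._onDelete?.(oldestKey, oldestValue);
            }

            this._map.set(key, value);
            return this;
        }

        public get firstKey(): Key | undefined {
            // The first key in iteration order is the least recently used one
            return this._map.keys().next().value;
        }

        public clear(): void {
            this._map.clear();
        }

        public keys() {
            return this._map.keys();
        }

        public delete(key: Key): void {
            this._map.delete(key);
        }
    }

    // Usage example: with maxSize 2, touching "a" makes "b" the eviction candidate
    const cache = new LruCacheSketch<string, number>(2, {
        onDelete: (key) => console.log(`evicted ${key}`)
    });
    cache.set("a", 1).set("b", 2);
    cache.get("a");
    cache.set("c", 3); // evicts "b", the least recently used entry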