UNPKG

node-llama-cpp

Version:

Run AI models locally on your machine with Node.js bindings for llama.cpp, and enforce a JSON schema on the model's output at the generation level.

16 lines (15 loc) 792 B
import { GbnfJsonSchema, GbnfJsonSchemaToType } from "../../../utils/gbnfJson/types.js";
import { ChatSessionModelFunction } from "../../../types.js";
/**
 * Define a function that can be used by the model in a chat session, and return it.
 *
 * This is a helper function to facilitate defining functions with full TypeScript type information:
 * the `const Params` type parameter preserves the literal shape of the GBNF JSON schema passed as
 * `params`, so the `handler` receives a precisely-typed argument derived from that schema.
 *
 * The handler function can return a Promise, and the return value will be awaited before being
 * returned to the model.
 * @param options - The function definition.
 * @param options.description - Optional human-readable description, presumably exposed to the model
 * to help it decide when to call the function — TODO confirm against the implementation.
 * @param options.params - Optional GBNF JSON schema describing the parameters the model must
 * provide when calling the function.
 * @param options.handler - Invoked with parameters matching `params` (typed via
 * `GbnfJsonSchemaToType`); may be sync or async.
 * @returns A `ChatSessionModelFunction<Params>` suitable for passing to a chat session.
 */
export declare function defineChatSessionFunction<const Params extends GbnfJsonSchema>({ description, params, handler }: {
    description?: string;
    params?: Readonly<Params>;
    handler: (params: GbnfJsonSchemaToType<Params>) => Promise<any> | any;
}): ChatSessionModelFunction<Params>;