node-llama-cpp

Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.

export declare function basicChooseFromListConsoleInteraction<T>({ title, footer, items, renderItem, canFocusItem, canSelectItem, initialFocusIndex, aboveItemsPadding, belowItemsPadding, renderSummaryOnExit, exitOnCtrlC }: {
    title: string | ((focusedItem: T, rerender: () => void) => string);
    footer?: string | ((focusedItem: T, rerender: () => void) => string | undefined);
    items: T[];
    renderItem(item: T, focused: boolean, rerender: () => void): string;
    canFocusItem?(item: T): boolean;
    canSelectItem?(item: T): boolean;
    initialFocusIndex?: number;
    aboveItemsPadding?: number;
    belowItemsPadding?: number;
    renderSummaryOnExit?(item: T | null): string;
    exitOnCtrlC?: boolean;
}): Promise<T | null>;
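
Judging from the signature, this helper renders an interactive list in the console, lets the user move focus between items and pick one, and resolves with the selected item or null if the interaction is exited without a selection. Below is a minimal usage sketch based only on the declaration above; the import path is an assumption, since this function appears to be an internal CLI utility of node-llama-cpp rather than a documented part of its public API, and the model names are made up for illustration.

// Usage sketch; the import path is assumed and may not be publicly exported.
import { basicChooseFromListConsoleInteraction } from "node-llama-cpp";

const models = ["llama-3-8b", "mistral-7b", "phi-3-mini"];

const selected = await basicChooseFromListConsoleInteraction({
    title: "Select a model",
    items: models,
    // Mark the currently focused row when rendering each item
    renderItem: (item, focused) => (focused ? `> ${item}` : `  ${item}`),
    initialFocusIndex: 0,
    exitOnCtrlC: true
});

if (selected != null)
    console.log(`You picked: ${selected}`);
else
    console.log("No model selected");

The title and footer options also accept functions that receive the focused item and a rerender callback, which suggests they can be recomputed dynamically as the focus changes; the optional canFocusItem and canSelectItem predicates let you include non-selectable rows such as separators.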