// llama-flow — the TypeScript-first prompt engineering toolkit for working
// with chat-based LLMs. Generated type declarations (utils.d.ts).
/// <reference types="debug" />
import type { RawPrompt } from './types';
/**
 * Logging helpers built on the `debug` package.
 *
 * - `error` / `log`: namespaced `debug` Debugger instances.
 * - `write`: writes a raw string; the return type
 *   (`boolean | "" | null | undefined`) mirrors whatever the underlying
 *   implementation returns — presumably a stream/debugger write result.
 *   NOTE(review): exact semantics live in utils.ts; confirm there.
 */
export declare const debug: {
error: import("debug").Debugger;
log: import("debug").Debugger;
write: (t: string) => boolean | "" | null | undefined;
};
/**
 * Returns a promise that resolves after `delay` milliseconds.
 * @param delay - wait time in ms (presumed unit; confirm in utils.ts).
 */
export declare function sleep(delay: number): Promise<unknown>;
/** A value of type `T`, either direct or wrapped in a `Promise`. */
export type MaybePromise<T> = Promise<T> | T;
/**
 * Resolves to `true` when property `K` of `T` is callable, else `false`.
 * NOTE(review): the second branch is redundant — a function returning
 * `Promise<any>` already extends `(...args: any[]) => any` — but it is
 * harmless and kept as emitted by the compiler.
 */
type IsFunction<T, K extends keyof T> = T[K] extends (...args: any[]) => any ? true : T[K] extends (...args: any[]) => Promise<any> ? true : false;
/**
 * Extracts the type of `T`'s `data` property; `undefined` when `T` has no
 * `data` key. Because `data` is matched as optional, the picked type may
 * itself include `undefined`.
 */
type PickData<T> = T extends {
data?: any;
} ? T['data'] : undefined;
/**
 * The awaited return type of a `RawPrompt`'s `parse` function, or `never`
 * when `parse` is absent or not callable (checked via `IsFunction` on
 * `Required<T>` so an optional `parse` still counts).
 */
type GetRawPromptResponse<T extends RawPrompt> = IsFunction<Required<T>, 'parse'> extends true ? Awaited<ReturnType<NonNullable<T['parse']>>> : never;
/**
 * The non-nullable `data` payload type carried by a `RawPrompt`'s parse
 * result; `never` when that result is not an object type.
 */
type GetRawPromptDataType<T extends RawPrompt> = GetRawPromptResponse<T> extends object ? NonNullable<PickData<GetRawPromptResponse<T>>> : never;
/**
 * Result type of running a prompt: plain `string` for string prompts,
 * the parsed `data` type for a `RawPrompt`, and `never` otherwise.
 */
export type PromptReturnType<T extends string | RawPrompt> = T extends string ? string : T extends RawPrompt<any> ? GetRawPromptDataType<T> : never;
export {};
//# sourceMappingURL=utils.d.ts.map