/**
 * @autobe/agent — AI backend server code generator
 * (package metadata header — version not recorded; 6 lines, 5 loc, 328 B, TypeScript)
 */
import { ILlmSchema } from "@samchon/openapi";
import { AutoBeContext } from "../context/AutoBeContext";
/**
 * Runs every task in `taskList` and resolves with all results as `T[]`.
 *
 * Declaration only — the implementation is not visible in this file, so the
 * notes below are contracts inferred from the signature, hedged where the
 * signature alone cannot establish them.
 *
 * @param ctx - Agent context parameterized over the LLM schema `Model`.
 * @param taskList - Tasks to execute; each receives a string argument (named
 *   `user` in the `Task` alias) and resolves to a `T`.
 * @param promptCacheKey - Optional key. NOTE(review): name suggests it enables
 *   prompt-cache reuse across the batched calls — confirm against the
 *   implementation; semantics are not visible from this declaration.
 * @returns Promise of the task results. NOTE(review): presumably in input
 *   order and presumably batched/cached per the function name — verify; the
 *   declaration guarantees only `Promise<T[]>`.
 */
export declare const executeCachedBatch: <Model extends ILlmSchema.Model, T>(ctx: AutoBeContext<Model>, taskList: Task<T>[], promptCacheKey?: string) => Promise<T[]>;
/**
 * A single unit of batched work: given a string argument (named `user` —
 * presumably a user-role prompt/message; confirm against callers), resolves
 * to a value of type `T`.
 */
type Task<T> = (user: string) => Promise<T>;
// Empty export marks this declaration file as a module, keeping `Task`
// file-local instead of polluting the global scope.
export {};