@agentica/core
Agentic AI Library specialized in LLM Function Calling
TypeScript
import type OpenAI from "openai";
import type { AgenticaTokenUsage } from "../context/AgenticaTokenUsage";
import type { AgenticaEventSource, AgenticaRequestEvent, AgenticaResponseEvent } from "../events";
import type { IAgenticaConfig, IAgenticaVendor, IMicroAgenticaConfig } from "../structures";
export declare function getChatCompletionWithStreamingFunction(props: {
  vendor: IAgenticaVendor;
  config?: IAgenticaConfig | IMicroAgenticaConfig;
  dispatch: (event: AgenticaRequestEvent | AgenticaResponseEvent) => Promise<void>;
  abortSignal?: AbortSignal;
  usage: AgenticaTokenUsage;
}): (
  source: AgenticaEventSource,
  body: Omit<OpenAI.ChatCompletionCreateParamsStreaming, "model" | "stream">,
) => Promise<ReadableStream<OpenAI.Chat.Completions.ChatCompletionChunk>>;
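A minimal usage sketch, for orientation only. Several details below are assumptions not confirmed by the declaration above: that the function and AgenticaTokenUsage are importable from the package root, that IAgenticaVendor pairs an OpenAI SDK client with a model id, that AgenticaTokenUsage is default-constructible, and that "initialize" is a valid AgenticaEventSource value. Only the property names and types shown in the declaration are taken as given.

TypeScript
import OpenAI from "openai";

// Assumption: both names are re-exported from the package root; in the actual
// package they may live under internal module paths instead.
import {
  AgenticaTokenUsage,
  getChatCompletionWithStreamingFunction,
} from "@agentica/core";

async function demo(): Promise<void> {
  const controller: AbortController = new AbortController();

  const complete = getChatCompletionWithStreamingFunction({
    // Assumption: IAgenticaVendor pairs an OpenAI SDK instance with a model id.
    vendor: {
      api: new OpenAI({ apiKey: process.env.OPENAI_API_KEY }),
      model: "gpt-4o-mini",
    },
    // Every outbound request and inbound response event is reported here.
    dispatch: async (event) => {
      console.log(event);
    },
    abortSignal: controller.signal,
    // Assumption: the token-usage accumulator is default-constructible.
    usage: new AgenticaTokenUsage(),
  });

  // The returned closure supplies "model" and "stream" itself, so the body
  // omits them. "initialize" is an assumed member of AgenticaEventSource.
  const stream = await complete("initialize", {
    messages: [{ role: "user", content: "Hello!" }],
  });

  // Drain the ReadableStream of ChatCompletionChunk objects.
  const reader = stream.getReader();
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    process.stdout.write(value.choices[0]?.delta?.content ?? "");
  }
}

demo().catch(console.error);

The factory shape lets every call share one vendor, dispatcher, abort signal, and token-usage accumulator, while each individual call supplies only its event source and the chat-completion body.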