@agentica/core
Version:
Agentic AI Library specialized in LLM Function Calling
9 lines (8 loc) • 437 B
TypeScript
import type { ChatCompletion, ChatCompletionChunk } from "openai/resources";
/**
 * Accessors handed to the event processor while a chat-completion
 * stream is being consumed.
 */
type StreamDispatchProps = {
    /**
     * Async generator of string pieces as they arrive — presumably the
     * incremental text deltas of the completion (confirm against the
     * implementation).
     */
    stream: AsyncGenerator<string, undefined, undefined>;
    /** Whether the underlying stream has finished. */
    done: () => boolean;
    /** The text accumulated so far. */
    get: () => string;
    /** Resolves with the complete text once streaming finishes. */
    join: () => Promise<string>;
};
/**
 * Consumes a `ReadableStream` of OpenAI `ChatCompletionChunk`s and reduces
 * it into a single non-streaming `ChatCompletion`, dispatching the in-flight
 * text to a caller-supplied processor along the way.
 *
 * @param stream - Chunk stream as produced by the OpenAI streaming API.
 * @param eventProcessor - Invoked with {@link StreamDispatchProps} accessors
 *   over the streamed text; its return value is ignored.
 * @param abortSignal - Optional signal to cancel consumption of the stream.
 * @returns The reduced `ChatCompletion`.
 */
declare function reduceStreamingWithDispatch(
    stream: ReadableStream<ChatCompletionChunk>,
    eventProcessor: (props: StreamDispatchProps) => void,
    abortSignal?: AbortSignal,
): Promise<ChatCompletion>;
export { reduceStreamingWithDispatch };