// @langchain/langgraph
import { BaseCallbackHandler, HandleLLMNewTokenCallbackFields, NewTokenIndices } from "@langchain/core/callbacks/base";
import { BaseMessage } from "@langchain/core/messages";
import { Serialized } from "@langchain/core/load/serializable";
import { LLMResult } from "@langchain/core/outputs";
import { ChainValues } from "@langchain/core/utils/types";
import { StreamChunk } from "./stream.js";
/** Pairs a run's namespace path with its metadata: [namespace, metadata]. */
type Meta = [string[], Record<string, any>];
/**
* A callback handler that implements stream_mode=messages.
* Collects messages from (1) chat model stream events and (2) node outputs.
*/
export declare class StreamMessagesHandler extends BaseCallbackHandler {
    name: string;
    /** Sink that receives every emitted stream chunk. */
    streamFn: (streamChunk: StreamChunk) => void;
    /** Namespace path and metadata for each run, keyed by runId. */
    metadatas: Record<string, Meta>;
    /** Messages already emitted, keyed by message id, used for deduplication. */
    seen: Record<string, BaseMessage>;
    /** Run ids whose tokens were streamed, so their final output is not re-emitted. */
    emittedChatModelRunIds: Record<string, boolean>;
    /** Maps per-run message ids to stable ids so chunks of one message stay correlated. */
    stableMessageIdMap: Record<string, string>;
    /** Hints to chat models that token-level streaming is preferred. */
    lc_prefer_streaming: boolean;
    constructor(streamFn: (streamChunk: StreamChunk) => void);
    /** Sends a message and its run metadata through streamFn, optionally deduplicating against `seen`. */
    _emit(meta: Meta, message: BaseMessage, runId: string | undefined, dedupe?: boolean): void;
    /** Records the run's namespace/metadata so streamed tokens can be attributed to it. */
    handleChatModelStart(_llm: Serialized, _messages: BaseMessage[][], runId: string, _parentRunId?: string, _extraParams?: Record<string, unknown>, tags?: string[], metadata?: Record<string, unknown>, name?: string): void;
    /** Emits each streamed token as a message chunk. */
    handleLLMNewToken(token: string, _idx: NewTokenIndices, runId: string, _parentRunId?: string, _tags?: string[], fields?: HandleLLMNewTokenCallbackFields): void;
    /** Emits the final generation for runs that produced no token stream. */
    handleLLMEnd(output: LLMResult, runId: string): void;
    /** Clears bookkeeping for a failed chat model run. */
    handleLLMError(_err: any, runId: string): void;
    /** Records namespace/metadata for a graph node run. */
    handleChainStart(_chain: Serialized, inputs: ChainValues, runId: string, _parentRunId?: string, tags?: string[], metadata?: Record<string, unknown>, _runType?: string, name?: string): void;
    /** Emits messages found in node outputs, skipping any already streamed. */
    handleChainEnd(outputs: ChainValues, runId: string): void;
    /** Clears bookkeeping for a failed node run. */
    handleChainError(_err: any, runId: string): void;
}
export {};
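// Usage sketch (illustrative, not part of this declaration file). Normally this
// handler is constructed internally by `app.stream(input, { streamMode: "messages" })`;
// the manual wiring below assumes a compiled graph `app` from @langchain/langgraph
// and simply logs each chunk the handler emits:
//
//   import { HumanMessage } from "@langchain/core/messages";
//
//   const handler = new StreamMessagesHandler((chunk) => {
//     // Each StreamChunk pairs a namespace path with the emitted payload.
//     console.log(chunk);
//   });
//   await app.invoke(
//     { messages: [new HumanMessage("hi")] },
//     { callbacks: [handler] },
//   );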