/**
 * @langgraph-js/sdk
 * Version: (unspecified)
 * The UI SDK for LangGraph — seamlessly integrate your AI agents with frontend interfaces.
 * 214 lines (213 loc) • 7.14 kB • TypeScript (declaration file)
 */
import { Client, Thread, Message, Assistant, HumanMessage, AIMessage, ToolMessage, Command } from "@langchain/langgraph-sdk";
import { ToolManager } from "./ToolManager.js";
import { CallToolResult } from "./tool/createTool.js";
/**
 * Options controlling how the underlying HTTP caller batches and retries
 * requests to the LangGraph backend.
 */
interface AsyncCallerParams {
    /**
     * The maximum number of concurrent calls that can be made.
     * Defaults to `Infinity`, which means no limit.
     */
    maxConcurrency?: number;
    /**
     * The maximum number of retries that can be made for a single call,
     * with an exponential backoff between each attempt. Defaults to 6.
     */
    maxRetries?: number;
    /** Hook invoked when a response fails. NOTE(review): untyped upstream — confirm exact signature against @langchain/langgraph-sdk. */
    onFailedResponseHook?: any;
    /**
     * Specify a custom fetch implementation.
     *
     * By default we expect the `fetch` is available in the global scope.
     */
    fetch?: typeof fetch | ((...args: any[]) => any);
}
/**
 * A `Message` enriched with rendering metadata aggregated by the client
 * (tool-call association, timing, token usage, unique render ids).
 */
export type RenderMessage = Message & {
    /** For an AIMessage this is the node name; for a tool message it is the tool name. */
    name?: string;
    /** Name of the node that triggered this tool message. */
    node_name?: string;
    /** Tool input arguments, aggregated from streamed chunks. */
    tool_input?: string;
    additional_kwargs?: {
        /** Whether streaming of this message has finished. */
        done?: boolean;
        /** Tool calls attached to the message, with their (possibly partial) argument strings. */
        tool_calls?: {
            function: {
                /** Raw JSON string of the tool-call arguments. */
                arguments: string;
            };
        }[];
    };
    /** Token usage reported for this message, when available. */
    usage_metadata?: {
        total_tokens: number;
        input_tokens: number;
        output_tokens: number;
    };
    /** Id linking a tool-result message back to its originating tool call. */
    tool_call_id?: string;
    response_metadata?: {
        /** Creation time of the message. NOTE(review): format not shown here — presumably an ISO-8601 string; confirm. */
        create_time: string;
    };
    /** Time spent producing this message. NOTE(review): units not specified here — presumably milliseconds; confirm. */
    spend_time?: number;
    /** Unique id used during rendering, aggregated by the client. */
    unique_id?: string;
    /** Whether the tool call has completed. */
    done?: boolean;
};
/** Options accepted by `LangGraphClient.sendMessage()`. */
export type SendMessageOptions = {
    /** Extra parameters forwarded with the run. */
    extraParams?: Record<string, any>;
    /** Debug-only hooks; not intended for production use. */
    _debug?: {
        /** Pre-canned stream response. NOTE(review): presumably used to simulate a run in tests — confirm. */
        streamResponse?: any;
    };
    /** Optional LangGraph `Command` to send instead of, or alongside, the input. */
    command?: Command;
};
/** Configuration used to construct a `LangGraphClient`. */
export interface LangGraphClientConfig {
    /** Base URL of the LangGraph API server. */
    apiUrl?: string;
    /** API key used to authenticate requests. */
    apiKey?: string;
    /** Concurrency / retry / custom-fetch options for the underlying HTTP caller. */
    callerOptions?: AsyncCallerParams;
    /** Request timeout in milliseconds. */
    timeoutMs?: number;
    /** Headers attached to every request; `null`/`undefined` values allowed. */
    defaultHeaders?: Record<string, string | null | undefined>;
}
/**
* @zh StreamingMessageType 类用于判断消息的类型。
* @en The StreamingMessageType class is used to determine the type of a message.
*/
export declare class StreamingMessageType {
    /** Narrows `m` to a human/user message. */
    static isUser(m: Message): m is HumanMessage;
    /** Narrows `m` to a tool-result message. */
    static isTool(m: Message): m is ToolMessage;
    /** Narrows `m` to an AI (assistant) message. */
    static isAssistant(m: Message): m is AIMessage;
    /** Narrows `m` to an AI message. NOTE(review): name suggests "AI message carrying tool calls" — confirm against the implementation. */
    static isToolAssistant(m: Message): m is AIMessage;
}
/** Event delivered to streaming-update subscribers during a run. */
type StreamingUpdateEvent = {
    /** Lifecycle/event category of this update. */
    type: "message" | "value" | "update" | "error" | "thread" | "done" | "start";
    /** Event payload; shape depends on `type`. */
    data: any;
};
/** Callback registered via `LangGraphClient.onStreamingUpdate()`. */
type StreamingUpdateCallback = (event: StreamingUpdateEvent) => void;
/**
* @zh LangGraphClient 类是与 LangGraph 后端交互的主要客户端。
* @en The LangGraphClient class is the main client for interacting with the LangGraph backend.
*/
export declare class LangGraphClient extends Client {
    /** The Assistant currently in use; see getCurrentAssistant(). */
    private currentAssistant;
    /** The Thread currently in use; see getCurrentThread(). */
    private currentThread;
    /** Callbacks registered via onStreamingUpdate(). */
    private streamingCallbacks;
    /** Manager for frontend-registered tools. */
    tools: ToolManager;
    /** Abort controller for the in-flight run; `null` when no run is active. */
    stopController: AbortController | null;
    constructor(config: LangGraphClientConfig);
    /** Assistants available on the backend. */
    availableAssistants: Assistant[];
    private listAssistants;
    /**
     * @zh 初始化 Assistant。
     * @en Initializes the Assistant.
     */
    initAssistant(agentName?: string): Promise<void>;
    /**
     * @zh 创建一个新的 Thread。
     * @en Creates a new Thread. An existing `threadId` may be supplied to reuse it.
     */
    createThread({ threadId, }?: {
        threadId?: string;
    }): Promise<Thread<import("@langchain/langgraph-sdk").DefaultValues>>;
    /** Fetches the assistant's graph structure (e.g. for visualization). */
    graphVisualize(): Promise<import("@langchain/langgraph-sdk").AssistantGraph>;
    /**
     * @zh 列出所有的 Thread。
     * @en Lists all Threads.
     */
    listThreads<T>(): Promise<Thread<T>[]>;
    /**
     * @zh 从历史中恢复 Thread 数据。
     * @en Restores the Thread data from history.
     */
    resetThread(agent: string, threadId: string): Promise<void>;
    /** Messages accumulated from the current streaming response. */
    streamingMessage: RenderMessage[];
    /** Update messages pushed from the graph. */
    graphMessages: RenderMessage[];
    cloneMessage(message: Message): Message;
    private updateStreamingMessage;
    /** Merges `graphMessages` and `streamingMessage` and returns a new message array. */
    private combineGraphMessagesWithStreamingMessages;
    /**
     * @zh 用于 UI 中的流式渲染中的消息。
     * @en Messages used for streaming rendering in the UI.
     */
    get renderMessage(): RenderMessage[];
    /**
     * @zh 为消息附加额外的信息,如耗时、唯一 ID 等。
     * @en Attaches additional information to messages, such as spend time, unique ID, etc.
     */
    private attachInfoForMessage;
    /**
     * @zh 组合工具消息,将 AI 的工具调用和工具的执行结果关联起来。
     * @en Composes tool messages, associating AI tool calls with tool execution results.
     */
    private composeToolMessages;
    /**
     * @zh 获取 Token 计数器信息。
     * @en Gets the Token counter information.
     */
    get tokenCounter(): {
        total_tokens: number;
        input_tokens: number;
        output_tokens: number;
    };
    /**
     * @zh 注册流式更新的回调函数。
     * @en Registers a callback function for streaming updates.
     * @returns An unsubscribe function that removes the callback.
     */
    onStreamingUpdate(callback: StreamingUpdateCallback): () => void;
    private emitStreamingUpdate;
    /** Locks the panel while a frontend tool awaits human interaction. */
    isFELocking(messages: RenderMessage[]): boolean | undefined;
    /** Latest graph state. NOTE(review): untyped — shape depends on the deployed graph. */
    graphState: any;
    /** The currently executing run, if any. */
    currentRun?: {
        run_id: string;
    };
    /**
     * @zh 取消当前的 Run。
     * @en Cancels the current Run.
     */
    cancelRun(): void;
    /**
     * @zh 发送消息到 LangGraph 后端。
     * @en Sends a message to the LangGraph backend.
     */
    sendMessage(input: string | Message[], { extraParams, _debug, command }?: SendMessageOptions): Promise<any[]>;
    /** Current sub-graph position; derived from the stream, so not suited for stable use. */
    private graphPosition;
    getGraphPosition(): {
        id: string;
        name: string;
    }[];
    getGraphNodeNow(): {
        id: string;
        name: string;
    };
    /** Sub-graph data must be re-merged (via merge) into the streaming messages. */
    private mergeSubGraphMessagesToStreamingMessages;
    private runFETool;
    private callFETool;
    /** Extra parameters applied to outgoing runs. */
    extraParams: Record<string, any>;
    /**
     * @zh 继续被前端工具中断的流程。
     * @en Resumes a process interrupted by a frontend tool.
     */
    resume(result: CallToolResult): Promise<any[]>;
    /**
     * @zh 标记前端工具等待已完成。
     * @en Marks the frontend tool waiting as completed.
     */
    doneFEToolWaiting(id: string, result: CallToolResult): void;
    /**
     * @zh 获取当前的 Thread。
     * @en Gets the current Thread, or `null` if none is active.
     */
    getCurrentThread(): Thread<import("@langchain/langgraph-sdk").DefaultValues> | null;
    /**
     * @zh 获取当前的 Assistant。
     * @en Gets the current Assistant, or `null` if none is initialized.
     */
    getCurrentAssistant(): Assistant | null;
    /**
     * @zh 重置客户端状态。
     * @en Resets the client state.
     */
    reset(): Promise<void>;
}
export {};