/**
 * @langgraph-js/sdk
 * The UI SDK for LangGraph - seamlessly integrate your AI agents with frontend interfaces.
 * (Header converted to a comment: the raw npm registry banner — version line and
 * "578 lines (577 loc) • 22.6 kB / JavaScript" listing — was not valid JavaScript.)
 */
import { Client } from "@langchain/langgraph-sdk";
import { ToolManager } from "./ToolManager.js";
/**
 * Classifies streaming messages by their `type` field.
 *
 * Messages follow the LangChain message shape: `type` is one of
 * "human", "tool", or "ai". AI messages may additionally carry a
 * `tool_calls` array (complete calls) or a `tool_call_chunks` array
 * (partial calls accumulated during streaming).
 */
export class StreamingMessageType {
    /**
     * @param {object} m - A streaming message.
     * @returns {boolean} true when the message was authored by the user.
     */
    static isUser(m) {
        return m.type === "human";
    }
    /**
     * @param {object} m - A streaming message.
     * @returns {boolean} true when the message is a tool execution result.
     */
    static isTool(m) {
        return m.type === "tool";
    }
    /**
     * @param {object} m - A streaming message.
     * @returns {boolean} true for plain AI text replies that carry no tool calls.
     */
    static isAssistant(m) {
        return m.type === "ai" && !this.isToolAssistant(m);
    }
    /**
     * @param {object} m - A streaming message.
     * @returns {boolean} true for AI messages that request tool execution,
     * either via completed `tool_calls` or streamed `tool_call_chunks`.
     *
     * Fix: coerce the result with Boolean() — the original returned the raw
     * array length (a number) or `undefined`, which violates the `is*` naming
     * contract; truthiness is unchanged, so callers are unaffected.
     */
    static isToolAssistant(m) {
        var _a, _b;
        /** @ts-ignore */
        return (m.type === "ai" &&
            Boolean(((_a = m.tool_calls) === null || _a === void 0 ? void 0 : _a.length) ||
                ((_b = m.tool_call_chunks) === null || _b === void 0 ? void 0 : _b.length)));
    }
}
/**
* @zh LangGraphClient 类是与 LangGraph 后端交互的主要客户端。
* @en The LangGraphClient class is the main client for interacting with the LangGraph backend.
*/
export class LangGraphClient extends Client {
constructor(config) {
super(config);
this.currentAssistant = null;
this.currentThread = null;
this.streamingCallbacks = new Set();
this.tools = new ToolManager();
this.stopController = null;
this.availableAssistants = [];
this.streamingMessage = [];
/** 图发过来的更新信息 */
this.graphMessages = [];
this.graphState = {};
/** 当前子图位置,但是依赖 stream,不太适合稳定使用*/
this.graphPosition = "";
this.extraParams = {};
}
listAssistants() {
return this.assistants.search({
metadata: null,
offset: 0,
limit: 100,
});
}
/**
* @zh 初始化 Assistant。
* @en Initializes the Assistant.
*/
async initAssistant(agentName) {
try {
const assistants = await this.listAssistants();
this.availableAssistants = assistants;
if (assistants.length > 0) {
if (agentName) {
this.currentAssistant = assistants.find((assistant) => assistant.graph_id === agentName) || null;
if (!this.currentAssistant) {
throw new Error("Agent not found: " + agentName);
}
}
else {
this.currentAssistant = assistants[0];
}
}
else {
throw new Error("No assistants found");
}
}
catch (error) {
console.error("Failed to initialize LangGraphClient:", error);
throw error;
}
}
/**
* @zh 创建一个新的 Thread。
* @en Creates a new Thread.
*/
async createThread({ threadId, } = {}) {
try {
this.currentThread = await this.threads.create({
threadId,
});
return this.currentThread;
}
catch (error) {
console.error("Failed to create new thread:", error);
throw error;
}
}
graphVisualize() {
var _a;
return this.assistants.getGraph((_a = this.currentAssistant) === null || _a === void 0 ? void 0 : _a.assistant_id, {
xray: true,
});
}
/**
* @zh 列出所有的 Thread。
* @en Lists all Threads.
*/
async listThreads() {
return this.threads.search({
sortOrder: "desc",
});
}
/**
* @zh 从历史中恢复 Thread 数据。
* @en Resets the Thread data from history.
*/
async resetThread(agent, threadId) {
await this.initAssistant(agent);
this.currentThread = await this.threads.get(threadId);
this.graphState = this.currentThread.values;
this.graphMessages = this.graphState.messages;
this.emitStreamingUpdate({
type: "value",
data: {
event: "messages/partial",
data: {
messages: this.graphMessages,
},
},
});
}
cloneMessage(message) {
return JSON.parse(JSON.stringify(message));
}
updateStreamingMessage(message) {
const lastMessage = this.streamingMessage[this.streamingMessage.length - 1];
if (!(lastMessage === null || lastMessage === void 0 ? void 0 : lastMessage.id) || message.id !== lastMessage.id) {
this.streamingMessage.push(message);
return;
}
this.streamingMessage[this.streamingMessage.length - 1] = message;
}
/** 将 graphMessages 和 streamingMessage 合并,并返回新的消息数组 */
combineGraphMessagesWithStreamingMessages() {
const idMap = new Map(this.streamingMessage.map((i) => [i.id, i]));
return [
...this.graphMessages.map((i) => {
if (idMap.has(i.id)) {
const newValue = idMap.get(i.id);
idMap.delete(i.id);
return newValue;
}
return i;
}),
...idMap.values(),
];
}
/**
* @zh 用于 UI 中的流式渲染中的消息。
* @en Messages used for streaming rendering in the UI.
*/
get renderMessage() {
var _a;
const previousMessage = new Map();
const closedToolCallIds = new Set();
const result = [];
const inputMessages = this.combineGraphMessagesWithStreamingMessages();
// console.log(inputMessages);
// 从后往前遍历,这样可以保证最新的消息在前面
for (let i = inputMessages.length - 1; i >= 0; i--) {
const message = this.cloneMessage(inputMessages[i]);
if (!message.id) {
result.unshift(message);
continue;
}
if (message.type === "ai") {
/** @ts-ignore */
if (!message.name)
message.name = this.getGraphNodeNow().name;
}
if (StreamingMessageType.isToolAssistant(message)) {
const m = message;
// 记录这个 id 的消息,并添加到结果中
previousMessage.set(message.id, m);
/** @ts-ignore */
const tool_calls = ((_a = m.tool_calls) === null || _a === void 0 ? void 0 : _a.length) ? m.tool_calls : m.tool_call_chunks;
const new_tool_calls = tool_calls
.filter((i) => {
return !closedToolCallIds.has(i.id);
})
.map((tool, index) => {
var _a, _b, _c, _d;
return {
type: "tool",
additional_kwargs: {},
/** @ts-ignore */
tool_input: (_d = (_c = (_b = (_a = m.additional_kwargs) === null || _a === void 0 ? void 0 : _a.tool_calls) === null || _b === void 0 ? void 0 : _b[index]) === null || _c === void 0 ? void 0 : _c.function) === null || _d === void 0 ? void 0 : _d.arguments,
id: tool.id,
name: tool.name,
response_metadata: {},
tool_call_id: tool.id,
content: "",
};
});
for (const tool of new_tool_calls) {
if (!previousMessage.has(tool.id)) {
result.unshift(tool);
previousMessage.set(tool.id, tool);
}
}
result.unshift(m);
}
else {
if (message.type === "tool" && message.tool_call_id) {
closedToolCallIds.add(message.tool_call_id);
}
previousMessage.set(message.id, message);
result.unshift(message);
}
}
return this.attachInfoForMessage(this.composeToolMessages(result));
}
/**
* @zh 为消息附加额外的信息,如耗时、唯一 ID 等。
* @en Attaches additional information to messages, such as spend time, unique ID, etc.
*/
attachInfoForMessage(result) {
var _a, _b, _c;
let lastMessage = null;
for (const message of result) {
const createTime = ((_a = message.response_metadata) === null || _a === void 0 ? void 0 : _a.create_time) || "";
// 工具必须要使用 tool_call_id 来保证一致性
message.unique_id = message.tool_call_id || message.id;
message.spend_time = new Date(createTime).getTime() - new Date(((_b = lastMessage === null || lastMessage === void 0 ? void 0 : lastMessage.response_metadata) === null || _b === void 0 ? void 0 : _b.create_time) || createTime).getTime();
if (!message.usage_metadata && ((_c = message.response_metadata) === null || _c === void 0 ? void 0 : _c.usage)) {
const usage = message.response_metadata.usage;
message.usage_metadata = {
...usage,
input_tokens: usage.prompt_tokens,
output_tokens: usage.completion_tokens,
total_tokens: usage.total_tokens,
};
}
lastMessage = message;
}
return result;
}
/**
* @zh 组合工具消息,将 AI 的工具调用和工具的执行结果关联起来。
* @en Composes tool messages, associating AI tool calls with tool execution results.
*/
composeToolMessages(messages) {
var _a, _b;
const result = [];
const assistantToolMessages = new Map();
const toolParentMessage = new Map();
for (const message of messages) {
if (StreamingMessageType.isToolAssistant(message)) {
/** @ts-ignore 只有 tool_call_chunks 的 args 才是文本 */
(_a = (message.tool_calls || message.tool_call_chunks)) === null || _a === void 0 ? void 0 : _a.forEach((element) => {
assistantToolMessages.set(element.id, element);
toolParentMessage.set(element.id, message);
});
if (!message.content)
continue;
}
if (StreamingMessageType.isTool(message) && !message.tool_input) {
const assistantToolMessage = assistantToolMessages.get(message.tool_call_id);
const parentMessage = toolParentMessage.get(message.tool_call_id);
if (assistantToolMessage) {
message.tool_input = typeof assistantToolMessage.args !== "string" ? JSON.stringify(assistantToolMessage.args) : assistantToolMessage.args;
if (message.additional_kwargs) {
message.additional_kwargs.done = true;
message.done = true;
}
else {
message.done = true;
message.additional_kwargs = {
done: true,
};
}
}
if (parentMessage) {
message.usage_metadata = parentMessage.usage_metadata;
message.node_name = parentMessage.name;
// 修补特殊情况下,tool name 丢失的问题
if (!message.name) {
message.name = (_b = parentMessage.tool_calls.find((i) => i.id === message.tool_call_id)) === null || _b === void 0 ? void 0 : _b.name;
}
}
}
result.push(message);
}
return result;
}
/**
* @zh 获取 Token 计数器信息。
* @en Gets the Token counter information.
*/
get tokenCounter() {
return this.graphMessages.reduce((acc, message) => {
var _a, _b, _c, _d, _e;
if (message.usage_metadata) {
acc.total_tokens += ((_a = message.usage_metadata) === null || _a === void 0 ? void 0 : _a.total_tokens) || 0;
acc.input_tokens += ((_b = message.usage_metadata) === null || _b === void 0 ? void 0 : _b.input_tokens) || 0;
acc.output_tokens += ((_c = message.usage_metadata) === null || _c === void 0 ? void 0 : _c.output_tokens) || 0;
}
else if ((_d = message.response_metadata) === null || _d === void 0 ? void 0 : _d.usage) {
const usage = (_e = message.response_metadata) === null || _e === void 0 ? void 0 : _e.usage;
acc.total_tokens += usage.total_tokens || 0;
acc.input_tokens += usage.prompt_tokens || 0;
acc.output_tokens += usage.completion_tokens || 0;
}
return acc;
}, {
total_tokens: 0,
input_tokens: 0,
output_tokens: 0,
});
}
/**
* @zh 注册流式更新的回调函数。
* @en Registers a callback function for streaming updates.
*/
onStreamingUpdate(callback) {
this.streamingCallbacks.add(callback);
return () => {
this.streamingCallbacks.delete(callback);
};
}
emitStreamingUpdate(event) {
this.streamingCallbacks.forEach((callback) => callback(event));
}
/** 前端工具人机交互时,锁住面板 */
isFELocking(messages) {
var _a;
const lastMessage = messages[messages.length - 1];
if (!lastMessage) {
return false;
}
const tool = this.tools.getTool(lastMessage === null || lastMessage === void 0 ? void 0 : lastMessage.name);
return tool && tool.render && (lastMessage === null || lastMessage === void 0 ? void 0 : lastMessage.type) === "tool" && !((_a = lastMessage === null || lastMessage === void 0 ? void 0 : lastMessage.additional_kwargs) === null || _a === void 0 ? void 0 : _a.done);
}
/**
* @zh 取消当前的 Run。
* @en Cancels the current Run.
*/
cancelRun() {
var _a, _b;
if (((_a = this.currentThread) === null || _a === void 0 ? void 0 : _a.thread_id) && ((_b = this.currentRun) === null || _b === void 0 ? void 0 : _b.run_id)) {
this.runs.cancel(this.currentThread.thread_id, this.currentRun.run_id);
}
}
/**
* @zh 发送消息到 LangGraph 后端。
* @en Sends a message to the LangGraph backend.
*/
async sendMessage(input, { extraParams, _debug, command } = {}) {
var _a;
if (!this.currentAssistant) {
throw new Error("Thread or Assistant not initialized");
}
if (!this.currentThread) {
await this.createThread();
this.emitStreamingUpdate({
type: "thread",
data: {
event: "thread/create",
data: {
thread: this.currentThread,
},
},
});
}
const messagesToSend = Array.isArray(input)
? input
: [
{
type: "human",
content: input,
},
];
const streamResponse = (_debug === null || _debug === void 0 ? void 0 : _debug.streamResponse) ||
this.runs.stream(this.currentThread.thread_id, this.currentAssistant.assistant_id, {
input: {
...this.graphState,
...this.extraParams,
...(extraParams || {}),
messages: messagesToSend,
fe_tools: await this.tools.toJSON(this.currentAssistant.graph_id),
},
streamMode: ["messages", "values"],
streamSubgraphs: true,
command,
});
const streamRecord = [];
this.emitStreamingUpdate({
type: "start",
data: {
event: "start",
},
});
for await (const chunk of streamResponse) {
streamRecord.push(chunk);
if (chunk.event === "metadata") {
this.currentRun = chunk.data;
}
else if (chunk.event === "error") {
this.emitStreamingUpdate({
type: "error",
data: chunk,
});
}
else if (chunk.event === "messages/partial") {
for (const message of chunk.data) {
this.updateStreamingMessage(message);
}
this.emitStreamingUpdate({
type: "message",
data: chunk,
});
continue;
}
else if (chunk.event === "values") {
const data = chunk.data;
if (data.messages) {
const isResume = !!(command === null || command === void 0 ? void 0 : command.resume);
const isLongerThanLocal = data.messages.length >= this.graphMessages.length;
// resume 情况下,长度低于前端 message 的统统不接受
if (!isResume || (isResume && isLongerThanLocal)) {
this.graphMessages = data.messages;
this.emitStreamingUpdate({
type: "value",
data: chunk,
});
}
this.graphState = chunk.data;
}
continue;
}
else if (chunk.event.startsWith("values|")) {
// 这个 values 必然是子 values
if ((_a = chunk.data) === null || _a === void 0 ? void 0 : _a.messages) {
this.mergeSubGraphMessagesToStreamingMessages(chunk.data.messages);
}
this.graphPosition = chunk.event.split("|")[1];
}
}
const data = await this.runFETool();
if (data)
streamRecord.push(...data);
this.emitStreamingUpdate({
type: "done",
data: {
event: "done",
},
});
this.streamingMessage = [];
return streamRecord;
}
getGraphPosition() {
return this.graphPosition.split("|").map((i) => {
const [name, id] = i.split(":");
return {
id,
name,
};
});
}
getGraphNodeNow() {
const position = this.getGraphPosition();
return position[position.length - 1];
}
/** 子图的数据需要通过 merge 的方式重新进行合并更新 */
mergeSubGraphMessagesToStreamingMessages(messages) {
const map = new Map(messages.filter((i) => i.id).map((i) => [i.id, i]));
this.streamingMessage.forEach((i) => {
if (map.has(i.id)) {
const newValue = map.get(i.id);
Object.assign(i, newValue);
map.delete(i.id);
}
});
// 剩余的 message 一定不在 streamMessage 中
map.forEach((i) => {
if (i.type === "tool" && i.tool_call_id) {
this.streamingMessage.push(i);
}
});
}
runFETool() {
var _a;
const data = this.streamingMessage; // 需要保证不被清理
const lastMessage = data[data.length - 1];
if (!lastMessage)
return;
// 如果最后一条消息是前端工具消息,则调用工具
if (lastMessage.type === "ai" && ((_a = lastMessage.tool_calls) === null || _a === void 0 ? void 0 : _a.length)) {
const result = lastMessage.tool_calls.map((tool) => {
if (this.tools.getTool(tool.name)) {
const toolMessage = {
...tool,
tool_call_id: tool.id,
/** @ts-ignore */
tool_input: JSON.stringify(tool.args),
additional_kwargs: {},
};
// json 校验
return this.callFETool(toolMessage, tool.args);
}
});
this.currentThread.status = "interrupted"; // 修复某些机制下,状态不为 interrupted 与后端有差异
return Promise.all(result);
}
}
async callFETool(message, args) {
const that = this; // 防止 this 被错误解析
const result = await this.tools.callTool(message.name, args, { client: that, message });
if (!result) {
return;
}
return this.resume(result);
}
/**
* @zh 继续被前端工具中断的流程。
* @en Resumes a process interrupted by a frontend tool.
*/
resume(result) {
return this.sendMessage([], {
command: {
resume: result,
},
});
}
/**
* @zh 标记前端工具等待已完成。
* @en Marks the frontend tool waiting as completed.
*/
doneFEToolWaiting(id, result) {
var _a;
const done = this.tools.doneWaiting(id, result);
if (!done && ((_a = this.currentThread) === null || _a === void 0 ? void 0 : _a.status) === "interrupted") {
this.resume(result);
}
}
/**
* @zh 获取当前的 Thread。
* @en Gets the current Thread.
*/
getCurrentThread() {
return this.currentThread;
}
/**
* @zh 获取当前的 Assistant。
* @en Gets the current Assistant.
*/
getCurrentAssistant() {
return this.currentAssistant;
}
/**
* @zh 重置客户端状态。
* @en Resets the client state.
*/
async reset() {
var _a;
await this.initAssistant((_a = this.currentAssistant) === null || _a === void 0 ? void 0 : _a.graph_id);
this.currentThread = null;
this.graphState = {};
this.graphMessages = [];
this.streamingMessage = [];
this.currentRun = undefined;
this.tools.clearWaiting();
this.emitStreamingUpdate({
type: "value",
data: {
event: "messages/partial",
},
});
}
}