UNPKG

@langgraph-js/pro

Version:

The Pro SDK for LangGraph - seamlessly integrate your AI agents with frontend interfaces and build complex AI workflows

46 lines (45 loc) 1.59 kB
import { createDefaultAnnotation, createState } from "./createState.js";
import { ChatOpenAI } from "@langchain/openai";

/**
 * Creates a model state definition and a model factory bound to a whitelist.
 *
 * @param ModelAllow - Map from model slot name to the list of allowed model
 *   names for that slot; the first entry of each list is the default value.
 * @returns `{ ModelState, createLLM }`:
 *   - `ModelState`: state built via `createState().build(...)` with one
 *     annotated field per slot, defaulting to the first allowed model name.
 *   - `createLLM(state, model_type, params?)`: async factory that validates
 *     the model name stored in `state[model_type]` against the whitelist and
 *     returns a streaming `ChatOpenAI` instance.
 * @example
 * const { ModelState, createLLM } = createModelHelper({
 *     main_model: ["gemini-2.5-flash"],
 * });
 * const GraphState = createState(ModelState).build({});
 * const llm = await createLLM(state, "main_model");
 */
export const createModelHelper = (ModelAllow) => {
    // One state field per slot; each defaults to the first whitelisted model.
    const ModelState = createState().build({
        ...Object.fromEntries(
            Object.entries(ModelAllow).map(([key, value]) => [
                key,
                createDefaultAnnotation(() => value[0]),
            ]),
        ),
    });

    /**
     * Validates that `state[model_type]` holds a whitelisted model name.
     * @throws {Error} when the slot is empty/missing, the slot is not a key of
     *   `ModelAllow`, or the stored model name is not in the slot's whitelist.
     * @returns The validated model name.
     */
    const modelGuard = (state, model_type) => {
        const modelName = state[model_type];
        if (!modelName) {
            throw new Error(`Model ${String(model_type)} not found in state`);
        }
        // Check the slot exists before calling .includes — an unknown slot
        // would otherwise surface as an opaque TypeError.
        const allowed = ModelAllow[model_type];
        if (!allowed || !allowed.includes(modelName)) {
            // Include the offending model name so the failure is actionable.
            throw new Error(
                `Model "${modelName}" is not allowed for ${String(model_type)}`,
            );
        }
        return modelName;
    };

    /**
     * Builds a streaming ChatOpenAI client for the model stored in `state`.
     * @param params - Extra ChatOpenAI constructor fields, merged after
     *   `modelName` (so `params` may override it).
     * @throws {Error} when validation fails (see `modelGuard`).
     */
    async function createLLM(state, model_type, params = {}) {
        const modelName = modelGuard(state, model_type);
        return new ChatOpenAI({
            modelName,
            ...params,
            configuration: {
                // NOTE(review): `parallel_tool_calls` is passed inside the
                // OpenAI client `configuration`; the upstream typings do not
                // declare it here (hence the @ts-ignore). It is normally a
                // per-call option — confirm it is actually honored here.
                /** @ts-ignore */
                parallel_tool_calls: true,
            },
            streamUsage: true,
            streaming: true,
        });
    }

    return {
        ModelState,
        createLLM,
    };
};