@langgraph-js/pro

The Pro SDK for LangGraph - seamlessly integrate your AI agents with frontend interfaces and build complex AI workflows

import { BinaryOperatorAggregate } from "@langchain/langgraph/web";
import { BaseChatModel } from "@langchain/core/language_models/chat_models";
/**
 * Create the model state channels and model helpers.
 * @param ModelAllow Definition of the allowed model values per key; the first entry is the default.
 * @returns The model state annotation and the createLLM helper.
 * @example
 * const { ModelState, createLLM } = createModelHelper({
 *     main_model: ["gemini-2.5-flash"],
 * });
 * const GraphState = createState(ModelState).build({});
 * const llm = await createLLM(state, "main_model");
 */
export declare const createModelHelper: <const T extends Record<string, string[]>>(ModelAllow: T) => {
    ModelState: import("@langchain/langgraph/web").AnnotationRoot<Record<keyof T, BinaryOperatorAggregate<string, string>>>;
    createLLM: (state: import("@langchain/langgraph/web").StateType<Record<keyof T, BinaryOperatorAggregate<string, string>>>, model_type: keyof typeof ModelAllow, params?: {
        temperature?: number;
        maxTokens?: number;
        topP?: number;
        frequencyPenalty?: number;
        presencePenalty?: number;
        stop?: string[];
        timeout?: number;
        streaming?: boolean;
    }) => Promise<BaseChatModel>;
};
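
For context, a minimal usage sketch of this declaration follows, expanding on the @example above. It assumes that createModelHelper and createState are both exported from "@langgraph-js/pro" (only the former is declared in this file), that the object returned by createState(...).build({}) exposes a State type the way LangGraph annotations do, and that names such as chatNode and the extra model entries are purely illustrative.

import { createModelHelper, createState } from "@langgraph-js/pro"; // assumed export path

// Declare the allowed models per key; the first entry in each array is the default.
const { ModelState, createLLM } = createModelHelper({
    main_model: ["gemini-2.5-flash"],
});

// Merge the model channels into a graph state definition, as in the @example above.
const GraphState = createState(ModelState).build({});

// Hypothetical node: resolve the model configured under the "main_model" channel
// and invoke it. The optional third argument passes generation parameters.
const chatNode = async (state: typeof GraphState.State) => {
    const llm = await createLLM(state, "main_model", { temperature: 0.2 });
    const reply = await llm.invoke("Say hello in one sentence.");
    console.log(reply.content);
    return {};
};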