langchain
TypeScript bindings for LangChain
Source (TypeScript): src/agents/tests/utils.ts
Embedded in the source map for utils.cjs.
{"version":3,"file":"utils.cjs","names":["BaseChatModel","value: number","tools: StructuredTool[]","_schema: any","RunnableLambda","messages: BaseMessage[]","_options?: this[\"ParsedCallOptions\"]","_runManager?: CallbackManagerForLLMRun","HumanMessage","AIMessage"],"sources":["../../../src/agents/tests/utils.ts"],"sourcesContent":["/* eslint-disable @typescript-eslint/no-explicit-any */\n/* eslint-disable import/no-extraneous-dependencies */\nimport { expect } from \"vitest\";\nimport { CallbackManagerForLLMRun } from \"@langchain/core/callbacks/manager\";\nimport {\n BaseChatModel,\n BaseChatModelParams,\n BaseChatModelCallOptions,\n BindToolsInput,\n ToolChoice,\n} from \"@langchain/core/language_models/chat_models\";\nimport { StructuredTool } from \"@langchain/core/tools\";\nimport {\n BaseMessage,\n AIMessage,\n HumanMessage,\n BaseMessageFields,\n AIMessageFields,\n ToolMessage,\n ToolMessageFields,\n} from \"@langchain/core/messages\";\nimport { ChatResult } from \"@langchain/core/outputs\";\nimport {\n Runnable,\n RunnableConfig,\n RunnableLambda,\n RunnableBinding,\n} from \"@langchain/core/runnables\";\nimport {\n MemorySaver,\n Checkpoint,\n CheckpointMetadata,\n type BaseCheckpointSaver,\n} from \"@langchain/langgraph-checkpoint\";\nimport { LanguageModelLike } from \"@langchain/core/language_models/base\";\nimport { z } from \"zod/v3\";\n\nexport class _AnyIdAIMessage extends AIMessage {\n get lc_id() {\n return [\"langchain_core\", \"messages\", \"AIMessage\"];\n }\n\n constructor(fields: AIMessageFields | string) {\n let fieldsWithJestMatcher: Partial<AIMessageFields> = {\n id: expect.any(String) as unknown as string,\n };\n if (typeof fields === \"string\") {\n fieldsWithJestMatcher = {\n content: fields,\n ...fieldsWithJestMatcher,\n };\n } else {\n fieldsWithJestMatcher = {\n ...fields,\n ...fieldsWithJestMatcher,\n };\n }\n super(fieldsWithJestMatcher as AIMessageFields);\n }\n}\n\nexport class _AnyIdHumanMessage extends HumanMessage {\n get lc_id() {\n return [\"langchain_core\", \"messages\", \"HumanMessage\"];\n }\n\n constructor(fields: BaseMessageFields | string) {\n let fieldsWithJestMatcher: Partial<BaseMessageFields> = {\n id: expect.any(String) as unknown as string,\n };\n if (typeof fields === \"string\") {\n fieldsWithJestMatcher = {\n content: fields,\n ...fieldsWithJestMatcher,\n };\n } else {\n fieldsWithJestMatcher = {\n ...fields,\n ...fieldsWithJestMatcher,\n };\n }\n super(fieldsWithJestMatcher as BaseMessageFields);\n }\n}\n\nexport class _AnyIdToolMessage extends ToolMessage {\n get lc_id() {\n return [\"langchain_core\", \"messages\", \"ToolMessage\"];\n }\n\n constructor(fields: ToolMessageFields) {\n const fieldsWithJestMatcher: Partial<ToolMessageFields> = {\n id: expect.any(String) as unknown as string,\n ...fields,\n };\n super(fieldsWithJestMatcher as ToolMessageFields);\n }\n}\n\nexport class FakeConfigurableModel extends BaseChatModel {\n _queuedMethodOperations: Record<string, any> = {};\n\n _chatModel: LanguageModelLike;\n\n constructor(\n fields: {\n model: LanguageModelLike;\n } & BaseChatModelParams\n ) {\n super(fields);\n this._chatModel = fields.model;\n }\n\n _llmType() {\n return \"fake_configurable\";\n }\n\n async _generate(\n _messages: BaseMessage[],\n _options: this[\"ParsedCallOptions\"],\n _runManager?: CallbackManagerForLLMRun\n ): Promise<ChatResult> {\n throw new Error(\"Not implemented\");\n }\n\n async _model() {\n return this._chatModel;\n }\n\n bindTools(tools: BindToolsInput[]) {\n const modelWithTools = new 
FakeConfigurableModel({\n model: (this._chatModel as FakeToolCallingChatModel).bindTools(tools),\n });\n modelWithTools._queuedMethodOperations.bindTools = tools;\n return modelWithTools;\n }\n}\n\nexport class FakeToolCallingChatModel extends BaseChatModel {\n sleep?: number = 50;\n\n responses?: BaseMessage[];\n\n thrownErrorString?: string;\n\n idx: number;\n\n toolStyle: \"openai\" | \"anthropic\" | \"bedrock\" | \"google\" = \"openai\";\n\n structuredResponse?: Record<string, unknown>;\n\n // Track messages passed to structured output calls\n structuredOutputMessages: BaseMessage[][] = [];\n\n constructor(\n fields: {\n sleep?: number;\n responses?: BaseMessage[];\n thrownErrorString?: string;\n toolStyle?: \"openai\" | \"anthropic\" | \"bedrock\" | \"google\";\n structuredResponse?: Record<string, unknown>;\n } & BaseChatModelParams\n ) {\n super(fields);\n this.sleep = fields.sleep ?? this.sleep;\n this.responses = fields.responses;\n this.thrownErrorString = fields.thrownErrorString;\n this.idx = 0;\n this.toolStyle = fields.toolStyle ?? this.toolStyle;\n this.structuredResponse = fields.structuredResponse;\n this.structuredOutputMessages = [];\n }\n\n _llmType() {\n return \"fake\";\n }\n\n async _generate(\n messages: BaseMessage[],\n _options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<ChatResult> {\n if (this.thrownErrorString) {\n throw new Error(this.thrownErrorString);\n }\n if (this.sleep !== undefined) {\n await new Promise((resolve) => setTimeout(resolve, this.sleep));\n }\n const responses = this.responses?.length ? this.responses : messages;\n const msg = responses[this.idx % responses.length];\n const generation: ChatResult = {\n generations: [\n {\n text: \"\",\n message: msg,\n },\n ],\n };\n this.idx += 1;\n\n if (typeof msg.content === \"string\") {\n await runManager?.handleLLMNewToken(msg.content);\n }\n return generation;\n }\n\n bindTools(tools: BindToolsInput[]): Runnable<any> {\n const toolDicts = [];\n const serverTools = [];\n for (const tool of tools) {\n if (!(\"name\" in tool)) {\n serverTools.push(tool);\n continue;\n }\n\n // NOTE: this is a simplified tool spec for testing purposes only\n if (this.toolStyle === \"openai\") {\n toolDicts.push({\n type: \"function\",\n function: {\n name: tool.name,\n },\n });\n } else if ([\"anthropic\", \"google\"].includes(this.toolStyle)) {\n toolDicts.push({\n name: tool.name,\n });\n } else if (this.toolStyle === \"bedrock\") {\n toolDicts.push({\n toolSpec: {\n name: tool.name,\n },\n });\n }\n }\n let toolsToBind: BindToolsInput[] = toolDicts;\n if (this.toolStyle === \"google\") {\n toolsToBind = [{ functionDeclarations: toolDicts }];\n }\n return this.withConfig({\n tools: [...toolsToBind, ...serverTools],\n } as BaseChatModelCallOptions);\n }\n\n withStructuredOutput<\n RunOutput extends Record<string, any> = Record<string, any>\n >(_: unknown): Runnable<any> {\n if (!this.structuredResponse) {\n throw new Error(\"No structured response provided\");\n }\n // Create a runnable that returns the proper structured format\n return RunnableLambda.from(async (messages: BaseMessage[]) => {\n if (this.sleep) {\n await new Promise((resolve) => setTimeout(resolve, this.sleep));\n }\n\n // Store the messages that were sent to generate structured output\n this.structuredOutputMessages.push([...messages]);\n\n // Return in the format expected: { raw: BaseMessage, parsed: RunOutput }\n return this.structuredResponse as RunOutput;\n });\n }\n}\n\nexport class MemorySaverAssertImmutable 
extends MemorySaver {\n storageForCopies: Record<string, Record<string, Uint8Array>> = {};\n\n constructor() {\n super();\n this.storageForCopies = {};\n }\n\n async put(\n config: RunnableConfig,\n checkpoint: Checkpoint,\n metadata: CheckpointMetadata\n ): Promise<RunnableConfig> {\n const thread_id = config.configurable?.thread_id;\n this.storageForCopies[thread_id] ??= {};\n\n // assert checkpoint hasn't been modified since last written\n const saved = await this.get(config);\n if (saved) {\n const savedId = saved.id;\n if (this.storageForCopies[thread_id][savedId]) {\n const loaded = await this.serde.loadsTyped(\n \"json\",\n this.storageForCopies[thread_id][savedId]\n );\n\n expect(\n saved,\n `Checkpoint [${savedId}] has been modified since last written`\n ).toEqual(loaded);\n }\n }\n const [, serializedCheckpoint] = await this.serde.dumpsTyped(checkpoint);\n // save a copy of the checkpoint\n this.storageForCopies[thread_id][checkpoint.id] = serializedCheckpoint;\n\n return super.put(config, checkpoint, metadata);\n }\n}\n\ninterface ToolCall {\n name: string;\n args: Record<string, any>;\n id: string;\n type?: \"tool_call\";\n}\n\ninterface FakeToolCallingModelFields {\n toolCalls?: ToolCall[][];\n toolStyle?: \"openai\" | \"anthropic\";\n index?: number;\n structuredResponse?: any;\n}\n\n// Helper function to create checkpointer\nexport function createCheckpointer(): BaseCheckpointSaver {\n return new MemorySaver();\n}\n\n/**\n * Fake chat model for testing tool calling functionality\n */\nexport class FakeToolCallingModel extends BaseChatModel {\n toolCalls: ToolCall[][];\n\n toolStyle: \"openai\" | \"anthropic\";\n\n // Use a shared reference object so the index persists across bindTools calls\n private indexRef: { current: number };\n\n structuredResponse?: any;\n\n private tools: StructuredTool[] = [];\n\n constructor({\n toolCalls = [],\n toolStyle = \"openai\",\n index = 0,\n structuredResponse,\n indexRef,\n ...rest\n }: FakeToolCallingModelFields & { indexRef?: { current: number } } = {}) {\n super(rest);\n this.toolCalls = toolCalls;\n this.toolStyle = toolStyle;\n // Share the same index reference across instances\n this.indexRef = indexRef ?? 
{ current: index };\n this.structuredResponse = structuredResponse;\n }\n\n // Getter/setter for backwards compatibility\n get index(): number {\n return this.indexRef.current;\n }\n\n set index(value: number) {\n this.indexRef.current = value;\n }\n\n _llmType(): string {\n return \"fake-tool-calling\";\n }\n\n _combineLLMOutput() {\n return [];\n }\n\n bindTools(\n tools: StructuredTool[]\n ):\n | FakeToolCallingModel\n | RunnableBinding<\n any,\n any,\n any & { tool_choice?: ToolChoice | undefined }\n > {\n const newInstance = new FakeToolCallingModel({\n toolCalls: this.toolCalls,\n toolStyle: this.toolStyle,\n structuredResponse: this.structuredResponse,\n // Pass the same indexRef so all instances share the same index state\n indexRef: this.indexRef,\n });\n newInstance.tools = [...this.tools, ...tools];\n return newInstance;\n }\n\n withStructuredOutput(_schema: any) {\n return new RunnableLambda({\n func: async () => {\n return this.structuredResponse;\n },\n });\n }\n\n async _generate(\n messages: BaseMessage[],\n _options?: this[\"ParsedCallOptions\"],\n _runManager?: CallbackManagerForLLMRun\n ): Promise<ChatResult> {\n const lastMessage = messages[messages.length - 1];\n let content = lastMessage.content as string;\n\n // Handle prompt concatenation\n if (messages.length > 1) {\n const parts = messages.map((m) => m.content).filter(Boolean);\n content = parts.join(\"-\");\n }\n\n // Reset index at the start of a new conversation (only human message)\n // This allows the model to be reused across multiple agent.invoke() calls\n const isStartOfConversation =\n messages.length === 1 ||\n (messages.length === 2 && messages.every(HumanMessage.isInstance));\n if (isStartOfConversation && this.index !== 0) {\n this.index = 0;\n }\n\n const currentToolCalls = this.toolCalls[this.index] || [];\n const messageId = this.index.toString();\n\n // Move to next set of tool calls for subsequent invocations\n this.index = (this.index + 1) % Math.max(1, this.toolCalls.length);\n\n const message = new AIMessage({\n content,\n id: messageId,\n tool_calls:\n currentToolCalls.length > 0\n ? 
currentToolCalls.map((tc) => ({\n ...tc,\n type: \"tool_call\" as const,\n }))\n : undefined,\n });\n\n return {\n generations: [\n {\n text: content,\n message,\n },\n ],\n llmOutput: {},\n };\n }\n}\n\nexport class SearchAPI extends StructuredTool {\n name = \"search_api\";\n\n description = \"A simple API that returns the input string.\";\n\n schema = z.object({\n query: z.string().describe(\"The query to search for.\"),\n });\n\n async _call(input: z.infer<typeof this.schema>) {\n if (input?.query === \"error\") {\n throw new Error(\"Error\");\n }\n return `result for ${input?.query}`;\n }\n}\n"],"mappings":";;;;;;;;;;;;AAuUA,IAAa,uBAAb,MAAa,6BAA6BA,2DAAc;CACtD;CAEA;CAGA,AAAQ;CAER;CAEA,AAAQ,QAA0B,CAAE;CAEpC,YAAY,EACV,YAAY,CAAE,GACd,YAAY,UACZ,QAAQ,GACR,oBACA,SACA,GAAG,MAC6D,GAAG,CAAE,GAAE;EACvE,MAAM,KAAK;EACX,KAAK,YAAY;EACjB,KAAK,YAAY;EAEjB,KAAK,WAAW,YAAY,EAAE,SAAS,MAAO;EAC9C,KAAK,qBAAqB;CAC3B;CAGD,IAAI,QAAgB;AAClB,SAAO,KAAK,SAAS;CACtB;CAED,IAAI,MAAMC,OAAe;EACvB,KAAK,SAAS,UAAU;CACzB;CAED,WAAmB;AACjB,SAAO;CACR;CAED,oBAAoB;AAClB,SAAO,CAAE;CACV;CAED,UACEC,OAOI;EACJ,MAAM,cAAc,IAAI,qBAAqB;GAC3C,WAAW,KAAK;GAChB,WAAW,KAAK;GAChB,oBAAoB,KAAK;GAEzB,UAAU,KAAK;EAChB;EACD,YAAY,QAAQ,CAAC,GAAG,KAAK,OAAO,GAAG,KAAM;AAC7C,SAAO;CACR;CAED,qBAAqBC,SAAc;AACjC,SAAO,IAAIC,0CAAe,EACxB,MAAM,YAAY;AAChB,UAAO,KAAK;EACb,EACF;CACF;CAED,MAAM,UACJC,UACAC,UACAC,aACqB;EACrB,MAAM,cAAc,SAAS,SAAS,SAAS;EAC/C,IAAI,UAAU,YAAY;AAG1B,MAAI,SAAS,SAAS,GAAG;GACvB,MAAM,QAAQ,SAAS,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC,OAAO,QAAQ;GAC5D,UAAU,MAAM,KAAK,IAAI;EAC1B;EAID,MAAM,wBACJ,SAAS,WAAW,KACnB,SAAS,WAAW,KAAK,SAAS,MAAMC,uCAAa,WAAW;AACnE,MAAI,yBAAyB,KAAK,UAAU,GAC1C,KAAK,QAAQ;EAGf,MAAM,mBAAmB,KAAK,UAAU,KAAK,UAAU,CAAE;EACzD,MAAM,YAAY,KAAK,MAAM,UAAU;EAGvC,KAAK,SAAS,KAAK,QAAQ,KAAK,KAAK,IAAI,GAAG,KAAK,UAAU,OAAO;EAElE,MAAM,UAAU,IAAIC,oCAAU;GAC5B;GACA,IAAI;GACJ,YACE,iBAAiB,SAAS,IACtB,iBAAiB,IAAI,CAAC,QAAQ;IAC5B,GAAG;IACH,MAAM;GACP,GAAE,GACH;EACP;AAED,SAAO;GACL,aAAa,CACX;IACE,MAAM;IACN;GACD,CACF;GACD,WAAW,CAAE;EACd;CACF;AACF"}
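
Usage example (illustrative)

A minimal vitest sketch of how these fakes can be exercised. It assumes the classes are imported from this utils module (the "./utils" path below is hypothetical), and the test names, inputs, and expected values are illustrative, derived from the implementations above rather than taken from the package's own test suite.

import { expect, test } from "vitest";
import { AIMessage, HumanMessage } from "@langchain/core/messages";
// Hypothetical import path for the utilities defined above:
// import { FakeToolCallingChatModel, FakeToolCallingModel, SearchAPI } from "./utils";

test("FakeToolCallingChatModel replays queued responses in order", async () => {
  const model = new FakeToolCallingChatModel({
    sleep: 0, // skip the default 50 ms delay
    responses: [new AIMessage("first"), new AIMessage("second")],
  });
  // _generate walks `responses` via idx, so each invoke returns the next one
  const first = await model.invoke([new HumanMessage("hi")]);
  expect(first.content).toBe("first");
  const second = await model.invoke([new HumanMessage("again")]);
  expect(second.content).toBe("second");
});

test("FakeToolCallingModel emits the queued tool calls", async () => {
  const model = new FakeToolCallingModel({
    toolCalls: [[{ name: "search_api", args: { query: "weather" }, id: "1" }]],
  });
  // invoke is typed as returning a BaseMessageChunk, hence the double cast
  const msg = (await model.invoke([
    new HumanMessage("hi"),
  ])) as unknown as AIMessage;
  // A single-message prompt is echoed back as the AI message content
  expect(msg.content).toBe("hi");
  expect(msg.tool_calls?.[0].name).toBe("search_api");
});

test("SearchAPI returns a canned result for its query", async () => {
  const result = await new SearchAPI().invoke({ query: "weather" });
  expect(result).toBe("result for weather");
});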