@langchain/core

src/utils/testing/chat_models.d.ts
{"version":3,"file":"chat_models.d.cts","names":["CallbackManagerForLLMRun","BaseChatModel","BaseChatModelCallOptions","BaseChatModelParams","BaseLLMParams","BaseMessage","AIMessage","AIMessageChunk","ChatResult","ChatGenerationChunk","Runnable","StructuredTool","StructuredOutputMethodParams","BaseLanguageModelInput","StructuredOutputMethodOptions","InteropZodType","ToolSpec","Record","FakeStreamingChatModelCallOptions","FakeStreamingChatModelFields","FakeChatModel","Promise","FakeStreamingChatModel","sleep","responses","chunks","toolStyle","thrownErrorString","______messages_message_js0","MessageStructure","AsyncGenerator","FakeChatInput","FakeListChatModelCallOptions","FakeListChatModel","RunOutput"],"sources":["../../../src/utils/testing/chat_models.d.ts"],"sourcesContent":["import { CallbackManagerForLLMRun } from \"../../callbacks/manager.js\";\nimport { BaseChatModel, BaseChatModelCallOptions, BaseChatModelParams } from \"../../language_models/chat_models.js\";\nimport { BaseLLMParams } from \"../../language_models/llms.js\";\nimport { BaseMessage, AIMessage, AIMessageChunk } from \"../../messages/index.js\";\nimport { type ChatResult, ChatGenerationChunk } from \"../../outputs.js\";\nimport { Runnable } from \"../../runnables/base.js\";\nimport { StructuredTool } from \"../../tools/index.js\";\nimport { StructuredOutputMethodParams, BaseLanguageModelInput, StructuredOutputMethodOptions } from \"../../language_models/base.js\";\nimport { InteropZodType } from \"../types/zod.js\";\n/** Minimal shape actually needed by `bindTools` */\nexport interface ToolSpec {\n name: string;\n description?: string;\n schema: InteropZodType | Record<string, unknown>;\n}\n/**\n * Interface specific to the Fake Streaming Chat model.\n */\nexport interface FakeStreamingChatModelCallOptions extends BaseChatModelCallOptions {\n}\n/**\n * Interface for the Constructor-field specific to the Fake Streaming Chat model (all optional because we fill in defaults).\n */\nexport interface FakeStreamingChatModelFields extends BaseChatModelParams {\n /** Milliseconds to pause between fallback char-by-char chunks */\n sleep?: number;\n /** Full AI messages to fall back to when no `chunks` supplied */\n responses?: BaseMessage[];\n /** Exact chunks to emit (can include tool-call deltas) */\n chunks?: AIMessageChunk[];\n /** How tool specs are formatted in `bindTools` */\n toolStyle?: \"openai\" | \"anthropic\" | \"bedrock\" | \"google\";\n /** Throw this error instead of streaming (useful in tests) */\n thrownErrorString?: string;\n}\nexport declare class FakeChatModel extends BaseChatModel {\n _combineLLMOutput(): never[];\n _llmType(): string;\n _generate(messages: BaseMessage[], options?: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;\n}\nexport declare class FakeStreamingChatModel extends BaseChatModel<FakeStreamingChatModelCallOptions> {\n sleep: number;\n responses: BaseMessage[];\n chunks: AIMessageChunk[];\n toolStyle: \"openai\" | \"anthropic\" | \"bedrock\" | \"google\";\n thrownErrorString?: string;\n private tools;\n constructor({ sleep, responses, chunks, toolStyle, thrownErrorString, ...rest }: FakeStreamingChatModelFields & BaseLLMParams);\n _llmType(): string;\n bindTools(tools: (StructuredTool | ToolSpec)[]): Runnable<BaseLanguageModelInput, AIMessageChunk<import(\"../../messages/message.js\").MessageStructure>, FakeStreamingChatModelCallOptions>;\n _generate(messages: BaseMessage[], _options: this[\"ParsedCallOptions\"], _runManager?: 
CallbackManagerForLLMRun): Promise<ChatResult>;\n _streamResponseChunks(_messages: BaseMessage[], options: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;\n}\n/**\n * Interface for the input parameters specific to the Fake List Chat model.\n */\nexport interface FakeChatInput extends BaseChatModelParams {\n /** Responses to return */\n responses: string[];\n /** Time to sleep in milliseconds between responses */\n sleep?: number;\n emitCustomEvent?: boolean;\n /**\n * Generation info to include on the last chunk during streaming.\n * This gets merged into response_metadata by the base chat model.\n * Useful for testing response_metadata propagation (e.g., finish_reason).\n */\n generationInfo?: Record<string, unknown>;\n}\nexport interface FakeListChatModelCallOptions extends BaseChatModelCallOptions {\n thrownErrorString?: string;\n}\n/**\n * A fake Chat Model that returns a predefined list of responses. It can be used\n * for testing purposes.\n * @example\n * ```typescript\n * const chat = new FakeListChatModel({\n * responses: [\"I'll callback later.\", \"You 'console' them!\"]\n * });\n *\n * const firstMessage = new HumanMessage(\"You want to hear a JavaScript joke?\");\n * const secondMessage = new HumanMessage(\"How do you cheer up a JavaScript developer?\");\n *\n * // Call the chat model with a message and log the response\n * const firstResponse = await chat.call([firstMessage]);\n * console.log({ firstResponse });\n *\n * const secondResponse = await chat.call([secondMessage]);\n * console.log({ secondResponse });\n * ```\n */\nexport declare class FakeListChatModel extends BaseChatModel<FakeListChatModelCallOptions> {\n static lc_name(): string;\n lc_serializable: boolean;\n responses: string[];\n i: number;\n sleep?: number;\n emitCustomEvent: boolean;\n generationInfo?: Record<string, unknown>;\n private tools;\n toolStyle: \"openai\" | \"anthropic\" | \"bedrock\" | \"google\";\n constructor(params: FakeChatInput);\n _combineLLMOutput(): never[];\n _llmType(): string;\n _generate(_messages: BaseMessage[], options?: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;\n _formatGeneration(text: string): {\n message: AIMessage<import(\"../../messages/message.js\").MessageStructure>;\n text: string;\n };\n _streamResponseChunks(_messages: BaseMessage[], options: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;\n _sleepIfRequested(): Promise<void>;\n _sleep(): Promise<void>;\n _createResponseChunk(text: string, generationInfo?: Record<string, any>): ChatGenerationChunk;\n _currentResponse(): string;\n _incrementResponse(): void;\n bindTools(tools: (StructuredTool | ToolSpec)[]): Runnable<BaseLanguageModelInput, AIMessageChunk<import(\"../../messages/message.js\").MessageStructure>, FakeListChatModelCallOptions>;\n withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(_params: StructuredOutputMethodParams<RunOutput, false> | InteropZodType<RunOutput> | Record<string, any>, config?: StructuredOutputMethodOptions<false>): Runnable<BaseLanguageModelInput, RunOutput>;\n withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(_params: StructuredOutputMethodParams<RunOutput, true> | InteropZodType<RunOutput> | Record<string, any>, config?: StructuredOutputMethodOptions<true>): Runnable<BaseLanguageModelInput, {\n raw: BaseMessage;\n parsed: RunOutput;\n }>;\n}\n//# 
//# sourceMappingURL=chat_models.d.ts.map
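
A minimal sketch of how `FakeStreamingChatModel` might be used to exercise streaming code paths in tests, based on the fields declared above. It assumes the `@langchain/core/utils/testing` and `@langchain/core/messages` entry points; the message contents are illustrative.

```typescript
import { FakeStreamingChatModel } from "@langchain/core/utils/testing";
import { AIMessageChunk, HumanMessage } from "@langchain/core/messages";

const model = new FakeStreamingChatModel({
  // Exact chunks to emit, in order. Per the field docs, when no `chunks` are
  // supplied the model falls back to streaming `responses` char by char,
  // pausing `sleep` milliseconds between chunks.
  chunks: [
    new AIMessageChunk({ content: "Hello" }),
    new AIMessageChunk({ content: ", world!" }),
  ],
});

const stream = await model.stream([new HumanMessage("Say hello")]);
for await (const chunk of stream) {
  console.log(chunk.content); // "Hello", then ", world!"
}
```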
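Similarly, a hedged sketch of using `FakeListChatModel`'s `generationInfo` field to check that metadata attached to the last streamed chunk ends up in `response_metadata`, as the `FakeChatInput` doc comment describes. The field values and the chunk aggregation via `.concat()` are illustrative assumptions.

```typescript
import { FakeListChatModel } from "@langchain/core/utils/testing";
import type { AIMessageChunk } from "@langchain/core/messages";

const model = new FakeListChatModel({
  responses: ["first canned reply", "second canned reply"],
  sleep: 10, // time to sleep in milliseconds between responses
  // Included on the last chunk during streaming and merged into response_metadata.
  generationInfo: { finish_reason: "stop" },
});

// Aggregate the streamed chunks into a single AIMessageChunk.
let final: AIMessageChunk | undefined;
for await (const chunk of await model.stream("anything")) {
  final = final === undefined ? chunk : final.concat(chunk);
}
console.log(final?.response_metadata); // expected to include { finish_reason: "stop" }
```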
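Finally, a sketch of binding a plain `ToolSpec` (rather than a full `StructuredTool`) through `bindTools`. The tool name, schema, and prompt are made up for illustration; how the spec is serialized internally depends on the model's `toolStyle` ("openai" | "anthropic" | "bedrock" | "google").

```typescript
import { FakeListChatModel } from "@langchain/core/utils/testing";
import { z } from "zod";

const model = new FakeListChatModel({
  responses: ['{"location": "Paris"}'],
});

// A ToolSpec only needs a name, an optional description, and a schema
// (an interop zod type or a plain record).
const withWeatherTool = model.bindTools([
  {
    name: "get_weather",
    description: "Look up the current weather for a location",
    schema: z.object({ location: z.string() }),
  },
]);

const result = await withWeatherTool.invoke("What's the weather in Paris?");
console.log(result.content); // the next canned response from `responses`
```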