@langchain/core
Core LangChain.js abstractions and schemas
1 line • 3.69 kB
Source Map (JSON)
{"version":3,"file":"llms.cjs","names":["LLM","fields: { response?: string; thrownErrorString?: string } & BaseLLMParams","prompt: string","_options: this[\"ParsedCallOptions\"]","runManager?: CallbackManagerForLLMRun","fields: {\n sleep?: number;\n responses?: string[];\n thrownErrorString?: string;\n } & BaseLLMParams","input: string","_options?: this[\"ParsedCallOptions\"]"],"sources":["../../../src/utils/testing/llms.ts"],"sourcesContent":["import { CallbackManagerForLLMRun } from \"../../callbacks/manager.js\";\nimport { BaseLLMParams, LLM } from \"../../language_models/llms.js\";\nimport { GenerationChunk } from \"../../outputs.js\";\n\nexport class FakeLLM extends LLM {\n response?: string;\n\n thrownErrorString?: string;\n\n constructor(\n fields: { response?: string; thrownErrorString?: string } & BaseLLMParams\n ) {\n super(fields);\n this.response = fields.response;\n this.thrownErrorString = fields.thrownErrorString;\n }\n\n _llmType() {\n return \"fake\";\n }\n\n async _call(\n prompt: string,\n _options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<string> {\n if (this.thrownErrorString) {\n throw new Error(this.thrownErrorString);\n }\n const response = this.response ?? prompt;\n await runManager?.handleLLMNewToken(response);\n return response;\n }\n}\n\nexport class FakeStreamingLLM extends LLM {\n sleep?: number = 50;\n\n responses?: string[];\n\n thrownErrorString?: string;\n\n constructor(\n fields: {\n sleep?: number;\n responses?: string[];\n thrownErrorString?: string;\n } & BaseLLMParams\n ) {\n super(fields);\n this.sleep = fields.sleep ?? this.sleep;\n this.responses = fields.responses;\n this.thrownErrorString = fields.thrownErrorString;\n }\n\n _llmType() {\n return \"fake\";\n }\n\n async _call(prompt: string): Promise<string> {\n if (this.thrownErrorString) {\n throw new Error(this.thrownErrorString);\n }\n const response = this.responses?.[0];\n this.responses = this.responses?.slice(1);\n return response ?? prompt;\n }\n\n async *_streamResponseChunks(\n input: string,\n _options?: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ) {\n if (this.thrownErrorString) {\n throw new Error(this.thrownErrorString);\n }\n const response = this.responses?.[0];\n this.responses = this.responses?.slice(1);\n for (const c of response ?? input) {\n await new Promise((resolve) => setTimeout(resolve, this.sleep));\n yield { text: c, generationInfo: {} } as GenerationChunk;\n await runManager?.handleLLMNewToken(c);\n }\n }\n}\n"],"mappings":";;;AAIA,IAAa,UAAb,cAA6BA,iCAAI;CAC/B;CAEA;CAEA,YACEC,QACA;EACA,MAAM,OAAO;EACb,KAAK,WAAW,OAAO;EACvB,KAAK,oBAAoB,OAAO;CACjC;CAED,WAAW;AACT,SAAO;CACR;CAED,MAAM,MACJC,QACAC,UACAC,YACiB;AACjB,MAAI,KAAK,kBACP,OAAM,IAAI,MAAM,KAAK;EAEvB,MAAM,WAAW,KAAK,YAAY;EAClC,MAAM,YAAY,kBAAkB,SAAS;AAC7C,SAAO;CACR;AACF;AAED,IAAa,mBAAb,cAAsCJ,iCAAI;CACxC,QAAiB;CAEjB;CAEA;CAEA,YACEK,QAKA;EACA,MAAM,OAAO;EACb,KAAK,QAAQ,OAAO,SAAS,KAAK;EAClC,KAAK,YAAY,OAAO;EACxB,KAAK,oBAAoB,OAAO;CACjC;CAED,WAAW;AACT,SAAO;CACR;CAED,MAAM,MAAMH,QAAiC;AAC3C,MAAI,KAAK,kBACP,OAAM,IAAI,MAAM,KAAK;EAEvB,MAAM,WAAW,KAAK,YAAY;EAClC,KAAK,YAAY,KAAK,WAAW,MAAM,EAAE;AACzC,SAAO,YAAY;CACpB;CAED,OAAO,sBACLI,OACAC,UACAH,YACA;AACA,MAAI,KAAK,kBACP,OAAM,IAAI,MAAM,KAAK;EAEvB,MAAM,WAAW,KAAK,YAAY;EAClC,KAAK,YAAY,KAAK,WAAW,MAAM,EAAE;AACzC,OAAK,MAAM,KAAK,YAAY,OAAO;GACjC,MAAM,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,KAAK,MAAM;GAC9D,MAAM;IAAE,MAAM;IAAG,gBAAgB,CAAE;GAAE;GACrC,MAAM,YAAY,kBAAkB,EAAE;EACvC;CACF;AACF"}