@langchain/core
Version:
Core LangChain.js abstractions and schemas
1 lines • 7.84 kB
Source Map (JSON)
{"version":3,"file":"llms.d.ts","names":["BasePromptValueInterface","LLMResult","Generation","GenerationChunk","BaseCallbackConfig","CallbackManagerForLLMRun","Callbacks","BaseLanguageModel","BaseLanguageModelCallOptions","BaseLanguageModelInput","BaseLanguageModelParams","RunnableConfig","BaseCache","SerializedLLM","Record","BaseLLMParams","BaseLLMCallOptions","BaseLLM","CallOptions","Exclude","Omit","Promise","AsyncGenerator","Partial","prompts","cache","llmStringKey","parsedOptions","handledOptions","runId","LLM"],"sources":["../../src/language_models/llms.d.ts"],"sourcesContent":["import type { BasePromptValueInterface } from \"../prompt_values.js\";\nimport { type LLMResult, type Generation, GenerationChunk } from \"../outputs.js\";\nimport { type BaseCallbackConfig, type CallbackManagerForLLMRun, type Callbacks } from \"../callbacks/manager.js\";\nimport { BaseLanguageModel, type BaseLanguageModelCallOptions, type BaseLanguageModelInput, type BaseLanguageModelParams } from \"./base.js\";\nimport type { RunnableConfig } from \"../runnables/config.js\";\nimport type { BaseCache } from \"../caches/index.js\";\nexport type SerializedLLM = {\n _model: string;\n _type: string;\n} & Record<string, any>;\nexport interface BaseLLMParams extends BaseLanguageModelParams {\n}\nexport interface BaseLLMCallOptions extends BaseLanguageModelCallOptions {\n}\n/**\n * LLM Wrapper. Takes in a prompt (or prompts) and returns a string.\n */\nexport declare abstract class BaseLLM<CallOptions extends BaseLLMCallOptions = BaseLLMCallOptions> extends BaseLanguageModel<string, CallOptions> {\n ParsedCallOptions: Omit<CallOptions, Exclude<keyof RunnableConfig, \"signal\" | \"timeout\" | \"maxConcurrency\">>;\n lc_namespace: string[];\n /**\n * This method takes an input and options, and returns a string. It\n * converts the input to a prompt value and generates a result based on\n * the prompt.\n * @param input Input for the LLM.\n * @param options Options for the LLM call.\n * @returns A string result based on the prompt.\n */\n invoke(input: BaseLanguageModelInput, options?: CallOptions): Promise<string>;\n _streamResponseChunks(_input: string, _options: this[\"ParsedCallOptions\"], _runManager?: CallbackManagerForLLMRun): AsyncGenerator<GenerationChunk>;\n protected _separateRunnableConfigFromCallOptionsCompat(options?: Partial<CallOptions>): [RunnableConfig, this[\"ParsedCallOptions\"]];\n _streamIterator(input: BaseLanguageModelInput, options?: CallOptions): AsyncGenerator<string>;\n /**\n * This method takes prompt values, options, and callbacks, and generates\n * a result based on the prompts.\n * @param promptValues Prompt values for the LLM.\n * @param options Options for the LLM call.\n * @param callbacks Callbacks for the LLM call.\n * @returns An LLMResult based on the prompts.\n */\n generatePrompt(promptValues: BasePromptValueInterface[], options?: string[] | CallOptions, callbacks?: Callbacks): Promise<LLMResult>;\n /**\n * Run the LLM on the given prompts and input.\n */\n abstract _generate(prompts: string[], options: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): Promise<LLMResult>;\n /**\n * Get the parameters used to invoke the model\n */\n invocationParams(_options?: this[\"ParsedCallOptions\"]): any;\n _flattenLLMResult(llmResult: LLMResult): LLMResult[];\n /** @ignore */\n _generateUncached(prompts: string[], parsedOptions: this[\"ParsedCallOptions\"], handledOptions: BaseCallbackConfig, startedRunManagers?: CallbackManagerForLLMRun[]): Promise<LLMResult>;\n _generateCached({ prompts, cache, llmStringKey, parsedOptions, handledOptions, runId }: {\n prompts: string[];\n cache: BaseCache<Generation[]>;\n llmStringKey: string;\n parsedOptions: any;\n handledOptions: RunnableConfig;\n runId?: string;\n }): Promise<LLMResult & {\n missingPromptIndices: number[];\n startedRunManagers?: CallbackManagerForLLMRun[];\n }>;\n /**\n * Run the LLM on the given prompts and input, handling caching.\n */\n generate(prompts: string[], options?: string[] | CallOptions, callbacks?: Callbacks): Promise<LLMResult>;\n /**\n * Get the identifying parameters of the LLM.\n */\n _identifyingParams(): Record<string, any>;\n /**\n * Return the string type key uniquely identifying this class of LLM.\n */\n abstract _llmType(): string;\n _modelType(): string;\n}\n/**\n * LLM class that provides a simpler interface to subclass than {@link BaseLLM}.\n *\n * Requires only implementing a simpler {@link _call} method instead of {@link _generate}.\n *\n * @augments BaseLLM\n */\nexport declare abstract class LLM<CallOptions extends BaseLLMCallOptions = BaseLLMCallOptions> extends BaseLLM<CallOptions> {\n /**\n * Run the LLM on the given prompt and input.\n */\n abstract _call(prompt: string, options: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): Promise<string>;\n _generate(prompts: string[], options: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): Promise<LLMResult>;\n}\n//# sourceMappingURL=llms.d.ts.map"],"mappings":";;;;;;;;;KAMYa,aAAAA;;;AAAZ,CAAA,GAGIC,MAHQD,CAAAA,MAAAA,EAAAA,GAAa,CAAA;AAIRE,UAAAA,aAAAA,SAAsBL,uBAAuB,CAAA,CAE9D;AAK8BO,UALbD,kBAAAA,SAA2BR,4BAKP,CAAA;;;;AACkBG,uBADzBM,OACyBN,CAAAA,oBADGK,kBACHL,GADwBK,kBACxBL,CAAAA,SADoDJ,iBACpDI,CAAAA,MAAAA,EAD8EO,WAC9EP,CAAAA,CAAAA;EAAdQ,iBAAAA,EAAlBC,IAAkBD,CAAbD,WAAaC,EAAAA,OAAAA,CAAAA,MAAcR,cAAdQ,EAAAA,QAAAA,GAAAA,SAAAA,GAAAA,gBAAAA,CAAAA,CAAAA;EAAlBC,YAAAA,EAAAA,MAAAA,EAAAA;EAULX;;;;;;;;EAE2EE,MAAAA,CAAAA,KAAAA,EAF3EF,sBAE2EE,EAAAA,OAAAA,CAAAA,EAFzCO,WAEyCP,CAAAA,EAF3BU,OAE2BV,CAAAA,MAAAA,CAAAA;EAClEF,qBAAAA,CAAAA,MAAAA,EAAAA,MAAAA,EAAAA,QAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,EAAAA,WAAAA,CAAAA,EAFkEJ,wBAElEI,CAAAA,EAF6Fa,cAE7Fb,CAF4GN,eAE5GM,CAAAA;EAAkCS,UAAAA,4CAAAA,CAAAA,OAAAA,CAAAA,EADQK,OACRL,CADgBA,WAChBA,CAAAA,CAAAA,EAAAA,CADgCP,cAChCO,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,CAAAA;EAAcI,eAAAA,CAAAA,KAAAA,EAAhDb,sBAAgDa,EAAAA,OAAAA,CAAAA,EAAdJ,WAAcI,CAAAA,EAAAA,cAAAA,CAAAA,MAAAA,CAAAA;EAS1CtB;;;;;;;;EASAC,cAAAA,CAAAA,YAAAA,EATAD,wBASAC,EAAAA,EAAAA,OAAAA,CAAAA,EAAAA,MAAAA,EAAAA,GATiDiB,WASjDjB,EAAAA,SAAAA,CAAAA,EAT0EK,SAS1EL,CAAAA,EATsFoB,OAStFpB,CAT8FA,SAS9FA,CAAAA;EAAYA;;;EAEoIA,SAAAA,SAAAA,CAAAA,OAAAA,EAAAA,MAAAA,EAAAA,EAAAA,OAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,EAAAA,UAAAA,CAAAA,EAPtFI,wBAOsFJ,CAAAA,EAP3DoB,OAO2DpB,CAPnDA,SAOmDA,CAAAA;EAARoB;;;EACnIK,gBAAAA,CAAAA,QAAAA,CAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,CAAAA,EAAAA,GAAAA;EAAcC,iBAAAA,CAAAA,SAAAA,EAHnB1B,SAGmB0B,CAAAA,EAHP1B,SAGO0B,EAAAA;EAAeC;EAAgBC,iBAAAA,CAAAA,OAAAA,EAAAA,MAAAA,EAAAA,EAAAA,aAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,EAAAA,cAAAA,EADgBzB,kBAChByB,EAAAA,kBAAAA,CAAAA,EADyDxB,wBACzDwB,EAAAA,CAAAA,EADsFR,OACtFQ,CAD8F5B,SAC9F4B,CAAAA;EAE1D3B,eAAAA,CAAAA;IAAAA,OAAAA;IAAAA,KAAAA;IAAAA,YAAAA;IAAAA,aAAAA;IAAAA,cAAAA;IAAAA;EAYqDI,CAZrDJ,EAAAA;IAAVU,OAAAA,EAAAA,MAAAA,EAAAA;IAGSD,KAAAA,EAHTC,SAGSD,CAHCT,UAGDS,EAAAA,CAAAA;IAERV,YAAAA,EAAAA,MAAAA;IAEaI,aAAAA,EAAAA,GAAAA;IAFrBgB,cAAAA,EAFgBV,cAEhBU;IAO6CH,KAAAA,CAAAA,EAAAA,MAAAA;EAAyBZ,CAAAA,CAAAA,EAPtEe,OAOsEf,CAP9DL,SAO8DK,GAAAA;IAAoBL,oBAAAA,EAAAA,MAAAA,EAAAA;IAARoB,kBAAAA,CAAAA,EAL7DhB,wBAK6DgB,EAAAA;EAIhEP,CAAAA,CAAAA;EArDiFP;AAAiB;AAmE5H;EAAsDS,QAAAA,CAAAA,OAAAA,EAAAA,MAAAA,EAAAA,EAAAA,OAAAA,CAAAA,EAAAA,MAAAA,EAAAA,GAlBDE,WAkBCF,EAAAA,SAAAA,CAAAA,EAlBwBV,SAkBxBU,CAAAA,EAlBoCK,OAkBpCL,CAlB4Cf,SAkB5Ce,CAAAA;EAAqBA;;;EAIoCK,kBAAAA,CAAAA,CAAAA,EAlBrFP,MAkBqFO,CAAAA,MAAAA,EAAAA,GAAAA,CAAAA;EAC7BhB;;;EALqBY,SAAAA,QAAAA,CAAAA,CAAAA,EAAAA,MAAAA;EAAO,UAAA,CAAA,CAAA,EAAA,MAAA;;;;;;;;;uBAAhFa,wBAAwBd,qBAAqBA,4BAA4BC,QAAQC;;;;kFAI3Bb,2BAA2BgB;gFAC7BhB,2BAA2BgB,QAAQpB"}