UNPKG

@langchain/core

1 line, 153 kB
{"version":3,"file":"base.cjs","names":["value: any","defaultKey: string","Serializable","suffix?: string","fields?: {\n stopAfterAttempt?: number;\n onFailedAttempt?: RunnableRetryFailedAttemptHandler;\n }","config: Partial<CallOptions>","fields:\n | {\n fallbacks: Runnable<RunInput, RunOutput>[];\n }\n | Runnable<RunInput, RunOutput>[]","options: Partial<O> | Partial<O>[]","ensureConfig","inputs: RunInput[]","options?: Partial<CallOptions> | Partial<CallOptions>[]","batchOptions?: RunnableBatchOptions","AsyncCaller","input: RunInput","options?: Partial<CallOptions>","AsyncGeneratorWithSetup","IterableReadableStream","func:\n | ((input: T) => Promise<RunOutput>)\n | ((\n input: T,\n config?: Partial<CallOptions>,\n runManager?: CallbackManagerForChainRun\n ) => Promise<RunOutput>)","input: T","options?: Partial<CallOptions> & { runType?: string }","getCallbackManagerForConfig","raceWithSignal","func: (\n inputs: T[],\n options?: Partial<CallOptions>[],\n runManagers?: (CallbackManagerForChainRun | undefined)[],\n batchOptions?: RunnableBatchOptions\n ) => Promise<(RunOutput | Error)[]>","inputs: T[]","options?:\n | Partial<CallOptions & { runType?: string }>\n | Partial<CallOptions & { runType?: string }>[]","outputs: (RunOutput | Error)[]","first: O","second: O","concat","inputGenerator: AsyncGenerator<I>","transformer: (\n generator: AsyncGenerator<I>,\n runManager?: CallbackManagerForChainRun,\n options?: Partial<CallOptions>\n ) => AsyncGenerator<O>","finalInput: I | undefined","finalOutput: O | undefined","runManager: CallbackManagerForChainRun | undefined","pipeGeneratorWithSetup","isStreamEventsHandler","isLogStreamHandler","_?: RunnableConfig","Graph","z","coerceable: RunnableLike<RunOutput, NewRunOutput>","keys: string | string[]","mapping: RunnableMapLike<Record<string, unknown>, Record<string, unknown>>","generator: AsyncGenerator<RunInput>","options: Partial<CallOptions>","streamOptions?: Omit<LogStreamCallbackHandlerInput, \"autoClose\">","LogStreamCallbackHandler","logStreamCallbackHandler: LogStreamCallbackHandler","RunLogPatch","options: Partial<CallOptions> & {\n version: \"v1\" | \"v2\";\n encoding?: \"text/event-stream\" | undefined;\n }","streamOptions?: Omit<EventStreamCallbackHandlerInput, \"autoClose\">","convertToHttpEventStream","options: Partial<CallOptions> & { version: \"v1\" | \"v2\" }","EventStreamCallbackHandler","listener: (() => void) | null","_RootEventFilter","RunLog","state","event: StreamEvent","data: StreamEventData","thing: any","isRunnableInterface","RootListenersTracer","fields: {\n name?: string;\n description?: string;\n schema: InteropZodType<T>;\n }","fields: RunnableBindingArgs<RunInput, RunOutput, CallOptions>","suffix?: string | undefined","mergeConfigs","options?: Partial<CallOptions> | undefined","fields: {\n bound: Runnable<RunInputItem, RunOutputItem, CallOptions>;\n }","inputs: RunInputItem[]","config?: Partial<CallOptions>","runManager?: CallbackManagerForChainRun","patchConfig","fields: RunnableBindingArgs<RunInput, RunOutput, CallOptions> & {\n maxAttemptNumber?: number;\n onFailedAttempt?: RunnableRetryFailedAttemptHandler;\n }","attempt: number","config?: CallOptions","pRetry","attemptNumber: number","configs?: RunnableConfig[]","runManagers?: (CallbackManagerForChainRun | undefined)[]","resultsMap: Record<string, RunOutput | Error>","fields: RunnableSequenceFields<RunInput, RunOutput>","options?: RunnableConfig","finalOutput: RunOutput","getAbortSignalError","options?: Partial<RunnableConfig> | 
Partial<RunnableConfig>[]","nextStepInputs: any","config?: RunnableConfig","currentLastNode: any","nameOrFields?:\n | string\n | Omit<\n RunnableSequenceFields<RunInput, RunOutput>,\n \"first\" | \"middle\" | \"last\"\n >","extra: Record<string, unknown>","fields: { steps: RunnableMapLike<RunInput, RunOutput> }","steps: RunnableMapLike<RunInput, RunOutput>","options?: Partial<RunnableConfig>","output: Record<string, any>","atee","result","fields: { func: AnyTraceableFunction }","isAsyncIterable","isIterator","state: IteratorResult<unknown>","func: AnyTraceableFunction","func:\n | RunnableFunc<\n RunInput,\n RunOutput | Runnable<RunInput, RunOutput, CallOptions>,\n CallOptions\n >\n | TraceableFunction<\n RunnableFunc<\n RunInput,\n RunOutput | Runnable<RunInput, RunOutput, CallOptions>,\n CallOptions\n >\n >","fields: {\n func:\n | RunnableFunc<\n RunInput,\n RunOutput | Runnable<RunInput, RunOutput, CallOptions>,\n CallOptions\n >\n | TraceableFunction<\n RunnableFunc<\n RunInput,\n RunOutput | Runnable<RunInput, RunOutput, CallOptions>,\n CallOptions\n >\n >;\n }","func:\n | RunnableFunc<\n RunInput,\n RunOutput | Runnable<RunInput, RunOutput, CallOptions>,\n CallOptions\n >\n | TraceableFunction<\n RunnableFunc<\n RunInput,\n RunOutput | Runnable<RunInput, RunOutput, CallOptions>,\n CallOptions\n >\n >","DEFAULT_RECURSION_LIMIT","AsyncLocalStorageProviderSingleton","pickRunnableConfigKeys","finalOutput: RunOutput | undefined","consumeAsyncIterableInContext","isIterableIterator","consumeIteratorInContext","finalChunk: RunInput | undefined","fields: {\n runnable: Runnable<RunInput, RunOutput>;\n fallbacks: Runnable<RunInput, RunOutput>[];\n }","options?: Partial<RunnableConfig> | undefined","firstError: any","coerceable: RunnableLike<RunInput, RunOutput, CallOptions>","runnables: Record<string, Runnable<RunInput>>","fields: RunnableMap<RunInput> | RunnableAssignFields<RunInput>","fields: string | string[] | RunnablePickFields","fields: RunnableToolLikeArgs<RunInput, RunOutput>","toolInput: InferInteropZodOutput<RunInput>","_isToolCall","interopParseAsync","ToolInputParsingException","runnable: Runnable<RunInput, RunOutput>","fields: {\n name?: string;\n description?: string;\n schema: InteropZodType<RunInput>;\n }","getSchemaDescription","isSimpleStringZodSchema"],"sources":["../../src/runnables/base.ts"],"sourcesContent":["import { z } from \"zod/v3\";\nimport { v4 as uuidv4 } from \"uuid\";\n\nimport {\n type TraceableFunction,\n isTraceableFunction,\n} from \"langsmith/singletons/traceable\";\nimport type {\n RunnableInterface,\n RunnableBatchOptions,\n RunnableConfig,\n} from \"./types.js\";\nimport { CallbackManagerForChainRun } from \"../callbacks/manager.js\";\nimport {\n LogStreamCallbackHandler,\n LogStreamCallbackHandlerInput,\n RunLog,\n RunLogPatch,\n isLogStreamHandler,\n} from \"../tracers/log_stream.js\";\nimport {\n EventStreamCallbackHandler,\n EventStreamCallbackHandlerInput,\n StreamEvent,\n StreamEventData,\n isStreamEventsHandler,\n} from \"../tracers/event_stream.js\";\nimport { Serializable } from \"../load/serializable.js\";\nimport pRetry from \"../utils/p-retry/index.js\";\nimport {\n IterableReadableStream,\n concat,\n atee,\n pipeGeneratorWithSetup,\n AsyncGeneratorWithSetup,\n} from \"../utils/stream.js\";\nimport { raceWithSignal, getAbortSignalError } from \"../utils/signal.js\";\nimport {\n DEFAULT_RECURSION_LIMIT,\n ensureConfig,\n getCallbackManagerForConfig,\n mergeConfigs,\n patchConfig,\n pickRunnableConfigKeys,\n} from \"./config.js\";\nimport { 
AsyncCaller } from \"../utils/async_caller.js\";\nimport { Run } from \"../tracers/base.js\";\nimport { RootListenersTracer } from \"../tracers/root_listener.js\";\nimport { _RootEventFilter, isRunnableInterface } from \"./utils.js\";\nimport { AsyncLocalStorageProviderSingleton } from \"../singletons/index.js\";\nimport { Graph } from \"./graph.js\";\nimport { convertToHttpEventStream } from \"./wrappers.js\";\nimport {\n consumeAsyncIterableInContext,\n consumeIteratorInContext,\n isAsyncIterable,\n isIterableIterator,\n isIterator,\n} from \"./iter.js\";\nimport { _isToolCall, ToolInputParsingException } from \"../tools/utils.js\";\nimport { ToolCall } from \"../messages/tool.js\";\nimport {\n getSchemaDescription,\n InferInteropZodOutput,\n interopParseAsync,\n InteropZodType,\n isSimpleStringZodSchema,\n} from \"../utils/types/zod.js\";\n\nexport { type RunnableInterface, RunnableBatchOptions };\n\nexport type RunnableFunc<\n RunInput,\n RunOutput,\n CallOptions extends RunnableConfig = RunnableConfig\n> = (\n input: RunInput,\n options:\n | CallOptions\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | (Record<string, any> & CallOptions)\n) => RunOutput | Promise<RunOutput>;\n\nexport type RunnableMapLike<RunInput, RunOutput> = {\n [K in keyof RunOutput]: RunnableLike<RunInput, RunOutput[K]>;\n};\n\nexport type RunnableLike<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunInput = any,\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput = any,\n CallOptions extends RunnableConfig = RunnableConfig\n> =\n | RunnableInterface<RunInput, RunOutput, CallOptions>\n | RunnableFunc<RunInput, RunOutput, CallOptions>\n | RunnableMapLike<RunInput, RunOutput>;\n\nexport type RunnableRetryFailedAttemptHandler = (\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n error: any,\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n input: any\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n) => any;\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nexport function _coerceToDict(value: any, defaultKey: string) {\n return value &&\n !Array.isArray(value) &&\n // eslint-disable-next-line no-instanceof/no-instanceof\n !(value instanceof Date) &&\n typeof value === \"object\"\n ? value\n : { [defaultKey]: value };\n}\n\n/**\n * A Runnable is a generic unit of work that can be invoked, batched, streamed, and/or\n * transformed.\n */\nexport abstract class Runnable<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunInput = any,\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput = any,\n CallOptions extends RunnableConfig = RunnableConfig\n >\n extends Serializable\n implements RunnableInterface<RunInput, RunOutput, CallOptions>\n{\n protected lc_runnable = true;\n\n name?: string;\n\n getName(suffix?: string): string {\n const name =\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n this.name ?? (this.constructor as any).lc_name() ?? this.constructor.name;\n return suffix ? 
`${name}${suffix}` : name;\n }\n\n abstract invoke(\n input: RunInput,\n options?: Partial<CallOptions>\n ): Promise<RunOutput>;\n\n /**\n * Add retry logic to an existing runnable.\n * @param fields.stopAfterAttempt The number of attempts to retry.\n * @param fields.onFailedAttempt A function that is called when a retry fails.\n * @returns A new RunnableRetry that, when invoked, will retry according to the parameters.\n */\n withRetry(fields?: {\n stopAfterAttempt?: number;\n onFailedAttempt?: RunnableRetryFailedAttemptHandler;\n }): RunnableRetry<RunInput, RunOutput, CallOptions> {\n // eslint-disable-next-line @typescript-eslint/no-use-before-define\n return new RunnableRetry({\n bound: this,\n kwargs: {},\n config: {},\n maxAttemptNumber: fields?.stopAfterAttempt,\n ...fields,\n });\n }\n\n /**\n * Bind config to a Runnable, returning a new Runnable.\n * @param config New configuration parameters to attach to the new runnable.\n * @returns A new RunnableBinding with a config matching what's passed.\n */\n withConfig(\n config: Partial<CallOptions>\n ): Runnable<RunInput, RunOutput, CallOptions> {\n // eslint-disable-next-line @typescript-eslint/no-use-before-define\n return new RunnableBinding({\n bound: this,\n config,\n kwargs: {},\n });\n }\n\n /**\n * Create a new runnable from the current one that will try invoking\n * other passed fallback runnables if the initial invocation fails.\n * @param fields.fallbacks Other runnables to call if the runnable errors.\n * @returns A new RunnableWithFallbacks.\n */\n withFallbacks(\n fields:\n | {\n fallbacks: Runnable<RunInput, RunOutput>[];\n }\n | Runnable<RunInput, RunOutput>[]\n ): RunnableWithFallbacks<RunInput, RunOutput> {\n const fallbacks = Array.isArray(fields) ? fields : fields.fallbacks;\n // eslint-disable-next-line @typescript-eslint/no-use-before-define\n return new RunnableWithFallbacks<RunInput, RunOutput>({\n runnable: this,\n fallbacks,\n });\n }\n\n protected _getOptionsList<O extends CallOptions & { runType?: string }>(\n options: Partial<O> | Partial<O>[],\n length = 0\n ): Partial<O>[] {\n if (Array.isArray(options) && options.length !== length) {\n throw new Error(\n `Passed \"options\" must be an array with the same length as the inputs, but got ${options.length} options for ${length} inputs`\n );\n }\n\n if (Array.isArray(options)) {\n return options.map(ensureConfig);\n }\n if (length > 1 && !Array.isArray(options) && options.runId) {\n console.warn(\n \"Provided runId will be used only for the first element of the batch.\"\n );\n const subsequent = Object.fromEntries(\n Object.entries(options).filter(([key]) => key !== \"runId\")\n );\n\n return Array.from({ length }, (_, i) =>\n ensureConfig(i === 0 ? 
options : subsequent)\n ) as Partial<O>[];\n }\n return Array.from({ length }, () => ensureConfig(options));\n }\n\n /**\n * Default implementation of batch, which calls invoke N times.\n * Subclasses should override this method if they can batch more efficiently.\n * @param inputs Array of inputs to each batch call.\n * @param options Either a single call options object to apply to each batch call or an array for each call.\n * @param batchOptions.returnExceptions Whether to return errors rather than throwing on the first one\n * @returns An array of RunOutputs, or mixed RunOutputs and errors if batchOptions.returnExceptions is set\n */\n async batch(\n inputs: RunInput[],\n options?: Partial<CallOptions> | Partial<CallOptions>[],\n batchOptions?: RunnableBatchOptions & { returnExceptions?: false }\n ): Promise<RunOutput[]>;\n\n async batch(\n inputs: RunInput[],\n options?: Partial<CallOptions> | Partial<CallOptions>[],\n batchOptions?: RunnableBatchOptions & { returnExceptions: true }\n ): Promise<(RunOutput | Error)[]>;\n\n async batch(\n inputs: RunInput[],\n options?: Partial<CallOptions> | Partial<CallOptions>[],\n batchOptions?: RunnableBatchOptions\n ): Promise<(RunOutput | Error)[]>;\n\n async batch(\n inputs: RunInput[],\n options?: Partial<CallOptions> | Partial<CallOptions>[],\n batchOptions?: RunnableBatchOptions\n ): Promise<(RunOutput | Error)[]> {\n const configList = this._getOptionsList(options ?? {}, inputs.length);\n const maxConcurrency =\n configList[0]?.maxConcurrency ?? batchOptions?.maxConcurrency;\n const caller = new AsyncCaller({\n maxConcurrency,\n onFailedAttempt: (e) => {\n throw e;\n },\n });\n const batchCalls = inputs.map((input, i) =>\n caller.call(async () => {\n try {\n const result = await this.invoke(input, configList[i]);\n return result;\n } catch (e) {\n if (batchOptions?.returnExceptions) {\n return e as Error;\n }\n throw e;\n }\n })\n );\n return Promise.all(batchCalls);\n }\n\n /**\n * Default streaming implementation.\n * Subclasses should override this method if they support streaming output.\n * @param input\n * @param options\n */\n async *_streamIterator(\n input: RunInput,\n options?: Partial<CallOptions>\n ): AsyncGenerator<RunOutput> {\n yield this.invoke(input, options);\n }\n\n /**\n * Stream output in chunks.\n * @param input\n * @param options\n * @returns A readable stream that is also an iterable.\n */\n async stream(\n input: RunInput,\n options?: Partial<CallOptions>\n ): Promise<IterableReadableStream<RunOutput>> {\n // Buffer the first streamed chunk to allow for initial errors\n // to surface immediately.\n const config = ensureConfig(options);\n const wrappedGenerator = new AsyncGeneratorWithSetup({\n generator: this._streamIterator(input, config),\n config,\n });\n await wrappedGenerator.setup;\n return IterableReadableStream.fromAsyncGenerator(wrappedGenerator);\n }\n\n protected _separateRunnableConfigFromCallOptions(\n options?: Partial<CallOptions>\n ): [RunnableConfig, Omit<Partial<CallOptions>, keyof RunnableConfig>] {\n let runnableConfig;\n if (options === undefined) {\n runnableConfig = ensureConfig(options);\n } else {\n runnableConfig = ensureConfig({\n callbacks: options.callbacks,\n tags: options.tags,\n metadata: options.metadata,\n runName: options.runName,\n configurable: options.configurable,\n recursionLimit: options.recursionLimit,\n maxConcurrency: options.maxConcurrency,\n runId: options.runId,\n timeout: options.timeout,\n signal: options.signal,\n });\n }\n const callOptions = { ...(options as 
Partial<CallOptions>) };\n delete callOptions.callbacks;\n delete callOptions.tags;\n delete callOptions.metadata;\n delete callOptions.runName;\n delete callOptions.configurable;\n delete callOptions.recursionLimit;\n delete callOptions.maxConcurrency;\n delete callOptions.runId;\n delete callOptions.timeout;\n delete callOptions.signal;\n return [runnableConfig, callOptions];\n }\n\n protected async _callWithConfig<T extends RunInput>(\n func:\n | ((input: T) => Promise<RunOutput>)\n | ((\n input: T,\n config?: Partial<CallOptions>,\n runManager?: CallbackManagerForChainRun\n ) => Promise<RunOutput>),\n input: T,\n options?: Partial<CallOptions> & { runType?: string }\n ) {\n const config = ensureConfig(options);\n const callbackManager_ = await getCallbackManagerForConfig(config);\n const runManager = await callbackManager_?.handleChainStart(\n this.toJSON(),\n _coerceToDict(input, \"input\"),\n config.runId,\n config?.runType,\n undefined,\n undefined,\n config?.runName ?? this.getName()\n );\n delete config.runId;\n let output;\n try {\n const promise = func.call(this, input, config, runManager);\n output = await raceWithSignal(promise, options?.signal);\n } catch (e) {\n await runManager?.handleChainError(e);\n throw e;\n }\n await runManager?.handleChainEnd(_coerceToDict(output, \"output\"));\n return output;\n }\n\n /**\n * Internal method that handles batching and configuration for a runnable\n * It takes a function, input values, and optional configuration, and\n * returns a promise that resolves to the output values.\n * @param func The function to be executed for each input value.\n * @param input The input values to be processed.\n * @param config Optional configuration for the function execution.\n * @returns A promise that resolves to the output values.\n */\n async _batchWithConfig<T extends RunInput>(\n func: (\n inputs: T[],\n options?: Partial<CallOptions>[],\n runManagers?: (CallbackManagerForChainRun | undefined)[],\n batchOptions?: RunnableBatchOptions\n ) => Promise<(RunOutput | Error)[]>,\n inputs: T[],\n options?:\n | Partial<CallOptions & { runType?: string }>\n | Partial<CallOptions & { runType?: string }>[],\n batchOptions?: RunnableBatchOptions\n ): Promise<(RunOutput | Error)[]> {\n const optionsList = this._getOptionsList(options ?? {}, inputs.length);\n const callbackManagers = await Promise.all(\n optionsList.map(getCallbackManagerForConfig)\n );\n const runManagers = await Promise.all(\n callbackManagers.map(async (callbackManager, i) => {\n const handleStartRes = await callbackManager?.handleChainStart(\n this.toJSON(),\n _coerceToDict(inputs[i], \"input\"),\n optionsList[i].runId,\n optionsList[i].runType,\n undefined,\n undefined,\n optionsList[i].runName ?? 
this.getName()\n );\n delete optionsList[i].runId;\n return handleStartRes;\n })\n );\n let outputs: (RunOutput | Error)[];\n try {\n const promise = func.call(\n this,\n inputs,\n optionsList,\n runManagers,\n batchOptions\n );\n outputs = await raceWithSignal(promise, optionsList?.[0]?.signal);\n } catch (e) {\n await Promise.all(\n runManagers.map((runManager) => runManager?.handleChainError(e))\n );\n throw e;\n }\n await Promise.all(\n runManagers.map((runManager) =>\n runManager?.handleChainEnd(_coerceToDict(outputs, \"output\"))\n )\n );\n return outputs;\n }\n\n /** @internal */\n _concatOutputChunks<O>(first: O, second: O): O {\n return concat(first, second);\n }\n\n /**\n * Helper method to transform an Iterator of Input values into an Iterator of\n * Output values, with callbacks.\n * Use this to implement `stream()` or `transform()` in Runnable subclasses.\n */\n protected async *_transformStreamWithConfig<\n I extends RunInput,\n O extends RunOutput\n >(\n inputGenerator: AsyncGenerator<I>,\n transformer: (\n generator: AsyncGenerator<I>,\n runManager?: CallbackManagerForChainRun,\n options?: Partial<CallOptions>\n ) => AsyncGenerator<O>,\n options?: Partial<CallOptions> & { runType?: string }\n ): AsyncGenerator<O> {\n let finalInput: I | undefined;\n let finalInputSupported = true;\n let finalOutput: O | undefined;\n let finalOutputSupported = true;\n\n const config = ensureConfig(options);\n const callbackManager_ = await getCallbackManagerForConfig(config);\n const outerThis = this;\n async function* wrapInputForTracing() {\n for await (const chunk of inputGenerator) {\n if (finalInputSupported) {\n if (finalInput === undefined) {\n finalInput = chunk;\n } else {\n try {\n finalInput = outerThis._concatOutputChunks(\n finalInput,\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n chunk as any\n );\n } catch {\n finalInput = undefined;\n finalInputSupported = false;\n }\n }\n }\n yield chunk;\n }\n }\n\n let runManager: CallbackManagerForChainRun | undefined;\n try {\n const pipe = await pipeGeneratorWithSetup(\n transformer.bind(this),\n wrapInputForTracing(),\n async () =>\n callbackManager_?.handleChainStart(\n this.toJSON(),\n { input: \"\" },\n config.runId,\n config.runType,\n undefined,\n undefined,\n config.runName ?? 
this.getName(),\n undefined,\n { lc_defers_inputs: true }\n ),\n options?.signal,\n config\n );\n delete config.runId;\n runManager = pipe.setup;\n\n const streamEventsHandler = runManager?.handlers.find(\n isStreamEventsHandler\n );\n let iterator = pipe.output;\n if (streamEventsHandler !== undefined && runManager !== undefined) {\n iterator = streamEventsHandler.tapOutputIterable(\n runManager.runId,\n iterator\n );\n }\n\n const streamLogHandler = runManager?.handlers.find(isLogStreamHandler);\n if (streamLogHandler !== undefined && runManager !== undefined) {\n iterator = streamLogHandler.tapOutputIterable(\n runManager.runId,\n iterator\n );\n }\n\n for await (const chunk of iterator) {\n yield chunk;\n if (finalOutputSupported) {\n if (finalOutput === undefined) {\n finalOutput = chunk;\n } else {\n try {\n finalOutput = this._concatOutputChunks(\n finalOutput,\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n chunk as any\n );\n } catch {\n finalOutput = undefined;\n finalOutputSupported = false;\n }\n }\n }\n }\n } catch (e) {\n await runManager?.handleChainError(e, undefined, undefined, undefined, {\n inputs: _coerceToDict(finalInput, \"input\"),\n });\n throw e;\n }\n await runManager?.handleChainEnd(\n finalOutput ?? {},\n undefined,\n undefined,\n undefined,\n { inputs: _coerceToDict(finalInput, \"input\") }\n );\n }\n\n getGraph(_?: RunnableConfig): Graph {\n const graph = new Graph();\n\n // TODO: Add input schema for runnables\n const inputNode = graph.addNode({\n name: `${this.getName()}Input`,\n schema: z.any(),\n });\n\n const runnableNode = graph.addNode(this);\n\n // TODO: Add output schemas for runnables\n const outputNode = graph.addNode({\n name: `${this.getName()}Output`,\n schema: z.any(),\n });\n\n graph.addEdge(inputNode, runnableNode);\n graph.addEdge(runnableNode, outputNode);\n return graph;\n }\n\n /**\n * Create a new runnable sequence that runs each individual runnable in series,\n * piping the output of one runnable into another runnable or runnable-like.\n * @param coerceable A runnable, function, or object whose values are functions or runnables.\n * @returns A new runnable sequence.\n */\n pipe<NewRunOutput>(\n coerceable: RunnableLike<RunOutput, NewRunOutput>\n ): Runnable<RunInput, Exclude<NewRunOutput, Error>> {\n // eslint-disable-next-line @typescript-eslint/no-use-before-define\n return new RunnableSequence({\n first: this,\n last: _coerceToRunnable(coerceable),\n });\n }\n\n /**\n * Pick keys from the dict output of this runnable. Returns a new runnable.\n */\n pick(keys: string | string[]): Runnable {\n // eslint-disable-next-line @typescript-eslint/no-use-before-define\n return this.pipe(new RunnablePick(keys) as Runnable);\n }\n\n /**\n * Assigns new fields to the dict output of this runnable. 
Returns a new runnable.\n */\n assign(\n mapping: RunnableMapLike<Record<string, unknown>, Record<string, unknown>>\n ): Runnable {\n return this.pipe(\n // eslint-disable-next-line @typescript-eslint/no-use-before-define\n new RunnableAssign(\n // eslint-disable-next-line @typescript-eslint/no-use-before-define\n new RunnableMap<Record<string, unknown>>({ steps: mapping })\n ) as Runnable\n );\n }\n\n /**\n * Default implementation of transform, which buffers input and then calls stream.\n * Subclasses should override this method if they can start producing output while\n * input is still being generated.\n * @param generator\n * @param options\n */\n async *transform(\n generator: AsyncGenerator<RunInput>,\n options: Partial<CallOptions>\n ): AsyncGenerator<RunOutput> {\n let finalChunk;\n for await (const chunk of generator) {\n if (finalChunk === undefined) {\n finalChunk = chunk;\n } else {\n // Make a best effort to gather, for any type that supports concat.\n // This method should throw an error if gathering fails.\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n finalChunk = this._concatOutputChunks(finalChunk, chunk as any);\n }\n }\n yield* this._streamIterator(finalChunk, ensureConfig(options));\n }\n\n /**\n * Stream all output from a runnable, as reported to the callback system.\n * This includes all inner runs of LLMs, Retrievers, Tools, etc.\n * Output is streamed as Log objects, which include a list of\n * jsonpatch ops that describe how the state of the run has changed in each\n * step, and the final state of the run.\n * The jsonpatch ops can be applied in order to construct state.\n * @param input\n * @param options\n * @param streamOptions\n */\n async *streamLog(\n input: RunInput,\n options?: Partial<CallOptions>,\n streamOptions?: Omit<LogStreamCallbackHandlerInput, \"autoClose\">\n ): AsyncGenerator<RunLogPatch> {\n const logStreamCallbackHandler = new LogStreamCallbackHandler({\n ...streamOptions,\n autoClose: false,\n _schemaFormat: \"original\",\n });\n const config = ensureConfig(options);\n yield* this._streamLog(input, logStreamCallbackHandler, config);\n }\n\n protected async *_streamLog(\n input: RunInput,\n logStreamCallbackHandler: LogStreamCallbackHandler,\n config: Partial<CallOptions>\n ): AsyncGenerator<RunLogPatch> {\n const { callbacks } = config;\n if (callbacks === undefined) {\n config.callbacks = [logStreamCallbackHandler];\n } else if (Array.isArray(callbacks)) {\n config.callbacks = callbacks.concat([logStreamCallbackHandler]);\n } else {\n const copiedCallbacks = callbacks.copy();\n copiedCallbacks.addHandler(logStreamCallbackHandler, true);\n config.callbacks = copiedCallbacks;\n }\n const runnableStreamPromise = this.stream(input, config);\n async function consumeRunnableStream() {\n try {\n const runnableStream = await runnableStreamPromise;\n for await (const chunk of runnableStream) {\n const patch = new RunLogPatch({\n ops: [\n {\n op: \"add\",\n path: \"/streamed_output/-\",\n value: chunk,\n },\n ],\n });\n await logStreamCallbackHandler.writer.write(patch);\n }\n } finally {\n await logStreamCallbackHandler.writer.close();\n }\n }\n const runnableStreamConsumePromise = consumeRunnableStream();\n try {\n for await (const log of logStreamCallbackHandler) {\n yield log;\n }\n } finally {\n await runnableStreamConsumePromise;\n }\n }\n\n /**\n * Generate a stream of events emitted by the internal steps of the runnable.\n *\n * Use to create an iterator over StreamEvents that provide real-time information\n * about the 
progress of the runnable, including StreamEvents from intermediate\n * results.\n *\n * A StreamEvent is a dictionary with the following schema:\n *\n * - `event`: string - Event names are of the format: on_[runnable_type]_(start|stream|end).\n * - `name`: string - The name of the runnable that generated the event.\n * - `run_id`: string - Randomly generated ID associated with the given execution of\n * the runnable that emitted the event. A child runnable that gets invoked as part of the execution of a\n * parent runnable is assigned its own unique ID.\n * - `tags`: string[] - The tags of the runnable that generated the event.\n * - `metadata`: Record<string, any> - The metadata of the runnable that generated the event.\n * - `data`: Record<string, any>\n *\n * Below is a table that illustrates some events that might be emitted by various\n * chains. Metadata fields have been omitted from the table for brevity.\n * Chain definitions have been included after the table.\n *\n * **ATTENTION** This reference table is for the V2 version of the schema.\n *\n * ```md\n * +----------------------+-----------------------------+------------------------------------------+\n * | event | input | output/chunk |\n * +======================+=============================+==========================================+\n * | on_chat_model_start | {\"messages\": BaseMessage[]} | |\n * +----------------------+-----------------------------+------------------------------------------+\n * | on_chat_model_stream | | AIMessageChunk(\"hello\") |\n * +----------------------+-----------------------------+------------------------------------------+\n * | on_chat_model_end | {\"messages\": BaseMessage[]} | AIMessageChunk(\"hello world\") |\n * +----------------------+-----------------------------+------------------------------------------+\n * | on_llm_start | {'input': 'hello'} | |\n * +----------------------+-----------------------------+------------------------------------------+\n * | on_llm_stream | | 'Hello' |\n * +----------------------+-----------------------------+------------------------------------------+\n * | on_llm_end | 'Hello human!' | |\n * +----------------------+-----------------------------+------------------------------------------+\n * | on_chain_start | | |\n * +----------------------+-----------------------------+------------------------------------------+\n * | on_chain_stream | | \"hello world!\" |\n * +----------------------+-----------------------------+------------------------------------------+\n * | on_chain_end | [Document(...)] | \"hello world!, goodbye world!\" |\n * +----------------------+-----------------------------+------------------------------------------+\n * | on_tool_start | {\"x\": 1, \"y\": \"2\"} | |\n * +----------------------+-----------------------------+------------------------------------------+\n * | on_tool_end | | {\"x\": 1, \"y\": \"2\"} |\n * +----------------------+-----------------------------+------------------------------------------+\n * | on_retriever_start | {\"query\": \"hello\"} | |\n * +----------------------+-----------------------------+------------------------------------------+\n * | on_retriever_end | {\"query\": \"hello\"} | [Document(...), ..] 
|\n * +----------------------+-----------------------------+------------------------------------------+\n * | on_prompt_start | {\"question\": \"hello\"} | |\n * +----------------------+-----------------------------+------------------------------------------+\n * | on_prompt_end | {\"question\": \"hello\"} | ChatPromptValue(messages: BaseMessage[]) |\n * +----------------------+-----------------------------+------------------------------------------+\n * ```\n *\n * The \"on_chain_*\" events are the default for Runnables that don't fit one of the above categories.\n *\n * In addition to the standard events above, users can also dispatch custom events.\n *\n * Custom events will be only be surfaced with in the `v2` version of the API!\n *\n * A custom event has following format:\n *\n * ```md\n * +-----------+------+------------------------------------------------------------+\n * | Attribute | Type | Description |\n * +===========+======+============================================================+\n * | name | str | A user defined name for the event. |\n * +-----------+------+------------------------------------------------------------+\n * | data | Any | The data associated with the event. This can be anything. |\n * +-----------+------+------------------------------------------------------------+\n * ```\n *\n * Here's an example:\n *\n * ```ts\n * import { RunnableLambda } from \"@langchain/core/runnables\";\n * import { dispatchCustomEvent } from \"@langchain/core/callbacks/dispatch\";\n * // Use this import for web environments that don't support \"async_hooks\"\n * // and manually pass config to child runs.\n * // import { dispatchCustomEvent } from \"@langchain/core/callbacks/dispatch/web\";\n *\n * const slowThing = RunnableLambda.from(async (someInput: string) => {\n * // Placeholder for some slow operation\n * await new Promise((resolve) => setTimeout(resolve, 100));\n * await dispatchCustomEvent(\"progress_event\", {\n * message: \"Finished step 1 of 2\",\n * });\n * await new Promise((resolve) => setTimeout(resolve, 100));\n * return \"Done\";\n * });\n *\n * const eventStream = await slowThing.streamEvents(\"hello world\", {\n * version: \"v2\",\n * });\n *\n * for await (const event of eventStream) {\n * if (event.event === \"on_custom_event\") {\n * console.log(event);\n * }\n * }\n * ```\n */\n streamEvents(\n input: RunInput,\n options: Partial<CallOptions> & { version: \"v1\" | \"v2\" },\n streamOptions?: Omit<EventStreamCallbackHandlerInput, \"autoClose\">\n ): IterableReadableStream<StreamEvent>;\n\n streamEvents(\n input: RunInput,\n options: Partial<CallOptions> & {\n version: \"v1\" | \"v2\";\n encoding: \"text/event-stream\";\n },\n streamOptions?: Omit<EventStreamCallbackHandlerInput, \"autoClose\">\n ): IterableReadableStream<Uint8Array>;\n\n streamEvents(\n input: RunInput,\n options: Partial<CallOptions> & {\n version: \"v1\" | \"v2\";\n encoding?: \"text/event-stream\" | undefined;\n },\n streamOptions?: Omit<EventStreamCallbackHandlerInput, \"autoClose\">\n ): IterableReadableStream<StreamEvent | Uint8Array> {\n let stream;\n if (options.version === \"v1\") {\n stream = this._streamEventsV1(input, options, streamOptions);\n } else if (options.version === \"v2\") {\n stream = this._streamEventsV2(input, options, streamOptions);\n } else {\n throw new Error(\n `Only versions \"v1\" and \"v2\" of the schema are currently supported.`\n );\n }\n if (options.encoding === \"text/event-stream\") {\n return convertToHttpEventStream(stream);\n } else {\n return 
IterableReadableStream.fromAsyncGenerator(stream);\n }\n }\n\n private async *_streamEventsV2(\n input: RunInput,\n options: Partial<CallOptions> & { version: \"v1\" | \"v2\" },\n streamOptions?: Omit<EventStreamCallbackHandlerInput, \"autoClose\">\n ): AsyncGenerator<StreamEvent> {\n const eventStreamer = new EventStreamCallbackHandler({\n ...streamOptions,\n autoClose: false,\n });\n const config = ensureConfig(options);\n const runId = config.runId ?? uuidv4();\n config.runId = runId;\n const callbacks = config.callbacks;\n if (callbacks === undefined) {\n config.callbacks = [eventStreamer];\n } else if (Array.isArray(callbacks)) {\n config.callbacks = callbacks.concat(eventStreamer);\n } else {\n const copiedCallbacks = callbacks.copy();\n copiedCallbacks.addHandler(eventStreamer, true);\n config.callbacks = copiedCallbacks;\n }\n const abortController = new AbortController();\n // Call the runnable in streaming mode,\n // add each chunk to the output stream\n const outerThis = this;\n async function consumeRunnableStream() {\n let signal;\n let listener: (() => void) | null = null;\n\n try {\n if (options?.signal) {\n if (\"any\" in AbortSignal) {\n // Use native AbortSignal.any() if available (Node 19+)\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n signal = (AbortSignal as any).any([\n abortController.signal,\n options.signal,\n ]);\n } else {\n // Fallback for Node 18 and below - just use the provided signal\n signal = options.signal;\n // Ensure we still abort our controller when the parent signal aborts\n\n listener = () => {\n abortController.abort();\n };\n\n options.signal.addEventListener(\"abort\", listener, { once: true });\n }\n } else {\n signal = abortController.signal;\n }\n const runnableStream = await outerThis.stream(input, {\n ...config,\n signal,\n });\n const tappedStream = eventStreamer.tapOutputIterable(\n runId,\n runnableStream\n );\n for await (const _ of tappedStream) {\n // Just iterate so that the callback handler picks up events\n if (abortController.signal.aborted) break;\n }\n } finally {\n await eventStreamer.finish();\n\n if (signal && listener) {\n signal.removeEventListener(\"abort\", listener);\n }\n }\n }\n const runnableStreamConsumePromise = consumeRunnableStream();\n let firstEventSent = false;\n let firstEventRunId;\n try {\n for await (const event of eventStreamer) {\n // This is a work-around an issue where the inputs into the\n // chain are not available until the entire input is consumed.\n // As a temporary solution, we'll modify the input to be the input\n // that was passed into the chain.\n if (!firstEventSent) {\n event.data.input = input;\n firstEventSent = true;\n firstEventRunId = event.run_id;\n yield event;\n continue;\n }\n if (event.run_id === firstEventRunId && event.event.endsWith(\"_end\")) {\n // If it's the end event corresponding to the root runnable\n // we dont include the input in the event since it's guaranteed\n // to be included in the first event.\n if (event.data?.input) {\n delete event.data.input;\n }\n }\n yield event;\n }\n } finally {\n abortController.abort();\n await runnableStreamConsumePromise;\n }\n }\n\n private async *_streamEventsV1(\n input: RunInput,\n options: Partial<CallOptions> & { version: \"v1\" | \"v2\" },\n streamOptions?: Omit<LogStreamCallbackHandlerInput, \"autoClose\">\n ): AsyncGenerator<StreamEvent> {\n let runLog;\n let hasEncounteredStartEvent = false;\n const config = ensureConfig(options);\n const rootTags = config.tags ?? 
[];\n const rootMetadata = config.metadata ?? {};\n const rootName = config.runName ?? this.getName();\n const logStreamCallbackHandler = new LogStreamCallbackHandler({\n ...streamOptions,\n autoClose: false,\n _schemaFormat: \"streaming_events\",\n });\n const rootEventFilter = new _RootEventFilter({\n ...streamOptions,\n });\n const logStream = this._streamLog(input, logStreamCallbackHandler, config);\n for await (const log of logStream) {\n if (!runLog) {\n runLog = RunLog.fromRunLogPatch(log);\n } else {\n runLog = runLog.concat(log);\n }\n if (runLog.state === undefined) {\n throw new Error(\n `Internal error: \"streamEvents\" state is missing. Please open a bug report.`\n );\n }\n // Yield the start event for the root runnable if it hasn't been seen.\n // The root run is never filtered out\n if (!hasEncounteredStartEvent) {\n hasEncounteredStartEvent = true;\n const state = { ...runLog.state };\n const event: StreamEvent = {\n run_id: state.id,\n event: `on_${state.type}_start`,\n name: rootName,\n tags: rootTags,\n metadata: rootMetadata,\n data: {\n input,\n },\n };\n if (rootEventFilter.includeEvent(event, state.type)) {\n yield event;\n }\n }\n const paths = log.ops\n .filter((op) => op.path.startsWith(\"/logs/\"))\n .map((op) => op.path.split(\"/\")[2]);\n const dedupedPaths = [...new Set(paths)];\n for (const path of dedupedPaths) {\n let eventType;\n let data: StreamEventData = {};\n const logEntry = runLog.state.logs[path];\n if (logEntry.end_time === undefined) {\n if (logEntry.streamed_output.length > 0) {\n eventType = \"stream\";\n } else {\n eventType = \"start\";\n }\n } else {\n eventType = \"end\";\n }\n if (eventType === \"start\") {\n // Include the inputs with the start event if they are available.\n // Usually they will NOT be available for components that operate\n // on streams, since those components stream the input and\n // don't know its final value until the end of the stream.\n if (logEntry.inputs !== undefined) {\n data.input = logEntry.inputs;\n }\n } else if (eventType === \"end\") {\n if (logEntry.inputs !== undefined) {\n data.input = logEntry.inputs;\n }\n data.output = logEntry.final_output;\n } else if (eventType === \"stream\") {\n const chunkCount = logEntry.streamed_output.length;\n if (chunkCount !== 1) {\n throw new Error(\n `Expected exactly one chunk of streamed output, got ${chunkCount} instead. Encountered in: \"${logEntry.name}\"`\n );\n }\n data = { chunk: logEntry.streamed_output[0] };\n // Clean up the stream, we don't need it anymore.\n // And this avoids duplicates as well!\n logEntry.streamed_output = [];\n }\n yield {\n event: `on_${logEntry.type}_${eventType}`,\n name: logEntry.name,\n run_id: logEntry.id,\n tags: logEntry.tags,\n metadata: logEntry.metadata,\n data,\n };\n }\n // Finally, we take care of the streaming output from the root chain\n // if there is any.\n const { state } = runLog;\n if (state.streamed_output.length > 0) {\n const chunkCount = state.streamed_output.length;\n if (chunkCount !== 1) {\n throw new Error(\n `Expected exactly one chunk of streamed output, got ${chunkCount} instead. 
Encountered in: \"${state.name}\"`\n );\n }\n const data = { chunk: state.streamed_output[0] };\n // Clean up the stream, we don't need it anymore.\n state.streamed_output = [];\n const event = {\n event: `on_${state.type}_stream`,\n run_id: state.id,\n tags: rootTags,\n metadata: rootMetadata,\n name: rootName,\n data,\n };\n if (rootEventFilter.includeEvent(event, state.type)) {\n yield event;\n }\n }\n }\n const state = runLog?.state;\n if (state !== undefined) {\n // Finally, yield the end event for the root runnable.\n const event = {\n event: `on_${state.type}_end`,\n name: rootName,\n run_id: state.id,\n tags: rootTags,\n metadata: rootMetadata,\n data: {\n output: state.final_output,\n },\n };\n if (rootEventFilter.includeEvent(event, state.type)) yield event;\n }\n }\n\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n static isRunnable(thing: any): thing is Runnable {\n return isRunnableInterface(thing);\n }\n\n /**\n * Bind lifecycle listeners to a Runnable, returning a new Runnable.\n * The Run object contains information about the run, including its id,\n * type, input, output, error, startTime, endTime, and any tags or metadata\n * added to the run.\n *\n * @param {Object} params - The object containing the callback functions.\n * @param {(run: Run) => void} params.onStart - Called before the runnable starts running, with the Run object.\n * @param {(run: Run) => void} params.onEnd - Called after the runnable finishes running, with the Run object.\n * @param {(run: Run) => void} params.onError - Called if the runnable throws an error, with the Run object.\n */\n withListeners({\n onStart,\n onEnd,\n onError,\n }: {\n onStart?: (run: Run, config?: RunnableConfig) => void | Promise<void>;\n onEnd?: (run: Run, config?: RunnableConfig) => void | Promise<void>;\n onError?: (run: Run, config?: RunnableConfig) => void | Promise<void>;\n }): Runnable<RunInput, RunOutput, CallOptions> {\n // eslint-disable-next-line @typescript-eslint/no-use-before-define\n return new RunnableBinding<RunInput, RunOutput, CallOptions>({\n bound: this,\n config: {},\n configFactories: [\n (config) => ({\n callbacks: [\n new RootListenersTracer({\n config,\n onStart,\n onEnd,\n onError,\n }),\n ],\n }),\n ],\n });\n }\n\n /**\n * Convert a runnable to a tool. Return a new instance of `RunnableToolLike`\n * which contains the runnable, name, description and schema.\n *\n * @template {T extends RunInput = RunInput