@langchain/core
Version:
Core LangChain.js abstractions and schemas
1 line • 24.5 kB
Source Map (JSON)
{"version":3,"file":"llms.cjs","names":["BaseLanguageModel","input: BaseLanguageModelInput","options?: CallOptions","_input: string","_options: this[\"ParsedCallOptions\"]","_runManager?: CallbackManagerForLLMRun","options?: Partial<CallOptions>","CallbackManager","GenerationChunk","promptValues: BasePromptValueInterface[]","options?: string[] | CallOptions","callbacks?: Callbacks","prompts: string[]","_options?: this[\"ParsedCallOptions\"]","llmResult: LLMResult","llmResults: LLMResult[]","parsedOptions: this[\"ParsedCallOptions\"]","handledOptions: BaseCallbackConfig","startedRunManagers?: CallbackManagerForLLMRun[]","runManagers: CallbackManagerForLLMRun[] | undefined","callbackHandlerPrefersStreaming","output: LLMResult","concat","flattenedOutputs: LLMResult[]","RUN_KEY","missingPromptIndices: number[]","generations: Generation[][]","result","parsedOptions: CallOptions | undefined","options: this[\"ParsedCallOptions\"]","runManager?: CallbackManagerForLLMRun"],"sources":["../../src/language_models/llms.ts"],"sourcesContent":["import type { BasePromptValueInterface } from \"../prompt_values.js\";\nimport {\n type LLMResult,\n RUN_KEY,\n type Generation,\n GenerationChunk,\n} from \"../outputs.js\";\nimport {\n type BaseCallbackConfig,\n CallbackManager,\n type CallbackManagerForLLMRun,\n type Callbacks,\n} from \"../callbacks/manager.js\";\nimport {\n BaseLanguageModel,\n type BaseLanguageModelCallOptions,\n type BaseLanguageModelInput,\n type BaseLanguageModelParams,\n} from \"./base.js\";\nimport type { RunnableConfig } from \"../runnables/config.js\";\nimport type { BaseCache } from \"../caches/index.js\";\nimport { concat } from \"../utils/stream.js\";\nimport { callbackHandlerPrefersStreaming } from \"../callbacks/base.js\";\n\nexport type SerializedLLM = {\n _model: string;\n _type: string;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n} & Record<string, any>;\n\nexport interface BaseLLMParams extends BaseLanguageModelParams {}\n\nexport interface BaseLLMCallOptions extends BaseLanguageModelCallOptions {}\n\n/**\n * LLM Wrapper. Takes in a prompt (or prompts) and returns a string.\n */\nexport abstract class BaseLLM<\n CallOptions extends BaseLLMCallOptions = BaseLLMCallOptions\n> extends BaseLanguageModel<string, CallOptions> {\n // Backwards compatibility since fields have been moved to RunnableConfig\n declare ParsedCallOptions: Omit<\n CallOptions,\n Exclude<keyof RunnableConfig, \"signal\" | \"timeout\" | \"maxConcurrency\">\n >;\n\n // Only ever instantiated in main LangChain\n lc_namespace = [\"langchain\", \"llms\", this._llmType()];\n\n /**\n * This method takes an input and options, and returns a string. 
It\n * converts the input to a prompt value and generates a result based on\n * the prompt.\n * @param input Input for the LLM.\n * @param options Options for the LLM call.\n * @returns A string result based on the prompt.\n */\n async invoke(\n input: BaseLanguageModelInput,\n options?: CallOptions\n ): Promise<string> {\n const promptValue = BaseLLM._convertInputToPromptValue(input);\n const result = await this.generatePrompt(\n [promptValue],\n options,\n options?.callbacks\n );\n return result.generations[0][0].text;\n }\n\n // eslint-disable-next-line require-yield\n async *_streamResponseChunks(\n _input: string,\n _options: this[\"ParsedCallOptions\"],\n _runManager?: CallbackManagerForLLMRun\n ): AsyncGenerator<GenerationChunk> {\n throw new Error(\"Not implemented.\");\n }\n\n protected _separateRunnableConfigFromCallOptionsCompat(\n options?: Partial<CallOptions>\n ): [RunnableConfig, this[\"ParsedCallOptions\"]] {\n // For backwards compat, keep `signal` in both runnableConfig and callOptions\n const [runnableConfig, callOptions] =\n super._separateRunnableConfigFromCallOptions(options);\n (callOptions as this[\"ParsedCallOptions\"]).signal = runnableConfig.signal;\n return [runnableConfig, callOptions as this[\"ParsedCallOptions\"]];\n }\n\n async *_streamIterator(\n input: BaseLanguageModelInput,\n options?: CallOptions\n ): AsyncGenerator<string> {\n // Subclass check required to avoid double callbacks with default implementation\n if (\n this._streamResponseChunks === BaseLLM.prototype._streamResponseChunks\n ) {\n yield this.invoke(input, options);\n } else {\n const prompt = BaseLLM._convertInputToPromptValue(input);\n const [runnableConfig, callOptions] =\n this._separateRunnableConfigFromCallOptionsCompat(options);\n const callbackManager_ = await CallbackManager.configure(\n runnableConfig.callbacks,\n this.callbacks,\n runnableConfig.tags,\n this.tags,\n runnableConfig.metadata,\n this.metadata,\n { verbose: this.verbose }\n );\n const extra = {\n options: callOptions,\n invocation_params: this?.invocationParams(callOptions),\n batch_size: 1,\n };\n const runManagers = await callbackManager_?.handleLLMStart(\n this.toJSON(),\n [prompt.toString()],\n runnableConfig.runId,\n undefined,\n extra,\n undefined,\n undefined,\n runnableConfig.runName\n );\n let generation = new GenerationChunk({\n text: \"\",\n });\n try {\n for await (const chunk of this._streamResponseChunks(\n prompt.toString(),\n callOptions,\n runManagers?.[0]\n )) {\n if (!generation) {\n generation = chunk;\n } else {\n generation = generation.concat(chunk);\n }\n if (typeof chunk.text === \"string\") {\n yield chunk.text;\n }\n }\n } catch (err) {\n await Promise.all(\n (runManagers ?? []).map((runManager) =>\n runManager?.handleLLMError(err)\n )\n );\n throw err;\n }\n await Promise.all(\n (runManagers ?? 
[]).map((runManager) =>\n runManager?.handleLLMEnd({\n generations: [[generation]],\n })\n )\n );\n }\n }\n\n /**\n * This method takes prompt values, options, and callbacks, and generates\n * a result based on the prompts.\n * @param promptValues Prompt values for the LLM.\n * @param options Options for the LLM call.\n * @param callbacks Callbacks for the LLM call.\n * @returns An LLMResult based on the prompts.\n */\n async generatePrompt(\n promptValues: BasePromptValueInterface[],\n options?: string[] | CallOptions,\n callbacks?: Callbacks\n ): Promise<LLMResult> {\n const prompts: string[] = promptValues.map((promptValue) =>\n promptValue.toString()\n );\n return this.generate(prompts, options, callbacks);\n }\n\n /**\n * Run the LLM on the given prompts and input.\n */\n abstract _generate(\n prompts: string[],\n options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<LLMResult>;\n\n /**\n * Get the parameters used to invoke the model\n */\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n invocationParams(_options?: this[\"ParsedCallOptions\"]): any {\n return {};\n }\n\n _flattenLLMResult(llmResult: LLMResult): LLMResult[] {\n const llmResults: LLMResult[] = [];\n\n for (let i = 0; i < llmResult.generations.length; i += 1) {\n const genList = llmResult.generations[i];\n\n if (i === 0) {\n llmResults.push({\n generations: [genList],\n llmOutput: llmResult.llmOutput,\n });\n } else {\n const llmOutput = llmResult.llmOutput\n ? { ...llmResult.llmOutput, tokenUsage: {} }\n : undefined;\n\n llmResults.push({\n generations: [genList],\n llmOutput,\n });\n }\n }\n\n return llmResults;\n }\n\n /** @ignore */\n async _generateUncached(\n prompts: string[],\n parsedOptions: this[\"ParsedCallOptions\"],\n handledOptions: BaseCallbackConfig,\n startedRunManagers?: CallbackManagerForLLMRun[]\n ): Promise<LLMResult> {\n let runManagers: CallbackManagerForLLMRun[] | undefined;\n if (\n startedRunManagers !== undefined &&\n startedRunManagers.length === prompts.length\n ) {\n runManagers = startedRunManagers;\n } else {\n const callbackManager_ = await CallbackManager.configure(\n handledOptions.callbacks,\n this.callbacks,\n handledOptions.tags,\n this.tags,\n handledOptions.metadata,\n this.metadata,\n { verbose: this.verbose }\n );\n const extra = {\n options: parsedOptions,\n invocation_params: this?.invocationParams(parsedOptions),\n batch_size: prompts.length,\n };\n runManagers = await callbackManager_?.handleLLMStart(\n this.toJSON(),\n prompts,\n handledOptions.runId,\n undefined,\n extra,\n undefined,\n undefined,\n handledOptions?.runName\n );\n }\n // Even if stream is not explicitly called, check if model is implicitly\n // called from streamEvents() or streamLog() to get all streamed events.\n // Bail out if _streamResponseChunks not overridden\n const hasStreamingHandler = !!runManagers?.[0].handlers.find(\n callbackHandlerPrefersStreaming\n );\n let output: LLMResult;\n if (\n hasStreamingHandler &&\n prompts.length === 1 &&\n this._streamResponseChunks !== BaseLLM.prototype._streamResponseChunks\n ) {\n try {\n const stream = await this._streamResponseChunks(\n prompts[0],\n parsedOptions,\n runManagers?.[0]\n );\n let aggregated;\n for await (const chunk of stream) {\n if (aggregated === undefined) {\n aggregated = chunk;\n } else {\n aggregated = concat(aggregated, chunk);\n }\n }\n if (aggregated === undefined) {\n throw new Error(\"Received empty response from chat model call.\");\n }\n output = { generations: [[aggregated]], 
llmOutput: {} };\n await runManagers?.[0].handleLLMEnd(output);\n } catch (e) {\n await runManagers?.[0].handleLLMError(e);\n throw e;\n }\n } else {\n try {\n output = await this._generate(prompts, parsedOptions, runManagers?.[0]);\n } catch (err) {\n await Promise.all(\n (runManagers ?? []).map((runManager) =>\n runManager?.handleLLMError(err)\n )\n );\n throw err;\n }\n\n const flattenedOutputs: LLMResult[] = this._flattenLLMResult(output);\n await Promise.all(\n (runManagers ?? []).map((runManager, i) =>\n runManager?.handleLLMEnd(flattenedOutputs[i])\n )\n );\n }\n const runIds = runManagers?.map((manager) => manager.runId) || undefined;\n // This defines RUN_KEY as a non-enumerable property on the output object\n // so that it is not serialized when the output is stringified, and so that\n // it isnt included when listing the keys of the output object.\n Object.defineProperty(output, RUN_KEY, {\n value: runIds ? { runIds } : undefined,\n configurable: true,\n });\n return output;\n }\n\n async _generateCached({\n prompts,\n cache,\n llmStringKey,\n parsedOptions,\n handledOptions,\n runId,\n }: {\n prompts: string[];\n cache: BaseCache<Generation[]>;\n llmStringKey: string;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n parsedOptions: any;\n handledOptions: RunnableConfig;\n runId?: string;\n }): Promise<\n LLMResult & {\n missingPromptIndices: number[];\n startedRunManagers?: CallbackManagerForLLMRun[];\n }\n > {\n const callbackManager_ = await CallbackManager.configure(\n handledOptions.callbacks,\n this.callbacks,\n handledOptions.tags,\n this.tags,\n handledOptions.metadata,\n this.metadata,\n { verbose: this.verbose }\n );\n const extra = {\n options: parsedOptions,\n invocation_params: this?.invocationParams(parsedOptions),\n batch_size: prompts.length,\n };\n const runManagers = await callbackManager_?.handleLLMStart(\n this.toJSON(),\n prompts,\n runId,\n undefined,\n extra,\n undefined,\n undefined,\n handledOptions?.runName\n );\n\n // generate results\n const missingPromptIndices: number[] = [];\n const results = await Promise.allSettled(\n prompts.map(async (prompt, index) => {\n const result = await cache.lookup(prompt, llmStringKey);\n if (result == null) {\n missingPromptIndices.push(index);\n }\n return result;\n })\n );\n\n // Map run managers to the results before filtering out null results\n // Null results are just absent from the cache.\n const cachedResults = results\n .map((result, index) => ({ result, runManager: runManagers?.[index] }))\n .filter(\n ({ result }) =>\n (result.status === \"fulfilled\" && result.value != null) ||\n result.status === \"rejected\"\n );\n\n // Handle results and call run managers\n const generations: Generation[][] = [];\n await Promise.all(\n cachedResults.map(async ({ result: promiseResult, runManager }, i) => {\n if (promiseResult.status === \"fulfilled\") {\n const result = promiseResult.value as Generation[];\n generations[i] = result.map((result) => {\n result.generationInfo = {\n ...result.generationInfo,\n tokenUsage: {},\n };\n return result;\n });\n if (result.length) {\n await runManager?.handleLLMNewToken(result[0].text);\n }\n return runManager?.handleLLMEnd(\n {\n generations: [result],\n },\n undefined,\n undefined,\n undefined,\n {\n cached: true,\n }\n );\n } else {\n // status === \"rejected\"\n await runManager?.handleLLMError(\n promiseResult.reason,\n undefined,\n undefined,\n undefined,\n {\n cached: true,\n }\n );\n return Promise.reject(promiseResult.reason);\n }\n })\n );\n\n const 
output = {\n generations,\n missingPromptIndices,\n startedRunManagers: runManagers,\n };\n\n // This defines RUN_KEY as a non-enumerable property on the output object\n // so that it is not serialized when the output is stringified, and so that\n // it isnt included when listing the keys of the output object.\n Object.defineProperty(output, RUN_KEY, {\n value: runManagers\n ? { runIds: runManagers?.map((manager) => manager.runId) }\n : undefined,\n configurable: true,\n });\n\n return output;\n }\n\n /**\n * Run the LLM on the given prompts and input, handling caching.\n */\n async generate(\n prompts: string[],\n options?: string[] | CallOptions,\n callbacks?: Callbacks\n ): Promise<LLMResult> {\n if (!Array.isArray(prompts)) {\n throw new Error(\"Argument 'prompts' is expected to be a string[]\");\n }\n\n let parsedOptions: CallOptions | undefined;\n if (Array.isArray(options)) {\n parsedOptions = { stop: options } as CallOptions;\n } else {\n parsedOptions = options;\n }\n\n const [runnableConfig, callOptions] =\n this._separateRunnableConfigFromCallOptionsCompat(parsedOptions);\n runnableConfig.callbacks = runnableConfig.callbacks ?? callbacks;\n\n if (!this.cache) {\n return this._generateUncached(prompts, callOptions, runnableConfig);\n }\n\n const { cache } = this;\n const llmStringKey = this._getSerializedCacheKeyParametersForCall(\n callOptions as CallOptions\n );\n const { generations, missingPromptIndices, startedRunManagers } =\n await this._generateCached({\n prompts,\n cache,\n llmStringKey,\n parsedOptions: callOptions,\n handledOptions: runnableConfig,\n runId: runnableConfig.runId,\n });\n\n let llmOutput = {};\n if (missingPromptIndices.length > 0) {\n const results = await this._generateUncached(\n missingPromptIndices.map((i) => prompts[i]),\n callOptions,\n runnableConfig,\n startedRunManagers !== undefined\n ? missingPromptIndices.map((i) => startedRunManagers?.[i])\n : undefined\n );\n await Promise.all(\n results.generations.map(async (generation, index) => {\n const promptIndex = missingPromptIndices[index];\n generations[promptIndex] = generation;\n return cache.update(prompts[promptIndex], llmStringKey, generation);\n })\n );\n llmOutput = results.llmOutput ?? 
{};\n }\n\n return { generations, llmOutput } as LLMResult;\n }\n\n /**\n * Get the identifying parameters of the LLM.\n */\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n _identifyingParams(): Record<string, any> {\n return {};\n }\n\n /**\n * Return the string type key uniquely identifying this class of LLM.\n */\n abstract _llmType(): string;\n\n _modelType(): string {\n return \"base_llm\" as const;\n }\n}\n\n/**\n * LLM class that provides a simpler interface to subclass than {@link BaseLLM}.\n *\n * Requires only implementing a simpler {@link _call} method instead of {@link _generate}.\n *\n * @augments BaseLLM\n */\nexport abstract class LLM<\n CallOptions extends BaseLLMCallOptions = BaseLLMCallOptions\n> extends BaseLLM<CallOptions> {\n /**\n * Run the LLM on the given prompt and input.\n */\n abstract _call(\n prompt: string,\n options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<string>;\n\n async _generate(\n prompts: string[],\n options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<LLMResult> {\n const generations: Generation[][] = await Promise.all(\n prompts.map((prompt, promptIndex) =>\n this._call(prompt, { ...options, promptIndex }, runManager).then(\n (text) => [{ text }]\n )\n )\n );\n return { generations };\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;AAqCA,IAAsB,UAAtB,MAAsB,gBAEZA,+CAAuC;CAQ/C,eAAe;EAAC;EAAa;EAAQ,KAAK,UAAU;CAAC;;;;;;;;;CAUrD,MAAM,OACJC,OACAC,SACiB;EACjB,MAAM,cAAc,QAAQ,2BAA2B,MAAM;EAC7D,MAAM,SAAS,MAAM,KAAK,eACxB,CAAC,WAAY,GACb,SACA,SAAS,UACV;AACD,SAAO,OAAO,YAAY,GAAG,GAAG;CACjC;CAGD,OAAO,sBACLC,QACAC,UACAC,aACiC;AACjC,QAAM,IAAI,MAAM;CACjB;CAED,AAAU,6CACRC,SAC6C;EAE7C,MAAM,CAAC,gBAAgB,YAAY,GACjC,MAAM,uCAAuC,QAAQ;EACtD,YAA0C,SAAS,eAAe;AACnE,SAAO,CAAC,gBAAgB,WAAyC;CAClE;CAED,OAAO,gBACLL,OACAC,SACwB;AAExB,MACE,KAAK,0BAA0B,QAAQ,UAAU,uBAEjD,MAAM,KAAK,OAAO,OAAO,QAAQ;OAC5B;GACL,MAAM,SAAS,QAAQ,2BAA2B,MAAM;GACxD,MAAM,CAAC,gBAAgB,YAAY,GACjC,KAAK,6CAA6C,QAAQ;GAC5D,MAAM,mBAAmB,MAAMK,0CAAgB,UAC7C,eAAe,WACf,KAAK,WACL,eAAe,MACf,KAAK,MACL,eAAe,UACf,KAAK,UACL,EAAE,SAAS,KAAK,QAAS,EAC1B;GACD,MAAM,QAAQ;IACZ,SAAS;IACT,mBAAmB,MAAM,iBAAiB,YAAY;IACtD,YAAY;GACb;GACD,MAAM,cAAc,MAAM,kBAAkB,eAC1C,KAAK,QAAQ,EACb,CAAC,OAAO,UAAU,AAAC,GACnB,eAAe,OACf,QACA,OACA,QACA,QACA,eAAe,QAChB;GACD,IAAI,aAAa,IAAIC,gCAAgB,EACnC,MAAM,GACP;AACD,OAAI;AACF,eAAW,MAAM,SAAS,KAAK,sBAC7B,OAAO,UAAU,EACjB,aACA,cAAc,GACf,EAAE;AACD,SAAI,CAAC,YACH,aAAa;UAEb,aAAa,WAAW,OAAO,MAAM;AAEvC,SAAI,OAAO,MAAM,SAAS,UACxB,MAAM,MAAM;IAEf;GACF,SAAQ,KAAK;IACZ,MAAM,QAAQ,KACX,eAAe,CAAE,GAAE,IAAI,CAAC,eACvB,YAAY,eAAe,IAAI,CAChC,CACF;AACD,UAAM;GACP;GACD,MAAM,QAAQ,KACX,eAAe,CAAE,GAAE,IAAI,CAAC,eACvB,YAAY,aAAa,EACvB,aAAa,CAAC,CAAC,UAAW,CAAC,EAC5B,EAAC,CACH,CACF;EACF;CACF;;;;;;;;;CAUD,MAAM,eACJC,cACAC,SACAC,WACoB;EACpB,MAAMC,UAAoB,aAAa,IAAI,CAAC,gBAC1C,YAAY,UAAU,CACvB;AACD,SAAO,KAAK,SAAS,SAAS,SAAS,UAAU;CAClD;;;;CAeD,iBAAiBC,UAA2C;AAC1D,SAAO,CAAE;CACV;CAED,kBAAkBC,WAAmC;EACnD,MAAMC,aAA0B,CAAE;AAElC,OAAK,IAAI,IAAI,GAAG,IAAI,UAAU,YAAY,QAAQ,KAAK,GAAG;GACxD,MAAM,UAAU,UAAU,YAAY;AAEtC,OAAI,MAAM,GACR,WAAW,KAAK;IACd,aAAa,CAAC,OAAQ;IACtB,WAAW,UAAU;GACtB,EAAC;QACG;IACL,MAAM,YAAY,UAAU,YACxB;KAAE,GAAG,UAAU;KAAW,YAAY,CAAE;IAAE,IAC1C;IAEJ,WAAW,KAAK;KACd,aAAa,CAAC,OAAQ;KACtB;IACD,EAAC;GACH;EACF;AAED,SAAO;CACR;;CAGD,MAAM,kBACJH,SACAI,eACAC,gBACAC,oBACoB;EACpB,IAAIC;AACJ,MACE,uBAAuB,UACvB,mBAAmB,WAAW,QAAQ,QAEtC,cAAc;OACT;GACL,MAAM,mBAAmB,MAAMZ,0CAAgB,UAC7C,eAAe,WACf,KAAK,WACL,eAAe,MACf,KAAK,MACL,eAAe,UACf,KAAK,UACL,EAAE,SAAS,KAAK,QAAS,EAC1B;GACD,MAAM,QAAQ;IACZ,SAAS;IA
CT,mBAAmB,MAAM,iBAAiB,cAAc;IACxD,YAAY,QAAQ;GACrB;GACD,cAAc,MAAM,kBAAkB,eACpC,KAAK,QAAQ,EACb,SACA,eAAe,OACf,QACA,OACA,QACA,QACA,gBAAgB,QACjB;EACF;EAID,MAAM,sBAAsB,CAAC,CAAC,cAAc,GAAG,SAAS,KACtDa,uDACD;EACD,IAAIC;AACJ,MACE,uBACA,QAAQ,WAAW,KACnB,KAAK,0BAA0B,QAAQ,UAAU,sBAEjD,KAAI;GACF,MAAM,SAAS,MAAM,KAAK,sBACxB,QAAQ,IACR,eACA,cAAc,GACf;GACD,IAAI;AACJ,cAAW,MAAM,SAAS,OACxB,KAAI,eAAe,QACjB,aAAa;QAEb,aAAaC,4BAAO,YAAY,MAAM;AAG1C,OAAI,eAAe,OACjB,OAAM,IAAI,MAAM;GAElB,SAAS;IAAE,aAAa,CAAC,CAAC,UAAW,CAAC;IAAE,WAAW,CAAE;GAAE;GACvD,MAAM,cAAc,GAAG,aAAa,OAAO;EAC5C,SAAQ,GAAG;GACV,MAAM,cAAc,GAAG,eAAe,EAAE;AACxC,SAAM;EACP;OACI;AACL,OAAI;IACF,SAAS,MAAM,KAAK,UAAU,SAAS,eAAe,cAAc,GAAG;GACxE,SAAQ,KAAK;IACZ,MAAM,QAAQ,KACX,eAAe,CAAE,GAAE,IAAI,CAAC,eACvB,YAAY,eAAe,IAAI,CAChC,CACF;AACD,UAAM;GACP;GAED,MAAMC,mBAAgC,KAAK,kBAAkB,OAAO;GACpE,MAAM,QAAQ,KACX,eAAe,CAAE,GAAE,IAAI,CAAC,YAAY,MACnC,YAAY,aAAa,iBAAiB,GAAG,CAC9C,CACF;EACF;EACD,MAAM,SAAS,aAAa,IAAI,CAAC,YAAY,QAAQ,MAAM,IAAI;EAI/D,OAAO,eAAe,QAAQC,yBAAS;GACrC,OAAO,SAAS,EAAE,OAAQ,IAAG;GAC7B,cAAc;EACf,EAAC;AACF,SAAO;CACR;CAED,MAAM,gBAAgB,EACpB,SACA,OACA,cACA,eACA,gBACA,OASD,EAKC;EACA,MAAM,mBAAmB,MAAMjB,0CAAgB,UAC7C,eAAe,WACf,KAAK,WACL,eAAe,MACf,KAAK,MACL,eAAe,UACf,KAAK,UACL,EAAE,SAAS,KAAK,QAAS,EAC1B;EACD,MAAM,QAAQ;GACZ,SAAS;GACT,mBAAmB,MAAM,iBAAiB,cAAc;GACxD,YAAY,QAAQ;EACrB;EACD,MAAM,cAAc,MAAM,kBAAkB,eAC1C,KAAK,QAAQ,EACb,SACA,OACA,QACA,OACA,QACA,QACA,gBAAgB,QACjB;EAGD,MAAMkB,uBAAiC,CAAE;EACzC,MAAM,UAAU,MAAM,QAAQ,WAC5B,QAAQ,IAAI,OAAO,QAAQ,UAAU;GACnC,MAAM,SAAS,MAAM,MAAM,OAAO,QAAQ,aAAa;AACvD,OAAI,UAAU,MACZ,qBAAqB,KAAK,MAAM;AAElC,UAAO;EACR,EAAC,CACH;EAID,MAAM,gBAAgB,QACnB,IAAI,CAAC,QAAQ,WAAW;GAAE;GAAQ,YAAY,cAAc;EAAQ,GAAE,CACtE,OACC,CAAC,EAAE,QAAQ,KACR,OAAO,WAAW,eAAe,OAAO,SAAS,QAClD,OAAO,WAAW,WACrB;EAGH,MAAMC,cAA8B,CAAE;EACtC,MAAM,QAAQ,IACZ,cAAc,IAAI,OAAO,EAAE,QAAQ,eAAe,YAAY,EAAE,MAAM;AACpE,OAAI,cAAc,WAAW,aAAa;IACxC,MAAM,SAAS,cAAc;IAC7B,YAAY,KAAK,OAAO,IAAI,CAACC,aAAW;KACtCA,SAAO,iBAAiB;MACtB,GAAGA,SAAO;MACV,YAAY,CAAE;KACf;AACD,YAAOA;IACR,EAAC;AACF,QAAI,OAAO,QACT,MAAM,YAAY,kBAAkB,OAAO,GAAG,KAAK;AAErD,WAAO,YAAY,aACjB,EACE,aAAa,CAAC,MAAO,EACtB,GACD,QACA,QACA,QACA,EACE,QAAQ,KACT,EACF;GACF,OAAM;IAEL,MAAM,YAAY,eAChB,cAAc,QACd,QACA,QACA,QACA,EACE,QAAQ,KACT,EACF;AACD,WAAO,QAAQ,OAAO,cAAc,OAAO;GAC5C;EACF,EAAC,CACH;EAED,MAAM,SAAS;GACb;GACA;GACA,oBAAoB;EACrB;EAKD,OAAO,eAAe,QAAQH,yBAAS;GACrC,OAAO,cACH,EAAE,QAAQ,aAAa,IAAI,CAAC,YAAY,QAAQ,MAAM,CAAE,IACxD;GACJ,cAAc;EACf,EAAC;AAEF,SAAO;CACR;;;;CAKD,MAAM,SACJZ,SACAF,SACAC,WACoB;AACpB,MAAI,CAAC,MAAM,QAAQ,QAAQ,CACzB,OAAM,IAAI,MAAM;EAGlB,IAAIiB;AACJ,MAAI,MAAM,QAAQ,QAAQ,EACxB,gBAAgB,EAAE,MAAM,QAAS;OAEjC,gBAAgB;EAGlB,MAAM,CAAC,gBAAgB,YAAY,GACjC,KAAK,6CAA6C,cAAc;EAClE,eAAe,YAAY,eAAe,aAAa;AAEvD,MAAI,CAAC,KAAK,MACR,QAAO,KAAK,kBAAkB,SAAS,aAAa,eAAe;EAGrE,MAAM,EAAE,OAAO,GAAG;EAClB,MAAM,eAAe,KAAK,wCACxB,YACD;EACD,MAAM,EAAE,aAAa,sBAAsB,oBAAoB,GAC7D,MAAM,KAAK,gBAAgB;GACzB;GACA;GACA;GACA,eAAe;GACf,gBAAgB;GAChB,OAAO,eAAe;EACvB,EAAC;EAEJ,IAAI,YAAY,CAAE;AAClB,MAAI,qBAAqB,SAAS,GAAG;GACnC,MAAM,UAAU,MAAM,KAAK,kBACzB,qBAAqB,IAAI,CAAC,MAAM,QAAQ,GAAG,EAC3C,aACA,gBACA,uBAAuB,SACnB,qBAAqB,IAAI,CAAC,MAAM,qBAAqB,GAAG,GACxD,OACL;GACD,MAAM,QAAQ,IACZ,QAAQ,YAAY,IAAI,OAAO,YAAY,UAAU;IACnD,MAAM,cAAc,qBAAqB;IACzC,YAAY,eAAe;AAC3B,WAAO,MAAM,OAAO,QAAQ,cAAc,cAAc,WAAW;GACpE,EAAC,CACH;GACD,YAAY,QAAQ,aAAa,CAAE;EACpC;AAED,SAAO;GAAE;GAAa;EAAW;CAClC;;;;CAMD,qBAA0C;AACxC,SAAO,CAAE;CACV;CAOD,aAAqB;AACnB,SAAO;CACR;AACF;;;;;;;;AASD,IAAsB,MAAtB,cAEU,QAAqB;CAU7B,MAAM,UACJhB,SACAiB,SACAC,YACoB;EACpB,MAAMJ,cAA8B,MAAM,QAAQ,IAChD,QAAQ,IAAI,CAAC,QAAQ,gBACnB,KAAK,MAAM,QAAQ;GAAE,GAAG;GAAS;EAAa,GAAE,WAAW,CAA
C,KAC1D,CAAC,SAAS,CAAC,EAAE,KAAM,CAAC,EACrB,CACF,CACF;AACD,SAAO,EAAE,YAAa;CACvB;AACF"}
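
The sourcesContent embedded in the map above carries the full TypeScript for BaseLLM and LLM. Per its own doc comments, LLM is the simpler base to subclass: it requires only _call (plus the abstract _llmType) and derives _generate from it. Below is a minimal sketch of a custom model on that interface; EchoLLM is a hypothetical name, and the @langchain/core/language_models/llms and @langchain/core/callbacks/manager subpath imports are assumed to mirror the src/language_models/llms.ts layout shown in the map.

import { LLM } from "@langchain/core/language_models/llms";
import type { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";

// Minimal subclass: _call takes one prompt string plus the parsed call
// options and resolves to the generated text; BaseLLM handles prompt
// conversion, callbacks, and caching around it.
class EchoLLM extends LLM {
  _llmType(): string {
    return "echo"; // string key uniquely identifying this class of LLM
  }

  async _call(
    prompt: string,
    _options: this["ParsedCallOptions"],
    _runManager?: CallbackManagerForLLMRun
  ): Promise<string> {
    return `echo: ${prompt}`;
  }
}

// invoke() converts the input to a prompt value, calls generatePrompt(),
// and returns generations[0][0].text (in an ES module context).
const model = new EchoLLM({});
const text = await model.invoke("hello");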
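For token streaming, the source shows that _streamIterator and _generateUncached only take the streaming path when _streamResponseChunks is overridden (both compare against BaseLLM.prototype._streamResponseChunks before using it). A sketch of such an override, again with a hypothetical model name; GenerationChunk and its { text } constructor come straight from the embedded source.

import { LLM } from "@langchain/core/language_models/llms";
import { GenerationChunk } from "@langchain/core/outputs";
import type { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";

class StreamingEchoLLM extends LLM {
  _llmType(): string {
    return "streaming-echo";
  }

  async _call(
    prompt: string,
    options: this["ParsedCallOptions"],
    runManager?: CallbackManagerForLLMRun
  ): Promise<string> {
    // Reuse the streaming path so invoke() and stream() stay consistent.
    let text = "";
    for await (const chunk of this._streamResponseChunks(prompt, options, runManager)) {
      text += chunk.text;
    }
    return text;
  }

  // Overriding this opts the model into real streaming: _streamIterator
  // yields each chunk's text, and _generateUncached aggregates chunks with
  // concat() when a callback handler prefers streaming.
  async *_streamResponseChunks(
    prompt: string,
    _options: this["ParsedCallOptions"],
    runManager?: CallbackManagerForLLMRun
  ): AsyncGenerator<GenerationChunk> {
    for (const word of prompt.split(" ")) {
      const chunk = new GenerationChunk({ text: `${word} ` });
      // Report each new token to the active run's callback manager.
      await runManager?.handleLLMNewToken(chunk.text);
      yield chunk;
    }
  }
}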