UNPKG

@langchain/openai

Version:
1 lines 24.4 kB
{"version":3,"file":"completions.cjs","names":["BaseChatOpenAI","options?: this[\"ParsedCallOptions\"]","extra?: { streaming?: boolean }","strict: boolean | undefined","params: Partial<ChatCompletionsInvocationParams>","formatToOpenAIToolChoice","isReasoningModel","messages: BaseMessage[]","options: this[\"ParsedCallOptions\"]","runManager?: CallbackManagerForLLMRun","messagesMapped: OpenAIClient.Chat.Completions.ChatCompletionMessageParam[]","convertMessagesToCompletionsMessageParams","finalChunks: Record<number, ChatGenerationChunk>","generations: ChatGeneration[]","generation: ChatGeneration","AIMessage","defaultRole: OpenAIClient.Chat.ChatCompletionRole | undefined","usage: OpenAIClient.Completions.CompletionUsage | undefined","generationInfo: Record<string, any>","ChatGenerationChunk","AIMessageChunk","request: OpenAIClient.Chat.ChatCompletionCreateParams","requestOptions?: OpenAIClient.RequestOptions","wrapOpenAIClientError","delta: Record<string, any>","rawResponse: OpenAIClient.Chat.Completions.ChatCompletionChunk","defaultRole?: OpenAIClient.Chat.ChatCompletionRole","convertCompletionsDeltaToBaseMessageChunk","message: OpenAIClient.ChatCompletionMessage","rawResponse: OpenAIClient.ChatCompletion","convertCompletionsMessageToBaseMessage"],"sources":["../../src/chat_models/completions.ts"],"sourcesContent":["import { OpenAI as OpenAIClient } from \"openai\";\nimport { CallbackManagerForLLMRun } from \"@langchain/core/callbacks/manager\";\nimport {\n AIMessage,\n AIMessageChunk,\n type BaseMessage,\n isAIMessage,\n type UsageMetadata,\n type AIMessageFields,\n BaseMessageChunk,\n} from \"@langchain/core/messages\";\nimport {\n ChatGenerationChunk,\n type ChatGeneration,\n type ChatResult,\n} from \"@langchain/core/outputs\";\nimport { NewTokenIndices } from \"@langchain/core/callbacks/base\";\nimport { wrapOpenAIClientError } from \"../utils/client.js\";\nimport {\n OpenAIToolChoice,\n formatToOpenAIToolChoice,\n _convertToOpenAITool,\n} from \"../utils/tools.js\";\nimport { isReasoningModel } from \"../utils/misc.js\";\nimport { BaseChatOpenAICallOptions } from \"./base.js\";\nimport { BaseChatOpenAI } from \"./base.js\";\nimport {\n convertCompletionsDeltaToBaseMessageChunk,\n convertCompletionsMessageToBaseMessage,\n convertMessagesToCompletionsMessageParams,\n} from \"../converters/completions.js\";\n\nexport interface ChatOpenAICompletionsCallOptions\n extends BaseChatOpenAICallOptions {}\n\ntype ChatCompletionsInvocationParams = Omit<\n OpenAIClient.Chat.Completions.ChatCompletionCreateParams,\n \"messages\"\n>;\n\n/**\n * OpenAI Completions API implementation.\n * @internal\n */\nexport class ChatOpenAICompletions<\n CallOptions extends ChatOpenAICompletionsCallOptions = ChatOpenAICompletionsCallOptions\n> extends BaseChatOpenAI<CallOptions> {\n /** @internal */\n override invocationParams(\n options?: this[\"ParsedCallOptions\"],\n extra?: { streaming?: boolean }\n ): ChatCompletionsInvocationParams {\n let strict: boolean | undefined;\n if (options?.strict !== undefined) {\n strict = options.strict;\n } else if (this.supportsStrictToolCalling !== undefined) {\n strict = this.supportsStrictToolCalling;\n }\n\n let streamOptionsConfig = {};\n if (options?.stream_options !== undefined) {\n streamOptionsConfig = { stream_options: options.stream_options };\n } else if (this.streamUsage && (this.streaming || extra?.streaming)) {\n streamOptionsConfig = { stream_options: { include_usage: true } };\n }\n\n const params: Partial<ChatCompletionsInvocationParams> = {\n model: this.model,\n temperature: this.temperature,\n top_p: this.topP,\n frequency_penalty: this.frequencyPenalty,\n presence_penalty: this.presencePenalty,\n logprobs: this.logprobs,\n top_logprobs: this.topLogprobs,\n n: this.n,\n logit_bias: this.logitBias,\n stop: options?.stop ?? this.stopSequences,\n user: this.user,\n // if include_usage is set or streamUsage then stream must be set to true.\n stream: this.streaming,\n functions: options?.functions,\n function_call: options?.function_call,\n tools: options?.tools?.length\n ? options.tools.map((tool) =>\n this._convertChatOpenAIToolToCompletionsTool(tool, { strict })\n )\n : undefined,\n tool_choice: formatToOpenAIToolChoice(\n options?.tool_choice as OpenAIToolChoice\n ),\n response_format: this._getResponseFormat(options?.response_format),\n seed: options?.seed,\n ...streamOptionsConfig,\n parallel_tool_calls: options?.parallel_tool_calls,\n ...(this.audio || options?.audio\n ? { audio: this.audio || options?.audio }\n : {}),\n ...(this.modalities || options?.modalities\n ? { modalities: this.modalities || options?.modalities }\n : {}),\n ...this.modelKwargs,\n prompt_cache_key: options?.promptCacheKey ?? this.promptCacheKey,\n prompt_cache_retention:\n options?.promptCacheRetention ?? this.promptCacheRetention,\n verbosity: options?.verbosity ?? this.verbosity,\n };\n if (options?.prediction !== undefined) {\n params.prediction = options.prediction;\n }\n if (this.service_tier !== undefined) {\n params.service_tier = this.service_tier;\n }\n if (options?.service_tier !== undefined) {\n params.service_tier = options.service_tier;\n }\n const reasoning = this._getReasoningParams(options);\n if (reasoning !== undefined && reasoning.effort !== undefined) {\n params.reasoning_effort = reasoning.effort;\n }\n if (isReasoningModel(params.model)) {\n params.max_completion_tokens =\n this.maxTokens === -1 ? undefined : this.maxTokens;\n } else {\n params.max_tokens = this.maxTokens === -1 ? undefined : this.maxTokens;\n }\n\n return params as ChatCompletionsInvocationParams;\n }\n\n async _generate(\n messages: BaseMessage[],\n options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<ChatResult> {\n const usageMetadata = {} as UsageMetadata;\n const params = this.invocationParams(options);\n const messagesMapped: OpenAIClient.Chat.Completions.ChatCompletionMessageParam[] =\n convertMessagesToCompletionsMessageParams({\n messages,\n model: this.model,\n });\n\n if (params.stream) {\n const stream = this._streamResponseChunks(messages, options, runManager);\n const finalChunks: Record<number, ChatGenerationChunk> = {};\n for await (const chunk of stream) {\n chunk.message.response_metadata = {\n ...chunk.generationInfo,\n ...chunk.message.response_metadata,\n };\n const index =\n (chunk.generationInfo as NewTokenIndices)?.completion ?? 0;\n if (finalChunks[index] === undefined) {\n finalChunks[index] = chunk;\n } else {\n finalChunks[index] = finalChunks[index].concat(chunk);\n }\n }\n const generations = Object.entries(finalChunks)\n .sort(([aKey], [bKey]) => parseInt(aKey, 10) - parseInt(bKey, 10))\n .map(([_, value]) => value);\n\n const { functions, function_call } = this.invocationParams(options);\n\n // OpenAI does not support token usage report under stream mode,\n // fallback to estimation.\n\n const promptTokenUsage = await this._getEstimatedTokenCountFromPrompt(\n messages,\n functions,\n function_call\n );\n const completionTokenUsage = await this._getNumTokensFromGenerations(\n generations\n );\n\n usageMetadata.input_tokens = promptTokenUsage;\n usageMetadata.output_tokens = completionTokenUsage;\n usageMetadata.total_tokens = promptTokenUsage + completionTokenUsage;\n return {\n generations,\n llmOutput: {\n estimatedTokenUsage: {\n promptTokens: usageMetadata.input_tokens,\n completionTokens: usageMetadata.output_tokens,\n totalTokens: usageMetadata.total_tokens,\n },\n },\n };\n } else {\n const data = await this.completionWithRetry(\n {\n ...params,\n stream: false,\n messages: messagesMapped,\n },\n {\n signal: options?.signal,\n ...options?.options,\n }\n );\n\n const {\n completion_tokens: completionTokens,\n prompt_tokens: promptTokens,\n total_tokens: totalTokens,\n prompt_tokens_details: promptTokensDetails,\n completion_tokens_details: completionTokensDetails,\n } = data?.usage ?? {};\n\n if (completionTokens) {\n usageMetadata.output_tokens =\n (usageMetadata.output_tokens ?? 0) + completionTokens;\n }\n\n if (promptTokens) {\n usageMetadata.input_tokens =\n (usageMetadata.input_tokens ?? 0) + promptTokens;\n }\n\n if (totalTokens) {\n usageMetadata.total_tokens =\n (usageMetadata.total_tokens ?? 0) + totalTokens;\n }\n\n if (\n promptTokensDetails?.audio_tokens !== null ||\n promptTokensDetails?.cached_tokens !== null\n ) {\n usageMetadata.input_token_details = {\n ...(promptTokensDetails?.audio_tokens !== null && {\n audio: promptTokensDetails?.audio_tokens,\n }),\n ...(promptTokensDetails?.cached_tokens !== null && {\n cache_read: promptTokensDetails?.cached_tokens,\n }),\n };\n }\n\n if (\n completionTokensDetails?.audio_tokens !== null ||\n completionTokensDetails?.reasoning_tokens !== null\n ) {\n usageMetadata.output_token_details = {\n ...(completionTokensDetails?.audio_tokens !== null && {\n audio: completionTokensDetails?.audio_tokens,\n }),\n ...(completionTokensDetails?.reasoning_tokens !== null && {\n reasoning: completionTokensDetails?.reasoning_tokens,\n }),\n };\n }\n\n const generations: ChatGeneration[] = [];\n for (const part of data?.choices ?? []) {\n const text = part.message?.content ?? \"\";\n const generation: ChatGeneration = {\n text,\n message: this._convertCompletionsMessageToBaseMessage(\n part.message ?? { role: \"assistant\" },\n data\n ),\n };\n generation.generationInfo = {\n ...(part.finish_reason ? { finish_reason: part.finish_reason } : {}),\n ...(part.logprobs ? { logprobs: part.logprobs } : {}),\n };\n if (isAIMessage(generation.message)) {\n generation.message.usage_metadata = usageMetadata;\n }\n // Fields are not serialized unless passed to the constructor\n // Doing this ensures all fields on the message are serialized\n generation.message = new AIMessage(\n Object.fromEntries(\n Object.entries(generation.message).filter(\n ([key]) => !key.startsWith(\"lc_\")\n )\n ) as AIMessageFields\n );\n generations.push(generation);\n }\n return {\n generations,\n llmOutput: {\n tokenUsage: {\n promptTokens: usageMetadata.input_tokens,\n completionTokens: usageMetadata.output_tokens,\n totalTokens: usageMetadata.total_tokens,\n },\n },\n };\n }\n }\n\n async *_streamResponseChunks(\n messages: BaseMessage[],\n options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): AsyncGenerator<ChatGenerationChunk> {\n const messagesMapped: OpenAIClient.Chat.Completions.ChatCompletionMessageParam[] =\n convertMessagesToCompletionsMessageParams({\n messages,\n model: this.model,\n });\n\n const params = {\n ...this.invocationParams(options, {\n streaming: true,\n }),\n messages: messagesMapped,\n stream: true as const,\n };\n let defaultRole: OpenAIClient.Chat.ChatCompletionRole | undefined;\n\n const streamIterable = await this.completionWithRetry(params, options);\n let usage: OpenAIClient.Completions.CompletionUsage | undefined;\n for await (const data of streamIterable) {\n const choice = data?.choices?.[0];\n if (data.usage) {\n usage = data.usage;\n }\n if (!choice) {\n continue;\n }\n\n const { delta } = choice;\n if (!delta) {\n continue;\n }\n const chunk = this._convertCompletionsDeltaToBaseMessageChunk(\n delta,\n data,\n defaultRole\n );\n defaultRole = delta.role ?? defaultRole;\n const newTokenIndices = {\n prompt: options.promptIndex ?? 0,\n completion: choice.index ?? 0,\n };\n if (typeof chunk.content !== \"string\") {\n console.log(\n \"[WARNING]: Received non-string content from OpenAI. This is currently not supported.\"\n );\n continue;\n }\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const generationInfo: Record<string, any> = { ...newTokenIndices };\n if (choice.finish_reason != null) {\n generationInfo.finish_reason = choice.finish_reason;\n // Only include system fingerprint in the last chunk for now\n // to avoid concatenation issues\n generationInfo.system_fingerprint = data.system_fingerprint;\n generationInfo.model_name = data.model;\n generationInfo.service_tier = data.service_tier;\n }\n if (this.logprobs) {\n generationInfo.logprobs = choice.logprobs;\n }\n const generationChunk = new ChatGenerationChunk({\n message: chunk,\n text: chunk.content,\n generationInfo,\n });\n yield generationChunk;\n await runManager?.handleLLMNewToken(\n generationChunk.text ?? \"\",\n newTokenIndices,\n undefined,\n undefined,\n undefined,\n { chunk: generationChunk }\n );\n }\n if (usage) {\n const inputTokenDetails = {\n ...(usage.prompt_tokens_details?.audio_tokens !== null && {\n audio: usage.prompt_tokens_details?.audio_tokens,\n }),\n ...(usage.prompt_tokens_details?.cached_tokens !== null && {\n cache_read: usage.prompt_tokens_details?.cached_tokens,\n }),\n };\n const outputTokenDetails = {\n ...(usage.completion_tokens_details?.audio_tokens !== null && {\n audio: usage.completion_tokens_details?.audio_tokens,\n }),\n ...(usage.completion_tokens_details?.reasoning_tokens !== null && {\n reasoning: usage.completion_tokens_details?.reasoning_tokens,\n }),\n };\n const generationChunk = new ChatGenerationChunk({\n message: new AIMessageChunk({\n content: \"\",\n response_metadata: {\n usage: { ...usage },\n },\n usage_metadata: {\n input_tokens: usage.prompt_tokens,\n output_tokens: usage.completion_tokens,\n total_tokens: usage.total_tokens,\n ...(Object.keys(inputTokenDetails).length > 0 && {\n input_token_details: inputTokenDetails,\n }),\n ...(Object.keys(outputTokenDetails).length > 0 && {\n output_token_details: outputTokenDetails,\n }),\n },\n }),\n text: \"\",\n });\n yield generationChunk;\n }\n if (options.signal?.aborted) {\n throw new Error(\"AbortError\");\n }\n }\n\n async completionWithRetry(\n request: OpenAIClient.Chat.ChatCompletionCreateParamsStreaming,\n requestOptions?: OpenAIClient.RequestOptions\n ): Promise<AsyncIterable<OpenAIClient.Chat.Completions.ChatCompletionChunk>>;\n\n async completionWithRetry(\n request: OpenAIClient.Chat.ChatCompletionCreateParamsNonStreaming,\n requestOptions?: OpenAIClient.RequestOptions\n ): Promise<OpenAIClient.Chat.Completions.ChatCompletion>;\n\n async completionWithRetry(\n request: OpenAIClient.Chat.ChatCompletionCreateParams,\n requestOptions?: OpenAIClient.RequestOptions\n ): Promise<\n | AsyncIterable<OpenAIClient.Chat.Completions.ChatCompletionChunk>\n | OpenAIClient.Chat.Completions.ChatCompletion\n > {\n const clientOptions = this._getClientOptions(requestOptions);\n const isParseableFormat =\n request.response_format && request.response_format.type === \"json_schema\";\n return this.caller.call(async () => {\n try {\n if (isParseableFormat && !request.stream) {\n return await this.client.chat.completions.parse(\n request,\n clientOptions\n );\n } else {\n return await this.client.chat.completions.create(\n request,\n clientOptions\n );\n }\n } catch (e) {\n const error = wrapOpenAIClientError(e);\n throw error;\n }\n });\n }\n\n /**\n * @deprecated\n * This function was hoisted into a publicly accessible function from a\n * different export, but to maintain backwards compatibility with chat models\n * that depend on ChatOpenAICompletions, we'll keep it here as an overridable\n * method. This will be removed in a future release\n */\n protected _convertCompletionsDeltaToBaseMessageChunk(\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n delta: Record<string, any>,\n rawResponse: OpenAIClient.Chat.Completions.ChatCompletionChunk,\n defaultRole?: OpenAIClient.Chat.ChatCompletionRole\n ): BaseMessageChunk {\n return convertCompletionsDeltaToBaseMessageChunk({\n delta,\n rawResponse,\n includeRawResponse: this.__includeRawResponse,\n defaultRole,\n });\n }\n\n /**\n * @deprecated\n * This function was hoisted into a publicly accessible function from a\n * different export, but to maintain backwards compatibility with chat models\n * that depend on ChatOpenAICompletions, we'll keep it here as an overridable\n * method. This will be removed in a future release\n */\n protected _convertCompletionsMessageToBaseMessage(\n message: OpenAIClient.ChatCompletionMessage,\n rawResponse: OpenAIClient.ChatCompletion\n ): BaseMessage {\n return convertCompletionsMessageToBaseMessage({\n message,\n rawResponse,\n includeRawResponse: this.__includeRawResponse,\n });\n }\n}\n"],"mappings":";;;;;;;;;;;;;;AA4CA,IAAa,wBAAb,cAEUA,4BAA4B;;CAEpC,AAAS,iBACPC,SACAC,OACiC;EACjC,IAAIC;AACJ,MAAI,SAAS,WAAW,QACtB,SAAS,QAAQ;WACR,KAAK,8BAA8B,QAC5C,SAAS,KAAK;EAGhB,IAAI,sBAAsB,CAAE;AAC5B,MAAI,SAAS,mBAAmB,QAC9B,sBAAsB,EAAE,gBAAgB,QAAQ,eAAgB;WACvD,KAAK,gBAAgB,KAAK,aAAa,OAAO,YACvD,sBAAsB,EAAE,gBAAgB,EAAE,eAAe,KAAM,EAAE;EAGnE,MAAMC,SAAmD;GACvD,OAAO,KAAK;GACZ,aAAa,KAAK;GAClB,OAAO,KAAK;GACZ,mBAAmB,KAAK;GACxB,kBAAkB,KAAK;GACvB,UAAU,KAAK;GACf,cAAc,KAAK;GACnB,GAAG,KAAK;GACR,YAAY,KAAK;GACjB,MAAM,SAAS,QAAQ,KAAK;GAC5B,MAAM,KAAK;GAEX,QAAQ,KAAK;GACb,WAAW,SAAS;GACpB,eAAe,SAAS;GACxB,OAAO,SAAS,OAAO,SACnB,QAAQ,MAAM,IAAI,CAAC,SACjB,KAAK,wCAAwC,MAAM,EAAE,OAAQ,EAAC,CAC/D,GACD;GACJ,aAAaC,uCACX,SAAS,YACV;GACD,iBAAiB,KAAK,mBAAmB,SAAS,gBAAgB;GAClE,MAAM,SAAS;GACf,GAAG;GACH,qBAAqB,SAAS;GAC9B,GAAI,KAAK,SAAS,SAAS,QACvB,EAAE,OAAO,KAAK,SAAS,SAAS,MAAO,IACvC,CAAE;GACN,GAAI,KAAK,cAAc,SAAS,aAC5B,EAAE,YAAY,KAAK,cAAc,SAAS,WAAY,IACtD,CAAE;GACN,GAAG,KAAK;GACR,kBAAkB,SAAS,kBAAkB,KAAK;GAClD,wBACE,SAAS,wBAAwB,KAAK;GACxC,WAAW,SAAS,aAAa,KAAK;EACvC;AACD,MAAI,SAAS,eAAe,QAC1B,OAAO,aAAa,QAAQ;AAE9B,MAAI,KAAK,iBAAiB,QACxB,OAAO,eAAe,KAAK;AAE7B,MAAI,SAAS,iBAAiB,QAC5B,OAAO,eAAe,QAAQ;EAEhC,MAAM,YAAY,KAAK,oBAAoB,QAAQ;AACnD,MAAI,cAAc,UAAa,UAAU,WAAW,QAClD,OAAO,mBAAmB,UAAU;AAEtC,MAAIC,8BAAiB,OAAO,MAAM,EAChC,OAAO,wBACL,KAAK,cAAc,KAAK,SAAY,KAAK;OAE3C,OAAO,aAAa,KAAK,cAAc,KAAK,SAAY,KAAK;AAG/D,SAAO;CACR;CAED,MAAM,UACJC,UACAC,SACAC,YACqB;EACrB,MAAM,gBAAgB,CAAE;EACxB,MAAM,SAAS,KAAK,iBAAiB,QAAQ;EAC7C,MAAMC,iBACJC,8DAA0C;GACxC;GACA,OAAO,KAAK;EACb,EAAC;AAEJ,MAAI,OAAO,QAAQ;GACjB,MAAM,SAAS,KAAK,sBAAsB,UAAU,SAAS,WAAW;GACxE,MAAMC,cAAmD,CAAE;AAC3D,cAAW,MAAM,SAAS,QAAQ;IAChC,MAAM,QAAQ,oBAAoB;KAChC,GAAG,MAAM;KACT,GAAG,MAAM,QAAQ;IAClB;IACD,MAAM,QACH,MAAM,gBAAoC,cAAc;AAC3D,QAAI,YAAY,WAAW,QACzB,YAAY,SAAS;SAErB,YAAY,SAAS,YAAY,OAAO,OAAO,MAAM;GAExD;GACD,MAAM,cAAc,OAAO,QAAQ,YAAY,CAC5C,KAAK,CAAC,CAAC,KAAK,EAAE,CAAC,KAAK,KAAK,SAAS,MAAM,GAAG,GAAG,SAAS,MAAM,GAAG,CAAC,CACjE,IAAI,CAAC,CAAC,GAAG,MAAM,KAAK,MAAM;GAE7B,MAAM,EAAE,WAAW,eAAe,GAAG,KAAK,iBAAiB,QAAQ;GAKnE,MAAM,mBAAmB,MAAM,KAAK,kCAClC,UACA,WACA,cACD;GACD,MAAM,uBAAuB,MAAM,KAAK,6BACtC,YACD;GAED,cAAc,eAAe;GAC7B,cAAc,gBAAgB;GAC9B,cAAc,eAAe,mBAAmB;AAChD,UAAO;IACL;IACA,WAAW,EACT,qBAAqB;KACnB,cAAc,cAAc;KAC5B,kBAAkB,cAAc;KAChC,aAAa,cAAc;IAC5B,EACF;GACF;EACF,OAAM;GACL,MAAM,OAAO,MAAM,KAAK,oBACtB;IACE,GAAG;IACH,QAAQ;IACR,UAAU;GACX,GACD;IACE,QAAQ,SAAS;IACjB,GAAG,SAAS;GACb,EACF;GAED,MAAM,EACJ,mBAAmB,kBACnB,eAAe,cACf,cAAc,aACd,uBAAuB,qBACvB,2BAA2B,yBAC5B,GAAG,MAAM,SAAS,CAAE;AAErB,OAAI,kBACF,cAAc,iBACX,cAAc,iBAAiB,KAAK;AAGzC,OAAI,cACF,cAAc,gBACX,cAAc,gBAAgB,KAAK;AAGxC,OAAI,aACF,cAAc,gBACX,cAAc,gBAAgB,KAAK;AAGxC,OACE,qBAAqB,iBAAiB,QACtC,qBAAqB,kBAAkB,MAEvC,cAAc,sBAAsB;IAClC,GAAI,qBAAqB,iBAAiB,QAAQ,EAChD,OAAO,qBAAqB,aAC7B;IACD,GAAI,qBAAqB,kBAAkB,QAAQ,EACjD,YAAY,qBAAqB,cAClC;GACF;AAGH,OACE,yBAAyB,iBAAiB,QAC1C,yBAAyB,qBAAqB,MAE9C,cAAc,uBAAuB;IACnC,GAAI,yBAAyB,iBAAiB,QAAQ,EACpD,OAAO,yBAAyB,aACjC;IACD,GAAI,yBAAyB,qBAAqB,QAAQ,EACxD,WAAW,yBAAyB,iBACrC;GACF;GAGH,MAAMC,cAAgC,CAAE;AACxC,QAAK,MAAM,QAAQ,MAAM,WAAW,CAAE,GAAE;IACtC,MAAM,OAAO,KAAK,SAAS,WAAW;IACtC,MAAMC,aAA6B;KACjC;KACA,SAAS,KAAK,wCACZ,KAAK,WAAW,EAAE,MAAM,YAAa,GACrC,KACD;IACF;IACD,WAAW,iBAAiB;KAC1B,GAAI,KAAK,gBAAgB,EAAE,eAAe,KAAK,cAAe,IAAG,CAAE;KACnE,GAAI,KAAK,WAAW,EAAE,UAAU,KAAK,SAAU,IAAG,CAAE;IACrD;AACD,mDAAgB,WAAW,QAAQ,EACjC,WAAW,QAAQ,iBAAiB;IAItC,WAAW,UAAU,IAAIC,oCACvB,OAAO,YACL,OAAO,QAAQ,WAAW,QAAQ,CAAC,OACjC,CAAC,CAAC,IAAI,KAAK,CAAC,IAAI,WAAW,MAAM,CAClC,CACF;IAEH,YAAY,KAAK,WAAW;GAC7B;AACD,UAAO;IACL;IACA,WAAW,EACT,YAAY;KACV,cAAc,cAAc;KAC5B,kBAAkB,cAAc;KAChC,aAAa,cAAc;IAC5B,EACF;GACF;EACF;CACF;CAED,OAAO,sBACLR,UACAC,SACAC,YACqC;EACrC,MAAMC,iBACJC,8DAA0C;GACxC;GACA,OAAO,KAAK;EACb,EAAC;EAEJ,MAAM,SAAS;GACb,GAAG,KAAK,iBAAiB,SAAS,EAChC,WAAW,KACZ,EAAC;GACF,UAAU;GACV,QAAQ;EACT;EACD,IAAIK;EAEJ,MAAM,iBAAiB,MAAM,KAAK,oBAAoB,QAAQ,QAAQ;EACtE,IAAIC;AACJ,aAAW,MAAM,QAAQ,gBAAgB;GACvC,MAAM,SAAS,MAAM,UAAU;AAC/B,OAAI,KAAK,OACP,QAAQ,KAAK;AAEf,OAAI,CAAC,OACH;GAGF,MAAM,EAAE,OAAO,GAAG;AAClB,OAAI,CAAC,MACH;GAEF,MAAM,QAAQ,KAAK,2CACjB,OACA,MACA,YACD;GACD,cAAc,MAAM,QAAQ;GAC5B,MAAM,kBAAkB;IACtB,QAAQ,QAAQ,eAAe;IAC/B,YAAY,OAAO,SAAS;GAC7B;AACD,OAAI,OAAO,MAAM,YAAY,UAAU;IACrC,QAAQ,IACN,uFACD;AACD;GACD;GAED,MAAMC,iBAAsC,EAAE,GAAG,gBAAiB;AAClE,OAAI,OAAO,iBAAiB,MAAM;IAChC,eAAe,gBAAgB,OAAO;IAGtC,eAAe,qBAAqB,KAAK;IACzC,eAAe,aAAa,KAAK;IACjC,eAAe,eAAe,KAAK;GACpC;AACD,OAAI,KAAK,UACP,eAAe,WAAW,OAAO;GAEnC,MAAM,kBAAkB,IAAIC,6CAAoB;IAC9C,SAAS;IACT,MAAM,MAAM;IACZ;GACD;GACD,MAAM;GACN,MAAM,YAAY,kBAChB,gBAAgB,QAAQ,IACxB,iBACA,QACA,QACA,QACA,EAAE,OAAO,gBAAiB,EAC3B;EACF;AACD,MAAI,OAAO;GACT,MAAM,oBAAoB;IACxB,GAAI,MAAM,uBAAuB,iBAAiB,QAAQ,EACxD,OAAO,MAAM,uBAAuB,aACrC;IACD,GAAI,MAAM,uBAAuB,kBAAkB,QAAQ,EACzD,YAAY,MAAM,uBAAuB,cAC1C;GACF;GACD,MAAM,qBAAqB;IACzB,GAAI,MAAM,2BAA2B,iBAAiB,QAAQ,EAC5D,OAAO,MAAM,2BAA2B,aACzC;IACD,GAAI,MAAM,2BAA2B,qBAAqB,QAAQ,EAChE,WAAW,MAAM,2BAA2B,iBAC7C;GACF;GACD,MAAM,kBAAkB,IAAIA,6CAAoB;IAC9C,SAAS,IAAIC,yCAAe;KAC1B,SAAS;KACT,mBAAmB,EACjB,OAAO,EAAE,GAAG,MAAO,EACpB;KACD,gBAAgB;MACd,cAAc,MAAM;MACpB,eAAe,MAAM;MACrB,cAAc,MAAM;MACpB,GAAI,OAAO,KAAK,kBAAkB,CAAC,SAAS,KAAK,EAC/C,qBAAqB,kBACtB;MACD,GAAI,OAAO,KAAK,mBAAmB,CAAC,SAAS,KAAK,EAChD,sBAAsB,mBACvB;KACF;IACF;IACD,MAAM;GACP;GACD,MAAM;EACP;AACD,MAAI,QAAQ,QAAQ,QAClB,OAAM,IAAI,MAAM;CAEnB;CAYD,MAAM,oBACJC,SACAC,gBAIA;EACA,MAAM,gBAAgB,KAAK,kBAAkB,eAAe;EAC5D,MAAM,oBACJ,QAAQ,mBAAmB,QAAQ,gBAAgB,SAAS;AAC9D,SAAO,KAAK,OAAO,KAAK,YAAY;AAClC,OAAI;AACF,QAAI,qBAAqB,CAAC,QAAQ,OAChC,QAAO,MAAM,KAAK,OAAO,KAAK,YAAY,MACxC,SACA,cACD;QAED,QAAO,MAAM,KAAK,OAAO,KAAK,YAAY,OACxC,SACA,cACD;GAEJ,SAAQ,GAAG;IACV,MAAM,QAAQC,qCAAsB,EAAE;AACtC,UAAM;GACP;EACF,EAAC;CACH;;;;;;;;CASD,AAAU,2CAERC,OACAC,aACAC,aACkB;AAClB,SAAOC,8DAA0C;GAC/C;GACA;GACA,oBAAoB,KAAK;GACzB;EACD,EAAC;CACH;;;;;;;;CASD,AAAU,wCACRC,SACAC,aACa;AACb,SAAOC,2DAAuC;GAC5C;GACA;GACA,oBAAoB,KAAK;EAC1B,EAAC;CACH;AACF"}