@built-in-ai/core
Version:
Browser Built-in AI API provider for Vercel AI SDK v5+ (Chrome & Edge)
1 line • 33.7 kB
Source Map (JSON)
{"version":3,"sources":["../src/built-in-ai-language-model.ts","../src/convert-to-built-in-ai-messages.ts","../src/built-in-ai-embedding-model.ts","../src/built-in-ai-provider.ts"],"sourcesContent":["import {\n LanguageModelV2,\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n LanguageModelV2Prompt,\n LanguageModelV2StreamPart,\n LoadSettingError,\n JSONValue,\n} from \"@ai-sdk/provider\";\nimport { convertToBuiltInAIMessages } from \"./convert-to-built-in-ai-messages\";\n\nexport type BuiltInAIChatModelId = \"text\";\n\nexport interface BuiltInAIChatSettings extends LanguageModelCreateOptions {\n /**\n * Expected input types for the session, for multimodal inputs.\n */\n expectedInputs?: Array<{\n type: \"text\" | \"image\" | \"audio\";\n languages?: string[];\n }>;\n}\n\n/**\n * Check if the browser supports the built-in AI API\n * @returns true if the browser supports the built-in AI API, false otherwise\n */\nexport function doesBrowserSupportBuiltInAI(): boolean {\n return typeof LanguageModel !== \"undefined\";\n}\n\n/**\n * Check if the Prompt API is available\n * @deprecated Use `doesBrowserSupportBuiltInAI()` instead for clearer naming\n * @returns true if the browser supports the built-in AI API, false otherwise\n */\nexport function isBuiltInAIModelAvailable(): boolean {\n return typeof LanguageModel !== \"undefined\";\n}\n\ntype BuiltInAIConfig = {\n provider: string;\n modelId: BuiltInAIChatModelId;\n options: BuiltInAIChatSettings;\n};\n\n/**\n * Detect if the prompt contains multimodal content\n */\nfunction hasMultimodalContent(prompt: LanguageModelV2Prompt): boolean {\n for (const message of prompt) {\n if (message.role === \"user\") {\n for (const part of message.content) {\n if (part.type === \"file\") {\n return true;\n }\n }\n }\n }\n return false;\n}\n\n/**\n * Get expected inputs based on prompt content\n */\nfunction getExpectedInputs(\n prompt: LanguageModelV2Prompt,\n): Array<{ type: \"text\" | \"image\" | \"audio\" }> {\n const inputs = new Set<\"text\" | \"image\" | \"audio\">();\n // Don't add text by default - it's assumed by the Prompt API\n\n for (const message of prompt) {\n if (message.role === \"user\") {\n for (const part of message.content) {\n if (part.type === \"file\") {\n if (part.mediaType?.startsWith(\"image/\")) {\n inputs.add(\"image\");\n } else if (part.mediaType?.startsWith(\"audio/\")) {\n inputs.add(\"audio\");\n }\n }\n }\n }\n }\n\n return Array.from(inputs).map((type) => ({ type }));\n}\n\nexport class BuiltInAIChatLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = \"v2\";\n readonly modelId: BuiltInAIChatModelId;\n readonly provider = \"browser-ai\";\n\n private readonly config: BuiltInAIConfig;\n private session!: LanguageModel;\n\n constructor(\n modelId: BuiltInAIChatModelId,\n options: BuiltInAIChatSettings = {},\n ) {\n this.modelId = modelId;\n this.config = {\n provider: this.provider,\n modelId,\n options,\n };\n }\n\n readonly supportedUrls: Record<string, RegExp[]> = {\n \"image/*\": [/^https?:\\/\\/.+$/],\n \"audio/*\": [/^https?:\\/\\/.+$/],\n };\n\n private async getSession(\n options?: LanguageModelCreateOptions,\n expectedInputs?: Array<{ type: \"text\" | \"image\" | \"audio\" }>,\n systemMessage?: string,\n onDownloadProgress?: (progress: number) => void,\n ): Promise<LanguageModel> {\n if (typeof LanguageModel === \"undefined\") {\n throw new LoadSettingError({\n message:\n \"Prompt API is not available. 
This library requires Chrome or Edge browser with built-in AI capabilities.\",\n });\n }\n\n if (this.session) return this.session;\n\n const availability = await LanguageModel.availability();\n\n if (availability === \"unavailable\") {\n throw new LoadSettingError({ message: \"Built-in model not available\" });\n }\n\n const mergedOptions = {\n ...this.config.options,\n ...options,\n };\n\n // Add system message to initialPrompts if provided\n if (systemMessage) {\n mergedOptions.initialPrompts = [\n { role: \"system\", content: systemMessage },\n ];\n }\n\n // Add expected inputs if provided\n if (expectedInputs && expectedInputs.length > 0) {\n mergedOptions.expectedInputs = expectedInputs;\n }\n\n // Add download progress monitoring if callback provided\n if (onDownloadProgress) {\n mergedOptions.monitor = (m: CreateMonitor) => {\n m.addEventListener(\"downloadprogress\", (e: ProgressEvent) => {\n onDownloadProgress(e.loaded); // e.loaded is between 0 and 1\n });\n };\n }\n\n this.session = await LanguageModel.create(mergedOptions);\n\n return this.session;\n }\n\n private getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n responseFormat,\n seed,\n tools,\n }: Parameters<LanguageModelV2[\"doGenerate\"]>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n // Add warnings for unsupported settings\n if (tools && tools.length > 0) {\n warnings.push({\n type: \"unsupported-setting\",\n setting: \"tools\",\n details: \"Tool calling is not yet supported by Prompt API\",\n });\n }\n\n if (maxOutputTokens != null) {\n warnings.push({\n type: \"unsupported-setting\",\n setting: \"maxOutputTokens\",\n details: \"maxOutputTokens is not supported by Prompt API\",\n });\n }\n\n if (stopSequences != null) {\n warnings.push({\n type: \"unsupported-setting\",\n setting: \"stopSequences\",\n details: \"stopSequences is not supported by Prompt API\",\n });\n }\n\n if (topP != null) {\n warnings.push({\n type: \"unsupported-setting\",\n setting: \"topP\",\n details: \"topP is not supported by Prompt API\",\n });\n }\n\n if (presencePenalty != null) {\n warnings.push({\n type: \"unsupported-setting\",\n setting: \"presencePenalty\",\n details: \"presencePenalty is not supported by Prompt API\",\n });\n }\n\n if (frequencyPenalty != null) {\n warnings.push({\n type: \"unsupported-setting\",\n setting: \"frequencyPenalty\",\n details: \"frequencyPenalty is not supported by Prompt API\",\n });\n }\n\n if (seed != null) {\n warnings.push({\n type: \"unsupported-setting\",\n setting: \"seed\",\n details: \"seed is not supported by Prompt API\",\n });\n }\n\n // Check if this is a multimodal prompt\n const hasMultiModalInput = hasMultimodalContent(prompt);\n\n // Convert messages to the DOM API format\n const { systemMessage, messages } = convertToBuiltInAIMessages(prompt);\n\n // Handle response format for Prompt API\n const promptOptions: LanguageModelPromptOptions &\n LanguageModelCreateCoreOptions = {};\n if (responseFormat?.type === \"json\") {\n promptOptions.responseConstraint = responseFormat.schema as Record<\n string,\n JSONValue\n >;\n }\n\n // Map supported settings\n if (temperature !== undefined) {\n promptOptions.temperature = temperature;\n }\n\n if (topK !== undefined) {\n promptOptions.topK = topK;\n }\n\n return {\n systemMessage,\n messages,\n warnings,\n promptOptions,\n hasMultiModalInput,\n expectedInputs: hasMultiModalInput\n ? 
getExpectedInputs(prompt)\n : undefined,\n };\n }\n\n /**\n * Generates a complete text response using the browser's built-in Prompt API\n * @param options\n * @returns Promise resolving to the generated content with finish reason, usage stats, and any warnings\n * @throws {LoadSettingError} When the Prompt API is not available or model needs to be downloaded\n * @throws {UnsupportedFunctionalityError} When unsupported features like file input are used\n */\n public async doGenerate(options: LanguageModelV2CallOptions) {\n const converted = this.getArgs(options);\n const { systemMessage, messages, warnings, promptOptions, expectedInputs } =\n converted;\n\n const session = await this.getSession(\n undefined,\n expectedInputs,\n systemMessage,\n );\n\n const text = await session.prompt(messages, promptOptions);\n\n const content: LanguageModelV2Content[] = [\n {\n type: \"text\",\n text,\n },\n ];\n\n return {\n content,\n finishReason: \"stop\" as LanguageModelV2FinishReason,\n usage: {\n inputTokens: undefined,\n outputTokens: undefined,\n totalTokens: undefined,\n },\n request: { body: { messages, options: promptOptions } },\n warnings,\n };\n }\n\n /**\n * Check the availability of the built-in AI model\n * @returns Promise resolving to \"unavailable\", \"available\", or \"available-after-download\"\n */\n public async availability(): Promise<Availability> {\n if (typeof LanguageModel === \"undefined\") {\n return \"unavailable\";\n }\n return LanguageModel.availability();\n }\n\n /**\n * Creates a session with download progress monitoring.\n *\n * @example\n * ```typescript\n * const session = await model.createSessionWithProgress(\n * (progress) => {\n * console.log(`Download progress: ${Math.round(progress * 100)}%`);\n * }\n * );\n * ```\n *\n * @param onDownloadProgress Optional callback receiving progress values 0-1 during model download\n * @returns Promise resolving to a configured LanguageModel session\n * @throws {LoadSettingError} When the Prompt API is not available or model is unavailable\n */\n public async createSessionWithProgress(\n onDownloadProgress?: (progress: number) => void,\n ): Promise<LanguageModel> {\n return this.getSession(undefined, undefined, undefined, onDownloadProgress);\n }\n\n /**\n * Generates a streaming text response using the browser's built-in Prompt API\n * @param options\n * @returns Promise resolving to a readable stream of text chunks and request metadata\n * @throws {LoadSettingError} When the Prompt API is not available or model needs to be downloaded\n * @throws {UnsupportedFunctionalityError} When unsupported features like file input are used\n */\n public async doStream(options: LanguageModelV2CallOptions) {\n const converted = this.getArgs(options);\n const {\n systemMessage,\n messages,\n warnings,\n promptOptions,\n expectedInputs,\n hasMultiModalInput,\n } = converted;\n\n const session = await this.getSession(\n undefined,\n expectedInputs,\n systemMessage,\n );\n\n // Pass abort signal to the native streaming method\n const streamOptions = {\n ...promptOptions,\n signal: options.abortSignal,\n };\n\n const promptStream = session.promptStreaming(messages, streamOptions);\n\n let isFirstChunk = true;\n const textId = \"text-0\";\n\n const stream = promptStream.pipeThrough(\n new TransformStream<string, LanguageModelV2StreamPart>({\n start(controller) {\n // Send stream start event with warnings\n controller.enqueue({\n type: \"stream-start\",\n warnings,\n });\n\n // Handle abort signal\n if (options.abortSignal) {\n 
options.abortSignal.addEventListener(\"abort\", () => {\n controller.terminate();\n });\n }\n },\n\n transform(chunk, controller) {\n if (isFirstChunk) {\n // Send text start event\n controller.enqueue({\n type: \"text-start\",\n id: textId,\n });\n isFirstChunk = false;\n }\n\n // Send text delta\n controller.enqueue({\n type: \"text-delta\",\n id: textId,\n delta: chunk,\n });\n },\n\n flush(controller) {\n // Send text end event\n controller.enqueue({\n type: \"text-end\",\n id: textId,\n });\n\n // Send finish event\n controller.enqueue({\n type: \"finish\",\n finishReason: \"stop\" as LanguageModelV2FinishReason,\n usage: {\n inputTokens: session.inputUsage,\n outputTokens: undefined,\n totalTokens: undefined,\n },\n });\n },\n }),\n );\n\n return {\n stream,\n request: { body: { messages, options: promptOptions } },\n };\n }\n}\n","import {\n LanguageModelV2Prompt,\n UnsupportedFunctionalityError,\n} from \"@ai-sdk/provider\";\n\nexport interface ConvertedMessages {\n systemMessage?: string;\n messages: LanguageModelMessage[];\n}\n\n/**\n * Convert base64 string to Uint8Array for built-in AI compatibility\n * Built-in AI supports BufferSource (including Uint8Array) for image/audio data\n */\nfunction convertBase64ToUint8Array(base64: string): Uint8Array {\n try {\n const binaryString = atob(base64);\n const bytes = new Uint8Array(binaryString.length);\n for (let i = 0; i < binaryString.length; i++) {\n bytes[i] = binaryString.charCodeAt(i);\n }\n return bytes;\n } catch (error) {\n throw new Error(`Failed to convert base64 to Uint8Array: ${error}`);\n }\n}\n\n/**\n * Convert file data to the appropriate format for built-in AI\n * Built-in AI supports: Blob, BufferSource (Uint8Array), URLs\n */\nfunction convertFileData(data: any, mediaType: string): Uint8Array | string {\n // Handle different data types from Vercel AI SDK\n if (data instanceof URL) {\n // URLs - keep as string (if supported by provider)\n return data.toString();\n }\n\n if (data instanceof Uint8Array) {\n // Already in correct format\n return data;\n }\n\n if (typeof data === \"string\") {\n // Base64 string from AI SDK - convert to Uint8Array\n return convertBase64ToUint8Array(data);\n }\n\n // Fallback for other types (shouldn't happen with current AI SDK)\n console.warn(`Unexpected data type for ${mediaType}:`, typeof data);\n return data;\n}\n\n/**\n * Convert Vercel AI SDK prompt format to built-in AI Prompt API format\n * Returns system message (for initialPrompts) and regular messages (for prompt method)\n */\nexport function convertToBuiltInAIMessages(\n prompt: LanguageModelV2Prompt,\n): ConvertedMessages {\n let systemMessage: string | undefined;\n const messages: LanguageModelMessage[] = [];\n\n for (const message of prompt) {\n switch (message.role) {\n case \"system\": {\n // There's only ever one system message from AI SDK\n systemMessage = message.content;\n break;\n }\n\n case \"user\": {\n messages.push({\n role: \"user\",\n content: message.content.map((part) => {\n switch (part.type) {\n case \"text\": {\n return {\n type: \"text\",\n value: part.text,\n } as LanguageModelMessageContent;\n }\n\n case \"file\": {\n const { mediaType, data, filename } = part;\n\n if (mediaType?.startsWith(\"image/\")) {\n const convertedData = convertFileData(data, mediaType);\n\n return {\n type: \"image\",\n value: convertedData,\n } as LanguageModelMessageContent;\n } else if (mediaType?.startsWith(\"audio/\")) {\n const convertedData = convertFileData(data, mediaType);\n\n return {\n type: \"audio\",\n value: 
convertedData,\n } as LanguageModelMessageContent;\n } else {\n throw new UnsupportedFunctionalityError({\n functionality: `file type: ${mediaType}`,\n });\n }\n }\n\n default: {\n throw new UnsupportedFunctionalityError({\n functionality: `content type: ${(part as any).type}`,\n });\n }\n }\n }),\n } as LanguageModelMessage);\n break;\n }\n\n case \"assistant\": {\n let text = \"\";\n\n for (const part of message.content) {\n switch (part.type) {\n case \"text\": {\n text += part.text;\n break;\n }\n case \"tool-call\": {\n throw new UnsupportedFunctionalityError({\n functionality: \"tool calls\",\n });\n }\n }\n }\n\n messages.push({\n role: \"assistant\",\n content: text,\n } as LanguageModelMessage);\n break;\n }\n\n case \"tool\": {\n throw new UnsupportedFunctionalityError({\n functionality: \"tool messages\",\n });\n }\n\n default: {\n throw new Error(`Unsupported role: ${(message as any).role}`);\n }\n }\n }\n\n return { systemMessage, messages };\n}\n","import { EmbeddingModelV2, EmbeddingModelV2Embedding } from \"@ai-sdk/provider\";\nimport { TextEmbedder } from \"@mediapipe/tasks-text\";\n\nexport interface BuiltInAIEmbeddingModelSettings {\n /**\n * An optional base path to specify the directory the Wasm files should be loaded from.\n * @default 'https://pub-ddcfe353995744e89b8002f16bf98575.r2.dev/text_wasm_internal.js'\n */\n wasmLoaderPath?: string;\n /**\n * It's about 6mb before gzip.\n * @default 'https://pub-ddcfe353995744e89b8002f16bf98575.r2.dev/text_wasm_internal.wasm'\n */\n wasmBinaryPath?: string;\n /**\n * The model path to the model asset file.\n * It's about 6.1mb before gzip.\n * @default 'https://pub-ddcfe353995744e89b8002f16bf98575.r2.dev/universal_sentence_encoder.tflite'\n */\n modelAssetPath?: string;\n /**\n * Whether to normalize the returned feature vector with L2 norm. Use this\n * option only if the model does not already contain a native L2_NORMALIZATION\n * TF Lite Op. In most cases, this is already the case and L2 norm is thus\n * achieved through TF Lite inference.\n * @default false\n */\n l2Normalize?: boolean;\n /**\n * Whether the returned embedding should be quantized to bytes via scalar\n * quantization. Embeddings are implicitly assumed to be unit-norm and\n * therefore any dimension is guaranteed to have a value in [-1.0, 1.0]. 
Use\n * the l2_normalize option if this is not the case.\n * @default false\n */\n quantize?: boolean;\n /**\n * Overrides the default backend to use for the provided model.\n */\n delegate?: \"CPU\" | \"GPU\";\n}\n\n// See more:\n// - https://github.com/google-ai-edge/mediapipe\n// - https://ai.google.dev/edge/mediapipe/solutions/text/text_embedder/web_js\nexport class BuiltInAIEmbeddingModel implements EmbeddingModelV2<string> {\n readonly specificationVersion = \"v2\";\n readonly provider = \"google-mediapipe\";\n readonly modelId: string = \"embedding\";\n readonly supportsParallelCalls = true;\n readonly maxEmbeddingsPerCall = undefined;\n\n private settings: BuiltInAIEmbeddingModelSettings = {\n wasmLoaderPath:\n \"https://pub-ddcfe353995744e89b8002f16bf98575.r2.dev/text_wasm_internal.js\",\n wasmBinaryPath:\n \"https://pub-ddcfe353995744e89b8002f16bf98575.r2.dev/text_wasm_internal.wasm\",\n modelAssetPath:\n \"https://pub-ddcfe353995744e89b8002f16bf98575.r2.dev/universal_sentence_encoder.tflite\",\n l2Normalize: false,\n quantize: false,\n };\n private modelAssetBuffer!: Promise<ReadableStreamDefaultReader>;\n private textEmbedder!: Promise<TextEmbedder>;\n\n public constructor(settings: BuiltInAIEmbeddingModelSettings = {}) {\n this.settings = { ...this.settings, ...settings };\n this.modelAssetBuffer = fetch(this.settings.modelAssetPath!).then(\n (response) => response.body!.getReader(),\n )!;\n this.textEmbedder = this.getTextEmbedder();\n }\n\n protected getTextEmbedder = async (): Promise<TextEmbedder> => {\n return TextEmbedder.createFromOptions(\n {\n wasmBinaryPath: this.settings.wasmBinaryPath!,\n wasmLoaderPath: this.settings.wasmLoaderPath!,\n },\n {\n baseOptions: {\n modelAssetBuffer: await this.modelAssetBuffer,\n delegate: this.settings.delegate,\n },\n l2Normalize: this.settings.l2Normalize,\n quantize: this.settings.quantize,\n },\n );\n };\n\n public doEmbed = async (options: {\n values: string[];\n abortSignal?: AbortSignal;\n }): Promise<{\n embeddings: Array<EmbeddingModelV2Embedding>;\n rawResponse?: Record<PropertyKey, any>;\n }> => {\n // Note: abortSignal is not supported by MediaPipe TextEmbedder\n if (options.abortSignal?.aborted) {\n throw new Error(\"Operation was aborted\");\n }\n\n const embedder = await this.textEmbedder;\n const embeddings = options.values.map((text) => {\n const embedderResult = embedder.embed(text);\n const [embedding] = embedderResult.embeddings;\n return embedding?.floatEmbedding ?? 
[];\n });\n\n return {\n embeddings,\n rawResponse: {\n model: \"universal_sentence_encoder\",\n provider: \"google-mediapipe\",\n processed_texts: options.values.length,\n },\n };\n };\n}\n","import {\n EmbeddingModelV2,\n NoSuchModelError,\n ProviderV2,\n} from \"@ai-sdk/provider\";\nimport {\n BuiltInAIChatLanguageModel,\n BuiltInAIChatModelId,\n BuiltInAIChatSettings,\n} from \"./built-in-ai-language-model\";\nimport {\n BuiltInAIEmbeddingModel,\n BuiltInAIEmbeddingModelSettings,\n} from \"./built-in-ai-embedding-model\";\n\nexport interface BuiltInAIProvider extends ProviderV2 {\n (\n modelId?: BuiltInAIChatModelId,\n settings?: BuiltInAIChatSettings,\n ): BuiltInAIChatLanguageModel;\n\n /**\n * Creates a model for text generation.\n */\n languageModel(\n modelId: BuiltInAIChatModelId,\n settings?: BuiltInAIChatSettings,\n ): BuiltInAIChatLanguageModel;\n\n /**\n * Creates a model for text generation.\n */\n chat(\n modelId: BuiltInAIChatModelId,\n settings?: BuiltInAIChatSettings,\n ): BuiltInAIChatLanguageModel;\n\n textEmbedding(\n modelId: \"embedding\",\n settings?: BuiltInAIEmbeddingModelSettings,\n ): EmbeddingModelV2<string>;\n\n textEmbeddingModel: (\n modelId: \"embedding\",\n settings?: BuiltInAIEmbeddingModelSettings,\n ) => EmbeddingModelV2<string>;\n\n // Not implemented\n imageModel(modelId: string): never;\n speechModel(modelId: string): never;\n transcriptionModel(modelId: string): never;\n}\n\nexport interface BuiltInAIProviderSettings {\n // Currently empty - provider settings are minimal for BuiltInAI\n // Future provider-level settings can be added here\n}\n\n/**\n * Create a BuiltInAI provider instance.\n */\nexport function createBuiltInAI(\n options: BuiltInAIProviderSettings = {},\n): BuiltInAIProvider {\n const createChatModel = (\n modelId: BuiltInAIChatModelId,\n settings?: BuiltInAIChatSettings,\n ) => {\n return new BuiltInAIChatLanguageModel(modelId, settings);\n };\n\n const createEmbeddingModel = (\n modelId: \"embedding\",\n settings?: BuiltInAIEmbeddingModelSettings,\n ) => {\n return new BuiltInAIEmbeddingModel(settings);\n };\n\n const provider = function (\n modelId: BuiltInAIChatModelId = \"text\",\n settings?: BuiltInAIChatSettings,\n ) {\n if (new.target) {\n throw new Error(\n \"The BuiltInAI model function cannot be called with the new keyword.\",\n );\n }\n\n return createChatModel(modelId, settings);\n };\n\n provider.languageModel = createChatModel;\n provider.chat = createChatModel;\n provider.textEmbedding = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n\n provider.imageModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: \"imageModel\" });\n };\n\n provider.speechModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: \"speechModel\" });\n };\n\n provider.transcriptionModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: \"transcriptionModel\" });\n };\n\n return provider;\n}\n\n/**\n * Default BuiltInAI provider instance.\n */\nexport const builtInAI = 
createBuiltInAI();\n"],"mappings":";AAAA;AAAA,EAQE;AAAA,OAEK;;;ACVP;AAAA,EAEE;AAAA,OACK;AAWP,SAAS,0BAA0B,QAA4B;AAC7D,MAAI;AACF,UAAM,eAAe,KAAK,MAAM;AAChC,UAAM,QAAQ,IAAI,WAAW,aAAa,MAAM;AAChD,aAAS,IAAI,GAAG,IAAI,aAAa,QAAQ,KAAK;AAC5C,YAAM,CAAC,IAAI,aAAa,WAAW,CAAC;AAAA,IACtC;AACA,WAAO;AAAA,EACT,SAAS,OAAO;AACd,UAAM,IAAI,MAAM,2CAA2C,KAAK,EAAE;AAAA,EACpE;AACF;AAMA,SAAS,gBAAgB,MAAW,WAAwC;AAE1E,MAAI,gBAAgB,KAAK;AAEvB,WAAO,KAAK,SAAS;AAAA,EACvB;AAEA,MAAI,gBAAgB,YAAY;AAE9B,WAAO;AAAA,EACT;AAEA,MAAI,OAAO,SAAS,UAAU;AAE5B,WAAO,0BAA0B,IAAI;AAAA,EACvC;AAGA,UAAQ,KAAK,4BAA4B,SAAS,KAAK,OAAO,IAAI;AAClE,SAAO;AACT;AAMO,SAAS,2BACd,QACmB;AACnB,MAAI;AACJ,QAAM,WAAmC,CAAC;AAE1C,aAAW,WAAW,QAAQ;AAC5B,YAAQ,QAAQ,MAAM;AAAA,MACpB,KAAK,UAAU;AAEb,wBAAgB,QAAQ;AACxB;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,QAAQ,IAAI,CAAC,SAAS;AACrC,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO;AAAA,kBACL,MAAM;AAAA,kBACN,OAAO,KAAK;AAAA,gBACd;AAAA,cACF;AAAA,cAEA,KAAK,QAAQ;AACX,sBAAM,EAAE,WAAW,MAAM,SAAS,IAAI;AAEtC,oBAAI,WAAW,WAAW,QAAQ,GAAG;AACnC,wBAAM,gBAAgB,gBAAgB,MAAM,SAAS;AAErD,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,OAAO;AAAA,kBACT;AAAA,gBACF,WAAW,WAAW,WAAW,QAAQ,GAAG;AAC1C,wBAAM,gBAAgB,gBAAgB,MAAM,SAAS;AAErD,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,OAAO;AAAA,kBACT;AAAA,gBACF,OAAO;AACL,wBAAM,IAAI,8BAA8B;AAAA,oBACtC,eAAe,cAAc,SAAS;AAAA,kBACxC,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,cAEA,SAAS;AACP,sBAAM,IAAI,8BAA8B;AAAA,kBACtC,eAAe,iBAAkB,KAAa,IAAI;AAAA,gBACpD,CAAC;AAAA,cACH;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH,CAAyB;AACzB;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AAEX,mBAAW,QAAQ,QAAQ,SAAS;AAClC,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,oBAAM,IAAI,8BAA8B;AAAA,gBACtC,eAAe;AAAA,cACjB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,QACX,CAAyB;AACzB;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,IAAI,8BAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,IAAI,MAAM,qBAAsB,QAAgB,IAAI,EAAE;AAAA,MAC9D;AAAA,IACF;AAAA,EACF;AAEA,SAAO,EAAE,eAAe,SAAS;AACnC;;;AD9HO,SAAS,8BAAuC;AACrD,SAAO,OAAO,kBAAkB;AAClC;AAOO,SAAS,4BAAqC;AACnD,SAAO,OAAO,kBAAkB;AAClC;AAWA,SAAS,qBAAqB,QAAwC;AACpE,aAAW,WAAW,QAAQ;AAC5B,QAAI,QAAQ,SAAS,QAAQ;AAC3B,iBAAW,QAAQ,QAAQ,SAAS;AAClC,YAAI,KAAK,SAAS,QAAQ;AACxB,iBAAO;AAAA,QACT;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACA,SAAO;AACT;AAKA,SAAS,kBACP,QAC6C;AAC7C,QAAM,SAAS,oBAAI,IAAgC;AAGnD,aAAW,WAAW,QAAQ;AAC5B,QAAI,QAAQ,SAAS,QAAQ;AAC3B,iBAAW,QAAQ,QAAQ,SAAS;AAClC,YAAI,KAAK,SAAS,QAAQ;AACxB,cAAI,KAAK,WAAW,WAAW,QAAQ,GAAG;AACxC,mBAAO,IAAI,OAAO;AAAA,UACpB,WAAW,KAAK,WAAW,WAAW,QAAQ,GAAG;AAC/C,mBAAO,IAAI,OAAO;AAAA,UACpB;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO,MAAM,KAAK,MAAM,EAAE,IAAI,CAAC,UAAU,EAAE,KAAK,EAAE;AACpD;AAEO,IAAM,6BAAN,MAA4D;AAAA,EAQjE,YACE,SACA,UAAiC,CAAC,GAClC;AAVF,SAAS,uBAAuB;AAEhC,SAAS,WAAW;AAiBpB,SAAS,gBAA0C;AAAA,MACjD,WAAW,CAAC,iBAAiB;AAAA,MAC7B,WAAW,CAAC,iBAAiB;AAAA,IAC/B;AAXE,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,MACZ,UAAU,KAAK;AAAA,MACf;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAOA,MAAc,WACZ,SACA,gBACA,eACA,oBACwB;AACxB,QAAI,OAAO,kBAAkB,aAAa;AACxC,YAAM,IAAI,iBAAiB;AAAA,QACzB,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,QAAI,KAAK,QAAS,QAAO,KAAK;AAE9B,UAAM,eAAe,MAAM,cAAc,aAAa;AAEtD,QAAI,iBAAiB,eAAe;AAClC,YAAM,IAAI,iBAAiB,EAAE,SAAS,+BAA+B,CAAC;AAAA,IACxE;AAEA,UAAM,gBAAgB;AAAA,MACpB,GAAG,KAAK,OAAO;AAAA,MACf,GAAG;AAAA,IACL;AAGA,QAAI,eAAe;AACjB,oBAAc,iBAAiB;AAAA,QAC7B,EAAE,MAAM,UAAU,SAAS,cAAc;AAAA,MAC3C;AAAA,IACF;AAGA,QAAI,kBAAkB,eAAe,SAAS,GAAG;AAC/C,oBAAc,iBAAiB;AAAA,IACjC;AAGA,QAAI,oBAAoB;AACtB,oBAAc,UAAU,CAAC,MAAqB;AAC5C,UAAE,iBAAiB,oBAAoB,CAAC,M
AAqB;AAC3D,6BAAmB,EAAE,MAAM;AAAA,QAC7B,CAAC;AAAA,MACH;AAAA,IACF;AAEA,SAAK,UAAU,MAAM,cAAc,OAAO,aAAa;AAEvD,WAAO,KAAK;AAAA,EACd;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAC/C,UAAM,WAAyC,CAAC;AAGhD,QAAI,SAAS,MAAM,SAAS,GAAG;AAC7B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,mBAAmB,MAAM;AAC3B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,iBAAiB,MAAM;AACzB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,mBAAmB,MAAM;AAC3B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,oBAAoB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAGA,UAAM,qBAAqB,qBAAqB,MAAM;AAGtD,UAAM,EAAE,eAAe,SAAS,IAAI,2BAA2B,MAAM;AAGrE,UAAM,gBAC6B,CAAC;AACpC,QAAI,gBAAgB,SAAS,QAAQ;AACnC,oBAAc,qBAAqB,eAAe;AAAA,IAIpD;AAGA,QAAI,gBAAgB,QAAW;AAC7B,oBAAc,cAAc;AAAA,IAC9B;AAEA,QAAI,SAAS,QAAW;AACtB,oBAAc,OAAO;AAAA,IACvB;AAEA,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,gBAAgB,qBACZ,kBAAkB,MAAM,IACxB;AAAA,IACN;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAa,WAAW,SAAqC;AAC3D,UAAM,YAAY,KAAK,QAAQ,OAAO;AACtC,UAAM,EAAE,eAAe,UAAU,UAAU,eAAe,eAAe,IACvE;AAEF,UAAM,UAAU,MAAM,KAAK;AAAA,MACzB;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAEA,UAAM,OAAO,MAAM,QAAQ,OAAO,UAAU,aAAa;AAEzD,UAAM,UAAoC;AAAA,MACxC;AAAA,QACE,MAAM;AAAA,QACN;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA,cAAc;AAAA,MACd,OAAO;AAAA,QACL,aAAa;AAAA,QACb,cAAc;AAAA,QACd,aAAa;AAAA,MACf;AAAA,MACA,SAAS,EAAE,MAAM,EAAE,UAAU,SAAS,cAAc,EAAE;AAAA,MACtD;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAa,eAAsC;AACjD,QAAI,OAAO,kBAAkB,aAAa;AACxC,aAAO;AAAA,IACT;AACA,WAAO,cAAc,aAAa;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAkBA,MAAa,0BACX,oBACwB;AACxB,WAAO,KAAK,WAAW,QAAW,QAAW,QAAW,kBAAkB;AAAA,EAC5E;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAa,SAAS,SAAqC;AACzD,UAAM,YAAY,KAAK,QAAQ,OAAO;AACtC,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,IAAI;AAEJ,UAAM,UAAU,MAAM,KAAK;AAAA,MACzB;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAGA,UAAM,gBAAgB;AAAA,MACpB,GAAG;AAAA,MACH,QAAQ,QAAQ;AAAA,IAClB;AAEA,UAAM,eAAe,QAAQ,gBAAgB,UAAU,aAAa;AAEpE,QAAI,eAAe;AACnB,UAAM,SAAS;AAEf,UAAM,SAAS,aAAa;AAAA,MAC1B,IAAI,gBAAmD;AAAA,QACrD,MAAM,YAAY;AAEhB,qBAAW,QAAQ;AAAA,YACjB,MAAM;AAAA,YACN;AAAA,UACF,CAAC;AAGD,cAAI,QAAQ,aAAa;AACvB,oBAAQ,YAAY,iBAAiB,SAAS,MAAM;AAClD,yBAAW,UAAU;AAAA,YACvB,CAAC;AAAA,UACH;AAAA,QACF;AAAA,QAEA,UAAU,OAAO,YAAY;AAC3B,cAAI,cAAc;AAEhB,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN,IAAI;AAAA,YACN,CAAC;AACD,2BAAe;AAAA,UACjB;AAGA,qBAAW,QAAQ;AAAA,YACjB,MAAM;AAAA,YACN,IAAI;AAAA,YACJ,OAAO;AAAA,UACT,CAAC;AAAA,QACH;AAAA,QAEA,MAAM,YAAY;AAEhB,qBAAW,QAAQ;AAAA,YACjB,MAAM;AAAA,YACN,IAAI;AAAA,UACN,CAAC;AAGD,qBAAW,QAAQ;AAAA,YACjB,MAAM;AAAA,YACN,cAAc;AAAA,YACd,OAAO;AAAA,cACL,aAAa,QAAQ;AAAA,cACrB,cAAc;AAAA,cACd,aAAa;AAAA,YACf;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,MACL;AAAA,MACA,SAAS,EAAE,MAAM,EAAE,UAAU,SAAS,cAAc,EAAE;AAAA,IACxD;AAAA,EACF;AACF;;;AE5bA,SAAS,oBAAoB;AA4CtB,IAAM,0BAAN,MAAkE;AAAA,EAoBhE,YAAY,WAA4C,CAAC,GAAG;AAnBnE,SAAS,uBAAuB;AAChC,SAAS,WAAW;AACpB,SAAS,UAAkB;AAC3B,SAAS,wBAAwB;AACjC,SAAS,uBAAuB;AAEhC
,SAAQ,WAA4C;AAAA,MAClD,gBACE;AAAA,MACF,gBACE;AAAA,MACF,gBACE;AAAA,MACF,aAAa;AAAA,MACb,UAAU;AAAA,IACZ;AAYA,SAAU,kBAAkB,YAAmC;AAC7D,aAAO,aAAa;AAAA,QAClB;AAAA,UACE,gBAAgB,KAAK,SAAS;AAAA,UAC9B,gBAAgB,KAAK,SAAS;AAAA,QAChC;AAAA,QACA;AAAA,UACE,aAAa;AAAA,YACX,kBAAkB,MAAM,KAAK;AAAA,YAC7B,UAAU,KAAK,SAAS;AAAA,UAC1B;AAAA,UACA,aAAa,KAAK,SAAS;AAAA,UAC3B,UAAU,KAAK,SAAS;AAAA,QAC1B;AAAA,MACF;AAAA,IACF;AAEA,SAAO,UAAU,OAAO,YAMlB;AAEJ,UAAI,QAAQ,aAAa,SAAS;AAChC,cAAM,IAAI,MAAM,uBAAuB;AAAA,MACzC;AAEA,YAAM,WAAW,MAAM,KAAK;AAC5B,YAAM,aAAa,QAAQ,OAAO,IAAI,CAAC,SAAS;AAC9C,cAAM,iBAAiB,SAAS,MAAM,IAAI;AAC1C,cAAM,CAAC,SAAS,IAAI,eAAe;AACnC,eAAO,WAAW,kBAAkB,CAAC;AAAA,MACvC,CAAC;AAED,aAAO;AAAA,QACL;AAAA,QACA,aAAa;AAAA,UACX,OAAO;AAAA,UACP,UAAU;AAAA,UACV,iBAAiB,QAAQ,OAAO;AAAA,QAClC;AAAA,MACF;AAAA,IACF;AAnDE,SAAK,WAAW,EAAE,GAAG,KAAK,UAAU,GAAG,SAAS;AAChD,SAAK,mBAAmB,MAAM,KAAK,SAAS,cAAe,EAAE;AAAA,MAC3D,CAAC,aAAa,SAAS,KAAM,UAAU;AAAA,IACzC;AACA,SAAK,eAAe,KAAK,gBAAgB;AAAA,EAC3C;AA+CF;;;ACtHA;AAAA,EAEE;AAAA,OAEK;AAyDA,SAAS,gBACd,UAAqC,CAAC,GACnB;AACnB,QAAM,kBAAkB,CACtB,SACA,aACG;AACH,WAAO,IAAI,2BAA2B,SAAS,QAAQ;AAAA,EACzD;AAEA,QAAM,uBAAuB,CAC3B,SACA,aACG;AACH,WAAO,IAAI,wBAAwB,QAAQ;AAAA,EAC7C;AAEA,QAAM,WAAW,SACf,UAAgC,QAChC,UACA;AACA,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,SAAS,QAAQ;AAAA,EAC1C;AAEA,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,WAAS,aAAa,CAAC,YAAoB;AACzC,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACjE;AAEA,WAAS,cAAc,CAAC,YAAoB;AAC1C,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,cAAc,CAAC;AAAA,EAClE;AAEA,WAAS,qBAAqB,CAAC,YAAoB;AACjD,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,qBAAqB,CAAC;AAAA,EACzE;AAEA,SAAO;AACT;AAKO,IAAM,YAAY,gBAAgB;","names":[]}