UNPKG

@sanity/codegen

Version:

Codegen toolkit for Sanity.io

1 lines • 61.4 kB
{"version":3,"file":"index.mjs","sources":["../src/readConfig.ts","../src/readSchema.ts","../src/safeParseQuery.ts","../src/getBabelConfig.ts","../src/typescript/parseSource.ts","../src/typescript/expressionResolvers.ts","../src/typescript/findQueriesInSource.ts","../src/typescript/moduleResolver.ts","../src/typescript/findQueriesInPath.ts","../src/typescript/registerBabel.ts","../src/typescript/typeGenerator.ts"],"sourcesContent":["import {readFile} from 'node:fs/promises'\n\nimport json5 from 'json5'\nimport * as z from 'zod'\n\nexport const configDefintion = z.object({\n path: z\n .string()\n .or(z.array(z.string()))\n .default([\n './src/**/*.{ts,tsx,js,jsx,mjs,cjs,astro}',\n './app/**/*.{ts,tsx,js,jsx,mjs,cjs}',\n './sanity/**/*.{ts,tsx,js,jsx,mjs,cjs}',\n ]),\n schema: z.string().default('./schema.json'),\n generates: z.string().default('./sanity.types.ts'),\n formatGeneratedCode: z.boolean().default(true),\n overloadClientMethods: z.boolean().default(true),\n})\n\nexport type CodegenConfig = z.infer<typeof configDefintion>\n\nexport async function readConfig(path: string): Promise<CodegenConfig> {\n try {\n const content = await readFile(path, 'utf-8')\n const json = json5.parse(content)\n return configDefintion.parseAsync(json)\n } catch (error) {\n if (error instanceof z.ZodError) {\n throw new Error(\n `Error in config file\\n ${error.errors.map((err) => err.message).join('\\n')}`,\n {cause: error},\n )\n }\n if (typeof error === 'object' && error !== null && 'code' in error && error.code === 'ENOENT') {\n return configDefintion.parse({})\n }\n\n throw error\n }\n}\n","import {readFile} from 'node:fs/promises'\n\nimport {type SchemaType} from 'groq-js'\n\n/**\n * Read a schema from a given path\n * @param path - The path to the schema\n * @returns The schema\n * @internal\n * @beta\n **/\nexport async function readSchema(path: string): Promise<SchemaType> {\n const content = await readFile(path, 'utf-8')\n return JSON.parse(content) // todo: ZOD validation?\n}\n","import {parse} from 'groq-js'\n\n/**\n * safeParseQuery parses a GROQ query string, but first attempts to extract any parameters used in slices. This method is _only_\n * intended for use in type generation where we don't actually execute the parsed AST on a dataset, and should not be used elsewhere.\n * @internal\n */\nexport function safeParseQuery(query: string) {\n const params: Record<string, unknown> = {}\n\n for (const param of extractSliceParams(query)) {\n params[param] = 0 // we don't care about the value, just the type\n }\n return parse(query, {params})\n}\n\n/**\n * Finds occurences of `[($start|{number})..($end|{number})]` in a query string and returns the start and end values, and return\n * the names of the start and end variables.\n * @internal\n */\nexport function* extractSliceParams(query: string): Generator<string> {\n const sliceRegex = /\\[(\\$(\\w+)|\\d)\\.\\.\\.?(\\$(\\w+)|\\d)\\]/g\n const matches = query.matchAll(sliceRegex)\n if (!matches) {\n return\n }\n const params = new Set<string>()\n for (const match of matches) {\n const start = match[1] === `$${match[2]}` ? match[2] : null\n if (start !== null) {\n yield start\n }\n const end = match[3] === `$${match[4]}` ? match[4] : null\n if (end !== null) {\n yield end\n }\n }\n}\n","import {existsSync} from 'node:fs'\nimport {join, resolve} from 'node:path'\n\nimport {type TransformOptions} from '@babel/core'\n\n/**\n * Because of bundlers and compilers, knowing the exact path the babel configuration will be\n * located at post - build is not always trivial. We traverse from the current directory upwards\n * until we find the first `babel.config.json` and use that path.\n *\n * @param path - The path to start looking for the babel configuration\n * @returns The path to the `babel.config.json` file\n * @internal\n */\nexport function findBabelConfig(path: string): string {\n const configPath = join(path, 'babel.config.json')\n if (existsSync(configPath)) {\n return configPath\n }\n\n const parent = resolve(join(path, '..'))\n if (parent && parent !== path) {\n return findBabelConfig(parent)\n }\n\n throw new Error('Could not find `babel.config.json` in @sanity/codegen')\n}\n\n/**\n * Get the default babel configuration for `@sanity/codegen`\n *\n * @param path - The path to start looking for the babel configuration. Defaults to `__dirname`\n * @returns A babel configuration object\n * @internal\n */\nexport function getBabelConfig(path?: string): TransformOptions {\n const configPath = findBabelConfig(path || __dirname)\n return {extends: configPath}\n}\n","import {parse, type TransformOptions} from '@babel/core'\nimport type * as babelTypes from '@babel/types'\n\n// helper function to parse a source file\nexport function parseSourceFile(\n _source: string,\n _filename: string,\n babelOptions: TransformOptions,\n): babelTypes.File {\n let source = _source\n let filename = _filename\n if (filename.endsWith('.astro')) {\n // append .ts to the filename so babel will parse it as typescript\n filename += '.ts'\n source = parseAstro(source)\n } else if (filename.endsWith('.vue')) {\n // append .ts to the filename so babel will parse it as typescript\n filename += '.ts'\n source = parseVue(source)\n }\n const result = parse(source, {\n ...babelOptions,\n filename,\n })\n\n if (!result) {\n throw new Error(`Failed to parse ${filename}`)\n }\n\n return result\n}\n\nfunction parseAstro(source: string): string {\n // find all code fences, the js code is between --- and ---\n const codeFences = source.match(/---\\n([\\s\\S]*?)\\n---/g)\n if (!codeFences) {\n return ''\n }\n\n return codeFences\n .map((codeFence) => {\n return codeFence.split('\\n').slice(1, -1).join('\\n')\n })\n .join('\\n')\n}\n\nfunction parseVue(source: string): string {\n // find all script tags, the js code is between <script> and </script>\n const scriptRegex = /<script(?:\\s+generic=[\"'][^\"']*[\"'])?[^>]*>([\\s\\S]*?)<\\/script>/g\n // const matches = [...source.matchAll(scriptRegex)]\n // TODO: swap once this code runs in `ES2020`\n const matches = matchAllPolyfill(source, scriptRegex)\n if (!matches.length) {\n return ''\n }\n\n return matches.map((match) => match[1]).join('\\n')\n}\n\n// TODO: remove once this code runs in `ES2020`\nfunction matchAllPolyfill(str: string, regex: RegExp): RegExpMatchArray[] {\n if (!regex.global) {\n throw new Error('matchAll polyfill requires a global regex (with /g flag)')\n }\n\n const matches = []\n let match\n while ((match = regex.exec(str)) !== null) {\n matches.push(match)\n }\n return matches\n}\n","import fs from 'node:fs'\nimport path from 'node:path'\n\nimport {type TransformOptions} from '@babel/core'\nimport traverse, {Scope} from '@babel/traverse'\nimport * as babelTypes from '@babel/types'\nimport createDebug from 'debug'\n\nimport {parseSourceFile} from './parseSource'\n\nconst debug = createDebug('sanity:codegen:findQueries:debug')\n\ntype resolveExpressionReturnType = string\n\n/**\n * NamedQueryResult is a result of a named query\n */\nexport interface NamedQueryResult {\n /** name is the name of the query */\n name: string\n /** result is a groq query */\n result: resolveExpressionReturnType\n\n /** location is the location of the query in the source */\n location: {\n start?: {\n line: number\n column: number\n index: number\n }\n end?: {\n line: number\n column: number\n index: number\n }\n }\n}\n\nconst TAGGED_TEMPLATE_ALLOW_LIST = ['groq']\nconst FUNCTION_WRAPPER_ALLOW_LIST = ['defineQuery']\n\n/**\n * resolveExpression takes a node and returns the resolved value of the expression.\n * @beta\n * @internal\n */\nexport function resolveExpression({\n node,\n file,\n scope,\n filename,\n resolver,\n babelConfig,\n params = [],\n fnArguments = [],\n}: {\n node: babelTypes.Node\n file: babelTypes.File\n scope: Scope\n filename: string\n resolver: NodeJS.RequireResolve\n babelConfig: TransformOptions\n params?: babelTypes.Node[]\n fnArguments?: babelTypes.Node[]\n}): resolveExpressionReturnType {\n debug(\n `Resolving node ${node.type} in ${filename}:${node.loc?.start.line}:${node.loc?.start.column}`,\n )\n if (\n babelTypes.isTaggedTemplateExpression(node) &&\n babelTypes.isIdentifier(node.tag) &&\n TAGGED_TEMPLATE_ALLOW_LIST.includes(node.tag.name)\n ) {\n return resolveExpression({\n node: node.quasi,\n scope,\n filename,\n file,\n resolver,\n params,\n babelConfig,\n fnArguments,\n })\n }\n\n if (babelTypes.isTemplateLiteral(node)) {\n const resolvedExpressions = node.expressions.map((expression) =>\n resolveExpression({\n node: expression,\n scope,\n filename,\n file,\n resolver,\n params,\n babelConfig,\n fnArguments,\n }),\n )\n return node.quasis\n .map((quasi, idx) => {\n return (quasi.value.cooked || '') + (resolvedExpressions[idx] || '')\n })\n .join('')\n }\n\n if (babelTypes.isLiteral(node)) {\n if (node.type === 'NullLiteral' || node.type === 'RegExpLiteral') {\n throw new Error(`Unsupported literal type: ${node.type}`)\n }\n\n return node.value.toString()\n }\n\n if (babelTypes.isIdentifier(node)) {\n return resolveIdentifier({\n node,\n scope,\n filename,\n file,\n resolver,\n fnArguments,\n babelConfig,\n params,\n })\n }\n\n if (babelTypes.isVariableDeclarator(node)) {\n const init = node.init ?? (babelTypes.isAssignmentPattern(node.id) && node.id.right)\n if (!init) {\n throw new Error(`Unsupported variable declarator`)\n }\n\n return resolveExpression({\n node: init,\n fnArguments,\n scope,\n filename,\n file,\n babelConfig,\n resolver,\n })\n }\n\n if (\n babelTypes.isCallExpression(node) &&\n babelTypes.isIdentifier(node.callee) &&\n FUNCTION_WRAPPER_ALLOW_LIST.includes(node.callee.name)\n ) {\n return resolveExpression({\n node: node.arguments[0],\n scope,\n filename,\n file,\n resolver,\n babelConfig,\n params,\n })\n }\n\n if (babelTypes.isCallExpression(node)) {\n return resolveCallExpression({\n node,\n scope,\n filename,\n file,\n resolver,\n babelConfig,\n params,\n fnArguments,\n })\n }\n\n if (\n babelTypes.isArrowFunctionExpression(node) ||\n babelTypes.isFunctionDeclaration(node) ||\n babelTypes.isFunctionExpression(node)\n ) {\n const newScope = new Scope(scope.path, scope)\n\n params.forEach((param, i) => {\n newScope.push({\n id: param as babelTypes.LVal,\n init: fnArguments[i] as babelTypes.Expression | undefined,\n })\n })\n\n return resolveExpression({\n node: node.body,\n params: node.params,\n fnArguments,\n scope: newScope,\n filename,\n file,\n babelConfig,\n resolver,\n })\n }\n\n if (babelTypes.isNewExpression(node)) {\n return resolveExpression({\n node: node.callee,\n scope,\n filename,\n file,\n babelConfig,\n resolver,\n })\n }\n\n if (babelTypes.isImportDefaultSpecifier(node) || babelTypes.isImportSpecifier(node)) {\n return resolveImportSpecifier({node, file, scope, filename, fnArguments, resolver, babelConfig})\n }\n\n if (babelTypes.isAssignmentPattern(node)) {\n return resolveExpression({\n node: node.right,\n scope,\n filename,\n file,\n resolver,\n params,\n babelConfig,\n fnArguments,\n })\n }\n\n throw new Error(\n `Unsupported expression type: ${node.type} in ${filename}:${node.loc?.start.line}:${node.loc?.start.column}`,\n )\n}\n\nfunction resolveIdentifier({\n node,\n scope,\n filename,\n file,\n resolver,\n babelConfig,\n fnArguments,\n params,\n}: {\n node: babelTypes.Identifier\n file: babelTypes.File\n scope: Scope\n filename: string\n resolver: NodeJS.RequireResolve\n babelConfig: TransformOptions\n fnArguments: babelTypes.Node[]\n params: babelTypes.Node[]\n}): resolveExpressionReturnType {\n const paramIndex = params.findIndex(\n (param) =>\n (babelTypes.isIdentifier(param) && node.name === param.name) ||\n (babelTypes.isAssignmentPattern(param) &&\n babelTypes.isIdentifier(param.left) &&\n node.name === param.left.name),\n )\n let argument = fnArguments[paramIndex]\n if (!argument && paramIndex >= 0 && babelTypes.isAssignmentPattern(params[paramIndex])) {\n argument = params[paramIndex].right\n }\n if (argument && babelTypes.isLiteral(argument)) {\n return resolveExpression({\n node: argument,\n scope,\n filename,\n file,\n resolver,\n params,\n babelConfig,\n fnArguments,\n })\n }\n const binding = scope.getBinding(node.name)\n if (binding) {\n if (babelTypes.isIdentifier(binding.path.node)) {\n const isSame = binding.path.node.name === node.name\n if (isSame) {\n throw new Error(\n `Could not resolve same identifier \"${node.name}\" in \"${filename}:${node.loc?.start.line}:${node.loc?.start.column}\"`,\n )\n }\n }\n return resolveExpression({\n node: binding.path.node,\n params,\n fnArguments,\n scope,\n filename,\n babelConfig,\n file,\n resolver,\n })\n }\n\n throw new Error(\n `Could not find binding for node \"${node.name}\" in ${filename}:${node.loc?.start.line}:${node.loc?.start.column}`,\n )\n}\n\nfunction resolveCallExpression({\n node,\n scope,\n filename,\n file,\n resolver,\n babelConfig,\n params,\n}: {\n node: babelTypes.CallExpression\n file: babelTypes.File\n scope: Scope\n filename: string\n resolver: NodeJS.RequireResolve\n babelConfig: TransformOptions\n fnArguments: babelTypes.Node[]\n params: babelTypes.Node[]\n}): resolveExpressionReturnType {\n const {callee} = node\n return resolveExpression({\n node: callee,\n scope,\n filename,\n file,\n resolver,\n babelConfig,\n params,\n fnArguments: node.arguments,\n })\n}\n\nfunction resolveImportSpecifier({\n node,\n file,\n filename,\n fnArguments,\n resolver,\n babelConfig,\n}: {\n node: babelTypes.ImportDefaultSpecifier | babelTypes.ImportSpecifier | babelTypes.ExportSpecifier\n file: babelTypes.File\n scope: Scope\n filename: string\n fnArguments: babelTypes.Node[]\n resolver: NodeJS.RequireResolve\n babelConfig: TransformOptions\n}): resolveExpressionReturnType {\n let importDeclaration: babelTypes.ImportDeclaration | undefined\n traverse(file, {\n ImportDeclaration(n) {\n if (!babelTypes.isImportDeclaration(n.node)) {\n return\n }\n for (const specifier of n.node.specifiers) {\n if (babelTypes.isImportDefaultSpecifier(specifier)) {\n if (specifier.local.loc?.identifierName === node.local.name) {\n importDeclaration = n.node\n break\n }\n }\n if (specifier.local.name === node.local.name) {\n importDeclaration = n.node\n }\n }\n },\n })\n\n if (!importDeclaration) {\n throw new Error(`Could not find import declaration for ${node.local.name}`)\n }\n\n const importName = node.local.name // the name of the variable to import\n const importFileName = importDeclaration.source.value // the file to import from\n\n const importPath =\n importFileName.startsWith('./') || importFileName.startsWith('../')\n ? path.resolve(path.dirname(filename), importFileName)\n : importFileName\n\n const resolvedFile = resolver(importPath)\n const source = fs.readFileSync(resolvedFile)\n const tree = parseSourceFile(source.toString(), resolvedFile, babelConfig)\n\n let newScope: Scope | undefined\n traverse(tree, {\n Program(p) {\n newScope = p.scope\n },\n })\n if (!newScope) {\n throw new Error(`Could not find scope for ${filename}`)\n }\n\n const binding = newScope.getBinding(importName)\n if (binding) {\n return resolveExpression({\n node: binding.path.node,\n file: tree,\n scope: newScope,\n fnArguments,\n babelConfig,\n filename: resolvedFile,\n resolver,\n })\n }\n\n // It's not a global binding, but it might be a named export\n let namedExport: babelTypes.ExportNamedDeclaration | undefined\n let newImportName: string | undefined\n traverse(tree, {\n ExportDeclaration(p) {\n if (p.node.type === 'ExportNamedDeclaration') {\n for (const specifier of p.node.specifiers) {\n if (\n specifier.type === 'ExportSpecifier' &&\n specifier.exported.type === 'Identifier' &&\n specifier.exported.name === importName\n ) {\n namedExport = p.node\n newImportName = specifier.exported.name\n }\n }\n }\n },\n })\n\n if (namedExport && newImportName) {\n return resolveExportSpecifier({\n node: namedExport,\n importName: newImportName,\n filename: resolvedFile,\n fnArguments,\n resolver,\n babelConfig,\n })\n }\n\n let result: resolveExpressionReturnType | undefined\n traverse(tree, {\n ExportDeclaration(p) {\n if (p.node.type === 'ExportAllDeclaration') {\n try {\n result = resolveExportSpecifier({\n node: p.node,\n importName,\n filename: resolvedFile,\n fnArguments,\n resolver,\n babelConfig,\n })\n } catch (e) {\n if (e.cause !== `noBinding:${importName}`) throw e\n }\n }\n },\n })\n if (result) return result\n\n throw new Error(`Could not find binding for import \"${importName}\" in ${importFileName}`)\n}\n\nfunction resolveExportSpecifier({\n node,\n importName,\n filename,\n fnArguments,\n babelConfig,\n resolver,\n}: {\n node: babelTypes.ExportNamedDeclaration | babelTypes.ExportAllDeclaration\n importName: string\n filename: string\n fnArguments: babelTypes.Node[]\n babelConfig: TransformOptions\n resolver: NodeJS.RequireResolve\n}): resolveExpressionReturnType {\n if (!node.source) {\n throw new Error(`Could not find source for export \"${importName}\" in ${filename}`)\n }\n\n const importFileName = node.source.value\n const importPath = path.resolve(path.dirname(filename), importFileName)\n const resolvedFile = resolver(importPath)\n const source = fs.readFileSync(resolvedFile)\n const tree = parseSourceFile(source.toString(), resolvedFile, babelConfig)\n\n let newScope: Scope | undefined\n traverse(tree, {\n Program(p) {\n newScope = p.scope\n },\n })\n if (!newScope) {\n throw new Error(`Could not find scope for ${filename}`)\n }\n\n const binding = newScope.getBinding(importName)\n if (binding) {\n return resolveExpression({\n node: binding.path.node,\n file: tree,\n scope: newScope,\n filename: importFileName,\n babelConfig,\n resolver,\n fnArguments,\n })\n }\n\n throw new Error(`Could not find binding for export \"${importName}\" in ${importFileName}`, {\n cause: `noBinding:${importName}`,\n })\n}\n","import {createRequire} from 'node:module'\n\nimport {type NodePath, type TransformOptions, traverse} from '@babel/core'\nimport {type Scope} from '@babel/traverse'\nimport * as babelTypes from '@babel/types'\n\nimport {getBabelConfig} from '../getBabelConfig'\nimport {type NamedQueryResult, resolveExpression} from './expressionResolvers'\nimport {parseSourceFile} from './parseSource'\n\nconst require = createRequire(__filename)\n\nconst groqTagName = 'groq'\nconst defineQueryFunctionName = 'defineQuery'\nconst groqModuleName = 'groq'\nconst nextSanityModuleName = 'next-sanity'\n\nconst ignoreValue = '@sanity-typegen-ignore'\n\n/**\n * findQueriesInSource takes a source string and returns all GROQ queries in it.\n * @param source - The source code to search for queries\n * @param filename - The filename of the source code\n * @param babelConfig - The babel configuration to use when parsing the source\n * @param resolver - A resolver function to use when resolving module imports\n * @returns\n * @beta\n * @internal\n */\nexport function findQueriesInSource(\n source: string,\n filename: string,\n babelConfig: TransformOptions = getBabelConfig(),\n resolver: NodeJS.RequireResolve = require.resolve,\n): NamedQueryResult[] {\n const queries: NamedQueryResult[] = []\n const file = parseSourceFile(source, filename, babelConfig)\n\n traverse(file, {\n // Look for variable declarations, e.g. `const myQuery = groq`... and extract the query.\n // The variable name is used as the name of the query result type\n VariableDeclarator(path) {\n const {node, scope} = path\n\n const init = node.init\n\n // Look for tagged template expressions that are called with the `groq` tag\n const isGroqTemplateTag =\n babelTypes.isTaggedTemplateExpression(init) &&\n babelTypes.isIdentifier(init.tag) &&\n init.tag.name === groqTagName\n\n // Look for strings wrapped in a defineQuery function call\n const isDefineQueryCall =\n babelTypes.isCallExpression(init) &&\n (isImportFrom(groqModuleName, defineQueryFunctionName, scope, init.callee) ||\n isImportFrom(nextSanityModuleName, defineQueryFunctionName, scope, init.callee))\n\n if (babelTypes.isIdentifier(node.id) && (isGroqTemplateTag || isDefineQueryCall)) {\n // If we find a comment leading the decleration which macthes with ignoreValue we don't add\n // the query\n if (declarationLeadingCommentContains(path, ignoreValue)) {\n return\n }\n\n const queryName = `${node.id.name}`\n const queryResult = resolveExpression({\n node: init,\n file,\n scope,\n babelConfig,\n filename,\n resolver,\n })\n\n const location = node.loc\n ? {\n start: {\n ...node.loc?.start,\n },\n end: {\n ...node.loc?.end,\n },\n }\n : {}\n\n queries.push({name: queryName, result: queryResult, location})\n }\n },\n })\n\n return queries\n}\n\nfunction declarationLeadingCommentContains(path: NodePath, comment: string): boolean {\n /*\n * We have to consider these cases:\n *\n * // @sanity-typegen-ignore\n * const query = groq`...`\n *\n * // AST\n * VariableDeclaration {\n * declarations: [\n * VariableDeclarator: {init: tag: {name: \"groq\"}}\n * ],\n * leadingComments: ...\n * }\n *\n * // @sanity-typegen-ignore\n * const query1 = groq`...`, query2 = groq`...`\n *\n * // AST\n * VariableDeclaration {\n * declarations: [\n * VariableDeclarator: {init: tag: {name: \"groq\"}}\n * VariableDeclarator: {init: tag: {name: \"groq\"}}\n * ],\n * leadingComments: ...\n * }\n *\n * // @sanity-typegen-ignore\n * export const query = groq`...`\n *\n * // AST\n * ExportNamedDeclaration {\n * declaration: VariableDeclaration {\n * declarations: [\n * VariableDeclarator: {init: tag: {name: \"groq\"}}\n * VariableDeclarator: {init: tag: {name: \"groq\"}}\n * ],\n * },\n * leadingComments: ...\n * }\n *\n * In the case where multiple variables are under the same VariableDeclaration the leadingComments\n * will still be on the VariableDeclaration\n *\n * In the case where the variable is exported, the leadingComments are on the\n * ExportNamedDeclaration which includes the VariableDeclaration in its own declaration property\n */\n\n const variableDeclaration = path.find((node) => node.isVariableDeclaration())\n if (!variableDeclaration) return false\n\n if (\n variableDeclaration.node.leadingComments?.find(\n (commentItem) => commentItem.value.trim() === comment,\n )\n ) {\n return true\n }\n\n // If the declaration is exported, the comment lies on the parent of the export declaration\n if (\n variableDeclaration.parent.leadingComments?.find(\n (commentItem) => commentItem.value.trim() === comment,\n )\n ) {\n return true\n }\n\n return false\n}\n\nfunction isImportFrom(\n moduleName: string,\n importName: string,\n scope: Scope,\n node: babelTypes.Expression | babelTypes.V8IntrinsicIdentifier,\n) {\n if (babelTypes.isIdentifier(node)) {\n const binding = scope.getBinding(node.name)\n if (!binding) {\n return false\n }\n\n const {path} = binding\n\n // import { foo } from 'groq'\n if (babelTypes.isImportSpecifier(path.node)) {\n return (\n path.node.importKind === 'value' &&\n path.parentPath &&\n babelTypes.isImportDeclaration(path.parentPath.node) &&\n path.parentPath.node.source.value === moduleName &&\n babelTypes.isIdentifier(path.node.imported) &&\n path.node.imported.name === importName\n )\n }\n\n // const { defineQuery } = require('groq')\n if (babelTypes.isVariableDeclarator(path.node)) {\n const {init} = path.node\n return (\n babelTypes.isCallExpression(init) &&\n babelTypes.isIdentifier(init.callee) &&\n init.callee.name === 'require' &&\n babelTypes.isStringLiteral(init.arguments[0]) &&\n init.arguments[0].value === moduleName\n )\n }\n }\n\n // import * as foo from 'groq'\n // foo.defineQuery(...)\n if (babelTypes.isMemberExpression(node)) {\n const {object, property} = node\n\n if (!babelTypes.isIdentifier(object)) {\n return false\n }\n\n const binding = scope.getBinding(object.name)\n if (!binding) {\n return false\n }\n const {path} = binding\n\n return (\n babelTypes.isIdentifier(object) &&\n babelTypes.isIdentifier(property) &&\n property.name === importName &&\n babelTypes.isImportNamespaceSpecifier(path.node) &&\n path.parentPath &&\n babelTypes.isImportDeclaration(path.parentPath.node) &&\n path.parentPath.node.source.value === moduleName\n )\n }\n\n return false\n}\n","import createDebug from 'debug'\nimport {createMatchPath, loadConfig as loadTSConfig} from 'tsconfig-paths'\n\nconst debug = createDebug('sanity:codegen:moduleResolver')\n\n/**\n * This is a custom implementation of require.resolve that takes into account the paths\n * configuration in tsconfig.json. This is necessary if we want to resolve paths that are\n * custom defined in the tsconfig.json file.\n * Resolving here is best effort and might not work in all cases.\n * @beta\n */\nexport function getResolver(cwd?: string): NodeJS.RequireResolve {\n const tsConfig = loadTSConfig(cwd)\n\n if (tsConfig.resultType === 'failed') {\n debug('Could not load tsconfig, using default resolver: %s', tsConfig.message)\n return require.resolve\n }\n\n const matchPath = createMatchPath(\n tsConfig.absoluteBaseUrl,\n tsConfig.paths,\n tsConfig.mainFields,\n tsConfig.addMatchAll,\n )\n\n const resolve = function (request: string, options?: {paths?: string[]}): string {\n const found = matchPath(request)\n if (found !== undefined) {\n return require.resolve(found, options)\n }\n return require.resolve(request, options)\n }\n\n // wrap the resolve.path function to make it available.\n resolve.paths = (request: string): string[] | null => {\n return require.resolve.paths(request)\n }\n return resolve\n}\n","import fs from 'node:fs/promises'\n\nimport {type TransformOptions} from '@babel/core'\nimport createDebug from 'debug'\nimport glob from 'globby'\n\nimport {getBabelConfig} from '../getBabelConfig'\nimport {type NamedQueryResult} from './expressionResolvers'\nimport {findQueriesInSource} from './findQueriesInSource'\nimport {getResolver} from './moduleResolver'\n\nconst debug = createDebug('sanity:codegen:findQueries:debug')\n\ntype ResultQueries = {\n type: 'queries'\n filename: string\n queries: NamedQueryResult[]\n}\ntype ResultError = {\n type: 'error'\n error: Error\n filename: string\n}\n\n/**\n * findQueriesInPath takes a path or array of paths and returns all GROQ queries in the files.\n * @param path - The path or array of paths to search for queries\n * @param babelOptions - The babel configuration to use when parsing the source\n * @param resolver - A resolver function to use when resolving module imports\n * @returns An async generator that yields the results of the search\n * @beta\n * @internal\n */\nexport async function* findQueriesInPath({\n path,\n babelOptions = getBabelConfig(),\n resolver = getResolver(),\n}: {\n path: string | string[]\n babelOptions?: TransformOptions\n resolver?: NodeJS.RequireResolve\n}): AsyncGenerator<ResultQueries | ResultError> {\n const queryNames = new Set()\n // Holds all query names found in the source files\n debug(`Globing ${path}`)\n\n const files = glob\n .sync(path, {\n absolute: false,\n ignore: ['**/node_modules/**'], // we never want to look in node_modules\n onlyFiles: true,\n })\n .sort()\n\n for (const filename of files) {\n if (typeof filename !== 'string') {\n continue\n }\n\n debug(`Found file \"${filename}\"`)\n try {\n const source = await fs.readFile(filename, 'utf8')\n const queries = findQueriesInSource(source, filename, babelOptions, resolver)\n // Check and error on duplicate query names, because we can't generate types with the same name.\n for (const query of queries) {\n if (queryNames.has(query.name)) {\n throw new Error(\n `Duplicate query name found: \"${query.name}\". Query names must be unique across all files.`,\n )\n }\n queryNames.add(query.name)\n }\n yield {type: 'queries', filename, queries}\n } catch (error) {\n debug(`Error in file \"${filename}\"`, error)\n yield {type: 'error', error, filename}\n }\n }\n}\n","import {type TransformOptions} from '@babel/core'\nimport register from '@babel/register'\n\nimport {getBabelConfig} from '../getBabelConfig'\n\n/**\n * Register Babel with the given options\n *\n * @param babelOptions - The options to use when registering Babel\n * @beta\n */\nexport function registerBabel(babelOptions?: TransformOptions): void {\n const options = babelOptions || getBabelConfig()\n\n register({...options, extensions: ['.ts', '.tsx', '.js', '.jsx', '.mjs', '.cjs']})\n}\n","import {CodeGenerator} from '@babel/generator'\nimport * as t from '@babel/types'\nimport {\n type ArrayTypeNode,\n type DocumentSchemaType,\n type InlineTypeNode,\n type ObjectAttribute,\n type ObjectTypeNode,\n type SchemaType,\n type TypeDeclarationSchemaType,\n type TypeNode,\n type UnionTypeNode,\n} from 'groq-js'\n\nconst REFERENCE_SYMBOL_NAME = 'internalGroqTypeReferenceTo'\nconst ALL_SCHEMA_TYPES = 'AllSanitySchemaTypes'\n\ntype QueryWithTypeNode = {\n query: string\n typeNode: TypeNode\n}\n\n/**\n * A class used to generate TypeScript types from a given schema\n * @internal\n * @beta\n */\nexport class TypeGenerator {\n // Simple set to keep track of generated type names, to avoid conflicts\n private generatedTypeName: Set<string> = new Set()\n // Map between type names and their generated type names, used to resolve the correct generated type name\n private typeNameMap: Map<string, string> = new Map()\n // Map between type nodes and their generated type names, used for query mapping\n private typeNodeNameMap: Map<TypeNode | DocumentSchemaType | TypeDeclarationSchemaType, string> =\n new Map()\n\n private readonly schema: SchemaType\n\n constructor(schema: SchemaType) {\n this.schema = schema\n\n this.schema.forEach((s) => {\n this.getTypeName(s.name, s)\n })\n }\n\n /**\n * Generate TypeScript types for the given schema\n * @returns string\n * @internal\n * @beta\n */\n generateSchemaTypes(): string {\n const typeDeclarations: (t.TSTypeAliasDeclaration | t.ExportNamedDeclaration)[] = []\n\n const schemaNames = new Set<string>()\n this.schema.forEach((schema) => {\n const typeLiteral = this.getTypeNodeType(schema)\n\n const schemaName = this.typeNodeNameMap.get(schema)\n if (!schemaName) {\n throw new Error(`Schema name not found for schema ${schema.name}`)\n }\n\n schemaNames.add(schemaName)\n const typeAlias = t.tsTypeAliasDeclaration(t.identifier(schemaName), null, typeLiteral)\n\n typeDeclarations.push(t.exportNamedDeclaration(typeAlias))\n })\n\n typeDeclarations.push(\n t.exportNamedDeclaration(\n t.tsTypeAliasDeclaration(\n t.identifier(this.getTypeName(ALL_SCHEMA_TYPES)),\n null,\n t.tsUnionType(\n [...schemaNames].map((typeName) => t.tsTypeReference(t.identifier(typeName))),\n ),\n ),\n ),\n )\n\n // Generate TypeScript code from the AST nodes\n return typeDeclarations.map((decl) => new CodeGenerator(decl).generate().code).join('\\n\\n')\n }\n\n /**\n * Takes a identifier and a type node and generates a type alias for the type node.\n * @param identifierName - The name of the type to generated\n * @param typeNode - The type node to generate the type for\n * @returns\n * @internal\n * @beta\n */\n generateTypeNodeTypes(identifierName: string, typeNode: TypeNode): string {\n const type = this.getTypeNodeType(typeNode)\n\n const typeName = this.getTypeName(identifierName, typeNode)\n const typeAlias = t.tsTypeAliasDeclaration(t.identifier(typeName), null, type)\n\n return new CodeGenerator(t.exportNamedDeclaration(typeAlias)).generate().code.trim()\n }\n\n static generateKnownTypes(): string {\n const typeOperator = t.tsTypeOperator(t.tsSymbolKeyword(), 'unique')\n\n const identifier = t.identifier(REFERENCE_SYMBOL_NAME)\n identifier.typeAnnotation = t.tsTypeAnnotation(typeOperator)\n\n const decleration = t.variableDeclaration('const', [t.variableDeclarator(identifier)])\n decleration.declare = true\n return new CodeGenerator(t.exportNamedDeclaration(decleration)).generate().code.trim()\n }\n\n /**\n * Takes a list of queries from the codebase and generates a type declaration\n * for SanityClient to consume.\n *\n * Note: only types that have previously been generated with `generateTypeNodeTypes`\n * will be included in the query map.\n *\n * @param queries - A list of queries to generate a type declaration for\n * @returns\n * @internal\n * @beta\n */\n generateQueryMap(queries: QueryWithTypeNode[]): string {\n const typesByQuerystring: {[query: string]: string[]} = {}\n\n for (const query of queries) {\n const name = this.typeNodeNameMap.get(query.typeNode)\n if (!name) {\n continue\n }\n\n typesByQuerystring[query.query] ??= []\n typesByQuerystring[query.query].push(name)\n }\n\n const queryReturnInterface = t.tsInterfaceDeclaration(\n t.identifier('SanityQueries'),\n null,\n [],\n t.tsInterfaceBody(\n Object.entries(typesByQuerystring).map(([query, types]) => {\n return t.tsPropertySignature(\n t.stringLiteral(query),\n t.tsTypeAnnotation(\n t.tsUnionType(types.map((type) => t.tsTypeReference(t.identifier(type)))),\n ),\n )\n }),\n ),\n )\n\n const declareModule = t.declareModule(\n t.stringLiteral('@sanity/client'),\n t.blockStatement([queryReturnInterface]),\n )\n\n const clientImport = t.importDeclaration([], t.stringLiteral('@sanity/client'))\n\n return new CodeGenerator(t.program([clientImport, declareModule])).generate().code.trim()\n }\n\n /**\n * Since we are sanitizing identifiers we migt end up with collisions. Ie there might be a type mux.video and muxVideo, both these\n * types would be sanityized into MuxVideo. To avoid this we keep track of the generated type names and add a index to the name.\n * When we reference a type we also keep track of the original name so we can reference the correct type later.\n */\n private getTypeName(\n name: string,\n typeNode?: TypeNode | DocumentSchemaType | TypeDeclarationSchemaType,\n ): string {\n const desiredName = uppercaseFirstLetter(sanitizeIdentifier(name))\n\n let generatedName = desiredName\n let i = 2\n while (this.generatedTypeName.has(generatedName)) {\n // add _ and a index and increment that index until we find a name that is not in the map\n generatedName = `${desiredName}_${i++}`\n }\n this.generatedTypeName.add(generatedName)\n this.typeNameMap.set(name, generatedName)\n if (typeNode) {\n this.typeNodeNameMap.set(typeNode, generatedName)\n }\n\n return generatedName\n }\n\n private getTypeNodeType(\n typeNode: TypeNode | TypeDeclarationSchemaType | DocumentSchemaType,\n ): t.TSType {\n switch (typeNode.type) {\n case 'string': {\n if (typeNode.value !== undefined) {\n return t.tsLiteralType(t.stringLiteral(typeNode.value))\n }\n return t.tsStringKeyword()\n }\n case 'number': {\n if (typeNode.value !== undefined) {\n return t.tsLiteralType(t.numericLiteral(typeNode.value))\n }\n return t.tsNumberKeyword()\n }\n case 'boolean': {\n if (typeNode.value !== undefined) {\n return t.tsLiteralType(t.booleanLiteral(typeNode.value))\n }\n return t.tsBooleanKeyword()\n }\n case 'unknown': {\n return t.tsUnknownKeyword()\n }\n case 'document': {\n return this.generateDocumentType(typeNode)\n }\n case 'type': {\n return this.getTypeNodeType(typeNode.value)\n }\n case 'array': {\n return this.generateArrayTsType(typeNode)\n }\n case 'object': {\n return this.generateObjectTsType(typeNode)\n }\n case 'union': {\n return this.generateUnionTsType(typeNode)\n }\n case 'inline': {\n return this.generateInlineTsType(typeNode)\n }\n case 'null': {\n return t.tsNullKeyword()\n }\n\n default:\n // @ts-expect-error This should never happen\n throw new Error(`Type \"${typeNode.type}\" not found in schema`)\n }\n }\n\n // Helper function used to generate TS types for array type nodes.\n private generateArrayTsType(typeNode: ArrayTypeNode): t.TSTypeReference {\n const typeNodes = this.getTypeNodeType(typeNode.of)\n const arrayType = t.tsTypeReference(\n t.identifier('Array'),\n t.tsTypeParameterInstantiation([typeNodes]),\n )\n\n return arrayType\n }\n\n // Helper function used to generate TS types for object properties.\n private generateObjectProperty(key: string, attribute: ObjectAttribute): t.TSPropertySignature {\n const type = this.getTypeNodeType(attribute.value)\n const propertySignature = t.tsPropertySignature(\n t.identifier(sanitizeIdentifier(key)),\n t.tsTypeAnnotation(type),\n )\n propertySignature.optional = attribute.optional\n\n return propertySignature\n }\n\n // Helper function used to generate TS types for object type nodes.\n private generateObjectTsType(typeNode: ObjectTypeNode): t.TSType {\n const props: t.TSPropertySignature[] = []\n Object.entries(typeNode.attributes).forEach(([key, attribute]) => {\n props.push(this.generateObjectProperty(key, attribute))\n })\n const rest = typeNode.rest\n if (rest !== undefined) {\n switch (rest.type) {\n case 'unknown': {\n return t.tsUnknownKeyword()\n }\n case 'object': {\n Object.entries(rest.attributes).forEach(([key, attribute]) => {\n props.push(this.generateObjectProperty(key, attribute))\n })\n break\n }\n case 'inline': {\n const resolved = this.generateInlineTsType(rest)\n\n // if an object's rest type is unknown, we can't generate a type literal for it\n // so we return unknown\n if (t.isTSUnknownKeyword(resolved)) {\n return resolved\n }\n return t.tsIntersectionType([t.tsTypeLiteral(props), resolved])\n }\n default: {\n // @ts-expect-error This should never happen\n throw new Error(`Type \"${rest.type}\" not found in schema`)\n }\n }\n }\n if (typeNode.dereferencesTo !== undefined) {\n const derefType = t.tsPropertySignature(\n t.identifier(REFERENCE_SYMBOL_NAME),\n t.tsTypeAnnotation(t.tsLiteralType(t.stringLiteral(typeNode.dereferencesTo))),\n )\n derefType.computed = true\n derefType.optional = true\n props.push(derefType)\n }\n return t.tsTypeLiteral(props)\n }\n\n private generateInlineTsType(typeNode: InlineTypeNode): t.TSType {\n const referencedTypeNode = this.schema.find((schema) => schema.name === typeNode.name)\n // Check if we have a schema reference for the type node\n if (referencedTypeNode === undefined) {\n // Is it already generated by another type node?\n const generatedName = this.typeNameMap.get(typeNode.name)\n if (generatedName) {\n return t.tsTypeReference(t.identifier(generatedName))\n }\n\n // Not found in schema, return unknown type\n const missing = t.tsUnknownKeyword()\n missing.trailingComments = [\n {\n type: 'CommentLine',\n value: ` Unable to locate the referenced type \"${typeNode.name}\" in schema`,\n },\n ]\n return missing\n }\n\n const generatedName = this.typeNameMap.get(referencedTypeNode.name)\n\n if (generatedName) {\n return t.tsTypeReference(t.identifier(generatedName))\n }\n\n return t.tsUnknownKeyword()\n }\n\n // Helper function used to generate TS types for union type nodes.\n private generateUnionTsType(typeNode: UnionTypeNode): t.TSType {\n if (typeNode.of.length === 0) {\n return t.tsNeverKeyword()\n }\n if (typeNode.of.length === 1) {\n return this.getTypeNodeType(typeNode.of[0])\n }\n\n const typeNodes = typeNode.of.map((node) => this.getTypeNodeType(node))\n\n return t.tsUnionType(typeNodes)\n }\n\n // Helper function used to generate TS types for document type nodes.\n private generateDocumentType(document: DocumentSchemaType): t.TSType {\n const props = Object.entries(document.attributes).map(([key, node]) =>\n this.generateObjectProperty(key, node),\n )\n\n return t.tsTypeLiteral(props)\n }\n}\nfunction uppercaseFirstLetter(input: string): string {\n return input.charAt(0).toUpperCase() + input.slice(1)\n}\n\nfunction sanitizeIdentifier(input: string): string {\n return `${input.replace(/^\\d/, '_').replace(/[^$\\w]+(.)/g, (_, char) => char.toUpperCase())}`\n}\n"],"names":["path","parse","debug","babelTypes","require","traverse","loadTSConfig","resolve","fs","generatedName"],"mappings":";;;;;;;;;;;;;;;AAKO,MAAM,kBAAkB,EAAE,OAAO;AAAA,EACtC,MAAM,EACH,OAAA,EACA,GAAG,EAAE,MAAM,EAAE,QAAQ,CAAC,EACtB,QAAQ;AAAA,IACP;AAAA,IACA;AAAA,IACA;AAAA,EAAA,CACD;AAAA,EACH,QAAQ,EAAE,SAAS,QAAQ,eAAe;AAAA,EAC1C,WAAW,EAAE,SAAS,QAAQ,mBAAmB;AAAA,EACjD,qBAAqB,EAAE,UAAU,QAAQ,EAAI;AAAA,EAC7C,uBAAuB,EAAE,QAAA,EAAU,QAAQ,EAAI;AACjD,CAAC;AAID,eAAsB,WAAWA,OAAsC;AACrE,MAAI;AACF,UAAM,UAAU,MAAM,SAASA,OAAM,OAAO,GACtC,OAAO,MAAM,MAAM,OAAO;AAChC,WAAO,gBAAgB,WAAW,IAAI;AAAA,EACxC,SAAS,OAAO;AACd,QAAI,iBAAiB,EAAE;AACrB,YAAM,IAAI;AAAA,QACR;AAAA,GAA0B,MAAM,OAAO,IAAI,CAAC,QAAQ,IAAI,OAAO,EAAE,KAAK;AAAA,CAAI,CAAC;AAAA,QAC3E,EAAC,OAAO,MAAA;AAAA,MAAK;AAGjB,QAAI,OAAO,SAAU,YAAY,UAAU,QAAQ,UAAU,SAAS,MAAM,SAAS;AACnF,aAAO,gBAAgB,MAAM,EAAE;AAGjC,UAAM;AAAA,EACR;AACF;AC7BA,eAAsB,WAAWA,OAAmC;AAClE,QAAM,UAAU,MAAM,SAASA,OAAM,OAAO;AAC5C,SAAO,KAAK,MAAM,OAAO;AAC3B;ACPO,SAAS,eAAe,OAAe;AAC5C,QAAM,SAAkC,CAAA;AAExC,aAAW,SAAS,mBAAmB,KAAK;AAC1C,WAAO,KAAK,IAAI;AAElB,SAAO,MAAM,OAAO,EAAC,QAAO;AAC9B;AAOO,UAAU,mBAAmB,OAAkC;AACpE,QAAM,aAAa,wCACb,UAAU,MAAM,SAAS,UAAU;AACzC,MAAK;AAIL,eAAW,SAAS,SAAS;AAC3B,YAAM,QAAQ,MAAM,CAAC,MAAM,IAAI,MAAM,CAAC,CAAC,KAAK,MAAM,CAAC,IAAI;AACnD,gBAAU,SACZ,MAAM;AAER,YAAM,MAAM,MAAM,CAAC,MAAM,IAAI,MAAM,CAAC,CAAC,KAAK,MAAM,CAAC,IAAI;AACjD,cAAQ,SACV,MAAM;AAAA,IAEV;AACF;ACxBO,SAAS,gBAAgBA,OAAsB;AACpD,QAAM,aAAa,KAAKA,OAAM,mBAAmB;AACjD,MAAI,WAAW,UAAU;AACvB,WAAO;AAGT,QAAM,SAAS,QAAQ,KAAKA,OAAM,IAAI,CAAC;AACvC,MAAI,UAAU,WAAWA;AACvB,WAAO,gBAAgB,MAAM;AAG/B,QAAM,IAAI,MAAM,uDAAuD;AACzE;AASO,SAAS,eAAeA,OAAiC;AAE9D,SAAO,EAAC,SADW,gBAAwB,SAAS,EAAA;AAEtD;AClCO,SAAS,gBACd,SACA,WACA,cACiB;AACjB,MAAI,SAAS,SACT,WAAW;AACX,WAAS,SAAS,QAAQ,KAE5B,YAAY,OACZ,SAAS,WAAW,MAAM,KACjB,SAAS,SAAS,MAAM,MAEjC,YAAY,OACZ,SAAS,SAAS,MAAM;AAE1B,QAAM,SAASC,QAAM,QAAQ;AAAA,IAC3B,GAAG;AAAA,IACH;AAAA,EAAA,CACD;AAED,MAAI,CAAC;AACH,UAAM,IAAI,MAAM,mBAAmB,QAAQ,EAAE;AAG/C,SAAO;AACT;AAEA,SAAS,WAAW,QAAwB;AAE1C,QAAM,aAAa,OAAO,MAAM,uBAAuB;AACvD,SAAK,aAIE,WACJ,IAAI,CAAC,cACG,UAAU,MAAM;AAAA,CAAI,EAAE,MAAM,GAAG,EAAE,EAAE,KAAK;AAAA,CAAI,CACpD,EACA,KAAK;AAAA,CAAI,IAPH;AAQX;AAEA,SAAS,SAAS,QAAwB;AAKxC,QAAM,UAAU,iBAAiB,QAHb,kEAGgC;AACpD,SAAK,QAAQ,SAIN,QAAQ,IAAI,CAAC,UAAU,MAAM,CAAC,CAAC,EAAE,KAAK;AAAA,CAAI,IAHxC;AAIX;AAGA,SAAS,iBAAiB,KAAa,OAAmC;AACxE,MAAI,CAAC,MAAM;AACT,UAAM,IAAI,MAAM,0DAA0D;AAG5E,QAAM,UAAU,CAAA;AAChB,MAAI;AACJ,UAAQ,QAAQ,MAAM,KAAK,GAAG,OAAO;AACnC,YAAQ,KAAK,KAAK;AAEpB,SAAO;AACT;AC7DA,MAAMC,UAAQ,YAAY,kCAAkC,GA4BtD,6BAA6B,CAAC,MAAM,GACpC,8BAA8B,CAAC,aAAa;AAO3C,SAAS,kBAAkB;AAAA,EAChC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,SAAS,CAAA;AAAA,EACT,cAAc,CAAA;AAChB,GASgC;AAI9B,MAHAA;AAAAA,IACE,kBAAkB,KAAK,IAAI,OAAO,QAAQ,IAAI,KAAK,KAAK,MAAM,IAAI,IAAI,KAAK,KAAK,MAAM,MAAM;AAAA,EAAA,GAG5FC,EAAW,2BAA2B,IAAI,KAC1CA,EAAW,aAAa,KAAK,GAAG,KAChC,2BAA2B,SAAS,KAAK,IAAI,IAAI;AAEjD,WAAO,kBAAkB;AAAA,MACvB,MAAM,KAAK;AAAA,MACX;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IAAA,CACD;AAGH,MAAIA,EAAW,kBAAkB,IAAI,GAAG;AACtC,UAAM,sBAAsB,KAAK,YAAY;AAAA,MAAI,CAAC,eAChD,kBAAkB;AAAA,QAChB,MAAM;AAAA,QACN;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MAAA,CACD;AAAA,IAAA;AAEH,WAAO,KAAK,OACT,IAAI,CAAC,OAAO,SACH,MAAM,MAAM,UAAU,OAAO,oBAAoB,GAAG,KAAK,GAClE,EACA,KAAK,EAAE;AAAA,EACZ;AAEA,MAAIA,EAAW,UAAU,IAAI,GAAG;AAC9B,QAAI,KAAK,SAAS,iBAAiB,KAAK,SAAS;AAC/C,YAAM,IAAI,MAAM,6BAA6B,KAAK,IAAI,EAAE;AAG1D,WAAO,KAAK,MAAM,SAAA;AAAA,EACpB;AAEA,MAAIA,EAAW,aAAa,IAAI;AAC9B,WAAO,kBAAkB;AAAA,MACvB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IAAA,CACD;AAGH,MAAIA,EAAW,qBAAqB,IAAI,GAAG;AACzC,UAAM,OAAO,KAAK,SAASA,EAAW,oBAAoB,KAAK,EAAE,KAAK,KAAK,GAAG;AAC9E,QAAI,CAAC;AACH,YAAM,IAAI,MAAM,iCAAiC;AAGnD,WAAO,kBAAkB;AAAA,MACvB,MAAM;AAAA,MACN;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IAAA,CACD;AAAA,EACH;AAEA,MACEA,EAAW,iBAAiB,IAAI,KAChCA,EAAW,aAAa,KAAK,MAAM,KACnC,4BAA4B,SAAS,KAAK,OAAO,IAAI;AAErD,WAAO,kBAAkB;AAAA,MACvB,MAAM,KAAK,UAAU,CAAC;AAAA,MACtB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IAAA,CACD;AAGH,MAAIA,EAAW,iBAAiB,IAAI;AAClC,WAAO,sBAAsB;AAAA,MAC3B;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IAEF,CAAC;AAGH,MACEA,EAAW,0BAA0B,IAAI,KACzCA,EAAW,sBAAsB,IAAI,KACrCA,EAAW,qBAAqB,IAAI,GACpC;AACA,UAAM,WAAW,IAAI,MAAM,MAAM,MAAM,KAAK;AAE5C,WAAA,OAAO,QAAQ,CAAC,OAAO,MAAM;AAC3B,eAAS,KAAK;AAAA,QACZ,IAAI;AAAA,QACJ,MAAM,YAAY,CAAC;AAAA,MAAA,CACpB;AAAA,IACH,CAAC,GAEM,kBAAkB;AAAA,MACvB,MAAM,KAAK;AAAA,MACX,QAAQ,KAAK;AAAA,MACb;AAAA,MACA,OAAO;AAAA,MACP;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IAAA,CACD;AAAA,EACH;AAEA,MAAIA,EAAW,gBAAgB,IAAI;AACjC,WAAO,kBAAkB;AAAA,MACvB,MAAM,KAAK;AAAA,MACX;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IAAA,CACD;AAGH,MAAIA,EAAW,yBAAyB,IAAI,KAAKA,EAAW,kBAAkB,IAAI;AAChF,WAAO,uBAAuB,EAAC,MAAM,MAAa,UAAU,aAAa,UAAU,aAAY;AAGjG,MAAIA,EAAW,oBAAoB,IAAI;AACrC,WAAO,kBAAkB;AAAA,MACvB,MAAM,KAAK;AAAA,MACX;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IAAA,CACD;AAGH,QAAM,IAAI;AAAA,IACR,gCAAgC,KAAK,IAAI,OAAO,QAAQ,IAAI,KAAK,KAAK,MAAM,IAAI,IAAI,KAAK,KAAK,MAAM,MAAM;AAAA,EAAA;AAE9G;AAEA,SAAS,kBAAkB;AAAA,EACzB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,GASgC;AAC9B,QAAM,aAAa,OAAO;AAAA,IACxB,CAAC,UACEA,EAAW,aAAa,KAAK,KAAK,KAAK,SAAS,MAAM,QACtDA,EAAW,oBAAoB,KAAK,KACnCA,EAAW,aAAa,MAAM,IAAI,KAClC,KAAK,SAAS,MAAM,KAAK;AAAA,EAAA;AAE/B,MAAI,WAAW,YAAY,UAAU;AAIrC,MAHI,CAAC,YAAY,cAAc,KAAKA,EAAW,oBAAoB,OAAO,UAAU,CAAC,MACnF,WAAW,OAAO,UAAU,EAAE,QAE5B,YAAYA,EAAW,UAAU,QAAQ;AAC3C,WAAO,kBAAkB;AAAA,MACvB,MAAM;AAAA,MACN;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IAAA,CACD;AAEH,QAAM,UAAU,MAAM,WAAW,KAAK,IAAI;AAC1C,MAAI,SAAS;AACX,QAAIA,EAAW,aAAa,QAAQ,KAAK,IAAI,KAC5B,QAAQ,KAAK,KAAK,SAAS,KAAK;AAE7C,YAAM,IAAI;AAAA,QACR,sCAAsC,KAAK,IAAI,SAAS,QAAQ,IAAI,KAAK,KAAK,MAAM,IAAI,IAAI,KAAK,KAAK,MAAM,MAAM;AAAA,MAAA;AAIxH,WAAO,kBAAkB;AAAA,MACvB,MAAM,QAAQ,KAAK;AAAA,MACnB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IAAA,CACD;AAAA,EACH;AAEA,QAAM,IAAI;AAAA,IACR,oCAAoC,KAAK,IAAI,QAAQ,QAAQ,IAAI,KAAK,KAAK,MAAM,IAAI,IAAI,KAAK,KAAK,MAAM,MAAM;AAAA,EAAA;AAEnH;AAEA,SAAS,sBAAsB;AAAA,EAC7B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,GASgC;AAC9B,QAAM,EAAC,WAAU;AACjB,SAAO,kBAAkB;AAAA,IACvB,MAAM;AAAA,IACN;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,aAAa,KAAK;AAAA,EAAA,CACnB;AACH;AAEA,SAAS,uBAAuB;AAAA,EAC9B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,GAQgC;AAC9B,MAAI;AAoBJ,MAnBA,SAAS,MAAM;AAAA,IACb,kBAAkB,GAAG;AACnB,UAAKA,EAAW,oBAAoB,EAAE,IAAI;AAG1C,mBAAW,aAAa,EAAE,KAAK,YAAY;AACzC,cAAIA,EAAW,yBAAyB,SAAS,KAC3C,UAAU,MAAM,KAAK,mBAAmB,KAAK,MAAM,MAAM;AAC3D,gCAAoB,EAAE;AACtB;A