sanity
Version:
Sanity is a real-time content infrastructure with a scalable, hosted backend featuring a Graph Oriented Query Language (GROQ), asset pipelines and fast edge caches
1 lines • 37.7 kB
Source Map (JSON)
{"version":3,"file":"validateDocuments.cjs","sources":["../../../../src/_internal/cli/util/extractDocumentsFromNdjsonOrTarball.ts","../../../../src/_internal/cli/util/workerChannels.ts","../../../../src/_internal/cli/threads/validateDocuments.ts"],"sourcesContent":["import path from 'node:path'\nimport readline from 'node:readline'\nimport {Readable, type Writable} from 'node:stream'\nimport zlib from 'node:zlib'\n\nimport {type SanityDocument} from '@sanity/types'\nimport tar from 'tar-stream'\n\nconst HEADER_SIZE = 300\n\n// https://github.com/kevva/is-gzip/blob/13dab7c877787bd5cff9de5482b1736f00df99c6/index.js\nconst isGzip = (buf: Buffer) =>\n buf.length >= 3 && buf[0] === 0x1f && buf[1] === 0x8b && buf[2] === 0x08\n\n// https://github.com/watson/is-deflate/blob/f9e8f0c7814eed715e13e29e97c69acee319686a/index.js\nconst isDeflate = (buf: Buffer) =>\n buf.length >= 2 && buf[0] === 0x78 && (buf[1] === 1 || buf[1] === 0x9c || buf[1] === 0xda)\n\n// https://github.com/kevva/is-tar/blob/d295ffa2002a5d415946fc3d49f024ace8c28bd3/index.js\nconst isTar = (buf: Buffer) =>\n buf.length >= 262 &&\n buf[257] === 0x75 &&\n buf[258] === 0x73 &&\n buf[259] === 0x74 &&\n buf[260] === 0x61 &&\n buf[261] === 0x72\n\nasync function* extract<TReturn>(\n stream: AsyncIterable<Buffer>,\n extractor: Writable & AsyncIterable<TReturn>,\n) {\n // set up a task to drain the input iterable into the extractor asynchronously\n // before this function delegates to the extractor's iterable (containing the\n // result of the extraction)\n const drained = new Promise<void>((resolve, reject) => {\n // setTimeout is used here to ensure draining occurs after delegation\n setTimeout(async () => {\n try {\n for await (const chunk of stream) extractor.write(chunk)\n extractor.end()\n resolve()\n } catch (err) {\n reject(err)\n }\n })\n })\n\n // have this function delegate the results of the extractor\n yield* extractor\n await drained\n extractor.destroy()\n}\n\n/**\n * Given a async iterable of buffers, looks at the header of the file in the\n * first few bytes to see the file type then extracts the contents tries again.\n * If the given iterable of buffers is a tarball then it looks for an ndjson\n * files and returns another iterable of buffers with the contents of the\n * ndjson file\n */\nasync function* maybeExtractNdjson(stream: AsyncIterable<Buffer>): AsyncIterable<Buffer> {\n let buffer = Buffer.alloc(0)\n\n for await (const chunk of stream) {\n buffer = Buffer.concat([buffer, chunk])\n if (buffer.length < HEADER_SIZE) continue\n\n const fileHeader = buffer\n const restOfStream = async function* restOfStream() {\n yield fileHeader\n yield* stream\n }\n\n if (isGzip(fileHeader)) {\n yield* maybeExtractNdjson(extract(restOfStream(), zlib.createGunzip()))\n return\n }\n\n if (isDeflate(fileHeader)) {\n yield* maybeExtractNdjson(extract(restOfStream(), zlib.createDeflate()))\n return\n }\n\n if (isTar(fileHeader)) {\n for await (const entry of extract(restOfStream(), tar.extract())) {\n const filename = path.basename(entry.header.name)\n const extname = path.extname(filename).toLowerCase()\n // ignore hidden and non-ndjson files\n if (extname !== '.ndjson' || filename.startsWith('.')) continue\n\n for await (const ndjsonChunk of entry) yield ndjsonChunk\n return\n }\n }\n\n yield* restOfStream()\n }\n}\n\n/**\n * Takes in an async iterable of buffers from an ndjson file or tarball and\n * returns an async iterable of sanity documents.\n */\nexport async function* extractDocumentsFromNdjsonOrTarball(\n file: AsyncIterable<Buffer>,\n): AsyncIterable<SanityDocument> {\n const lines = readline.createInterface({\n input: Readable.from(maybeExtractNdjson(file)),\n })\n\n for await (const line of lines) {\n const trimmed = line.trim()\n if (trimmed) yield JSON.parse(trimmed) as SanityDocument\n }\n lines.close()\n}\n","import {type MessagePort, type Worker} from 'node:worker_threads'\n\ntype StreamReporter<TPayload = unknown> = {emit: (payload: TPayload) => void; end: () => void}\ntype EventReporter<TPayload = unknown> = (payload: TPayload) => void\ntype EventReceiver<TPayload = unknown> = () => Promise<TPayload>\ntype StreamReceiver<TPayload = unknown> = () => AsyncIterable<TPayload>\n\ntype EventKeys<TWorkerChannel extends WorkerChannel> = {\n [K in keyof TWorkerChannel]: TWorkerChannel[K] extends WorkerChannelEvent<any> ? K : never\n}[keyof TWorkerChannel]\ntype StreamKeys<TWorkerChannel extends WorkerChannel> = {\n [K in keyof TWorkerChannel]: TWorkerChannel[K] extends WorkerChannelStream<any> ? K : never\n}[keyof TWorkerChannel]\n\ntype EventMessage<TPayload = unknown> = {type: 'event'; name: string; payload: TPayload}\ntype StreamEmissionMessage<TPayload = unknown> = {type: 'emission'; name: string; payload: TPayload}\ntype StreamEndMessage = {type: 'end'; name: string}\ntype WorkerChannelMessage = EventMessage | StreamEmissionMessage | StreamEndMessage\n\n/**\n * Represents the definition of a \"worker channel\" to report progress from the\n * worker to the parent. Worker channels can define named events or streams and\n * the worker will report events and streams while the parent will await them.\n * This allows the control flow of the parent to follow the control flow of the\n * worker 1-to-1.\n */\nexport type WorkerChannel<\n TWorkerChannel extends Record<\n string,\n WorkerChannelEvent<unknown> | WorkerChannelStream<unknown>\n > = Record<string, WorkerChannelEvent<unknown> | WorkerChannelStream<unknown>>,\n> = TWorkerChannel\n\nexport type WorkerChannelEvent<TPayload = void> = {type: 'event'; payload: TPayload}\nexport type WorkerChannelStream<TPayload = void> = {type: 'stream'; payload: TPayload}\n\nexport interface WorkerChannelReporter<TWorkerChannel extends WorkerChannel> {\n event: {\n [K in EventKeys<TWorkerChannel>]: TWorkerChannel[K] extends WorkerChannelEvent<infer TPayload>\n ? EventReporter<TPayload>\n : void\n }\n stream: {\n [K in StreamKeys<TWorkerChannel>]: TWorkerChannel[K] extends WorkerChannelStream<infer TPayload>\n ? StreamReporter<TPayload>\n : void\n }\n}\n\nexport interface WorkerChannelReceiver<TWorkerChannel extends WorkerChannel> {\n event: {\n [K in EventKeys<TWorkerChannel>]: TWorkerChannel[K] extends WorkerChannelEvent<infer TPayload>\n ? EventReceiver<TPayload>\n : void\n }\n stream: {\n [K in StreamKeys<TWorkerChannel>]: TWorkerChannel[K] extends WorkerChannelStream<infer TPayload>\n ? StreamReceiver<TPayload>\n : void\n }\n // TODO: good candidate for [Symbol.asyncDispose] when our tooling better supports it\n dispose: () => Promise<number>\n}\n\n/**\n * A simple queue that has two primary methods: `push(message)` and\n * `await next()`. This message queue is used by the \"receiver\" of the worker\n * channel and this class handles buffering incoming messages if the worker is\n * producing faster than the parent as well as returning a promise if there is\n * no message yet in the queue when the parent awaits `next()`.\n */\nclass MessageQueue<T> {\n resolver: ((result: IteratorResult<T>) => void) | null = null\n queue: T[] = []\n\n push(message: T) {\n if (this.resolver) {\n this.resolver({value: message, done: false})\n this.resolver = null\n } else {\n this.queue.push(message)\n }\n }\n\n next(): Promise<IteratorResult<T>> {\n if (this.queue.length) {\n return Promise.resolve({value: this.queue.shift()!, done: false})\n }\n\n return new Promise((resolve) => (this.resolver = resolve))\n }\n\n end() {\n if (this.resolver) {\n this.resolver({value: undefined, done: true})\n }\n }\n}\n\nfunction isWorkerChannelMessage(message: unknown): message is WorkerChannelMessage {\n if (typeof message !== 'object') return false\n if (!message) return false\n if (!('type' in message)) return false\n if (typeof message.type !== 'string') return false\n const types: string[] = ['event', 'emission', 'end'] satisfies WorkerChannelMessage['type'][]\n return types.includes(message.type)\n}\n\n/**\n * Creates a \"worker channel receiver\" that subscribes to incoming messages\n * from the given worker and returns promises for worker channel events and\n * async iterators for worker channel streams.\n */\nexport function createReceiver<TWorkerChannel extends WorkerChannel>(\n worker: Worker,\n): WorkerChannelReceiver<TWorkerChannel> {\n const _events = new Map<string, MessageQueue<EventMessage>>()\n const _streams = new Map<string, MessageQueue<StreamEmissionMessage>>()\n const errors = new MessageQueue<{type: 'error'; error: unknown}>()\n\n const eventQueue = (name: string) => {\n const queue = _events.get(name) ?? new MessageQueue()\n if (!_events.has(name)) _events.set(name, queue)\n return queue\n }\n\n const streamQueue = (name: string) => {\n const queue = _streams.get(name) ?? new MessageQueue()\n if (!_streams.has(name)) _streams.set(name, queue)\n return queue\n }\n\n const handleMessage = (message: unknown) => {\n if (!isWorkerChannelMessage(message)) return\n if (message.type === 'event') eventQueue(message.name).push(message)\n if (message.type === 'emission') streamQueue(message.name).push(message)\n if (message.type === 'end') streamQueue(message.name).end()\n }\n\n const handleError = (error: unknown) => {\n errors.push({type: 'error', error})\n }\n\n worker.addListener('message', handleMessage)\n worker.addListener('error', handleError)\n\n return {\n event: new Proxy({} as WorkerChannelReceiver<TWorkerChannel>['event'], {\n get: (target, name) => {\n if (typeof name !== 'string') return target[name as keyof typeof target]\n\n const eventReceiver: EventReceiver = async () => {\n const {value} = await Promise.race([eventQueue(name).next(), errors.next()])\n if (value.type === 'error') throw value.error\n return value.payload\n }\n\n return eventReceiver\n },\n }),\n stream: new Proxy({} as WorkerChannelReceiver<TWorkerChannel>['stream'], {\n get: (target, prop) => {\n if (typeof prop !== 'string') return target[prop as keyof typeof target]\n const name = prop // alias for better typescript narrowing\n\n async function* streamReceiver() {\n while (true) {\n const {value, done} = await Promise.race([streamQueue(name).next(), errors.next()])\n if (done) return\n if (value.type === 'error') throw value.error\n yield value.payload\n }\n }\n\n return streamReceiver satisfies StreamReceiver\n },\n }),\n dispose: () => {\n worker.removeListener('message', handleMessage)\n worker.removeListener('error', handleError)\n return worker.terminate()\n },\n }\n}\n\n/**\n * Creates a \"worker channel reporter\" that sends messages to the given\n * `parentPort` to be received by a worker channel receiver.\n */\nexport function createReporter<TWorkerChannel extends WorkerChannel>(\n parentPort: MessagePort | null,\n): WorkerChannelReporter<TWorkerChannel> {\n if (!parentPort) {\n throw new Error('parentPart was falsy')\n }\n\n return {\n event: new Proxy({} as WorkerChannelReporter<TWorkerChannel>['event'], {\n get: (target, name) => {\n if (typeof name !== 'string') return target[name as keyof typeof target]\n\n const eventReporter: EventReporter = (payload) => {\n const message: EventMessage = {type: 'event', name, payload}\n parentPort.postMessage(message)\n }\n\n return eventReporter\n },\n }),\n stream: new Proxy({} as WorkerChannelReporter<TWorkerChannel>['stream'], {\n get: (target, name) => {\n if (typeof name !== 'string') return target[name as keyof typeof target]\n\n const streamReporter: StreamReporter = {\n emit: (payload) => {\n const message: StreamEmissionMessage = {type: 'emission', name, payload}\n parentPort.postMessage(message)\n },\n end: () => {\n const message: StreamEndMessage = {type: 'end', name}\n parentPort.postMessage(message)\n },\n }\n\n return streamReporter\n },\n }),\n }\n}\n","import fs from 'node:fs'\nimport os from 'node:os'\nimport path from 'node:path'\nimport readline from 'node:readline'\nimport {Readable} from 'node:stream'\nimport {isMainThread, parentPort, workerData as _workerData} from 'node:worker_threads'\n\nimport {\n type ClientConfig,\n createClient,\n type SanityClient,\n type SanityDocument,\n} from '@sanity/client'\nimport {isReference, type ValidationContext, type ValidationMarker} from '@sanity/types'\nimport {isRecord, validateDocument} from 'sanity'\n\nimport {extractDocumentsFromNdjsonOrTarball} from '../util/extractDocumentsFromNdjsonOrTarball'\nimport {getStudioWorkspaces} from '../util/getStudioWorkspaces'\nimport {mockBrowserEnvironment} from '../util/mockBrowserEnvironment'\nimport {\n createReporter,\n type WorkerChannel,\n type WorkerChannelEvent,\n type WorkerChannelStream,\n} from '../util/workerChannels'\n\nconst MAX_VALIDATION_CONCURRENCY = 100\nconst DOCUMENT_VALIDATION_TIMEOUT = 30000\nconst REFERENCE_INTEGRITY_BATCH_SIZE = 100\n\ninterface AvailabilityResponse {\n omitted: {id: string; reason: 'existence' | 'permission'}[]\n}\n\n/** @internal */\nexport interface ValidateDocumentsWorkerData {\n workDir: string\n configPath?: string\n workspace?: string\n clientConfig?: Partial<ClientConfig>\n projectId?: string\n dataset?: string\n ndjsonFilePath?: string\n level?: ValidationMarker['level']\n maxCustomValidationConcurrency?: number\n maxFetchConcurrency?: number\n studioHost?: string\n}\n\n/** @internal */\nexport type ValidationWorkerChannel = WorkerChannel<{\n loadedWorkspace: WorkerChannelEvent<{\n name: string\n projectId: string\n dataset: string\n basePath: string\n }>\n loadedDocumentCount: WorkerChannelEvent<{documentCount: number}>\n exportProgress: WorkerChannelStream<{downloadedCount: number; documentCount: number}>\n exportFinished: WorkerChannelEvent<{totalDocumentsToValidate: number}>\n loadedReferenceIntegrity: WorkerChannelEvent\n validation: WorkerChannelStream<{\n validatedCount: number\n documentId: string\n documentType: string\n intentUrl?: string\n revision: string\n level: ValidationMarker['level']\n markers: ValidationMarker[]\n }>\n}>\n\nconst {\n clientConfig,\n workDir,\n workspace: workspaceName,\n configPath,\n dataset,\n ndjsonFilePath,\n projectId,\n level,\n maxCustomValidationConcurrency,\n maxFetchConcurrency,\n studioHost,\n} = _workerData as ValidateDocumentsWorkerData\n\nif (isMainThread || !parentPort) {\n throw new Error('This module must be run as a worker thread')\n}\n\nconst levelValues = {error: 0, warning: 1, info: 2} as const\n\nconst report = createReporter<ValidationWorkerChannel>(parentPort)\n\nconst getReferenceIds = (value: unknown) => {\n const ids = new Set<string>()\n\n function traverse(node: unknown) {\n if (isReference(node)) {\n ids.add(node._ref)\n return\n }\n\n if (typeof node === 'object' && node) {\n // Note: this works for arrays too\n for (const item of Object.values(node)) traverse(item)\n }\n }\n\n traverse(value)\n\n return ids\n}\n\nconst idRegex = /^[^-][A-Z0-9._-]*$/i\n\n// during testing, the `doc` endpoint 502'ed if given an invalid ID\nconst isValidId = (id: unknown) => typeof id === 'string' && idRegex.test(id)\nconst shouldIncludeDocument = (document: SanityDocument) => {\n // Filter out system documents and sanity documents\n return !document._type.startsWith('system.') && !document._type.startsWith('sanity.')\n}\n\nasync function* readerToGenerator(reader: ReadableStreamDefaultReader<Uint8Array>) {\n while (true) {\n const {value, done} = await reader.read()\n if (value) yield value\n if (done) return\n }\n}\n\nvoid main().then(() => process.exit())\n\nasync function loadWorkspace() {\n const workspaces = await getStudioWorkspaces({basePath: workDir, configPath})\n\n if (!workspaces.length) {\n throw new Error(`Configuration did not return any workspaces.`)\n }\n\n let _workspace\n if (workspaceName) {\n _workspace = workspaces.find((w) => w.name === workspaceName)\n if (!_workspace) {\n throw new Error(`Could not find any workspaces with name \\`${workspaceName}\\``)\n }\n } else {\n if (workspaces.length !== 1) {\n throw new Error(\n \"Multiple workspaces found. Please specify which workspace to use with '--workspace'.\",\n )\n }\n _workspace = workspaces[0]\n }\n const workspace = _workspace\n\n const client = createClient({\n ...clientConfig,\n dataset: dataset || workspace.dataset,\n projectId: projectId || workspace.projectId,\n requestTagPrefix: 'sanity.cli.validate',\n }).config({apiVersion: 'v2021-03-25'})\n\n report.event.loadedWorkspace({\n projectId: workspace.projectId,\n dataset: workspace.dataset,\n name: workspace.name,\n basePath: workspace.basePath,\n })\n\n return {workspace, client}\n}\n\nasync function downloadFromExport(client: SanityClient) {\n const exportUrl = new URL(client.getUrl(`/data/export/${client.config().dataset}`, false))\n\n const documentCount = await client.fetch('length(*)')\n report.event.loadedDocumentCount({documentCount})\n\n const {token} = client.config()\n const response = await fetch(exportUrl, {\n headers: new Headers({...(token && {Authorization: `Bearer ${token}`})}),\n })\n\n const reader = response.body?.getReader()\n if (!reader) throw new Error('Could not get reader from response body.')\n\n let downloadedCount = 0\n const referencedIds = new Set<string>()\n const documentIds = new Set<string>()\n const lines = readline.createInterface({input: Readable.from(readerToGenerator(reader))})\n\n // Note: we stream the export to a file and then re-read from that file to\n // make this less memory intensive.\n // this is a similar pattern to the import/export CLI commands\n const slugDate = new Date()\n .toISOString()\n .replace(/[^a-z0-9]/gi, '-')\n .toLowerCase()\n const tempOutputFile = path.join(os.tmpdir(), `sanity-validate-${slugDate}.ndjson`)\n const outputStream = fs.createWriteStream(tempOutputFile)\n\n for await (const line of lines) {\n const document = JSON.parse(line) as SanityDocument\n\n if (shouldIncludeDocument(document)) {\n documentIds.add(document._id)\n for (const referenceId of getReferenceIds(document)) {\n referencedIds.add(referenceId)\n }\n\n outputStream.write(`${line}\\n`)\n }\n\n downloadedCount++\n report.stream.exportProgress.emit({downloadedCount, documentCount})\n }\n\n await new Promise<void>((resolve, reject) =>\n outputStream.close((err) => (err ? reject(err) : resolve())),\n )\n\n report.stream.exportProgress.end()\n report.event.exportFinished({totalDocumentsToValidate: documentIds.size})\n\n const getDocuments = () =>\n extractDocumentsFromNdjsonOrTarball(fs.createReadStream(tempOutputFile))\n\n return {documentIds, referencedIds, getDocuments, cleanup: () => fs.promises.rm(tempOutputFile)}\n}\n\nasync function downloadFromFile(filePath: string) {\n const referencedIds = new Set<string>()\n const documentIds = new Set<string>()\n const getDocuments = () => extractDocumentsFromNdjsonOrTarball(fs.createReadStream(filePath))\n\n for await (const document of getDocuments()) {\n if (shouldIncludeDocument(document)) {\n documentIds.add(document._id)\n for (const referenceId of getReferenceIds(document)) {\n referencedIds.add(referenceId)\n }\n }\n }\n\n report.event.exportFinished({totalDocumentsToValidate: documentIds.size})\n\n return {documentIds, referencedIds, getDocuments, cleanup: undefined}\n}\n\ninterface CheckReferenceExistenceOptions {\n client: SanityClient\n referencedIds: Set<string>\n documentIds: Set<string>\n}\n\nasync function checkReferenceExistence({\n client,\n documentIds,\n referencedIds: _referencedIds,\n}: CheckReferenceExistenceOptions) {\n const existingIds = new Set(documentIds)\n const idsToCheck = Array.from(_referencedIds)\n .filter((id) => !existingIds.has(id) && isValidId(id))\n .sort()\n\n const batches = idsToCheck.reduce<string[][]>(\n (acc, next, index) => {\n const batchIndex = Math.floor(index / REFERENCE_INTEGRITY_BATCH_SIZE)\n const batch = acc[batchIndex]\n batch.push(next)\n return acc\n },\n Array.from<string[]>({\n length: Math.ceil(idsToCheck.length / REFERENCE_INTEGRITY_BATCH_SIZE),\n }).map(() => []),\n )\n\n for (const batch of batches) {\n const {omitted} = await client.request<AvailabilityResponse>({\n uri: client.getDataUrl('doc', batch.join(',')),\n json: true,\n query: {excludeContent: 'true'},\n tag: 'documents-availability',\n })\n\n const omittedIds = omitted.reduce<Record<string, 'existence' | 'permission'>>((acc, next) => {\n acc[next.id] = next.reason\n return acc\n }, {})\n\n for (const id of batch) {\n // unless the document ID is in the `omitted` object explictly due to\n // the reason `'existence'`, then it should exist\n if (omittedIds[id] !== 'existence') {\n existingIds.add(id)\n }\n }\n }\n report.event.loadedReferenceIntegrity()\n\n return {existingIds}\n}\n\nasync function main() {\n // note: this is dynamically imported because this module is ESM only and this\n // file gets compiled to CJS at this time\n const {default: pMap} = await import('p-map')\n\n const cleanupBrowserEnvironment = mockBrowserEnvironment(workDir)\n\n let cleanupDownloadedDocuments: (() => Promise<void>) | undefined\n\n try {\n const {client, workspace} = await loadWorkspace()\n const {documentIds, referencedIds, getDocuments, cleanup} = ndjsonFilePath\n ? await downloadFromFile(ndjsonFilePath)\n : await downloadFromExport(client)\n cleanupDownloadedDocuments = cleanup\n const {existingIds} = await checkReferenceExistence({client, referencedIds, documentIds})\n\n const getClient = <TOptions extends Partial<ClientConfig>>(options: TOptions) =>\n client.withConfig(options)\n\n const getDocumentExists: ValidationContext['getDocumentExists'] = ({id}) =>\n Promise.resolve(existingIds.has(id))\n\n const getLevel = (markers: ValidationMarker[]) => {\n let foundWarning = false\n for (const marker of markers) {\n if (marker.level === 'error') return 'error'\n if (marker.level === 'warning') foundWarning = true\n }\n\n if (foundWarning) return 'warning'\n return 'info'\n }\n\n let validatedCount = 0\n\n const validate = async (document: SanityDocument) => {\n let markers: ValidationMarker[]\n\n try {\n const timeout = Symbol('timeout')\n\n const result = await Promise.race([\n validateDocument({\n document,\n workspace,\n getClient,\n getDocumentExists,\n environment: 'cli',\n maxCustomValidationConcurrency,\n maxFetchConcurrency,\n }),\n new Promise<typeof timeout>((resolve) =>\n setTimeout(() => resolve(timeout), DOCUMENT_VALIDATION_TIMEOUT),\n ),\n ])\n\n if (result === timeout) {\n throw new Error(\n `Document '${document._id}' failed to validate within ${DOCUMENT_VALIDATION_TIMEOUT}ms.`,\n )\n }\n\n markers = result\n // remove deprecated `item` from the marker\n .map(({item, ...marker}) => marker)\n // filter out unwanted levels\n .filter((marker) => {\n const markerValue = levelValues[marker.level]\n const flagLevelValue =\n levelValues[level as keyof typeof levelValues] ?? levelValues.info\n return markerValue <= flagLevelValue\n })\n } catch (err) {\n const errorMessage =\n isRecord(err) && typeof err.message === 'string' ? err.message : 'Unknown error'\n\n const message = `Exception occurred while validating value: ${errorMessage}`\n\n markers = [\n {\n message,\n level: 'error',\n path: [],\n },\n ]\n }\n\n validatedCount++\n\n const intentUrl =\n studioHost &&\n `${studioHost}${path.resolve(\n workspace.basePath,\n `/intent/edit/id=${encodeURIComponent(document._id)};type=${encodeURIComponent(\n document._type,\n )}`,\n )}`\n\n report.stream.validation.emit({\n documentId: document._id,\n documentType: document._type,\n revision: document._rev,\n ...(intentUrl && {intentUrl}),\n markers,\n validatedCount,\n level: getLevel(markers),\n })\n }\n\n await pMap(getDocuments(), validate, {concurrency: MAX_VALIDATION_CONCURRENCY})\n\n report.stream.validation.end()\n } finally {\n await cleanupDownloadedDocuments?.()\n cleanupBrowserEnvironment()\n }\n}\n"],"names":["HEADER_SIZE","isGzip","buf","length","isDeflate","isTar","extract","stream","extractor","drained","Promise","resolve","reject","setTimeout","chunk","write","end","err","destroy","maybeExtractNdjson","buffer","Buffer","alloc","concat","fileHeader","restOfStream","zlib","createGunzip","createDeflate","entry","tar","filename","path","basename","header","name","extname","toLowerCase","startsWith","ndjsonChunk","extractDocumentsFromNdjsonOrTarball","file","lines","readline","createInterface","input","Readable","from","line","trimmed","trim","JSON","parse","close","createReporter","parentPort","Error","event","Proxy","get","target","payload","message","type","postMessage","emit","MAX_VALIDATION_CONCURRENCY","DOCUMENT_VALIDATION_TIMEOUT","REFERENCE_INTEGRITY_BATCH_SIZE","clientConfig","workDir","workspace","workspaceName","configPath","dataset","ndjsonFilePath","projectId","level","maxCustomValidationConcurrency","maxFetchConcurrency","studioHost","_workerData","isMainThread","levelValues","error","warning","info","report","getReferenceIds","value","ids","Set","traverse","node","isReference","add","_ref","item","Object","values","idRegex","isValidId","id","test","shouldIncludeDocument","document","_type","readerToGenerator","reader","done","read","main","then","process","exit","loadWorkspace","workspaces","getStudioWorkspaces","basePath","_workspace","find","w","client","createClient","requestTagPrefix","config","apiVersion","loadedWorkspace","downloadFromExport","exportUrl","URL","getUrl","documentCount","fetch","loadedDocumentCount","token","headers","Headers","Authorization","body","getReader","downloadedCount","referencedIds","documentIds","slugDate","Date","toISOString","replace","tempOutputFile","join","os","tmpdir","outputStream","fs","createWriteStream","_id","referenceId","exportProgress","exportFinished","totalDocumentsToValidate","size","getDocuments","createReadStream","cleanup","promises","rm","downloadFromFile","filePath","undefined","checkReferenceExistence","_referencedIds","existingIds","idsToCheck","Array","filter","has","sort","batches","reduce","acc","next","index","batchIndex","Math","floor","batch","push","ceil","map","omitted","request","uri","getDataUrl","json","query","excludeContent","tag","omittedIds","reason","loadedReferenceIntegrity","default","pMap","cleanupBrowserEnvironment","mockBrowserEnvironment","cleanupDownloadedDocuments","getClient","options","withConfig","getDocumentExists","getLevel","markers","foundWarning","marker","validatedCount","validate","timeout","result","race","validateDocument","environment","markerValue","flagLevelValue","isRecord","intentUrl","encodeURIComponent","validation","documentId","documentType","revision","_rev","concurrency"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAQA,MAAMA,cAAc,KAGdC,SAAUC,CAAAA,QACdA,IAAIC,UAAU,KAAKD,IAAI,CAAC,MAAM,MAAQA,IAAI,CAAC,MAAM,OAAQA,IAAI,CAAC,MAAM,GAGhEE,YAAaF,CAAAA,QACjBA,IAAIC,UAAU,KAAKD,IAAI,CAAC,MAAM,QAASA,IAAI,CAAC,MAAM,KAAKA,IAAI,CAAC,MAAM,OAAQA,IAAI,CAAC,MAAM,MAGjFG,QAASH,CAAAA,QACbA,IAAIC,UAAU,OACdD,IAAI,GAAG,MAAM,OACbA,IAAI,GAAG,MAAM,OACbA,IAAI,GAAG,MAAM,OACbA,IAAI,GAAG,MAAM,MACbA,IAAI,GAAG,MAAM;AAEf,gBAAgBI,QACdC,QACAC,WACA;AAIA,QAAMC,UAAU,IAAIC,QAAc,CAACC,SAASC,WAAW;AAErDC,eAAW,YAAY;AACrB,UAAI;AACF,yBAAiBC,SAASP,OAAQC,WAAUO,MAAMD,KAAK;AACvDN,kBAAUQ,IAAAA,GACVL,QAAAA;AAAAA,MACF,SAASM,KAAK;AACZL,eAAOK,GAAG;AAAA,MACZ;AAAA,IACF,CAAC;AAAA,EACH,CAAC;AAGD,SAAOT,WACP,MAAMC,SACND,UAAUU,QAAAA;AACZ;AASA,gBAAgBC,mBAAmBZ,QAAsD;AACvF,MAAIa,SAASC,OAAOC,MAAM,CAAC;AAE3B,mBAAiBR,SAASP,QAAQ;AAEhC,QADAa,SAASC,OAAOE,OAAO,CAACH,QAAQN,KAAK,CAAC,GAClCM,OAAOjB,SAASH,YAAa;AAEjC,UAAMwB,aAAaJ,QACbK,eAAe,mBAA+B;AAClD,YAAMD,YACN,OAAOjB;AAAAA,IACT;AAEA,QAAIN,OAAOuB,UAAU,GAAG;AACtB,aAAOL,mBAAmBb,QAAQmB,aAAAA,GAAgBC,cAAAA,QAAKC,aAAAA,CAAc,CAAC;AACtE;AAAA,IACF;AAEA,QAAIvB,UAAUoB,UAAU,GAAG;AACzB,aAAOL,mBAAmBb,QAAQmB,aAAAA,GAAgBC,cAAAA,QAAKE,cAAAA,CAAe,CAAC;AACvE;AAAA,IACF;AAEA,QAAIvB,MAAMmB,UAAU;AAClB,uBAAiBK,SAASvB,QAAQmB,aAAAA,GAAgBK,aAAAA,QAAIxB,QAAAA,CAAS,GAAG;AAChE,cAAMyB,WAAWC,cAAAA,QAAKC,SAASJ,MAAMK,OAAOC,IAAI;AAGhD,YAAIC,EAFYJ,cAAAA,QAAKI,QAAQL,QAAQ,EAAEM,kBAEvB,aAAaN,SAASO,WAAW,GAAG,IAEpD;AAAA,2BAAiBC,eAAeV,MAAO,OAAMU;AAC7C;AAAA,QAAA;AAAA,MACF;AAGF,WAAOd,aAAAA;AAAAA,EACT;AACF;AAMA,gBAAuBe,oCACrBC,MAC+B;AAC/B,QAAMC,QAAQC,kBAAAA,QAASC,gBAAgB;AAAA,IACrCC,OAAOC,YAAAA,SAASC,KAAK5B,mBAAmBsB,IAAI,CAAC;AAAA,EAAA,CAC9C;AAED,mBAAiBO,QAAQN,OAAO;AAC9B,UAAMO,UAAUD,KAAKE,KAAAA;AACjBD,gBAAS,MAAME,KAAKC,MAAMH,OAAO;AAAA,EACvC;AACAP,QAAMW,MAAAA;AACR;AC0EO,SAASC,eACdC,YACuC;AACvC,MAAI,CAACA;AACH,UAAM,IAAIC,MAAM,sBAAsB;AAGxC,SAAO;AAAA,IACLC,OAAO,IAAIC,MAAM,IAAsD;AAAA,MACrEC,KAAKA,CAACC,QAAQzB,SACR,OAAOA,QAAS,WAAiByB,OAAOzB,IAAI,IAEV0B,CAAAA,YAAY;AAChD,cAAMC,UAAwB;AAAA,UAACC,MAAM;AAAA,UAAS5B;AAAAA,UAAM0B;AAAAA,QAAAA;AACpDN,mBAAWS,YAAYF,OAAO;AAAA,MAChC;AAAA,IAAA,CAIH;AAAA,IACDvD,QAAQ,IAAImD,MAAM,IAAuD;AAAA,MACvEC,KAAKA,CAACC,QAAQzB,SACR,OAAOA,QAAS,WAAiByB,OAAOzB,IAAI,IAET;AAAA,QACrC8B,MAAOJ,CAAAA,YAAY;AACjB,gBAAMC,UAAiC;AAAA,YAACC,MAAM;AAAA,YAAY5B;AAAAA,YAAM0B;AAAAA,UAAAA;AAChEN,qBAAWS,YAAYF,OAAO;AAAA,QAChC;AAAA,QACA9C,KAAKA,MAAM;AACT,gBAAM8C,UAA4B;AAAA,YAACC,MAAM;AAAA,YAAO5B;AAAAA,UAAAA;AAChDoB,qBAAWS,YAAYF,OAAO;AAAA,QAChC;AAAA,MAAA;AAAA,IACF,CAIH;AAAA,EAAA;AAEL;AC1MA,MAAMI,6BAA6B,KAC7BC,8BAA8B,KAC9BC,iCAAiC,KA4CjC;AAAA,EACJC;AAAAA,EACAC;AAAAA,EACAC,WAAWC;AAAAA,EACXC;AAAAA,EACAC;AAAAA,EACAC;AAAAA,EACAC;AAAAA,EACAC;AAAAA,EACAC;AAAAA,EACAC;AAAAA,EACAC;AACF,IAAIC,oBAAAA;AAEJ,IAAIC,oBAAAA,gBAAgB,CAAC3B,oBAAAA;AACnB,QAAM,IAAIC,MAAM,4CAA4C;AAG9D,MAAM2B,cAAc;AAAA,EAACC,OAAO;AAAA,EAAGC,SAAS;AAAA,EAAGC,MAAM;AAAC,GAE5CC,SAASjC,eAAwCC,oBAAAA,UAAU,GAE3DiC,kBAAmBC,CAAAA,UAAmB;AAC1C,QAAMC,0BAAUC,IAAAA;AAEhB,WAASC,SAASC,MAAe;AAC/B,QAAIC,MAAAA,YAAYD,IAAI,GAAG;AACrBH,UAAIK,IAAIF,KAAKG,IAAI;AACjB;AAAA,IACF;AAEA,QAAI,OAAOH,QAAS,YAAYA;AAE9B,iBAAWI,QAAQC,OAAOC,OAAON,IAAI,YAAYI,IAAI;AAAA,EAEzD;AAEAL,SAAAA,SAASH,KAAK,GAEPC;AACT,GAEMU,UAAU,uBAGVC,YAAaC,CAAAA,OAAgB,OAAOA,MAAO,YAAYF,QAAQG,KAAKD,EAAE,GACtEE,wBAAyBC,CAAAA,aAEtB,CAACA,SAASC,MAAMpE,WAAW,SAAS,KAAK,CAACmE,SAASC,MAAMpE,WAAW,SAAS;AAGtF,gBAAgBqE,kBAAkBC,QAAiD;AACjF,aAAa;AACX,UAAM;AAAA,MAACnB;AAAAA,MAAOoB;AAAAA,IAAAA,IAAQ,MAAMD,OAAOE,KAAAA;AAEnC,QADIrB,UAAO,MAAMA,QACboB,KAAM;AAAA,EACZ;AACF;AAEKE,KAAAA,EAAOC,KAAK,MAAMC,QAAQC,MAAM;AAErC,eAAeC,gBAAgB;AAC7B,QAAMC,aAAa,MAAMC,wCAAoB;AAAA,IAACC,UAAUhD;AAAAA,IAASG;AAAAA,EAAAA,CAAW;AAE5E,MAAI,CAAC2C,WAAWjH;AACd,UAAM,IAAIqD,MAAM,8CAA8C;AAGhE,MAAI+D;AACJ,MAAI/C;AAEF,QADA+C,aAAaH,WAAWI,KAAMC,CAAAA,MAAMA,EAAEtF,SAASqC,aAAa,GACxD,CAAC+C;AACH,YAAM,IAAI/D,MAAM,6CAA6CgB,aAAa,IAAI;AAAA,SAE3E;AACL,QAAI4C,WAAWjH,WAAW;AACxB,YAAM,IAAIqD,MACR,sFACF;AAEF+D,iBAAaH,WAAW,CAAC;AAAA,EAC3B;AACA,QAAM7C,YAAYgD,YAEZG,WAASC,oBAAa;AAAA,IAC1B,GAAGtD;AAAAA,IACHK,SAASA,WAAWH,UAAUG;AAAAA,IAC9BE,WAAWA,aAAaL,UAAUK;AAAAA,IAClCgD,kBAAkB;AAAA,EAAA,CACnB,EAAEC,OAAO;AAAA,IAACC,YAAY;AAAA,EAAA,CAAc;AAErCvC,SAAAA,OAAO9B,MAAMsE,gBAAgB;AAAA,IAC3BnD,WAAWL,UAAUK;AAAAA,IACrBF,SAASH,UAAUG;AAAAA,IACnBvC,MAAMoC,UAAUpC;AAAAA,IAChBmF,UAAU/C,UAAU+C;AAAAA,EAAAA,CACrB,GAEM;AAAA,IAAC/C;AAAAA,IAAAA,QAAWmD;AAAAA,EAAAA;AACrB;AAEA,eAAeM,mBAAmBN,SAAsB;AACtD,QAAMO,YAAY,IAAIC,IAAIR,QAAOS,OAAO,gBAAgBT,QAAOG,OAAAA,EAASnD,OAAO,IAAI,EAAK,CAAC,GAEnF0D,gBAAgB,MAAMV,QAAOW,MAAM,WAAW;AACpD9C,SAAO9B,MAAM6E,oBAAoB;AAAA,IAACF;AAAAA,EAAAA,CAAc;AAEhD,QAAM;AAAA,IAACG;AAAAA,EAAAA,IAASb,QAAOG,OAAAA,GAKjBjB,UAJW,MAAMyB,MAAMJ,WAAW;AAAA,IACtCO,SAAS,IAAIC,QAAQ;AAAA,MAAC,GAAIF,SAAS;AAAA,QAACG,eAAe,UAAUH,KAAK;AAAA,MAAA;AAAA,IAAE,CAAG;AAAA,EAAA,CACxE,GAEuBI,MAAMC,UAAAA;AAC9B,MAAI,CAAChC,OAAQ,OAAM,IAAIpD,MAAM,0CAA0C;AAEvE,MAAIqF,kBAAkB;AACtB,QAAMC,gBAAgB,oBAAInD,IAAAA,GACpBoD,kCAAkBpD,OAClBjD,QAAQC,kBAAAA,QAASC,gBAAgB;AAAA,IAACC,OAAOC,YAAAA,SAASC,KAAK4D,kBAAkBC,MAAM,CAAC;AAAA,EAAA,CAAE,GAKlFoC,YAAW,oBAAIC,KAAAA,GAClBC,YAAAA,EACAC,QAAQ,eAAe,GAAG,EAC1B9G,YAAAA,GACG+G,iBAAiBpH,sBAAKqH,KAAKC,YAAAA,QAAGC,OAAAA,GAAU,mBAAmBP,QAAQ,SAAS,GAC5EQ,eAAeC,oBAAGC,kBAAkBN,cAAc;AAExD,mBAAiBpG,QAAQN,OAAO;AAC9B,UAAM+D,WAAWtD,KAAKC,MAAMJ,IAAI;AAEhC,QAAIwD,sBAAsBC,QAAQ,GAAG;AACnCsC,kBAAYhD,IAAIU,SAASkD,GAAG;AAC5B,iBAAWC,eAAepE,gBAAgBiB,QAAQ;AAChDqC,sBAAc/C,IAAI6D,WAAW;AAG/BJ,mBAAazI,MAAM,GAAGiC,IAAI;AAAA,CAAI;AAAA,IAChC;AAEA6F,uBACAtD,OAAOhF,OAAOsJ,eAAe5F,KAAK;AAAA,MAAC4E;AAAAA,MAAiBT;AAAAA,IAAAA,CAAc;AAAA,EACpE;AAEA,SAAA,MAAM,IAAI1H,QAAc,CAACC,SAASC,WAChC4I,aAAanG,MAAOpC,CAAAA,QAASA,MAAML,OAAOK,GAAG,IAAIN,QAAAA,CAAU,CAC7D,GAEA4E,OAAOhF,OAAOsJ,eAAe7I,OAC7BuE,OAAO9B,MAAMqG,eAAe;AAAA,IAACC,0BAA0BhB,YAAYiB;AAAAA,EAAAA,CAAK,GAKjE;AAAA,IAACjB;AAAAA,IAAaD;AAAAA,IAAemB,cAHfA,MACnBzH,oCAAoCiH,YAAAA,QAAGS,iBAAiBd,cAAc,CAAC;AAAA,IAEvBe,SAASA,MAAMV,YAAAA,QAAGW,SAASC,GAAGjB,cAAc;AAAA,EAAA;AAChG;AAEA,eAAekB,iBAAiBC,UAAkB;AAChD,QAAMzB,gBAAgB,oBAAInD,OACpBoD,cAAc,oBAAIpD,IAAAA,GAClBsE,eAAeA,MAAMzH,oCAAoCiH,YAAAA,QAAGS,iBAAiBK,QAAQ,CAAC;AAE5F,mBAAiB9D,YAAYwD,aAAAA;AAC3B,QAAIzD,sBAAsBC,QAAQ,GAAG;AACnCsC,kBAAYhD,IAAIU,SAASkD,GAAG;AAC5B,iBAAWC,eAAepE,gBAAgBiB,QAAQ;AAChDqC,sBAAc/C,IAAI6D,WAAW;AAAA,IAEjC;AAGFrE,SAAAA,OAAO9B,MAAMqG,eAAe;AAAA,IAACC,0BAA0BhB,YAAYiB;AAAAA,EAAAA,CAAK,GAEjE;AAAA,IAACjB;AAAAA,IAAaD;AAAAA,IAAemB;AAAAA,IAAcE,SAASK;AAAAA,EAAAA;AAC7D;AAQA,eAAeC,wBAAwB;AAAA,EACrC/C,QAAAA;AAAAA,EACAqB;AAAAA,EACAD,eAAe4B;AACe,GAAG;AACjC,QAAMC,cAAc,IAAIhF,IAAIoD,WAAW,GACjC6B,aAAaC,MAAM9H,KAAK2H,cAAc,EACzCI,OAAQxE,CAAAA,OAAO,CAACqE,YAAYI,IAAIzE,EAAE,KAAKD,UAAUC,EAAE,CAAC,EACpD0E,KAAAA,GAEGC,UAAUL,WAAWM,OACzB,CAACC,KAAKC,MAAMC,UAAU;AACpB,UAAMC,aAAaC,KAAKC,MAAMH,QAAQjH,8BAA8B;AAEpEqH,WADcN,IAAIG,UAAU,EACtBI,KAAKN,IAAI,GACRD;AAAAA,EACT,GACAN,MAAM9H,KAAe;AAAA,IACnB5C,QAAQoL,KAAKI,KAAKf,WAAWzK,SAASiE,8BAA8B;AAAA,EAAA,CACrE,EAAEwH,IAAI,MAAM,CAAA,CAAE,CACjB;AAEA,aAAWH,SAASR,SAAS;AAC3B,UAAM;AAAA,MAACY;AAAAA,IAAAA,IAAW,MAAMnE,QAAOoE,QAA8B;AAAA,MAC3DC,KAAKrE,QAAOsE,WAAW,OAAOP,MAAMpC,KAAK,GAAG,CAAC;AAAA,MAC7C4C,MAAM;AAAA,MACNC,OAAO;AAAA,QAACC,gBAAgB;AAAA,MAAA;AAAA,MACxBC,KAAK;AAAA,IAAA,CACN,GAEKC,aAAaR,QAAQX,OAAmD,CAACC,KAAKC,UAClFD,IAAIC,KAAK9E,EAAE,IAAI8E,KAAKkB,QACbnB,MACN,CAAA,CAAE;AAEL,eAAW7E,MAAMmF;AAGXY,iBAAW/F,EAAE,MAAM,eACrBqE,YAAY5E,IAAIO,EAAE;AAAA,EAGxB;AACAf,SAAAA,OAAO9B,MAAM8I,4BAEN;AAAA,IAAC5B;AAAAA,EAAAA;AACV;AAEA,eAAe5D,OAAO;AAGpB,QAAM;AAAA,IAACyF,SAASC;AAAAA,EAAAA,IAAQ,MAAM,OAAO,OAAO,GAEtCC,4BAA4BC,uBAAAA,uBAAuBrI,OAAO;AAEhE,MAAIsI;AAEJ,MAAI;AACF,UAAM;AAAA,MAAClF,QAAAA;AAAAA,MAAQnD;AAAAA,IAAAA,IAAa,MAAM4C,cAAAA,GAC5B;AAAA,MAAC4B;AAAAA,MAAaD;AAAAA,MAAemB;AAAAA,MAAcE;AAAAA,IAAAA,IAAWxF,iBACxD,MAAM2F,iBAAiB3F,cAAc,IACrC,MAAMqD,mBAAmBN,OAAM;AACnCkF,iCAA6BzC;AAC7B,UAAM;AAAA,MAACQ;AAAAA,IAAAA,IAAe,MAAMF,wBAAwB;AAAA,MAAC/C,QAAAA;AAAAA,MAAQoB;AAAAA,MAAeC;AAAAA,IAAAA,CAAY,GAElF8D,YAAqDC,CAAAA,YACzDpF,QAAOqF,WAAWD,OAAO,GAErBE,oBAA4DA,CAAC;AAAA,MAAC1G;AAAAA,IAAAA,MAClE5F,QAAQC,QAAQgK,YAAYI,IAAIzE,EAAE,CAAC,GAE/B2G,WAAYC,CAAAA,YAAgC;AAChD,UAAIC,eAAe;AACnB,iBAAWC,UAAUF,SAAS;AAC5B,YAAIE,OAAOvI,UAAU,QAAS,QAAO;AACjCuI,eAAOvI,UAAU,cAAWsI,eAAe;AAAA,MACjD;AAEA,aAAIA,eAAqB,YAClB;AAAA,IACT;AAEA,QAAIE,iBAAiB;AAErB,UAAMC,WAAW,OAAO7G,aAA6B;AACnD,UAAIyG;AAEJ,UAAI;AACF,cAAMK,iCAAiB,SAAS,GAE1BC,SAAS,MAAM9M,QAAQ+M,KAAK,CAChCC,wBAAiB;AAAA,UACfjH;AAAAA,UACAlC;AAAAA,UACAsI;AAAAA,UACAG;AAAAA,UACAW,aAAa;AAAA,UACb7I;AAAAA,UACAC;AAAAA,QAAAA,CACD,GACD,IAAIrE,QAAyBC,CAAAA,YAC3BE,WAAW,MAAMF,QAAQ4M,OAAO,GAAGpJ,2BAA2B,CAChE,CAAC,CACF;AAED,YAAIqJ,WAAWD;AACb,gBAAM,IAAI/J,MACR,aAAaiD,SAASkD,GAAG,+BAA+BxF,2BAA2B,KACrF;AAGF+I,kBAAUM,OAEP5B,IAAI,CAAC;AAAA,UAAC3F;AAAAA,UAAM,GAAGmH;AAAAA,QAAAA,MAAYA,MAAM,EAEjCtC,OAAQsC,CAAAA,WAAW;AAClB,gBAAMQ,cAAczI,YAAYiI,OAAOvI,KAAK,GACtCgJ,iBACJ1I,YAAYN,KAAK,KAAiCM,YAAYG;AAChE,iBAAOsI,eAAeC;AAAAA,QACxB,CAAC;AAAA,MACL,SAAS5M,KAAK;AAMZiM,kBAAU,CACR;AAAA,UACEpJ,SAJY,8CAFdgK,OAAAA,SAAS7M,GAAG,KAAK,OAAOA,IAAI6C,WAAY,WAAW7C,IAAI6C,UAAU,eAEO;AAAA,UAKtEe,OAAO;AAAA,UACP7C,MAAM,CAAA;AAAA,QAAA,CACP;AAAA,MAEL;AAEAqL;AAEA,YAAMU,YACJ/I,cACA,GAAGA,UAAU,GAAGhD,cAAAA,QAAKrB,QACnB4D,UAAU+C,UACV,mBAAmB0G,mBAAmBvH,SAASkD,GAAG,CAAC,SAASqE,mBAC1DvH,SAASC,KACX,CAAC,EACH,CAAC;AAEHnB,aAAOhF,OAAO0N,WAAWhK,KAAK;AAAA,QAC5BiK,YAAYzH,SAASkD;AAAAA,QACrBwE,cAAc1H,SAASC;AAAAA,QACvB0H,UAAU3H,SAAS4H;AAAAA,QACnB,GAAIN,aAAa;AAAA,UAACA;AAAAA,QAAAA;AAAAA,QAClBb;AAAAA,QACAG;AAAAA,QACAxI,OAAOoI,SAASC,OAAO;AAAA,MAAA,CACxB;AAAA,IACH;AAEA,UAAMT,KAAKxC,aAAAA,GAAgBqD,UAAU;AAAA,MAACgB,aAAapK;AAAAA,IAAAA,CAA2B,GAE9EqB,OAAOhF,OAAO0N,WAAWjN,IAAAA;AAAAA,EAC3B,UAAA;AACE,UAAM4L,6BAAAA,GACNF,0BAAAA;AAAAA,EACF;AACF;"}