@safaricom-mxl/nextjs-turbo-redis-cache
Next.js Redis cache handler
1 line • 64 kB
Source Map (JSON)
{"version":3,"sources":["../src/RedisStringsHandler.ts","../src/utils/debug.ts","../src/DeduplicatedRequestHandler.ts","../src/SyncedMap.ts","../src/utils/json.ts","../src/CachedHandler.ts","../src/index.ts"],"sourcesContent":["import { createClient, type RedisClientOptions } from \"redis\";\nimport { DeduplicatedRequestHandler } from \"./DeduplicatedRequestHandler\";\nimport { SyncedMap } from \"./SyncedMap\";\nimport { debug } from \"./utils/debug\";\nimport { bufferReplacer, bufferReviver } from \"./utils/json\";\n\nexport type Client = ReturnType<typeof createClient>;\n\nexport type CacheEntry = {\n value: unknown;\n lastModified: number;\n tags: string[];\n};\n\nexport function redisErrorHandler<T extends Promise<unknown>>(\n _debugInfo: string,\n redisCommandResult: T\n): T {\n const _beforeTimestamp = performance.now();\n\n // Guard against undefined or non-Promise values (can happen in tests)\n if (!redisCommandResult || typeof redisCommandResult.catch !== \"function\") {\n return redisCommandResult;\n }\n\n return redisCommandResult.catch((error) => {\n throw error;\n }) as T;\n}\n\nif (process.env.DEBUG_CACHE_HANDLER) {\n // This is a test to check if the event loop is lagging. If it lags, increase CPU of container\n setInterval(() => {\n const start = performance.now();\n setImmediate(() => {\n const duration = performance.now() - start;\n if (duration > 100) {\n debug(\n \"yellow\",\n `RedisStringsHandler detected an event loop lag of: ${duration.toFixed(2)}ms. If your container is hosted in a cloud provider with container suspension this is normal. If not you should increase the CPU of your container.`\n );\n }\n });\n }, 500);\n}\n\nexport type CreateRedisStringsHandlerOptions = {\n /** Redis redisUrl to use.\n * @default process.env.REDIS_URL? process.env.REDIS_URL : process.env.REDISHOST\n ? `redis://${process.env.REDISHOST}:${process.env.REDISPORT}`\n : 'redis://localhost:6379'\n */\n redisUrl?: string;\n /** Redis database number to use. Uses DB 0 for production, DB 1 otherwise\n * @default process.env.VERCEL_ENV === 'production' ? 0 : 1\n */\n database?: number;\n /** Prefix added to all Redis keys\n * @default process.env.VERCEL_URL || 'UNDEFINED_URL_'\n */\n keyPrefix?: string;\n /** Timeout in milliseconds for time critical Redis operations (during cache get, which blocks site rendering).\n * If redis get is not fulfilled within this time, the cache handler will return null so site rendering will\n * not be blocked further and site can fallback to re-render/re-fetch the content.\n * @default 500\n */\n getTimeoutMs?: number;\n /** Number of entries to query in one batch during full sync of shared tags hash map\n * @default 250\n */\n revalidateTagQuerySize?: number;\n /** Key used to store shared tags hash map in Redis\n * @default '__sharedTags__'\n */\n sharedTagsKey?: string;\n /** Average interval in milliseconds between tag map full re-syncs\n * @default 3600000 (1 hour)\n */\n avgResyncIntervalMs?: number;\n /** Enable deduplication of Redis get requests via internal in-memory cache\n * @default true\n */\n redisGetDeduplication?: boolean;\n /** Time in milliseconds to cache Redis get results in memory. 
Set this to 0 to disable in-memory caching completely\n * @default 10000\n */\n inMemoryCachingTime?: number;\n /** Default stale age in seconds for cached items\n * @default 1209600 (14 days)\n */\n defaultStaleAge?: number;\n /** Function to calculate expire age (redis TTL value) from stale age\n * @default Production: staleAge * 2, Other: staleAge * 1.2\n */\n estimateExpireAge?: (staleAge: number) => number;\n /** Kill container on Redis client error if error threshold is reached\n * @default 0 (0 means no error threshold)\n */\n killContainerOnErrorThreshold?: number;\n /** Additional Redis client socket options\n * @example { tls: true, rejectUnauthorized: false }\n */\n socketOptions?: RedisClientOptions[\"socket\"];\n /** Additional Redis client options to be passed directly to createClient\n * @example { username: 'user', password: 'pass' }\n */\n clientOptions?: Omit<RedisClientOptions, \"url\" | \"database\" | \"socket\">;\n};\n\n// Identifier prefix used by Next.js to mark automatically generated cache tags\n// These tags are created internally by Next.js for route-based invalidation\nconst NEXT_CACHE_IMPLICIT_TAG_ID = \"_N_T_\";\n\n// Redis key used to store a map of tags and their last revalidation timestamps\n// This helps track when specific tags were last invalidated\nconst REVALIDATED_TAGS_KEY = \"__revalidated_tags__\";\n\nconst killContainerOnErrorCount = 0;\nexport default class RedisStringsHandler {\n private client: Client;\n private sharedTagsMap: SyncedMap<string[]>;\n private revalidatedTagsMap: SyncedMap<number>;\n private inMemoryDeduplicationCache: SyncedMap<Promise<string | null>>;\n private getTimeoutMs: number;\n private redisGet: (key: string) => Promise<string | null>;\n private redisDeduplicationHandler: DeduplicatedRequestHandler<\n (key: string) => Promise<string | null>,\n string | null\n >;\n private deduplicatedRedisGet: (\n key: string\n ) => (key: string) => Promise<string | null>;\n private keyPrefix: string;\n private redisGetDeduplication: boolean;\n private inMemoryCachingTime: number;\n private defaultStaleAge: number;\n private estimateExpireAge: (staleAge: number) => number;\n private killContainerOnErrorThreshold: number;\n\n constructor({\n redisUrl = process.env.REDIS_URL\n ? process.env.REDIS_URL\n : process.env.REDISHOST\n ? `redis://${process.env.REDISHOST}:${process.env.REDISPORT}`\n : \"redis://localhost:6379\",\n database = process.env.VERCEL_ENV === \"production\" ? 0 : 1,\n keyPrefix = process.env.VERCEL_URL || \"UNDEFINED_URL_\",\n sharedTagsKey = \"__sharedTags__\",\n getTimeoutMs = process.env.REDIS_COMMAND_TIMEOUT_MS\n ? (Number.parseInt(process.env.REDIS_COMMAND_TIMEOUT_MS, 10) ?? 500)\n : 500,\n revalidateTagQuerySize = 250,\n avgResyncIntervalMs = 60 * 60 * 1000,\n redisGetDeduplication = true,\n inMemoryCachingTime = 10_000,\n defaultStaleAge = 60 * 60 * 24 * 14,\n estimateExpireAge = (staleAge) =>\n process.env.VERCEL_ENV === \"production\" ? staleAge * 2 : staleAge * 1.2,\n killContainerOnErrorThreshold = process.env\n .KILL_CONTAINER_ON_ERROR_THRESHOLD\n ? 
(Number.parseInt(process.env.KILL_CONTAINER_ON_ERROR_THRESHOLD, 10) ??\n 0)\n : 0,\n socketOptions,\n clientOptions,\n }: CreateRedisStringsHandlerOptions) {\n try {\n this.keyPrefix = keyPrefix;\n this.redisGetDeduplication = redisGetDeduplication;\n this.inMemoryCachingTime = inMemoryCachingTime;\n this.defaultStaleAge = defaultStaleAge;\n this.estimateExpireAge = estimateExpireAge;\n this.killContainerOnErrorThreshold = killContainerOnErrorThreshold;\n this.getTimeoutMs = getTimeoutMs;\n // Create Redis client with properly typed configuration\n this.client = createClient({\n url: redisUrl,\n pingInterval: 10_000, // Useful with Redis deployments that do not use TCP Keep-Alive. Restarts the connection if it is idle for too long.\n ...(database !== 0 ? { database } : {}),\n ...(socketOptions ? { socket: { ...socketOptions } } : {}),\n ...(clientOptions || {}),\n });\n\n this.client.on(\"error\", (_error) => {\n setTimeout(() => this.client.connect().catch((_error) => {}), 1000);\n if (\n this.killContainerOnErrorThreshold > 0 &&\n killContainerOnErrorCount >= this.killContainerOnErrorThreshold\n ) {\n this.client.disconnect();\n setTimeout(() => {\n process.exit(1);\n }, 500);\n }\n });\n\n this.client\n .connect()\n .then(() => {})\n .catch(() => {\n this.client.connect().catch((error) => {\n this.client.disconnect();\n throw error;\n });\n });\n\n const filterKeys = (key: string): boolean =>\n key !== REVALIDATED_TAGS_KEY && key !== sharedTagsKey;\n\n this.sharedTagsMap = new SyncedMap<string[]>({\n client: this.client,\n keyPrefix,\n redisKey: sharedTagsKey,\n database,\n querySize: revalidateTagQuerySize,\n filterKeys,\n resyncIntervalMs:\n avgResyncIntervalMs -\n avgResyncIntervalMs / 10 +\n Math.random() * (avgResyncIntervalMs / 10),\n });\n\n this.revalidatedTagsMap = new SyncedMap<number>({\n client: this.client,\n keyPrefix,\n redisKey: REVALIDATED_TAGS_KEY,\n database,\n querySize: revalidateTagQuerySize,\n filterKeys,\n resyncIntervalMs:\n avgResyncIntervalMs +\n avgResyncIntervalMs / 10 +\n Math.random() * (avgResyncIntervalMs / 10),\n });\n\n this.inMemoryDeduplicationCache = new SyncedMap({\n client: this.client,\n keyPrefix,\n redisKey: \"inMemoryDeduplicationCache\",\n database,\n querySize: revalidateTagQuerySize,\n filterKeys,\n customizedSync: {\n withoutRedisHashmap: true,\n withoutSetSync: true,\n },\n });\n\n // Create a wrapper function for Redis get that matches the expected signature\n const redisGet = (key: string): Promise<string | null> => {\n return this.client.get(key);\n };\n\n this.redisDeduplicationHandler = new DeduplicatedRequestHandler(\n redisGet,\n inMemoryCachingTime,\n this.inMemoryDeduplicationCache\n );\n this.redisGet = redisGet;\n this.deduplicatedRedisGet =\n this.redisDeduplicationHandler.deduplicatedFunction;\n } catch (error) {\n if (\n killContainerOnErrorThreshold > 0 &&\n killContainerOnErrorCount >= killContainerOnErrorThreshold\n ) {\n process.exit(1);\n }\n throw error;\n }\n }\n\n resetRequestCache(): void {}\n\n public async close(): Promise<void> {\n try {\n // Close all SyncedMap instances\n await Promise.all([\n this.sharedTagsMap?.close(),\n this.revalidatedTagsMap?.close(),\n this.inMemoryDeduplicationCache?.close(),\n ]);\n\n // Disconnect the main client\n if (this.client) {\n await this.client.disconnect();\n }\n } catch (_error) {}\n }\n\n private clientReadyCalls = 0;\n\n private async assertClientIsReady(): Promise<void> {\n this.clientReadyCalls++;\n if (this.clientReadyCalls > 10) {\n throw new Error(\n 
\"assertClientIsReady called more than 10 times without being ready.\"\n );\n }\n // Use a longer timeout for initial setup (60 seconds)\n const timeoutMs = this.clientReadyCalls === 1 ? 60_000 : 30_000;\n\n await Promise.race([\n Promise.all([\n this.sharedTagsMap.waitUntilReady(),\n this.revalidatedTagsMap.waitUntilReady(),\n ]),\n new Promise((_, reject) =>\n setTimeout(() => {\n reject(\n new Error(\n `assertClientIsReady: Timeout waiting for Redis maps to be ready after ${timeoutMs}ms (attempt ${this.clientReadyCalls})`\n )\n );\n }, timeoutMs)\n ),\n ]);\n\n this.clientReadyCalls = 0;\n\n if (!this.client.isReady) {\n throw new Error(\n \"assertClientIsReady: Redis client is not ready yet or connection is lost.\"\n );\n }\n }\n\n public async get(\n key: string,\n ctx:\n | {\n kind: \"APP_ROUTE\" | \"APP_PAGE\";\n isRoutePPREnabled: boolean;\n isFallback: boolean;\n }\n | {\n kind: \"FETCH\";\n revalidate: number;\n fetchUrl: string;\n fetchIdx: number;\n tags: string[];\n softTags: string[];\n isFallback: boolean;\n }\n ): Promise<CacheEntry | null> {\n try {\n if (\n ctx.kind !== \"APP_ROUTE\" &&\n ctx.kind !== \"APP_PAGE\" &&\n ctx.kind !== \"FETCH\"\n ) {\n }\n\n debug(\"green\", \"RedisStringsHandler.get() called with\", key, ctx);\n\n try {\n await this.assertClientIsReady();\n } catch (_readyError) {\n return null;\n }\n\n const clientGet = this.redisGetDeduplication\n ? this.deduplicatedRedisGet(key)\n : this.redisGet;\n const serializedCacheEntry = await redisErrorHandler(\n \"RedisStringsHandler.get(), operation: get\" +\n (this.redisGetDeduplication ? \"deduplicated\" : \"\") +\n \" \" +\n this.getTimeoutMs +\n \"ms \" +\n this.keyPrefix +\n \" \" +\n key,\n clientGet(this.keyPrefix + key)\n );\n\n debug(\n \"green\",\n \"RedisStringsHandler.get() finished with result (serializedCacheEntry)\",\n serializedCacheEntry?.substring(0, 200)\n );\n\n if (!serializedCacheEntry) {\n return null;\n }\n\n const cacheEntry: CacheEntry | null = JSON.parse(\n serializedCacheEntry,\n bufferReviver\n );\n\n debug(\n \"green\",\n \"RedisStringsHandler.get() finished with result (cacheEntry)\",\n JSON.stringify(cacheEntry).substring(0, 200)\n );\n\n if (!cacheEntry) {\n return null;\n }\n\n if (!cacheEntry?.tags) {\n }\n if (!cacheEntry?.value) {\n }\n if (!cacheEntry?.lastModified) {\n }\n\n if (ctx.kind === \"FETCH\") {\n const combinedTags = new Set([\n ...(ctx?.softTags || []),\n ...(ctx?.tags || []),\n ]);\n\n if (combinedTags.size === 0) {\n return cacheEntry;\n }\n\n // INFO: implicit tags (revalidate of nested fetch in api route/page on revalidatePath call of the page/api route). See revalidateTag() for more information\n //\n // This code checks if any of the cache tags associated with this entry (normally the internal tag of the parent page/api route containing the fetch request)\n // have been revalidated since the entry was last modified. 
If any tag was revalidated more recently than the entry's\n // lastModified timestamp, then the cached content is considered stale (therefore return null) and should be removed.\n for (const tag of combinedTags) {\n // Get the last revalidation time for this tag from our revalidatedTagsMap\n const revalidationTime = this.revalidatedTagsMap.get(tag);\n\n // If we have a revalidation time for this tag and it's more recent than when\n // this cache entry was last modified, the entry is stale\n if (revalidationTime && revalidationTime > cacheEntry.lastModified) {\n const redisKey = this.keyPrefix + key;\n\n // We don't await this cleanup since it can happen asynchronously in the background.\n // The cache entry is already considered invalid at this point.\n this.client\n .unlink(redisKey)\n .catch((_err) => {})\n .finally(async () => {\n // Clean up our tag tracking maps after the Redis key is removed\n await this.sharedTagsMap.delete(key);\n await this.revalidatedTagsMap.delete(tag);\n });\n\n debug(\n \"green\",\n 'RedisStringsHandler.get() found revalidation time for tag. Cache entry is stale and will be deleted and \"null\" will be returned.',\n tag,\n redisKey,\n revalidationTime,\n cacheEntry\n );\n\n // Return null to indicate no valid cache entry was found\n return null;\n }\n }\n }\n\n return cacheEntry;\n } catch (_error) {\n if (\n this.killContainerOnErrorThreshold > 0 &&\n killContainerOnErrorCount >= this.killContainerOnErrorThreshold\n ) {\n this.client.disconnect();\n setTimeout(() => {\n process.exit(1);\n }, 500);\n }\n return null;\n }\n }\n public async set(\n key: string,\n data:\n | {\n kind: \"APP_PAGE\";\n status: number;\n headers: {\n \"x-nextjs-stale-time\": string; // timestamp in ms\n \"x-next-cache-tags\": string; // comma separated paths (tags)\n };\n html: string;\n rscData: Buffer;\n segmentData: unknown;\n postboned: unknown;\n }\n | {\n kind: \"APP_ROUTE\";\n status: number;\n headers: {\n \"cache-control\"?: string;\n \"x-nextjs-stale-time\": string; // timestamp in ms\n \"x-next-cache-tags\": string; // comma separated paths (tags)\n };\n body: Buffer;\n }\n | {\n kind: \"FETCH\";\n data: {\n headers: Record<string, string>;\n body: string; // base64 encoded\n status: number;\n url: string;\n };\n revalidate: number | false;\n },\n ctx: {\n isRoutePPREnabled: boolean;\n isFallback: boolean;\n tags?: string[];\n // Different versions of Next.js use different arguments for the same functionality\n revalidate?: number | false; // Version 15.0.3\n cacheControl?: { revalidate: 5; expire: undefined }; // Version 15.0.3\n }\n ) {\n try {\n if (\n data.kind !== \"APP_ROUTE\" &&\n data.kind !== \"APP_PAGE\" &&\n data.kind !== \"FETCH\"\n ) {\n }\n await this.assertClientIsReady();\n\n if (data.kind === \"APP_PAGE\" || data.kind === \"APP_ROUTE\") {\n const tags = data.headers[\"x-next-cache-tags\"]?.split(\",\");\n ctx.tags = [...(ctx.tags || []), ...(tags || [])];\n }\n\n // Constructing and serializing the value for storing it in redis\n const cacheEntry: CacheEntry = {\n lastModified: Date.now(),\n tags: ctx?.tags || [],\n value: data,\n };\n const serializedCacheEntry = JSON.stringify(cacheEntry, bufferReplacer);\n\n // pre seed data into deduplicated get client. 
This will reduce redis load by not requesting\n // the same value from redis which was just set.\n if (this.redisGetDeduplication) {\n this.redisDeduplicationHandler.seedRequestReturn(\n key,\n serializedCacheEntry\n );\n }\n\n // TODO: implement expiration based on cacheControl.expire argument, -> probably relevant for cacheLife and \"use cache\" etc.: https://nextjs.org/docs/app/api-reference/functions/cacheLife\n // Constructing the expire time for the cache entry\n const revalidate =\n // For fetch requests in newest versions, the revalidate context property is never used, and instead the revalidate property of the passed-in data is used\n (data.kind === \"FETCH\" && data.revalidate) ||\n ctx.revalidate ||\n ctx.cacheControl?.revalidate ||\n (data as { revalidate?: number | false })?.revalidate;\n const expireAt =\n revalidate && Number.isSafeInteger(revalidate) && revalidate > 0\n ? this.estimateExpireAge(revalidate)\n : this.estimateExpireAge(this.defaultStaleAge);\n\n // Setting the cache entry in redis\n const setOperation: Promise<string | null> = redisErrorHandler(\n \"RedisStringsHandler.set(), operation: set \" +\n this.keyPrefix +\n \" \" +\n key,\n this.client.set(this.keyPrefix + key, serializedCacheEntry, {\n EX: expireAt,\n })\n );\n\n debug(\n \"blue\",\n \"RedisStringsHandler.set() will set the following serializedCacheEntry\",\n this.keyPrefix,\n key,\n data,\n ctx,\n serializedCacheEntry?.substring(0, 200),\n expireAt\n );\n\n // Setting the tags for the cache entry in the sharedTagsMap (locally stored hashmap synced via redis)\n let setTagsOperation: Promise<void> | undefined;\n if (ctx.tags && ctx.tags.length > 0) {\n const currentTags = this.sharedTagsMap.get(key);\n const currentIsSameAsNew =\n currentTags?.length === ctx.tags.length &&\n currentTags.every((v) => ctx.tags?.includes(v)) &&\n ctx.tags.every((v) => currentTags.includes(v));\n\n if (!currentIsSameAsNew) {\n setTagsOperation = this.sharedTagsMap.set(\n key,\n structuredClone(ctx.tags) as string[]\n );\n }\n }\n\n debug(\n \"blue\",\n \"RedisStringsHandler.set() will set the following sharedTagsMap\",\n key,\n ctx.tags as string[]\n );\n\n await Promise.all([setOperation, setTagsOperation]);\n } catch (error) {\n if (\n this.killContainerOnErrorThreshold > 0 &&\n killContainerOnErrorCount >= this.killContainerOnErrorThreshold\n ) {\n this.client.disconnect();\n setTimeout(() => {\n process.exit(1);\n }, 500);\n }\n throw error;\n }\n }\n\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n public async revalidateTag(tagOrTags: string | string[], ...rest: any[]) {\n try {\n debug(\n \"red\",\n \"RedisStringsHandler.revalidateTag() called with\",\n tagOrTags,\n rest\n );\n const tags = new Set([tagOrTags || []].flat());\n await this.assertClientIsReady();\n\n // find all keys that are related to this tag\n const keysToDelete: Set<string> = new Set();\n\n for (const tag of tags) {\n // INFO: implicit tags (revalidate of nested fetch in api route/page on revalidatePath call of the page/api route)\n //\n // Invalidation logic for fetch requests that are related to a invalidated page.\n // revalidateTag is called for the page tag (_N_T_...) 
and the fetch request needs to be invalidated as well\n // unfortunately this is not possible since the revalidateTag is not called with any data that would allow us to find the cache entry of the fetch request\n // in case of a fetch request get method call, the get method of the cache handler is called with some information about the pages/routes the fetch request is inside\n // therefore we only mark the page/route as stale here (with help of the revalidatedTagsMap)\n // and delete the cache entry of the fetch request on the next request to the get function\n if (tag.startsWith(NEXT_CACHE_IMPLICIT_TAG_ID)) {\n const now = Date.now();\n debug(\n \"red\",\n \"RedisStringsHandler.revalidateTag() set revalidation time for tag\",\n tag,\n \"to\",\n now\n );\n await this.revalidatedTagsMap.set(tag, now);\n }\n }\n\n // Scan the whole sharedTagsMap for keys that are dependent on any of the revalidated tags\n for (const [key, sharedTags] of this.sharedTagsMap.entries()) {\n if (sharedTags.some((tag) => tags.has(tag))) {\n keysToDelete.add(key);\n }\n }\n\n debug(\n \"red\",\n \"RedisStringsHandler.revalidateTag() found\",\n keysToDelete,\n \"keys to delete\"\n );\n\n // exit early if no keys are related to this tag\n if (keysToDelete.size === 0) {\n return;\n }\n\n // prepare deletion of all keys in redis that are related to this tag\n const redisKeys = Array.from(keysToDelete);\n const fullRedisKeys = redisKeys.map((key) => this.keyPrefix + key);\n const deleteKeysOperation = redisErrorHandler(\n \"RedisStringsHandler.revalidateTag(), operation: unlink \" +\n this.keyPrefix +\n \" \" +\n fullRedisKeys,\n this.client.unlink(fullRedisKeys)\n );\n\n // also delete entries from in-memory deduplication cache if they get revalidated\n if (this.redisGetDeduplication && this.inMemoryCachingTime > 0) {\n for (const key of keysToDelete) {\n this.inMemoryDeduplicationCache.delete(key);\n }\n }\n\n // prepare deletion of entries from shared tags map if they get revalidated so that the map will not grow indefinitely\n const deleteTagsOperation = this.sharedTagsMap.delete(redisKeys);\n\n // execute keys and tag maps deletion\n await Promise.all([deleteKeysOperation, deleteTagsOperation]);\n debug(\n \"red\",\n \"RedisStringsHandler.revalidateTag() finished delete operations\"\n );\n } catch (error) {\n if (\n this.killContainerOnErrorThreshold > 0 &&\n killContainerOnErrorCount >= this.killContainerOnErrorThreshold\n ) {\n this.client.disconnect();\n setTimeout(() => {\n process.exit(1);\n }, 500);\n }\n throw error;\n }\n }\n}\n","export function debug(\n _color:\n | \"red\"\n | \"blue\"\n | \"green\"\n | \"yellow\"\n | \"cyan\"\n | \"white\"\n | \"none\" = \"none\",\n ..._args: unknown[]\n): void {\n const _colorCode = {\n red: \"\\x1b[31m\",\n blue: \"\\x1b[34m\",\n green: \"\\x1b[32m\",\n yellow: \"\\x1b[33m\",\n cyan: \"\\x1b[36m\",\n white: \"\\x1b[37m\",\n none: \"\",\n };\n const isEnabled = process.env.DEBUG_CACHE_HANDLER;\n if (isEnabled && isEnabled !== \"false\" && isEnabled !== \"0\") {\n }\n}\n\nexport function debugVerbose(..._args: unknown[]) {\n const isEnabled = process.env.DEBUG_CACHE_HANDLER_VERBOSE;\n if (isEnabled && isEnabled !== \"false\" && isEnabled !== \"0\") {\n }\n}\n","import type { SyncedMap } from \"./SyncedMap\";\nimport { debugVerbose } from \"./utils/debug\";\nexport class DeduplicatedRequestHandler<\n T extends (...args: any[]) => Promise<K>,\n K,\n> {\n private readonly inMemoryDeduplicationCache: SyncedMap<Promise<K>>;\n private readonly cachingTimeMs: number;\n private 
readonly fn: T;\n\n constructor(\n fn: T,\n cachingTimeMs: number,\n inMemoryDeduplicationCache: SyncedMap<Promise<K>>\n ) {\n this.fn = fn;\n this.cachingTimeMs = cachingTimeMs;\n this.inMemoryDeduplicationCache = inMemoryDeduplicationCache;\n }\n\n // Method to manually seed a result into the cache\n seedRequestReturn(key: string, value: K): void {\n const resultPromise = new Promise<K>((res) => res(value));\n this.inMemoryDeduplicationCache.set(key, resultPromise);\n\n debugVerbose(\n \"DeduplicatedRequestHandler.seedRequestReturn() seeded result \",\n key,\n (value as string).substring(0, 200)\n );\n\n setTimeout(() => {\n this.inMemoryDeduplicationCache.delete(key);\n }, this.cachingTimeMs);\n }\n\n // Method to handle deduplicated requests\n deduplicatedFunction = (key: string): T => {\n debugVerbose(\n \"DeduplicatedRequestHandler.deduplicatedFunction() called with\",\n key\n );\n //eslint-disable-next-line @typescript-eslint/no-this-alias\n const self = this;\n const dedupedFn = async (...args: any[]): Promise<K> => {\n // If there's already a pending request with the same key, return it\n debugVerbose(\n \"DeduplicatedRequestHandler.deduplicatedFunction().dedupedFn called with\",\n key\n );\n if (self.inMemoryDeduplicationCache?.has(key)) {\n debugVerbose(\n \"DeduplicatedRequestHandler.deduplicatedFunction().dedupedFn \",\n key,\n \"found key in inMemoryDeduplicationCache\"\n );\n const res = await self.inMemoryDeduplicationCache\n .get(key)\n ?.then((v) => structuredClone(v));\n debugVerbose(\n \"DeduplicatedRequestHandler.deduplicatedFunction().dedupedFn \",\n key,\n \"found key in inMemoryDeduplicationCache and served result from there\",\n JSON.stringify(res).substring(0, 200)\n );\n return res;\n }\n\n // If no pending request, call the original function and store the promise\n const promise = self.fn(...args);\n self.inMemoryDeduplicationCache.set(key, promise);\n\n debugVerbose(\n \"DeduplicatedRequestHandler.deduplicatedFunction().dedupedFn \",\n key,\n \"did not found key in inMemoryDeduplicationCache. Setting it now and waiting for promise to resolve\"\n );\n\n try {\n const ts = performance.now();\n const result = await promise;\n debugVerbose(\n \"DeduplicatedRequestHandler.deduplicatedFunction().dedupedFn \",\n key,\n \"promise resolved (in \",\n performance.now() - ts,\n \"ms). 
Returning result\",\n JSON.stringify(result).substring(0, 200)\n );\n return structuredClone(result);\n } finally {\n // Once the promise is resolved/rejected and caching timeout is over, remove it from the map\n setTimeout(() => {\n debugVerbose(\n \"DeduplicatedRequestHandler.deduplicatedFunction().dedupedFn \",\n key,\n \"deleting key from inMemoryDeduplicationCache after \",\n self.cachingTimeMs,\n \"ms\"\n );\n self.inMemoryDeduplicationCache.delete(key);\n }, self.cachingTimeMs);\n }\n };\n return dedupedFn as T;\n };\n}\n","// SyncedMap.ts\nimport { type Client, redisErrorHandler } from \"./RedisStringsHandler\";\nimport { debug, debugVerbose } from \"./utils/debug\";\n\ntype CustomizedSync = {\n withoutRedisHashmap?: boolean;\n withoutSetSync?: boolean;\n};\n\ntype SyncedMapOptions = {\n client: Client;\n keyPrefix: string;\n redisKey: string; // Redis Hash key\n database: number;\n querySize: number;\n filterKeys: (key: string) => boolean;\n resyncIntervalMs?: number;\n customizedSync?: CustomizedSync;\n};\n\nexport type SyncMessage<V> = {\n type: \"insert\" | \"delete\";\n key?: string;\n value?: V;\n keys?: string[];\n};\n\nconst SYNC_CHANNEL_SUFFIX = \":sync-channel:\";\nexport class SyncedMap<V> {\n private readonly client: Client;\n private readonly subscriberClient: Client;\n private readonly map: Map<string, V>;\n private readonly keyPrefix: string;\n private readonly syncChannel: string;\n private readonly redisKey: string;\n private readonly database: number;\n private readonly querySize: number;\n private readonly filterKeys: (key: string) => boolean;\n private readonly resyncIntervalMs?: number;\n private readonly customizedSync?: CustomizedSync;\n\n private readonly setupLock: Promise<void>;\n private readonly setupLockResolve!: () => void;\n\n constructor(options: SyncedMapOptions) {\n this.client = options.client;\n this.keyPrefix = options.keyPrefix;\n this.redisKey = options.redisKey;\n this.syncChannel = `${options.keyPrefix}${SYNC_CHANNEL_SUFFIX}${options.redisKey}`;\n this.database = options.database;\n this.querySize = options.querySize;\n this.filterKeys = options.filterKeys;\n this.resyncIntervalMs = options.resyncIntervalMs;\n this.customizedSync = options.customizedSync;\n\n this.map = new Map<string, V>();\n this.subscriberClient = this.client.duplicate();\n this.setupLock = new Promise<void>((resolve) => {\n this.setupLockResolve = resolve;\n });\n\n this.setup().catch((error) => {\n throw error;\n });\n }\n\n private async setup() {\n try {\n const setupPromises: Promise<void>[] = [];\n if (!this.customizedSync?.withoutRedisHashmap) {\n setupPromises.push(this.initialSync());\n this.setupPeriodicResync();\n }\n setupPromises.push(this.setupPubSub());\n await Promise.all(setupPromises);\n this.setupLockResolve();\n } catch (error) {\n // Still resolve the setup lock to prevent indefinite waiting\n this.setupLockResolve();\n throw error;\n }\n }\n\n private async initialSync() {\n let cursor = 0;\n const _hScanOptions = { COUNT: this.querySize };\n do {\n const hScanResult = await redisErrorHandler(\n \"SyncedMap.initialSync(), operation: hScan \" +\n this.syncChannel +\n \" \" +\n this.keyPrefix +\n \" \" +\n this.redisKey +\n \" \" +\n cursor +\n \" \" +\n this.querySize,\n this.client.hScan(\n this.keyPrefix + this.redisKey,\n cursor.toString(),\n \"COUNT\",\n this.querySize.toString()\n )\n );\n\n // hScanResult formats:\n // 1. ['0', ['field1','value1', ...]]\n // 2. { cursor: '0', tuples: [{field,value}, ...] 
}\n let nextCursor = 0;\n if (Array.isArray(hScanResult)) {\n // Format 1\n nextCursor = Number.parseInt(hScanResult[0] as string, 10);\n const elems = hScanResult[1] as string[];\n for (let i = 0; i < elems.length; i += 2) {\n const field = elems[i];\n const value = elems[i + 1];\n if (this.filterKeys(field)) {\n try {\n const parsed = JSON.parse(value);\n this.map.set(field, parsed);\n } catch {\n // ignore parse errors\n }\n }\n }\n } else if (hScanResult && (hScanResult as any).tuples) {\n // Format 2\n const obj = hScanResult as any;\n nextCursor =\n typeof obj.cursor === \"string\"\n ? Number.parseInt(obj.cursor, 10)\n : obj.cursor;\n for (const { field, value } of obj.tuples) {\n if (this.filterKeys(field)) {\n try {\n const parsed = JSON.parse(value);\n this.map.set(field, parsed);\n } catch {}\n }\n }\n }\n\n cursor = nextCursor;\n } while (cursor !== 0);\n\n // Clean up keys not in Redis\n await this.cleanupKeysNotInRedis();\n }\n\n private async cleanupKeysNotInRedis() {\n let cursor = 0;\n const scanOptions = { COUNT: this.querySize, MATCH: `${this.keyPrefix}*` };\n let remoteKeys: string[] = [];\n do {\n const remoteKeysPortion = await redisErrorHandler(\n \"SyncedMap.cleanupKeysNotInRedis(), operation: scan \" + this.keyPrefix,\n this.client.scan(cursor.toString(), scanOptions)\n );\n let nextCursor = 0;\n if (Array.isArray(remoteKeysPortion)) {\n // Format ['0', ['key1','key2']]\n nextCursor = Number.parseInt(remoteKeysPortion[0] as string, 10);\n const keysPart = remoteKeysPortion[1] as string[];\n remoteKeys = remoteKeys.concat(keysPart);\n } else {\n if (\n remoteKeysPortion &&\n Array.isArray((remoteKeysPortion as any).keys)\n ) {\n remoteKeys = remoteKeys.concat((remoteKeysPortion as any).keys);\n }\n nextCursor =\n typeof (remoteKeysPortion as any)?.cursor === \"string\"\n ? 
Number.parseInt((remoteKeysPortion as any).cursor, 10)\n : (remoteKeysPortion as any)?.cursor || 0;\n }\n cursor = nextCursor;\n } while (cursor !== 0);\n\n const remoteKeysSet = new Set(\n remoteKeys.map((key) => key.substring(this.keyPrefix.length))\n );\n\n const keysToDelete: string[] = [];\n for (const key of this.map.keys()) {\n const keyStr = key as unknown as string;\n if (!remoteKeysSet.has(keyStr) && this.filterKeys(keyStr)) {\n keysToDelete.push(keyStr);\n }\n }\n\n if (keysToDelete.length > 0) {\n await this.delete(keysToDelete);\n }\n }\n\n private setupPeriodicResync() {\n if (this.resyncIntervalMs && this.resyncIntervalMs > 0) {\n setInterval(() => {\n this.initialSync().catch((_error) => {});\n }, this.resyncIntervalMs);\n }\n }\n\n private async setupPubSub() {\n const syncHandler = async (message: string) => {\n const syncMessage: SyncMessage<V> = JSON.parse(message);\n if (syncMessage.type === \"insert\") {\n if (syncMessage.key !== undefined && syncMessage.value !== undefined) {\n this.map.set(syncMessage.key, syncMessage.value);\n }\n } else if (syncMessage.type === \"delete\" && syncMessage.keys) {\n for (const key of syncMessage.keys) {\n this.map.delete(key);\n }\n }\n };\n\n const keyEventHandler = async (key: string, message: string) => {\n debug(\n \"yellow\",\n \"SyncedMap.keyEventHandler() called with message\",\n this.redisKey,\n message,\n key\n );\n // const key = message;\n if (key.startsWith(this.keyPrefix)) {\n const keyInMap = key.substring(this.keyPrefix.length);\n if (this.filterKeys(keyInMap)) {\n debugVerbose(\n \"SyncedMap.keyEventHandler() key matches filter and will be deleted\",\n this.redisKey,\n message,\n key\n );\n await this.delete(keyInMap, true);\n }\n } else {\n debugVerbose(\n \"SyncedMap.keyEventHandler() key does not have prefix\",\n this.redisKey,\n message,\n key\n );\n }\n };\n await this.subscriberClient.connect().catch(async () => {\n // Wait a bit before retrying\n await new Promise((resolve) => setTimeout(resolve, 1000));\n await this.subscriberClient.connect().catch((error) => {\n throw error;\n });\n });\n\n // Check if keyspace event configuration is set correctly\n if (\n (process.env.SKIP_KEYSPACE_CONFIG_CHECK || \"\").toUpperCase() !== \"TRUE\"\n ) {\n const keyspaceEventConfig = (\n await this.subscriberClient.configGet(\"notify-keyspace-events\")\n )?.[\"notify-keyspace-events\"];\n if (!keyspaceEventConfig.includes(\"E\")) {\n throw new Error(\n 'Keyspace event configuration is set to \"' +\n keyspaceEventConfig +\n \"\\\" but has to include 'E' for Keyevent events, published with __keyevent@<db>__ prefix. We recommend to set it to 'Exe' like so `redis-cli -h localhost config set notify-keyspace-events Exe`\"\n );\n }\n if (\n !(\n keyspaceEventConfig.includes(\"A\") ||\n (keyspaceEventConfig.includes(\"x\") &&\n keyspaceEventConfig.includes(\"e\"))\n )\n ) {\n throw new Error(\n 'Keyspace event configuration is set to \"' +\n keyspaceEventConfig +\n \"\\\" but has to include 'A' or 'x' and 'e' for expired and evicted events. We recommend to set it to 'Exe' like so `redis-cli -h localhost config set notify-keyspace-events Exe`\"\n );\n }\n }\n\n await Promise.all([\n // We use a custom channel for insert/delete For the following reason:\n // With custom channel we can delete multiple entries in one message. 
If we would listen to unlink / del we\n // could get thousands of messages for one revalidateTag (For example revalidateTag(\"algolia\") would send an enormous amount of network packages)\n // Also we can send the value in the message for insert\n this.subscriberClient.subscribe(this.syncChannel, syncHandler),\n // Subscribe to Redis keyevent notifications for evicted and expired keys\n this.subscriberClient.subscribe(\n `__keyevent@${this.database}__:evicted`,\n keyEventHandler\n ),\n this.subscriberClient.subscribe(\n `__keyevent@${this.database}__:expired`,\n keyEventHandler\n ),\n ]);\n\n // Error handling for reconnection\n this.subscriberClient.on(\"error\", async (_err) => {\n try {\n await this.subscriberClient.disconnect();\n this.subscriberClient = this.client.duplicate();\n await this.setupPubSub();\n } catch (_reconnectError) {}\n });\n }\n\n public async waitUntilReady() {\n await this.setupLock;\n }\n\n public get(key: string): V | undefined {\n debugVerbose(\n \"SyncedMap.get() called with key\",\n key,\n JSON.stringify(this.map.get(key))?.substring(0, 100)\n );\n return this.map.get(key);\n }\n\n public async set(key: string, value: V): Promise<void> {\n debugVerbose(\n \"SyncedMap.set() called with key\",\n key,\n JSON.stringify(value)?.substring(0, 100)\n );\n this.map.set(key, value);\n const operations = [];\n\n // This is needed if we only want to sync delete commands. This is especially useful for non serializable data like a promise map\n if (this.customizedSync?.withoutSetSync) {\n return;\n }\n if (!this.customizedSync?.withoutRedisHashmap) {\n operations.push(\n redisErrorHandler(\n \"SyncedMap.set(), operation: hSet \" +\n this.syncChannel +\n \" \" +\n this.keyPrefix +\n \" \" +\n key,\n this.client.hSet(\n this.keyPrefix + this.redisKey,\n key as unknown as string,\n JSON.stringify(value)\n )\n )\n );\n }\n\n const insertMessage: SyncMessage<V> = {\n type: \"insert\",\n key: key as unknown as string,\n value,\n };\n operations.push(\n redisErrorHandler(\n \"SyncedMap.set(), operation: publish \" +\n this.syncChannel +\n \" \" +\n this.keyPrefix +\n \" \" +\n key,\n this.client.publish(this.syncChannel, JSON.stringify(insertMessage))\n )\n );\n await Promise.all(operations);\n }\n\n public async delete(\n keys: string[] | string,\n withoutSyncMessage = false\n ): Promise<void> {\n debugVerbose(\n \"SyncedMap.delete() called with keys\",\n this.redisKey,\n keys,\n withoutSyncMessage\n );\n\n const keysArray = Array.isArray(keys) ? 
keys : [keys];\n const operations = [];\n\n for (const key of keysArray) {\n this.map.delete(key);\n }\n\n if (!this.customizedSync?.withoutRedisHashmap) {\n operations.push(\n redisErrorHandler(\n \"SyncedMap.delete(), operation: hDel \" +\n this.syncChannel +\n \" \" +\n this.keyPrefix +\n \" \" +\n this.redisKey +\n \" \" +\n keysArray,\n this.client.hDel(this.keyPrefix + this.redisKey, keysArray)\n )\n );\n }\n\n if (!withoutSyncMessage) {\n const deletionMessage: SyncMessage<V> = {\n type: \"delete\",\n keys: keysArray,\n };\n operations.push(\n redisErrorHandler(\n \"SyncedMap.delete(), operation: publish \" +\n this.syncChannel +\n \" \" +\n this.keyPrefix +\n \" \" +\n keysArray,\n this.client.publish(this.syncChannel, JSON.stringify(deletionMessage))\n )\n );\n }\n\n await Promise.all(operations);\n debugVerbose(\n \"SyncedMap.delete() finished operations\",\n this.redisKey,\n keys,\n operations.length\n );\n }\n\n public has(key: string): boolean {\n return this.map.has(key);\n }\n\n public entries(): IterableIterator<[string, V]> {\n return this.map.entries();\n }\n\n public async close(): Promise<void> {\n try {\n // Disconnect the subscriber client\n if (this.subscriberClient) {\n await this.subscriberClient.disconnect();\n }\n } catch (_error) {}\n }\n}\n","// eslint-disable-next-line @typescript-eslint/no-explicit-any\nexport function bufferReviver(_: string, value: any): any {\n if (value && typeof value === \"object\" && typeof value.$binary === \"string\") {\n try {\n // Check if it's valid base64 by attempting to decode it\n const base64String = value.$binary;\n // This regex checks for valid base64 format\n if (/^[A-Za-z0-9+/]*={0,2}$/.test(base64String)) {\n return Buffer.from(base64String, \"base64\");\n }\n } catch {\n // If decoding fails, return the original object\n }\n }\n return value;\n}\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nexport function bufferReplacer(_: string, value: any): any {\n if (Buffer.isBuffer(value)) {\n return {\n $binary: value.toString(\"base64\"),\n };\n }\n if (\n value &&\n typeof value === \"object\" &&\n value?.type === \"Buffer\" &&\n Array.isArray(value.data)\n ) {\n return {\n $binary: Buffer.from(value.data).toString(\"base64\"),\n };\n }\n return value;\n}\n","import RedisStringsHandler, {\n type CreateRedisStringsHandlerOptions,\n} from \"./RedisStringsHandler\";\nimport { debugVerbose } from \"./utils/debug\";\n\nlet cachedHandler: RedisStringsHandler;\n\n// Function to reset the cached handler (for testing)\nexport function resetCachedHandler() {\n cachedHandler = undefined as any;\n}\n\nexport default class CachedHandler {\n constructor(options: CreateRedisStringsHandlerOptions) {\n if (!cachedHandler) {\n cachedHandler = new RedisStringsHandler(options);\n }\n }\n get(\n ...args: Parameters<RedisStringsHandler[\"get\"]>\n ): ReturnType<RedisStringsHandler[\"get\"]> {\n debugVerbose(\"CachedHandler.get called with\", args);\n return cachedHandler.get(...args);\n }\n set(\n ...args: Parameters<RedisStringsHandler[\"set\"]>\n ): ReturnType<RedisStringsHandler[\"set\"]> {\n debugVerbose(\"CachedHandler.set called with\", args);\n return cachedHandler.set(...args);\n }\n revalidateTag(\n ...args: Parameters<RedisStringsHandler[\"revalidateTag\"]>\n ): ReturnType<RedisStringsHandler[\"revalidateTag\"]> {\n debugVerbose(\"CachedHandler.revalidateTag called with\", args);\n return cachedHandler.revalidateTag(...args);\n }\n resetRequestCache(\n ...args: Parameters<RedisStringsHandler[\"resetRequestCache\"]>\n 
): ReturnType<RedisStringsHandler[\"resetRequestCache\"]> {\n // debug(\"CachedHandler.resetRequestCache called with\", args);\n return cachedHandler.resetRequestCache(...args);\n }\n}\n","import CachedHandler from \"./CachedHandler\";\nexport default CachedHandler;\n\nimport RedisStringsHandler from \"./RedisStringsHandler\";\nexport { RedisStringsHandler };\n"],"mappings":";AAAA,SAAS,oBAA6C;;;ACA/C,SAAS,MACd,SAOa,WACV,OACG;AACN,QAAM,aAAa;AAAA,IACjB,KAAK;AAAA,IACL,MAAM;AAAA,IACN,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,OAAO;AAAA,IACP,MAAM;AAAA,EACR;AACA,QAAM,YAAY,QAAQ,IAAI;AAC9B,MAAI,aAAa,cAAc,WAAW,cAAc,KAAK;AAAA,EAC7D;AACF;AAEO,SAAS,gBAAgB,OAAkB;AAChD,QAAM,YAAY,QAAQ,IAAI;AAC9B,MAAI,aAAa,cAAc,WAAW,cAAc,KAAK;AAAA,EAC7D;AACF;;;AC3BO,IAAM,6BAAN,MAGL;AAAA,EAKA,YACE,IACA,eACA,4BACA;AAuBF;AAAA,gCAAuB,CAAC,QAAmB;AACzC;AAAA,QACE;AAAA,QACA;AAAA,MACF;AAEA,YAAM,OAAO;AACb,YAAM,YAAY,UAAU,SAA4B;AAEtD;AAAA,UACE;AAAA,UACA;AAAA,QACF;AACA,YAAI,KAAK,4BAA4B,IAAI,GAAG,GAAG;AAC7C;AAAA,YACE;AAAA,YACA;AAAA,YACA;AAAA,UACF;AACA,gBAAM,MAAM,MAAM,KAAK,2BACpB,IAAI,GAAG,GACN,KAAK,CAAC,MAAM,gBAAgB,CAAC,CAAC;AAClC;AAAA,YACE;AAAA,YACA;AAAA,YACA;AAAA,YACA,KAAK,UAAU,GAAG,EAAE,UAAU,GAAG,GAAG;AAAA,UACtC;AACA,iBAAO;AAAA,QACT;AAGA,cAAM,UAAU,KAAK,GAAG,GAAG,IAAI;AAC/B,aAAK,2BAA2B,IAAI,KAAK,OAAO;AAEhD;AAAA,UACE;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAEA,YAAI;AACF,gBAAM,KAAK,YAAY,IAAI;AAC3B,gBAAM,SAAS,MAAM;AACrB;AAAA,YACE;AAAA,YACA;AAAA,YACA;AAAA,YACA,YAAY,IAAI,IAAI;AAAA,YACpB;AAAA,YACA,KAAK,UAAU,MAAM,EAAE,UAAU,GAAG,GAAG;AAAA,UACzC;AACA,iBAAO,gBAAgB,MAAM;AAAA,QAC/B,UAAE;AAEA,qBAAW,MAAM;AACf;AAAA,cACE;AAAA,cACA;AAAA,cACA;AAAA,cACA,KAAK;AAAA,cACL;AAAA,YACF;AACA,iBAAK,2BAA2B,OAAO,GAAG;AAAA,UAC5C,GAAG,KAAK,aAAa;AAAA,QACvB;AAAA,MACF;AACA,aAAO;AAAA,IACT;AA1FE,SAAK,KAAK;AACV,SAAK,gBAAgB;AACrB,SAAK,6BAA6B;AAAA,EACpC;AAAA;AAAA,EAGA,kBAAkB,KAAa,OAAgB;AAC7C,UAAM,gBAAgB,IAAI,QAAW,CAAC,QAAQ,IAAI,KAAK,CAAC;AACxD,SAAK,2BAA2B,IAAI,KAAK,aAAa;AAEtD;AAAA,MACE;AAAA,MACA;AAAA,MACC,MAAiB,UAAU,GAAG,GAAG;AAAA,IACpC;AAEA,eAAW,MAAM;AACf,WAAK,2BAA2B,OAAO,GAAG;AAAA,IAC5C,GAAG,KAAK,aAAa;AAAA,EACvB;AAwEF;;;AC/EA,IAAM,sBAAsB;AACrB,IAAM,YAAN,MAAmB;AAAA,EAgBxB,YAAY,SAA2B;AACrC,SAAK,SAAS,QAAQ;AACtB,SAAK,YAAY,QAAQ;AACzB,SAAK,WAAW,QAAQ;AACxB,SAAK,cAAc,GAAG,QAAQ,SAAS,GAAG,mBAAmB,GAAG,QAAQ,QAAQ;AAChF,SAAK,WAAW,QAAQ;AACxB,SAAK,YAAY,QAAQ;AACzB,SAAK,aAAa,QAAQ;AAC1B,SAAK,mBAAmB,QAAQ;AAChC,SAAK,iBAAiB,QAAQ;AAE9B,SAAK,MAAM,oBAAI,IAAe;AAC9B,SAAK,mBAAmB,KAAK,OAAO,UAAU;AAC9C,SAAK,YAAY,IAAI,QAAc,CAAC,YAAY;AAC9C,WAAK,mBAAmB;AAAA,IAC1B,CAAC;AAED,SAAK,MAAM,EAAE,MAAM,CAAC,UAAU;AAC5B,YAAM;AAAA,IACR,CAAC;AAAA,EACH;AAAA,EAEA,MAAc,QAAQ;AACpB,QAAI;AACF,YAAM,gBAAiC,CAAC;AACxC,UAAI,CAAC,KAAK,gBAAgB,qBAAqB;AAC7C,sBAAc,KAAK,KAAK,YAAY,CAAC;AACrC,aAAK,oBAAoB;AAAA,MAC3B;AACA,oBAAc,KAAK,KAAK,YAAY,CAAC;AACrC,YAAM,QAAQ,IAAI,aAAa;AAC/B,WAAK,iBAAiB;AAAA,IACxB,SAAS,OAAO;AAEd,WAAK,iBAAiB;AACtB,YAAM;AAAA,IACR;AAAA,EACF;AAAA,EAEA,MAAc,cAAc;AAC1B,QAAI,SAAS;AACb,UAAM,gBAAgB,EAAE,OAAO,KAAK,UAAU;AAC9C,OAAG;AACD,YAAM,cAAc,MAAM;AAAA,QACxB,+CACE,KAAK,cACL,MACA,KAAK,YACL,MACA,KAAK,WACL,MACA,SACA,MACA,KAAK;AAAA,QACP,KAAK,OAAO;AAAA,UACV,KAAK,YAAY,KAAK;AAAA,UACtB,OAAO,SAAS;AAAA,UAChB;AAAA,UACA,KAAK,UAAU,SAAS;AAAA,QAC1B;AAAA,MACF;AAKA,UAAI,aAAa;AACjB,UAAI,MAAM,QAAQ,WAAW,GAAG;AAE9B,qBAAa,OAAO,SAAS,YAAY,CAAC,GAAa,EAAE;AACzD,cAAM,QAAQ,YAAY,CAAC;AAC3B,iBAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,GAAG;AACxC,gBAAM,QAAQ,MAAM,CAAC;AACrB,gBAAM,QAAQ,MAAM,IAAI,CAAC;AACzB,cAAI,KAAK,WAAW,KAAK,GAAG;AAC1B,gBAAI;AACF,oBAAM,SAAS,KAAK,MAAM,KAAK;AAC/B,mBAAK,IAAI,IAAI,OAAO,MAAM;AAAA,YAC5B,QAAQ;AAAA,YAER;AAAA,UACF;AAAA,QACF;AAAA,MACF,WAAW,eAAgB,YAAo
B,QAAQ;AAErD,cAAM,MAAM;AACZ,qBACE,OAAO,IAAI,WAAW,WAClB,OAAO,SAAS,IAAI,QAAQ,EAAE,
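
As a usage sketch (this is not taken from the package's documentation, so treat the file name, key prefix, and option values as assumptions): the bundled index.ts default-exports the CachedHandler wrapper, which memoizes a single RedisStringsHandler per process, so a project can hand it to Next.js' cacheHandler config option through a small wrapper module and pass any of the CreateRedisStringsHandlerOptions documented in the source above.

// cache-handler.ts — illustrative wiring; point next.config's `cacheHandler` at the compiled
// output of this file and set `cacheMaxMemorySize: 0` so Next.js does not also keep its own
// in-memory copy. Every option below has an env-driven default, so `super({})` is also valid.
import CachedHandler from "@safaricom-mxl/nextjs-turbo-redis-cache";

export default class AppCacheHandler extends CachedHandler {
  constructor() {
    super({
      redisUrl: process.env.REDIS_URL, // defaults: REDIS_URL, then REDISHOST/REDISPORT, then redis://localhost:6379
      keyPrefix: "my-app:",            // hypothetical prefix; defaults to VERCEL_URL when unset
      getTimeoutMs: 500,               // default: blocking GETs give up after 500 ms so rendering can fall back
      redisGetDeduplication: true,     // default: deduplicate concurrent Redis GETs within the process
      inMemoryCachingTime: 10_000,     // default: keep deduplicated GET results for 10 s; 0 disables it
    });
  }
}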
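
The SyncedMap setup in the source refuses to start unless Redis keyspace notifications are configured: notify-keyspace-events must include 'E' plus either 'A' or both 'x' and 'e' (the error messages recommend 'Exe', and the check can be bypassed with SKIP_KEYSPACE_CONFIG_CHECK=TRUE). Below is a one-off setup sketch using the same node-redis client the handler itself uses; note that CONFIG SET may be disabled on managed Redis offerings, in which case the provider's console has to be used instead.

// configure-keyspace-events.ts — equivalent to the `redis-cli config set notify-keyspace-events Exe`
// command quoted in the error messages above.
import { createClient } from "redis";

const client = createClient({ url: process.env.REDIS_URL ?? "redis://localhost:6379" });
await client.connect();

const before = await client.configGet("notify-keyspace-events");
console.log("notify-keyspace-events before:", before["notify-keyspace-events"]);

// E = publish keyevent notifications, x = expired events, e = evicted events
await client.configSet("notify-keyspace-events", "Exe");
await client.disconnect();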
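
The named RedisStringsHandler export can also be used outside of Next.js, for example from a maintenance script that purges everything associated with a tag. This is a sketch under the assumption that Redis is reachable with the same configuration the app uses; the tag name and key prefix are hypothetical.

// invalidate-tag.ts — programmatic invalidation sketch using the named export.
import { RedisStringsHandler } from "@safaricom-mxl/nextjs-turbo-redis-cache";

const handler = new RedisStringsHandler({ keyPrefix: "my-app:" });

// Unlinks every cached key whose sharedTagsMap entry contains the tag (implicit _N_T_ tags are
// additionally recorded in revalidatedTagsMap so stale fetch entries are dropped lazily on get).
await handler.revalidateTag("products");

// Disconnects the main client and the SyncedMap subscriber clients.
await handler.close();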