UNPKG

@supabase-cache-helpers/postgrest-core

A collection of cache utilities for working with the Supabase REST API. It is not meant to be used standalone.
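Although the package is not meant to be used standalone, its exports can be called directly. Below is a minimal usage sketch (not taken from the package docs): it exercises two helpers that the index.ts shown further down re-exports, encodeObject and parseValue. The input values are hypothetical, and the expected outputs follow from the implementations visible in the bundled sources below.

import {
  encodeObject,
  parseValue,
} from '@supabase-cache-helpers/postgrest-core';

// encodeObject flattens a nested object, sorts the entries, and returns a
// URL-encoded string — handy for building deterministic cache keys.
const cacheKey = encodeObject({ id: 1, contact: { name: 'Alice' } });
console.log(cacheKey); // contact.name=Alice&id=1

// parseValue turns ISO date strings into Date objects and JSON-parsable
// strings into their parsed values; anything else is returned unchanged.
console.log(parseValue('2023-01-01T00:00:00.000Z') instanceof Date); // true
console.log(parseValue('42')); // 42
console.log(parseValue('hello')); // 'hello'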

Source map contents (1 line, 160 kB):
{"version":3,"sources":["../src/index.ts","../src/lib/query-types.ts","../src/lib/extract-paths-from-filter.ts","../src/lib/remove-first-path-element.ts","../src/lib/group-paths-recursive.ts","../src/lib/parse-select-param.ts","../src/lib/remove-alias-from-declaration.ts","../src/fetch/build-select-statement.ts","../src/fetch/dedupe.ts","../src/fetch/build-normalized-query.ts","../src/fetch/build-mutation-fetcher-response.ts","../src/lib/get.ts","../src/lib/is-plain-object.ts","../src/lib/get-table-from-url.ts","../src/lib/like-query-builder.ts","../src/lib/get-table.ts","../src/lib/cache-data-types.ts","../src/lib/response-types.ts","../src/lib/encode-object.ts","../src/lib/sort-search-param.ts","../src/lib/is-postgrest-builder.ts","../src/lib/is-postgrest-transform-builder.ts","../src/lib/set-filter-value.ts","../src/lib/is-iso-date-string.ts","../src/lib/parse-value.ts","../src/lib/parse-order-by-key.ts","../src/lib/parse-order-by.ts","../src/lib/find-filters.ts","../src/cursor-pagination-fetcher.ts","../src/delete-fetcher.ts","../src/mutate/should-revalidate-relation.ts","../src/mutate/should-revalidate-table.ts","../src/mutate/transformers.ts","../src/delete-item.ts","../src/fetcher.ts","../src/insert-fetcher.ts","../src/lib/binary-search.ts","../src/lib/if-date-get-time.ts","../src/lib/sorted-comparator.ts","../src/lib/find-index-ordered.ts","../src/mutate-item.ts","../src/offset-pagination-fetcher.ts","../src/filter/denormalize.ts","../src/lib/filter-filter-definitions-by-paths.ts","../src/lib/is-object.ts","../src/lib/like-postgrest-builder.ts","../src/lib/operators.ts","../src/lib/find-last-index.ts","../src/lib/is-not-null.ts","../src/postgrest-query-parser.ts","../src/postgrest-filter.ts","../src/postgrest-parser.ts","../src/update-fetcher.ts","../src/upsert-fetcher.ts","../src/upsert-item.ts","../src/revalidate-tables.ts"],"sourcesContent":["// cherry pick exports that are used by the adapter packages\nexport * from './fetch/build-normalized-query';\nexport * from './fetch/build-mutation-fetcher-response';\nexport * from './mutate/types';\nexport * from './lib/query-types';\nexport * from './lib/get-table';\nexport * from './lib/cache-data-types';\nexport * from './lib/response-types';\nexport * from './lib/encode-object';\nexport * from './lib/is-postgrest-builder';\nexport * from './lib/is-postgrest-transform-builder';\nexport * from './lib/get';\nexport * from './lib/set-filter-value';\nexport * from './lib/parse-value';\nexport * from './lib/parse-order-by-key';\nexport * from './lib/parse-order-by';\nexport * from './lib/find-filters';\nexport * from './lib/is-plain-object';\n\nexport * from './cursor-pagination-fetcher';\nexport * from './delete-fetcher';\nexport * from './delete-item';\nexport * from './fetcher';\nexport * from './insert-fetcher';\nexport * from './mutate-item';\nexport * from './offset-pagination-fetcher';\nexport * from './postgrest-filter';\nexport * from './postgrest-parser';\nexport * from './postgrest-query-parser';\nexport * from './update-fetcher';\nexport * from './upsert-fetcher';\nexport * from './upsert-item';\nexport * from './revalidate-tables';\n","/**\n * A function that validates whether the given input is an object of type Type\n * @returns true if obj is of type Type, false if not\n */\nexport type FilterFn<Type extends Record<string, unknown>> = (\n obj: unknown,\n) => obj is Type;\n\n/**\n * The supported value types\n */\nexport type ValueType = number | string | boolean | null | Date | object;\n\n/**\n * A function implementing a 
FilterOperators\n * @param columnValue the value of the input object to test the filter against\n * @param filterValue the value of the filter, e.g. in .eq('colname', 'filterValue'), 'filterValue' would be the filterValue\n * @returns true if the filter applies, false if not\n */\nexport type OperatorFn = (columnValue: any, filterValue: any) => boolean;\n\n/**\n * All supported operators of PostgREST\n */\nexport type FilterOperator =\n | 'or'\n | 'eq'\n | 'neq'\n | 'gt'\n | 'gte'\n | 'lt'\n | 'lte'\n | 'like'\n | 'ilike'\n | 'is'\n | 'in'\n | 'cs'\n | 'cd'\n | 'fts'\n | 'plfts';\n\n/**\n * An object describing a selected path of a query\n *\n */\nexport type Path = {\n /**\n * The aliased path if a column or relation name mapping is used within the path\n */\n alias?: string;\n /**\n * The \"real\" path of a column\n */\n path: string;\n /**\n * The full declaration of a column that includes alias, hints and inner joins\n */\n declaration: string;\n /**\n * The aggregate function applied to the path\n */\n aggregate?: string;\n};\n\n/**\n * A decomposed filter applied to a query\n */\nexport type FilterDefinition = {\n /**\n * The path to which the filter is applied\n */\n path: string;\n /**\n * The aliased path if a column or relation name mapping is used\n */\n alias?: string;\n /**\n * The operator that is applied\n */\n operator: FilterOperator;\n /**\n * Whether or not to negate the results of the filter, e.g. when .not('name', 'eq', 'Paris') is applied\n */\n negate: boolean;\n /**\n * The value of the filter\n */\n value: ValueType;\n};\n\n/**\n * A json representation of PostgREST filters that are applied to a query\n */\nexport type FilterDefinitions = (\n | { or: FilterDefinitions }\n | { and: FilterDefinitions }\n | FilterDefinition\n)[];\n\ntype ArrayElement<ArrayType extends readonly unknown[]> =\n ArrayType extends readonly (infer ElementType)[] ? ElementType : never;\n\nexport const isAndFilter = (\n f: ArrayElement<FilterDefinitions>,\n): f is { and: FilterDefinitions } =>\n Array.isArray((f as { and: FilterDefinitions }).and);\n\nexport const isOrFilter = (\n f: ArrayElement<FilterDefinitions>,\n): f is { or: FilterDefinitions } =>\n Array.isArray((f as { or: FilterDefinitions }).or);\n\nexport const isFilterDefinition = (\n f: ArrayElement<FilterDefinitions>,\n): f is FilterDefinition => !isAndFilter(f) && !isOrFilter(f);\n\nexport type OrderDefinition = {\n column: string;\n ascending: boolean;\n nullsFirst: boolean;\n foreignTable?: string;\n};\n","import {\n type FilterDefinitions,\n type Path,\n isAndFilter,\n isFilterDefinition,\n isOrFilter,\n} from './query-types';\n\nexport const extractPathsFromFilters = (f: FilterDefinitions, p: Path[]) => {\n return f.reduce<Path[]>((prev, filter) => {\n if (isAndFilter(filter)) {\n prev.push(...extractPathsFromFilters(filter.and, p));\n } else if (isOrFilter(filter)) {\n prev.push(...extractPathsFromFilters(filter.or, p));\n } else if (isFilterDefinition(filter)) {\n const relatedPath = p.find((p) => p.path === filter.path);\n const pathElements = filter.path.split('.');\n const aliasElements = filter.alias?.split('.');\n const declaration = pathElements\n .map(\n (el, idx) =>\n `${aliasElements && aliasElements[idx] !== el ? `${aliasElements[idx]}:` : ''}${el}`,\n )\n .join('.');\n prev.push({\n path: filter.path,\n alias: filter.alias,\n declaration: relatedPath ? 
relatedPath.declaration : declaration,\n });\n }\n return prev;\n }, []);\n};\n","import type { Path } from './query-types';\n\nexport const removeFirstPathElement = (p: Path): Path => {\n const aliasWithoutFirstElement = p.alias\n ? p.alias.split('.').slice(1).join('.')\n : undefined;\n const pathWithoutFirstEelment = p.path.split('.').slice(1).join('.');\n\n return {\n declaration: p.declaration.split('.').slice(1).join('.'),\n path: pathWithoutFirstEelment,\n alias:\n aliasWithoutFirstElement &&\n (aliasWithoutFirstElement.split('.').length > 1 ||\n aliasWithoutFirstElement !== pathWithoutFirstEelment)\n ? aliasWithoutFirstElement\n : undefined,\n };\n};\n","import type { Path } from './query-types';\nimport { removeFirstPathElement } from './remove-first-path-element';\n\nexport type NestedPath = {\n alias?: string;\n path: string;\n declaration: string;\n paths: (Path | NestedPath)[];\n};\n\nexport const isNestedPath = (p: Path | NestedPath): p is NestedPath =>\n Array.isArray((p as NestedPath).paths);\n\n// group paths by first path elements declaration\n// returns [Path, Path, NestedPath, NestedPath, Path]\nexport const groupPathsRecursive = (paths: Path[]): (Path | NestedPath)[] => {\n const grouped = paths.reduce<(Path | NestedPath)[]>((prev, curr) => {\n const levels = curr.path.split('.').length;\n if (levels === 1) {\n prev.push(curr);\n return prev;\n }\n\n // if has more than one level left,\n const firstLevelDeclaration = curr.declaration.split('.')[0];\n const indexOfNested = prev.findIndex(\n (p) => isNestedPath(p) && p.declaration === firstLevelDeclaration,\n );\n const pathWithoutCurrentLevel = removeFirstPathElement(curr);\n if (indexOfNested !== -1) {\n // add to nested\n (prev[indexOfNested] as NestedPath).paths.push(pathWithoutCurrentLevel);\n return prev;\n }\n // create nested\n prev.push({\n declaration: firstLevelDeclaration,\n path: curr.path.split('.')[0],\n paths: [pathWithoutCurrentLevel],\n ...(curr.alias ? { alias: curr.alias.split('.')[0] } : {}),\n });\n return prev;\n }, []);\n\n return grouped.map((p) =>\n isNestedPath(p) ? { ...p, paths: groupPathsRecursive(p.paths) } : p,\n );\n};\n","import XRegExp from 'xregexp';\n\nimport type { Path } from './query-types';\n\nexport const parseSelectParam = (s: string, currentPath?: Path): Path[] => {\n s = s.replace(/\\s/g, '');\n\n let result;\n try {\n result = XRegExp.matchRecursive(`,${s}`, '([^,\\\\(]+)\\\\(', '\\\\)', 'g', {\n valueNames: {\n '0': null,\n '1': 'tableName',\n '2': 'selectedColumns',\n '3': null,\n },\n }).map((item) => {\n if (\n item.name === 'tableName' &&\n item.value &&\n !item.value.startsWith(',')\n ) {\n item.value = ',' + item.value;\n }\n return item;\n });\n } catch (e) {\n const path = currentPath?.path\n ? 
`${currentPath?.declaration} with alias ${currentPath?.alias} at path ${currentPath?.path}`\n : 'root';\n throw new Error(`Unable to parse ${s} at ${path}`, {\n cause: e,\n });\n }\n\n const foreignTables = result.reduce((prev, curr, idx, matches) => {\n if (curr.name === 'selectedColumns' && curr.value.length > 0) {\n const name = matches[idx - 1].value.slice(1, -1);\n prev = { ...prev, [name]: curr.value };\n }\n return prev;\n }, {});\n\n const columns = s\n .replace(\n new RegExp(\n `${Object.entries(foreignTables)\n .map(([table, selectedColumns]) =>\n `${table}(${selectedColumns})`\n .replace(/\\(/g, '\\\\(')\n .replace(/\\)/g, '\\\\)')\n .replace(/\\*/g, '\\\\*'),\n )\n .join('|')}`,\n 'g',\n ),\n '',\n )\n .replace(/(,)\\1+/g, ',')\n .split(',')\n .filter((c) => c.length > 0)\n .map((c) => {\n const split = c.split(':');\n const hasAlias = split.length > 1;\n\n const aggregateSplit = split[hasAlias ? 1 : 0].split('.');\n const hasAggregate =\n aggregateSplit.length > 1 && aggregateSplit[1].endsWith('()');\n\n return {\n declaration: [currentPath?.declaration, c].filter(Boolean).join('.'),\n alias:\n hasAlias || currentPath?.alias\n ? [currentPath?.alias ?? currentPath?.path, split[0]]\n .filter(Boolean)\n .join('.')\n : undefined,\n path: [\n currentPath?.path,\n hasAggregate ? aggregateSplit[0] : split[hasAlias ? 1 : 0],\n ]\n .filter(Boolean)\n .join('.'),\n ...(hasAggregate ? { aggregate: aggregateSplit[1].slice(0, -2) } : {}),\n };\n });\n\n return [\n ...columns,\n ...Object.entries(foreignTables).flatMap(\n ([currentDeclaration, selectedColumns]) => {\n // example for declaration\n // alias:organisation!contact_organisation_id_fkey!inner\n const aliasSplit = currentDeclaration.split(':');\n\n const currentAliasElem =\n aliasSplit.length > 1 ? aliasSplit[0] : undefined;\n\n const currentPathDeclaration = aliasSplit[aliasSplit.length - 1];\n const currentPathElem = currentPathDeclaration.split('!')[0];\n\n const path = [currentPath?.path, currentPathElem]\n .filter(Boolean)\n .join('.');\n\n const alias = [\n currentPath?.alias ?? currentPath?.path,\n currentAliasElem ?? currentPathElem,\n ]\n .filter(Boolean)\n .join('.');\n\n const declaration = [currentPath?.declaration, currentDeclaration]\n .filter(Boolean)\n .join('.');\n\n return parseSelectParam(`${selectedColumns}`, {\n path,\n alias: currentPath?.alias || currentAliasElem ? alias : undefined,\n declaration,\n });\n },\n ),\n ];\n};\n","// removes alias from every level of declaration\nexport const removeAliasFromDeclaration = (d: string) =>\n d\n .split('.')\n .map((el) => el.split(':').pop() as string)\n .join('.');\n","import {\n type NestedPath,\n groupPathsRecursive,\n isNestedPath,\n} from '../lib/group-paths-recursive';\nimport type { Path } from '../lib/query-types';\n\n// Transforms a list of Path[] into a select statement\nexport const buildSelectStatement = (paths: Path[]): string => {\n return buildSelectStatementFromGroupedPaths(groupPathsRecursive(paths));\n};\n\n// Transforms a list of (Path | NestedPath)[] grouped statements into a select statement\nexport const buildSelectStatementFromGroupedPaths = (\n paths: (Path | NestedPath)[],\n): string =>\n paths\n .map((i) => {\n if (isNestedPath(i)) {\n return `${i.declaration}(${buildSelectStatement(i.paths)})`;\n }\n return `${i.alias ? 
`${i.alias}:` : ''}${i.path}`;\n })\n .join(',');\n","import { type NestedPath, isNestedPath } from '../lib/group-paths-recursive';\nimport type { Path } from '../lib/query-types';\n\nexport const DEDUPE_ALIAS_PREFIX = 'd';\n\nexport const dedupeGroupedPathsRecursive = (\n grouped: (Path | NestedPath)[],\n): (Path | NestedPath)[] => {\n const dedupeCounters = new Map<string, number>();\n\n return grouped.map((p, idx, a) => {\n // never dedupe non-nested paths because even if there is a duplicate we always want to dedupe the nested path instead\n // e.g. inbox_id,inbox_id(name) should be deduped to inbox_id,d_0_inbox_id:inbox_id(name)\n if (!isNestedPath(p)) return p;\n\n // dedupe current nested path if there is another path with the same `path`\n if (a.some((i, itemIdx) => i.path === p.path && idx !== itemIdx)) {\n const counter = dedupeCounters.get(p.path) || 0;\n dedupeCounters.set(p.path, counter + 1);\n const alias = [DEDUPE_ALIAS_PREFIX, counter, p.path].join('_');\n return {\n ...p,\n alias,\n declaration: `${alias}:${p.declaration}`,\n paths: dedupeGroupedPathsRecursive(p.paths),\n };\n }\n\n return {\n ...p,\n paths: dedupeGroupedPathsRecursive(p.paths),\n };\n });\n};\n","import { extractPathsFromFilters } from '../lib/extract-paths-from-filter';\nimport {\n type NestedPath,\n groupPathsRecursive,\n} from '../lib/group-paths-recursive';\nimport { parseSelectParam } from '../lib/parse-select-param';\nimport type { FilterDefinitions, Path } from '../lib/query-types';\nimport { removeAliasFromDeclaration } from '../lib/remove-alias-from-declaration';\nimport { buildSelectStatementFromGroupedPaths } from './build-select-statement';\nimport { dedupeGroupedPathsRecursive } from './dedupe';\n\nexport type BuildNormalizedQueryOps<Q extends string = '*'> = {\n query?: Q | null;\n // if true, will not add any paths from the cache to the query\n disabled?: boolean;\n queriesForTable: () => { paths: Path[]; filters: FilterDefinitions }[];\n};\n\nexport type BuildNormalizedQueryReturn = {\n // The joint select query\n selectQuery: string;\n // All paths the user is querying for\n groupedUserQueryPaths: (NestedPath | Path)[] | null;\n // All paths the user is querying for + all paths that are currently loaded into the cache\n groupedPaths: (NestedPath | Path)[];\n};\n\n/**\n * returns select statement that includes the users query + all paths currently loaded into cache to later perform a \"smart update\"\n *\n * the select statement does not contain any user-defined aliases. only custom ones to dedupe.\n * without deduping, we would not be able to query inbox_id,inbox:inbox_id(name),\n * because it will result in a select of inbox_id,inbox_id(name), which does not work.\n * to dedupe, we add a custom alias to the query, e.g. dedupe_0:inbox_id,inbox_id(name)\n * we then later remove them when normalizing the data\n **/\nexport const buildNormalizedQuery = <Q extends string = '*'>({\n query,\n disabled,\n queriesForTable,\n}: BuildNormalizedQueryOps<Q>): BuildNormalizedQueryReturn | null => {\n // parse user query\n const userQueryPaths = query ? parseSelectParam(query) : null;\n\n // unique set of declaration without paths.\n // alias not needed for paths\n // declaration without alias!\n const paths: Path[] = userQueryPaths\n ? 
userQueryPaths.map((q) => ({\n declaration: removeAliasFromDeclaration(q.declaration),\n path: q.path,\n }))\n : [];\n\n if (!disabled) {\n for (const tableQuery of queriesForTable()) {\n for (const filterPath of extractPathsFromFilters(\n tableQuery.filters,\n tableQuery.paths,\n )) {\n // add paths used in filter\n const path = tableQuery.paths.find(\n (p) => p.path === filterPath.path && p.alias === filterPath.alias,\n ) ?? {\n path: filterPath.path,\n declaration: filterPath.path,\n };\n // add unique\n if (\n paths.every(\n (p) =>\n removeAliasFromDeclaration(p.declaration) !==\n removeAliasFromDeclaration(path.declaration),\n )\n ) {\n // do not use alias\n paths.push({\n path: path.path,\n declaration: removeAliasFromDeclaration(path.declaration),\n });\n }\n }\n // add paths used in query\n for (const path of tableQuery.paths) {\n if (\n paths.every(\n (p) =>\n removeAliasFromDeclaration(p.declaration) !==\n removeAliasFromDeclaration(path.declaration),\n ) &&\n // do not add agg functions\n !path.declaration.endsWith('.count') &&\n // do not add wildcard queries\n !path.declaration.endsWith('*')\n ) {\n paths.push({\n path: path.path,\n declaration: removeAliasFromDeclaration(path.declaration),\n });\n }\n }\n }\n }\n\n const groupedPaths = groupPathsRecursive(paths);\n const groupedDedupedPaths = dedupeGroupedPathsRecursive(groupedPaths);\n\n const selectQuery = buildSelectStatementFromGroupedPaths(groupedDedupedPaths);\n if (selectQuery.length === 0) return null;\n return {\n selectQuery,\n groupedUserQueryPaths: userQueryPaths\n ? groupPathsRecursive(userQueryPaths)\n : null,\n groupedPaths: groupedDedupedPaths,\n };\n};\n","import { flatten } from 'flat';\n\nimport { get } from '../lib/get';\nimport { type NestedPath, isNestedPath } from '../lib/group-paths-recursive';\nimport { isPlainObject } from '../lib/is-plain-object';\nimport type { Path } from '../lib/query-types';\nimport type { BuildNormalizedQueryReturn } from './build-normalized-query';\n\n/**\n * The parsed response of the mutation fetcher\n **/\nexport type MutationFetcherResponse<R> = {\n /**\n * Normalized response. A flat json object with a depth of 1, where the keys are the full json paths.\n **/\n normalizedData: R;\n /**\n * Result of the query passed by the user\n **/\n userQueryData?: R;\n};\n\nexport const buildMutationFetcherResponse = <R>(\n /**\n * response of the select query built by `buildNormalizedQuery`. contains dedupe aliases.\n **/\n input: R,\n {\n groupedPaths,\n groupedUserQueryPaths,\n }: Pick<BuildNormalizedQueryReturn, 'groupedPaths' | 'groupedUserQueryPaths'>,\n): MutationFetcherResponse<R> => {\n return {\n normalizedData: normalizeResponse<R>(groupedPaths, input),\n userQueryData: groupedUserQueryPaths\n ? buildUserQueryData<R>(groupedUserQueryPaths, groupedPaths, input)\n : undefined,\n };\n};\n\n/**\n * Normalize the response by removing the dedupe alias and flattening it\n **/\nexport const normalizeResponse = <R>(\n groups: (Path | NestedPath)[],\n obj: R,\n): R => {\n if (groups.some((p) => p.path === '*')) {\n // if wildcard, add every non nested value\n // for every nested value, check if groups contains a nested path for it. 
if not, also add it.\n // reason is that the wildcard does not select relations\n\n Object.entries(obj as Record<string, unknown>).forEach(([k, v]) => {\n if (typeof v === 'object' || Array.isArray(v)) {\n if (!groups.some((g) => isNestedPath(g) && g.path === k)) {\n groups.push({\n path: k,\n declaration: k,\n });\n }\n } else if (!groups.some((g) => g.path === k)) {\n groups.push({\n path: k,\n declaration: k,\n });\n }\n });\n }\n\n return groups.reduce<R>((prev, curr) => {\n // prefer alias over path because of dedupe alias\n const value = get(obj, curr.alias || curr.path);\n\n if (typeof value === 'undefined') return prev;\n if (value === null) {\n return {\n ...prev,\n // add hint to path if it has dedupe alias\n // can happen if the same relation is queried multiple times via different fkeys\n [`${curr.path}${\n curr.alias?.startsWith('d_') && curr.declaration.split('!').length > 1\n ? `!${curr.declaration.split('!')[1]}`\n : ''\n }`]: value,\n };\n }\n if (!isNestedPath(curr)) {\n return {\n ...prev,\n ...flatten({\n [curr.path]:\n value !== null &&\n (isPlainObject(value) || (Array.isArray(value) && value.length > 0))\n ? flatten(value)\n : value,\n }),\n };\n }\n if (Array.isArray(value)) {\n return {\n ...prev,\n ...(flatten({\n [curr.path]: value.map((v) => normalizeResponse(curr.paths, v)),\n }) as R),\n };\n }\n return {\n ...prev,\n ...flatten({\n // add hint to path if it has dedupe alias\n // can happen if the same relation is queried multiple times via different fkeys\n [`${curr.path}${\n curr.alias?.startsWith('d_') && curr.declaration.split('!').length > 1\n ? `!${curr.declaration.split('!')[1]}`\n : ''\n }`]: normalizeResponse(curr.paths, value as Record<string, unknown>),\n }),\n };\n }, {} as R);\n};\n\n/**\n * Build userQueryData from response\n *\n * note that `paths` is reflecting `obj`, not `userQueryPaths`.\n * iterate over `userQueryPaths` and find the corresponding path in `paths`.\n * Then, get value using the found alias and path from `obj`.\n **/\nconst buildUserQueryData = <R>(\n userQueryGroups: (Path | NestedPath)[],\n pathGroups: (Path | NestedPath)[],\n obj: R,\n): R => {\n if (pathGroups.some((p) => p.path === '*')) {\n // if wildcard, add every non nested value\n // for every nested value, check if pathGroups contains a nested path for it. if not, also add it.\n // reason is that the wildcard does not select relations\n\n Object.entries(obj as Record<string, unknown>).forEach(([k, v]) => {\n if (typeof v === 'object' || Array.isArray(v)) {\n if (!pathGroups.some((g) => isNestedPath(g) && g.path === k)) {\n pathGroups.push({\n path: k,\n declaration: k,\n });\n }\n } else if (!pathGroups.some((g) => g.path === k)) {\n pathGroups.push({\n path: k,\n declaration: k,\n });\n }\n });\n }\n\n if (userQueryGroups.some((p) => p.path === '*')) {\n // if wildcard, add every non nested value\n // for every nested value, check if pathGroups contains a nested path for it. 
if not, also add it.\n // reason is that the wildcard does not select relations\n\n Object.entries(obj as Record<string, unknown>).forEach(([k, v]) => {\n if (typeof v === 'object' || Array.isArray(v)) {\n if (!pathGroups.some((g) => isNestedPath(g) && g.path === k)) {\n userQueryGroups.push({\n path: k,\n declaration: k,\n });\n }\n } else if (!userQueryGroups.some((g) => g.path === k)) {\n userQueryGroups.push({\n path: k,\n declaration: k,\n });\n }\n });\n }\n\n return userQueryGroups.reduce<R>((prev, curr) => {\n if (curr.path === '*') return prev;\n // paths is reflecting the obj\n const inputPath = pathGroups.find(\n (p) => p.path === curr.path && isNestedPath(p) === isNestedPath(curr),\n );\n if (!inputPath) {\n // should never happen though since userQueryPaths is a subset of paths\n throw new Error(`Path ${curr.path} not found in response paths`);\n }\n const value = get(obj, inputPath.alias || inputPath.path);\n\n if (typeof value === 'undefined') return prev;\n if (value === null || !isNestedPath(curr) || !isNestedPath(inputPath)) {\n (prev as Record<string, unknown>)[curr.alias ? curr.alias : curr.path] =\n value;\n } else if (Array.isArray(value)) {\n (prev as Record<string, unknown>)[curr.alias ? curr.alias : curr.path] =\n value.map((v) => buildUserQueryData(curr.paths, inputPath.paths, v));\n } else {\n (prev as Record<string, unknown>)[curr.alias ? curr.alias : curr.path] =\n buildUserQueryData(\n curr.paths,\n inputPath.paths,\n value as Record<string, unknown>,\n );\n }\n return prev;\n }, {} as R);\n};\n","export const get = (obj: any, path: string, defaultValue: any = undefined) => {\n const split = path.split(/((?:\\.|,|\\[|\\]|->>|->)+)/g);\n let result: any = obj;\n for (let i = -1; i < split.length; i += 2) {\n const separator = split[i];\n let key: string | number = split[i + 1];\n if (!key) {\n continue;\n }\n if (separator?.endsWith('->') || separator?.endsWith('->>')) {\n if (/^\\d+$/.test(key)) {\n key = Number.parseInt(key, 10);\n }\n }\n if (separator?.endsWith('->>')) {\n result = `${result ? result[key] : result}`;\n } else {\n result = result ? result[key] : result;\n }\n }\n return result === undefined || result === obj ? defaultValue : result;\n};\n","export function isPlainObject(\n value: unknown,\n): value is Record<string, unknown> {\n return Object.prototype.toString.call(value) === '[object Object]';\n}\n","/**\n * Parses a url and returns the table name the url is interacting with.\n *\n * For mutations, the .split('?') goes unused.\n *\n * @param url The url we are pulling the table name from\n * @returns Table name\n */\nexport const getTableFromUrl = (url: string): string => {\n // Split the url\n const split = url.toString().split('/');\n // Pop the last part of the path off and remove any params if they exist\n const table = split.pop()?.split('?').shift() as string;\n // Pop an additional position to check for rpc\n const maybeRpc = split.pop() as string;\n // Rejoin the result to include rpc otherwise just table name\n return [maybeRpc === 'rpc' ? 
maybeRpc : null, table]\n .filter(Boolean)\n .join('/');\n};\n","export type MaybeLikeQueryBuilder<Result> = unknown;\n\nexport type LikeQueryBuilder<Result> = {\n url: URL;\n};\n\nexport const isLikeQueryBuilder = <Result>(\n v: MaybeLikeQueryBuilder<Result>,\n): v is LikeQueryBuilder<Result> => {\n if (typeof v !== 'object' || v === null) return false;\n const obj = v as LikeQueryBuilder<Result>;\n\n return typeof obj['url'] === 'object';\n};\n","import { getTableFromUrl } from './get-table-from-url';\nimport { MaybeLikePostgrestBuilder } from './like-postgrest-builder';\nimport { isLikeQueryBuilder } from './like-query-builder';\n\nexport const getTable = <Result>(\n query: MaybeLikePostgrestBuilder<Result>,\n): string => {\n if (!isLikeQueryBuilder(query)) {\n throw new Error('Invalid PostgrestBuilder');\n }\n\n return getTableFromUrl(query['url'].pathname);\n};\n","import type { PostgrestHasMorePaginationResponse } from './response-types';\n\nexport type PostgrestPaginationCacheData<Result> = Result[][];\n\nexport const isPostgrestPaginationCacheData = <Result>(\n q: unknown,\n): q is PostgrestPaginationCacheData<Result> => {\n if (!Array.isArray(q)) return false;\n return q.length === 0 || Array.isArray(q[0]);\n};\n\nexport type PostgrestHasMorePaginationCacheData<Result> =\n PostgrestHasMorePaginationResponse<Result>[];\n\nexport const isPostgrestHasMorePaginationCacheData = <Result>(\n q: unknown,\n): q is PostgrestHasMorePaginationCacheData<Result> => {\n if (!Array.isArray(q)) return false;\n if (q.length === 0) return true;\n const firstPage = q[0];\n return (\n Array.isArray(\n (firstPage as PostgrestHasMorePaginationResponse<Result>).data,\n ) &&\n typeof (firstPage as PostgrestHasMorePaginationResponse<Result>).hasMore ===\n 'boolean'\n );\n};\n","import type {\n PostgrestMaybeSingleResponse,\n PostgrestResponse,\n PostgrestSingleResponse,\n} from '@supabase/postgrest-js';\n\n// Convencience type to not bloat up implementation\nexport type AnyPostgrestResponse<Result> =\n | PostgrestSingleResponse<Result>\n | PostgrestMaybeSingleResponse<Result>\n | PostgrestResponse<Result>;\n\nexport const isAnyPostgrestResponse = <Result>(\n q: unknown,\n): q is AnyPostgrestResponse<Result> => {\n if (!q) return false;\n return (\n typeof (q as AnyPostgrestResponse<Result>).data === 'object' ||\n Array.isArray((q as AnyPostgrestResponse<Result>).data)\n );\n};\n\nexport type PostgrestPaginationResponse<Result> = Result[];\n\nexport const isPostgrestPaginationResponse = <Result>(\n q: unknown,\n): q is PostgrestPaginationResponse<Result> => {\n return Array.isArray(q);\n};\n\nexport type PostgrestHasMorePaginationResponse<Result> = {\n data: Result[];\n hasMore: boolean;\n};\n\nexport const isPostgrestHasMorePaginationResponse = <Result>(\n q: unknown,\n): q is PostgrestHasMorePaginationResponse<Result> => {\n if (!q) return false;\n return (\n Array.isArray((q as PostgrestHasMorePaginationResponse<Result>).data) &&\n typeof (q as PostgrestHasMorePaginationResponse<Result>).hasMore ===\n 'boolean'\n );\n};\n","import { flatten, unflatten } from 'flat';\n\nimport { sortSearchParams } from './sort-search-param';\n\n/**\n * Encodes an object by url-encoding an ordered lists of all paths and their values.\n */\nexport const encodeObject = (obj: Record<string, unknown>): string => {\n const sortedEntries = Object.entries(\n flatten(obj) as Record<string, unknown>,\n ).sort(([a], [b]) => a.length - b.length);\n const bodyParams = new URLSearchParams();\n sortedEntries.forEach(([key, value]) => 
{\n bodyParams.append(key, String(value));\n });\n return sortSearchParams(bodyParams).toString();\n};\n\n/**\n * Decodes a URL-encoded string back into a nested object.\n * This is the reverse operation of encodeObject.\n */\nexport const decodeObject = (\n encodedString: string,\n): Record<string, unknown> => {\n const params = new URLSearchParams(encodedString);\n const flatObject: Record<string, unknown> = {};\n\n // Convert URLSearchParams back to a flat object\n params.forEach((value, key) => {\n // Try to convert string values to appropriate types\n let parsedValue: unknown = value;\n\n // Try to parse numbers\n if (/^-?\\d+$/.test(value)) {\n parsedValue = parseInt(value, 10);\n } else if (/^-?\\d+\\.\\d+$/.test(value)) {\n parsedValue = parseFloat(value);\n } else if (value === 'true') {\n parsedValue = true;\n } else if (value === 'false') {\n parsedValue = false;\n } else if (value === 'null') {\n parsedValue = null;\n }\n\n flatObject[key] = parsedValue;\n });\n\n // Unflatten the object to restore nested structure\n return unflatten(flatObject);\n};\n","export const sortSearchParams = (params: URLSearchParams) =>\n new URLSearchParams(\n Array.from(params.entries()).sort((a, b) => {\n const x = `${a[0]}${a[1]}`;\n const y = `${b[0]}${b[1]}`;\n return x > y ? 1 : -1;\n }),\n );\n","import type { PostgrestBuilder } from '@supabase/postgrest-js';\n\nexport const isPostgrestBuilder = <Result>(\n q: unknown,\n): q is PostgrestBuilder<Result> => {\n return typeof (q as PostgrestBuilder<Result>).throwOnError === 'function';\n};\n","import type { PostgrestTransformBuilder } from '@supabase/postgrest-js';\nimport type { GenericSchema } from '@supabase/postgrest-js/dist/cjs/types';\n\nexport const isPostgrestTransformBuilder = <\n Schema extends GenericSchema,\n Row extends Record<string, unknown>,\n Result,\n RelationName = unknown,\n Relationships = unknown,\n>(\n q: unknown,\n): q is PostgrestTransformBuilder<\n Schema,\n Row,\n Result,\n RelationName,\n Relationships\n> => {\n return (\n typeof (\n q as PostgrestTransformBuilder<\n Schema,\n Row,\n Result,\n RelationName,\n Relationships\n >\n ).abortSignal === 'function'\n );\n};\n","export const setFilterValue = (\n searchParams: URLSearchParams,\n path: string,\n op: string,\n value: string,\n) => {\n const filters = searchParams.getAll(path);\n // delete all\n searchParams.delete(path);\n\n // re-create\n for (const f of filters) {\n if (f.startsWith(`${op}.`)) {\n continue;\n }\n searchParams.append(path, f);\n }\n\n searchParams.append(path, `${op}.${value}`);\n};\n","/**\n * Check if a value is a valid ISO DateTime string\n * @param v\n * @returns\n */\nexport const isISODateString = (v: unknown): boolean =>\n typeof v === 'string' &&\n /(\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d\\.\\d+([+-][0-2]\\d:[0-5]\\d|Z))|(\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d([+-][0-2]\\d:[0-5]\\d|Z))|(\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d([+-][0-2]\\d:[0-5]\\d|Z))/.test(\n v,\n );\n","import { isISODateString } from './is-iso-date-string';\nimport type { ValueType } from './query-types';\n\n/**\n * Safely parse any value to a ValueType\n * @param v Any value\n * @returns a ValueType\n */\nexport const parseValue = (v: any): ValueType => {\n if (isISODateString(v)) return new Date(v);\n try {\n return JSON.parse(v);\n } catch {\n return v;\n }\n};\n","import type { OrderDefinition } from './query-types';\n\n/**\n * Parses orderByKey back to OrderDefinition\n * @param key generated by PostgrestParser\n * @returns The parsed 
OrderDefinition\n */\nexport const parseOrderByKey = (v: string): OrderDefinition[] => {\n return v.split('|').map((orderBy) => {\n const [tableDef, orderDef] = orderBy.split(':');\n const [foreignTableOrCol, maybeCol] = tableDef.split('.');\n const [dir, nulls] = orderDef.split('.');\n return {\n ascending: dir === 'asc',\n nullsFirst: nulls === 'nullsFirst',\n foreignTable: maybeCol ? foreignTableOrCol : undefined,\n column: maybeCol ? maybeCol : foreignTableOrCol,\n };\n });\n};\n","import { OrderDefinition } from './query-types';\n\nexport const parseOrderBy = (searchParams: URLSearchParams) => {\n const orderBy: OrderDefinition[] = [];\n searchParams.forEach((value, key) => {\n const split = key.split('.');\n if (split[split.length === 2 ? 1 : 0] === 'order') {\n // separated by ,\n const orderByDefs = value.split(',');\n orderByDefs.forEach((def) => {\n const [column, ascending, nullsFirst] = def.split('.');\n orderBy.push({\n ascending: ascending === 'asc',\n column,\n nullsFirst: nullsFirst === 'nullsfirst',\n foreignTable: split.length === 2 ? split[0] : undefined,\n });\n });\n }\n });\n\n return orderBy;\n};\n","import {\n type FilterDefinition,\n type FilterDefinitions,\n isAndFilter,\n isFilterDefinition,\n isOrFilter,\n} from './query-types';\n\n// Helper to search for filters in a filter definition\nexport const findFilters = (\n f: FilterDefinitions,\n by: Partial<FilterDefinition>,\n) => {\n const filters: FilterDefinition[] = [];\n f.forEach((filter) => {\n if (isAndFilter(filter)) {\n filters.push(...findFilters(filter.and, by));\n }\n if (isOrFilter(filter)) {\n filters.push(...findFilters(filter.or, by));\n }\n if (isFilterDefinition(filter)) {\n if (\n (typeof by.path === 'undefined' || filter.path === by.path) &&\n (typeof by.alias === 'undefined' || filter.alias === by.alias) &&\n (typeof by.value === 'undefined' || filter.value === by.value) &&\n (typeof by.negate === 'undefined' || filter.negate === by.negate) &&\n (typeof by.operator === 'undefined' || filter.operator === by.operator)\n ) {\n filters.push(filter);\n }\n }\n });\n return filters;\n};\n","import type { PostgrestTransformBuilder } from '@supabase/postgrest-js';\nimport { GenericSchema } from '@supabase/postgrest-js/dist/cjs/types';\n\nimport { isPlainObject } from './lib/is-plain-object';\nimport { parseOrderBy } from './lib/parse-order-by';\nimport type { PostgrestPaginationResponse } from './lib/response-types';\n\nexport type PostgrestCursorPaginationFetcher<Type, Args> = (\n args: Args,\n) => Promise<Type>;\n\nexport type PostgrestCursorPaginationKeyDecoder<Args> = (args: Args) => {\n orderBy?: string;\n uqOrderBy?: string;\n};\n\nexport const createCursorPaginationFetcher = <\n Schema extends GenericSchema,\n Row extends Record<string, unknown>,\n Result,\n Args,\n Relationships = unknown,\n>(\n queryFactory:\n | (() => PostgrestTransformBuilder<Schema, Row, Result[], Relationships>)\n | null,\n config: {\n decode: PostgrestCursorPaginationKeyDecoder<Args>;\n orderBy: string;\n uqOrderBy?: string;\n rpcArgs?: { orderBy: string; uqOrderBy?: string };\n },\n): PostgrestCursorPaginationFetcher<\n PostgrestPaginationResponse<Result>,\n Args\n> | null => {\n if (!queryFactory) return null;\n return async (args) => {\n const cursor = config.decode(args);\n\n const query = queryFactory();\n\n if (config.rpcArgs) {\n query['body'] = {\n ...(isPlainObject(query['body']) ? query['body'] : {}),\n [config.rpcArgs.orderBy]: cursor.orderBy,\n ...(cursor.uqOrderBy && config.rpcArgs.uqOrderBy\n ? 
{ [config.rpcArgs.uqOrderBy]: cursor.uqOrderBy }\n : {}),\n };\n\n const { data } = await query.throwOnError();\n\n // cannot be null because of .throwOnError()\n return data as Result[];\n }\n\n const orderByDef = parseOrderBy(query['url'].searchParams);\n const orderBy = orderByDef.find((o) => o.column === config.orderBy);\n\n if (!orderBy) {\n throw new Error(`No ordering key found for path ${config.orderBy}`);\n }\n\n const uqOrderBy = config.uqOrderBy\n ? orderByDef.find((o) => o.column === config.uqOrderBy)\n : null;\n\n if (cursor.orderBy && config.uqOrderBy && cursor.uqOrderBy && uqOrderBy) {\n const operator = orderBy.ascending ? 'gt' : 'lt';\n const uqOperator = uqOrderBy.ascending ? 'gt' : 'lt';\n\n query['url'].searchParams.append(\n 'or',\n `(${config.orderBy}.${operator}.\"${cursor.orderBy}\",and(${config.orderBy}.eq.\"${cursor.orderBy}\",${config.uqOrderBy}.${uqOperator}.\"${cursor.uqOrderBy}\"))`,\n );\n } else if (cursor.orderBy) {\n const operator = orderBy.ascending ? 'gt' : 'lt';\n query['url'].searchParams.append(\n config.orderBy,\n `${operator}.${cursor.orderBy}`,\n );\n }\n\n const { data, error } = await query;\n\n if (error) throw error;\n\n // cannot be null because of .throwOnError()\n return data as Result[];\n };\n};\n","import type { PostgrestQueryBuilder } from '@supabase/postgrest-js';\nimport { UnstableGetResult as GetResult } from '@supabase/postgrest-js';\nimport {\n GenericSchema,\n GenericTable,\n} from '@supabase/postgrest-js/dist/cjs/types';\n\nimport {\n type MutationFetcherResponse,\n buildMutationFetcherResponse,\n} from './fetch/build-mutation-fetcher-response';\nimport {\n type BuildNormalizedQueryOps,\n buildNormalizedQuery,\n} from './fetch/build-normalized-query';\n\nexport type DeleteFetcher<T extends GenericTable, R> = (\n input: Partial<T['Row']>[],\n) => Promise<MutationFetcherResponse<R>[] | null>;\n\nexport type DeleteFetcherOptions<\n S extends GenericSchema,\n T extends GenericTable,\n Re = T extends { Relationships: infer R } ? R : unknown,\n> = Parameters<PostgrestQueryBuilder<S, T, Re>['delete']>[0];\n\nexport const buildDeleteFetcher =\n <\n S extends GenericSchema,\n T extends GenericTable,\n RelationName,\n Re = T extends { Relationships: infer R } ? R : unknown,\n Q extends string = '*',\n R = GetResult<S, T['Row'], RelationName, Re, Q extends '*' ? 
'*' : Q>,\n >(\n qb: PostgrestQueryBuilder<S, T, R>,\n primaryKeys: (keyof T['Row'])[],\n opts: BuildNormalizedQueryOps<Q> & DeleteFetcherOptions<S, T, RelationName>,\n ): DeleteFetcher<T, R> =>\n async (\n input: Partial<T['Row']>[],\n ): Promise<MutationFetcherResponse<R>[] | null> => {\n let filterBuilder = qb.delete(opts);\n\n if (primaryKeys.length === 1) {\n const primaryKey = primaryKeys[0] as string;\n filterBuilder.in(\n primaryKey,\n input.map((i) => {\n const v = i[primaryKey];\n if (!v) {\n throw new Error(\n `Missing value for primary key ${primaryKey as string}`,\n );\n }\n return v;\n // TODO i wont bother with this, but maybe i can nerdsnipe somone else into it\n }) as any[],\n );\n } else {\n filterBuilder = filterBuilder.or(\n input\n .map(\n (i) =>\n `and(${primaryKeys.map((c) => {\n const v = i[c];\n if (!v) {\n throw new Error(\n `Missing value for primary key ${c as string}`,\n );\n }\n return `${c as string}.eq.${v}`;\n })})`,\n )\n .join(','),\n );\n }\n\n const primaryKeysData = input.map((i) =>\n primaryKeys.reduce<R>((prev, key) => {\n return {\n ...prev,\n [key]: i[key],\n };\n }, {} as R),\n );\n\n const query = buildNormalizedQuery<Q>(opts);\n if (query) {\n const { selectQuery, groupedUserQueryPaths, groupedPaths } = query;\n // make sure that primary keys are included in the select query\n const groupedPathsWithPrimaryKeys = groupedPaths;\n const addKeys: string[] = [];\n primaryKeys.forEach((key) => {\n if (!groupedPathsWithPrimaryKeys.find((p) => p.path === key)) {\n groupedPathsWithPrimaryKeys.push({\n declaration: key as string,\n path: key as string,\n });\n addKeys.push(key as string);\n }\n });\n const { data } = await filterBuilder\n .select([selectQuery, ...addKeys].join(','))\n .throwOnError();\n return (data as R[]).map((d) =>\n buildMutationFetcherResponse(d, {\n groupedPaths: groupedPathsWithPrimaryKeys,\n groupedUserQueryPaths,\n }),\n );\n }\n\n await filterBuilder.throwOnError();\n\n if (opts.queriesForTable().length > 0) {\n // if there is at least one query on the table we are deleting from, return primary keys\n return primaryKeysData.map((pk) => ({ normalizedData: pk }));\n }\n\n return null;\n };\n","import type { PostgrestFilter } from '../postgrest-filter';\nimport type { PostgrestQueryParserOptions } from '../postgrest-query-parser';\nimport type { DecodedKey } from './types';\n\nexport type RevalidateRelationOpt<Type> = {\n schema?: string;\n relation: string;\n relationIdColumn: string;\n fKeyColumn: keyof Type;\n};\n\nexport type RevalidateRelations<Type extends Record<string, unknown>> =\n RevalidateRelationOpt<Type>[];\n\nexport type RevalidateRelationsProps<Type extends Record<string, unknown>> = {\n input: Partial<Type>;\n decodedKey: Pick<DecodedKey, 'schema' | 'table' | 'queryKey'>;\n getPostgrestFilter: (\n query: string,\n opts?: PostgrestQueryParserOptions,\n ) => Pick<PostgrestFilter<Type>, 'applyFilters'>;\n};\n\nexport const shouldRevalidateRelation = <Type extends Record<string, unknown>>(\n relations: RevalidateRelations<Type>,\n {\n input,\n getPostgrestFilter,\n decodedKey: { schema, table, queryKey },\n }: RevalidateRelationsProps<Type>,\n): boolean =>\n Boolean(\n relations.find(\n (r) =>\n (!r.schema || r.schema === schema) &&\n r.relation === table &&\n typeof input[r.fKeyColumn] !== 'undefined' &&\n getPostgrestFilter(queryKey, {\n exclusivePaths: [r.relationIdColumn],\n }).applyFilters({\n [r.relationIdColumn]: input[r.fKeyColumn],\n }),\n ),\n );\n","import type { DecodedKey } from './types';\n\nexport type 
RevalidateTableOpt = { schema?: string; table: string };\n\nexport type RevalidateTables = RevalidateTableOpt[];\n\nexport type RevalidateTablesProps = {\n decodedKey: Pick<DecodedKey, 'schema' | 'table'>;\n};\n\nexport const shouldRevalidateTable = (\n tables: RevalidateTables,\n { decodedKey: { schema, table } }: RevalidateTablesProps,\n): boolean =>\n Boolean(\n tables.find((t) => (!t.schema || t.schema === schema) && t.table === table),\n );\n","import type {\n PostgrestHasMorePaginationCacheData,\n PostgrestPaginationCacheData,\n} from '../lib/cache-data-types';\n\nexport const toHasMorePaginationCacheData = <\n Type extends Record<string, unknown>,\n>(\n a: Type[],\n currentData: PostgrestHasMorePaginationCacheData<Type>,\n chunkSize: number,\n) => {\n // return array in chunks\n const hasMoreCache = currentData.map((p) => p.hasMore);\n return a.reduce<PostgrestHasMorePaginationCacheData<Type>>(\n (resultArray, item, index) => {\n // default limit is 1000\n // ref: https://github.com/supabase/supabase/discussions/3765#discussioncomment-1581021\n const chunkIndex = Math.floor(index / chunkSize);\n\n if (!resultArray[chunkIndex]) {\n let hasMore = hasMoreCache[chunkIndex];\n if (!hasMore) {\n // if new page, set to hasMore of last page\n hasMore = hasMoreCache[hasMoreCache.length - 1];\n }\n if (chunkIndex > 0) {\n // if not first page, set prev has more to true\n resultArray[chunkIndex - 1].hasMore = true;\n }\n resultArray[chunkIndex] = {\n data: [],\n hasMore:\n hasMoreCache[chunkIndex] ??\n hasMoreCache[hasMoreCache.length - 1] ??\n false,\n }; // start a new chunk\n }\n\n resultArray[chunkIndex].data.push(item);\n\n return resultArray;\n },\n [],\n );\n};\n\nexport const toPaginationCacheData = <Type extends Record<string, unknown>>(\n a: Type[],\n chunkSize: number,\n) => {\n return a.reduce<PostgrestPaginationCacheData<Type>>(\n (resultArray, item, index) => {\n // default limit is 1000\n // ref: https://github.com/supabase/supabase/discussions/3765#discussioncomment-1581021\n const chunkIndex = Math.floor(index / chunkSize);\n\n if (!resultArray[chunkIndex]) {\n resultArray[chunkIndex] = []; // start a new chunk\n }\n\n resultArray[chunkIndex].push(item);\n\n return resultArray;\n },\n [],\n );\n};\n","import {\n isPostgrestHasMorePaginationCacheData,\n isPostgrestPaginationCacheData,\n} from './lib/cache-data-types';\nimport { isAnyPostgrestResponse } from './lib/response-types';\nimport { shouldRevalidateRelation } from './mutate/should-revalidate-relation';\nimport { shouldRevalidateTable } from './mutate/should-revalidate-table';\nimport {\n toHasMorePaginationCacheData,\n toPaginationCacheData,\n} from './mutate/transformers';\nimport type { DecodedKey, MutatorFn, RevalidateOpts } from './mutate/types';\nimport type { PostgrestFilter } from './postgrest-filter';\nimport type { PostgrestQueryParserOptions } from './postgrest-query-parser';\n\nconst filterByPks = <Type extends Record<string, unknown>>(\n input: Type,\n currentData: Type[],\n primaryKeys: (keyof Type)[],\n) => {\n return currentData.filter((i) =>\n primaryKeys.some((pk) => i[pk] !== input[pk]),\n );\n};\n\nexport type DeleteItemOperation<Type extends Record<string, unknown>> = {\n table: string;\n schema: string;\n input: Type;\n primaryKeys: (keyof Type)[];\n} & RevalidateOpts<Type>;\n\nexport type DeleteItemCache<KeyType, Type extends Record<string, unknown>> = {\n /**\n * The keys currently present in the cache\n */\n cacheKeys: KeyType[];\n /**\n * Should return a PostgrestFilter for the given query.\n * 
This is exposed as a function so results can be cached by the cache library.\n */\n getPostgrestFilter: (\n query