
@sanity/migrate


Tooling for running data migrations on Sanity.io projects

{"version":3,"file":"index.mjs","sources":["../src/defineMigration.ts","../src/it-utils/decodeText.ts","../src/it-utils/delay.ts","../src/it-utils/filter.ts","../src/it-utils/json.ts","../src/it-utils/map.ts","../src/it-utils/split.ts","../src/it-utils/ndjson.ts","../src/it-utils/take.ts","../src/it-utils/toArray.ts","../src/mutations/creators.ts","../src/mutations/operations/creators.ts","../src/mutations/transaction.ts","../src/mutations/asserters.ts","../src/runner/utils/getValueType.ts","../src/runner/utils/flatMapDeep.ts","../src/runner/normalizeMigrateDefinition.ts","../src/runner/collectMigrationMutations.ts","../src/runner/constants.ts","../src/debug.ts","../src/fs-webstream/bufferThroughFile.ts","../src/uint8arrays/index.ts","../src/fs-webstream/peekInto.ts","../src/fs-webstream/maybeDecompress.ts","../src/fs-webstream/readFileAsWebStream.ts","../src/tar-webstream/drain.ts","../src/tar-webstream/BufferList.ts","../src/tar-webstream/headers.ts","../src/tar-webstream/untar.ts","../src/utils/streamToAsyncIterator.ts","../src/sources/fromExportArchive.ts","../src/fetch-utils/endpoints.ts","../src/fetch-utils/fetchStream.ts","../src/fetch-utils/sanityRequestOptions.ts","../src/sources/fromExportEndpoint.ts","../src/utils/asyncIterableToStream.ts","../src/runner/utils/applyFilters.ts","../src/runner/utils/limitClientConcurrency.ts","../src/runner/utils/createContextClient.ts","../src/runner/utils/createFilteredDocumentsClient.ts","../src/runner/utils/getBufferFile.ts","../src/runner/dryRun.ts","../src/it-utils/concatStr.ts","../src/it-utils/lastValueFrom.ts","../src/it-utils/mapAsync.ts","../src/it-utils/tap.ts","../src/runner/utils/batchMutations.ts","../src/runner/utils/toSanityMutations.ts","../src/runner/run.ts","../src/sources/fromDocuments.ts"],"sourcesContent":["import {type Migration} from './types'\n\n/**\n * @public\n *\n * Helper function for defining a Sanity content migration. 
This function does not do anything on its own;\n * it exists to check that your schema definition is correct, and help autocompletion in your IDE.\n *\n * {@link https://www.sanity.io/docs/schema-and-content-migrations#af2be129ccd6}\n\n * @example Basic usage\n *\n * ```ts\n * export default defineMigration({\n * title: 'Make sure all strings with “acme” is uppercased to “ACME”',\n * migrate: {\n * string(node, path, context) {\n * if (node === \"acme\") {\n * return set(node.toUpperCase())\n * }\n * },\n * },\n * })\n * ```\n * @param migration - The migration definition\n *\n * See {@link Migration}\n */\nexport function defineMigration<T extends Migration>(migration: T): T {\n return migration\n}\n","export async function* decodeText(it: AsyncIterableIterator<Uint8Array>) {\n const decoder = new TextDecoder()\n for await (const chunk of it) {\n yield decoder.decode(chunk, {stream: true})\n }\n}\n","function sleep(ms: number) {\n return new Promise((resolve) => setTimeout(resolve, ms))\n}\n\nexport async function* delay<T>(it: AsyncIterableIterator<T>, ms: number) {\n for await (const chunk of it) {\n await sleep(ms)\n yield chunk\n }\n}\n","export async function* filter<T>(\n it: AsyncIterableIterator<T>,\n predicate: (value: T) => boolean | Promise<boolean>,\n) {\n for await (const chunk of it) {\n if (await predicate(chunk)) {\n yield chunk\n }\n }\n}\n","export type JSONParser<Type> = (line: string) => Type\n\nexport interface JSONOptions<Type> {\n parse?: JSONParser<Type>\n}\n\nexport async function* parseJSON<Type>(\n it: AsyncIterableIterator<string>,\n {parse = JSON.parse}: JSONOptions<Type> = {},\n): AsyncIterableIterator<Type> {\n for await (const chunk of it) {\n yield parse(chunk)\n }\n}\n\nexport async function* stringifyJSON(it: AsyncIterableIterator<unknown>) {\n for await (const chunk of it) {\n yield JSON.stringify(chunk)\n }\n}\n","export async function* map<T, U>(\n it: AsyncIterableIterator<T>,\n project: (value: T) => U,\n): AsyncIterableIterator<U> {\n for await (const chunk of it) {\n yield project(chunk)\n }\n}\n","export async function* split(\n it: AsyncIterableIterator<string>,\n delimiter: string,\n): AsyncIterableIterator<string> {\n let buf = ''\n for await (const chunk of it) {\n buf += chunk\n if (buf.includes(delimiter)) {\n const lastIndex = buf.lastIndexOf(delimiter)\n const parts = buf.slice(0, Math.max(0, lastIndex)).split(delimiter)\n\n for (const part of parts) {\n yield part\n }\n buf = buf.slice(Math.max(0, lastIndex + delimiter.length))\n }\n }\n yield buf\n}\n","import {filter} from './filter'\nimport {type JSONOptions, parseJSON} from './json'\nimport {split} from './split'\n\nexport function parse<Type>(\n it: AsyncIterableIterator<string>,\n options?: JSONOptions<Type>,\n): AsyncIterableIterator<Type> {\n return parseJSON(\n filter(split(it, '\\n'), (line) => Boolean(line && line.trim())),\n options,\n )\n}\n\nexport async function* stringify(iterable: AsyncIterableIterator<unknown>) {\n for await (const doc of iterable) {\n yield `${JSON.stringify(doc)}\\n`\n }\n}\n","export async function* take<T>(it: AsyncIterableIterator<T>, count: number) {\n let i = 0\n for await (const chunk of it) {\n if (i++ >= count) return\n yield chunk\n }\n}\n","export async function toArray<T>(it: AsyncIterableIterator<T>): Promise<T[]> {\n const result: T[] = []\n for await (const chunk of it) {\n result.push(chunk)\n }\n return result\n}\n","import {type Path} from '@sanity/types'\nimport {fromString} from '@sanity/util/paths'\nimport arrify from 
'arrify'\n\nimport {type Operation} from './operations/types'\nimport {\n type CreateIfNotExistsMutation,\n type CreateMutation,\n type CreateOrReplaceMutation,\n type DeleteMutation,\n type NodePatch,\n type NodePatchList,\n type PatchMutation,\n type PatchOptions,\n type SanityDocument,\n} from './types'\nimport {type NormalizeReadOnlyArray, type Optional, type Tuplify} from './typeUtils'\n\n/**\n * Creates a new document.\n * @param document - The document to be created.\n * @returns The mutation to create the document.\n */\nexport function create<Doc extends Optional<SanityDocument, '_id'>>(\n document: Doc,\n): CreateMutation<Doc> {\n return {type: 'create', document}\n}\n\n/**\n * Applies a patch to a document.\n * @param id - The ID of the document to be patched.\n * @param patches - The patches to be applied.\n * @param options - Optional patch options.\n * @returns The mutation to patch the document.\n */\nexport function patch<P extends NodePatchList | NodePatch>(\n id: string,\n patches: P,\n options?: PatchOptions,\n): PatchMutation<NormalizeReadOnlyArray<Tuplify<P>>> {\n return {\n type: 'patch',\n id,\n patches: arrify(patches) as any,\n ...(options ? {options} : {}),\n }\n}\n\n/**\n * Creates a {@link NodePatch} at a specific path.\n * @param path - The path where the operation should be applied.\n * @param operation - The operation to be applied.\n * @returns The node patch.\n */\nexport function at<O extends Operation>(path: Path | string, operation: O): NodePatch<Path, O> {\n return {\n path: typeof path === 'string' ? fromString(path) : path,\n op: operation,\n }\n}\n\n/**\n * Creates a document if it does not exist.\n * @param document - The document to be created.\n * @returns The mutation operation to create the document if it does not exist.\n */\nexport function createIfNotExists<Doc extends SanityDocument>(\n document: Doc,\n): CreateIfNotExistsMutation<Doc> {\n return {type: 'createIfNotExists', document}\n}\n\n/**\n * Creates or replaces a document.\n * @param document - The document to be created or replaced.\n * @returns The mutation operation to create or replace the document.\n */\nexport function createOrReplace<Doc extends SanityDocument>(\n document: Doc,\n): CreateOrReplaceMutation<Doc> {\n return {type: 'createOrReplace', document}\n}\n\n/**\n * Deletes a document.\n * @param id - The id of the document to be deleted.\n * @returns The mutation operation to delete the document.\n */\nexport function delete_(id: string): DeleteMutation {\n return {type: 'delete', id}\n}\n\n/**\n * Alias for delete\n */\nexport const del = delete_\n","import arrify from 'arrify'\n\nimport {type AnyArray, type ArrayElement, type NormalizeReadOnlyArray} from '../typeUtils'\nimport {\n type DecOp,\n type DiffMatchPatchOp,\n type IncOp,\n type IndexedSegment,\n type InsertOp,\n type KeyedSegment,\n type RelativePosition,\n type ReplaceOp,\n type SetIfMissingOp,\n type SetOp,\n type TruncateOp,\n type UnsetOp,\n} from './types'\n\n/**\n * Creates a `set` operation with the provided value.\n * @param value - The value to set.\n * @returns A `set` operation.\n * {@link https://www.sanity.io/docs/http-patches#6TPENSW3}\n *\n * @example\n * ```ts\n * const setFoo = set('foo')\n * const setEmptyArray = set([])\n * ```\n */\nexport const set = <const T>(value: T): SetOp<T> => ({type: 'set', value})\n\n/**\n * Creates a `setIfMissing` operation with the provided value.\n * @param value - The value to set if missing.\n * @returns A `setIfMissing` operation.\n * {@link 
https://www.sanity.io/docs/http-patches#A80781bT}\n * @example\n * ```ts\n * const setFooIfMissing = setIfMissing('foo')\n * const setEmptyArrayIfMissing = setIfMissing([])\n * ```\n */\nexport const setIfMissing = <const T>(value: T): SetIfMissingOp<T> => ({\n type: 'setIfMissing',\n value,\n})\n\n/**\n * Creates an `unset` operation.\n * @returns An `unset` operation.\n * {@link https://www.sanity.io/docs/http-patches#xRtBjp8o}\n *\n * @example\n * ```ts\n * const unsetAnyValue = unset()\n * ```\n */\nexport const unset = (): UnsetOp => ({type: 'unset'})\n\n/**\n * Creates an `inc` (increment) operation with the provided amount.\n * @param amount - The amount to increment by.\n * @returns An incrementation operation for numeric values\n * {@link https://www.sanity.io/docs/http-patches#vIT8WWQo}\n *\n * @example\n * ```ts\n * const incBy1 = inc()\n * const incBy5 = inc(5)\n * ```\n */\nexport const inc = <const N extends number = 1>(amount: N = 1 as N): IncOp<N> => ({\n type: 'inc',\n amount,\n})\n\n/**\n * Creates a `dec` (decrement) operation with the provided amount.\n * @param amount - The amount to decrement by.\n * @returns A `dec` operation.\n * {@link https://www.sanity.io/docs/http-patches#vIT8WWQo}\n *\n * @example\n * ```ts\n * const decBy1 = dec()\n * const decBy10 = dec(10)\n * ```\n */\nexport const dec = <const N extends number = 1>(amount: N = 1 as N): DecOp<N> => ({\n type: 'dec',\n amount,\n})\n\n/**\n * Creates a `diffMatchPatch` operation with the provided value.\n * @param value - The value for the diff match patch operation.\n * @returns A `diffMatchPatch` operation.\n * {@link https://www.sanity.io/docs/http-patches#aTbJhlAJ}\n * @public\n */\nexport const diffMatchPatch = (value: string): DiffMatchPatchOp => ({\n type: 'diffMatchPatch',\n value,\n})\n\n/**\n * Creates an `insert` operation with the provided items, position, and reference item.\n * @param items - The items to insert.\n * @param position - The position to insert at.\n * @param indexOrReferenceItem - The index or reference item to insert before or after.\n * @returns An `insert` operation for adding values to arrays\n * {@link https://www.sanity.io/docs/http-patches#febxf6Fk}\n *\n * @example\n * ```ts\n * const prependFoo = insert(['foo'], 'before', 0)\n * const appendFooAndBar = insert(['foo', 'bar'], 'after', someArray.length -1)\n * const insertObjAfterXYZ = insert({name: 'foo'}, 'after', {_key: 'xyz'}])\n * ```\n */\nexport function insert<\n const Items extends AnyArray<unknown>,\n const Pos extends RelativePosition,\n const ReferenceItem extends IndexedSegment | KeyedSegment,\n>(\n items: Items | ArrayElement<Items>,\n position: Pos,\n indexOrReferenceItem: ReferenceItem,\n): InsertOp<NormalizeReadOnlyArray<Items>, Pos, ReferenceItem> {\n return {\n type: 'insert',\n referenceItem: indexOrReferenceItem,\n position,\n items: arrify(items) as any,\n }\n}\n\n/**\n * Creates an `insert` operation that appends the provided items.\n * @param items - The items to append.\n * @returns An `insert` operation for adding a value to the end of an array.\n * {@link https://www.sanity.io/docs/http-patches#Cw4vhD88}\n *\n * @example\n * ```ts\n * const appendFoo = append('foo')\n * const appendObject = append({name: 'foo'})\n * const appendObjects = append([{name: 'foo'}, [{name: 'bar'}]])\n * ```\n */\nexport function append<const Items extends AnyArray<unknown>>(items: Items | ArrayElement<Items>) {\n return insert(items, 'after', -1)\n}\n\n/**\n * Creates an `insert` operation that prepends the provided 
items.\n * @param items - The items to prepend.\n * @returns An `insert` operation for adding a value to the start of an array.\n * {@link https://www.sanity.io/docs/http-patches#refAUsf0}\n *\n * @example\n * ```ts\n * const prependFoo = prepend('foo')\n * const prependObject = prepend({name: 'foo'})\n * const prependObjects = prepend([{name: 'foo'}, [{name: 'bar'}]])\n * ```\n */\nexport function prepend<const Items extends AnyArray<unknown>>(items: Items | ArrayElement<Items>) {\n return insert(items, 'before', 0)\n}\n\n/**\n * Creates an `insert` operation that inserts the provided items before the provided index or reference item.\n * @param items - The items to insert.\n * @param indexOrReferenceItem - The index or reference item to insert before.\n * @returns An `insert` operation before the provided index or reference item.\n * {@link https://www.sanity.io/docs/http-patches#0SQmPlb6}\n * @public\n *\n * @example\n * ```ts\n * const insertFooBeforeIndex3 = insertBefore('foo', 3)\n * const insertObjectBeforeKey = insertBefore({name: 'foo'}, {_key: 'xyz'}]\n * ```\n */\nexport function insertBefore<\n const Items extends AnyArray<unknown>,\n const ReferenceItem extends IndexedSegment | KeyedSegment,\n>(items: Items | ArrayElement<Items>, indexOrReferenceItem: ReferenceItem) {\n return insert(items, 'before', indexOrReferenceItem)\n}\n\n/**\n * Creates an `insert` operation that inserts the provided items after the provided index or reference item.\n * @param items - The items to insert.\n * @param indexOrReferenceItem - The index or reference item to insert after.\n * @returns An `insert` operation after the provided index or reference item.\n * {@link https://www.sanity.io/docs/http-patches#0SQmPlb6}\n *\n * @example\n * ```ts\n * const insertFooAfterIndex3 = insertAfter('foo', 3)\n * const insertObjectAfterKey = insertAfter({name: 'foo'}, {_key: 'xyz'}]\n * ```\n */\nexport const insertAfter = <\n const Items extends AnyArray<unknown>,\n const ReferenceItem extends IndexedSegment | KeyedSegment,\n>(\n items: Items | ArrayElement<Items>,\n indexOrReferenceItem: ReferenceItem,\n) => {\n return insert(items, 'after', indexOrReferenceItem)\n}\n\n/**\n * Creates a `truncate` operation that will remove all items after `startIndex` until the end of the array or the provided `endIndex`.\n * @param startIndex - The start index for the truncate operation.\n * @param endIndex - The end index for the truncate operation.\n * @returns A `truncate` operation.\n * @remarks - This will be converted to an `unset` patch when submitted to the API\n * {@link https://www.sanity.io/docs/http-patches#xRtBjp8o}\n *\n * @example\n * ```ts\n * const clearArray = truncate(0)\n * const removeItems = truncate(3, 5) // Removes items at index 3, 4, and 5\n * const truncate200 = truncate(200) // Removes all items after index 200\n * ```\n */\nexport function truncate(startIndex: number, endIndex?: number): TruncateOp {\n return {\n type: 'truncate',\n startIndex,\n endIndex,\n }\n}\n\n/**\n * Creates a `replace` operation with the provided items and reference item.\n * @param items - The items to replace.\n * @param referenceItem - The reference item to replace.\n * @returns A ReplaceOp operation.\n * @remarks This will be converted to an `insert`/`replace` patch when submitted to the API\n * {@link https://www.sanity.io/docs/http-patches#GnVSwcPa}\n *\n * @example\n * ```ts\n * const replaceItem3WithFoo = replace('foo', 3)\n * const replaceItem3WithFooAndBar = replace(['foo', 'bar'], 3)\n * const replaceObject = 
replace({name: 'bar'}, {_key: 'xyz'})\n * ```\n */\nexport function replace<Items extends any[], ReferenceItem extends IndexedSegment | KeyedSegment>(\n items: Items | ArrayElement<Items>,\n referenceItem: ReferenceItem,\n): ReplaceOp<Items, ReferenceItem> {\n return {\n type: 'replace',\n referenceItem,\n items: arrify(items) as Items,\n }\n}\n","import {type Mutation} from './types'\n\nexport interface Transaction {\n type: 'transaction'\n id?: string\n mutations: Mutation[]\n}\n\n/**\n * @public\n *\n * Wraps a set of mutations in a transaction.\n * Note: use with caution. Transactions cannot be optimized and will be submitted one-by-one, which means they will make\n * your migration run slower and produce more API requests.\n * @param transactionId - The transaction ID. This is optional and should usually be omitted, as it will be auto-generated by the server if not provided.\n * @param mutations - The mutations to include in the transaction.\n *\n * {@link https://www.sanity.io/docs/http-mutations#afccc1b9ef78}\n */\nexport function transaction(transactionId: string, mutations: Mutation[]): Transaction\nexport function transaction(mutations: Mutation[]): Transaction\nexport function transaction(\n idOrMutations: string | Mutation[],\n _mutations?: Mutation[],\n): Transaction {\n const [id, mutations] =\n typeof idOrMutations === 'string'\n ? [idOrMutations, _mutations as Mutation[]]\n : [undefined, idOrMutations as Mutation[]]\n return {type: 'transaction', id, mutations}\n}\n","import {type Operation} from './operations/types'\nimport {type Transaction} from './transaction'\nimport {type Mutation, type NodePatch} from './types'\n\nexport function isMutation(mutation: unknown): mutation is Mutation {\n return (\n mutation !== null &&\n typeof mutation === 'object' &&\n 'type' in mutation &&\n (mutation.type === 'create' ||\n mutation.type === 'createIfNotExists' ||\n mutation.type === 'createOrReplace' ||\n mutation.type === 'patch' ||\n mutation.type === 'delete')\n )\n}\n\nexport function isTransaction(mutation: unknown): mutation is Transaction {\n return (\n mutation !== null &&\n typeof mutation === 'object' &&\n 'type' in mutation &&\n mutation.type === 'transaction'\n )\n}\n\nexport function isOperation(value: unknown): value is Operation {\n return (\n value !== null &&\n typeof value === 'object' &&\n 'type' in value &&\n (value.type === 'set' ||\n value.type === 'unset' ||\n value.type === 'insert' ||\n value.type === 'diffMatchPatch' ||\n value.type === 'dec' ||\n value.type === 'inc' ||\n value.type === 'upsert' ||\n value.type === 'unassign' ||\n value.type === 'truncate' ||\n value.type === 'setIfMissing')\n )\n}\n\nexport function isNodePatch(change: unknown): change is NodePatch {\n return (\n change !== null &&\n typeof change === 'object' &&\n 'path' in change &&\n Array.isArray(change.path) &&\n 'op' in change &&\n isOperation(change.op)\n )\n}\n","export function getValueType(value: unknown) {\n if (Array.isArray(value)) {\n return 'array'\n }\n return value === null ? 'null' : typeof value\n}\n","import {type Path, type PathSegment} from '@sanity/types'\n\nimport {type JsonArray, type JsonObject, type JsonValue} from '../../json'\nimport {getValueType} from './getValueType'\n\ntype SkipMarker = {_: 'SKIP_MARKER'}\nexport const SKIP_MARKER: SkipMarker = {_: 'SKIP_MARKER'}\n\nfunction callMap<T>(mapFn: MapFn<T>, value: JsonValue, path: Path): T[] {\n const res = mapFn(value, path)\n return Array.isArray(res) ? 
res : [res]\n}\n\nfunction getPathWithKey(\n item: unknown,\n index: number | string,\n container: JsonArray | JsonObject,\n): PathSegment {\n if (\n item &&\n Array.isArray(container) &&\n typeof item === 'object' &&\n '_key' in item &&\n typeof item._key === 'string'\n ) {\n return {_key: item._key}\n }\n return index\n}\n\ntype MapFn<T> = (value: JsonValue, path: Path) => T | T[]\n\n// Reduce depth first\nfunction mapObject<T>(reducerFn: MapFn<T>, object: JsonObject, path: Path): T[] {\n return [\n ...callMap(reducerFn, object, path),\n ...Object.keys(object).flatMap((key) =>\n flatMapAny(reducerFn, object[key], path.concat(getPathWithKey(object[key], key, object))),\n ),\n ]\n}\n\n// Reduce depth first\nfunction mapArray<T>(mapFn: MapFn<T>, array: JsonArray, path: Path): T[] {\n return [\n ...callMap(mapFn, array, path),\n ...array.flatMap((item: JsonValue, index) =>\n flatMapAny(mapFn, item, path.concat(getPathWithKey(item, index, array))),\n ),\n ]\n}\n\nfunction flatMapAny<T>(mapFn: MapFn<T>, val: JsonValue, path: Path) {\n const type = getValueType(val)\n if (type === 'object') {\n return mapObject(mapFn, val as JsonObject, path)\n }\n if (type === 'array') {\n return mapArray(mapFn, val as JsonArray, path)\n }\n return callMap(mapFn, val, path)\n}\n\n/**\n * Iterating depth first over the JSON tree, calling the mapFn for parents before children\n * @param value - the value to map deeply over\n * @param mapFn - the mapFn to call for each value\n */\nexport function flatMapDeep<T>(value: JsonValue, mapFn: MapFn<T>): T[] {\n return flatMapAny(mapFn, value, [])\n}\n","import {type Mutation as RawMutation} from '@sanity/client'\nimport {SanityEncoder} from '@sanity/mutate'\nimport {type Path, type SanityDocument} from '@sanity/types'\nimport arrify from 'arrify'\n\nimport {type JsonArray, type JsonObject, type JsonValue} from '../json'\nimport {\n at,\n type Mutation,\n type NodePatch,\n type Operation,\n patch,\n type Transaction,\n} from '../mutations'\nimport {isMutation, isNodePatch, isOperation, isTransaction} from '../mutations/asserters'\nimport {\n type AsyncIterableMigration,\n type Migration,\n type MigrationContext,\n type NodeMigration,\n type NodeMigrationReturnValue,\n} from '../types'\nimport {flatMapDeep} from './utils/flatMapDeep'\nimport {getValueType} from './utils/getValueType'\n\nexport function normalizeMigrateDefinition(migration: Migration): AsyncIterableMigration {\n if (typeof migration.migrate == 'function') {\n // assume AsyncIterableMigration\n return normalizeIteratorValues(migration.migrate)\n }\n return createAsyncIterableMutation(migration.migrate, {\n filter: migration.filter,\n documentTypes: migration.documentTypes,\n })\n}\n\nfunction normalizeIteratorValues(asyncIterable: AsyncIterableMigration): AsyncIterableMigration {\n return async function* run(docs, context) {\n for await (const documentMutations of asyncIterable(docs, context)) {\n yield normalizeMutation(documentMutations)\n }\n }\n}\n\n/**\n * Normalize a mutation or a NodePatch to a document mutation\n * @param documentId - The document id\n * @param change - The Mutation or NodePatch\n */\nfunction normalizeMutation(\n change: Transaction | Mutation | RawMutation | (Mutation | Transaction | RawMutation)[],\n): (Mutation | Transaction)[] {\n if (Array.isArray(change)) {\n return change.flatMap((ch) => normalizeMutation(ch))\n }\n if (isRawMutation(change)) {\n return SanityEncoder.decodeAll([change] as any) as Mutation[]\n }\n return [change]\n}\n\nfunction isRawMutation(\n mutation: 
Transaction | Mutation | NodePatch | Operation | RawMutation,\n): mutation is RawMutation {\n return (\n 'createIfNotExists' in mutation ||\n 'createOrReplace' in mutation ||\n 'create' in mutation ||\n 'patch' in mutation ||\n 'delete' in mutation\n )\n}\nexport function createAsyncIterableMutation(\n migration: NodeMigration,\n opts: {filter?: string; documentTypes?: string[]},\n): AsyncIterableMigration {\n const documentTypesSet = new Set(opts.documentTypes)\n\n return async function* run(docs, context) {\n for await (const doc of docs()) {\n if (opts.documentTypes && !documentTypesSet.has(doc._type)) continue\n\n const documentMutations = await collectDocumentMutations(migration, doc, context)\n if (documentMutations.length > 0) {\n yield documentMutations\n }\n }\n }\n}\n\nasync function collectDocumentMutations(\n migration: NodeMigration,\n doc: SanityDocument,\n context: MigrationContext,\n): Promise<(Mutation | Transaction)[]> {\n const documentMutations = Promise.resolve(migration.document?.(doc, context))\n const nodeMigrations = flatMapDeep(doc as JsonValue, async (value, path) => {\n const [nodeReturnValues, nodeTypeReturnValues] = await Promise.all([\n Promise.resolve(migration.node?.(value, path, context)),\n Promise.resolve(migrateNodeType(migration, value, path, context)),\n ])\n\n return [...arrify(nodeReturnValues), ...arrify(nodeTypeReturnValues)].map(\n (change) => change && normalizeNodeMutation(path, change),\n )\n })\n\n return (await Promise.all([...arrify(await documentMutations), ...nodeMigrations]))\n .flat()\n .flatMap((change) => (change ? normalizeDocumentMutation(doc._id, change) : []))\n}\n\n/**\n * Normalize a mutation or a NodePatch to a document mutation\n * @param documentId - The document id\n * @param change - The Mutation or NodePatch\n */\nfunction normalizeDocumentMutation(\n documentId: string,\n change:\n | Transaction\n | Mutation\n | NodePatch\n | RawMutation\n | (Mutation | NodePatch | Transaction | RawMutation)[],\n): Mutation | Transaction | (Mutation | Transaction)[] {\n if (Array.isArray(change)) {\n return change.flatMap((ch) => normalizeDocumentMutation(documentId, ch))\n }\n if (isRawMutation(change)) {\n return SanityEncoder.decodeAll([change] as any)[0] as Mutation\n }\n if (isTransaction(change)) {\n return change\n }\n return isMutation(change) ? change : patch(documentId, change)\n}\n\n/**\n * Normalize a mutation or a NodePatch to a document mutation\n * @param path - The path the operation should be applied at\n * @param change - The Mutation or NodePatch\n */\nfunction normalizeNodeMutation(\n path: Path,\n change: Mutation | NodePatch | Operation | RawMutation | RawMutation[],\n): Mutation | NodePatch | (Mutation | NodePatch)[] {\n if (Array.isArray(change)) {\n return change.flatMap((ch) => normalizeNodeMutation(path, ch))\n }\n if (isRawMutation(change)) {\n return SanityEncoder.decodeAll([change] as any)[0] as Mutation\n }\n\n if (isNodePatch(change)) {\n return at(path.concat(change.path), change.op)\n }\n return isOperation(change) ? 
at(path, change) : change\n}\n\nfunction migrateNodeType(\n migration: NodeMigration,\n value: JsonValue,\n path: Path,\n context: MigrationContext,\n): void | NodeMigrationReturnValue | Promise<void | NodeMigrationReturnValue> {\n switch (getValueType(value)) {\n case 'string':\n return migration.string?.(value as string, path, context)\n case 'number':\n return migration.number?.(value as number, path, context)\n case 'boolean':\n return migration.boolean?.(value as boolean, path, context)\n case 'object':\n return migration.object?.(value as JsonObject, path, context)\n case 'array':\n return migration.array?.(value as JsonArray, path, context)\n case 'null':\n return migration.null?.(value as null, path, context)\n default:\n throw new Error('Unknown value type')\n }\n}\n","import {type SanityDocument} from '@sanity/types'\n\nimport {type Migration, type MigrationContext} from '../types'\nimport {normalizeMigrateDefinition} from './normalizeMigrateDefinition'\n\nfunction wrapDocumentsIteratorProducer(factory: () => AsyncIterableIterator<SanityDocument>) {\n function documents() {\n return factory()\n }\n\n ;(documents as any)[Symbol.asyncIterator] = () => {\n throw new Error(\n `The migration is attempting to iterate over the \"documents\" function, please call the function instead:\n\n // BAD:\n for await (const document of documents) {\n // ...\n }\n\n // GOOD: 👇 This is a function and has to be called\n for await (const document of documents()) {\n // ...\n }\n `,\n )\n }\n return documents\n}\n\nexport function collectMigrationMutations(\n migration: Migration,\n documents: () => AsyncIterableIterator<SanityDocument>,\n context: MigrationContext,\n) {\n const migrate = normalizeMigrateDefinition(migration)\n return migrate(wrapDocumentsIteratorProducer(documents), context)\n}\n","export const MUTATION_ENDPOINT_MAX_BODY_SIZE = 1024 * 256 // 256KB\nexport const DEFAULT_MUTATION_CONCURRENCY = 6\nexport const MAX_MUTATION_CONCURRENCY = 10\n","import createDebug from 'debug'\n\nexport default createDebug('sanity:migrate')\n","import {type FileHandle, open, unlink} from 'node:fs/promises'\n\nimport baseDebug from '../debug'\n\nconst debug = baseDebug.extend('bufferThroughFile')\n\nconst CHUNK_SIZE = 1024\n\n/**\n * Takes a source stream that will be drained and written to the provided file name as fast as possible.\n * and returns a function that can be called to create multiple readable stream on top of the buffer file.\n * It will start pulling data from the source stream once the first readableStream is created, writing to the buffer file in the background.\n * The readable streams and can be read at any rate (but will not receive data faster than the buffer file is written to).\n * Note: by default, buffering will run to completion, and this may prevent the process from exiting after done reading from the\n * buffered streams. To stop writing to the buffer file, an AbortSignal can be provided and once it's controller aborts, the buffer file will\n * stop. After the signal is aborted, no new buffered readers can be created.\n *\n * @param source - The source readable stream. 
Will be drained as fast as possible.\n * @param filename - The filename to write to.\n * @param options - Optional AbortSignal to stop writing to the buffer file.\n * @returns A function that can be called multiple times to create a readable stream on top of the buffer file.\n */\nexport function bufferThroughFile(\n source: ReadableStream<Uint8Array>,\n filename: string,\n options?: {signal: AbortSignal; keepFile?: boolean},\n) {\n const signal = options?.signal\n\n let writeHandle: FileHandle\n let readHandle: Promise<FileHandle> | null\n\n // Whether the all data has been written to the buffer file.\n let bufferDone = false\n\n signal?.addEventListener('abort', async () => {\n debug('Aborting bufferThroughFile')\n await Promise.all([\n writeHandle && writeHandle.close(),\n readHandle && (await readHandle).close(),\n ])\n })\n\n // Number of active readers. When this reaches 0, the read handle will be closed.\n let readerCount = 0\n let ready: Promise<void>\n\n async function pump(reader: ReadableStreamDefaultReader<Uint8Array>) {\n try {\n while (true) {\n const {done, value} = await reader.read()\n if (done || signal?.aborted) {\n // if we're done reading, or the primary reader has been cancelled, stop writing to the buffer file\n return\n }\n await writeHandle.write(value)\n }\n } finally {\n await writeHandle.close()\n bufferDone = true\n reader.releaseLock()\n }\n }\n\n function createBufferedReader() {\n let totalBytesRead = 0\n\n return async function tryReadFromBuffer(handle: FileHandle) {\n const {bytesRead, buffer} = await handle.read(\n new Uint8Array(CHUNK_SIZE),\n 0,\n CHUNK_SIZE,\n totalBytesRead,\n )\n if (bytesRead === 0 && !bufferDone && !signal?.aborted) {\n debug('Not enough data in buffer file, waiting for more data to be written')\n // we're waiting for more data to be written to the buffer file, try again\n return tryReadFromBuffer(handle)\n }\n totalBytesRead += bytesRead\n return {bytesRead, buffer}\n }\n }\n\n function init(): Promise<void> {\n if (!ready) {\n ready = (async () => {\n debug('Initializing bufferThroughFile')\n writeHandle = await open(filename, 'w')\n // start pumping data from the source stream to the buffer file\n // note, don't await this, as it will block the ReadableStream.start() method\n debug('Start buffering source stream to file')\n pump(source.getReader()).then(() => {\n debug('Buffering source stream to buffer file')\n })\n })()\n }\n return ready\n }\n\n function getReadHandle(): Promise<FileHandle> {\n if (!readHandle) {\n debug('Opening read handle on %s', filename)\n readHandle = open(filename, 'r')\n }\n return readHandle\n }\n\n function onReaderStart() {\n readerCount++\n }\n async function onReaderEnd() {\n readerCount--\n if (readerCount === 0 && readHandle) {\n const handle = readHandle\n readHandle = null\n debug('Closing read handle on %s', filename)\n await (await handle).close()\n if (options?.keepFile !== true) {\n debug('Removing buffer file', filename)\n await unlink(filename)\n }\n }\n }\n\n return () => {\n const readChunk = createBufferedReader()\n\n let didEnd = false\n function onEnd() {\n if (didEnd) {\n return\n }\n didEnd = true\n onReaderEnd()\n }\n return new ReadableStream<Uint8Array>({\n async start() {\n if (signal?.aborted) {\n throw new Error('Cannot create new buffered readers on aborted stream')\n }\n debug('Reader started reading from file handle')\n onReaderStart()\n await init()\n await getReadHandle()\n },\n async pull(controller) {\n if (!readHandle) {\n throw new Error('Cannot read from closed 
handle')\n }\n const {bytesRead, buffer} = await readChunk(await readHandle)\n if (bytesRead === 0 && bufferDone) {\n debug('Reader done reading from file handle')\n await onEnd()\n controller.close()\n } else {\n controller.enqueue(buffer.subarray(0, bytesRead))\n }\n },\n cancel() {\n onEnd()\n },\n })\n }\n}\n","/**\n * Copied over from uint8array-extras to sort out ESM build issues. Should be replaced with imports from that module eventually\n */\nconst objectToString = Object.prototype.toString\nconst uint8ArrayStringified = '[object Uint8Array]'\n\nexport function isUint8Array(value: unknown): value is Uint8Array {\n if (!value) {\n return false\n }\n\n if (value.constructor === Uint8Array) {\n return true\n }\n\n return objectToString.call(value) === uint8ArrayStringified\n}\n\nexport function assertUint8Array(value: unknown): asserts value is Uint8Array {\n if (!isUint8Array(value)) {\n throw new TypeError(`Expected \\`Uint8Array\\`, got \\`${typeof value}\\``)\n }\n}\n\nexport function concatUint8Arrays(arrays: Uint8Array[], totalLength?: number) {\n if (arrays.length === 0) {\n return new Uint8Array(0)\n }\n\n totalLength ??= arrays.reduce((accumulator, currentValue) => accumulator + currentValue.length, 0)\n\n const returnValue = new Uint8Array(totalLength)\n\n let offset = 0\n for (const array of arrays) {\n assertUint8Array(array)\n returnValue.set(array, offset)\n offset += array.length\n }\n\n return returnValue\n}\n\nexport function areUint8ArraysEqual(a: Uint8Array, b: Uint8Array) {\n assertUint8Array(a)\n assertUint8Array(b)\n\n if (a === b) {\n return true\n }\n\n if (a.length !== b.length) {\n return false\n }\n\n for (let index = 0; index < a.length; index++) {\n if (a[index] !== b[index]) {\n return false\n }\n }\n\n return true\n}\n","import {concatUint8Arrays} from '../uint8arrays'\n\nexport function peekInto(readable: ReadableStream, options: {size: number}) {\n const {size} = options\n return new Promise<[head: Uint8Array, ReadableStream]>((resolve, reject) => {\n let totalBytesRead = 0\n let streamCompleted = false\n const chunks: Array<Uint8Array> = []\n const reader = readable.getReader()\n\n function settled() {\n const head = concatUint8Arrays(chunks)\n resolve([\n head,\n new ReadableStream<Uint8Array>({\n start(controller) {\n controller.enqueue(head)\n if (streamCompleted) {\n controller.close()\n }\n },\n async pull(controller) {\n const {done, value} = await reader.read()\n if (done) {\n controller.close()\n } else {\n controller.enqueue(value)\n }\n },\n }),\n ])\n }\n ;(async () => {\n while (true) {\n const {done, value: chunk} = await reader.read()\n if (done) {\n streamCompleted = true\n break\n } else {\n totalBytesRead += chunk.byteLength\n chunks.push(chunk)\n if (totalBytesRead >= size) {\n break\n }\n }\n }\n })().then(settled, reject)\n })\n}\n","import {peekInto} from './peekInto'\n\nfunction isGzip(buffer: Uint8Array) {\n return buffer.length > 3 && buffer[0] === 0x1f && buffer[1] === 0x8b && buffer[2] === 0x08\n}\n\nfunction isDeflate(buf: Uint8Array) {\n return buf.length > 2 && buf[0] === 0x78 && (buf[1] === 1 || buf[1] === 0x9c || buf[1] === 0xda)\n}\n\nexport async function maybeDecompress(readable: ReadableStream<Uint8Array>) {\n const [head, stream] = await peekInto(readable, {size: 10})\n if (isGzip(head)) {\n return stream.pipeThrough(new DecompressionStream('gzip'))\n }\n if (isDeflate(head)) {\n return stream.pipeThrough(new DecompressionStream('deflate-raw'))\n }\n return stream\n}\n","import {type FileHandle, open} from 
'node:fs/promises'\n\nimport baseDebug from '../debug'\n\nconst debug = baseDebug.extend('readFileAsWebStream')\n\nconst CHUNK_SIZE = 1024 * 16\n\nexport function readFileAsWebStream(filename: string): ReadableStream<Uint8Array> {\n let fileHandle: FileHandle\n let position = 0\n\n return new ReadableStream({\n async start() {\n debug('Starting readable stream from', filename)\n fileHandle = await open(filename, 'r')\n },\n async pull(controller) {\n const {bytesRead, buffer} = await fileHandle.read(\n new Uint8Array(CHUNK_SIZE),\n 0,\n CHUNK_SIZE,\n position,\n )\n if (bytesRead === 0) {\n await fileHandle.close()\n debug('Closing readable stream from', filename)\n controller.close()\n } else {\n position += bytesRead\n controller.enqueue(buffer.subarray(0, bytesRead))\n }\n },\n\n cancel() {\n debug('Cancelling readable stream from', filename)\n return fileHandle.close()\n },\n })\n}\n","/**\n * Helper to drain a stream, useful in cases where you want to keep reading a stream but disregard the received chunks.\n * @param stream - the readable stream to drain\n */\nexport async function drain(stream: ReadableStream) {\n const reader = stream.getReader()\n while (true) {\n const {done} = await reader.read()\n if (done) {\n return\n }\n }\n}\n","import FIFO from 'fast-fifo'\n\nimport {concatUint8Arrays} from '../uint8arrays'\n\nconst EMPTY = new Uint8Array()\n\n// Extracted from https://github.com/mafintosh/tar-stream/blob/master/extract.js#L8 and converted to ts\nexport class BufferList {\n public buffered: number\n public shifted: number\n private queue: FIFO<Uint8Array>\n private _offset: number\n\n constructor() {\n this.buffered = 0\n this.shifted = 0\n this.queue = new FIFO()\n\n this._offset = 0\n }\n\n push(buffer: Uint8Array) {\n this.buffered += buffer.byteLength\n this.queue.push(buffer)\n }\n\n shiftFirst(size: number) {\n return this.buffered === 0 ? null : this._next(size)\n }\n\n shift(size: number) {\n if (size > this.buffered) return null\n if (size === 0) return EMPTY\n\n let chunk = this._next(size)\n\n if (size === chunk.byteLength) return chunk // likely case\n\n const chunks = [chunk]\n\n while ((size -= chunk.byteLength) > 0) {\n chunk = this._next(size)\n chunks.push(chunk)\n }\n\n return concatUint8Arrays(chunks)\n }\n\n private _next(size: number) {\n const buf = this.queue.peek()\n const rem = buf.byteLength - this._offset\n\n if (size >= rem) {\n const sub = this._offset ? buf.subarray(this._offset, buf.byteLength) : buf\n this.queue.shift()\n this._offset = 0\n this.buffered -= rem\n this.shifted += rem\n return sub\n }\n\n this.buffered -= size\n this.shifted += size\n\n return buf.subarray(this._offset, (this._offset += size))\n }\n}\n","/* eslint-disable no-bitwise */\n// Extracted from https://github.com/mafintosh/tar-stream/blob/master/headers.js\n// Converted to TypeScript and removed reliance on Node Buffers\n\nimport {areUint8ArraysEqual} from '../uint8arrays'\n\nconst ZERO_OFFSET = '0'.charCodeAt(0)\nconst USTAR_MAGIC = new Uint8Array([0x75, 0x73, 0x74, 0x61, 0x72, 0x00]) // ustar\\x00\nconst GNU_MAGIC = new Uint8Array([0x75, 0x73, 0x74, 0x61, 0x72, 0x20]) // ustar\\x20\nconst GNU_VER = new Uint8Array([0x20, 0x00])\nconst MAGIC_OFFSET = 257\nconst VERSION_OFFSET = 263\n\nexport type TarEntryType =\n | 'file'\n | 'link'\n | 'symlink'\n | 'directory'\n | 'block-device'\n | 'character-device'\n | 'fifo'\n | 'contiguous-file'\n\nexport interface TarHeader {\n // type of entry. defaults to file. 
can be:\n // file | link | symlink | directory | block-device\n // character-device | fifo | contiguous-file\n type: TarEntryType | null\n // entry name\n name: string\n // entry size. defaults to 0\n size: number | null\n // entry mode. defaults to 0o755 for dirs and 0o644 otherwise\n mode: number | null\n // uid of entry owner. defaults to 0\n uid: number | null\n // gid of entry owner. defaults to 0\n gid: number | null\n // last modified date for entry. defaults to now.\n mtime: Date | null\n // linked file name. only valid for type 'link' and 'symlink' entries\n linkname: string | null\n // uname of entry owner. defaults to null\n uname: string\n // gname of entry owner. defaults to null\n gname: string\n // device major version. defaults to 0\n devmajor: number | null\n // device minor version. defaults to 0\n devminor: number | null\n}\n\nexport function decode(\n buf: Uint8Array,\n filenameEncoding: BufferEncoding,\n allowUnknownFormat: boolean,\n): TarHeader | null {\n let typeflag = buf[156] === 0 ? 0 : buf[156] - ZERO_OFFSET\n\n let name = decodeStr(buf, 0, 100, filenameEncoding)\n const mode = decodeOct(buf, 100, 8)\n const uid = decodeOct(buf, 108, 8)\n const gid = decodeOct(buf, 116, 8)\n const size = decodeOct(buf, 124, 12)\n const mtime = decodeOct(buf, 136, 12)\n const type = toType(typeflag)\n const linkname = buf[157] === 0 ? null : decodeStr(buf, 157, 100, filenameEncoding)\n const uname = decodeStr(buf, 265, 32)\n const gname = decodeStr(buf, 297, 32)\n const devmajor = decodeOct(buf, 329, 8)\n const devminor = decodeOct(buf, 337, 8)\n\n const c = cksum(buf)\n\n // checksum is still initial value if header was null.\n if (c === 8 * 32) return null\n\n // valid checksum\n if (c !== decodeOct(buf, 148, 8)) {\n throw new Error('Invalid tar header. Maybe the tar is corrupted or it needs to be gunzipped?')\n }\n\n if (isUSTAR(buf)) {\n // ustar (posix) format.\n // prepend prefix, if present.\n if (buf[345]) name = `${decodeStr(buf, 345, 155, filenameEncoding)}/${name}`\n } else if (isGNU(buf)) {\n // 'gnu'/'oldgnu' format. Similar to ustar, but has support for incremental and\n // multi-volume tarballs.\n } else if (!allowUnknownFormat) {\n throw new Error('Invalid tar header: unknown format.')\n }\n\n // to support old tar versions that use trailing / to indicate dirs\n if (typeflag === 0 && name && name[name.length - 1] === '/') typeflag = 5\n\n return {\n type: type as TarEntryType,\n name,\n mode,\n uid,\n gid,\n size,\n mtime: mtime ? 
new Date(1000 * mtime) : null,\n linkname,\n uname,\n gname,\n devmajor,\n devminor,\n }\n}\n\nfunction isUSTAR(buf: Uint8Array) {\n return areUint8ArraysEqual(USTAR_MAGIC, buf.subarray(MAGIC_OFFSET, MAGIC_OFFSET + 6))\n}\n\nfunction isGNU(buf: Uint8Array) {\n return (\n areUint8ArraysEqual(GNU_MAGIC, buf.subarray(MAGIC_OFFSET, MAGIC_OFFSET + 6)) &&\n areUint8ArraysEqual(GNU_VER, buf.subarray(VERSION_OFFSET, VERSION_OFFSET + 2))\n )\n}\n\nfunction clamp(index: number, len: number, defaultValue: number) {\n if (typeof index !== 'number') return defaultValue\n index = ~~index // Coerce to integer.\n if (index >= len) return len\n if (index >= 0) return index\n index += len\n if (index >= 0) return index\n return 0\n}\nfunction toType(flag: number) {\n switch (flag) {\n case 0:\n return 'file'\n case 1:\n return 'link'\n case 2:\n return 'symlink'\n case 3:\n return 'character-device'\n case 4:\n return 'block-device'\n case 5:\n return 'directory'\n case 6:\n return 'fifo'\n case 7:\n return 'contiguous-file'\n case 72:\n return 'pax-header'\n case 55:\n return 'pax-global-header'\n case 27:\n return 'gnu-long-link-path'\n case 28:\n case 30:\n return 'gnu-long-path'\n default:\n return null\n }\n}\n\nfunction indexOf(block: Uint8Array, num: number, offset: number, end: number) {\n for (; offset < end; offset++) {\n if (block[offset] === num) return offset\n }\n return end\n}\n\nfunction cksum(block: Uint8Array) {\n let sum = 8 * 32\n for (let i = 0; i < 148; i++) sum += block[i]\n for (let j = 156; j < 512; j++) sum += block[j]\n return sum\n}\n\n/* Copied from the node-tar repo and modified to meet\n * tar-stream coding standard.\n *\n * Source: https://github.com/npm/node-tar/blob/51b6627a1f357d2eb433e7378e5f05e83b7aa6cd/lib/header.js#L349\n */\nfunction parse256(buf: Uint8Array) {\n // first byte MUST be either 80 or FF\n // 80 for positive, FF for 2's comp\n let positive\n if (buf[0] === 0x80) positive = true\n else if (buf[0] === 0xff) positive = false\n else return null\n\n // build up a base-256 tuple from the least sig to the highest\n const tuple = []\n let i\n for (i = buf.length - 1; i > 0; i--) {\n const byte = buf[i]\n if (positive) tuple.push(byte)\n else tuple.push(0xff - byte)\n }\n\n let sum = 0\n const l = tuple.length\n for (i = 0; i < l; i++) {\n sum += tuple[i] * Math.pow(256, i)\n }\n\n return positive ? 
sum : -1 * sum\n}\n\nconst decoders: {[encoding: string]: TextDecoder} = {}\nconst getCachedDecoder = (encoding: string) => {\n if (!(encoding in decoders)) {\n decoders[encoding] = new TextDecoder(encoding)\n }\n return decoders[encoding]\n}\n\nfunction toString(uint8: Uint8Array, encoding = 'utf-8') {\n return getCachedDecoder(encoding).decode(uint8)\n}\n\nfunction decodeOct(val: Uint8Array, offset: number, length: number) {\n val = val.subarray(offset, offset + length)\n offset = 0\n // If prefixed with 0x80 then parse as a base-256 integer\n if (val[offset] & 0x80) {\n return parse256(val)\n }\n // Older versions of tar can prefix with spaces\n while (offset < val.length && val[offset] === 32) offset++\n const end = clamp(indexOf(val, 32, offset, val.length), val.length, val.length)\n while (offset < end && val[offset] === 0) offset++\n if (end === offset) return 0\n return parseInt(toString(val.subarray(offset, end)), 8)\n}\n\nfunction decodeStr(val: Uint8Array, offset: number, length: number, encoding?: string) {\n return toString(val.subarray(offset, indexOf(val, 0, offset, offset + length)), encoding)\n}\n","/* eslint-disable no-bitwise */\nimport {BufferList} from './BufferList'\nimport * as headers from './headers'\nimport {type TarHeader} from './headers'\n\n// Inspired by\n// - https://github.com/alanshaw/it-tar/blob/master/src/extract.ts\n// - https://github.com/mafintosh/tar-stream/blob/master/extract.js\n\nconst emptyReadableStream = () =>\n new ReadableStream({\n pull(controller) {\n controller.close()\n },\n })\n\nexport function untar(\n stream: ReadableStream<Uint8Array>,\n options: {\n filenameEncoding?: BufferEncoding\n allowUnknownFormat?: boolean\n } = {},\n): ReadableStream<[header: TarHeader, entry: ReadableStream<Uint8Array>]> {\n const buffer = new BufferList()\n\n const reader = stream.getReader()\n\n let readingChunk = false\n return new ReadableStream({\n async pull(controller) {\n if (readingChunk) {\n return\n }\n const {done, value} = await reader.read()\n\n if (!done) {\n buffer.push(value)\n }\n\n const headerChunk = buffer.shift(512)\n if (!headerChunk) {\n throw new Error('Unexpected end of tar file. Expected 512 bytes of headers.')\n }\n\n const header = headers.decode(\n headerChunk,\n options.filenameEncoding ?? 'utf-8',\n options.allowUnknownFormat ?? false,\n )\n if (header) {\n if (header.size === null || header.size === 0 || header.type === 'directory') {\n controller.enqueue([header, emptyReadableStream()])\n } else {\n readingChunk = true\n controller.enqueue([\n header,\n entryStream(reader, header.size!,
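The `defineMigration` and `set()` doc comments embedded in the source map above outline the node-visitor style of writing a migration. Below is a minimal usage sketch assembled from those comments; the import path is assumed from this package's name, and the `product` document type is a hypothetical filter used only for illustration.

```ts
// Minimal sketch based on the defineMigration/set() doc comments above.
// Assumptions: the helpers are exported from the package root, and a
// 'product' document type exists in the target dataset.
import {defineMigration, set} from '@sanity/migrate'

export default defineMigration({
  title: 'Uppercase every string equal to "acme" to "ACME"',
  // Restricts the migration to the listed schema types (assumed type name)
  documentTypes: ['product'],
  migrate: {
    // Called for every string node in each matching document
    string(node, path, context) {
      if (node === 'acme') {
        // Returning a `set` operation patches the value at the current path
        return set(node.toUpperCase())
      }
    },
  },
})
```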