@loaders.gl/zip
Version: 4.3.3
Zip Archive Loader
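Usage, for reference: a minimal sketch inferred from the ZipLoader and ZipWriter definitions embedded in the source map below, not taken from official documentation. The 'example.zip' URL and 'hello.txt' entry are placeholders, and an async (top-level-await) module context is assumed.

// Parse a zip archive into a map of path -> ArrayBuffer
import {parse, encode} from '@loaders.gl/core';
import {ZipLoader, ZipWriter} from '@loaders.gl/zip';

const response = await fetch('example.zip'); // placeholder URL
const fileMap = await parse(response, ZipLoader);
for (const [path, data] of Object.entries(fileMap)) {
  // Per parseZipAsync below, each value is an ArrayBuffer,
  // or an Error object if that entry failed to read
  console.log(path, data instanceof ArrayBuffer ? data.byteLength : data);
}

// Encode a file map back into a zip archive (returns an ArrayBuffer)
const zipBuffer = await encode({'hello.txt': 'Hello, world'}, ZipWriter);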
Source Map (JSON)
{
"version": 3,
"sources": ["index.js", "zip-loader.js", "zip-writer.js", "lib/tar/utils.js", "lib/tar/header.js", "lib/tar/tar.js", "tar-builder.js", "parse-zip/cd-file-header.js", "parse-zip/end-of-central-directory.js", "parse-zip/search-from-the-end.js", "parse-zip/zip64-info-generation.js", "parse-zip/local-file-header.js", "parse-zip/zip-composition.js", "filesystems/zip-filesystem.js", "filesystems/IndexedArchive.js", "hash-file-utility.js"],
"sourcesContent": ["// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nexport { ZipLoader } from \"./zip-loader.js\";\nexport { ZipWriter } from \"./zip-writer.js\";\nexport { TarBuilder } from \"./tar-builder.js\";\nexport { parseZipCDFileHeader, makeZipCDHeaderIterator, signature as CD_HEADER_SIGNATURE, generateCDHeader } from \"./parse-zip/cd-file-header.js\";\nexport { parseZipLocalFileHeader, signature as localHeaderSignature, generateLocalHeader } from \"./parse-zip/local-file-header.js\";\nexport { parseEoCDRecord } from \"./parse-zip/end-of-central-directory.js\";\nexport { searchFromTheEnd } from \"./parse-zip/search-from-the-end.js\";\nexport { addOneFile, createZip } from \"./parse-zip/zip-composition.js\";\n// export type {HashElement} from './hash-file-utility';\nexport { IndexedArchive } from \"./filesystems/IndexedArchive.js\";\nexport { parseHashTable, makeHashTableFromZipHeaders, composeHashFile } from \"./hash-file-utility.js\";\nexport { ZipFileSystem, ZIP_COMPRESSION_HANDLERS } from \"./filesystems/zip-filesystem.js\";\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nimport JSZip from 'jszip';\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof \"4.3.3\" !== 'undefined' ? \"4.3.3\" : 'latest';\nexport const ZipLoader = {\n dataType: null,\n batchType: null,\n id: 'zip',\n module: 'zip',\n name: 'Zip Archive',\n version: VERSION,\n extensions: ['zip'],\n mimeTypes: ['application/zip'],\n category: 'archive',\n tests: ['PK'],\n options: {},\n parse: parseZipAsync\n};\n// TODO - Could return a map of promises, perhaps as an option...\nasync function parseZipAsync(data, options = {}) {\n const promises = [];\n const fileMap = {};\n try {\n const jsZip = new JSZip();\n const zip = await jsZip.loadAsync(data, options);\n // start to load each file in this zip\n zip.forEach((relativePath, zipEntry) => {\n const subFilename = zipEntry.name;\n const promise = loadZipEntry(jsZip, subFilename, options).then((arrayBufferOrError) => {\n fileMap[relativePath] = arrayBufferOrError;\n });\n // Ensure Promise.all doesn't ignore rejected promises.\n promises.push(promise);\n });\n await Promise.all(promises);\n return fileMap;\n }\n catch (error) {\n // @ts-ignore\n options.log.error(`Unable to read zip archive: ${error}`);\n throw error;\n }\n}\nasync function loadZipEntry(jsZip, subFilename, options = {}) {\n // jszip supports both arraybuffer and text, the main loaders.gl types\n // https://stuk.github.io/jszip/documentation/api_zipobject/async.html\n try {\n const arrayBuffer = await jsZip.file(subFilename).async(options.dataType || 'arraybuffer');\n return arrayBuffer;\n }\n catch (error) {\n options.log.error(`Unable to read ${subFilename} from zip archive: ${error}`);\n // Store error in place of data in map\n return error;\n }\n}\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nimport JSZip from 'jszip';\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof \"4.3.3\" !== 'undefined' ? 
\"4.3.3\" : 'latest';\n/**\n * Zip exporter\n */\nexport const ZipWriter = {\n name: 'Zip Archive',\n id: 'zip',\n module: 'zip',\n version: VERSION,\n extensions: ['zip'],\n category: 'archive',\n mimeTypes: ['application/zip'],\n options: {\n zip: {\n onUpdate: () => { }\n },\n jszip: {}\n },\n encode: encodeZipAsync\n};\nasync function encodeZipAsync(fileMap, options = {}) {\n const jsZip = new JSZip();\n // add files to the zip\n for (const subFileName in fileMap) {\n const subFileData = fileMap[subFileName];\n // jszip supports both arraybuffer and string data (the main loaders.gl types)\n // https://stuk.github.io/jszip/documentation/api_zipobject/async.html\n jsZip.file(subFileName, subFileData, options?.jszip || {});\n }\n const zipOptions = { ...ZipWriter.options.zip, ...options?.zip };\n const jszipOptions = { ...ZipWriter.options?.jszip, ...options.jszip };\n try {\n return await jsZip.generateAsync({ ...jszipOptions, type: 'arraybuffer' }, // generate an arraybuffer\n zipOptions.onUpdate);\n }\n catch (error) {\n options.log.error(`Unable to encode zip archive: ${error}`);\n throw error;\n }\n}\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\n// This file is derived from the tar-js code base under MIT license\n// See https://github.com/beatgammit/tar-js/blob/master/LICENSE\n/*\n * tar-js\n * MIT (c) 2011 T. Jameson Little\n */\n/**\n * Returns the memory area specified by length\n * @param length\n * @returns {Uint8Array}\n */\nexport function clean(length) {\n let i;\n const buffer = new Uint8Array(length);\n for (i = 0; i < length; i += 1) {\n buffer[i] = 0;\n }\n return buffer;\n}\n/**\n * Converting data to a string\n * @param num\n * @param bytes\n * @param base\n * @returns string\n */\nexport function pad(num, bytes, base) {\n const numStr = num.toString(base || 8);\n return '000000000000'.substr(numStr.length + 12 - bytes) + numStr;\n}\n/**\n * Converting input to binary data\n * @param input\n * @param out\n * @param offset\n * @returns {Uint8Array}\n */\nexport function stringToUint8(input, out, offset) {\n let i;\n let length;\n out = out || clean(input.length);\n offset = offset || 0;\n for (i = 0, length = input.length; i < length; i += 1) {\n out[offset] = input.charCodeAt(i);\n offset += 1;\n }\n return out;\n}\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\n// This file is derived from the tar-js code base under MIT license\n// See https://github.com/beatgammit/tar-js/blob/master/LICENSE\n/*\n * tar-js\n * MIT (c) 2011 T. 
Jameson Little\n */\n/* eslint-disable */\nimport * as utils from \"./utils.js\";\n/*\nstruct posix_header { // byte offset\n char name[100]; // 0\n char mode[8]; // 100\n char uid[8]; // 108\n char gid[8]; // 116\n char size[12]; // 124\n char mtime[12]; // 136\n char chksum[8]; // 148\n char typeflag; // 156\n char linkname[100]; // 157\n char magic[6]; // 257\n char version[2]; // 263\n char uname[32]; // 265\n char gname[32]; // 297\n char devmajor[8]; // 329\n char devminor[8]; // 337\n char prefix[155]; // 345\n // 500\n};\n*/\nconst structure = {\n fileName: 100,\n fileMode: 8,\n uid: 8,\n gid: 8,\n fileSize: 12,\n mtime: 12,\n checksum: 8,\n type: 1,\n linkName: 100,\n ustar: 8,\n owner: 32,\n group: 32,\n majorNumber: 8,\n minorNumber: 8,\n filenamePrefix: 155,\n padding: 12\n};\n/**\n * Getting the header\n * @param data\n * @param [cb]\n * @returns {Uint8Array} | Array\n */\nexport function format(data, cb) {\n const buffer = utils.clean(512);\n let offset = 0;\n Object.entries(structure).forEach(([field, length]) => {\n const str = data[field] || '';\n let i;\n let fieldLength;\n for (i = 0, fieldLength = str.length; i < fieldLength; i += 1) {\n buffer[offset] = str.charCodeAt(i);\n offset += 1;\n }\n // space it out with nulls\n offset += length - i;\n });\n if (typeof cb === 'function') {\n return cb(buffer, offset);\n }\n return buffer;\n}\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\n// This file is derived from the tar-js code base under MIT license\n// See https://github.com/beatgammit/tar-js/blob/master/LICENSE\n/*\n * tar-js\n * MIT (c) 2011 T. Jameson Little\n */\nimport { clean, pad, stringToUint8 } from \"./utils.js\";\nimport { format } from \"./header.js\";\nlet blockSize;\nlet headerLength;\nlet inputLength;\nconst recordSize = 512;\nclass Tar {\n written;\n out;\n blocks = [];\n length;\n /**\n * @param [recordsPerBlock]\n */\n constructor(recordsPerBlock) {\n this.written = 0;\n blockSize = (recordsPerBlock || 20) * recordSize;\n this.out = clean(blockSize);\n this.blocks = [];\n this.length = 0;\n this.save = this.save.bind(this);\n this.clear = this.clear.bind(this);\n this.append = this.append.bind(this);\n }\n /**\n * Append a file to the tar archive\n * @param filepath\n * @param input\n * @param [opts]\n */\n // eslint-disable-next-line complexity\n append(filepath, input, opts) {\n let checksum;\n if (typeof input === 'string') {\n input = stringToUint8(input);\n }\n else if (input.constructor && input.constructor !== Uint8Array.prototype.constructor) {\n // @ts-ignore\n const errorInputMatch = /function\\s*([$A-Za-z_][0-9A-Za-z_]*)\\s*\\(/.exec(input.constructor.toString());\n const errorInput = errorInputMatch && errorInputMatch[1];\n const errorMessage = `Invalid input type. 
You gave me: ${errorInput}`;\n throw errorMessage;\n }\n opts = opts || {};\n const mode = opts.mode || parseInt('777', 8) & 0xfff;\n const mtime = opts.mtime || Math.floor(Number(new Date()) / 1000);\n const uid = opts.uid || 0;\n const gid = opts.gid || 0;\n const data = {\n fileName: filepath,\n fileMode: pad(mode, 7),\n uid: pad(uid, 7),\n gid: pad(gid, 7),\n fileSize: pad(input.length, 11),\n mtime: pad(mtime, 11),\n checksum: ' ',\n // 0 = just a file\n type: '0',\n ustar: 'ustar ',\n owner: opts.owner || '',\n group: opts.group || ''\n };\n // calculate the checksum\n checksum = 0;\n Object.keys(data).forEach((key) => {\n let i;\n const value = data[key];\n let length;\n for (i = 0, length = value.length; i < length; i += 1) {\n checksum += value.charCodeAt(i);\n }\n });\n data.checksum = `${pad(checksum, 6)}\\u0000 `;\n const headerArr = format(data);\n headerLength = Math.ceil(headerArr.length / recordSize) * recordSize;\n inputLength = Math.ceil(input.length / recordSize) * recordSize;\n this.blocks.push({\n header: headerArr,\n input,\n headerLength,\n inputLength\n });\n }\n /**\n * Compiling data to a Blob object\n * @returns {Blob}\n */\n save() {\n const buffers = [];\n const chunks = new Array();\n let length = 0;\n const max = Math.pow(2, 20);\n let chunk = new Array();\n this.blocks.forEach((b = []) => {\n if (length + b.headerLength + b.inputLength > max) {\n chunks.push({ blocks: chunk, length });\n chunk = [];\n length = 0;\n }\n chunk.push(b);\n length += b.headerLength + b.inputLength;\n });\n chunks.push({ blocks: chunk, length });\n chunks.forEach((c = []) => {\n const buffer = new Uint8Array(c.length);\n let written = 0;\n c.blocks.forEach((b = []) => {\n buffer.set(b.header, written);\n written += b.headerLength;\n buffer.set(b.input, written);\n written += b.inputLength;\n });\n buffers.push(buffer);\n });\n buffers.push(new Uint8Array(2 * recordSize));\n return new Blob(buffers, { type: 'octet/stream' });\n }\n /**\n * Clear the data by its blocksize\n */\n clear() {\n this.written = 0;\n this.out = clean(blockSize);\n }\n}\nexport default Tar;\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nimport Tar from \"./lib/tar/tar.js\";\nconst TAR_BUILDER_OPTIONS = {\n recordsPerBlock: 20\n};\n/**\n * Build a tar file by adding files\n */\nexport class TarBuilder {\n static get properties() {\n return {\n id: 'tar',\n name: 'TAR',\n extensions: ['tar'],\n mimeTypes: ['application/x-tar'],\n builder: TarBuilder,\n options: TAR_BUILDER_OPTIONS\n };\n }\n options;\n tape;\n count = 0;\n constructor(options) {\n this.options = { ...TAR_BUILDER_OPTIONS, ...options };\n this.tape = new Tar(this.options.recordsPerBlock);\n }\n /** Adds a file to the archive. 
*/\n addFile(filename, buffer) {\n this.tape.append(filename, new Uint8Array(buffer));\n this.count++;\n }\n async build() {\n return new Response(this.tape.save()).arrayBuffer();\n }\n}\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nimport { DataViewFile, compareArrayBuffers, concatenateArrayBuffers } from '@loaders.gl/loader-utils';\nimport { parseEoCDRecord } from \"./end-of-central-directory.js\";\nimport { createZip64Info, setFieldToNumber } from \"./zip64-info-generation.js\";\n// offsets accroding to https://en.wikipedia.org/wiki/ZIP_(file_format)\nconst CD_COMPRESSED_SIZE_OFFSET = 20;\nconst CD_UNCOMPRESSED_SIZE_OFFSET = 24;\nconst CD_FILE_NAME_LENGTH_OFFSET = 28;\nconst CD_EXTRA_FIELD_LENGTH_OFFSET = 30;\nconst CD_START_DISK_OFFSET = 32;\nconst CD_LOCAL_HEADER_OFFSET_OFFSET = 42;\nconst CD_FILE_NAME_OFFSET = 46n;\nexport const signature = new Uint8Array([0x50, 0x4b, 0x01, 0x02]);\n/**\n * Parses central directory file header of zip file\n * @param headerOffset - offset in the archive where header starts\n * @param buffer - buffer containing whole array\n * @returns Info from the header\n */\nexport const parseZipCDFileHeader = async (headerOffset, file) => {\n if (headerOffset >= file.length) {\n return null;\n }\n const mainHeader = new DataView(await file.slice(headerOffset, headerOffset + CD_FILE_NAME_OFFSET));\n const magicBytes = mainHeader.buffer.slice(0, 4);\n if (!compareArrayBuffers(magicBytes, signature.buffer)) {\n return null;\n }\n const compressedSize = BigInt(mainHeader.getUint32(CD_COMPRESSED_SIZE_OFFSET, true));\n const uncompressedSize = BigInt(mainHeader.getUint32(CD_UNCOMPRESSED_SIZE_OFFSET, true));\n const extraFieldLength = mainHeader.getUint16(CD_EXTRA_FIELD_LENGTH_OFFSET, true);\n const startDisk = BigInt(mainHeader.getUint16(CD_START_DISK_OFFSET, true));\n const fileNameLength = mainHeader.getUint16(CD_FILE_NAME_LENGTH_OFFSET, true);\n const additionalHeader = await file.slice(headerOffset + CD_FILE_NAME_OFFSET, headerOffset + CD_FILE_NAME_OFFSET + BigInt(fileNameLength + extraFieldLength));\n const filenameBytes = additionalHeader.slice(0, fileNameLength);\n const fileName = new TextDecoder().decode(filenameBytes);\n const extraOffset = headerOffset + CD_FILE_NAME_OFFSET + BigInt(fileNameLength);\n const oldFormatOffset = mainHeader.getUint32(CD_LOCAL_HEADER_OFFSET_OFFSET, true);\n const localHeaderOffset = BigInt(oldFormatOffset);\n const extraField = new DataView(additionalHeader.slice(fileNameLength, additionalHeader.byteLength));\n // looking for info that might be also be in zip64 extra field\n const zip64data = {\n uncompressedSize,\n compressedSize,\n localHeaderOffset,\n startDisk\n };\n const res = findZip64DataInExtra(zip64data, extraField);\n return {\n ...zip64data,\n ...res,\n extraFieldLength,\n fileNameLength,\n fileName,\n extraOffset\n };\n};\n/**\n * Create iterator over files of zip archive\n * @param fileProvider - file provider that provider random access to the file\n */\nexport async function* makeZipCDHeaderIterator(fileProvider) {\n const { cdStartOffset, cdByteSize } = await parseEoCDRecord(fileProvider);\n const centralDirectory = new DataViewFile(new DataView(await fileProvider.slice(cdStartOffset, cdStartOffset + cdByteSize)));\n let cdHeader = await parseZipCDFileHeader(0n, centralDirectory);\n while (cdHeader) {\n yield cdHeader;\n cdHeader = await parseZipCDFileHeader(cdHeader.extraOffset + BigInt(cdHeader.extraFieldLength), centralDirectory);\n }\n}\n/**\n * returns the number 
written in the provided bytes\n * @param bytes two bytes containing the number\n * @returns the number written in the provided bytes\n */\nconst getUint16 = (...bytes) => {\n return bytes[0] + bytes[1] * 16;\n};\n/**\n * reads all nesessary data from zip64 record in the extra data\n * @param zip64data values that might be in zip64 record\n * @param extraField full extra data\n * @returns data read from zip64\n */\nconst findZip64DataInExtra = (zip64data, extraField) => {\n const zip64dataList = findExpectedData(zip64data);\n const zip64DataRes = {};\n if (zip64dataList.length > 0) {\n // total length of data in zip64 notation in bytes\n const zip64chunkSize = zip64dataList.reduce((sum, curr) => sum + curr.length, 0);\n // we're looking for the zip64 nontation header (0x0001)\n // and a size field with a correct value next to it\n const offsetInExtraData = new Uint8Array(extraField.buffer).findIndex((_val, i, arr) => getUint16(arr[i], arr[i + 1]) === 0x0001 &&\n getUint16(arr[i + 2], arr[i + 3]) === zip64chunkSize);\n // then we read all the nesessary fields from the zip64 data\n let bytesRead = 0;\n for (const note of zip64dataList) {\n const offset = bytesRead;\n zip64DataRes[note.name] = extraField.getBigUint64(offsetInExtraData + 4 + offset, true);\n bytesRead = offset + note.length;\n }\n }\n return zip64DataRes;\n};\n/**\n * frind data that's expected to be in zip64\n * @param zip64data values that might be in zip64 record\n * @returns zip64 data description\n */\nconst findExpectedData = (zip64data) => {\n // We define fields that should be in zip64 data\n const zip64dataList = [];\n if (zip64data.uncompressedSize === BigInt(0xffffffff)) {\n zip64dataList.push({ name: 'uncompressedSize', length: 8 });\n }\n if (zip64data.compressedSize === BigInt(0xffffffff)) {\n zip64dataList.push({ name: 'compressedSize', length: 8 });\n }\n if (zip64data.localHeaderOffset === BigInt(0xffffffff)) {\n zip64dataList.push({ name: 'localHeaderOffset', length: 8 });\n }\n if (zip64data.startDisk === BigInt(0xffffffff)) {\n zip64dataList.push({ name: 'startDisk', length: 4 });\n }\n return zip64dataList;\n};\n/**\n * generates cd header for the file\n * @param options info that can be placed into cd header\n * @returns buffer with header\n */\nexport function generateCDHeader(options) {\n const optionsToUse = {\n ...options,\n fnlength: options.fileName.length,\n extraLength: 0\n };\n let zip64header = new ArrayBuffer(0);\n const optionsToZip64 = {};\n if (optionsToUse.offset >= 0xffffffff) {\n optionsToZip64.offset = optionsToUse.offset;\n optionsToUse.offset = BigInt(0xffffffff);\n }\n if (optionsToUse.length >= 0xffffffff) {\n optionsToZip64.size = optionsToUse.length;\n optionsToUse.length = 0xffffffff;\n }\n if (Object.keys(optionsToZip64).length) {\n zip64header = createZip64Info(optionsToZip64);\n optionsToUse.extraLength = zip64header.byteLength;\n }\n const header = new DataView(new ArrayBuffer(Number(CD_FILE_NAME_OFFSET)));\n for (const field of ZIP_HEADER_FIELDS) {\n setFieldToNumber(header, field.size, field.offset, optionsToUse[field.name ?? ''] ?? field.default ?? 
0);\n }\n const encodedName = new TextEncoder().encode(optionsToUse.fileName);\n const resHeader = concatenateArrayBuffers(header.buffer, encodedName, zip64header);\n return resHeader;\n}\n/** Fields map */\nconst ZIP_HEADER_FIELDS = [\n // Central directory file header signature = 0x02014b50\n {\n offset: 0,\n size: 4,\n default: new DataView(signature.buffer).getUint32(0, true)\n },\n // Version made by\n {\n offset: 4,\n size: 2,\n default: 45\n },\n // Version needed to extract (minimum)\n {\n offset: 6,\n size: 2,\n default: 45\n },\n // General purpose bit flag\n {\n offset: 8,\n size: 2,\n default: 0\n },\n // Compression method\n {\n offset: 10,\n size: 2,\n default: 0\n },\n // File last modification time\n {\n offset: 12,\n size: 2,\n default: 0\n },\n // File last modification date\n {\n offset: 14,\n size: 2,\n default: 0\n },\n // CRC-32 of uncompressed data\n {\n offset: 16,\n size: 4,\n name: 'crc32'\n },\n // Compressed size (or 0xffffffff for ZIP64)\n {\n offset: 20,\n size: 4,\n name: 'length'\n },\n // Uncompressed size (or 0xffffffff for ZIP64)\n {\n offset: 24,\n size: 4,\n name: 'length'\n },\n // File name length (n)\n {\n offset: 28,\n size: 2,\n name: 'fnlength'\n },\n // Extra field length (m)\n {\n offset: 30,\n size: 2,\n default: 0,\n name: 'extraLength'\n },\n // File comment length (k)\n {\n offset: 32,\n size: 2,\n default: 0\n },\n // Disk number where file starts (or 0xffff for ZIP64)\n {\n offset: 34,\n size: 2,\n default: 0\n },\n // Internal file attributes\n {\n offset: 36,\n size: 2,\n default: 0\n },\n // External file attributes\n {\n offset: 38,\n size: 4,\n default: 0\n },\n // Relative offset of local file header\n {\n offset: 42,\n size: 4,\n name: 'offset'\n }\n];\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nimport { compareArrayBuffers, concatenateArrayBuffers } from '@loaders.gl/loader-utils';\nimport { searchFromTheEnd } from \"./search-from-the-end.js\";\nimport { setFieldToNumber } from \"./zip64-info-generation.js\";\nconst eoCDSignature = new Uint8Array([0x50, 0x4b, 0x05, 0x06]);\nconst zip64EoCDLocatorSignature = new Uint8Array([0x50, 0x4b, 0x06, 0x07]);\nconst zip64EoCDSignature = new Uint8Array([0x50, 0x4b, 0x06, 0x06]);\n// offsets accroding to https://en.wikipedia.org/wiki/ZIP_(file_format)\nconst CD_RECORDS_NUMBER_OFFSET = 8n;\nconst CD_RECORDS_NUMBER_ON_DISC_OFFSET = 10n;\nconst CD_CD_BYTE_SIZE_OFFSET = 12n;\nconst CD_START_OFFSET_OFFSET = 16n;\nconst CD_COMMENT_OFFSET = 22n;\nconst ZIP64_EOCD_START_OFFSET_OFFSET = 8n;\nconst ZIP64_CD_RECORDS_NUMBER_OFFSET = 24n;\nconst ZIP64_CD_RECORDS_NUMBER_ON_DISC_OFFSET = 32n;\nconst ZIP64_CD_CD_BYTE_SIZE_OFFSET = 40n;\nconst ZIP64_CD_START_OFFSET_OFFSET = 48n;\nconst ZIP64_COMMENT_OFFSET = 56n;\n/**\n * Parses end of central directory record of zip file\n * @param file - FileProvider instance\n * @returns Info from the header\n */\nexport const parseEoCDRecord = async (file) => {\n const zipEoCDOffset = await searchFromTheEnd(file, eoCDSignature);\n let cdRecordsNumber = BigInt(await file.getUint16(zipEoCDOffset + CD_RECORDS_NUMBER_OFFSET));\n let cdByteSize = BigInt(await file.getUint32(zipEoCDOffset + CD_CD_BYTE_SIZE_OFFSET));\n let cdStartOffset = BigInt(await file.getUint32(zipEoCDOffset + CD_START_OFFSET_OFFSET));\n let zip64EoCDLocatorOffset = zipEoCDOffset - 20n;\n let zip64EoCDOffset = 0n;\n const magicBytes = await file.slice(zip64EoCDLocatorOffset, zip64EoCDLocatorOffset + 4n);\n if (compareArrayBuffers(magicBytes, 
zip64EoCDLocatorSignature)) {\n zip64EoCDOffset = await file.getBigUint64(zip64EoCDLocatorOffset + ZIP64_EOCD_START_OFFSET_OFFSET);\n const endOfCDMagicBytes = await file.slice(zip64EoCDOffset, zip64EoCDOffset + 4n);\n if (!compareArrayBuffers(endOfCDMagicBytes, zip64EoCDSignature.buffer)) {\n throw new Error('zip64 EoCD not found');\n }\n cdRecordsNumber = await file.getBigUint64(zip64EoCDOffset + ZIP64_CD_RECORDS_NUMBER_OFFSET);\n cdByteSize = await file.getBigUint64(zip64EoCDOffset + ZIP64_CD_CD_BYTE_SIZE_OFFSET);\n cdStartOffset = await file.getBigUint64(zip64EoCDOffset + ZIP64_CD_START_OFFSET_OFFSET);\n }\n else {\n zip64EoCDLocatorOffset = 0n;\n }\n return {\n cdRecordsNumber,\n cdStartOffset,\n cdByteSize,\n offsets: {\n zip64EoCDOffset,\n zip64EoCDLocatorOffset,\n zipEoCDOffset\n }\n };\n};\n/**\n * updates EoCD record to add more files to the archieve\n * @param eocdBody buffer containing header\n * @param oldEoCDOffsets info read from EoCD record befor updating\n * @param newCDStartOffset CD start offset to be updated\n * @param eocdStartOffset EoCD start offset to be updated\n * @returns new EoCD header\n */\nexport function updateEoCD(eocdBody, oldEoCDOffsets, newCDStartOffset, eocdStartOffset, newCDRecordsNumber) {\n const eocd = new DataView(eocdBody);\n const classicEoCDOffset = oldEoCDOffsets.zip64EoCDOffset\n ? oldEoCDOffsets.zipEoCDOffset - oldEoCDOffsets.zip64EoCDOffset\n : 0n;\n // updating classic EoCD record with new CD records number in general and on disc\n if (Number(newCDRecordsNumber) <= 0xffff) {\n setFieldToNumber(eocd, 2, classicEoCDOffset + CD_RECORDS_NUMBER_OFFSET, newCDRecordsNumber);\n setFieldToNumber(eocd, 2, classicEoCDOffset + CD_RECORDS_NUMBER_ON_DISC_OFFSET, newCDRecordsNumber);\n }\n // updating zip64 EoCD record with new size of CD\n if (eocdStartOffset - newCDStartOffset <= 0xffffffff) {\n setFieldToNumber(eocd, 4, classicEoCDOffset + CD_CD_BYTE_SIZE_OFFSET, eocdStartOffset - newCDStartOffset);\n }\n // updating classic EoCD record with new CD start offset\n if (newCDStartOffset < 0xffffffff) {\n setFieldToNumber(eocd, 4, classicEoCDOffset + CD_START_OFFSET_OFFSET, newCDStartOffset);\n }\n // updating zip64 EoCD locator and record with new EoCD record start offset and cd records number\n if (oldEoCDOffsets.zip64EoCDLocatorOffset && oldEoCDOffsets.zip64EoCDOffset) {\n // updating zip64 EoCD locator with new EoCD record start offset\n const locatorOffset = oldEoCDOffsets.zip64EoCDLocatorOffset - oldEoCDOffsets.zip64EoCDOffset;\n setFieldToNumber(eocd, 8, locatorOffset + ZIP64_EOCD_START_OFFSET_OFFSET, eocdStartOffset);\n // updating zip64 EoCD record with new cd start offset\n setFieldToNumber(eocd, 8, ZIP64_CD_START_OFFSET_OFFSET, newCDStartOffset);\n // updating zip64 EoCD record with new cd records number\n setFieldToNumber(eocd, 8, ZIP64_CD_RECORDS_NUMBER_OFFSET, newCDRecordsNumber);\n setFieldToNumber(eocd, 8, ZIP64_CD_RECORDS_NUMBER_ON_DISC_OFFSET, newCDRecordsNumber);\n // updating zip64 EoCD record with new size of CD\n setFieldToNumber(eocd, 8, ZIP64_CD_CD_BYTE_SIZE_OFFSET, eocdStartOffset - newCDStartOffset);\n }\n return new Uint8Array(eocd.buffer);\n}\n/**\n * generates EoCD record\n * @param options data to generate EoCD record\n * @returns ArrayBuffer with EoCD record\n */\nexport function generateEoCD(options) {\n const header = new DataView(new ArrayBuffer(Number(CD_COMMENT_OFFSET)));\n for (const field of EOCD_FIELDS) {\n setFieldToNumber(header, field.size, field.offset, options[field.name ?? ''] ?? field.default ?? 
0);\n }\n const locator = generateZip64InfoLocator(options);\n const zip64Record = generateZip64Info(options);\n return concatenateArrayBuffers(zip64Record, locator, header.buffer);\n}\n/** standart EoCD fields */\nconst EOCD_FIELDS = [\n // End of central directory signature = 0x06054b50\n {\n offset: 0,\n size: 4,\n default: new DataView(eoCDSignature.buffer).getUint32(0, true)\n },\n // Number of this disk (or 0xffff for ZIP64)\n {\n offset: 4,\n size: 2,\n default: 0\n },\n // Disk where central directory starts (or 0xffff for ZIP64)\n {\n offset: 6,\n size: 2,\n default: 0\n },\n // Number of central directory records on this disk (or 0xffff for ZIP64)\n {\n offset: 8,\n size: 2,\n name: 'recordsNumber'\n },\n // Total number of central directory records (or 0xffff for ZIP64)\n {\n offset: 10,\n size: 2,\n name: 'recordsNumber'\n },\n // Size of central directory (bytes) (or 0xffffffff for ZIP64)\n {\n offset: 12,\n size: 4,\n name: 'cdSize'\n },\n // Offset of start of central directory, relative to start of archive (or 0xffffffff for ZIP64)\n {\n offset: 16,\n size: 4,\n name: 'cdOffset'\n },\n // Comment length (n)\n {\n offset: 20,\n size: 2,\n default: 0\n }\n];\n/**\n * generates eocd zip64 record\n * @param options data to generate eocd zip64 record\n * @returns buffer with eocd zip64 record\n */\nfunction generateZip64Info(options) {\n const record = new DataView(new ArrayBuffer(Number(ZIP64_COMMENT_OFFSET)));\n for (const field of ZIP64_EOCD_FIELDS) {\n setFieldToNumber(record, field.size, field.offset, options[field.name ?? ''] ?? field.default ?? 0);\n }\n return record.buffer;\n}\n/**\n * generates eocd zip64 record locator\n * @param options data to generate eocd zip64 record\n * @returns buffer with eocd zip64 record\n */\nfunction generateZip64InfoLocator(options) {\n const locator = new DataView(new ArrayBuffer(Number(20)));\n for (const field of ZIP64_EOCD_LOCATOR_FIELDS) {\n setFieldToNumber(locator, field.size, field.offset, options[field.name ?? ''] ?? field.default ?? 
0);\n }\n return locator.buffer;\n}\n/** zip64 EoCD record locater fields */\nconst ZIP64_EOCD_LOCATOR_FIELDS = [\n // zip64 end of central dir locator signature\n {\n offset: 0,\n size: 4,\n default: new DataView(zip64EoCDLocatorSignature.buffer).getUint32(0, true)\n },\n // number of the disk with the start of the zip64 end of\n {\n offset: 4,\n size: 4,\n default: 0\n },\n // start of the zip64 end of central directory\n {\n offset: 8,\n size: 8,\n name: 'eoCDStart'\n },\n // total number of disks\n {\n offset: 16,\n size: 4,\n default: 1\n }\n];\n/** zip64 EoCD recodrd fields */\nconst ZIP64_EOCD_FIELDS = [\n // End of central directory signature = 0x06064b50\n {\n offset: 0,\n size: 4,\n default: new DataView(zip64EoCDSignature.buffer).getUint32(0, true)\n },\n // Size of the EOCD64 minus 12\n {\n offset: 4,\n size: 8,\n default: 44\n },\n // Version made by\n {\n offset: 12,\n size: 2,\n default: 45\n },\n // Version needed to extract (minimum)\n {\n offset: 14,\n size: 2,\n default: 45\n },\n // Number of this disk\n {\n offset: 16,\n size: 4,\n default: 0\n },\n // Disk where central directory starts\n {\n offset: 20,\n size: 4,\n default: 0\n },\n // Number of central directory records on this disk\n {\n offset: 24,\n size: 8,\n name: 'recordsNumber'\n },\n // Total number of central directory records\n {\n offset: 32,\n size: 8,\n name: 'recordsNumber'\n },\n // Size of central directory (bytes)\n {\n offset: 40,\n size: 8,\n name: 'cdSize'\n },\n // Offset of start of central directory, relative to start of archive\n {\n offset: 48,\n size: 8,\n name: 'cdOffset'\n }\n];\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nconst buffLength = 1024;\n/**\n * looking for the last occurrence of the provided\n * @param file\n * @param target\n * @returns\n */\nexport const searchFromTheEnd = async (file, target) => {\n const searchWindow = [\n await file.getUint8(file.length - 1n),\n await file.getUint8(file.length - 2n),\n await file.getUint8(file.length - 3n),\n undefined\n ];\n let targetOffset = -1;\n // looking for the last record in the central directory\n let point = file.length - 4n;\n do {\n const prevPoint = point;\n point -= BigInt(buffLength);\n point = point >= 0n ? point : 0n;\n const buff = new Uint8Array(await file.slice(point, prevPoint));\n for (let i = buff.length - 1; i > -1; i--) {\n searchWindow[3] = searchWindow[2];\n searchWindow[2] = searchWindow[1];\n searchWindow[1] = searchWindow[0];\n searchWindow[0] = buff[i];\n if (searchWindow.every((val, index) => val === target[index])) {\n targetOffset = i;\n break;\n }\n }\n } while (targetOffset === -1 && point > 0n);\n return point + BigInt(targetOffset);\n};\n", "import { concatenateArrayBuffers } from '@loaders.gl/loader-utils';\nexport const signature = new Uint8Array([0x01, 0x00]);\n/**\n * creates zip64 extra field\n * @param options info that can be placed into zip64 field\n * @returns buffer with field\n */\nexport function createZip64Info(options) {\n const optionsToUse = {\n ...options,\n zip64Length: (options.offset ? 1 : 0) * 8 + (options.size ? 1 : 0) * 16\n };\n const arraysToConcat = [];\n for (const field of ZIP64_FIELDS) {\n if (!optionsToUse[field.name ?? ''] && !field.default) {\n continue; // eslint-disable-line no-continue\n }\n const newValue = new DataView(new ArrayBuffer(field.size));\n NUMBER_SETTERS[field.size](newValue, 0, optionsToUse[field.name ?? ''] ?? 
field.default);\n arraysToConcat.push(newValue.buffer);\n }\n return concatenateArrayBuffers(...arraysToConcat);\n}\n/**\n * Writes values into buffer according to the bytes amount\n * @param header header where to write the data\n * @param fieldSize size of the field in bytes\n * @param fieldOffset offset of the field\n * @param value value to be written\n */\nexport function setFieldToNumber(header, fieldSize, fieldOffset, value) {\n NUMBER_SETTERS[fieldSize](header, Number(fieldOffset), value);\n}\n/** functions to write values into buffer according to the bytes amount */\nconst NUMBER_SETTERS = {\n 2: (header, offset, value) => {\n header.setUint16(offset, Number(value > 0xffff ? 0xffff : value), true);\n },\n 4: (header, offset, value) => {\n header.setUint32(offset, Number(value > 0xffffffff ? 0xffffffff : value), true);\n },\n 8: (header, offset, value) => {\n header.setBigUint64(offset, BigInt(value), true);\n }\n};\n/** zip64 info fields description, we need it as a pattern to build a zip64 info */\nconst ZIP64_FIELDS = [\n // Header ID 0x0001\n {\n size: 2,\n default: new DataView(signature.buffer).getUint16(0, true)\n },\n // Size of the extra field chunk (8, 16, 24 or 28)\n {\n size: 2,\n name: 'zip64Length'\n },\n // Original uncompressed file size\n {\n size: 8,\n name: 'size'\n },\n // Size of compressed data\n {\n size: 8,\n name: 'size'\n },\n // Offset of local header record\n {\n size: 8,\n name: 'offset'\n }\n];\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nimport { compareArrayBuffers, concatenateArrayBuffers } from '@loaders.gl/loader-utils';\nimport { createZip64Info, setFieldToNumber } from \"./zip64-info-generation.js\";\n// offsets accroding to https://en.wikipedia.org/wiki/ZIP_(file_format)\nconst COMPRESSION_METHOD_OFFSET = 8;\nconst COMPRESSED_SIZE_OFFSET = 18;\nconst UNCOMPRESSED_SIZE_OFFSET = 22;\nconst FILE_NAME_LENGTH_OFFSET = 26;\nconst EXTRA_FIELD_LENGTH_OFFSET = 28;\nconst FILE_NAME_OFFSET = 30n;\nexport const signature = new Uint8Array([0x50, 0x4b, 0x03, 0x04]);\n/**\n * Parses local file header of zip file\n * @param headerOffset - offset in the archive where header starts\n * @param buffer - buffer containing whole array\n * @returns Info from the header\n */\nexport const parseZipLocalFileHeader = async (headerOffset, file) => {\n const mainHeader = new DataView(await file.slice(headerOffset, headerOffset + FILE_NAME_OFFSET));\n const magicBytes = mainHeader.buffer.slice(0, 4);\n if (!compareArrayBuffers(magicBytes, signature)) {\n return null;\n }\n const fileNameLength = mainHeader.getUint16(FILE_NAME_LENGTH_OFFSET, true);\n const extraFieldLength = mainHeader.getUint16(EXTRA_FIELD_LENGTH_OFFSET, true);\n const additionalHeader = await file.slice(headerOffset + FILE_NAME_OFFSET, headerOffset + FILE_NAME_OFFSET + BigInt(fileNameLength + extraFieldLength));\n const fileNameBuffer = additionalHeader.slice(0, fileNameLength);\n const extraDataBuffer = new DataView(additionalHeader.slice(fileNameLength, additionalHeader.byteLength));\n const fileName = new TextDecoder().decode(fileNameBuffer).split('\\\\').join('/');\n let fileDataOffset = headerOffset + FILE_NAME_OFFSET + BigInt(fileNameLength + extraFieldLength);\n const compressionMethod = mainHeader.getUint16(COMPRESSION_METHOD_OFFSET, true);\n let compressedSize = BigInt(mainHeader.getUint32(COMPRESSED_SIZE_OFFSET, true)); // add zip 64 logic\n let uncompressedSize = BigInt(mainHeader.getUint32(UNCOMPRESSED_SIZE_OFFSET, true)); // add zip 64 logic\n let 
offsetInZip64Data = 4;\n // looking for info that might be also be in zip64 extra field\n if (uncompressedSize === BigInt(0xffffffff)) {\n uncompressedSize = extraDataBuffer.getBigUint64(offsetInZip64Data, true);\n offsetInZip64Data += 8;\n }\n if (compressedSize === BigInt(0xffffffff)) {\n compressedSize = extraDataBuffer.getBigUint64(offsetInZip64Data, true);\n offsetInZip64Data += 8;\n }\n if (fileDataOffset === BigInt(0xffffffff)) {\n fileDataOffset = extraDataBuffer.getBigUint64(offsetInZip64Data, true); // setting it to the one from zip64\n }\n return {\n fileNameLength,\n fileName,\n extraFieldLength,\n fileDataOffset,\n compressedSize,\n compressionMethod\n };\n};\n/**\n * generates local header for the file\n * @param options info that can be placed into local header\n * @returns buffer with header\n */\nexport function generateLocalHeader(options) {\n const optionsToUse = {\n ...options,\n extraLength: 0,\n fnlength: options.fileName.length\n };\n let zip64header = new ArrayBuffer(0);\n const optionsToZip64 = {};\n if (optionsToUse.length >= 0xffffffff) {\n optionsToZip64.size = optionsToUse.length;\n optionsToUse.length = 0xffffffff;\n }\n if (Object.keys(optionsToZip64).length) {\n zip64header = createZip64Info(optionsToZip64);\n optionsToUse.extraLength = zip64header.byteLength;\n }\n // base length without file name and extra info is static\n const header = new DataView(new ArrayBuffer(Number(FILE_NAME_OFFSET)));\n for (const field of ZIP_HEADER_FIELDS) {\n setFieldToNumber(header, field.size, field.offset, optionsToUse[field.name ?? ''] ?? field.default ?? 0);\n }\n const encodedName = new TextEncoder().encode(optionsToUse.fileName);\n const resHeader = concatenateArrayBuffers(header.buffer, encodedName, zip64header);\n return resHeader;\n}\nconst ZIP_HEADER_FIELDS = [\n // Local file header signature = 0x04034b50\n {\n offset: 0,\n size: 4,\n default: new DataView(signature.buffer).getUint32(0, true)\n },\n // Version needed to extract (minimum)\n {\n offset: 4,\n size: 2,\n default: 45\n },\n // General purpose bit flag\n {\n offset: 6,\n size: 2,\n default: 0\n },\n // Compression method\n {\n offset: 8,\n size: 2,\n default: 0\n },\n // File last modification time\n {\n offset: 10,\n size: 2,\n default: 0\n },\n // File last modification date\n {\n offset: 12,\n size: 2,\n default: 0\n },\n // CRC-32 of uncompressed data\n {\n offset: 14,\n size: 4,\n name: 'crc32'\n },\n // Compressed size (or 0xffffffff for ZIP64)\n {\n offset: 18,\n size: 4,\n name: 'length'\n },\n // Uncompressed size (or 0xffffffff for ZIP64)\n {\n offset: 22,\n size: 4,\n name: 'length'\n },\n // File name length (n)\n {\n offset: 26,\n size: 2,\n name: 'fnlength'\n },\n // Extra field length (m)\n {\n offset: 28,\n size: 2,\n default: 0,\n name: 'extraLength'\n }\n];\n", "import { FileHandleFile, concatenateArrayBuffers, path, NodeFilesystem, NodeFile } from '@loaders.gl/loader-utils';\nimport { generateEoCD, parseEoCDRecord, updateEoCD } from \"./end-of-central-directory.js\";\nimport { CRC32Hash } from '@loaders.gl/crypto';\nimport { generateLocalHeader } from \"./local-file-header.js\";\nimport { generateCDHeader } from \"./cd-file-header.js\";\nimport { fetchFile } from '@loaders.gl/core';\n/**\n * cut off CD and EoCD records from zip file\n * @param provider zip file\n * @returns tuple with three values: CD, EoCD record, EoCD information\n */\nasync function cutTheTailOff(provider) {\n // define where the body ends\n const oldEoCDinfo = await parseEoCDRecord(provider);\n const 
oldCDStartOffset = oldEoCDinfo.cdStartOffset;\n // define cd length\n const oldCDLength = Number(oldEoCDinfo.offsets.zip64EoCDOffset\n ? oldEoCDinfo.offsets.zip64EoCDOffset - oldCDStartOffset\n : oldEoCDinfo.offsets.zipEoCDOffset - oldCDStartOffset);\n // cut off everything except of archieve body\n const zipEnding = await provider.slice(oldCDStartOffset, provider.length);\n await provider.truncate(Number(oldCDStartOffset));\n // divide cd body and eocd record\n const oldCDBody = zipEnding.slice(0, oldCDLength);\n const eocdBody = zipEnding.slice(oldCDLength, zipEnding.byteLength);\n return [oldCDBody, eocdBody, oldEoCDinfo];\n}\n/**\n * generates CD and local headers for the file\n * @param fileName name of the file\n * @param fileToAdd buffer with the file\n * @param localFileHeaderOffset offset of the file local header\n * @returns tuple with two values: local header and file body, cd header\n */\nasync function generateFileHeaders(fileName, fileToAdd, localFileHeaderOffset) {\n // generating CRC32 of the content\n const newFileCRC322 = parseInt(await new CRC32Hash().hash(fileToAdd, 'hex'), 16);\n // generate local header for the file\n const newFileLocalHeader = generateLocalHeader({\n crc32: newFileCRC322,\n fileName,\n length: fileToAdd.byteLength\n });\n // generate hash file cd header\n const newFileCDHeader = generateCDHeader({\n crc32: newFileCRC322,\n fileName,\n offset: localFileHeaderOffset,\n length: fileToAdd.byteLength\n });\n return [\n new Uint8Array(concatenateArrayBuffers(newFileLocalHeader, fileToAdd)),\n new Uint8Array(newFileCDHeader)\n ];\n}\n/**\n * adds one file in the end of the archieve\n * @param zipUrl path to the file\n * @param fileToAdd new file body\n * @param fileName new file name\n */\nexport async function addOneFile(zipUrl, fileToAdd, fileName) {\n // init file handler\n const provider = new FileHandleFile(zipUrl, true);\n const [oldCDBody, eocdBody, oldEoCDinfo] = await cutTheTailOff(provider);\n // remember the new file local header start offset\n const newFileOffset = provider.length;\n const [localPart, cdHeaderPart] = await generateFileHeaders(fileName, fileToAdd, newFileOffset);\n // write down the file local header\n await provider.append(localPart);\n // add the file CD header to the CD\n const newCDBody = concatenateArrayBuffers(oldCDBody, cdHeaderPart);\n // remember the CD start offset\n const newCDStartOffset = provider.length;\n // write down new CD\n await provider.append(new Uint8Array(newCDBody));\n // remember where eocd starts\n const eocdOffset = provider.length;\n await provider.append(updateEoCD(eocdBody, oldEoCDinfo.offsets, newCDStartOffset, eocdOffset, oldEoCDinfo.cdRecordsNumber + 1n));\n}\n/**\n * creates zip archive with no compression\n * @note This is a node specific function that works on files\n * @param inputPath path where files for the achive are stored\n * @param outputPath path where zip archive will be placed\n */\nexport async function createZip(inputPath, outputPath, createAdditionalData) {\n const fileIterator = getFileIterator(inputPath);\n const resFile = new NodeFile(outputPath, 'w');\n const fileList = [];\n const cdArray = [];\n for await (const file of fileIterator) {\n await addFile(file, resFile, cdArray, fileList);\n }\n if (createAdditionalData) {\n const additionaldata = await createAdditionalData(fileList);\n await addFile(additionaldata, resFile, cdArray);\n }\n const cdOffset = (await resFile.stat()).bigsize;\n const cd = concatenateArrayBuffers(...cdArray);\n await resFile.append(new 
Uint8Array(cd));\n const eoCDStart = (await resFile.stat()).bigsize;\n await resFile.append(new Uint8Array(generateEoCD({ recordsNumber: cdArray.length, cdSize: cd.byteLength, cdOffset, eoCDStart })));\n}\n/**\n * Adds file to zip parts\n * @param file file to add\n * @param resFile zip file body\n * @param cdArray zip file central directory\n * @param fileList list of file offsets\n */\nasync function addFile(file, resFile, cdArray, fileList) {\n const size = (await resFile.stat()).bigsize;\n fileList?.push({ fileName: file.path, localHeaderOffset: size });\n const [localPart, cdHeaderPart] = await generateFileHeaders(file.path, file.file, size);\n await resFile.append(localPart);\n cdArray.push(cdHeaderPart);\n}\n/**\n * creates iterator providing buffer with file content and path to every file in the input folder\n * @param inputPath path to the input folder\n * @returns iterator\n */\nexport function getFileIterator(inputPath) {\n async function* iterable() {\n const fileList = await getAllFiles(inputPath);\n for (const filePath of fileList) {\n const file = await (await fetchFile(path.join(inputPath, filePath))).arrayBuffer();\n yield { path: filePath, file };\n }\n }\n return iterable();\n}\n/**\n * creates a list of relative paths to all files in the provided folder\n * @param basePath path of the root folder\n * @param subfolder relative path from the root folder.\n * @returns list of paths\n */\nexport async function getAllFiles(basePath, subfolder = '', fsPassed) {\n const fs = fsPassed ? fsPassed : new NodeFilesystem({});\n const files = await fs.readdir(pathJoin(basePath, subfolder));\n const arrayOfFiles = [];\n for (const file of files) {\n const fullPath = pathJoin(basePath, subfolder, file);\n if ((await fs.stat(fullPath)).isDirectory) {\n const files = await getAllFiles(basePath, pathJoin(subfolder, file));\n arrayOfFiles.push(...files);\n }\n else {\n arrayOfFiles.push(pathJoin(subfolder, file));\n }\n }\n return arrayOfFiles;\n}\n/**\n * removes empty parts from path array and joins it\n * @param paths paths to join\n * @returns joined path\n */\nfunction pathJoin(...paths) {\n const resPaths = paths.filter((val) => val.length);\n return path.join(...resPaths);\n}\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nimport { isBrowser, isFileProvider, FileHandleFile } from '@loaders.gl/loader-utils';\nimport { makeZipCDHeaderIterator } from \"../parse-zip/cd-file-header.js\";\nimport { parseZipLocalFileHeader } from \"../parse-zip/local-file-header.js\";\nimport { DeflateCompression } from '@loaders.gl/compression';\nimport { IndexedArchive } from \"./IndexedArchive.js\";\n/** Handling different compression types in zip */\nexport const ZIP_COMPRESSION_HANDLERS = {\n /** No compression */\n 0: async (compressedFile) => compressedFile