// @loaders.gl/zip: Zip Archive Loader (bundled dist/index.cjs)
"use strict";
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __export = (target, all) => {
for (var name in all)
__defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
if (from && typeof from === "object" || typeof from === "function") {
for (let key of __getOwnPropNames(from))
if (!__hasOwnProp.call(to, key) && key !== except)
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
}
return to;
};
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
// If the importer is in node compatibility mode or this is not an ESM
// file that has been converted to a CommonJS file using a Babel-
// compatible transform (i.e. "__esModule" has not been set), then set
// "default" to the CommonJS "module.exports" for node compatibility.
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
mod
));
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// dist/index.js
var dist_exports = {};
__export(dist_exports, {
CD_HEADER_SIGNATURE: () => signature2,
IndexedArchive: () => IndexedArchive,
TarBuilder: () => TarBuilder,
ZIP_COMPRESSION_HANDLERS: () => ZIP_COMPRESSION_HANDLERS,
ZipFileSystem: () => ZipFileSystem,
ZipLoader: () => ZipLoader,
ZipWriter: () => ZipWriter,
addOneFile: () => addOneFile,
composeHashFile: () => composeHashFile,
createZip: () => createZip,
generateCDHeader: () => generateCDHeader,
generateLocalHeader: () => generateLocalHeader,
localHeaderSignature: () => signature3,
makeHashTableFromZipHeaders: () => makeHashTableFromZipHeaders,
makeZipCDHeaderIterator: () => makeZipCDHeaderIterator,
parseEoCDRecord: () => parseEoCDRecord,
parseHashTable: () => parseHashTable,
parseZipCDFileHeader: () => parseZipCDFileHeader,
parseZipLocalFileHeader: () => parseZipLocalFileHeader,
searchFromTheEnd: () => searchFromTheEnd
});
module.exports = __toCommonJS(dist_exports);
// dist/zip-loader.js
var import_jszip = __toESM(require("jszip"), 1);
var VERSION = "4.3.3";
var ZipLoader = {
dataType: null,
batchType: null,
id: "zip",
module: "zip",
name: "Zip Archive",
version: VERSION,
extensions: ["zip"],
mimeTypes: ["application/zip"],
category: "archive",
tests: ["PK"],
options: {},
parse: parseZipAsync
};
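// Usage sketch (illustrative, not executed by this bundle): parse an archive into a
// file map via @loaders.gl/core's `parse`. The 'data.zip' URL is a placeholder.
//
//   const {parse} = require('@loaders.gl/core');
//   const {ZipLoader} = require('@loaders.gl/zip');
//   const response = await fetch('data.zip');
//   const fileMap = await parse(await response.arrayBuffer(), ZipLoader);
//   // fileMap maps each entry path to an ArrayBuffer (or to an Error for unreadable entries)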
async function parseZipAsync(data, options = {}) {
const promises = [];
const fileMap = {};
try {
const jsZip = new import_jszip.default();
const zip = await jsZip.loadAsync(data, options);
zip.forEach((relativePath, zipEntry) => {
const subFilename = zipEntry.name;
const promise = loadZipEntry(jsZip, subFilename, options).then((arrayBufferOrError) => {
fileMap[relativePath] = arrayBufferOrError;
});
promises.push(promise);
});
await Promise.all(promises);
return fileMap;
} catch (error) {
options.log?.error(`Unable to read zip archive: ${error}`);
throw error;
}
}
async function loadZipEntry(jsZip, subFilename, options = {}) {
try {
const arrayBuffer = await jsZip.file(subFilename).async(options.dataType || "arraybuffer");
return arrayBuffer;
} catch (error) {
options.log?.error(`Unable to read ${subFilename} from zip archive: ${error}`);
return error;
}
}
// dist/zip-writer.js
var import_jszip2 = __toESM(require("jszip"), 1);
var VERSION2 = "4.3.3";
var ZipWriter = {
name: "Zip Archive",
id: "zip",
module: "zip",
version: VERSION2,
extensions: ["zip"],
category: "archive",
mimeTypes: ["application/zip"],
options: {
zip: {
onUpdate: () => {
}
},
jszip: {}
},
encode: encodeZipAsync
};
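// Usage sketch (illustrative): encode a file map into a zip ArrayBuffer via
// @loaders.gl/core's `encode`; the onUpdate callback receives JSZip progress metadata.
//
//   const {encode} = require('@loaders.gl/core');
//   const {ZipWriter} = require('@loaders.gl/zip');
//   const fileMap = {'readme.txt': new TextEncoder().encode('hello').buffer};
//   const zipBuffer = await encode(fileMap, ZipWriter, {zip: {onUpdate: (meta) => {}}});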
async function encodeZipAsync(fileMap, options = {}) {
var _a;
const jsZip = new import_jszip2.default();
for (const subFileName in fileMap) {
const subFileData = fileMap[subFileName];
jsZip.file(subFileName, subFileData, (options == null ? void 0 : options.jszip) || {});
}
const zipOptions = { ...ZipWriter.options.zip, ...options == null ? void 0 : options.zip };
const jszipOptions = { ...(_a = ZipWriter.options) == null ? void 0 : _a.jszip, ...options.jszip };
try {
return await jsZip.generateAsync(
{ ...jszipOptions, type: "arraybuffer" },
// generate an arraybuffer
zipOptions.onUpdate
);
} catch (error) {
options.log?.error(`Unable to encode zip archive: ${error}`);
throw error;
}
}
// dist/lib/tar/utils.js
function clean(length) {
let i;
const buffer = new Uint8Array(length);
for (i = 0; i < length; i += 1) {
buffer[i] = 0;
}
return buffer;
}
function pad(num, bytes, base) {
const numStr = num.toString(base || 8);
return "000000000000".substr(numStr.length + 12 - bytes) + numStr;
}
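// Worked example: pad(493, 7) renders mode 0o755 as the 7-character octal string
// "0000755"; pad(255, 4, 16) renders 255 as the 4-character hex string "00ff".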
function stringToUint8(input, out, offset) {
let i;
let length;
out = out || clean(input.length);
offset = offset || 0;
for (i = 0, length = input.length; i < length; i += 1) {
out[offset] = input.charCodeAt(i);
offset += 1;
}
return out;
}
// dist/lib/tar/header.js
var structure = {
fileName: 100,
fileMode: 8,
uid: 8,
gid: 8,
fileSize: 12,
mtime: 12,
checksum: 8,
type: 1,
linkName: 100,
ustar: 8,
owner: 32,
group: 32,
majorNumber: 8,
minorNumber: 8,
filenamePrefix: 155,
padding: 12
};
function format(data, cb) {
const buffer = clean(512);
let offset = 0;
Object.entries(structure).forEach(([field, length]) => {
const str = data[field] || "";
let i;
let fieldLength;
for (i = 0, fieldLength = str.length; i < fieldLength; i += 1) {
buffer[offset] = str.charCodeAt(i);
offset += 1;
}
offset += length - i;
});
if (typeof cb === "function") {
return cb(buffer, offset);
}
return buffer;
}
// dist/lib/tar/tar.js
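// NOTE: blockSize, headerLength and inputLength are module-level variables, so they
// are shared across all Tar instances rather than being per-instance state.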
var blockSize;
var headerLength;
var inputLength;
var recordSize = 512;
var Tar = class {
written;
out;
blocks = [];
length;
/**
* @param [recordsPerBlock]
*/
constructor(recordsPerBlock) {
this.written = 0;
blockSize = (recordsPerBlock || 20) * recordSize;
this.out = clean(blockSize);
this.blocks = [];
this.length = 0;
this.save = this.save.bind(this);
this.clear = this.clear.bind(this);
this.append = this.append.bind(this);
}
/**
* Append a file to the tar archive
* @param filepath
* @param input
* @param [opts]
*/
// eslint-disable-next-line complexity
append(filepath, input, opts) {
let checksum;
if (typeof input === "string") {
input = stringToUint8(input);
} else if (input.constructor && input.constructor !== Uint8Array.prototype.constructor) {
const errorInputMatch = /function\s*([$A-Za-z_][0-9A-Za-z_]*)\s*\(/.exec(input.constructor.toString());
const errorInput = errorInputMatch && errorInputMatch[1];
const errorMessage = `Invalid input type. You gave me: ${errorInput}`;
throw new Error(errorMessage);
}
opts = opts || {};
const mode = opts.mode || parseInt("777", 8) & 4095;
const mtime = opts.mtime || Math.floor(Number(new Date()) / 1e3);
const uid = opts.uid || 0;
const gid = opts.gid || 0;
const data = {
fileName: filepath,
fileMode: pad(mode, 7),
uid: pad(uid, 7),
gid: pad(gid, 7),
fileSize: pad(input.length, 11),
mtime: pad(mtime, 11),
// the checksum field is treated as 8 spaces while the checksum is computed
checksum: "        ",
// 0 = just a file
type: "0",
ustar: "ustar  ",
owner: opts.owner || "",
group: opts.group || ""
};
checksum = 0;
Object.keys(data).forEach((key) => {
let i;
const value = data[key];
let length;
for (i = 0, length = value.length; i < length; i += 1) {
checksum += value.charCodeAt(i);
}
});
data.checksum = `${pad(checksum, 6)}\0 `;
const headerArr = format(data);
headerLength = Math.ceil(headerArr.length / recordSize) * recordSize;
inputLength = Math.ceil(input.length / recordSize) * recordSize;
this.blocks.push({
header: headerArr,
input,
headerLength,
inputLength
});
}
/**
* Compile the collected blocks into a Blob
* @returns {Blob}
*/
save() {
const buffers = [];
const chunks = [];
let length = 0;
const max = Math.pow(2, 20);
let chunk = [];
this.blocks.forEach((b = []) => {
if (length + b.headerLength + b.inputLength > max) {
chunks.push({ blocks: chunk, length });
chunk = [];
length = 0;
}
chunk.push(b);
length += b.headerLength + b.inputLength;
});
chunks.push({ blocks: chunk, length });
chunks.forEach((c = []) => {
const buffer = new Uint8Array(c.length);
let written = 0;
c.blocks.forEach((b = []) => {
buffer.set(b.header, written);
written += b.headerLength;
buffer.set(b.input, written);
written += b.inputLength;
});
buffers.push(buffer);
});
buffers.push(new Uint8Array(2 * recordSize));
return new Blob(buffers, { type: "application/octet-stream" });
}
/**
* Reset the output buffer to a fresh block
*/
clear() {
this.written = 0;
this.out = clean(blockSize);
}
};
var tar_default = Tar;
// dist/tar-builder.js
var TAR_BUILDER_OPTIONS = {
recordsPerBlock: 20
};
var TarBuilder = class {
static get properties() {
return {
id: "tar",
name: "TAR",
extensions: ["tar"],
mimeTypes: ["application/x-tar"],
builder: TarBuilder,
options: TAR_BUILDER_OPTIONS
};
}
options;
tape;
count = 0;
constructor(options) {
this.options = { ...TAR_BUILDER_OPTIONS, ...options };
this.tape = new tar_default(this.options.recordsPerBlock);
}
/** Adds a file to the archive. */
addFile(filename, buffer) {
this.tape.append(filename, new Uint8Array(buffer));
this.count++;
}
async build() {
return new Response(this.tape.save()).arrayBuffer();
}
};
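// Usage sketch (illustrative): build a tar archive in memory.
//
//   const builder = new TarBuilder();
//   builder.addFile('hello.txt', new TextEncoder().encode('hello world').buffer);
//   const tarArrayBuffer = await builder.build();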
// dist/parse-zip/cd-file-header.js
var import_loader_utils3 = require("@loaders.gl/loader-utils");
// dist/parse-zip/end-of-central-directory.js
var import_loader_utils2 = require("@loaders.gl/loader-utils");
// dist/parse-zip/search-from-the-end.js
var buffLength = 1024;
var searchFromTheEnd = async (file, target) => {
const searchWindow = [
await file.getUint8(file.length - 1n),
await file.getUint8(file.length - 2n),
await file.getUint8(file.length - 3n),
void 0
];
let targetOffset = -1;
let point = file.length - 4n;
do {
const prevPoint = point;
point -= BigInt(buffLength);
point = point >= 0n ? point : 0n;
const buff = new Uint8Array(await file.slice(point, prevPoint));
for (let i = buff.length - 1; i > -1; i--) {
searchWindow[3] = searchWindow[2];
searchWindow[2] = searchWindow[1];
searchWindow[1] = searchWindow[0];
searchWindow[0] = buff[i];
if (searchWindow.every((val, index) => val === target[index])) {
targetOffset = i;
break;
}
}
} while (targetOffset === -1 && point > 0n);
return point + BigInt(targetOffset);
};
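// Usage sketch: scan backwards for the End-of-Central-Directory signature
// (PK\x05\x06). `file` is any FileProvider from @loaders.gl/loader-utils.
//
//   const eoCDOffset = await searchFromTheEnd(file, new Uint8Array([80, 75, 5, 6]));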
// dist/parse-zip/zip64-info-generation.js
var import_loader_utils = require("@loaders.gl/loader-utils");
var signature = new Uint8Array([1, 0]);
function createZip64Info(options) {
const optionsToUse = {
...options,
zip64Length: (options.offset ? 1 : 0) * 8 + (options.size ? 1 : 0) * 16
};
const arraysToConcat = [];
for (const field of ZIP64_FIELDS) {
if (!optionsToUse[field.name ?? ""] && !field.default) {
continue;
}
const newValue = new DataView(new ArrayBuffer(field.size));
NUMBER_SETTERS[field.size](newValue, 0, optionsToUse[field.name ?? ""] ?? field.default);
arraysToConcat.push(newValue.buffer);
}
return (0, import_loader_utils.concatenateArrayBuffers)(...arraysToConcat);
}
function setFieldToNumber(header, fieldSize, fieldOffset, value) {
NUMBER_SETTERS[fieldSize](header, Number(fieldOffset), value);
}
var NUMBER_SETTERS = {
2: (header, offset, value) => {
header.setUint16(offset, Number(value > 65535 ? 65535 : value), true);
},
4: (header, offset, value) => {
header.setUint32(offset, Number(value > 4294967295 ? 4294967295 : value), true);
},
8: (header, offset, value) => {
header.setBigUint64(offset, BigInt(value), true);
}
};
var ZIP64_FIELDS = [
// Header ID 0x0001
{
size: 2,
default: new DataView(signature.buffer).getUint16(0, true)
},
// Size of the extra field chunk (8, 16, 24 or 28)
{
size: 2,
name: "zip64Length"
},
// Original uncompressed file size
{
size: 8,
name: "size"
},
// Size of compressed data
{
size: 8,
name: "size"
},
// Offset of local header record
{
size: 8,
name: "offset"
}
];
// dist/parse-zip/end-of-central-directory.js
var eoCDSignature = new Uint8Array([80, 75, 5, 6]);
var zip64EoCDLocatorSignature = new Uint8Array([80, 75, 6, 7]);
var zip64EoCDSignature = new Uint8Array([80, 75, 6, 6]);
var CD_RECORDS_NUMBER_OFFSET = 8n;
var CD_RECORDS_NUMBER_ON_DISC_OFFSET = 10n;
var CD_CD_BYTE_SIZE_OFFSET = 12n;
var CD_START_OFFSET_OFFSET = 16n;
var CD_COMMENT_OFFSET = 22n;
var ZIP64_EOCD_START_OFFSET_OFFSET = 8n;
var ZIP64_CD_RECORDS_NUMBER_OFFSET = 24n;
var ZIP64_CD_RECORDS_NUMBER_ON_DISC_OFFSET = 32n;
var ZIP64_CD_CD_BYTE_SIZE_OFFSET = 40n;
var ZIP64_CD_START_OFFSET_OFFSET = 48n;
var ZIP64_COMMENT_OFFSET = 56n;
var parseEoCDRecord = async (file) => {
const zipEoCDOffset = await searchFromTheEnd(file, eoCDSignature);
let cdRecordsNumber = BigInt(await file.getUint16(zipEoCDOffset + CD_RECORDS_NUMBER_OFFSET));
let cdByteSize = BigInt(await file.getUint32(zipEoCDOffset + CD_CD_BYTE_SIZE_OFFSET));
let cdStartOffset = BigInt(await file.getUint32(zipEoCDOffset + CD_START_OFFSET_OFFSET));
let zip64EoCDLocatorOffset = zipEoCDOffset - 20n;
let zip64EoCDOffset = 0n;
const magicBytes = await file.slice(zip64EoCDLocatorOffset, zip64EoCDLocatorOffset + 4n);
if ((0, import_loader_utils2.compareArrayBuffers)(magicBytes, zip64EoCDLocatorSignature)) {
zip64EoCDOffset = await file.getBigUint64(zip64EoCDLocatorOffset + ZIP64_EOCD_START_OFFSET_OFFSET);
const endOfCDMagicBytes = await file.slice(zip64EoCDOffset, zip64EoCDOffset + 4n);
if (!(0, import_loader_utils2.compareArrayBuffers)(endOfCDMagicBytes, zip64EoCDSignature.buffer)) {
throw new Error("zip64 EoCD not found");
}
cdRecordsNumber = await file.getBigUint64(zip64EoCDOffset + ZIP64_CD_RECORDS_NUMBER_OFFSET);
cdByteSize = await file.getBigUint64(zip64EoCDOffset + ZIP64_CD_CD_BYTE_SIZE_OFFSET);
cdStartOffset = await file.getBigUint64(zip64EoCDOffset + ZIP64_CD_START_OFFSET_OFFSET);
} else {
zip64EoCDLocatorOffset = 0n;
}
return {
cdRecordsNumber,
cdStartOffset,
cdByteSize,
offsets: {
zip64EoCDOffset,
zip64EoCDLocatorOffset,
zipEoCDOffset
}
};
};
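// Usage sketch (Node.js, assuming an on-disk archive; the path is a placeholder):
//
//   const {FileHandleFile} = require('@loaders.gl/loader-utils');
//   const provider = new FileHandleFile('archive.zip');
//   const {cdRecordsNumber, cdStartOffset, cdByteSize} = await parseEoCDRecord(provider);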
function updateEoCD(eocdBody, oldEoCDOffsets, newCDStartOffset, eocdStartOffset, newCDRecordsNumber) {
const eocd = new DataView(eocdBody);
const classicEoCDOffset = oldEoCDOffsets.zip64EoCDOffset ? oldEoCDOffsets.zipEoCDOffset - oldEoCDOffsets.zip64EoCDOffset : 0n;
if (Number(newCDRecordsNumber) <= 65535) {
setFieldToNumber(eocd, 2, classicEoCDOffset + CD_RECORDS_NUMBER_OFFSET, newCDRecordsNumber);
setFieldToNumber(eocd, 2, classicEoCDOffset + CD_RECORDS_NUMBER_ON_DISC_OFFSET, newCDRecordsNumber);
}
if (eocdStartOffset - newCDStartOffset <= 4294967295) {
setFieldToNumber(eocd, 4, classicEoCDOffset + CD_CD_BYTE_SIZE_OFFSET, eocdStartOffset - newCDStartOffset);
}
if (newCDStartOffset < 4294967295) {
setFieldToNumber(eocd, 4, classicEoCDOffset + CD_START_OFFSET_OFFSET, newCDStartOffset);
}
if (oldEoCDOffsets.zip64EoCDLocatorOffset && oldEoCDOffsets.zip64EoCDOffset) {
const locatorOffset = oldEoCDOffsets.zip64EoCDLocatorOffset - oldEoCDOffsets.zip64EoCDOffset;
setFieldToNumber(eocd, 8, locatorOffset + ZIP64_EOCD_START_OFFSET_OFFSET, eocdStartOffset);
setFieldToNumber(eocd, 8, ZIP64_CD_START_OFFSET_OFFSET, newCDStartOffset);
setFieldToNumber(eocd, 8, ZIP64_CD_RECORDS_NUMBER_OFFSET, newCDRecordsNumber);
setFieldToNumber(eocd, 8, ZIP64_CD_RECORDS_NUMBER_ON_DISC_OFFSET, newCDRecordsNumber);
setFieldToNumber(eocd, 8, ZIP64_CD_CD_BYTE_SIZE_OFFSET, eocdStartOffset - newCDStartOffset);
}
return new Uint8Array(eocd.buffer);
}
function generateEoCD(options) {
const header = new DataView(new ArrayBuffer(Number(CD_COMMENT_OFFSET)));
for (const field of EOCD_FIELDS) {
setFieldToNumber(header, field.size, field.offset, options[field.name ?? ""] ?? field.default ?? 0);
}
const locator = generateZip64InfoLocator(options);
const zip64Record = generateZip64Info(options);
return (0, import_loader_utils2.concatenateArrayBuffers)(zip64Record, locator, header.buffer);
}
var EOCD_FIELDS = [
// End of central directory signature = 0x06054b50
{
offset: 0,
size: 4,
default: new DataView(eoCDSignature.buffer).getUint32(0, true)
},
// Number of this disk (or 0xffff for ZIP64)
{
offset: 4,
size: 2,
default: 0
},
// Disk where central directory starts (or 0xffff for ZIP64)
{
offset: 6,
size: 2,
default: 0
},
// Number of central directory records on this disk (or 0xffff for ZIP64)
{
offset: 8,
size: 2,
name: "recordsNumber"
},
// Total number of central directory records (or 0xffff for ZIP64)
{
offset: 10,
size: 2,
name: "recordsNumber"
},
// Size of central directory (bytes) (or 0xffffffff for ZIP64)
{
offset: 12,
size: 4,
name: "cdSize"
},
// Offset of start of central directory, relative to start of archive (or 0xffffffff for ZIP64)
{
offset: 16,
size: 4,
name: "cdOffset"
},
// Comment length (n)
{
offset: 20,
size: 2,
default: 0
}
];
function generateZip64Info(options) {
const record = new DataView(new ArrayBuffer(Number(ZIP64_COMMENT_OFFSET)));
for (const field of ZIP64_EOCD_FIELDS) {
setFieldToNumber(record, field.size, field.offset, options[field.name ?? ""] ?? field.default ?? 0);
}
return record.buffer;
}
function generateZip64InfoLocator(options) {
const locator = new DataView(new ArrayBuffer(20));
for (const field of ZIP64_EOCD_LOCATOR_FIELDS) {
setFieldToNumber(locator, field.size, field.offset, options[field.name ?? ""] ?? field.default ?? 0);
}
return locator.buffer;
}
var ZIP64_EOCD_LOCATOR_FIELDS = [
// zip64 end of central dir locator signature
{
offset: 0,
size: 4,
default: new DataView(zip64EoCDLocatorSignature.buffer).getUint32(0, true)
},
// number of the disk with the start of the zip64 end of
{
offset: 4,
size: 4,
default: 0
},
// start of the zip64 end of central directory
{
offset: 8,
size: 8,
name: "eoCDStart"
},
// total number of disks
{
offset: 16,
size: 4,
default: 1
}
];
var ZIP64_EOCD_FIELDS = [
// End of central directory signature = 0x06064b50
{
offset: 0,
size: 4,
default: new DataView(zip64EoCDSignature.buffer).getUint32(0, true)
},
// Size of the EOCD64 minus 12
{
offset: 4,
size: 8,
default: 44
},
// Version made by
{
offset: 12,
size: 2,
default: 45
},
// Version needed to extract (minimum)
{
offset: 14,
size: 2,
default: 45
},
// Number of this disk
{
offset: 16,
size: 4,
default: 0
},
// Disk where central directory starts
{
offset: 20,
size: 4,
default: 0
},
// Number of central directory records on this disk
{
offset: 24,
size: 8,
name: "recordsNumber"
},
// Total number of central directory records
{
offset: 32,
size: 8,
name: "recordsNumber"
},
// Size of central directory (bytes)
{
offset: 40,
size: 8,
name: "cdSize"
},
// Offset of start of central directory, relative to start of archive
{
offset: 48,
size: 8,
name: "cdOffset"
}
];
// dist/parse-zip/cd-file-header.js
var CD_COMPRESSED_SIZE_OFFSET = 20;
var CD_UNCOMPRESSED_SIZE_OFFSET = 24;
var CD_FILE_NAME_LENGTH_OFFSET = 28;
var CD_EXTRA_FIELD_LENGTH_OFFSET = 30;
var CD_START_DISK_OFFSET = 32;
var CD_LOCAL_HEADER_OFFSET_OFFSET = 42;
var CD_FILE_NAME_OFFSET = 46n;
var signature2 = new Uint8Array([80, 75, 1, 2]);
var parseZipCDFileHeader = async (headerOffset, file) => {
if (headerOffset >= file.length) {
return null;
}
const mainHeader = new DataView(await file.slice(headerOffset, headerOffset + CD_FILE_NAME_OFFSET));
const magicBytes = mainHeader.buffer.slice(0, 4);
if (!(0, import_loader_utils3.compareArrayBuffers)(magicBytes, signature2.buffer)) {
return null;
}
const compressedSize = BigInt(mainHeader.getUint32(CD_COMPRESSED_SIZE_OFFSET, true));
const uncompressedSize = BigInt(mainHeader.getUint32(CD_UNCOMPRESSED_SIZE_OFFSET, true));
const extraFieldLength = mainHeader.getUint16(CD_EXTRA_FIELD_LENGTH_OFFSET, true);
const startDisk = BigInt(mainHeader.getUint16(CD_START_DISK_OFFSET, true));
const fileNameLength = mainHeader.getUint16(CD_FILE_NAME_LENGTH_OFFSET, true);
const additionalHeader = await file.slice(headerOffset + CD_FILE_NAME_OFFSET, headerOffset + CD_FILE_NAME_OFFSET + BigInt(fileNameLength + extraFieldLength));
const filenameBytes = additionalHeader.slice(0, fileNameLength);
const fileName = new TextDecoder().decode(filenameBytes);
const extraOffset = headerOffset + CD_FILE_NAME_OFFSET + BigInt(fileNameLength);
const oldFormatOffset = mainHeader.getUint32(CD_LOCAL_HEADER_OFFSET_OFFSET, true);
const localHeaderOffset = BigInt(oldFormatOffset);
const extraField = new DataView(additionalHeader.slice(fileNameLength, additionalHeader.byteLength));
const zip64data = {
uncompressedSize,
compressedSize,
localHeaderOffset,
startDisk
};
const res = findZip64DataInExtra(zip64data, extraField);
return {
...zip64data,
...res,
extraFieldLength,
fileNameLength,
fileName,
extraOffset
};
};
async function* makeZipCDHeaderIterator(fileProvider) {
const { cdStartOffset, cdByteSize } = await parseEoCDRecord(fileProvider);
const centralDirectory = new import_loader_utils3.DataViewFile(new DataView(await fileProvider.slice(cdStartOffset, cdStartOffset + cdByteSize)));
let cdHeader = await parseZipCDFileHeader(0n, centralDirectory);
while (cdHeader) {
yield cdHeader;
cdHeader = await parseZipCDFileHeader(cdHeader.extraOffset + BigInt(cdHeader.extraFieldLength), centralDirectory);
}
}
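// Usage sketch: list every entry in an archive by iterating the central directory
// (`provider` is any FileProvider, e.g. the FileHandleFile shown above).
//
//   for await (const cdHeader of makeZipCDHeaderIterator(provider)) {
//     console.log(cdHeader.fileName, Number(cdHeader.uncompressedSize));
//   }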
// assemble a little-endian uint16 from two consecutive bytes
var getUint16 = (...bytes) => {
return bytes[0] + bytes[1] * 256;
};
var findZip64DataInExtra = (zip64data, extraField) => {
const zip64dataList = findExpectedData(zip64data);
const zip64DataRes = {};
if (zip64dataList.length > 0) {
const zip64chunkSize = zip64dataList.reduce((sum, curr) => sum + curr.length, 0);
const offsetInExtraData = new Uint8Array(extraField.buffer).findIndex((_val, i, arr) => getUint16(arr[i], arr[i + 1]) === 1 && getUint16(arr[i + 2], arr[i + 3]) === zip64chunkSize);
let bytesRead = 0;
for (const note of zip64dataList) {
const offset = bytesRead;
zip64DataRes[note.name] = extraField.getBigUint64(offsetInExtraData + 4 + offset, true);
bytesRead = offset + note.length;
}
}
return zip64DataRes;
};
var findExpectedData = (zip64data) => {
const zip64dataList = [];
if (zip64data.uncompressedSize === BigInt(4294967295)) {
zip64dataList.push({ name: "uncompressedSize", length: 8 });
}
if (zip64data.compressedSize === BigInt(4294967295)) {
zip64dataList.push({ name: "compressedSize", length: 8 });
}
if (zip64data.localHeaderOffset === BigInt(4294967295)) {
zip64dataList.push({ name: "localHeaderOffset", length: 8 });
}
if (zip64data.startDisk === BigInt(4294967295)) {
zip64dataList.push({ name: "startDisk", length: 4 });
}
return zip64dataList;
};
function generateCDHeader(options) {
const optionsToUse = {
...options,
fnlength: options.fileName.length,
extraLength: 0
};
let zip64header = new ArrayBuffer(0);
const optionsToZip64 = {};
if (optionsToUse.offset >= 4294967295) {
optionsToZip64.offset = optionsToUse.offset;
optionsToUse.offset = BigInt(4294967295);
}
if (optionsToUse.length >= 4294967295) {
optionsToZip64.size = optionsToUse.length;
optionsToUse.length = 4294967295;
}
if (Object.keys(optionsToZip64).length) {
zip64header = createZip64Info(optionsToZip64);
optionsToUse.extraLength = zip64header.byteLength;
}
const header = new DataView(new ArrayBuffer(Number(CD_FILE_NAME_OFFSET)));
for (const field of ZIP_HEADER_FIELDS) {
setFieldToNumber(header, field.size, field.offset, optionsToUse[field.name ?? ""] ?? field.default ?? 0);
}
const encodedName = new TextEncoder().encode(optionsToUse.fileName);
const resHeader = (0, import_loader_utils3.concatenateArrayBuffers)(header.buffer, encodedName, zip64header);
return resHeader;
}
var ZIP_HEADER_FIELDS = [
// Central directory file header signature = 0x02014b50
{
offset: 0,
size: 4,
default: new DataView(signature2.buffer).getUint32(0, true)
},
// Version made by
{
offset: 4,
size: 2,
default: 45
},
// Version needed to extract (minimum)
{
offset: 6,
size: 2,
default: 45
},
// General purpose bit flag
{
offset: 8,
size: 2,
default: 0
},
// Compression method
{
offset: 10,
size: 2,
default: 0
},
// File last modification time
{
offset: 12,
size: 2,
default: 0
},
// File last modification date
{
offset: 14,
size: 2,
default: 0
},
// CRC-32 of uncompressed data
{
offset: 16,
size: 4,
name: "crc32"
},
// Compressed size (or 0xffffffff for ZIP64)
{
offset: 20,
size: 4,
name: "length"
},
// Uncompressed size (or 0xffffffff for ZIP64)
{
offset: 24,
size: 4,
name: "length"
},
// File name length (n)
{
offset: 28,
size: 2,
name: "fnlength"
},
// Extra field length (m)
{
offset: 30,
size: 2,
default: 0,
name: "extraLength"
},
// File comment length (k)
{
offset: 32,
size: 2,
default: 0
},
// Disk number where file starts (or 0xffff for ZIP64)
{
offset: 34,
size: 2,
default: 0
},
// Internal file attributes
{
offset: 36,
size: 2,
default: 0
},
// External file attributes
{
offset: 38,
size: 4,
default: 0
},
// Relative offset of local file header
{
offset: 42,
size: 4,
name: "offset"
}
];
// dist/parse-zip/local-file-header.js
var import_loader_utils4 = require("@loaders.gl/loader-utils");
var COMPRESSION_METHOD_OFFSET = 8;
var COMPRESSED_SIZE_OFFSET = 18;
var UNCOMPRESSED_SIZE_OFFSET = 22;
var FILE_NAME_LENGTH_OFFSET = 26;
var EXTRA_FIELD_LENGTH_OFFSET = 28;
var FILE_NAME_OFFSET = 30n;
var signature3 = new Uint8Array([80, 75, 3, 4]);
var parseZipLocalFileHeader = async (headerOffset, file) => {
const mainHeader = new DataView(await file.slice(headerOffset, headerOffset + FILE_NAME_OFFSET));
const magicBytes = mainHeader.buffer.slice(0, 4);
if (!(0, import_loader_utils4.compareArrayBuffers)(magicBytes, signature3)) {
return null;
}
const fileNameLength = mainHeader.getUint16(FILE_NAME_LENGTH_OFFSET, true);
const extraFieldLength = mainHeader.getUint16(EXTRA_FIELD_LENGTH_OFFSET, true);
const additionalHeader = await file.slice(headerOffset + FILE_NAME_OFFSET, headerOffset + FILE_NAME_OFFSET + BigInt(fileNameLength + extraFieldLength));
const fileNameBuffer = additionalHeader.slice(0, fileNameLength);
const extraDataBuffer = new DataView(additionalHeader.slice(fileNameLength, additionalHeader.byteLength));
const fileName = new TextDecoder().decode(fileNameBuffer).split("\\").join("/");
let fileDataOffset = headerOffset + FILE_NAME_OFFSET + BigInt(fileNameLength + extraFieldLength);
const compressionMethod = mainHeader.getUint16(COMPRESSION_METHOD_OFFSET, true);
let compressedSize = BigInt(mainHeader.getUint32(COMPRESSED_SIZE_OFFSET, true));
let uncompressedSize = BigInt(mainHeader.getUint32(UNCOMPRESSED_SIZE_OFFSET, true));
let offsetInZip64Data = 4;
if (uncompressedSize === BigInt(4294967295)) {
uncompressedSize = extraDataBuffer.getBigUint64(offsetInZip64Data, true);
offsetInZip64Data += 8;
}
if (compressedSize === BigInt(4294967295)) {
compressedSize = extraDataBuffer.getBigUint64(offsetInZip64Data, true);
offsetInZip64Data += 8;
}
if (fileDataOffset === BigInt(4294967295)) {
fileDataOffset = extraDataBuffer.getBigUint64(offsetInZip64Data, true);
}
return {
fileNameLength,
fileName,
extraFieldLength,
fileDataOffset,
compressedSize,
compressionMethod
};
};
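// Usage sketch: resolve a central-directory header to the entry's compressed bytes
// (`cdHeader` as produced by makeZipCDHeaderIterator above).
//
//   const localHeader = await parseZipLocalFileHeader(cdHeader.localHeaderOffset, provider);
//   if (localHeader) {
//     const compressed = await provider.slice(
//       localHeader.fileDataOffset,
//       localHeader.fileDataOffset + localHeader.compressedSize
//     );
//   }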
function generateLocalHeader(options) {
const optionsToUse = {
...options,
extraLength: 0,
fnlength: options.fileName.length
};
let zip64header = new ArrayBuffer(0);
const optionsToZip64 = {};
if (optionsToUse.length >= 4294967295) {
optionsToZip64.size = optionsToUse.length;
optionsToUse.length = 4294967295;
}
if (Object.keys(optionsToZip64).length) {
zip64header = createZip64Info(optionsToZip64);
optionsToUse.extraLength = zip64header.byteLength;
}
const header = new DataView(new ArrayBuffer(Number(FILE_NAME_OFFSET)));
for (const field of ZIP_HEADER_FIELDS2) {
setFieldToNumber(header, field.size, field.offset, optionsToUse[field.name ?? ""] ?? field.default ?? 0);
}
const encodedName = new TextEncoder().encode(optionsToUse.fileName);
const resHeader = (0, import_loader_utils4.concatenateArrayBuffers)(header.buffer, encodedName, zip64header);
return resHeader;
}
var ZIP_HEADER_FIELDS2 = [
// Local file header signature = 0x04034b50
{
offset: 0,
size: 4,
default: new DataView(signature3.buffer).getUint32(0, true)
},
// Version needed to extract (minimum)
{
offset: 4,
size: 2,
default: 45
},
// General purpose bit flag
{
offset: 6,
size: 2,
default: 0
},
// Compression method
{
offset: 8,
size: 2,
default: 0
},
// File last modification time
{
offset: 10,
size: 2,
default: 0
},
// File last modification date
{
offset: 12,
size: 2,
default: 0
},
// CRC-32 of uncompressed data
{
offset: 14,
size: 4,
name: "crc32"
},
// Compressed size (or 0xffffffff for ZIP64)
{
offset: 18,
size: 4,
name: "length"
},
// Uncompressed size (or 0xffffffff for ZIP64)
{
offset: 22,
size: 4,
name: "length"
},
// File name length (n)
{
offset: 26,
size: 2,
name: "fnlength"
},
// Extra field length (m)
{
offset: 28,
size: 2,
default: 0,
name: "extraLength"
}
];
// dist/parse-zip/zip-composition.js
var import_loader_utils5 = require("@loaders.gl/loader-utils");
var import_crypto = require("@loaders.gl/crypto");
var import_core = require("@loaders.gl/core");
async function cutTheTailOff(provider) {
const oldEoCDinfo = await parseEoCDRecord(provider);
const oldCDStartOffset = oldEoCDinfo.cdStartOffset;
const oldCDLength = Number(oldEoCDinfo.offsets.zip64EoCDOffset ? oldEoCDinfo.offsets.zip64EoCDOffset - oldCDStartOffset : oldEoCDinfo.offsets.zipEoCDOffset - oldCDStartOffset);
const zipEnding = await provider.slice(oldCDStartOffset, provider.length);
await provider.truncate(Number(oldCDStartOffset));
const oldCDBody = zipEnding.slice(0, oldCDLength);
const eocdBody = zipEnding.slice(oldCDLength, zipEnding.byteLength);
return [oldCDBody, eocdBody, oldEoCDinfo];
}
async function generateFileHeaders(fileName, fileToAdd, localFileHeaderOffset) {
const newFileCRC322 = parseInt(await new import_crypto.CRC32Hash().hash(fileToAdd, "hex"), 16);
const newFileLocalHeader = generateLocalHeader({
crc32: newFileCRC322,
fileName,
length: fileToAdd.byteLength
});
const newFileCDHeader = generateCDHeader({
crc32: newFileCRC322,
fileName,
offset: localFileHeaderOffset,
length: fileToAdd.byteLength
});
return [
new Uint8Array((0, import_loader_utils5.concatenateArrayBuffers)(newFileLocalHeader, fileToAdd)),
new Uint8Array(newFileCDHeader)
];
}
async function addOneFile(zipUrl, fileToAdd, fileName) {
const provider = new import_loader_utils5.FileHandleFile(zipUrl, true);
const [oldCDBody, eocdBody, oldEoCDinfo] = await cutTheTailOff(provider);
const newFileOffset = provider.length;
const [localPart, cdHeaderPart] = await generateFileHeaders(fileName, fileToAdd, newFileOffset);
await provider.append(localPart);
const newCDBody = (0, import_loader_utils5.concatenateArrayBuffers)(oldCDBody, cdHeaderPart);
const newCDStartOffset = provider.length;
await provider.append(new Uint8Array(newCDBody));
const eocdOffset = provider.length;
await provider.append(updateEoCD(eocdBody, oldEoCDinfo.offsets, newCDStartOffset, eocdOffset, oldEoCDinfo.cdRecordsNumber + 1n));
}
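// Usage sketch (Node.js): append one file to an existing archive in place; both the
// archive path and the entry name are placeholders.
//
//   await addOneFile('archive.zip', newFileArrayBuffer, 'added/file.bin');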
async function createZip(inputPath, outputPath, createAdditionalData) {
const fileIterator = getFileIterator(inputPath);
const resFile = new import_loader_utils5.NodeFile(outputPath, "w");
const fileList = [];
const cdArray = [];
for await (const file of fileIterator) {
await addFile(file, resFile, cdArray, fileList);
}
if (createAdditionalData) {
const additionaldata = await createAdditionalData(fileList);
await addFile(additionaldata, resFile, cdArray);
}
const cdOffset = (await resFile.stat()).bigsize;
const cd = (0, import_loader_utils5.concatenateArrayBuffers)(...cdArray);
await resFile.append(new Uint8Array(cd));
const eoCDStart = (await resFile.stat()).bigsize;
await resFile.append(new Uint8Array(generateEoCD({ recordsNumber: cdArray.length, cdSize: cd.byteLength, cdOffset, eoCDStart })));
}
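// Usage sketch (Node.js): zip a directory tree; the paths are placeholders.
//
//   await createZip('./input-folder', './output.zip');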
async function addFile(file, resFile, cdArray, fileList) {
const size = (await resFile.stat()).bigsize;
fileList == null ? void 0 : fileList.push({ fileName: file.path, localHeaderOffset: size });
const [localPart, cdHeaderPart] = await generateFileHeaders(file.path, file.file, size);
await resFile.append(localPart);
cdArray.push(cdHeaderPart);
}
function getFileIterator(inputPath) {
async function* iterable() {
const fileList = await getAllFiles(inputPath);
for (const filePath of fileList) {
const file = await (await (0, import_core.fetchFile)(import_loader_utils5.path.join(inputPath, filePath))).arrayBuffer();
yield { path: filePath, file };
}
}
return iterable();
}
async function getAllFiles(basePath, subfolder = "", fsPassed) {
const fs = fsPassed ? fsPassed : new import_loader_utils5.NodeFilesystem({});
const files = await fs.readdir(pathJoin(basePath, subfolder));
const arrayOfFiles = [];
for (const file of files) {
const fullPath = pathJoin(basePath, subfolder, file);
if ((await fs.stat(fullPath)).isDirectory) {
const files2 = await getAllFiles(basePath, pathJoin(subfolder, file));
arrayOfFiles.push(...files2);
} else {
arrayOfFiles.push(pathJoin(subfolder, file));
}
}
return arrayOfFiles;
}
function pathJoin(...paths) {
const resPaths = paths.filter((val) => val.length);
return import_loader_utils5.path.join(...resPaths);
}
// dist/filesystems/zip-filesystem.js
var import_loader_utils6 = require("@loaders.gl/loader-utils");
var import_compression = require("@loaders.gl/compression");
var ZIP_COMPRESSION_HANDLERS = {
/** No compression */
0: async (compressedFile) => compressedFile,
/** Deflation */
8: async (compressedFile) => {
const compression = new import_compression.DeflateCompression({ raw: true });
const decompressedData = await compression.decompress(compressedFile);
return decompressedData;
}
};
var ZipFileSystem = class {
/** FileProvider instance, or null if not initialized */
fileProvider = null;
fileName;
archive = null;
/**
* Constructor
* @param file - instance of FileProvider or file path string
*/
constructor(file) {
if (typeof file === "string") {
this.fileName = file;
if (!import_loader_utils6.isBrowser) {
this.fileProvider = new import_loader_utils6.FileHandleFile(file);
} else {
throw new Error("Cannot open file for random access in a WEB browser");
}
} else if (file instanceof IndexedArchive) {
this.fileProvider = file.fileProvider;
this.archive = file;
this.fileName = file.fileName;
} else if ((0, import_loader_utils6.isFileProvider)(file)) {
this.fileProvider = file;
}
}
/** Clean up resources */
async destroy() {
if (this.fileProvider) {
await this.fileProvider.destroy();
}
}
/**
* Get file names list from zip archive
* @returns array of file names
*/
async readdir() {
if (!this.fileProvider) {
throw new Error("No data detected in the zip archive");
}
const fileNames = [];
const zipCDIterator = makeZipCDHeaderIterator(this.fileProvider);
for await (const cdHeader of zipCDIterator) {
fileNames.push(cdHeader.fileName);
}
return fileNames;
}
/**
* Get file metadata
* @param filename - name of a file
* @returns central directory data
*/
async stat(filename) {
const cdFileHeader = await this.getCDFileHeader(filename);
return { ...cdFileHeader, size: Number(cdFileHeader.uncompressedSize) };
}
/**
* Implementation of fetch against this file system
* @param filename - name of a file
* @returns - Response with file data
*/
async fetch(filename) {
if (this.fileName && filename.indexOf(this.fileName) === 0) {
filename = filename.substring(this.fileName.length + 1);
}
let uncompressedFile;
if (this.archive) {
uncompressedFile = await this.archive.getFile(filename, "http");
} else {
if (!this.fileProvider) {
throw new Error("No data detected in the zip archive");
}
const cdFileHeader = await this.getCDFileHeader(filename);
const localFileHeader = await parseZipLocalFileHeader(cdFileHeader.localHeaderOffset, this.fileProvider);
if (!localFileHeader) {
throw new Error("Local file header has not been found in the zip archive`");
}
const compressionHandler = ZIP_COMPRESSION_HANDLERS[localFileHeader.compressionMethod.toString()];
if (!compressionHandler) {
throw new Error(`Unsupported compression method: ${localFileHeader.compressionMethod}. Only store (0) and deflate (8) are handled`);
}
const compressedFile = await this.fileProvider.slice(localFileHeader.fileDataOffset, localFileHeader.fileDataOffset + localFileHeader.compressedSize);
uncompressedFile = await compressionHandler(compressedFile);
}
const response = new Response(uncompressedFile);
Object.defineProperty(response, "url", {
value: filename ? `${this.fileName || ""}/${filename}` : this.fileName || ""
});
return response;
}
/**
* Get central directory file header
* @param filename - name of a file
* @returns central directory file header
*/
async getCDFileHeader(filename) {
if (!this.fileProvider) {
throw new Error("No data detected in the zip archive");
}
const zipCDIterator = makeZipCDHeaderIterator(this.fileProvider);
let result = null;
for await (const cdHeader of zipCDIterator) {
if (cdHeader.fileName === filename) {
result = cdHeader;
break;
}
}
if (!result) {
throw new Error("File has not been found in the zip archive");
}
return result;
}
};
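// Usage sketch: random-access reads from an archive without unpacking it. A string
// path only works under Node.js; in the browser pass a FileProvider instead.
//
//   const zipFS = new ZipFileSystem('archive.zip');
//   const fileNames = await zipFS.readdir();
//   const response = await zipFS.fetch(fileNames[0]);
//   const data = await response.arrayBuffer();
//   await zipFS.destroy();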
// dist/filesystems/IndexedArchive.js
var IndexedArchive = class {
fileProvider;
fileName;
/**
* Constructor
* @param fileProvider - instance of a binary data reader
* @param hashTable - pre-loaded hash table. If present, getFile will skip reading the hash file
* @param fileName - name of the archive, used to prefix file URLs in the loader context
*/
constructor(fileProvider, hashTable, fileName) {
this.fileProvider = fileProvider;
this.fileName = fileName;
}
/**
* Get a file from an ordinary ZIP archive without using the hash file
* @param filename - path to the internal file
* @returns file content as an ArrayBuffer
*/
async getFileWithoutHash(filename) {
const zipFS = new ZipFileSystem(this.fileProvider);
const response = await zipFS.fetch(filename);
return await response.arrayBuffer();
}
};
// dist/hash-file-utility.js
var import_crypto2 = require("@loaders.gl/crypto");
var import_loader_utils7 = require("@loaders.gl/loader-utils");
function parseHashTable(arrayBuffer) {
const dataView = new DataView(arrayBuffer);
const hashMap = {};
for (let i = 0; i < arrayBuffer.byteLength; i = i + 24) {
const offset = dataView.getBigUint64(i + 16, true);
const hash = bufferToHex(arrayBuffer, i, 16);
hashMap[hash] = offset;
}
return hashMap;
}
function bufferToHex(buffer, start, length) {
return [...new Uint8Array(buffer, start, length)].map((x) => x.toString(16).padStart(2, "0")).join("");
}
async function makeHashTableFromZipHeaders(fileProvider) {
const zipCDIterator = makeZipCDHeaderIterator(fileProvider);
return getHashTable(zipCDIterator);
}
async function getHashTable(zipCDIterator) {
const md5Hash = new import_crypto2.MD5Hash();
const textEncoder = new TextEncoder();
const hashTable = {};
for await (const cdHeader of zipCDIterator) {
const filename = cdHeader.fileName.split("\\").join("/").toLocaleLowerCase();
const arrayBuffer = textEncoder.encode(filename).buffer;
const md5 = await md5Hash.hash(arrayBuffer, "hex");
hashTable[md5] = cdHeader.localHeaderOffset;
}
return hashTable;
}
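// Usage sketch: build an MD5(file name) -> localHeaderOffset lookup for an archive
// (`provider` is any FileProvider).
//
//   const hashTable = await makeHashTableFromZipHeaders(provider);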
async function composeHashFile(zipCDIterator) {
const md5Hash = new import_crypto2.MD5Hash();
const textEncoder = new TextEncoder();
const hashArray = [];
for await (const cdHeader of zipCDIterator) {
let filename = cdHeader.fileName.split("\\").join("/");
if (filename !== "3dSceneLayer.json.gz") {
filename = filename.toLocaleLowerCase();
}
const arrayBuffer = textEncoder.encode(filename).buffer;
const md5 = await md5Hash.hash(arrayBuffer, "hex");
hashArray.push((0, import_loader_utils7.concatenateArrayBuffers)(hexStringToBuffer(md5), bigintToBuffer(cdHeader.localHeaderOffset)));
}
const bufferArray = hashArray.sort(compareHashes);
return (0, import_loader_utils7.concatenateArrayBuffersFromArray)(bufferArray);
}
function compareHashes(arrA, arrB) {
const a = new BigUint64Array(arrA);
const b = new BigUint64Array(arrB);
return Number(a[0] === b[0] ? a[1] - b[1] : a[0] - b[0]);
}
function hexStringToBuffer(str) {
var _a;
const byteArray = (_a = str.match(/../g)) == null ? void 0 : _a.map((h) => parseInt(h, 16));
return new Uint8Array(byteArray ?? new Array(16)).buffer;
}
function bigintToBuffer(n) {
return new BigUint64Array([n]).buffer;
}
//# sourceMappingURL=index.cjs.map