// @loaders.gl/loader-utils
// Version: 4.3.2
// Framework-independent loaders for 3D graphics formats
// 1,456 lines (1,417 loc) • 47.3 kB
// JavaScript (bundled CommonJS build)
// esbuild CommonJS-interop helpers (auto-generated by the bundler).
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Defines `key` on `obj` as an enumerable data property (class-field semantics).
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
// Installs lazy getters on `target` for every name in `all` (live ESM-style bindings).
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copies own properties of `from` onto `to` as getters, skipping `except` and already-present keys.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Marks the export object as an ES module and re-exposes its bindings for CommonJS consumers.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// Class-field initializer helper (stringifies non-symbol keys).
var __publicField = (obj, key, value) => {
  __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
  return value;
};
// dist/index.js
// Public export map. `__export` installs each entry as a lazy getter, so the
// bindings stay "live": each access re-reads the current module-level value.
var dist_exports = {};
__export(dist_exports, {
  BlobFile: () => BlobFile,
  DataSource: () => DataSource,
  DataViewFile: () => DataViewFile,
  FileHandleFile: () => FileHandleFile,
  FileProvider: () => FileProvider,
  HttpFile: () => HttpFile,
  ImageSource: () => ImageSource,
  JSONLoader: () => JSONLoader,
  NodeFile: () => NodeFileFacade,
  NodeFilesystem: () => NodeFileSystemFacade,
  RequestScheduler: () => RequestScheduler,
  VectorSource: () => VectorSource,
  _addAliases: () => addAliases,
  assert: () => assert,
  canEncodeWithWorker: () => canEncodeWithWorker,
  canParseWithWorker: () => canParseWithWorker,
  checkJSModule: () => checkJSModule,
  compareArrayBuffers: () => compareArrayBuffers,
  concatenateArrayBuffers: () => concatenateArrayBuffers,
  concatenateArrayBuffersAsync: () => concatenateArrayBuffersAsync,
  concatenateArrayBuffersFromArray: () => concatenateArrayBuffersFromArray,
  concatenateTypedArrays: () => concatenateTypedArrays,
  copyArrayBuffer: () => copyArrayBuffer,
  copyBinaryToDataView: () => copyBinaryToDataView,
  copyPaddedArrayBufferToDataView: () => copyPaddedArrayBufferToDataView,
  copyPaddedStringToDataView: () => copyPaddedStringToDataView,
  copyStringToDataView: () => copyStringToDataView,
  copyToArray: () => copyToArray,
  createLoaderWorker: () => createLoaderWorker,
  document: () => document_,
  forEach: () => forEach,
  getFirstCharacters: () => getFirstCharacters,
  getJSModule: () => getJSModule,
  getJSModuleOrNull: () => getJSModuleOrNull,
  getMagicString: () => getMagicString,
  getPathPrefix: () => getPathPrefix,
  global: () => global_,
  isBrowser: () => isBrowser,
  isBuffer: () => isBuffer,
  isFileProvider: () => isFileProvider,
  isWorker: () => isWorker,
  log: () => log,
  makeLineIterator: () => makeLineIterator,
  makeNumberedLineIterator: () => makeNumberedLineIterator,
  makeTextDecoderIterator: () => makeTextDecoderIterator,
  makeTextEncoderIterator: () => makeTextEncoderIterator,
  mergeLoaderOptions: () => mergeLoaderOptions,
  nodeVersion: () => nodeVersion,
  padStringToByteAlignment: () => padStringToByteAlignment,
  padToNBytes: () => padToNBytes,
  parseFromContext: () => parseFromContext,
  parseInBatchesFromContext: () => parseInBatchesFromContext,
  parseJSON: () => parseJSON,
  parseSyncFromContext: () => parseSyncFromContext,
  parseWithWorker: () => parseWithWorker,
  path: () => path_exports,
  promisify1: () => promisify1,
  promisify2: () => promisify2,
  registerJSModules: () => registerJSModules,
  resolvePath: () => resolvePath,
  self: () => self_,
  setPathPrefix: () => setPathPrefix,
  sliceArrayBuffer: () => sliceArrayBuffer,
  stream: () => stream_browser_exports,
  toArrayBuffer: () => toArrayBuffer2,
  toBuffer: () => toBuffer2,
  window: () => window_
});
// Expose everything through CommonJS `module.exports`.
module.exports = __toCommonJS(dist_exports);
// dist/loader-types.js
/**
 * Parses data using the parser service injected into the loader `context`.
 */
async function parseFromContext(data, loaders, options, context) {
  return context._parse(data, loaders, options, context);
}
/**
 * Synchronously parses data via the context.
 * Throws when the context provides no synchronous parser.
 */
function parseSyncFromContext(data, loader, options, context) {
  if (context._parseSync) {
    return context._parseSync(data, loader, options, context);
  }
  throw new Error("parseSync");
}
/**
 * Parses data in batches via the context.
 * Throws when the context does not support batched parsing.
 */
async function parseInBatchesFromContext(data, loader, options, context) {
  if (context._parseInBatches) {
    return context._parseInBatches(data, loader, options, context);
  }
  throw new Error("parseInBatches");
}
// dist/lib/env-utils/assert.js
/**
 * Minimal assertion helper: throws an Error with `message`
 * (or a generic default) when `condition` is falsy.
 */
function assert(condition, message) {
  if (!condition) {
    throw new Error(message || "loader assertion failed.");
  }
}
// dist/lib/env-utils/globals.js
// Feature-detect the global objects available in this JS environment
// (browser window, worker self, Node global). Missing ones are `false`.
var globals = {
  self: typeof self !== "undefined" && self,
  window: typeof window !== "undefined" && window,
  global: typeof global !== "undefined" && global,
  document: typeof document !== "undefined" && document
};
// Best-available aliases, falling back across environments to `{}`.
var self_ = globals.self || globals.window || globals.global || {};
var window_ = globals.window || globals.self || globals.global || {};
var global_ = globals.global || globals.self || globals.window || {};
var document_ = globals.document || {};
// True unless running under Node (detected via the `process` object);
// `process.browser` indicates a browser-targeted bundler shim.
var isBrowser = (
  // @ts-ignore process does not exist on browser
  Boolean(typeof process !== "object" || String(process) !== "[object process]" || process.browser)
);
// Workers expose the `importScripts` function.
var isWorker = typeof importScripts === "function";
// Extract the Node major version, e.g. 18 from "v18.17.0"; 0 outside Node.
var matches = typeof process !== "undefined" && process.version && /v([0-9]*)/.exec(process.version);
var nodeVersion = matches && parseFloat(matches[1]) || 0;
// dist/lib/log-utils/log.js
var import_log = require("@probe.gl/log");
// Build-time constant: the published package version ("latest" in dev builds).
var VERSION = true ? "4.3.2" : "latest";
// Human-readable tag, e.g. "v4.3.2"; empty when VERSION is not numeric.
var version = VERSION[0] >= "0" && VERSION[0] <= "9" ? `v${VERSION}` : "";
/**
 * Creates the shared probe.gl logger and registers it (plus the version)
 * on the global `loaders` / `probe` namespaces for debugging access.
 */
function createLog() {
  const log2 = new import_log.Log({ id: "loaders.gl" });
  globalThis.loaders = globalThis.loaders || {};
  globalThis.loaders.log = log2;
  globalThis.loaders.version = version;
  globalThis.probe = globalThis.probe || {};
  globalThis.probe.loaders = log2;
  return log2;
}
// Module-level singleton logger used throughout this package.
var log = createLog();
// dist/lib/option-utils/merge-loader-options.js
/**
 * Merges loader options, with fields in `newOptions` taking precedence.
 * Plain sub-objects are merged recursively (up to 4 levels deep);
 * arrays and primitives are replaced wholesale.
 */
function mergeLoaderOptions(baseOptions, newOptions) {
  return mergeOptionsRecursively(baseOptions || {}, newOptions);
}
/** Recursive helper: shallow-copies `baseOptions`, then overlays `newOptions`. */
function mergeOptionsRecursively(baseOptions, newOptions, level = 0) {
  // Past this depth, stop merging and let the new value win outright.
  if (level > 3) {
    return newOptions;
  }
  const merged = { ...baseOptions };
  for (const key of Object.keys(newOptions)) {
    const value = newOptions[key];
    const isPlainObject = Boolean(value) && typeof value === "object" && !Array.isArray(value);
    merged[key] = isPlainObject
      ? mergeOptionsRecursively(merged[key] || {}, value, level + 1)
      : value;
  }
  return merged;
}
// dist/lib/module-utils/js-module-utils.js
/** Registers named JS modules in the global `loaders.modules` registry. */
function registerJSModules(modules) {
  globalThis.loaders ||= {};
  globalThis.loaders.modules ||= {};
  Object.assign(globalThis.loaders.modules, modules);
}
/** Looks up a registered module in the global registry (undefined if absent). */
function lookupJSModule(name) {
  return globalThis.loaders?.modules?.[name];
}
/** Logs a warning when the named module has not been registered. */
function checkJSModule(name, caller) {
  if (!lookupJSModule(name)) {
    log.warn(`${caller}: ${name} library not installed`)();
  }
}
/** Returns the named registered module, throwing when it is missing. */
function getJSModule(name, caller) {
  const module2 = lookupJSModule(name);
  if (!module2) {
    throw new Error(`${caller}: ${name} library not installed`);
  }
  return module2;
}
/** Returns the named registered module, or null when it is missing. */
function getJSModuleOrNull(name) {
  return lookupJSModule(name) || null;
}
// dist/lib/worker-loader-utils/create-loader-worker.js
var import_worker_utils = require("@loaders.gl/worker-utils");
// Monotonic id for sub-requests sent from the worker back to the main thread.
var requestId = 0;
/**
 * Turns the current worker thread into a loader worker: listens for
 * "process" messages, parses the supplied ArrayBuffer with `loader`, and
 * posts back "done" (with the result) or "error" (with a message string).
 * No-op when not running inside a worker thread.
 */
async function createLoaderWorker(loader) {
  if (!await import_worker_utils.WorkerBody.inWorkerThread()) {
    return;
  }
  import_worker_utils.WorkerBody.onmessage = async (type, payload) => {
    switch (type) {
      case "process":
        try {
          const { input, options = {}, context = {} } = payload;
          const result = await parseData({
            loader,
            arrayBuffer: input,
            options,
            // @ts-expect-error fetch missing
            context: {
              ...context,
              // Nested parses are delegated back to the main thread.
              _parse: parseOnMainThread
            }
          });
          import_worker_utils.WorkerBody.postMessage("done", { result });
        } catch (error) {
          const message = error instanceof Error ? error.message : "";
          import_worker_utils.WorkerBody.postMessage("error", { error: message });
        }
        break;
      default:
      // Unknown message types are ignored.
    }
  };
}
/**
 * Called from inside a worker to delegate a nested parse to the main thread.
 * Posts a "process" message tagged with a unique id and settles the returned
 * promise when the matching "done"/"error" response arrives.
 */
function parseOnMainThread(arrayBuffer, loader, options, context) {
  return new Promise((resolve2, reject) => {
    const id = requestId++;
    // Response handler: only reacts to messages carrying our request id.
    const onMessage2 = (type, payload2) => {
      if (payload2.id !== id) {
        return;
      }
      switch (type) {
        case "done":
          import_worker_utils.WorkerBody.removeEventListener(onMessage2);
          resolve2(payload2.result);
          break;
        case "error":
          import_worker_utils.WorkerBody.removeEventListener(onMessage2);
          reject(payload2.error);
          break;
        default:
        // Other message types are not addressed to this request.
      }
    };
    import_worker_utils.WorkerBody.addEventListener(onMessage2);
    const payload = { id, input: arrayBuffer, options };
    import_worker_utils.WorkerBody.postMessage("process", payload);
  });
}
/**
 * Parses binary data on the worker thread with the supplied loader.
 * Prefers a binary parser (`parseSync`/`parse`); falls back to decoding the
 * buffer as text when only `parseTextSync` is available. Throws when the
 * loader offers no usable parser.
 */
async function parseData({ loader, arrayBuffer, options, context }) {
  let parser;
  let data;
  if (loader.parseSync || loader.parse) {
    parser = loader.parseSync || loader.parse;
    data = arrayBuffer;
  } else if (loader.parseTextSync) {
    parser = loader.parseTextSync;
    data = new TextDecoder().decode(arrayBuffer);
  } else {
    throw new Error(`Could not load data with ${loader.name} loader`);
  }
  // Disable worker recursion and forward any loader-registered modules.
  options = {
    ...options,
    modules: (loader && loader.options && loader.options.modules) || {},
    worker: false
  };
  return await parser(data, { ...options }, context, loader);
}
// dist/lib/worker-loader-utils/parse-with-worker.js
var import_worker_utils2 = require("@loaders.gl/worker-utils");
/**
 * True when this loader can and should run on a worker: a worker farm must be
 * supported, Node requires the `_nodeWorkers` opt-in, and both the loader and
 * the options must enable worker mode.
 */
function canParseWithWorker(loader, options) {
  if (!import_worker_utils2.WorkerFarm.isSupported()) {
    return false;
  }
  if (!import_worker_utils2.isBrowser && !(options == null ? void 0 : options._nodeWorkers)) {
    return false;
  }
  return loader.worker && (options == null ? void 0 : options.worker);
}
/**
 * Parses `data` on a pooled worker. Options and context are JSON-cloned
 * before transfer, so functions and class instances are silently dropped.
 * The worker may delegate nested parses back via `parseOnMainThread2`.
 */
async function parseWithWorker(loader, data, options, context, parseOnMainThread2) {
  const name = loader.id;
  const url = (0, import_worker_utils2.getWorkerURL)(loader, options);
  const workerFarm = import_worker_utils2.WorkerFarm.getWorkerFarm(options);
  const workerPool = workerFarm.getWorkerPool({ name, url });
  // Transfer-safe copies: only JSON-serializable values survive.
  options = JSON.parse(JSON.stringify(options));
  context = JSON.parse(JSON.stringify(context || {}));
  const job = await workerPool.startJob(
    "process-on-worker",
    // @ts-expect-error
    onMessage.bind(null, parseOnMainThread2)
    // eslint-disable-line @typescript-eslint/no-misused-promises
  );
  job.postMessage("process", {
    // @ts-ignore
    input: data,
    options,
    context
  });
  const result = await job.result;
  return await result.result;
}
/**
 * Routes messages arriving from a worker job: completes or fails the job, or
 * handles a nested "process" request by parsing on the main thread and
 * posting the outcome back. Unknown message types are logged.
 */
async function onMessage(parseOnMainThread2, job, type, payload) {
  if (type === "done") {
    job.done(payload);
    return;
  }
  if (type === "error") {
    job.error(new Error(payload.error));
    return;
  }
  if (type === "process") {
    const { id, input, options } = payload;
    try {
      const result = await parseOnMainThread2(input, options);
      job.postMessage("done", { id, result });
    } catch (error) {
      const message = error instanceof Error ? error.message : "unknown error";
      job.postMessage("error", { id, error: message });
    }
    return;
  }
  console.warn(`parse-with-worker unknown message ${type}`);
}
// dist/lib/worker-loader-utils/encode-with-worker.js
var import_worker_utils3 = require("@loaders.gl/worker-utils");
/**
 * True when this writer can and should encode on a worker; mirrors
 * `canParseWithWorker` (Node additionally requires the `_nodeWorkers` opt-in).
 */
function canEncodeWithWorker(writer, options) {
  if (!import_worker_utils3.WorkerFarm.isSupported()) {
    return false;
  }
  if (!isBrowser && !(options == null ? void 0 : options._nodeWorkers)) {
    return false;
  }
  return writer.worker && (options == null ? void 0 : options.worker);
}
// dist/lib/binary-utils/get-first-characters.js
/**
 * Returns up to `length` leading characters of a string or binary buffer
 * (used for error messages and magic-byte sniffing).
 * Unsupported input types yield "".
 */
function getFirstCharacters(data, length = 5) {
  if (typeof data === "string") {
    return data.slice(0, length);
  }
  if (ArrayBuffer.isView(data)) {
    return getMagicString(data.buffer, data.byteOffset, length);
  }
  if (data instanceof ArrayBuffer) {
    return getMagicString(data, 0, length);
  }
  return "";
}
/**
 * Decodes `length` bytes at `byteOffset` of an ArrayBuffer as an ASCII
 * "magic" string. Returns "" when the buffer is too small to contain the
 * requested range.
 */
function getMagicString(arrayBuffer, byteOffset, length) {
  // Fixed off-by-one: a buffer of exactly `byteOffset + length` bytes is a
  // valid range (the old `<=` check rejected it and returned "").
  if (arrayBuffer.byteLength < byteOffset + length) {
    return "";
  }
  const dataView = new DataView(arrayBuffer);
  let magic = "";
  for (let i = 0; i < length; i++) {
    magic += String.fromCharCode(dataView.getUint8(byteOffset + i));
  }
  return magic;
}
// dist/lib/parser-utils/parse-json.js
/**
 * JSON.parse with a more helpful error: the thrown Error includes the first
 * characters of the input and (fix) chains the original SyntaxError as
 * `cause` instead of discarding it.
 */
function parseJSON(string) {
  try {
    return JSON.parse(string);
  } catch (error) {
    throw new Error(`Failed to parse JSON from data starting with "${getFirstCharacters(string)}"`, { cause: error });
  }
}
// dist/lib/binary-utils/array-buffer-utils.js
/**
 * Compares the first `byteLength` bytes of two ArrayBuffers for equality
 * (defaults to the full length of the first buffer).
 * Fixed: the `byteLength` argument was previously ignored — the loop always
 * compared the entire first buffer.
 */
function compareArrayBuffers(arrayBuffer1, arrayBuffer2, byteLength) {
  byteLength = byteLength || arrayBuffer1.byteLength;
  if (arrayBuffer1.byteLength < byteLength || arrayBuffer2.byteLength < byteLength) {
    return false;
  }
  // Bound both views to the requested range so only those bytes are compared.
  const array1 = new Uint8Array(arrayBuffer1, 0, byteLength);
  const array2 = new Uint8Array(arrayBuffer2, 0, byteLength);
  for (let i = 0; i < array1.length; ++i) {
    if (array1[i] !== array2[i]) {
      return false;
    }
  }
  return true;
}
/** Concatenates any number of ArrayBuffers / typed arrays into one ArrayBuffer. */
function concatenateArrayBuffers(...sources) {
  return concatenateArrayBuffersFromArray(sources);
}
/** Array-argument form of `concatenateArrayBuffers`. */
function concatenateArrayBuffersFromArray(sources) {
  // Normalize raw ArrayBuffers to byte views; typed arrays pass through.
  const chunks = sources.map((source2) =>
    source2 instanceof ArrayBuffer ? new Uint8Array(source2) : source2
  );
  let totalBytes = 0;
  for (const chunk of chunks) {
    totalBytes += chunk.byteLength;
  }
  const result = new Uint8Array(totalBytes);
  let offset = 0;
  for (const chunk of chunks) {
    result.set(chunk, offset);
    offset += chunk.byteLength;
  }
  return result.buffer;
}
/**
 * Concatenates two or more typed arrays (of the same type) into a single
 * new typed array. Throws unless at least two arrays are supplied.
 */
function concatenateTypedArrays(...typedArrays) {
  const arrays = typedArrays;
  const TypedArrayConstructor = (arrays && arrays.length > 1 && arrays[0].constructor) || null;
  if (!TypedArrayConstructor) {
    throw new Error('"concatenateTypedArrays" - incorrect quantity of arguments or arguments have incompatible data types');
  }
  let totalLength = 0;
  for (const array of arrays) {
    totalLength += array.length;
  }
  const result = new TypedArrayConstructor(totalLength);
  let cursor = 0;
  for (const array of arrays) {
    result.set(array, cursor);
    cursor += array.length;
  }
  return result;
}
/**
 * Returns a copy of the specified byte range of an ArrayBuffer.
 * When `byteLength` is omitted, copies from `byteOffset` through the end.
 */
function sliceArrayBuffer(arrayBuffer, byteOffset, byteLength) {
  const bytes = new Uint8Array(arrayBuffer);
  const subArray = byteLength === void 0
    ? bytes.subarray(byteOffset)
    : bytes.subarray(byteOffset, byteOffset + byteLength);
  // Re-wrapping the subarray copies it into a fresh, exactly-sized buffer.
  return new Uint8Array(subArray).buffer;
}
// dist/lib/binary-utils/memory-copy-utils.js
/**
 * Rounds `byteLength` up to the next multiple of `padding`.
 * The bitmask formula requires `padding` to be a power of two
 * (only positivity is asserted).
 */
function padToNBytes(byteLength, padding) {
  assert(byteLength >= 0);
  assert(padding > 0);
  const mask = padding - 1;
  return (byteLength + mask) & ~mask;
}
/**
 * Copies `sourceBuffer` into `targetBuffer` starting at `byteOffset`
 * (copying `byteLength` bytes, defaulting to the whole source).
 * Returns the target buffer.
 */
function copyArrayBuffer(targetBuffer, sourceBuffer, byteOffset, byteLength = sourceBuffer.byteLength) {
  const targetView = new Uint8Array(targetBuffer, byteOffset, byteLength);
  targetView.set(new Uint8Array(sourceBuffer));
  return targetBuffer;
}
/**
 * Copies `source` (an ArrayBuffer, a typed-array view, or an object exposing
 * `arrayBuffer`) into the typed array `target` at `targetOffset`.
 * Returns the next 4-byte-aligned write offset.
 */
function copyToArray(source, target, targetOffset) {
  let sourceArray;
  if (source instanceof ArrayBuffer) {
    sourceArray = new Uint8Array(source);
  } else {
    // Re-wrap the view's underlying buffer as bytes over the same range.
    sourceArray = new Uint8Array(source.buffer || source.arrayBuffer, source.byteOffset, source.byteLength);
  }
  target.set(sourceArray, targetOffset);
  return targetOffset + padToNBytes(sourceArray.byteLength, 4);
}
// dist/lib/binary-utils/dataview-copy-utils.js
/**
 * Pads `string` with trailing spaces so its length is a multiple of
 * `byteAlignment`.
 */
function padStringToByteAlignment(string, byteAlignment) {
  const paddedLength = Math.ceil(string.length / byteAlignment) * byteAlignment;
  // String.prototype.padEnd replaces the previous manual space-append loop.
  return string.padEnd(paddedLength, " ");
}
/**
 * Writes `byteLength` characters of `string` into `dataView` as single bytes
 * (char codes). Passing a null dataView performs a "measure-only" pass.
 * Returns the advanced byte offset.
 */
function copyStringToDataView(dataView, byteOffset, string, byteLength) {
  if (!dataView) {
    return byteOffset + byteLength;
  }
  for (let i = 0; i < byteLength; i++) {
    dataView.setUint8(byteOffset + i, string.charCodeAt(i));
  }
  return byteOffset + byteLength;
}
/**
 * Writes `byteLength` bytes of the indexable `binary` into `dataView`.
 * Passing a null dataView performs a "measure-only" pass.
 * Returns the advanced byte offset.
 */
function copyBinaryToDataView(dataView, byteOffset, binary, byteLength) {
  if (!dataView) {
    return byteOffset + byteLength;
  }
  for (let i = 0; i < byteLength; i++) {
    dataView.setUint8(byteOffset + i, binary[i]);
  }
  return byteOffset + byteLength;
}
/**
 * Copies `sourceBuffer` into `dataView` at `byteOffset`, then fills with
 * spaces (0x20) up to the next `padding` boundary. Passing a null dataView
 * only measures. Returns the padded end offset.
 */
function copyPaddedArrayBufferToDataView(dataView, byteOffset, sourceBuffer, padding) {
  const paddedLength = padToNBytes(sourceBuffer.byteLength, padding);
  if (dataView) {
    const targetArray = new Uint8Array(dataView.buffer, dataView.byteOffset + byteOffset, sourceBuffer.byteLength);
    targetArray.set(new Uint8Array(sourceBuffer));
    // Space-fill the alignment gap (0x20 = " ").
    for (let i = sourceBuffer.byteLength; i < paddedLength; ++i) {
      dataView.setUint8(byteOffset + i, 32);
    }
  }
  return byteOffset + paddedLength;
}
/**
 * UTF-8-encodes `string` and copies it into `dataView` with space padding
 * to the given alignment. Returns the padded end offset.
 */
function copyPaddedStringToDataView(dataView, byteOffset, string, padding) {
  const stringBuffer = new TextEncoder().encode(string);
  return copyPaddedArrayBufferToDataView(dataView, byteOffset, stringBuffer, padding);
}
// dist/lib/iterators/text-iterators.js
/**
 * Wraps an (async) iterator of ArrayBuffer chunks into an async iterator of
 * strings, using a streaming TextDecoder so multi-byte characters split
 * across chunks decode correctly. String chunks pass through untouched.
 */
async function* makeTextDecoderIterator(arrayBufferIterator, options = {}) {
  const textDecoder = new TextDecoder(void 0, options);
  for await (const chunk of arrayBufferIterator) {
    if (typeof chunk === "string") {
      yield chunk;
    } else {
      yield textDecoder.decode(chunk, { stream: true });
    }
  }
}
/**
 * Wraps an (async) iterator of strings into an async iterator of UTF-8
 * Uint8Array chunks. Non-string chunks pass through untouched.
 */
async function* makeTextEncoderIterator(textIterator) {
  const textEncoder = new TextEncoder();
  for await (const chunk of textIterator) {
    yield typeof chunk !== "string" ? chunk : textEncoder.encode(chunk);
  }
}
/**
 * Splits an (async) iterator of text chunks into complete lines.
 * Each yielded line keeps its trailing "\n"; a final unterminated line is
 * yielded as-is.
 */
async function* makeLineIterator(textIterator) {
  let remainder = "";
  for await (const textChunk of textIterator) {
    remainder += textChunk;
    for (let eol = remainder.indexOf("\n"); eol >= 0; eol = remainder.indexOf("\n")) {
      yield remainder.slice(0, eol + 1);
      remainder = remainder.slice(eol + 1);
    }
  }
  if (remainder.length > 0) {
    yield remainder;
  }
}
/**
 * Wraps a line iterator, yielding `{counter, line}` objects with
 * 1-based line numbers.
 */
async function* makeNumberedLineIterator(lineIterator) {
  let counter = 0;
  for await (const line of lineIterator) {
    counter += 1;
    yield { counter, line };
  }
}
// dist/lib/iterators/async-iteration.js
/**
 * Iterates an (async) iterator, invoking `visitor` for each value.
 * Iteration stops early when the visitor returns a truthy value.
 * Fixes: `iterator.return` is optional per the iterator protocol (calling it
 * unguarded threw on plain iterators), and the iterator is now also closed
 * when the visitor cancels, so it can release underlying resources.
 */
async function forEach(iterator, visitor) {
  while (true) {
    const { done, value } = await iterator.next();
    if (done) {
      iterator.return?.();
      return;
    }
    const cancel = visitor(value);
    if (cancel) {
      iterator.return?.();
      return;
    }
  }
}
/**
 * Drains an async iterator of ArrayBuffer chunks and concatenates them into
 * a single ArrayBuffer.
 */
async function concatenateArrayBuffersAsync(asyncIterator) {
  const chunks = [];
  for await (const chunk of asyncIterator) {
    chunks.push(chunk);
  }
  return concatenateArrayBuffers(...chunks);
}
// dist/lib/request-utils/request-scheduler.js
var import_stats = require("@probe.gl/stats");
// Names of the probe.gl stats tracked by RequestScheduler.
var STAT_QUEUED_REQUESTS = "Queued Requests";
var STAT_ACTIVE_REQUESTS = "Active Requests";
var STAT_CANCELLED_REQUESTS = "Cancelled Requests";
var STAT_QUEUED_REQUESTS_EVER = "Queued Requests Ever";
var STAT_ACTIVE_REQUESTS_EVER = "Active Requests Ever";
// Default RequestScheduler configuration (overridable via constructor props).
var DEFAULT_PROPS = {
  id: "request-scheduler",
  /** Specifies if the request scheduler should throttle incoming requests, mainly for comparative testing. */
  throttleRequests: true,
  /** The maximum number of simultaneous active requests. Un-throttled requests do not observe this limit. */
  maxRequests: 6,
  /**
   * Specifies a debounce time, in milliseconds. All requests are queued, until no new requests have
   * been added to the queue for this amount of time.
   */
  debounceTime: 0
};
/**
 * Throttles how many requests (e.g. fetches) are in flight at once.
 * Queued requests are re-prioritized via their `getPriority` callback each
 * scheduling pass; a negative priority cancels (the scheduling promise then
 * resolves to `null`).
 */
var RequestScheduler = class {
  props;
  stats;
  activeRequestCount = 0;
  /** Tracks the number of active requests and prioritizes/cancels queued requests. */
  requestQueue = [];
  requestMap = /* @__PURE__ */ new Map();
  updateTimer = null;
  constructor(props = {}) {
    this.props = { ...DEFAULT_PROPS, ...props };
    // Pre-register the stats this scheduler reports.
    this.stats = new import_stats.Stats({ id: this.props.id });
    this.stats.get(STAT_QUEUED_REQUESTS);
    this.stats.get(STAT_ACTIVE_REQUESTS);
    this.stats.get(STAT_CANCELLED_REQUESTS);
    this.stats.get(STAT_QUEUED_REQUESTS_EVER);
    this.stats.get(STAT_ACTIVE_REQUESTS_EVER);
  }
  /**
   * Called by an application that wants to issue a request, without having it deeply queued by the browser
   *
   * When the returned promise resolved, it is OK for the application to issue a request.
   * The promise resolves to an object that contains a `done` method.
   * When the application's request has completed (or failed), the application must call the `done` function
   *
   * @param handle
   * @param getPriority will be called when request "slots" open up,
   *   allowing the caller to update priority or cancel the request
   *   Highest priority executes first, priority < 0 cancels the request
   * @returns a promise
   *   - resolves to a object (with a `done` field) when the request can be issued without queueing,
   *   - resolves to `null` if the request has been cancelled (by the callback return < 0).
   *     In this case the application should not issue the request
   */
  scheduleRequest(handle, getPriority = () => 0) {
    // Un-throttled mode: resolve immediately with a no-op `done`.
    if (!this.props.throttleRequests) {
      return Promise.resolve({ done: () => {
      } });
    }
    // De-duplicate: reuse the pending promise for an already-queued handle.
    if (this.requestMap.has(handle)) {
      return this.requestMap.get(handle);
    }
    const request = { handle, priority: 0, getPriority };
    const promise = new Promise((resolve2) => {
      // The resolver is stashed on the request so the scheduler can settle it later.
      request.resolve = resolve2;
      return request;
    });
    this.requestQueue.push(request);
    this.requestMap.set(handle, promise);
    this._issueNewRequests();
    return promise;
  }
  // PRIVATE
  _issueRequest(request) {
    const { handle, resolve: resolve2 } = request;
    let isDone = false;
    // `done` is idempotent: only the first call frees the slot.
    const done = () => {
      if (!isDone) {
        isDone = true;
        this.requestMap.delete(handle);
        this.activeRequestCount--;
        this._issueNewRequests();
      }
    };
    this.activeRequestCount++;
    return resolve2 ? resolve2({ done }) : Promise.resolve({ done });
  }
  /** We check requests asynchronously, to prevent multiple updates */
  _issueNewRequests() {
    // Debounce: restart the timer on every call.
    if (this.updateTimer !== null) {
      clearTimeout(this.updateTimer);
    }
    this.updateTimer = setTimeout(() => this._issueNewRequestsAsync(), this.props.debounceTime);
  }
  /** Refresh all requests */
  _issueNewRequestsAsync() {
    if (this.updateTimer !== null) {
      clearTimeout(this.updateTimer);
    }
    this.updateTimer = null;
    const freeSlots = Math.max(this.props.maxRequests - this.activeRequestCount, 0);
    if (freeSlots === 0) {
      return;
    }
    this._updateAllRequests();
    // Issue the front of the (sorted) queue, up to the number of free slots.
    for (let i = 0; i < freeSlots; ++i) {
      const request = this.requestQueue.shift();
      if (request) {
        this._issueRequest(request);
      }
    }
  }
  /** Ensure all requests have updated priorities, and that no longer valid requests are cancelled */
  _updateAllRequests() {
    const requestQueue = this.requestQueue;
    for (let i = 0; i < requestQueue.length; ++i) {
      const request = requestQueue[i];
      if (!this._updateRequest(request)) {
        // Cancelled: remove from queue and map (splice shifts, so step back).
        requestQueue.splice(i, 1);
        this.requestMap.delete(request.handle);
        i--;
      }
    }
    // Sort ascending by priority value; the front of the queue is issued first.
    requestQueue.sort((a, b) => a.priority - b.priority);
  }
  /** Update a single request by calling the callback */
  _updateRequest(request) {
    request.priority = request.getPriority(request.handle);
    // Negative priority cancels: resolve the caller's promise with `null`.
    if (request.priority < 0) {
      request.resolve(null);
      return false;
    }
    return true;
  }
};
// dist/lib/path-utils/file-aliases.js
// Module-level state: a prefix prepended to relative paths, plus a map of
// filename aliases applied before prefixing.
var pathPrefix = "";
var fileAliases = {};
/** Sets the prefix `resolvePath` prepends to all non-http(s) paths. */
function setPathPrefix(prefix) {
  pathPrefix = prefix;
}
/** Returns the current path prefix. */
function getPathPrefix() {
  return pathPrefix;
}
/** Registers additional filename aliases (merged into the existing map). */
function addAliases(aliases) {
  Object.assign(fileAliases, aliases);
}
/**
 * Resolves a filename: applies every matching alias prefix, then prepends the
 * global path prefix unless the result is already an http(s) URL.
 */
function resolvePath(filename2) {
  for (const [alias, replacement] of Object.entries(fileAliases)) {
    if (filename2.startsWith(alias)) {
      filename2 = filename2.replace(alias, replacement);
    }
  }
  const isHttpUrl = filename2.startsWith("http://") || filename2.startsWith("https://");
  return isHttpUrl ? filename2 : `${pathPrefix}${filename2}`;
}
// dist/json-loader.js
// Build-time version constant (the "latest" branch is dead code in this build).
var VERSION2 = true ? "4.3.2" : "latest";
/** Synchronously parses a JSON string into a JS value. */
function parseTextSync(text) {
  return JSON.parse(text);
}
/**
 * Minimal built-in loader definition for JSON (and GeoJSON) data.
 */
var JSONLoader = {
  dataType: null,
  batchType: null,
  name: "JSON",
  id: "json",
  module: "json",
  version: VERSION2,
  extensions: ["json", "geojson"],
  mimeTypes: ["application/json"],
  category: "json",
  text: true,
  parseTextSync,
  parse: async (arrayBuffer) => parseTextSync(new TextDecoder().decode(arrayBuffer)),
  options: {}
};
// dist/lib/node/buffer.browser.js
/** Browser build no-op: returns the input unchanged. */
function toArrayBuffer(buffer) {
  return buffer;
}
/** Browser build stub: Node Buffers are unavailable, so this always throws. */
function toBuffer(binaryData) {
  throw new Error("Buffer not supported in browser");
}
// dist/lib/binary-utils/memory-conversion-utils.js
/** Duck-type check for Node Buffer-like objects (via the `isBuffer` marker). */
function isBuffer(value) {
  return value && typeof value === "object" && value.isBuffer;
}
/** Converts data to a Node Buffer when a converter exists; otherwise passes through. */
function toBuffer2(data) {
  return toBuffer ? toBuffer(data) : data;
}
/**
 * Converts common binary/string representations to an ArrayBuffer.
 * Accepts Buffers, ArrayBuffers, typed-array views, strings (UTF-8 encoded),
 * and objects exposing `_toArrayBuffer()`. Throws on anything else.
 */
function toArrayBuffer2(data) {
  if (isBuffer(data)) {
    return toArrayBuffer(data);
  }
  if (data instanceof ArrayBuffer) {
    return data;
  }
  if (ArrayBuffer.isView(data)) {
    // Zero-copy when the view spans its entire underlying buffer.
    const spansWholeBuffer = data.byteOffset === 0 && data.byteLength === data.buffer.byteLength;
    return spansWholeBuffer
      ? data.buffer
      : data.buffer.slice(data.byteOffset, data.byteOffset + data.byteLength);
  }
  if (typeof data === "string") {
    return new TextEncoder().encode(data).buffer;
  }
  if (data && typeof data === "object" && data._toArrayBuffer) {
    return data._toArrayBuffer();
  }
  throw new Error("toArrayBuffer");
}
// dist/lib/node/promisify.js
/** Promisifies a one-argument Node-style (error-first callback) function. */
function promisify1(fn) {
  return (args) =>
    new Promise((resolve2, reject) => {
      fn(args, (error, callbackArgs) => (error ? reject(error) : resolve2(callbackArgs)));
    });
}
/** Promisifies a two-argument Node-style (error-first callback) function. */
function promisify2(fn) {
  return (arg1, arg2) =>
    new Promise((resolve2, reject) => {
      fn(arg1, arg2, (error, callbackArgs) => (error ? reject(error) : resolve2(callbackArgs)));
    });
}
// dist/lib/path-utils/path.js
// Namespace object exported as `path`; bindings are installed as lazy getters,
// so the functions defined later in this file resolve correctly at call time.
var path_exports = {};
__export(path_exports, {
  dirname: () => dirname,
  filename: () => filename,
  join: () => join,
  resolve: () => resolve
});
// dist/lib/path-utils/get-cwd.js
/**
 * Returns the "current working directory": `process.cwd()` under Node,
 * otherwise the directory portion of the page URL ("" when unknown).
 * Fixed: uses `globalThis.location` instead of the bare `window` global,
 * which is undefined in workers and threw a ReferenceError there.
 */
function getCWD() {
  if (typeof process !== "undefined" && typeof process.cwd !== "undefined") {
    return process.cwd();
  }
  const pathname = globalThis.location?.pathname;
  return pathname?.slice(0, pathname.lastIndexOf("/") + 1) || "";
}
// dist/lib/path-utils/path.js
/**
 * Returns the filename component (the part after the last "/") of a URL or
 * path; "" when there is no "/" or the input is empty.
 */
function filename(url) {
  const slashIndex = url ? url.lastIndexOf("/") : -1;
  // String#slice replaces the deprecated String#substr.
  return slashIndex >= 0 ? url.slice(slashIndex + 1) : "";
}
/**
 * Returns the directory component (the part before the last "/") of a URL or
 * path; "" when there is no "/" or the input is empty.
 */
function dirname(url) {
  const slashIndex = url ? url.lastIndexOf("/") : -1;
  // String#slice replaces the deprecated String#substr.
  return slashIndex >= 0 ? url.slice(0, slashIndex) : "";
}
/**
 * Joins path parts with "/", trimming redundant separators at the seams
 * (a leading "/" on the first part and a trailing "/" on the last survive).
 */
function join(...parts) {
  const separator = "/";
  const trimmed = parts.map((part, index) => {
    // Strip a leading separator from every part except the first ...
    if (index) {
      part = part.replace(new RegExp(`^${separator}`), "");
    }
    // ... and a trailing separator from every part except the last.
    if (index !== parts.length - 1) {
      part = part.replace(new RegExp(`${separator}$`), "");
    }
    return part;
  });
  return trimmed.join(separator);
}
/**
 * POSIX-style path resolution (modeled on Node's `path.resolve`): processes
 * segments right-to-left until an absolute path is assembled, falling back to
 * the current working directory, then normalizes "." and ".." segments.
 */
function resolve(...components) {
  const paths = [];
  for (let _i = 0; _i < components.length; _i++) {
    paths[_i] = components[_i];
  }
  let resolvedPath = "";
  let resolvedAbsolute = false;
  let cwd;
  // Walk segments from last to first; index -1 stands for the CWD fallback.
  for (let i = paths.length - 1; i >= -1 && !resolvedAbsolute; i--) {
    let path;
    if (i >= 0) {
      path = paths[i];
    } else {
      if (cwd === void 0) {
        // Computed lazily, at most once.
        cwd = getCWD();
      }
      path = cwd;
    }
    if (path.length === 0) {
      continue;
    }
    resolvedPath = `${path}/${resolvedPath}`;
    // A leading "/" makes the accumulated path absolute and stops the walk.
    resolvedAbsolute = path.charCodeAt(0) === SLASH;
  }
  resolvedPath = normalizeStringPosix(resolvedPath, !resolvedAbsolute);
  if (resolvedAbsolute) {
    return `/${resolvedPath}`;
  } else if (resolvedPath.length > 0) {
    return resolvedPath;
  }
  return ".";
}
// Character codes used by the path normalizer.
var SLASH = 47;
var DOT = 46;
/**
 * Normalizes a "/"-separated path string: collapses empty and "." segments
 * and resolves ".." segments against preceding ones. When `allowAboveRoot`
 * is true (relative paths), unresolvable ".." segments are kept at the front
 * of the result. Index/state-machine port of Node's internal normalizer.
 */
function normalizeStringPosix(path, allowAboveRoot) {
  let res = "";
  let lastSlash = -1;
  // `dots` counts consecutive "." chars in the current segment; -1 marks an ordinary name.
  let dots = 0;
  let code;
  // True while `res` currently ends in an unresolved "..".
  let isAboveRoot = false;
  for (let i = 0; i <= path.length; ++i) {
    // The end of the string is treated as a virtual trailing slash.
    if (i < path.length) {
      code = path.charCodeAt(i);
    } else if (code === SLASH) {
      break;
    } else {
      code = SLASH;
    }
    if (code === SLASH) {
      if (lastSlash === i - 1 || dots === 1) {
        // Empty ("//") or "." segment: skip it.
      } else if (lastSlash !== i - 1 && dots === 2) {
        // ".." segment: try to pop the previous segment from `res`.
        if (res.length < 2 || !isAboveRoot || res.charCodeAt(res.length - 1) !== DOT || res.charCodeAt(res.length - 2) !== DOT) {
          if (res.length > 2) {
            const start = res.length - 1;
            let j = start;
            for (; j >= 0; --j) {
              if (res.charCodeAt(j) === SLASH) {
                break;
              }
            }
            if (j !== start) {
              res = j === -1 ? "" : res.slice(0, j);
              lastSlash = i;
              dots = 0;
              isAboveRoot = false;
              continue;
            }
          } else if (res.length === 2 || res.length === 1) {
            res = "";
            lastSlash = i;
            dots = 0;
            isAboveRoot = false;
            continue;
          }
        }
        if (allowAboveRoot) {
          // Nothing left to pop: keep the ".." (relative paths only).
          if (res.length > 0) {
            res += "/..";
          } else {
            res = "..";
          }
          isAboveRoot = true;
        }
      } else {
        // Ordinary segment: append it to the result.
        const slice = path.slice(lastSlash + 1, i);
        if (res.length > 0) {
          res += `/${slice}`;
        } else {
          res = slice;
        }
        isAboveRoot = false;
      }
      lastSlash = i;
      dots = 0;
    } else if (code === DOT && dots !== -1) {
      ++dots;
    } else {
      dots = -1;
    }
  }
  return res;
}
// dist/lib/node/stream.browser.js
// Browser substitute for the Node stream utilities, exported as `stream`;
// `isSupported` is permanently false outside Node.
var stream_browser_exports = {};
__export(stream_browser_exports, {
  isSupported: () => isSupported
});
var isSupported = false;
// dist/lib/files/blob-file.js
var BlobFile = class {
handle;
size;
bigsize;
url;
constructor(blob) {
this.handle = blob instanceof ArrayBuffer ? new Blob([blob]) : blob;
this.size = blob instanceof ArrayBuffer ? blob.byteLength : blob.size;
this.bigsize = BigInt(this.size);
this.url = blob instanceof File ? blob.name : "";
}
async close() {
}
async stat() {
return {
size: this.handle.size,
bigsize: BigInt(this.handle.size),
isDirectory: false
};
}
async read(start, length) {
const arrayBuffer = await this.handle.slice(Number(start), Number(start) + Number(length)).arrayBuffer();
return arrayBuffer;
}
};
// dist/lib/files/http-file.js
var HttpFile = class {
handle;
size = 0;
bigsize = 0n;
url;
constructor(url) {
this.handle = url;
this.url = url;
}
async close() {
}
async stat() {
const response = await fetch(this.handle, { method: "HEAD" });
if (!response.ok) {
throw new Error(`Failed to fetch HEAD ${this.handle}`);
}
const size = parseInt(response.headers.get("Content-Length") || "0");
return {
size,
bigsize: BigInt(size),
isDirectory: false
};
}
async read(offset = 0, length = 0) {
const response = await this.fetchRange(offset, length);
const arrayBuffer = await response.arrayBuffer();
return arrayBuffer;
}
/**
*
* @param offset
* @param length
* @param signal
* @returns
* @see https://github.com/protomaps/PMTiles
*/
// eslint-disable-next-line complexity
async fetchRange(offset, length, signal) {
const nOffset = Number(offset);
const nLength = Number(length);
let controller;
if (!signal) {
controller = new AbortController();
signal = controller.signal;
}
const url = this.handle;
let response = await fetch(url, {
signal,
headers: { Range: `bytes=${nOffset}-${nOffset + nLength - 1}` }
});
switch (response.status) {
case 206:
break;
case 200:
const contentLength = response.headers.get("Content-Length");
if (!contentLength || Number(contentLength) > length) {
if (controller) {
controller.abort();
}
throw Error("content-length header missing or exceeding request. Server must support HTTP Byte Serving.");
}
case 416:
if (offset === 0) {
const contentRange = response.headers.get("Content-Range");
if (!contentRange || !contentRange.startsWith("bytes *")) {
throw Error("Missing content-length on 416 response");
}
const actualLength = Number(contentRange.substr(8));
response = await fetch(this.url, {
signal,
headers: { Range: `bytes=0-${actualLength - 1}` }
});
}
break;
default:
if (response.status >= 300) {
throw Error(`Bad response code: ${response.status}`);
}
}
return response;
}
};
// dist/lib/files/node-file-facade.js
var NOT_IMPLEMENTED = new Error("Not implemented");
/**
 * Facade for the Node file implementation. The constructor returns an
 * instance of the real `NodeFile` class when @loaders.gl/polyfills has
 * registered one on `globalThis.loaders`; otherwise it throws. The method
 * bodies below are therefore never reached at runtime.
 */
var NodeFileFacade = class {
  handle;
  size = 0;
  bigsize = 0n;
  url = "";
  constructor(url, flags, mode) {
    var _a;
    // Returning an object from a constructor substitutes it for `this`.
    if ((_a = globalThis.loaders) == null ? void 0 : _a.NodeFile) {
      return new globalThis.loaders.NodeFile(url, flags, mode);
    }
    if (isBrowser) {
      throw new Error("Can't instantiate NodeFile in browser.");
    }
    throw new Error("Can't instantiate NodeFile. Make sure to import @loaders.gl/polyfills first.");
  }
  /** Read data */
  async read(start, length) {
    throw NOT_IMPLEMENTED;
  }
  /** Write to file. The number of bytes written will be returned */
  async write(arrayBuffer, offset, length) {
    throw NOT_IMPLEMENTED;
  }
  /** Get information about file */
  async stat() {
    throw NOT_IMPLEMENTED;
  }
  /** Truncates the file descriptor. Only available on NodeFile. */
  async truncate(length) {
    throw NOT_IMPLEMENTED;
  }
  /** Append data to a file. Only available on NodeFile. */
  async append(data) {
    throw NOT_IMPLEMENTED;
  }
  /** Close the file */
  async close() {
  }
};
// dist/lib/filesystems/node-filesystem-facade.js
var NOT_IMPLEMENTED2 = new Error("Not implemented");
var NodeFileSystemFacade = class {
  // implements FileSystem
  /**
   * Facade constructor. Delegates to the real NodeFileSystem installed by
   * @loaders.gl/polyfills when available; otherwise throws a descriptive
   * error (browser vs missing polyfill).
   */
  constructor(options) {
    const RealNodeFileSystem = globalThis.loaders?.NodeFileSystem;
    if (RealNodeFileSystem) {
      // Returning an object from a constructor replaces `this`.
      return new RealNodeFileSystem(options);
    }
    const message = isBrowser ? "Can't instantiate NodeFileSystem in browser." : "Can't instantiate NodeFileSystem. Make sure to import @loaders.gl/polyfills first.";
    throw new Error(message);
  }
  // DUMMY IMPLEMENTATION, not used (constructor returns a real NodeFileSystem instance)
  // implements RandomAccessReadFileSystem
  readable = true;
  writable = true;
  async openReadableFile(path, flags) {
    throw NOT_IMPLEMENTED2;
  }
  // implements RandomAccessWriteFileSystem
  async openWritableFile(path, flags, mode) {
    throw NOT_IMPLEMENTED2;
  }
  // Implements file system
  async readdir(dirname2 = ".", options) {
    throw NOT_IMPLEMENTED2;
  }
  async stat(path, options) {
    throw NOT_IMPLEMENTED2;
  }
  async unlink(path) {
    throw NOT_IMPLEMENTED2;
  }
  async fetch(path, options) {
    throw NOT_IMPLEMENTED2;
  }
};
// dist/lib/file-provider/file-provider-interface.js
/**
 * Duck-type check for the FileProvider interface.
 * BUGFIX: test `length !== undefined` instead of truthiness — a provider
 * over an empty file has `length === 0n`, which is falsy and was previously
 * (incorrectly) rejected. Also checks that the members are actually
 * functions and returns a real boolean.
 */
var isFileProvider = (fileProvider) => {
  return typeof (fileProvider == null ? void 0 : fileProvider.getUint8) === "function" && typeof fileProvider.slice === "function" && fileProvider.length !== void 0;
};
// dist/lib/file-provider/file-provider.js
var FileProvider = class {
  /** The File object from which data is provided */
  file;
  /** Total file size in bytes, normalized to bigint */
  size;
  /** Create a new BrowserFile */
  constructor(file, size) {
    this.file = file;
    this.size = BigInt(size);
  }
  /**
   * Async factory. Resolves the file size from `bigsize`, then `size`,
   * then an optional async `stat()` call, and wraps the file.
   */
  static async create(file) {
    var _a;
    let size = 0n;
    if (file.bigsize > 0n) {
      size = file.bigsize;
    } else if (file.size > 0) {
      size = file.size;
    } else {
      // Fall back to stat() when neither size field is populated.
      const stats = await ((_a = file.stat) == null ? void 0 : _a.call(file));
      size = (stats == null ? void 0 : stats.bigsize) ?? 0n;
    }
    return new FileProvider(file, size);
  }
  /**
   * Truncates the file descriptor.
   * @param length desired file lenght
   * @throws always — range-request backed files are read-only
   */
  async truncate(length) {
    throw new Error("file loaded via range requests cannot be changed");
  }
  /**
   * Append data to a file.
   * @param buffer data to append
   * @throws always — range-request backed files are read-only
   */
  async append(buffer) {
    throw new Error("file loaded via range requests cannot be changed");
  }
  /** Close file */
  async destroy() {
    throw new Error("file loaded via range requests cannot be changed");
  }
  /**
   * Gets an unsigned 8-bit integer at the specified byte offset from the start of the file.
   * @param offset The offset, in bytes, from the start of the file where to read the data.
   */
  async getUint8(offset) {
    const arrayBuffer = await this.file.read(offset, 1);
    const val = new Uint8Array(arrayBuffer).at(0);
    if (val === void 0) {
      throw new Error("something went wrong");
    }
    return val;
  }
  /**
   * Gets an unsigned 16-bit integer at the specified byte offset from the start of the file.
   * @param offset The offset, in bytes, from the start of the file where to read the data.
   * @note reads with platform byte order (typed-array view) — TODO confirm vs DataViewFile's explicit little-endian
   */
  async getUint16(offset) {
    const arrayBuffer = await this.file.read(offset, 2);
    const val = new Uint16Array(arrayBuffer).at(0);
    if (val === void 0) {
      throw new Error("something went wrong");
    }
    return val;
  }
  /**
   * Gets an unsigned 32-bit integer at the specified byte offset from the start of the file.
   * @param offset The offset, in bytes, from the start of the file where to read the data.
   */
  async getUint32(offset) {
    const arrayBuffer = await this.file.read(offset, 4);
    const val = new Uint32Array(arrayBuffer).at(0);
    if (val === void 0) {
      throw new Error("something went wrong");
    }
    return val;
  }
  /**
   * Gets an unsigned 64-bit integer at the specified byte offset from the start of the file.
   * @param offset The offset, in bytes, from the start of the file where to read the data.
   */
  async getBigUint64(offset) {
    const arrayBuffer = await this.file.read(offset, 8);
    // BUGFIX: read through the unsigned BigUint64Array view. The previous
    // BigInt64Array (signed) view returned negative values for any
    // 64-bit value >= 2^63.
    const val = new BigUint64Array(arrayBuffer).at(0);
    if (val === void 0) {
      throw new Error("something went wrong");
    }
    return val;
  }
  /**
   * returns an ArrayBuffer whose contents are a copy of this file bytes from startOffset, inclusive, up to endOffset, exclusive.
   * @param startOffset The offset, in byte, from the start of the file where to start reading the data.
   * @param endOffset The offset, in bytes, from the start of the file where to end reading the data.
   */
  async slice(startOffset, endOffset) {
    const bigLength = BigInt(endOffset) - BigInt(startOffset);
    if (bigLength > Number.MAX_SAFE_INTEGER) {
      throw new Error("too big slice");
    }
    const length = Number(bigLength);
    return await this.file.read(startOffset, length);
  }
  /**
   * the length (in bytes) of the data.
   */
  get length() {
    return this.size;
  }
};
// dist/lib/file-provider/file-handle-file.js
var FileHandleFile = class {
  /** The FileHandle from which data is provided */
  file;
  /** Create a new FileHandleFile */
  constructor(path, append = false) {
    this.file = new NodeFileFacade(path, append ? "a+" : "r");
  }
  /**
   * Truncates the file descriptor.
   * @param length desired file lenght
   */
  async truncate(length) {
    await this.file.truncate(length);
  }
  /**
   * Append data to a file.
   * @param buffer data to append
   */
  async append(buffer) {
    await this.file.append(buffer);
  }
  /** Close file */
  async destroy() {
    await this.file.close();
  }
  /**
   * Gets an unsigned 8-bit integer at the specified byte offset from the start of the file.
   * @param offset The offset, in bytes, from the start of the file where to read the data.
   */
  async getUint8(offset) {
    const arrayBuffer = await this.file.read(offset, 1);
    const val = new Uint8Array(arrayBuffer).at(0);
    if (val === void 0) {
      throw new Error("something went wrong");
    }
    return val;
  }
  /**
   * Gets an unsigned 16-bit integer at the specified byte offset from the start of the file.
   * @param offset The offset, in bytes, from the start of the file where to read the data.
   */
  async getUint16(offset) {
    const arrayBuffer = await this.file.read(offset, 2);
    const val = new Uint16Array(arrayBuffer).at(0);
    if (val === void 0) {
      throw new Error("something went wrong");
    }
    return val;
  }
  /**
   * Gets an unsigned 32-bit integer at the specified byte offset from the start of the file.
   * @param offset The offset, in bytes, from the start of the file where to read the data.
   */
  async getUint32(offset) {
    const arrayBuffer = await this.file.read(offset, 4);
    const val = new Uint32Array(arrayBuffer).at(0);
    if (val === void 0) {
      throw new Error("something went wrong");
    }
    return val;
  }
  /**
   * Gets an unsigned 64-bit integer at the specified byte offset from the start of the file.
   * @param offset The offset, in bytes, from the start of the file where to read the data.
   */
  async getBigUint64(offset) {
    const arrayBuffer = await this.file.read(offset, 8);
    // BUGFIX: read through the unsigned BigUint64Array view. The previous
    // BigInt64Array (signed) view returned negative values for any
    // 64-bit value >= 2^63.
    const val = new BigUint64Array(arrayBuffer).at(0);
    if (val === void 0) {
      throw new Error("something went wrong");
    }
    return val;
  }
  /**
   * returns an ArrayBuffer whose contents are a copy of this file bytes from startOffset, inclusive, up to endOffset, exclusive.
   * @param startOffset The offset, in byte, from the start of the file where to start reading the data.
   * @param endOffset The offset, in bytes, from the start of the file where to end reading the data.
   */
  async slice(startOffset, endOffset) {
    const bigLength = endOffset - startOffset;
    if (bigLength > Number.MAX_SAFE_INTEGER) {
      throw new Error("too big slice");
    }
    const length = Number(bigLength);
    return await this.file.read(startOffset, length);
  }
  /**
   * the length (in bytes) of the data.
   */
  get length() {
    return this.file.bigsize;
  }
};
// dist/lib/file-provider/data-view-file.js
/** Narrow a bigint offset to a number, rejecting values beyond the safe-integer range. */
var toNumber = (bigint) => {
  if (bigint <= Number.MAX_SAFE_INTEGER) {
    return Number(bigint);
  }
  throw new Error("Offset is out of bounds");
};
/** A FileProvider backed by an in-memory DataView. All reads are little-endian. */
var DataViewFile = class {
  /** The DataView from which data is provided */
  file;
  constructor(file) {
    this.file = file;
  }
  /** Nothing to release for an in-memory view. */
  async destroy() {
  }
  /**
   * Gets an unsigned 8-bit integer at the specified byte offset from the start of the file.
   * @param offset The offset, in bytes, from the start of the file where to read the data.
   */
  async getUint8(offset) {
    const byteOffset = toNumber(offset);
    return this.file.getUint8(byteOffset);
  }
  /**
   * Gets an unsigned 16-bit integer at the specified byte offset from the start of the file.
   * @param offset The offset, in bytes, from the start of the file where to read the data.
   */
  async getUint16(offset) {
    const byteOffset = toNumber(offset);
    return this.file.getUint16(byteOffset, true);
  }
  /**
   * Gets an unsigned 32-bit integer at the specified byte offset from the start of the file.
   * @param offset The offset, in bytes, from the start of the file where to read the data.
   */
  async getUint32(offset) {
    const byteOffset = toNumber(offset);
    return this.file.getUint32(byteOffset, true);
  }
  /**
   * Gets an unsigned 64-bit integer at the specified byte offset from the start of the file.
   * @param offset The offset, in bytes, from the start of the file where to read the data.
   */
  async getBigUint64(offset) {
    const byteOffset = toNumber(offset);
    return this.file.getBigUint64(byteOffset, true);
  }
  /**
   * returns an ArrayBuffer whose contents are a copy of this file bytes from startOffset, inclusive, up to endOffset, exclusive.
   * @param startOffset The offset, in bytes, from the start of the file where to start reading the data.
   * @param endOffset The offset, in bytes, from the start of the file where to end reading the data.
   */
  async slice(startOffset, endOffset) {
    const begin = toNumber(startOffset);
    const end = toNumber(endOffset);
    return this.file.buffer.slice(begin, end);
  }
  /** the length (in bytes) of the data. */
  get length() {
    return BigInt(this.file.byteLength);
  }
};
// dist/lib/sources/data-source.js
var DataSource = class {
  /** A resolved fetch function extracted from loadOptions prop */
  fetch;
  /** The actual load options, if calling a loaders.gl loader */
  loadOptions;
  /** Dirty flag — set by setProps/setNeedsRefresh, cleared by getNeedsRefresh */
  _needsRefresh = true;
  props;
  constructor(props) {
    // Shallow-copy props and loadOptions so later mutations don't leak to the caller.
    this.props = { ...props };
    this.loadOptions = { ...props.loadOptions };
    this.fetch = getFetchFunction(this.loadOptions);
  }
  /** Merge new props into the existing props and mark the source dirty. */
  setProps(props) {
    Object.assign(this.props, props);
    this.setNeedsRefresh();
  }
  /** Mark this data source as needing a refresh (redraw) */
  setNeedsRefresh() {
    this._needsRefresh = true;
  }
  /**
   * Does this data source need refreshing?
   * @note The specifics of the refresh mechanism depends on type of data source
   */
  getNeedsRefresh(clear = true) {
    const wasDirty = this._needsRefresh;
    if (clear) {
      this._needsRefresh = false;
    }
    return wasDirty;
  }
};
/**
 * Resolve a fetch function from load options.
 * - options.fetch is a function: wrap and use it.
 * - options.fetch is an options object (RequestInit): bind it to global fetch.
 * - otherwise: plain global fetch.
 */
function getFetchFunction(options) {
  const fetchValue = options == null ? void 0 : options.fetch;
  if (typeof fetchValue === "function") {
    return (url, fetchOptions2) => fetchValue(url, fetchOptions2);
  }
  if (fetchValue) {
    return (url) => fetch(url, fetchValue);
  }
  return (url) => fetch(url);
}
// dist/lib/sources/image-source.js
var ImageSource = class extends DataSource {
  // Static fields are equivalent to the bundler's __publicField(defineProperty) calls.
  static type = "template";
  static testURL = (url) => false;
};
// dist/lib/sources/vector-source.js
var VectorSource = class extends DataSource {
  // Static fields are equivalent to the bundler's __publicField(defineProperty) calls.
  static type = "template";
  static testURL = (url) => false;
};
//# sourceMappingURL=index.cjs.map
;