@loaders.gl/core

The core API for working with loaders.gl loaders and writers.

4 lines 128 kB
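For orientation before the raw source map below: a minimal usage sketch of the API surface this bundle exports (load, parse, fetchFile, registerLoaders, and the re-exported JSONLoader all appear in the index exports). The URL is a placeholder, error handling is omitted, and an ES-module context with top-level await is assumed.

// Minimal sketch of the @loaders.gl/core API surface (assumptions: placeholder URL, ESM top-level await).
import {load, parse, fetchFile, registerLoaders} from '@loaders.gl/core';
import {JSONLoader} from '@loaders.gl/core'; // re-exported from @loaders.gl/loader-utils

// Pass loaders explicitly: fetch + parse in one call.
const table = await load('https://example.com/data.json', JSONLoader);

// Or register loaders globally and let loader selection match by extension / MIME type.
registerLoaders([JSONLoader]);
const response = await fetchFile('https://example.com/data.json');
const data = await parse(response);

Passing loaders explicitly is the pattern the source itself recommends; registerLoaders is marked deprecated in the bundled comments and kept mainly for cross-version interoperability.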
{ "version": 3, "sources": ["index.js", "lib/fetch/fetch-file.js", "javascript-utils/is-type.js", "lib/fetch/fetch-error.js", "lib/utils/mime-type-utils.js", "lib/utils/url-utils.js", "lib/utils/resource-utils.js", "lib/utils/response-utils.js", "lib/fetch/read-array-buffer.js", "lib/loader-utils/option-utils.js", "lib/loader-utils/loggers.js", "lib/loader-utils/option-defaults.js", "lib/loader-utils/normalize-loader.js", "lib/api/register-loaders.js", "lib/api/select-loader.js", "lib/api/parse.js", "lib/loader-utils/get-data.js", "iterators/make-iterator/make-string-iterator.js", "iterators/make-iterator/make-array-buffer-iterator.js", "iterators/make-iterator/make-blob-iterator.js", "iterators/make-iterator/make-stream-iterator.js", "iterators/make-iterator/make-iterator.js", "lib/loader-utils/get-fetch-function.js", "lib/loader-utils/loader-context.js", "lib/api/parse-sync.js", "lib/api/parse-in-batches.js", "lib/api/load.js", "lib/api/load-in-batches.js", "lib/api/encode-table.js", "lib/api/encode.js", "lib/api/create-data-source.js", "lib/api/select-source.js", "iterators/make-stream/make-stream.js", "null-loader.js", "lib/progress/fetch-progress.js", "lib/filesystems/browser-filesystem.js"], "sourcesContent": ["// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\n// FILE READING AND WRITING\nexport { fetchFile } from \"./lib/fetch/fetch-file.js\";\nexport { FetchError } from \"./lib/fetch/fetch-error.js\";\nexport { readArrayBuffer } from \"./lib/fetch/read-array-buffer.js\";\n// export {readFileSync} from './lib/fetch/read-file';\n// export {writeFile, writeFileSync} from './lib/fetch/write-file';\n// CONFIGURATION\nexport { setLoaderOptions, getLoaderOptions } from \"./lib/api/loader-options.js\";\nexport { registerLoaders } from \"./lib/api/register-loaders.js\";\nexport { selectLoader, selectLoaderSync } from \"./lib/api/select-loader.js\";\n// LOADING (READING + PARSING)\nexport { parse } from \"./lib/api/parse.js\";\nexport { parseSync } from \"./lib/api/parse-sync.js\";\nexport { parseInBatches } from \"./lib/api/parse-in-batches.js\";\nexport { load } from \"./lib/api/load.js\";\nexport { loadInBatches } from \"./lib/api/load-in-batches.js\";\n// ENCODING (ENCODING AND WRITING)\nexport { encodeTable, encodeTableAsText, encodeTableInBatches } from \"./lib/api/encode-table.js\";\nexport { encode, encodeSync, encodeInBatches, encodeURLtoURL } from \"./lib/api/encode.js\";\nexport { encodeText, encodeTextSync } from \"./lib/api/encode.js\";\n// SERVICES AND SOURCES\nexport { createDataSource } from \"./lib/api/create-data-source.js\";\nexport { selectSource as _selectSource } from \"./lib/api/select-source.js\";\n// CORE UTILS SHARED WITH LOADERS (RE-EXPORTED FROM LOADER-UTILS)\nexport { setPathPrefix, getPathPrefix, resolvePath } from '@loaders.gl/loader-utils';\nexport { RequestScheduler } from '@loaders.gl/loader-utils';\n// ITERATOR UTILS\nexport { makeIterator } from \"./iterators/make-iterator/make-iterator.js\";\nexport { makeStream } from \"./iterators/make-stream/make-stream.js\";\n// CORE LOADERS\nexport { NullWorkerLoader, NullLoader } from \"./null-loader.js\";\nexport { JSONLoader } from '@loaders.gl/loader-utils';\n// EXPERIMENTAL\nexport { fetchProgress as _fetchProgress } from \"./lib/progress/fetch-progress.js\";\nexport { BrowserFileSystem as _BrowserFileSystem } from \"./lib/filesystems/browser-filesystem.js\";\n// FOR TESTING\nexport { _unregisterLoaders } from \"./lib/api/register-loaders.js\";\n//\n// TODO - MOVE TO 
LOADER-UTILS AND DEPRECATE IN CORE?\n//\nexport { isBrowser, isWorker, self, window, global, document } from '@loaders.gl/loader-utils';\nexport { assert } from '@loaders.gl/loader-utils';\nexport { forEach, concatenateArrayBuffersAsync } from '@loaders.gl/loader-utils';\nexport { makeTextDecoderIterator, makeTextEncoderIterator, makeLineIterator, makeNumberedLineIterator } from '@loaders.gl/loader-utils';\n// \"JAVASCRIPT\" UTILS - move to loader-utils?\nexport { isPureObject, isPromise, isIterable, isAsyncIterable, isIterator, isResponse, isReadableStream, isWritableStream } from \"./javascript-utils/is-type.js\";\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nimport { resolvePath } from '@loaders.gl/loader-utils';\nimport { makeResponse } from \"../utils/response-utils.js\";\n// import {FetchError} from './fetch-error';\nexport function isNodePath(url) {\n return !isRequestURL(url) && !isDataURL(url);\n}\nexport function isRequestURL(url) {\n return url.startsWith('http:') || url.startsWith('https:');\n}\nexport function isDataURL(url) {\n return url.startsWith('data:');\n}\n/**\n * fetch API compatible function\n * - Supports fetching from Node.js local file system paths\n * - Respects pathPrefix and file aliases\n */\nexport async function fetchFile(urlOrData, fetchOptions) {\n if (typeof urlOrData === 'string') {\n const url = resolvePath(urlOrData);\n // Support fetching from local file system\n if (isNodePath(url)) {\n if (globalThis.loaders?.fetchNode) {\n return globalThis.loaders?.fetchNode(url, fetchOptions);\n }\n // throw new Error(\n // 'fetchFile: globalThis.loaders.fetchNode not defined. Install @loaders.gl/polyfills'\n // );\n }\n // Call global fetch\n return await fetch(url, fetchOptions);\n }\n // TODO - should we still call fetch on non-URL inputs?\n return await makeResponse(urlOrData);\n}\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nconst isBoolean = (x) => typeof x === 'boolean';\nconst isFunction = (x) => typeof x === 'function';\nexport const isObject = (x) => x !== null && typeof x === 'object';\nexport const isPureObject = (x) => isObject(x) && x.constructor === {}.constructor;\nexport const isPromise = (x) => isObject(x) && isFunction(x.then);\nexport const isIterable = (x) => Boolean(x) && typeof x[Symbol.iterator] === 'function';\nexport const isAsyncIterable = (x) => x && typeof x[Symbol.asyncIterator] === 'function';\nexport const isIterator = (x) => x && isFunction(x.next);\nexport const isResponse = (x) => (typeof Response !== 'undefined' && x instanceof Response) ||\n (x && x.arrayBuffer && x.text && x.json);\nexport const isFile = (x) => typeof File !== 'undefined' && x instanceof File;\nexport const isBlob = (x) => typeof Blob !== 'undefined' && x instanceof Blob;\n/** Check for Node.js `Buffer` without triggering bundler to include buffer polyfill */\nexport const isBuffer = (x) => x && typeof x === 'object' && x.isBuffer;\nexport const isWritableDOMStream = (x) => isObject(x) && isFunction(x.abort) && isFunction(x.getWriter);\nexport const isReadableDOMStream = (x) => (typeof ReadableStream !== 'undefined' && x instanceof ReadableStream) ||\n (isObject(x) && isFunction(x.tee) && isFunction(x.cancel) && isFunction(x.getReader));\n// Not implemented in Firefox: && isFunction(x.pipeTo)\nexport const isWritableNodeStream = (x) => isObject(x) && isFunction(x.end) && isFunction(x.write) && isBoolean(x.writable);\nexport const isReadableNodeStream = (x) => isObject(x) 
&& isFunction(x.read) && isFunction(x.pipe) && isBoolean(x.readable);\nexport const isReadableStream = (x) => isReadableDOMStream(x) || isReadableNodeStream(x);\nexport const isWritableStream = (x) => isWritableDOMStream(x) || isWritableNodeStream(x);\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nexport class FetchError extends Error {\n constructor(message, info) {\n super(message);\n this.reason = info.reason;\n this.url = info.url;\n this.response = info.response;\n }\n /** A best effort reason for why the fetch failed */\n reason;\n /** The URL that failed to load. Empty string if not available. */\n url;\n /** The Response object, if any. */\n response;\n}\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\n// TODO - build/integrate proper MIME type parsing\n// https://mimesniff.spec.whatwg.org/\nconst DATA_URL_PATTERN = /^data:([-\\w.]+\\/[-\\w.+]+)(;|,)/;\nconst MIME_TYPE_PATTERN = /^([-\\w.]+\\/[-\\w.+]+)/;\n/**\n * Compare two MIME types, case insensitively etc.\n * @param mimeType1\n * @param mimeType2\n * @returns true if the MIME types are equivalent\n * @see https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types#structure_of_a_mime_type\n */\nexport function compareMIMETypes(mimeType1, mimeType2) {\n if (mimeType1.toLowerCase() === mimeType2.toLowerCase()) {\n return true;\n }\n return false;\n}\n/**\n * Remove extra data like `charset` from MIME types\n * @param mimeString\n * @returns A clean MIME type, or an empty string\n *\n * @todo - handle more advanced MIMETYpes, multiple types\n * @todo - extract charset etc\n */\nexport function parseMIMEType(mimeString) {\n // If resource is a data url, extract any embedded mime type\n const matches = MIME_TYPE_PATTERN.exec(mimeString);\n if (matches) {\n return matches[1];\n }\n return mimeString;\n}\n/**\n * Extract MIME type from data URL\n *\n * @param mimeString\n * @returns A clean MIME type, or an empty string\n *\n * @todo - handle more advanced MIMETYpes, multiple types\n * @todo - extract charset etc\n */\nexport function parseMIMETypeFromURL(url) {\n // If resource is a data URL, extract any embedded mime type\n const matches = DATA_URL_PATTERN.exec(url);\n if (matches) {\n return matches[1];\n }\n return '';\n}\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nconst QUERY_STRING_PATTERN = /\\?.*/;\nexport function extractQueryString(url) {\n const matches = url.match(QUERY_STRING_PATTERN);\n return matches && matches[0];\n}\nexport function stripQueryString(url) {\n return url.replace(QUERY_STRING_PATTERN, '');\n}\nexport function shortenUrlForDisplay(url) {\n if (url.length < 50) {\n return url;\n }\n const urlEnd = url.slice(url.length - 15);\n const urlStart = url.substr(0, 32);\n return `${urlStart}...${urlEnd}`;\n}\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nimport { isResponse, isBlob } from \"../../javascript-utils/is-type.js\";\nimport { parseMIMEType, parseMIMETypeFromURL } from \"./mime-type-utils.js\";\nimport { stripQueryString } from \"./url-utils.js\";\n/**\n * Returns the URL associated with this resource.\n * The returned value may include a query string and need further processing.\n * If it cannot determine url, the corresponding value will be an empty string\n *\n * @todo string parameters are assumed to be URLs\n */\nexport function getResourceUrl(resource) {\n // If resource is a `Response`, it contains the 
information directly as a field\n if (isResponse(resource)) {\n const response = resource;\n return response.url;\n }\n // If the resource is a Blob or a File (subclass of Blob)\n if (isBlob(resource)) {\n const blob = resource;\n // File objects have a \"name\" property. Blob objects don't have any\n // url (name) information\n return blob.name || '';\n }\n if (typeof resource === 'string') {\n return resource;\n }\n // Unknown\n return '';\n}\n/**\n * Returns the URL associated with this resource.\n * The returned value may include a query string and need further processing.\n * If it cannot determine url, the corresponding value will be an empty string\n *\n * @todo string parameters are assumed to be URLs\n */\nexport function getResourceMIMEType(resource) {\n // If resource is a response, it contains the information directly\n if (isResponse(resource)) {\n const response = resource;\n const contentTypeHeader = response.headers.get('content-type') || '';\n const noQueryUrl = stripQueryString(response.url);\n return parseMIMEType(contentTypeHeader) || parseMIMETypeFromURL(noQueryUrl);\n }\n // If the resource is a Blob or a File (subclass of Blob)\n if (isBlob(resource)) {\n const blob = resource;\n return blob.type || '';\n }\n if (typeof resource === 'string') {\n return parseMIMETypeFromURL(resource);\n }\n // Unknown\n return '';\n}\n/**\n * Returns (approximate) content length for a resource if it can be determined.\n * Returns -1 if content length cannot be determined.\n * @param resource\n\n * @note string parameters are NOT assumed to be URLs\n */\nexport function getResourceContentLength(resource) {\n if (isResponse(resource)) {\n const response = resource;\n return response.headers['content-length'] || -1;\n }\n if (isBlob(resource)) {\n const blob = resource;\n return blob.size;\n }\n if (typeof resource === 'string') {\n // TODO - handle data URL?\n return resource.length;\n }\n if (resource instanceof ArrayBuffer) {\n return resource.byteLength;\n }\n if (ArrayBuffer.isView(resource)) {\n return resource.byteLength;\n }\n return -1;\n}\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nimport { isResponse } from \"../../javascript-utils/is-type.js\";\nimport { FetchError } from \"../fetch/fetch-error.js\";\nimport { getResourceContentLength, getResourceUrl, getResourceMIMEType } from \"./resource-utils.js\";\nimport { shortenUrlForDisplay } from \"./url-utils.js\";\n/**\n * Returns a Response object\n * Adds content-length header when possible\n *\n * @param resource\n */\nexport async function makeResponse(resource) {\n if (isResponse(resource)) {\n return resource;\n }\n // Add content-length header if possible\n const headers = {};\n const contentLength = getResourceContentLength(resource);\n if (contentLength >= 0) {\n headers['content-length'] = String(contentLength);\n }\n // `new Response(File)` does not preserve content-type and URL\n // so we add them here\n const url = getResourceUrl(resource);\n const type = getResourceMIMEType(resource);\n if (type) {\n headers['content-type'] = type;\n }\n // Add a custom header with initial bytes if available\n const initialDataUrl = await getInitialDataUrl(resource);\n if (initialDataUrl) {\n headers['x-first-bytes'] = initialDataUrl;\n }\n // TODO - is this the best way of handling strings?\n // Maybe package as data URL instead?\n if (typeof resource === 'string') {\n // Convert to ArrayBuffer to avoid Response treating it as a URL\n resource = new TextEncoder().encode(resource);\n 
}\n // Attempt to create a Response from the resource, adding headers and setting url\n const response = new Response(resource, { headers });\n // We can't control `Response.url` via constructor, use a property override to record URL.\n Object.defineProperty(response, 'url', { value: url });\n return response;\n}\n/**\n * Checks response status (async) and throws a helpful error message if status is not OK.\n * @param response\n */\nexport async function checkResponse(response) {\n if (!response.ok) {\n const error = await getResponseError(response);\n throw error;\n }\n}\n/**\n * Checks response status (sync) and throws a helpful error message if status is not OK.\n * @param response\n */\nexport function checkResponseSync(response) {\n if (!response.ok) {\n let message = `${response.status} ${response.statusText}`;\n message = message.length > 60 ? `${message.slice(0, 60)}...` : message;\n throw new Error(message);\n }\n}\n// HELPERS\nasync function getResponseError(response) {\n const shortUrl = shortenUrlForDisplay(response.url);\n let message = `Failed to fetch resource (${response.status}) ${response.statusText}: ${shortUrl}`;\n message = message.length > 100 ? `${message.slice(0, 100)}...` : message;\n const info = {\n reason: response.statusText,\n url: response.url,\n response\n };\n // See if we got an error message in the body\n try {\n const contentType = response.headers.get('Content-Type');\n info.reason =\n !response.bodyUsed && contentType?.includes('application/json')\n ? await response.json()\n : await response.text();\n }\n catch (error) {\n // eslint forbids return in a finally statement, so we just catch here\n }\n return new FetchError(message, info);\n}\nasync function getInitialDataUrl(resource) {\n const INITIAL_DATA_LENGTH = 5;\n if (typeof resource === 'string') {\n return `data:,${resource.slice(0, INITIAL_DATA_LENGTH)}`;\n }\n if (resource instanceof Blob) {\n const blobSlice = resource.slice(0, 5);\n return await new Promise((resolve) => {\n const reader = new FileReader();\n reader.onload = (event) => resolve(event?.target?.result);\n reader.readAsDataURL(blobSlice);\n });\n }\n if (resource instanceof ArrayBuffer) {\n const slice = resource.slice(0, INITIAL_DATA_LENGTH);\n const base64 = arrayBufferToBase64(slice);\n return `data:base64,${base64}`;\n }\n return null;\n}\n// https://stackoverflow.com/questions/9267899/arraybuffer-to-base64-encoded-string\nfunction arrayBufferToBase64(buffer) {\n let binary = '';\n const bytes = new Uint8Array(buffer);\n for (let i = 0; i < bytes.byteLength; i++) {\n binary += String.fromCharCode(bytes[i]);\n }\n return btoa(binary);\n}\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\n/**\n * Reads a chunk from a random access file\n * @param file\n * @param start\n * @param length\n * @returns\n */\nexport async function readArrayBuffer(file, start, length) {\n // TODO - we can do better for ArrayBuffer and string\n if (!(file instanceof Blob)) {\n file = new Blob([file]);\n }\n const slice = file.slice(start, start + length);\n return await readBlob(slice);\n}\n/**\n * Read a slice of a Blob or File, without loading the entire file into memory\n * The trick when reading File objects is to read successive \"slices\" of the File\n * Per spec https://w3c.github.io/FileAPI/, slicing a File only updates the start and end fields\n * Actually reading from file happens in `readAsArrayBuffer`\n * @param blob to read\n */\nexport async function readBlob(blob) {\n return await new 
Promise((resolve, reject) => {\n const fileReader = new FileReader();\n fileReader.onload = (event) => resolve(event?.target?.result);\n // TODO - reject with a proper Error\n fileReader.onerror = (error) => reject(error);\n fileReader.readAsArrayBuffer(blob);\n });\n}\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nimport { registerJSModules } from '@loaders.gl/loader-utils';\nimport { isPureObject, isObject } from \"../../javascript-utils/is-type.js\";\nimport { probeLog, NullLog } from \"./loggers.js\";\nimport { DEFAULT_LOADER_OPTIONS, REMOVED_LOADER_OPTIONS } from \"./option-defaults.js\";\n/**\n * Helper for safely accessing global loaders.gl variables\n * Wraps initialization of global variable in function to defeat overly aggressive tree-shakers\n */\nexport function getGlobalLoaderState() {\n // @ts-ignore\n globalThis.loaders = globalThis.loaders || {};\n // @ts-ignore\n const { loaders } = globalThis;\n // Add _state object to keep separate from modules added to globalThis.loaders\n if (!loaders._state) {\n loaders._state = {};\n }\n return loaders._state;\n}\n/**\n * Store global loader options on the global object to increase chances of cross loaders-version interoperability\n * NOTE: This use case is not reliable but can help when testing new versions of loaders.gl with existing frameworks\n * @returns global loader options merged with default loader options\n */\nexport function getGlobalLoaderOptions() {\n const state = getGlobalLoaderState();\n // Ensure all default loader options from this library are mentioned\n state.globalOptions = state.globalOptions || { ...DEFAULT_LOADER_OPTIONS };\n return state.globalOptions;\n}\n/**\n * Set global loader options\n * @param options\n */\nexport function setGlobalOptions(options) {\n const state = getGlobalLoaderState();\n const globalOptions = getGlobalLoaderOptions();\n // @ts-expect-error First param looks incorrect\n state.globalOptions = normalizeOptionsInternal(globalOptions, options);\n // Make sure any new modules are registered\n registerJSModules(options.modules);\n}\n/**\n * Merges options with global opts and loader defaults, also injects baseUri\n * @param options\n * @param loader\n * @param loaders\n * @param url\n */\nexport function normalizeOptions(options, loader, loaders, url) {\n loaders = loaders || [];\n loaders = Array.isArray(loaders) ? 
loaders : [loaders];\n validateOptions(options, loaders);\n return normalizeOptionsInternal(loader, options, url);\n}\n// VALIDATE OPTIONS\n/**\n * Warn for unsupported options\n * @param options\n * @param loaders\n */\nfunction validateOptions(options, loaders) {\n // Check top level options\n validateOptionsObject(options, null, DEFAULT_LOADER_OPTIONS, REMOVED_LOADER_OPTIONS, loaders);\n for (const loader of loaders) {\n // Get the scoped, loader specific options from the user supplied options\n const idOptions = ((options && options[loader.id]) || {});\n // Get scoped, loader specific default and deprecated options from the selected loader\n const loaderOptions = (loader.options && loader.options[loader.id]) || {};\n const deprecatedOptions = (loader.deprecatedOptions && loader.deprecatedOptions[loader.id]) || {};\n // Validate loader specific options\n // @ts-ignore\n validateOptionsObject(idOptions, loader.id, loaderOptions, deprecatedOptions, loaders);\n }\n}\n// eslint-disable-next-line max-params, complexity\nfunction validateOptionsObject(options, id, defaultOptions, deprecatedOptions, loaders) {\n const loaderName = id || 'Top level';\n const prefix = id ? `${id}.` : '';\n for (const key in options) {\n // If top level option value is an object it could options for a loader, so ignore\n const isSubOptions = !id && isObject(options[key]);\n const isBaseUriOption = key === 'baseUri' && !id;\n const isWorkerUrlOption = key === 'workerUrl' && id;\n // <loader>.workerUrl requires special handling as it is now auto-generated and no longer specified as a default option.\n if (!(key in defaultOptions) && !isBaseUriOption && !isWorkerUrlOption) {\n // Issue deprecation warnings\n if (key in deprecatedOptions) {\n probeLog.warn(`${loaderName} loader option \\'${prefix}${key}\\' no longer supported, use \\'${deprecatedOptions[key]}\\'`)();\n }\n else if (!isSubOptions) {\n const suggestion = findSimilarOption(key, loaders);\n probeLog.warn(`${loaderName} loader option \\'${prefix}${key}\\' not recognized. 
${suggestion}`)();\n }\n }\n }\n}\nfunction findSimilarOption(optionKey, loaders) {\n const lowerCaseOptionKey = optionKey.toLowerCase();\n let bestSuggestion = '';\n for (const loader of loaders) {\n for (const key in loader.options) {\n if (optionKey === key) {\n return `Did you mean \\'${loader.id}.${key}\\'?`;\n }\n const lowerCaseKey = key.toLowerCase();\n const isPartialMatch = lowerCaseOptionKey.startsWith(lowerCaseKey) || lowerCaseKey.startsWith(lowerCaseOptionKey);\n if (isPartialMatch) {\n bestSuggestion = bestSuggestion || `Did you mean \\'${loader.id}.${key}\\'?`;\n }\n }\n }\n return bestSuggestion;\n}\nfunction normalizeOptionsInternal(loader, options, url) {\n const loaderDefaultOptions = loader.options || {};\n const mergedOptions = { ...loaderDefaultOptions };\n addUrlOptions(mergedOptions, url);\n // LOGGING: options.log can be set to `null` to defeat logging\n if (mergedOptions.log === null) {\n mergedOptions.log = new NullLog();\n }\n mergeNestedFields(mergedOptions, getGlobalLoaderOptions());\n mergeNestedFields(mergedOptions, options);\n return mergedOptions;\n}\n// Merge nested options objects\nfunction mergeNestedFields(mergedOptions, options) {\n for (const key in options) {\n // Check for nested options\n // object in options => either no key in defaultOptions or object in defaultOptions\n if (key in options) {\n const value = options[key];\n if (isPureObject(value) && isPureObject(mergedOptions[key])) {\n mergedOptions[key] = {\n ...mergedOptions[key],\n ...options[key]\n };\n }\n else {\n mergedOptions[key] = options[key];\n }\n }\n // else: No need to merge nested opts, and the initial merge already copied over the nested options\n }\n}\n/**\n * Harvest information from the url\n * @deprecated This is mainly there to support a hack in the GLTFLoader\n * TODO - baseUri should be a directory, i.e. 
remove file component from baseUri\n * TODO - extract extension?\n * TODO - extract query parameters?\n * TODO - should these be injected on context instead of options?\n */\nfunction addUrlOptions(options, url) {\n if (url && !('baseUri' in options)) {\n options.baseUri = url;\n }\n}\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\n// probe.gl Log compatible loggers\nimport { Log } from '@probe.gl/log';\nexport const probeLog = new Log({ id: 'loaders.gl' });\n// Logs nothing\nexport class NullLog {\n log() {\n return () => { };\n }\n info() {\n return () => { };\n }\n warn() {\n return () => { };\n }\n error() {\n return () => { };\n }\n}\n// Logs to console\nexport class ConsoleLog {\n console;\n constructor() {\n this.console = console; // eslint-disable-line\n }\n log(...args) {\n return this.console.log.bind(this.console, ...args);\n }\n info(...args) {\n return this.console.info.bind(this.console, ...args);\n }\n warn(...args) {\n return this.console.warn.bind(this.console, ...args);\n }\n error(...args) {\n return this.console.error.bind(this.console, ...args);\n }\n}\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nimport { isBrowser } from '@loaders.gl/loader-utils';\nimport { ConsoleLog } from \"./loggers.js\";\nexport const DEFAULT_LOADER_OPTIONS = {\n // baseUri\n fetch: null,\n mimeType: undefined,\n nothrow: false,\n log: new ConsoleLog(), // A probe.gl compatible (`log.log()()` syntax) that just logs to console\n useLocalLibraries: false,\n CDN: 'https://unpkg.com/@loaders.gl',\n worker: true, // By default, use worker if provided by loader.\n maxConcurrency: 3, // How many worker instances should be created for each loader.\n maxMobileConcurrency: 1, // How many worker instances should be created for each loader on mobile devices.\n reuseWorkers: isBrowser, // By default reuse workers in browser (Node.js refuses to terminate if browsers are running)\n _nodeWorkers: false, // By default do not support node workers\n _workerType: '', // 'test' to use locally generated workers\n limit: 0,\n _limitMB: 0,\n batchSize: 'auto',\n batchDebounceMs: 0,\n metadata: false, // TODO - currently only implemented for parseInBatches, adds initial metadata batch,\n transforms: []\n};\nexport const REMOVED_LOADER_OPTIONS = {\n throws: 'nothrow',\n dataType: '(no longer used)',\n uri: 'baseUri',\n // Warn if fetch options are used on top-level\n method: 'fetch.method',\n headers: 'fetch.headers',\n body: 'fetch.body',\n mode: 'fetch.mode',\n credentials: 'fetch.credentials',\n cache: 'fetch.cache',\n redirect: 'fetch.redirect',\n referrer: 'fetch.referrer',\n referrerPolicy: 'fetch.referrerPolicy',\n integrity: 'fetch.integrity',\n keepalive: 'fetch.keepalive',\n signal: 'fetch.signal'\n};\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nimport { assert } from '@loaders.gl/loader-utils';\nexport function isLoaderObject(loader) {\n if (!loader) {\n return false;\n }\n if (Array.isArray(loader)) {\n loader = loader[0];\n }\n const hasExtensions = Array.isArray(loader?.extensions);\n /* Now handled by types and worker loaders do not have these\n let hasParser =\n loader.parseTextSync ||\n loader.parseSync ||\n loader.parse ||\n loader.parseStream || // TODO Remove, Replace with parseInBatches\n loader.parseInBatches;\n */\n return hasExtensions;\n}\nexport function normalizeLoader(loader) {\n // This error is fairly easy to trigger by mixing up import statements etc\n // 
So we make an exception and add a developer error message for this case\n // To help new users from getting stuck here\n assert(loader, 'null loader');\n assert(isLoaderObject(loader), 'invalid loader');\n // NORMALIZE [LOADER, OPTIONS] => LOADER\n // If [loader, options], create a new loaders object with options merged in\n let options;\n if (Array.isArray(loader)) {\n options = loader[1];\n loader = loader[0];\n loader = {\n ...loader,\n options: { ...loader.options, ...options }\n };\n }\n // NORMALIZE text and binary flags\n // Ensure at least one of text/binary flags are properly set\n // @ts-expect-error\n if (loader?.parseTextSync || loader?.parseText) {\n loader.text = true;\n }\n if (!loader.text) {\n loader.binary = true;\n }\n return loader;\n}\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nimport { normalizeLoader } from \"../loader-utils/normalize-loader.js\";\nimport { getGlobalLoaderState } from \"../loader-utils/option-utils.js\";\n/**\n * Store global registered loaders on the global object to increase chances of cross loaders-version interoperability\n * This use case is not reliable but can help when testing new versions of loaders.gl with existing frameworks\n */\nconst getGlobalLoaderRegistry = () => {\n const state = getGlobalLoaderState();\n state.loaderRegistry = state.loaderRegistry || [];\n return state.loaderRegistry;\n};\n/**\n * Register a list of global loaders\n * @note Registration erases loader type information.\n * @deprecated It is recommended that applications manage loader registration. This function will likely be remove in loaders.gl v5\n */\nexport function registerLoaders(loaders) {\n const loaderRegistry = getGlobalLoaderRegistry();\n loaders = Array.isArray(loaders) ? loaders : [loaders];\n for (const loader of loaders) {\n const normalizedLoader = normalizeLoader(loader);\n if (!loaderRegistry.find((registeredLoader) => normalizedLoader === registeredLoader)) {\n // add to the beginning of the loaderRegistry, so the last registeredLoader get picked\n loaderRegistry.unshift(normalizedLoader);\n }\n }\n}\n/**\n * @deprecated It is recommended that applications manage loader registration. This function will likely be remove in loaders.gl v5\n */\nexport function getRegisteredLoaders() {\n return getGlobalLoaderRegistry();\n}\n/** @deprecated For testing only */\nexport function _unregisterLoaders() {\n const state = getGlobalLoaderState();\n state.loaderRegistry = [];\n}\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nimport { compareArrayBuffers, path, log } from '@loaders.gl/loader-utils';\nimport { normalizeLoader } from \"../loader-utils/normalize-loader.js\";\nimport { getResourceUrl, getResourceMIMEType } from \"../utils/resource-utils.js\";\nimport { compareMIMETypes } from \"../utils/mime-type-utils.js\";\nimport { getRegisteredLoaders } from \"./register-loaders.js\";\nimport { isBlob } from \"../../javascript-utils/is-type.js\";\nimport { stripQueryString } from \"../utils/url-utils.js\";\nconst EXT_PATTERN = /\\.([^.]+)$/;\n// TODO - Need a variant that peeks at streams for parseInBatches\n// TODO - Detect multiple matching loaders? Use heuristics to grade matches?\n// TODO - Allow apps to pass context to disambiguate between multiple matches (e.g. 
multiple .json formats)?\n/**\n * Find a loader that matches file extension and/or initial file content\n * Search the loaders array argument for a loader that matches url extension or initial data\n * Returns: a normalized loader\n * @param data data to assist\n * @param loaders\n * @param options\n * @param context used internally, applications should not provide this parameter\n */\nexport async function selectLoader(data, loaders = [], options, context) {\n if (!validHTTPResponse(data)) {\n return null;\n }\n // First make a sync attempt, disabling exceptions\n let loader = selectLoaderSync(data, loaders, { ...options, nothrow: true }, context);\n if (loader) {\n return loader;\n }\n // For Blobs and Files, try to asynchronously read a small initial slice and test again with that\n // to see if we can detect by initial content\n if (isBlob(data)) {\n data = await data.slice(0, 10).arrayBuffer();\n loader = selectLoaderSync(data, loaders, options, context);\n }\n // no loader available\n if (!loader && !options?.nothrow) {\n throw new Error(getNoValidLoaderMessage(data));\n }\n return loader;\n}\n/**\n * Find a loader that matches file extension and/or initial file content\n * Search the loaders array argument for a loader that matches url extension or initial data\n * Returns: a normalized loader\n * @param data data to assist\n * @param loaders\n * @param options\n * @param context used internally, applications should not provide this parameter\n */\nexport function selectLoaderSync(data, loaders = [], options, context) {\n if (!validHTTPResponse(data)) {\n return null;\n }\n // eslint-disable-next-line complexity\n // if only a single loader was provided (not as array), force its use\n // TODO - Should this behavior be kept and documented?\n if (loaders && !Array.isArray(loaders)) {\n // TODO - remove support for legacy loaders\n return normalizeLoader(loaders);\n }\n // Build list of candidate loaders that will be searched in order for a match\n let candidateLoaders = [];\n // First search supplied loaders\n if (loaders) {\n candidateLoaders = candidateLoaders.concat(loaders);\n }\n // Then fall back to registered loaders\n if (!options?.ignoreRegisteredLoaders) {\n candidateLoaders.push(...getRegisteredLoaders());\n }\n // TODO - remove support for legacy loaders\n normalizeLoaders(candidateLoaders);\n const loader = selectLoaderInternal(data, candidateLoaders, options, context);\n // no loader available\n if (!loader && !options?.nothrow) {\n throw new Error(getNoValidLoaderMessage(data));\n }\n return loader;\n}\n/** Implements loaders selection logic */\n// eslint-disable-next-line complexity\nfunction selectLoaderInternal(data, loaders, options, context) {\n const url = getResourceUrl(data);\n const type = getResourceMIMEType(data);\n const testUrl = stripQueryString(url) || context?.url;\n let loader = null;\n let reason = '';\n // if options.mimeType is supplied, it takes precedence\n if (options?.mimeType) {\n loader = findLoaderByMIMEType(loaders, options?.mimeType);\n reason = `match forced by supplied MIME type ${options?.mimeType}`;\n }\n // Look up loader by url\n loader = loader || findLoaderByUrl(loaders, testUrl);\n reason = reason || (loader ? `matched url ${testUrl}` : '');\n // Look up loader by mime type\n loader = loader || findLoaderByMIMEType(loaders, type);\n reason = reason || (loader ? `matched MIME type ${type}` : '');\n // Look for loader via initial bytes (Note: not always accessible (e.g. 
Response, stream, async iterator)\n // @ts-ignore Blob | Response\n loader = loader || findLoaderByInitialBytes(loaders, data);\n // @ts-ignore Blob | Response\n reason = reason || (loader ? `matched initial data ${getFirstCharacters(data)}` : '');\n // Look up loader by fallback mime type\n if (options?.fallbackMimeType) {\n loader = loader || findLoaderByMIMEType(loaders, options?.fallbackMimeType);\n reason = reason || (loader ? `matched fallback MIME type ${type}` : '');\n }\n if (reason) {\n log.log(1, `selectLoader selected ${loader?.name}: ${reason}.`);\n }\n return loader;\n}\n/** Check HTTP Response */\nfunction validHTTPResponse(data) {\n // HANDLE HTTP status\n if (data instanceof Response) {\n // 204 - NO CONTENT. This handles cases where e.g. a tile server responds with 204 for a missing tile\n if (data.status === 204) {\n return false;\n }\n }\n return true;\n}\n/** Generate a helpful message to help explain why loader selection failed. */\nfunction getNoValidLoaderMessage(data) {\n const url = getResourceUrl(data);\n const type = getResourceMIMEType(data);\n let message = 'No valid loader found (';\n message += url ? `${path.filename(url)}, ` : 'no url provided, ';\n message += `MIME type: ${type ? `\"${type}\"` : 'not provided'}, `;\n // First characters are only accessible when called on data (string or arrayBuffer).\n // @ts-ignore Blob | Response\n const firstCharacters = data ? getFirstCharacters(data) : '';\n message += firstCharacters ? ` first bytes: \"${firstCharacters}\"` : 'first bytes: not available';\n message += ')';\n return message;\n}\nfunction normalizeLoaders(loaders) {\n for (const loader of loaders) {\n normalizeLoader(loader);\n }\n}\n// TODO - Would be nice to support http://example.com/file.glb?parameter=1\n// E.g: x = new URL('http://example.com/file.glb?load=1'; x.pathname\nfunction findLoaderByUrl(loaders, url) {\n // Get extension\n const match = url && EXT_PATTERN.exec(url);\n const extension = match && match[1];\n return extension ? 
findLoaderByExtension(loaders, extension) : null;\n}\nfunction findLoaderByExtension(loaders, extension) {\n extension = extension.toLowerCase();\n for (const loader of loaders) {\n for (const loaderExtension of loader.extensions) {\n if (loaderExtension.toLowerCase() === extension) {\n return loader;\n }\n }\n }\n return null;\n}\nfunction findLoaderByMIMEType(loaders, mimeType) {\n for (const loader of loaders) {\n if (loader.mimeTypes?.some((mimeType1) => compareMIMETypes(mimeType, mimeType1))) {\n return loader;\n }\n // Support referring to loaders using the \"unregistered tree\"\n // https://en.wikipedia.org/wiki/Media_type#Unregistered_tree\n if (compareMIMETypes(mimeType, `application/x.${loader.id}`)) {\n return loader;\n }\n }\n return null;\n}\nfunction findLoaderByInitialBytes(loaders, data) {\n if (!data) {\n return null;\n }\n for (const loader of loaders) {\n if (typeof data === 'string') {\n if (testDataAgainstText(data, loader)) {\n return loader;\n }\n }\n else if (ArrayBuffer.isView(data)) {\n // Typed Arrays can have offsets into underlying buffer\n if (testDataAgainstBinary(data.buffer, data.byteOffset, loader)) {\n return loader;\n }\n }\n else if (data instanceof ArrayBuffer) {\n const byteOffset = 0;\n if (testDataAgainstBinary(data, byteOffset, loader)) {\n return loader;\n }\n }\n // TODO Handle streaming case (requires creating a new AsyncIterator)\n }\n return null;\n}\nfunction testDataAgainstText(data, loader) {\n if (loader.testText) {\n return loader.testText(data);\n }\n const tests = Array.isArray(loader.tests) ? loader.tests : [loader.tests];\n return tests.some((test) => data.startsWith(test));\n}\nfunction testDataAgainstBinary(data, byteOffset, loader) {\n const tests = Array.isArray(loader.tests) ? loader.tests : [loader.tests];\n return tests.some((test) => testBinary(data, byteOffset, loader, test));\n}\nfunction testBinary(data, byteOffset, loader, test) {\n if (test instanceof ArrayBuffer) {\n return compareArrayBuffers(test, data, test.byteLength);\n }\n switch (typeof test) {\n case 'function':\n return test(data);\n case 'string':\n // Magic bytes check: If `test` is a string, check if binary data starts with that strings\n const magic = getMagicString(data, byteOffset, test.length);\n return test === magic;\n default:\n return false;\n }\n}\nfunction getFirstCharacters(data, length = 5) {\n if (typeof data === 'string') {\n return data.slice(0, length);\n }\n else if (ArrayBuffer.isView(data)) {\n // Typed Arrays can have offsets into underlying buffer\n return getMagicString(data.buffer, data.byteOffset, length);\n }\n else if (data instanceof ArrayBuffer) {\n const byteOffset = 0;\n return getMagicString(data, byteOffset, length);\n }\n return '';\n}\nfunction getMagicString(arrayBuffer, byteOffset, length) {\n if (arrayBuffer.byteLength < byteOffset + length) {\n return '';\n }\n const dataView = new DataView(arrayBuffer);\n let magic = '';\n for (let i = 0; i < length; i++) {\n magic += String.fromCharCode(dataView.getUint8(byteOffset + i));\n }\n return magic;\n}\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nimport { parseWithWorker, canParseWithWorker, mergeLoaderOptions } from '@loaders.gl/loader-utils';\nimport { assert, validateWorkerVersion } from '@loaders.gl/worker-utils';\nimport { isLoaderObject } from \"../loader-utils/normalize-loader.js\";\nimport { isResponse } from \"../../javascript-utils/is-type.js\";\nimport { normalizeOptions } from \"../loader-utils/option-utils.js\";\nimport 
{ getArrayBufferOrStringFromData } from \"../loader-utils/get-data.js\";\nimport { getLoaderContext, getLoadersFromContext } from \"../loader-utils/loader-context.js\";\nimport { getResourceUrl } from \"../utils/resource-utils.js\";\nimport { selectLoader } from \"./select-loader.js\";\n/**\n * Parses `data` using a specified loader\n * @param data\n * @param loaders\n * @param options\n * @param context\n */\n// implementation signature\nexport async function parse(data, loaders, options, context) {\n // Signature: parse(data, options, context | url)\n // Uses registered loaders\n if (loaders && !Array.isArray(loaders) && !isLoaderObject(loaders)) {\n context = undefined; // context not supported in short signature\n options = loaders;\n loaders = undefined;\n }\n data = await data; // Resolve any promise\n options = options || {}; // Could be invalid...\n // Extract a url for auto detection\n const url = getResourceUrl(data);\n // Chooses a loader (and normalizes it)\n // Also use any loaders in the context, new loaders take priority\n const typedLoaders = loaders;\n const candidateLoaders = getLoadersFromContext(typedLoaders, context);\n // todo hacky type cast\n const loader = await selectLoader(data, candidateLoaders, options);\n // Note: if no loader was found, if so just return null\n if (!loader) {\n return null;\n }\n // Normalize options\n // @ts-expect-error\n options = normalizeOptions(options, loader, candidateLoaders, url); // Could be invalid...\n // Get a context (if already present, will be unchanged)\n context = getLoaderContext(\n // @ts-expect-error\n { url, _parse: parse, loaders: candidateLoaders }, options, context || null);\n return await parseWithLoader(loader, data, options, context);\n}\n// TODO: support progress and abort\n// TODO - should accept loader.parseAsyncIterator and concatenate.\nasync function parseWithLoader(loader, data, options, context) {\n validateWorkerVersion(loader);\n options = mergeLoaderOptions(loader.options, options);\n if (isResponse(data)) {\n // Serialize to support passing the response to web worker\n const response = data;\n const { ok, redirected, status, statusText, type, url } = response;\n const headers = Object.fromEntries(response.headers.entries());\n // @ts-expect-error TODO - fix this\n context.response = { headers, ok, redirected, status, statusText, type, url };\n }\n data = await getArrayBufferOrStringFromData(data, loader, options);\n const loaderWithParser = loader;\n // First check for synchronous text parser, wrap results in promises\n if (loaderWithParser.parseTextSync && typeof data === 'string') {\n return loaderWithParser.parseTextSync(data, options, context);\n }\n // If we have a workerUrl and the loader can parse the given options efficiently in a worker\n if (canParseWithWorker(loader, options)) {\n return await parseWithWorker(loader, data, options, context, parse);\n }\n // Check for asynchronous parser\n if (loaderWithParser.parseText && typeof data === 'string') {\n return await loaderWithParser.parseText(data, options, context);\n }\n if (loaderWithParser.parse) {\n return await loaderWithParser.parse(data, options, context);\n }\n // This should not happen, all sync loaders should also offer `parse` function\n assert(!loaderWithParser.parseSync);\n // TBD - If asynchronous parser not available, return null\n throw new Error(`${loader.id} loader - no parser found and worker is disabled`);\n}\n", "// loaders.gl\n// SPDX-License-Identifier: MIT\n// Copyright (c) vis.gl contributors\nimport { 
concatenateArrayBuffersAsync } from '@loaders.gl/loader-utils';\nimport { isResponse, isReadableStream, isAsyncIterable, isIterable, isIterator, isBlob, isBuffer } from \"../../javascript-utils/is-type.js\";\nimport { makeIterator } from \"../../iterators/make-iterator/make-iterator.js\";\nimport { checkResponse, makeResponse } from \"../utils/response-utils.js\";\nconst ERR_DATA = 'Cannot convert supplied data type';\n// eslint-disable-next-line complexity\nexport function getArrayBufferOrStringFromDataSync(data, loader, options) {\n if (loader.text && typeof data === 'string') {\n return data;\n }\n if (isBuffer(data)) {\n // @ts-ignore\n data = data.buffer;\n }\n if (data instanceof ArrayBuffer) {\n const arrayBuffer = data;\n if (loader.text && !loader.binary) {\n const textDecoder = new TextDecoder('utf8');\n return textDecoder.decode(arrayBuffer);\n }\n return arrayBuffer;\n }\n // We may need to handle offsets\n if (ArrayBuffer.isView(data)) {\n // TextDecoder is invoked on typed arrays and will handle offsets\n if (loader.text && !loader.binary) {\n const textDecoder = new TextDecoder('utf8');\n return textDecoder.decode(data);\n }\n let arrayBuffer = data.buffer;\n // Since we are returning the underlying arrayBuffer, we must create a new copy\n // if this typed array / Buffer is a partial view into the ArryayBuffer\n // TODO - this is a potentially unnecessary copy\n const byteLength = data.byteLength || data.length;\n if (data.byteOffset !== 0 || byteLength !== arrayBuffer.byteLength) {\n // console.warn(`loaders.gl copying arraybuffer of length ${byteLength}`);\n arrayBuffer = arrayBuffer.slice(data.byteOffset, data.byteOffset + byteLength);\n }\n return arrayBuffer;\n }\n throw new Error(ERR_DATA);\n}\n// Convert async iterator to a promise\nexport async function getArrayBufferOrStringFromData(data, loader, options) {\n const isArrayBuffer = data instanceof ArrayBuffer || ArrayBuffer.isView(data);\n if (typeof data === 'string' || isArrayBuffer) {\n return getArrayBufferOrStringFromDataSync(data, loader, options);\n }\n // Blobs and files are FileReader compatible\n if (isBlob(data)) {\n data = await makeResponse(data);\n }\n if (isResponse(data)) {\n const response = data;\n await checkResponse(response);\n return loader.binary ? await response.arrayBuffer() : await response.text();\n }\n if (isReadableStream(data)) {\n // @ts-expect-error TS2559 options type\n data = makeIterator(data, options);\n }\n if (isIterable(data) || isAsyncIterable(data)) {\n // Assume arrayBuffer iterator - attempt to concatenate\n return concatenateArrayBuffersAsync(data);\n }\n throw new Error(ERR_DATA);\n}\nexport async function getAsyncIterableFromData(data, options) {\n if (isIterator(data)) {\n return data;\n }\n if (isResponse(data)) {\n