/*
 * @huggingface/hub
 * Version: (unspecified)
 * Utilities to interact with the Hugging Face hub
 * 1,464 lines (1,423 loc) • 170 kB
 * JavaScript
 */
// Transpiled (Sucrase/CommonJS) runtime helpers shared by the whole bundle:
// _interopRequireWildcard emulates `import * as ns`; _nullishCoalesce and
// _asyncNullishCoalesce emulate the `??` operator (rhs passed lazily as a
// thunk); _optionalChain interprets a flat [value, op, fn, ...] program to
// emulate `?.` chains (returns undefined as soon as a nullish value is hit,
// and preserves the `this` binding for optional method calls).
// _class/_class2/_class3 hold class expressions so transpiled field
// initializers (__init helpers) can reference the class from its prototype.
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } async function _asyncNullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return await rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; } var _class; var _class2; var _class3;require('./chunk-UICA3PK6.js');
// src/consts.ts
// Default Hugging Face Hub endpoint; callers can override it per-call via a
// `hubUrl` parameter (see checkRepoAccess below).
var HUB_URL = "https://huggingface.co";
// src/error.ts
/**
 * Build and throw a HubApiError describing a failed hub HTTP response.
 *
 * Extracts a server-provided message from a JSON body (`error`, `message`,
 * `error_description`) or falls back to the raw text body, then appends a
 * trailer with the URL and request id. This function ALWAYS throws; callers
 * write `throw await createApiError(resp)` purely for control-flow clarity.
 */
async function createApiError(response, opts) {
  const requestId = response.headers.get("X-Request-Id") ?? opts?.requestId;
  const error = new HubApiError(response.url, response.status, requestId);
  error.message = `Api error with status ${error.statusCode}${opts?.message ? `. ${opts.message}` : ""}`;
  const trailer = [`URL: ${error.url}`, error.requestId ? `Request ID: ${error.requestId}` : void 0]
    .filter(Boolean)
    .join(". ");
  const isJson = response.headers.get("Content-Type")?.startsWith("application/json");
  if (isJson) {
    const json = await response.json();
    error.message = json.error || json.message || error.message;
    if (json.error_description) {
      error.message = error.message ? error.message + `: ${json.error_description}` : json.error_description;
    }
    error.data = json;
  } else {
    error.data = { message: await response.text() };
  }
  error.message += `. ${trailer}`;
  throw error;
}
/**
 * Error raised for non-OK responses from the Hugging Face Hub API.
 * Carries the request URL, HTTP status code and (optional) server request id.
 */
var HubApiError = class extends Error {
  constructor(url, statusCode, requestId, message) {
    super(message);
    Object.assign(this, { statusCode, requestId, url });
  }
};
// Thrown when a hub API response parses but does not match the expected shape.
var InvalidApiResponseFormatError = class extends Error {
};
// src/utils/checkCredentials.ts
/**
 * Validate that a user-supplied access token uses the expected "hf_" prefix.
 * @throws {TypeError} when the token does not start with "hf_"
 */
function checkAccessToken(accessToken) {
  if (accessToken.startsWith("hf_")) {
    return;
  }
  throw new TypeError("Your access token must start with 'hf_'");
}
/**
 * Extract and validate an access token from call params.
 * Accepts either `params.accessToken` or the legacy
 * `params.credentials.accessToken`; returns undefined when neither is set.
 */
function checkCredentials(params) {
  const token = params.accessToken || params.credentials?.accessToken;
  if (token) {
    checkAccessToken(token);
    return token;
  }
}
// src/utils/toRepoId.ts
/**
 * Normalize a repo designation into a { type, name } object.
 *
 * Accepts an already-built object (returned as-is) or a string:
 * "ns/name" → model, "datasets/ns/name" → dataset, "spaces/ns/name" → space.
 * @throws {TypeError} on singular prefixes ("model/", "space/", "dataset/")
 *   or on designations with too many / too few path segments.
 */
function toRepoId(repo) {
  if (typeof repo !== "string") {
    return repo;
  }
  if (repo.startsWith("model/") || repo.startsWith("models/")) {
    throw new TypeError(
      "A repo designation for a model should not start with 'models/', directly specify the model namespace / name"
    );
  }
  if (repo.startsWith("space/")) {
    throw new TypeError("Spaces should start with 'spaces/', plural, not 'space/'");
  }
  if (repo.startsWith("dataset/")) {
    throw new TypeError("Datasets should start with 'dataset/', plural, not 'dataset/'");
  }
  const slashCount = [...repo].filter((ch) => ch === "/").length;
  if (repo.startsWith("spaces/")) {
    // A space must be fully qualified: "spaces/" + namespace + "/" + name.
    if (slashCount !== 2) {
      throw new TypeError("Space Id must include namespace and name of the space");
    }
    return { type: "space", name: repo.slice("spaces/".length) };
  }
  if (repo.startsWith("datasets/")) {
    if (slashCount > 2) {
      throw new TypeError("Too many slashes in repo designation: " + repo);
    }
    return { type: "dataset", name: repo.slice("datasets/".length) };
  }
  if (slashCount > 1) {
    throw new TypeError("Too many slashes in repo designation: " + repo);
  }
  return { type: "model", name: repo };
}
// src/lib/check-repo-access.ts
/**
 * Verify the (optionally authenticated) caller can see a repo by issuing a
 * GET to `/api/{models|datasets|spaces}/{name}`.
 * Resolves on success; throws a HubApiError (via createApiError) otherwise.
 */
async function checkRepoAccess(params) {
  const accessToken = params && checkCredentials(params);
  const repoId = toRepoId(params.repo);
  const hubUrl = params?.hubUrl || HUB_URL;
  const customFetch = params.fetch || fetch;
  const response = await customFetch(`${hubUrl}/api/${repoId.type}s/${repoId.name}`, {
    headers: accessToken ? { Authorization: `Bearer ${accessToken}` } : {}
  });
  if (!response.ok) {
    // createApiError itself throws; the `throw await` is for readability.
    throw await createApiError(response);
  }
}
// src/utils/range.ts
/**
 * Integer range: range(n) → [0, n) and range(n, b) → [n, b).
 * Note: the two-argument form is selected by truthiness of `b`, matching the
 * original behavior (range(n, 0) behaves like range(n)).
 */
function range(n, b) {
  const start = b ? n : 0;
  const size = b ? b - n : n;
  return new Array(size).fill(0).map((_, i) => start + i);
}
// src/utils/chunk.ts
/**
 * Split `arr` into consecutive chunks of at most `chunkSize` elements.
 * @throws {RangeError} for NaN or sub-1 chunk sizes
 */
function chunk(arr, chunkSize) {
  // Keep the coercing isNaN on purpose: numeric strings were accepted before.
  if (isNaN(chunkSize) || chunkSize < 1) {
    throw new RangeError("Invalid chunk size: " + chunkSize);
  }
  if (!arr.length) {
    return [];
  }
  if (arr.length <= chunkSize) {
    return [arr];
  }
  const chunks = [];
  for (let offset = 0; offset < arr.length; offset += chunkSize) {
    chunks.push(arr.slice(offset, offset + chunkSize));
  }
  return chunks;
}
// src/utils/promisesQueue.ts
/**
 * Run promise factories with a concurrency limit, preserving input order in
 * the results array regardless of completion order.
 */
async function promisesQueue(factories, concurrency) {
  const results = [];
  const inFlight = new Set();
  let nextIndex = 0;
  for (const factory of factories) {
    const slot = nextIndex++;
    const task = factory().then((value) => {
      results[slot] = value;
      inFlight.delete(task);
    });
    inFlight.add(task);
    // Backpressure: wait for any task to settle before launching more.
    if (inFlight.size >= concurrency) {
      await Promise.race(inFlight);
    }
  }
  await Promise.all(inFlight);
  return results;
}
// src/utils/promisesQueueStreaming.ts
/**
 * Like promisesQueue but for a (possibly async) iterable of factories and
 * without collecting results — used for fire-and-forget streamed work.
 */
async function promisesQueueStreaming(factories, concurrency) {
  const inFlight = [];
  for await (const factory of factories) {
    const task = factory().then(() => {
      const at = inFlight.indexOf(task);
      inFlight.splice(at, 1);
    });
    inFlight.push(task);
    if (inFlight.length >= concurrency) {
      await Promise.race(inFlight);
    }
  }
  await Promise.all(inFlight);
}
// src/utils/eventToGenerator.ts
// Adapt a callback-style producer into an async generator.
// `cb` receives (yieldCallback, returnCallback, rejectCallback); each yield/
// return resolves the *previously armed* promise slot (hence `.at(-2)` right
// after pushing a fresh slot), while the consumer loop below awaits slots in
// FIFO order. Rejections are delivered through the oldest pending slot.
async function* eventToGenerator(cb) {
const promises = [];
// Arm one more { p, resolve, reject } slot at the tail of the queue.
function addPromise() {
let resolve2;
let reject;
const p = new Promise((res, rej) => {
resolve2 = res;
reject = rej;
});
promises.push({ p, resolve: resolve2, reject });
}
// Initial slot so the first yield/return has something to resolve.
addPromise();
const callbackRes = Promise.resolve().then(
() => cb(
(y) => {
// Push a new slot first, then resolve the one that was pending.
addPromise();
_optionalChain([promises, 'access', _12 => _12.at, 'call', _13 => _13(-2), 'optionalAccess', _14 => _14.resolve, 'call', _15 => _15({ done: false, value: y })]);
},
(r) => {
addPromise();
_optionalChain([promises, 'access', _16 => _16.at, 'call', _17 => _17(-2), 'optionalAccess', _18 => _18.resolve, 'call', _19 => _19({ done: true, value: r })]);
},
(err) => _optionalChain([promises, 'access', _20 => _20.shift, 'call', _21 => _21(), 'optionalAccess', _22 => _22.reject, 'call', _23 => _23(err)])
)
).catch((err) => _optionalChain([promises, 'access', _24 => _24.shift, 'call', _25 => _25(), 'optionalAccess', _26 => _26.reject, 'call', _27 => _27(err)]));
// Consumer: drain slots in order until the producer signals done.
while (1) {
const p = promises[0];
if (!p) {
throw new Error("Logic error in eventGenerator, promises should never be empty");
}
const result = await p.p;
promises.shift();
if (result.done) {
// Surface any error thrown by cb itself before returning.
await callbackRes;
return result.value;
}
yield result.value;
}
// Unreachable: the while(1) loop only exits via return/throw above.
throw new Error("Unreachable");
}
// src/utils/hexFromBytes.ts
/**
 * Lower-case hex encoding of a byte array.
 * Uses Buffer when available (Node); otherwise maps each byte to a
 * zero-padded two-digit hex string.
 */
function hexFromBytes(arr) {
  if (globalThis.Buffer) {
    return globalThis.Buffer.from(arr).toString("hex");
  }
  return Array.from(arr, (byte) => byte.toString(16).padStart(2, "0")).join("");
}
// src/utils/isBackend.ts
// Environment detection: a DOM window means a browser tab; a constructor named
// "DedicatedWorkerGlobalScope" means a web worker. Everything else (Node,
// Deno, ...) counts as "backend".
var isBrowser = typeof window !== "undefined" && typeof window.document !== "undefined";
var isWebWorker = typeof self === "object" && self.constructor && self.constructor.name === "DedicatedWorkerGlobalScope";
var isBackend = !isBrowser && !isWebWorker;
// src/utils/isFrontend.ts
var isFrontend = !isBackend;
// src/utils/sha256.ts
// Lazily load the bundled SHA-256 worker source and wrap it in a Blob object
// URL so `new Worker(url)` works without shipping a separate script file.
// NOTE(review): the object URL is never revoked — presumably acceptable for
// the small number of workers created; confirm if pooling grows.
async function getWebWorkerCode() {
const sha256Module = await Promise.resolve().then(() => _interopRequireWildcard(require("./sha256-wrapper-DHTT2DPH.js")));
return URL.createObjectURL(new Blob([sha256Module.createSHA256WorkerCode()]));
}
// Module-level worker pool state.
// Idle workers available for reuse by getWorker().
var pendingWorkers = [];
// Workers currently running a hash job.
var runningWorkers = /* @__PURE__ */ new Set();
// waitPromise/resolve form a one-shot signal that getWorker awaits when the
// pool is saturated; freeWorker/destroyWorker resolve it and re-arm it.
var resolve;
var waitPromise = new Promise((r) => {
resolve = r;
});
/**
 * Acquire a Worker for SHA-256 hashing.
 * - Reuses an idle worker from pendingWorkers when available.
 * - With no poolSize (undefined or 0), always spawns a new worker (unbounded).
 * - With a positive poolSize, waits on waitPromise until a slot frees up.
 * NOTE(review): `!poolSize` already catches 0, so the `poolSize <= 0` guard
 * below is only reachable for negative values.
 */
async function getWorker(poolSize) {
{
// Fast path: recycle an idle worker.
const worker2 = pendingWorkers.pop();
if (worker2) {
runningWorkers.add(worker2);
return worker2;
}
}
if (!poolSize) {
const worker2 = new Worker(await getWebWorkerCode());
runningWorkers.add(worker2);
return worker2;
}
if (poolSize <= 0) {
throw new TypeError("Invalid webworker pool size: " + poolSize);
}
// Wait until another worker is freed/destroyed (each of those rotates
// waitPromise, so loop until a slot is actually available).
while (runningWorkers.size >= poolSize) {
await waitPromise;
}
const worker = new Worker(await getWebWorkerCode());
runningWorkers.add(worker);
return worker;
}
/**
 * Return a worker to the idle pool after a successful job.
 * Without a poolSize the worker is simply terminated. Otherwise it is moved
 * to pendingWorkers and one waiter in getWorker is woken by resolving the
 * current waitPromise and re-arming a fresh one.
 */
async function freeWorker(worker, poolSize) {
if (!poolSize) {
return destroyWorker(worker);
}
runningWorkers.delete(worker);
pendingWorkers.push(worker);
// Swap in a new one-shot signal before resolving the old one so late
// awaiters always see an unresolved promise.
const r = resolve;
waitPromise = new Promise((r2) => {
resolve = r2;
});
r();
}
/**
 * Terminate a worker (used on errors/aborts and for non-pooled workers) and
 * wake one waiter in getWorker, mirroring freeWorker's signal rotation.
 */
function destroyWorker(worker) {
runningWorkers.delete(worker);
worker.terminate();
const r = resolve;
waitPromise = new Promise((r2) => {
resolve = r2;
});
r();
}
/**
 * Async generator computing the SHA-256 of a Blob/buffer.
 * Yields progress values in [0, 1] and returns the lower-case hex digest.
 *
 * Strategy selection:
 * 1. WebCrypto `subtle.digest` for inputs below the web-worker threshold
 *    (opts.useWebWorker.minSize, default 1e7 bytes) when available.
 * 2. On the frontend with opts.useWebWorker: a pooled Worker running the
 *    bundled hasher (message protocol: {progress} updates, {sha256} result).
 *    Any worker setup/er failure falls through to strategy 3.
 * 3. On the frontend: streaming WASM hasher (lazy-loaded, cached in
 *    wasmModule).
 * 4. On the backend: native Node crypto via sha256Node (cached in
 *    cryptoModule).
 */
async function* sha256(buffer, opts) {
yield 0;
const maxCryptoSize = typeof _optionalChain([opts, 'optionalAccess', _28 => _28.useWebWorker]) === "object" && _optionalChain([opts, 'optionalAccess', _29 => _29.useWebWorker, 'access', _30 => _30.minSize]) !== void 0 ? opts.useWebWorker.minSize : 1e7;
// Strategy 1: one-shot WebCrypto digest for small inputs.
if (buffer.size < maxCryptoSize && _optionalChain([globalThis, 'access', _31 => _31.crypto, 'optionalAccess', _32 => _32.subtle])) {
const res = hexFromBytes(
new Uint8Array(
await globalThis.crypto.subtle.digest("SHA-256", buffer instanceof Blob ? await buffer.arrayBuffer() : buffer)
)
);
yield 1;
return res;
}
if (isFrontend) {
// Strategy 2: opt-in web worker (progress-capable, pooled).
if (_optionalChain([opts, 'optionalAccess', _33 => _33.useWebWorker])) {
try {
const poolSize = typeof _optionalChain([opts, 'optionalAccess', _34 => _34.useWebWorker]) === "object" ? opts.useWebWorker.poolSize : void 0;
const worker = await getWorker(poolSize);
let messageHandler;
let errorHandler;
const cleanup = () => {
worker.removeEventListener("message", messageHandler);
worker.removeEventListener("error", errorHandler);
};
return yield* eventToGenerator((yieldCallback, returnCallback, rejectCallback) => {
messageHandler = (event) => {
if (event.data.sha256) {
// Success: recycle the worker and finish the generator.
cleanup();
freeWorker(worker, poolSize);
returnCallback(event.data.sha256);
} else if (event.data.progress) {
yieldCallback(event.data.progress);
// Abort check piggybacks on progress messages.
try {
_optionalChain([opts, 'access', _35 => _35.abortSignal, 'optionalAccess', _36 => _36.throwIfAborted, 'call', _37 => _37()]);
} catch (err) {
cleanup();
destroyWorker(worker);
rejectCallback(err);
}
} else {
// Unexpected message shape: treat as fatal for this worker.
cleanup();
destroyWorker(worker);
rejectCallback(event);
}
};
errorHandler = (event) => {
cleanup();
destroyWorker(worker);
rejectCallback(event.error);
};
if (_optionalChain([opts, 'optionalAccess', _38 => _38.abortSignal])) {
// Reject immediately if already aborted before starting.
try {
_optionalChain([opts, 'access', _39 => _39.abortSignal, 'optionalAccess', _40 => _40.throwIfAborted, 'call', _41 => _41()]);
} catch (err) {
cleanup();
destroyWorker(worker);
rejectCallback(_nullishCoalesce(opts.abortSignal.reason, () => ( new DOMException("Aborted", "AbortError"))));
return;
}
const abortListener = () => {
cleanup();
destroyWorker(worker);
rejectCallback(_nullishCoalesce(_optionalChain([opts, 'access', _42 => _42.abortSignal, 'optionalAccess', _43 => _43.reason]), () => ( new DOMException("Aborted", "AbortError"))));
_optionalChain([opts, 'access', _44 => _44.abortSignal, 'optionalAccess', _45 => _45.removeEventListener, 'call', _46 => _46("abort", abortListener)]);
};
opts.abortSignal.addEventListener("abort", abortListener);
}
worker.addEventListener("message", messageHandler);
worker.addEventListener("error", errorHandler);
worker.postMessage({ file: buffer });
});
} catch (err) {
// Worker setup failed: log and fall through to the WASM path below.
console.warn("Failed to use web worker for sha256", err);
}
}
// Strategy 3: streaming WASM hasher with chunked progress.
if (!wasmModule) {
wasmModule = await Promise.resolve().then(() => _interopRequireWildcard(require("./sha256-wrapper-DHTT2DPH.js")));
}
const sha2562 = await wasmModule.createSHA256();
sha2562.init();
const reader = buffer.stream().getReader();
const total = buffer.size;
let bytesDone = 0;
while (true) {
const { done, value } = await reader.read();
if (done) {
break;
}
sha2562.update(value);
bytesDone += value.length;
yield bytesDone / total;
_optionalChain([opts, 'optionalAccess', _47 => _47.abortSignal, 'optionalAccess', _48 => _48.throwIfAborted, 'call', _49 => _49()]);
}
return sha2562.digest("hex");
}
// Strategy 4: backend — delegate to Node's crypto implementation.
if (!cryptoModule) {
cryptoModule = await Promise.resolve().then(() => _interopRequireWildcard(require("./sha256-node-FT2Y3VXD.js")));
}
return yield* cryptoModule.sha256Node(buffer, { abortSignal: _optionalChain([opts, 'optionalAccess', _50 => _50.abortSignal]) });
}
// Lazy singletons for the Node-crypto and WASM SHA-256 implementations.
var cryptoModule;
var wasmModule;
// src/utils/WebBlob.ts
/**
 * Blob implementation backed by a remote HTTP(S) resource.
 *
 * Bytes are fetched lazily via HTTP Range requests: slicing a WebBlob costs
 * nothing until arrayBuffer()/text()/stream() is called.
 */
var WebBlob = class extends Blob {
  /**
   * Probe `url` with a HEAD request and build a WebBlob for it.
   * Falls back to eagerly downloading the whole resource as a plain Blob when
   * the server does not advertise `accept-ranges: bytes` or the content is
   * smaller than `opts.cacheBelow` (default 1e6 bytes).
   */
  static async create(url, opts) {
    const customFetch = opts?.fetch ?? fetch;
    const response = await customFetch(url, {
      method: "HEAD",
      ...opts?.accessToken && {
        headers: {
          Authorization: `Bearer ${opts.accessToken}`
        }
      }
    });
    const size = Number(response.headers.get("content-length"));
    const contentType = response.headers.get("content-type") || "";
    const supportRange = response.headers.get("accept-ranges") === "bytes";
    if (!supportRange || size < (opts?.cacheBelow ?? 1e6)) {
      return await (await customFetch(url)).blob();
    }
    return new WebBlob(url, 0, size, contentType, true, customFetch, opts?.accessToken);
  }
  /**
   * @param full - true when this blob spans the entire remote resource
   *   (lets fetchRange skip the Range header).
   */
  constructor(url, start, end, contentType, full, customFetch, accessToken) {
    super([]);
    this.url = url;
    this.start = start;
    this.end = end;
    this.contentType = contentType;
    this.full = full;
    this.fetch = customFetch;
    this.accessToken = accessToken;
  }
  get size() {
    return this.end - this.start;
  }
  get type() {
    return this.contentType;
  }
  /** Return a lazy sub-view; no data is fetched. */
  slice(start = 0, end = this.size) {
    if (start < 0 || end < 0) {
      // Fix: the TypeError was previously constructed but never thrown,
      // silently letting negative offsets produce a corrupt range.
      throw new TypeError("Unsupported negative start/end on WebBlob.slice");
    }
    const slice = new WebBlob(
      this.url,
      this.start + start,
      Math.min(this.start + end, this.end),
      this.contentType,
      start === 0 && end === this.size ? this.full : false,
      this.fetch,
      this.accessToken
    );
    return slice;
  }
  async arrayBuffer() {
    const result = await this.fetchRange();
    return result.arrayBuffer();
  }
  async text() {
    const result = await this.fetchRange();
    return result.text();
  }
  stream() {
    const stream = new TransformStream();
    this.fetchRange()
      .then((response) => response.body?.pipeThrough(stream))
      .catch((error) => stream.writable.abort(error.message));
    return stream.readable;
  }
  /** Fetch this blob's byte range (full GET when `full`, else a Range GET). */
  fetchRange() {
    const fetch2 = this.fetch;
    if (this.full) {
      return fetch2(this.url, {
        ...this.accessToken && {
          headers: {
            Authorization: `Bearer ${this.accessToken}`
          }
        }
      }).then((resp) => resp.ok ? resp : createApiError(resp));
    }
    return fetch2(this.url, {
      headers: {
        // HTTP Range is inclusive, hence `end - 1`.
        Range: `bytes=${this.start}-${this.end - 1}`,
        ...this.accessToken && { Authorization: `Bearer ${this.accessToken}` }
      }
    }).then((resp) => resp.ok ? resp : createApiError(resp));
  }
};
// src/utils/base64FromBytes.ts
/**
 * Base64-encode a byte array.
 * Uses Buffer when available (Node); otherwise builds a binary string and
 * delegates to btoa.
 */
function base64FromBytes(arr) {
  if (globalThis.Buffer) {
    return globalThis.Buffer.from(arr).toString("base64");
  }
  const binary = Array.from(arr, (byte) => String.fromCharCode(byte)).join("");
  return globalThis.btoa(binary);
}
// src/utils/createBlobs.ts
/**
 * Turn a URL into a list of { path, blob } upload entries.
 * - http(s): one lazy WebBlob for the resource.
 * - file: (backend only): one FileBlob per file, recursing into directories
 *   up to opts.maxFolderDepth; a single-file URL maps directly to destPath.
 * @throws {TypeError} for unsupported protocols, or for any non-http(s)
 *   protocol on the frontend.
 */
async function createBlobs(url, destPath, opts) {
if (url.protocol === "http:" || url.protocol === "https:") {
const blob = await WebBlob.create(url, { fetch: _optionalChain([opts, 'optionalAccess', _58 => _58.fetch]), accessToken: _optionalChain([opts, 'optionalAccess', _59 => _59.accessToken]) });
return [{ path: destPath, blob }];
}
if (isFrontend) {
throw new TypeError(`Unsupported URL protocol "${url.protocol}"`);
}
if (url.protocol === "file:") {
// Lazy-load Node-only helpers so the frontend bundle never pulls them in.
const { FileBlob } = await Promise.resolve().then(() => _interopRequireWildcard(require("./FileBlob-YC2EPDW4.js")));
const { subPaths } = await Promise.resolve().then(() => _interopRequireWildcard(require("./sub-paths-RH3O65LG.js")));
const paths = await subPaths(url, _optionalChain([opts, 'optionalAccess', _60 => _60.maxFolderDepth]));
if (paths.length === 1 && paths[0].relativePath === ".") {
const blob = await FileBlob.create(url);
return [{ path: destPath, blob }];
}
return Promise.all(
paths.map(async (path) => ({
// Normalize "dir/." and "//" artifacts and strip a leading "./".
path: `${destPath}/${path.relativePath}`.replace(/\/[.]$/, "").replaceAll("//", "/").replace(/^[.]?\//, ""),
blob: await FileBlob.create(new URL(path.path))
}))
);
}
throw new TypeError(`Unsupported URL protocol "${url.protocol}"`);
}
// src/utils/combineUint8Arrays.ts
/** Concatenate two Uint8Arrays into a newly allocated one. */
function combineUint8Arrays(a, b) {
  const merged = new Uint8Array(a.length + b.length);
  merged.set(a, 0);
  merged.set(b, a.length);
  return merged;
}
// src/vendor/lz4js/util.ts
// 32-bit integer scramble used to hash 4-byte sequences for LZ4 match
// finding (vendored from lz4js). The constants are arbitrary mixing values;
// `| 0` forces every intermediate back into int32 range.
function hashU32(a) {
a = a | 0;
a = a + 2127912214 + (a << 12) | 0;
a = a ^ -949894596 ^ a >>> 19;
a = a + 374761393 + (a << 5) | 0;
a = a + -744332180 ^ a << 9;
a = a + -42973499 + (a << 3) | 0;
return a ^ -1252372727 ^ a >>> 16 | 0;
}
/**
 * Read a little-endian unsigned 64-bit integer from `b` at offset `n`,
 * returned as a JS number (exact up to Number.MAX_SAFE_INTEGER).
 *
 * Fix: the previous implementation OR-ed bytes in with `<< 32` … `<< 56`,
 * but JS bitwise shifts operate on int32 with the shift count taken mod 32,
 * so bytes 4-7 were OR-ed into the LOW word (corrupting the value) and byte 3
 * could flip the sign. Plain arithmetic accumulation handles all 8 bytes.
 */
function readU64(b, n) {
  let x = 0;
  for (let i = 0; i < 8; i++) {
    x += b[n + i] * 2 ** (8 * i);
  }
  return x;
}
/**
 * Read a little-endian 32-bit value from `b` at offset `n`.
 * Deliberately returns a SIGNED int32 (byte 3 >= 0x80 yields a negative
 * number): callers in this vendored LZ4 code rely on bitwise semantics.
 */
function readU32(b, n) {
  let x = 0;
  for (let shift = 0; shift < 32; shift += 8) {
    x |= b[n++] << shift;
  }
  return x;
}
/** Write `x` as a little-endian 32-bit value into `b` at offset `n`. */
function writeU32(b, n, x) {
  for (let shift = 0; shift < 32; shift += 8) {
    b[n++] = x >> shift & 255;
  }
}
/**
 * 32-bit integer multiplication with C-like overflow (truncating) semantics.
 * Replaced the hand-rolled 16x16 partial-product polyfill with the standard
 * Math.imul (ES2015), which implements exactly this operation.
 */
function imul(a, b) {
  return Math.imul(a, b);
}
// src/vendor/lz4js/xxh32.ts
// The five xxHash32 magic primes (fixed constants from the xxHash spec).
var prime1 = 2654435761;
var prime2 = 2246822519;
var prime3 = 3266489917;
var prime4 = 668265263;
var prime5 = 374761393;
/** Rotate a 32-bit value left by r bits (result as signed int32). */
function rotl32(x, r) {
  x = x | 0;
  r = r | 0;
  return (x << r | x >>> (32 - r | 0)) | 0;
}
// Rotate h left by r bits, then 32-bit-multiply by m (xxHash round step).
function rotmul32(h, r, m) {
h = h | 0;
r = r | 0;
m = m | 0;
return imul(h >>> (32 - r | 0) | h << r, m) | 0;
}
/** XOR a value with itself shifted right by s bits (xxHash avalanche step). */
function shiftxor32(h, s) {
  h = h | 0;
  s = s | 0;
  return (h >>> s ^ h) | 0;
}
// Generic xxHash mix: h' = rotl(h + src*m0, s) * m1.
function xxhapply(h, src, m0, s, m1) {
return rotmul32(imul(src, m0) + h, s, m1);
}
// Fold a single trailing byte into the hash.
function xxh1(h, src, index) {
return rotmul32(h + imul(src[index], prime5), 11, prime1);
}
// Fold a trailing 4-byte word into the hash.
function xxh4(h, src, index) {
return xxhapply(h, readU32(src, index), prime3, 17, prime4);
}
// One 16-byte round over the four parallel accumulator lanes.
function xxh16(h, src, index) {
return [
xxhapply(h[0], readU32(src, index + 0), prime2, 13, prime1),
xxhapply(h[1], readU32(src, index + 4), prime2, 13, prime1),
xxhapply(h[2], readU32(src, index + 8), prime2, 13, prime1),
xxhapply(h[3], readU32(src, index + 12), prime2, 13, prime1)
];
}
/**
 * xxHash32 of src[index .. index+len) with the given seed (vendored lz4js).
 * Processes 16-byte stripes across four lanes, then 4-byte and 1-byte tails,
 * then the final avalanche. Returns an unsigned 32-bit number.
 */
function xxh32(seed, src, index, len) {
let h;
const l = len;
if (len >= 16) {
// Four-lane accumulator initialization per the xxHash spec.
h = [seed + prime1 + prime2, seed + prime2, seed, seed - prime1];
while (len >= 16) {
h = xxh16(h, src, index);
index += 16;
len -= 16;
}
// Merge lanes and add total length.
h = rotl32(h[0], 1) + rotl32(h[1], 7) + rotl32(h[2], 12) + rotl32(h[3], 18) + l;
} else {
// Short-input path: single accumulator.
h = seed + prime5 + len >>> 0;
}
while (len >= 4) {
h = xxh4(h, src, index);
index += 4;
len -= 4;
}
while (len > 0) {
h = xxh1(h, src, index);
index++;
len--;
}
// Final avalanche mixing.
h = shiftxor32(imul(shiftxor32(imul(shiftxor32(h, 15), prime2), 13), prime3), 16);
return h >>> 0;
}
// Frame-checksum entry point used by the LZ4 encoder below.
var hash = xxh32;
// src/vendor/lz4js/index.ts
// LZ4 match-encoding parameters (spec minimums and encoder tuning).
var minMatch = 4;
var matchSearchLimit = 12;
var minTrailingLitterals = 5;
var skipTrigger = 6;
var hashSize = 1 << 16;
// Token layout: low nibble = match length, high nibble = literal run length.
var mlBits = 4;
var mlMask = (1 << mlBits) - 1;
var runBits = 4;
var runMask = (1 << runBits) - 1;
// Shared 5 MiB scratch buffer for block compression — module-level state, so
// compressFrame is not re-entrant.
var blockBuf = makeBuffer(5 << 20);
var hashTable = makeHashTable();
// LZ4 frame format: magic number and descriptor flag bits.
var magicNum = 407708164;
var fdContentChksum = 4;
var fdContentSize = 8;
var fdBlockChksum = 16;
var fdVersion = 64;
var fdVersionMask = 192;
// Bit 31 of a block size marks an uncompressed (stored) block.
var bsUncompressed = 2147483648;
var bsDefault = 7;
var bsShift = 4;
var bsMask = 7;
// Block-size descriptor index → maximum block size in bytes.
var bsMap = {
4: 65536,
5: 262144,
6: 1048576,
7: 4194304
};
/**
 * Allocate the 64K-entry LZ4 match hash table, falling back to a zero-filled
 * plain array in environments without typed arrays.
 */
function makeHashTable() {
  try {
    return new Uint32Array(hashSize);
  } catch (error) {
    return new Array(hashSize).fill(0);
  }
}
/** Reset every bucket of an LZ4 hash table (length hashSize) to zero. */
function clearHashTable(table) {
  table.fill(0);
}
// Allocate a zero-initialized byte buffer of the given size.
function makeBuffer(size) {
return new Uint8Array(size);
}
// Copy [start, end) of an array; works for both Uint8Array and plain arrays.
function sliceArray(array, start, end) {
return array.slice(start, end);
}
/** Worst-case LZ4 compressed size for n input bytes: n + n/255 + 16, truncated to int32. */
function compressBound(n) {
  return (n + n / 255 + 16) | 0;
}
/**
 * Upper bound on the decompressed size of an LZ4 frame.
 * Uses the header's content-size field when present; otherwise walks the
 * block list summing each block's maximum expansion.
 * @throws {Error} on a bad magic number, descriptor version, or block size.
 */
function decompressBound(src) {
let sIndex = 0;
if (readU32(src, sIndex) !== magicNum) {
throw new Error("invalid magic number");
}
sIndex += 4;
const descriptor = src[sIndex++];
if ((descriptor & fdVersionMask) !== fdVersion) {
throw new Error("incompatible descriptor version " + (descriptor & fdVersionMask));
}
const useBlockSum = (descriptor & fdBlockChksum) !== 0;
const useContentSize = (descriptor & fdContentSize) !== 0;
const bsIdx = src[sIndex++] >> bsShift & bsMask;
if (bsMap[bsIdx] === void 0) {
throw new Error("invalid block size " + bsIdx);
}
const maxBlockSize = bsMap[bsIdx];
if (useContentSize) {
// Exact size is stored in the header as a little-endian u64.
return readU64(src, sIndex);
}
// Skip the header-checksum byte before the first block.
sIndex++;
let maxSize = 0;
while (true) {
let blockSize = readU32(src, sIndex);
sIndex += 4;
if (blockSize & bsUncompressed) {
// Stored block: expands to exactly its own size.
blockSize &= ~bsUncompressed;
maxSize += blockSize;
} else if (blockSize > 0) {
// Compressed block: can expand up to the frame's max block size.
maxSize += maxBlockSize;
}
if (blockSize === 0) {
// Terminating zero-size block.
return maxSize;
}
if (useBlockSum) {
sIndex += 4;
}
sIndex += blockSize;
}
}
/**
 * Decode one raw LZ4 block from src[sIndex .. sIndex+sLength) into dst at
 * dIndex; returns the new write position in dst.
 * Sequence format: token (high nibble = literal count, low nibble = match
 * length, both with 255-continuation bytes when saturated), literals, then a
 * 16-bit little-endian match offset.
 */
function decompressBlock(src, dst, sIndex, sLength, dIndex) {
let mLength, mOffset, sEnd, n, i;
// Fast-copy paths need both copyWithin and fill (absent on plain arrays).
const hasCopyWithin = dst.copyWithin !== void 0 && dst.fill !== void 0;
sEnd = sIndex + sLength;
while (sIndex < sEnd) {
const token = src[sIndex++];
let literalCount = token >> 4;
if (literalCount > 0) {
if (literalCount === 15) {
// Extended literal length: add bytes until one is not 255.
while (true) {
literalCount += src[sIndex];
if (src[sIndex++] !== 255) {
break;
}
}
}
for (n = sIndex + literalCount; sIndex < n; ) {
dst[dIndex++] = src[sIndex++];
}
}
if (sIndex >= sEnd) {
// Last sequence of a block may end after its literals (no match part).
break;
}
mLength = token & 15;
mOffset = src[sIndex++] | src[sIndex++] << 8;
if (mLength === 15) {
while (true) {
mLength += src[sIndex];
if (src[sIndex++] !== 255) {
break;
}
}
}
mLength += minMatch;
if (hasCopyWithin && mOffset === 1) {
// Offset 1 = run of the previous byte: use fill.
dst.fill(dst[dIndex - 1] | 0, dIndex, dIndex + mLength);
dIndex += mLength;
} else if (hasCopyWithin && mOffset > mLength && mLength > 31) {
// Non-overlapping long match: bulk copy.
dst.copyWithin(dIndex, dIndex - mOffset, dIndex - mOffset + mLength);
dIndex += mLength;
} else {
// Overlapping (or short) match must be copied byte-by-byte.
for (i = dIndex - mOffset, n = i + mLength; i < n; ) {
dst[dIndex++] = dst[i++] | 0;
}
}
}
return dIndex;
}
/**
 * Greedy LZ4 block compressor: src[sIndex .. sIndex+sLength) → dst.
 * Returns the number of bytes written to dst, or 0 when no match was found
 * (caller then emits a stored/uncompressed block instead).
 */
function compressBlock(src, dst, sIndex, sLength, hashTable2) {
let mIndex, mAnchor, mLength, mOffset, mStep;
let literalCount, dIndex, sEnd, n;
dIndex = 0;
sEnd = sLength + sIndex;
mAnchor = sIndex;
// Skip-trigger counter: step size grows over incompressible data.
let searchMatchCount = (1 << skipTrigger) + 3;
while (sIndex <= sEnd - matchSearchLimit) {
const seq = readU32(src, sIndex);
let hash2 = hashU32(seq) >>> 0;
hash2 = (hash2 >> 16 ^ hash2) >>> 0 & 65535;
// Hash table stores position + 1 so 0 means "empty".
mIndex = hashTable2[hash2] - 1;
hashTable2[hash2] = sIndex + 1;
// Reject: empty bucket, offset beyond 64K window, or hash collision.
if (mIndex < 0 || sIndex - mIndex >>> 16 > 0 || readU32(src, mIndex) !== seq) {
mStep = searchMatchCount++ >> skipTrigger;
sIndex += mStep;
continue;
}
searchMatchCount = (1 << skipTrigger) + 3;
literalCount = sIndex - mAnchor;
mOffset = sIndex - mIndex;
sIndex += minMatch;
mIndex += minMatch;
// Extend the match forward as far as possible.
mLength = sIndex;
while (sIndex < sEnd - minTrailingLitterals && src[sIndex] === src[mIndex]) {
sIndex++;
mIndex++;
}
mLength = sIndex - mLength;
const token = mLength < mlMask ? mLength : mlMask;
// Emit token + extended literal length (255-continuation encoding).
if (literalCount >= runMask) {
dst[dIndex++] = (runMask << mlBits) + token;
for (n = literalCount - runMask; n >= 255; n -= 255) {
dst[dIndex++] = 255;
}
dst[dIndex++] = n;
} else {
dst[dIndex++] = (literalCount << mlBits) + token;
}
// Emit literals, then the 16-bit little-endian match offset.
for (let i = 0; i < literalCount; i++) {
dst[dIndex++] = src[mAnchor + i];
}
dst[dIndex++] = mOffset;
dst[dIndex++] = mOffset >> 8;
// Extended match length, same continuation encoding.
if (mLength >= mlMask) {
for (n = mLength - mlMask; n >= 255; n -= 255) {
dst[dIndex++] = 255;
}
dst[dIndex++] = n;
}
mAnchor = sIndex;
}
if (mAnchor === 0) {
// No match found anywhere: signal "incompressible" to the caller.
return 0;
}
// Final literal run covering everything after the last match.
literalCount = sEnd - mAnchor;
if (literalCount >= runMask) {
dst[dIndex++] = runMask << mlBits;
for (n = literalCount - runMask; n >= 255; n -= 255) {
dst[dIndex++] = 255;
}
dst[dIndex++] = n;
} else {
dst[dIndex++] = literalCount << mlBits;
}
sIndex = mAnchor;
while (sIndex < sEnd) {
dst[dIndex++] = src[sIndex++];
}
return dIndex;
}
/**
 * Decode a full LZ4 frame from src into dst; returns bytes written.
 * Parses the header flags, then iterates blocks until the terminating
 * zero-size block; block checksums are skipped, not verified.
 * @throws {Error} on a bad magic number, version, or block-size descriptor.
 */
function decompressFrame(src, dst) {
let useBlockSum, useContentSum, useContentSize, descriptor;
let sIndex = 0;
let dIndex = 0;
if (readU32(src, sIndex) !== magicNum) {
throw new Error("invalid magic number");
}
sIndex += 4;
descriptor = src[sIndex++];
if ((descriptor & fdVersionMask) !== fdVersion) {
throw new Error("incompatible descriptor version");
}
useBlockSum = (descriptor & fdBlockChksum) !== 0;
useContentSum = (descriptor & fdContentChksum) !== 0;
useContentSize = (descriptor & fdContentSize) !== 0;
const bsIdx = src[sIndex++] >> bsShift & bsMask;
if (bsMap[bsIdx] === void 0) {
throw new Error("invalid block size");
}
if (useContentSize) {
// Skip the 8-byte content-size field (not needed for decoding).
sIndex += 8;
}
// Skip the header-checksum byte.
sIndex++;
while (true) {
var compSize;
compSize = readU32(src, sIndex);
sIndex += 4;
if (compSize === 0) {
// Terminating zero-size block.
break;
}
if (useBlockSum) {
// Skip (do not verify) the per-block checksum.
sIndex += 4;
}
if ((compSize & bsUncompressed) !== 0) {
// Stored block: copy bytes verbatim.
compSize &= ~bsUncompressed;
for (let j = 0; j < compSize; j++) {
dst[dIndex++] = src[sIndex++];
}
} else {
dIndex = decompressBlock(src, dst, sIndex, compSize, dIndex);
sIndex += compSize;
}
}
if (useContentSum) {
// Skip (do not verify) the trailing content checksum.
sIndex += 4;
}
return dIndex;
}
/**
 * Encode src as a complete LZ4 frame into dst; returns bytes written.
 * Writes magic + descriptor + header checksum, compresses each block via the
 * shared blockBuf/hashTable scratch state (NOT re-entrant), falls back to a
 * stored block whenever compression does not shrink the data, and ends with
 * a zero-size terminator block.
 */
function compressFrame(src, dst) {
let dIndex = 0;
writeU32(dst, dIndex, magicNum);
dIndex += 4;
dst[dIndex++] = fdVersion;
dst[dIndex++] = bsDefault << bsShift;
// Header checksum: second byte of xxh32 over the two descriptor bytes
// (assignment into the Uint8Array truncates to 8 bits).
dst[dIndex] = hash(0, dst, 4, dIndex - 4) >> 8;
dIndex++;
const maxBlockSize = bsMap[bsDefault];
let remaining = src.length;
let sIndex = 0;
clearHashTable(hashTable);
while (remaining > 0) {
let compSize = 0;
const blockSize = remaining > maxBlockSize ? maxBlockSize : remaining;
compSize = compressBlock(src, blockBuf, sIndex, blockSize, hashTable);
if (compSize > blockSize || compSize === 0) {
// Incompressible: emit a stored block with the high bit set.
writeU32(dst, dIndex, 2147483648 | blockSize);
dIndex += 4;
for (let z = sIndex + blockSize; sIndex < z; ) {
dst[dIndex++] = src[sIndex++];
}
remaining -= blockSize;
} else {
writeU32(dst, dIndex, compSize);
dIndex += 4;
for (let j = 0; j < compSize; ) {
dst[dIndex++] = blockBuf[j++];
}
sIndex += blockSize;
remaining -= blockSize;
}
}
// Terminating zero-size block.
writeU32(dst, dIndex, 0);
dIndex += 4;
return dIndex;
}
/**
 * Decompress an LZ4 frame into a new Uint8Array.
 * When maxSize is omitted it is derived by scanning the frame with
 * decompressBound; the buffer is trimmed to the actual decoded length.
 */
function decompress(src, maxSize) {
  const bound = maxSize === void 0 ? decompressBound(src) : maxSize;
  const dst = makeBuffer(bound);
  const written = decompressFrame(src, dst);
  return written === bound ? dst : sliceArray(dst, 0, written);
}
/**
 * Compress src into a new LZ4 frame.
 * When maxSize is omitted the worst-case compressBound is allocated; the
 * buffer is trimmed to the actual encoded length.
 */
function compress(src, maxSize) {
  const bound = maxSize === void 0 ? compressBound(src.length) : maxSize;
  const dst = makeBuffer(bound);
  const written = compressFrame(src, dst);
  return written === bound ? dst : sliceArray(dst, 0, written);
}
// src/utils/RangeList.ts
/**
 * Sorted list of half-open [start, end) ranges with reference counting,
 * used by XetBlob to share chunk downloads between overlapping terms.
 * Each entry is { start, end, refCount, data }; `data` caches fetched bytes.
 */
var RangeList = class {
  constructor() {
    this.ranges = [];
  }
  /**
   * Add a range to the list. If it overlaps with existing ranges,
   * it will split them and increment reference counts accordingly.
   * @throws {TypeError} when end <= start
   * @throws {Error} when an OVERLAPPING range already has data attached
   */
  add(start, end) {
    if (end <= start) {
      throw new TypeError("End must be greater than start");
    }
    const overlappingRanges = [];
    for (let i = 0; i < this.ranges.length; i++) {
      const existing = this.ranges[i];
      if (start < existing.end && end > existing.start) {
        overlappingRanges.push({ index: i, range: existing });
        // Fix: this guard previously ran for EVERY range in the list, so a
        // data-bearing range anywhere blocked unrelated, non-overlapping adds.
        if (existing.data !== null) {
          throw new Error("Overlapping range already has data");
        }
      }
    }
    if (overlappingRanges.length === 0) {
      this.ranges.push({ start, end, refCount: 1, data: null });
      this.ranges.sort((a, b) => a.start - b.start);
      return;
    }
    // Split the union of [start, end) and the overlapping ranges into
    // non-overlapping pieces, bumping refCount where the new range covers.
    const newRanges = [];
    let currentPos = start;
    for (const { range: existing } of overlappingRanges) {
      if (currentPos < existing.start) {
        // Gap before this existing range: covered only by the new range.
        newRanges.push({ start: currentPos, end: existing.start, refCount: 1, data: null });
      } else if (existing.start < currentPos) {
        // Leading part of the existing range not covered by the new range.
        newRanges.push({ start: existing.start, end: currentPos, refCount: existing.refCount, data: null });
      }
      // Overlapping middle: one extra reference.
      newRanges.push({
        start: Math.max(currentPos, existing.start),
        end: Math.min(end, existing.end),
        refCount: existing.refCount + 1,
        data: null
      });
      if (existing.end > end) {
        // Trailing part of the existing range past the new range.
        newRanges.push({ start: end, end: existing.end, refCount: existing.refCount, data: null });
      }
      currentPos = Math.max(currentPos, existing.end);
    }
    if (currentPos < end) {
      newRanges.push({ start: currentPos, end, refCount: 1, data: null });
    }
    const firstIndex = overlappingRanges[0].index;
    const lastIndex = overlappingRanges[overlappingRanges.length - 1].index;
    this.ranges.splice(firstIndex, lastIndex - firstIndex + 1, ...newRanges);
    this.ranges.sort((a, b) => a.start - b.start);
  }
  /**
   * Remove a range from the list. The range must start and end at existing
   * boundaries. Decrements refCounts and drops ranges that reach zero.
   * @throws {TypeError} when end <= start
   * @throws {Error} when no matching ranges exist or boundaries do not align
   */
  remove(start, end) {
    if (end <= start) {
      throw new TypeError("End must be greater than start");
    }
    const affectedRanges = [];
    for (let i = 0; i < this.ranges.length; i++) {
      const existing = this.ranges[i];
      if (start < existing.end && end > existing.start) {
        affectedRanges.push({ index: i, range: existing });
      }
    }
    if (affectedRanges.length === 0) {
      throw new Error("No ranges found to remove");
    }
    if (start !== affectedRanges[0].range.start || end !== affectedRanges[affectedRanges.length - 1].range.end) {
      throw new Error("Range boundaries must match existing boundaries");
    }
    for (const { range: existing } of affectedRanges) {
      existing.refCount--;
    }
    this.ranges = this.ranges.filter((existing) => existing.refCount > 0);
  }
  /**
   * Get all ranges within the specified boundaries.
   */
  getRanges(start, end) {
    if (end <= start) {
      throw new TypeError("End must be greater than start");
    }
    return this.ranges.filter((existing) => start < existing.end && end > existing.start);
  }
  /**
   * Get all ranges in the list (shallow copy; range objects are shared).
   */
  getAllRanges() {
    return [...this.ranges];
  }
};
// src/utils/XetBlob.ts
// Refresh JWTs this many ms before expiry; cache at most this many JWTs.
var JWT_SAFETY_PERIOD = 6e4;
var JWT_CACHE_SIZE = 1e3;
// Human-readable labels for the xet chunk compression scheme ids (0..2).
var compressionSchemeLabels = {
[0 /* None */]: "None",
[1 /* LZ4 */]: "LZ4",
[2 /* ByteGroupingLZ4 */]: "ByteGroupingLZ4"
};
// Fixed size of a xet chunk header in bytes.
var XET_CHUNK_HEADER_BYTES = 8;
var XetBlob = (_class2 = class extends Blob {
__init2() {this.start = 0}
__init3() {this.end = 0}
__init4() {this.internalLogging = false}
constructor(params) {
super([]);_class2.prototype.__init2.call(this);_class2.prototype.__init3.call(this);_class2.prototype.__init4.call(this);;
this.fetch = _nullishCoalesce(params.fetch, () => ( fetch.bind(globalThis)));
this.accessToken = checkCredentials(params);
this.refreshUrl = params.refreshUrl;
this.end = params.size;
this.reconstructionUrl = params.reconstructionUrl;
this.hash = params.hash;
this.listener = params.listener;
this.internalLogging = _nullishCoalesce(params.internalLogging, () => ( false));
this.refreshUrl;
}
// Logical byte length of this (possibly sliced) view.
get size() {
return this.end - this.start;
}
// Copy this blob for slice(): carries over auth, bounds and the cached
// reconstruction info without re-running the constructor's credential check.
#clone() {
const blob = new XetBlob({
fetch: this.fetch,
hash: this.hash,
refreshUrl: this.refreshUrl,
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
reconstructionUrl: this.reconstructionUrl,
size: this.size
});
blob.accessToken = this.accessToken;
blob.start = this.start;
blob.end = this.end;
blob.reconstructionInfo = this.reconstructionInfo;
blob.listener = this.listener;
blob.internalLogging = this.internalLogging;
return blob;
}
slice(start = 0, end = this.size) {
if (start < 0 || end < 0) {
new TypeError("Unsupported negative start/end on XetBlob.slice");
}
const slice = this.#clone();
slice.start = this.start + start;
slice.end = Math.min(this.start + end, this.end);
if (slice.start !== this.start || slice.end !== this.end) {
slice.reconstructionInfo = void 0;
}
return slice;
}
#reconstructionInfoPromise;
#loadReconstructionInfo() {
if (this.#reconstructionInfoPromise) {
return this.#reconstructionInfoPromise;
}
this.#reconstructionInfoPromise = (async () => {
const connParams = await getAccessToken(this.accessToken, this.fetch, this.refreshUrl);
const resp = await this.fetch(_nullishCoalesce(this.reconstructionUrl, () => ( `${connParams.casUrl}/v1/reconstructions/${this.hash}`)), {
headers: {
Authorization: `Bearer ${connParams.accessToken}`,
Range: `bytes=${this.start}-${this.end - 1}`
}
});
if (!resp.ok) {
throw await createApiError(resp);
}
this.reconstructionInfo = await resp.json();
return this.reconstructionInfo;
})().finally(() => this.#reconstructionInfoPromise = void 0);
return this.#reconstructionInfoPromise;
}
async #fetch() {
if (!this.reconstructionInfo) {
await this.#loadReconstructionInfo();
}
const rangeLists = /* @__PURE__ */ new Map();
if (!this.reconstructionInfo) {
throw new Error("Failed to load reconstruction info");
}
for (const term of this.reconstructionInfo.terms) {
let rangeList = rangeLists.get(term.hash);
if (!rangeList) {
rangeList = new RangeList();
rangeLists.set(term.hash, rangeList);
}
rangeList.add(term.range.start, term.range.end);
}
const listener = this.listener;
const log = this.internalLogging ? (...args) => console.log(...args) : () => {
};
async function* readData(reconstructionInfo, customFetch, maxBytes, reloadReconstructionInfo) {
let totalBytesRead = 0;
let readBytesToSkip = reconstructionInfo.offset_into_first_range;
for (const term of reconstructionInfo.terms) {
if (totalBytesRead >= maxBytes) {
break;
}
const rangeList = rangeLists.get(term.hash);
if (!rangeList) {
throw new Error(`Failed to find range list for term ${term.hash}`);
}
{
const termRanges = rangeList.getRanges(term.range.start, term.range.end);
if (termRanges.every((range2) => range2.data)) {
log("all data available for term", term.hash, readBytesToSkip);
rangeLoop:
for (const range2 of termRanges) {
for (let chunk2 of range2.data) {
if (readBytesToSkip) {
const skipped = Math.min(readBytesToSkip, chunk2.byteLength);
chunk2 = chunk2.slice(skipped);
readBytesToSkip -= skipped;
if (!chunk2.byteLength) {
continue;
}
}
if (chunk2.byteLength > maxBytes - totalBytesRead) {
chunk2 = chunk2.slice(0, maxBytes - totalBytesRead);
}
totalBytesRead += chunk2.byteLength;
yield range2.refCount > 1 ? chunk2.slice() : chunk2;
_optionalChain([listener, 'optionalCall', _61 => _61({ event: "progress", progress: { read: totalBytesRead, total: maxBytes } })]);
if (totalBytesRead >= maxBytes) {
break rangeLoop;
}
}
}
rangeList.remove(term.range.start, term.range.end);
continue;
}
}
const fetchInfo = reconstructionInfo.fetch_info[term.hash].find(
(info) => info.range.start <= term.range.start && info.range.end >= term.range.end
);
if (!fetchInfo) {
throw new Error(
`Failed to find fetch info for term ${term.hash} and range ${term.range.start}-${term.range.end}`
);
}
log("term", term);
log("fetchinfo", fetchInfo);
log("readBytesToSkip", readBytesToSkip);
let resp = await customFetch(fetchInfo.url, {
headers: {
Range: `bytes=${fetchInfo.url_range.start}-${fetchInfo.url_range.end}`
}
});
if (resp.status === 403) {
reconstructionInfo = await reloadReconstructionInfo();
resp = await customFetch(fetchInfo.url, {
headers: {
Range: `bytes=${fetchInfo.url_range.start}-${fetchInfo.url_range.end}`
}
});
}
if (!resp.ok) {
throw await createApiError(resp);
}
log(
"expected content length",
resp.headers.get("content-length"),
"range",
fetchInfo.url_range,
resp.headers.get("content-range")
);
const reader = _optionalChain([resp, 'access', _62 => _62.body, 'optionalAccess', _63 => _63.getReader, 'call', _64 => _64()]);
if (!reader) {
throw new Error("Failed to get reader from response body");
}
let done = false;
let chunkIndex = fetchInfo.range.start;
const ranges = rangeList.getRanges(fetchInfo.range.start, fetchInfo.range.end);
let leftoverBytes = void 0;
let totalFetchBytes = 0;
fetchData:
while (!done && totalBytesRead < maxBytes) {
const result = await reader.read();
_optionalChain([listener, 'optionalCall', _65 => _65({ event: "read" })]);
done = result.done;
log("read", _optionalChain([result, 'access', _66 => _66.value, 'optionalAccess', _67 => _67.byteLength]), "bytes", "total read", totalBytesRead, "toSkip", readBytesToSkip);
if (!result.value) {
log("no data in result, cancelled", result);
continue;
}
totalFetchBytes += result.value.byteLength;
if (leftoverBytes) {
result.value = combineUint8Arrays(leftoverBytes, result.value);
leftoverBytes = void 0;
}
while (totalBytesRead < maxBytes && _optionalChain([result, 'access', _68 => _68.value, 'optionalAccess', _69 => _69.byteLength])) {
if (result.value.byteLength < 8) {
leftoverBytes = result.value;
continue fetchData;
}
const header = new DataView(result.value.buffer, result.value.byteOffset, XET_CHUNK_HEADER_BYTES);
const chunkHeader = {
version: header.getUint8(0),
compressed_length: header.getUint8(1) | header.getUint8(2) << 8 | header.getUint8(3) << 16,
compression_scheme: header.getUint8(4),
uncompressed_length: header.getUint8(5) | header.getUint8(6) << 8 | header.getUint8(7) << 16
};
log("chunk header", chunkHeader, "to skip", readBytesToSkip);
if (chunkHeader.version !== 0) {
throw new Error(`Unsupported chunk version ${chunkHeader.version}`);
}
if (chunkHeader.compression_scheme !== 0 /* None */ && chunkHeader.compression_scheme !== 1 /* LZ4 */ && chunkHeader.compression_scheme !== 2 /* ByteGroupingLZ4 */) {
throw new Error(
`Unsupported compression scheme ${_nullishCoalesce(compressionSchemeLabels[chunkHeader.compression_scheme], () => ( chunkHeader.compression_scheme))}`
);
}
if (result.value.byteLength < chunkHeader.compressed_length + XET_CHUNK_HEADER_BYTES) {
leftoverBytes = result.value;
continue fetchData;
}
result.value = result.value.slice(XET_CHUNK_HEADER_BYTES);
let uncompressed = chunkHeader.compression_scheme === 1 /* LZ4 */ ? decompress(result.value.slice(0, chunkHeader.compressed_length), chunkHeader.uncompressed_length) : chunkHeader.compression_scheme === 2 /* ByteGroupingLZ4 */ ? bg4_regroup_bytes(
decompress(
result.value.slice(0, chunkHeader.compressed_length),
chunkHeader.uncompressed_length
)
) : result.value.slice(0, chunkHeader.compressed_length);
const range2 = ranges.find((range3) => chunkIndex >= range3.start && chunkIndex < range3.end);
const shouldYield = chunkIndex >= term.range.start && chunkIndex < term.range.end;
const minRefCountToStore = shouldYield ? 2 : 1;
let stored = false;
if (range2 && range2.refCount >= minRefCountToStore) {
range2.data ??= [];
range2.data.push(uncompressed);
stored = true;
}
if (shouldYield) {
if (readBytesToSkip) {
const skipped = Math.min(readBytesToSkip, uncompressed.byteLength);
uncompressed = uncompressed.slice(readBytesToSkip);
readBytesToSkip -= skipped;
}
if (uncompressed.byteLength > maxBytes - totalBytesRead) {
uncompressed = uncompressed.slice(0, maxBytes - totalBytesRead);
}
if (uncompressed.byteLength) {
log(
"yield",
uncompressed.byteLength,
"bytes",
result.value.byteLength,
"total read",
totalBytesRead,
stored
);
totalBytesRead += uncompressed.byteLength;
yield stored ? uncompressed.slice() : uncompressed;
_optionalChain([listener, 'optionalCall', _70 => _70({ event: "progress", progress: { read: totalBytesRead, total: maxBytes } })]);
}
}
chunkIndex++;
result.value = result.value.slice(chunkHeader.compressed_length);
}
}
if (done && totalBytesRead < maxBytes && totalFetchBytes < fetchInfo.url_range.end - fetchInfo.url_range.start + 1) {
log("done", done, "total read", totalBytesRead, maxBytes, totalFetchBytes);
log("failed to fetch all data for term", term.hash);
throw new Error(
`Failed to fetch all data for term ${term.hash}, fetched ${totalFetchBytes} bytes out of ${fetchInfo.url_range.end - fetchInfo.url_range.start + 1}`
);
}
log("done", done, "total read", totalBytesRead, maxBytes, totalFetchBytes);
log("cancel reader");
await reader.cancel();
}
}
const iterator = readData(
this.reconstructionInfo,
this.fetch,
this.end - this.start,
this.#loadReconstructionInfo.bind(this)
);
return new ReadableStream(
{
// todo: when Safari supports it, type controller as ReadableByteStreamController
async pull(controller) {
const result = await iterator.next();
if (result.value) {
controller.enqueue(result.value);
}
if (result.done) {
controller.close();
}
},
type: "bytes"
// todo: when Safari supports it, add autoAllocateChunkSize param
},
// todo : use ByteLengthQueuingStrategy when there's good support for it, currently in Node.js it fails due to size being a function
{
highWaterMark: 1e3
// 1_000 chunks for ~1MB of RAM
}
);
}
async arrayBuffer() {
const result = await this.#fetch();
return new Response(result).arrayBuffer();
}
async text() {
const result = await this.#fetch();
return new Response(result).text();
}
async response() {
const result = await this.#fetch();
return new Response(result);
}
stream() {
const stream = new TransformStream();
this.#fetch().then((response) => response.pipeThrough(stream)).catch((error) => stream.writable.abort(error.message));
return stream.readable;
}
}, _class2);
// In-flight JWT refresh promises, keyed by cacheKey(), so concurrent callers
// with the same credentials share a single refresh request.
var jwtPromises = /* @__PURE__ */ new Map();
// Resolved JWTs ({ accessToken, casUrl, expiresAt }), keyed by cacheKey().
var jwts = /* @__PURE__ */ new Map();
// Stable cache key for the JWT caches: a JSON serialization of the
// (refreshUrl, initialAccessToken) pair.
function cacheKey(params) {
  const { refreshUrl, initialAccessToken } = params;
  return JSON.stringify([refreshUrl, initialAccessToken]);
}
// Inverse of bg4_split_bytes: the input holds four "byte lanes" laid out
// back-to-back ([all byte 0s][all byte 1s][all byte 2s][all byte 3s], with
// the remainder of byteLength % 4 distributed to the first lanes); the output
// re-interleaves them so every group of 4 consecutive bytes is restored.
function bg4_regroup_bytes(bytes) {
  const total = bytes.byteLength;
  const quarter = Math.floor(total / 4);
  const remainder = total % 4;
  // Start offset of each lane in the split layout.
  const laneStart = [0, 0, 0, 0];
  laneStart[1] = quarter + (remainder >= 1 ? 1 : 0);
  laneStart[2] = laneStart[1] + quarter + (remainder >= 2 ? 1 : 0);
  laneStart[3] = laneStart[2] + quarter + (remainder >= 3 ? 1 : 0);
  const out = new Uint8Array(total);
  for (let lane = 0; lane < 4; lane++) {
    for (let dst = lane, src = laneStart[lane]; dst < total; dst += 4, src++) {
      out[dst] = bytes[src];
    }
  }
  return out;
}
// Byte-grouping transform (inverse of bg4_regroup_bytes): de-interleaves the
// input into four "byte lanes" stored back-to-back — all bytes at positions
// 0 mod 4, then 1 mod 4, then 2 mod 4, then 3 mod 4 — with the remainder of
// byteLength % 4 enlarging the first lanes.
function bg4_split_bytes(bytes) {
  const total = bytes.byteLength;
  const quarter = Math.floor(total / 4);
  const remainder = total % 4;
  // Start offset of each lane in the split layout.
  const laneStart = [0, 0, 0, 0];
  laneStart[1] = quarter + (remainder >= 1 ? 1 : 0);
  laneStart[2] = laneStart[1] + quarter + (remainder >= 2 ? 1 : 0);
  laneStart[3] = laneStart[2] + quarter + (remainder >= 3 ? 1 : 0);
  const out = new Uint8Array(total);
  for (let lane = 0; lane < 4; lane++) {
    for (let src = lane, dst = laneStart[lane]; src < total; src += 4, dst++) {
      out[dst] = bytes[src];
    }
  }
  return out;
}
async function getAccessToken(initialAccessToken, customFetch, refreshUrl) {
const key = cacheKey({ refreshUrl, initialAccessToken });
const jwt = jwts.get(key);
if (jwt && jwt.expiresAt > new Date(Date.now() + JWT_SAFETY_PERIOD)) {
return { accessToken: jwt.accessToken, casUrl: jwt.casUrl };
}
const existingPromise = jwtPromises.get(key)