UNPKG

@netlify/plugin-nextjs

1,497 lines (1,493 loc) 47.4 kB
"use strict"; var __defProp = Object.defineProperty; var __getOwnPropDesc = Object.getOwnPropertyDescriptor; var __getOwnPropNames = Object.getOwnPropertyNames; var __hasOwnProp = Object.prototype.hasOwnProperty; var __export = (target, all) => { for (var name in all) __defProp(target, name, { get: all[name], enumerable: true }); }; var __copyProps = (to, from, except, desc) => { if (from && typeof from === "object" || typeof from === "function") { for (let key of __getOwnPropNames(from)) if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); } return to; }; var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); // src/run/storage/request-scoped-in-memory-cache.cts var request_scoped_in_memory_cache_exports = {}; __export(request_scoped_in_memory_cache_exports, { getRequestScopedInMemoryCache: () => getRequestScopedInMemoryCache, setInMemoryCacheMaxSizeFromNextConfig: () => setInMemoryCacheMaxSizeFromNextConfig }); module.exports = __toCommonJS(request_scoped_in_memory_cache_exports); var import_types = require("node:util/types"); // node_modules/lru-cache/dist/esm/index.js var perf = typeof performance === "object" && performance && typeof performance.now === "function" ? performance : Date; var warned = /* @__PURE__ */ new Set(); var PROCESS = typeof process === "object" && !!process ? process : {}; var emitWarning = (msg, type, code, fn) => { typeof PROCESS.emitWarning === "function" ? PROCESS.emitWarning(msg, type, code, fn) : console.error(`[${code}] ${type}: ${msg}`); }; var AC = globalThis.AbortController; var AS = globalThis.AbortSignal; if (typeof AC === "undefined") { AS = class AbortSignal { onabort; _onabort = []; reason; aborted = false; addEventListener(_, fn) { this._onabort.push(fn); } }; AC = class AbortController { constructor() { warnACPolyfill(); } signal = new AS(); abort(reason) { if (this.signal.aborted) return; this.signal.reason = reason; this.signal.aborted = true; for (const fn of this.signal._onabort) { fn(reason); } this.signal.onabort?.(reason); } }; let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== "1"; const warnACPolyfill = () => { if (!printACPolyfillWarning) return; printACPolyfillWarning = false; emitWarning("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.", "NO_ABORT_CONTROLLER", "ENOTSUP", warnACPolyfill); }; } var shouldWarn = (code) => !warned.has(code); var TYPE = Symbol("type"); var isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n); var getUintArray = (max) => !isPosInt(max) ? null : max <= Math.pow(2, 8) ? Uint8Array : max <= Math.pow(2, 16) ? Uint16Array : max <= Math.pow(2, 32) ? Uint32Array : max <= Number.MAX_SAFE_INTEGER ? 
ZeroArray : null; var ZeroArray = class extends Array { constructor(size) { super(size); this.fill(0); } }; var Stack = class _Stack { heap; length; // private constructor static #constructing = false; static create(max) { const HeapCls = getUintArray(max); if (!HeapCls) return []; _Stack.#constructing = true; const s = new _Stack(max, HeapCls); _Stack.#constructing = false; return s; } constructor(max, HeapCls) { if (!_Stack.#constructing) { throw new TypeError("instantiate Stack using Stack.create(n)"); } this.heap = new HeapCls(max); this.length = 0; } push(n) { this.heap[this.length++] = n; } pop() { return this.heap[--this.length]; } }; var LRUCache = class _LRUCache { // options that cannot be changed without disaster #max; #maxSize; #dispose; #disposeAfter; #fetchMethod; #memoMethod; /** * {@link LRUCache.OptionsBase.ttl} */ ttl; /** * {@link LRUCache.OptionsBase.ttlResolution} */ ttlResolution; /** * {@link LRUCache.OptionsBase.ttlAutopurge} */ ttlAutopurge; /** * {@link LRUCache.OptionsBase.updateAgeOnGet} */ updateAgeOnGet; /** * {@link LRUCache.OptionsBase.updateAgeOnHas} */ updateAgeOnHas; /** * {@link LRUCache.OptionsBase.allowStale} */ allowStale; /** * {@link LRUCache.OptionsBase.noDisposeOnSet} */ noDisposeOnSet; /** * {@link LRUCache.OptionsBase.noUpdateTTL} */ noUpdateTTL; /** * {@link LRUCache.OptionsBase.maxEntrySize} */ maxEntrySize; /** * {@link LRUCache.OptionsBase.sizeCalculation} */ sizeCalculation; /** * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection} */ noDeleteOnFetchRejection; /** * {@link LRUCache.OptionsBase.noDeleteOnStaleGet} */ noDeleteOnStaleGet; /** * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort} */ allowStaleOnFetchAbort; /** * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} */ allowStaleOnFetchRejection; /** * {@link LRUCache.OptionsBase.ignoreFetchAbort} */ ignoreFetchAbort; // computed properties #size; #calculatedSize; #keyMap; #keyList; #valList; #next; #prev; #head; #tail; #free; #disposed; #sizes; #starts; #ttls; #hasDispose; #hasFetchMethod; #hasDisposeAfter; /** * Do not call this method unless you need to inspect the * inner workings of the cache. If anything returned by this * object is modified in any way, strange breakage may occur. * * These fields are private for a reason! 
* * @internal */ static unsafeExposeInternals(c) { return { // properties starts: c.#starts, ttls: c.#ttls, sizes: c.#sizes, keyMap: c.#keyMap, keyList: c.#keyList, valList: c.#valList, next: c.#next, prev: c.#prev, get head() { return c.#head; }, get tail() { return c.#tail; }, free: c.#free, // methods isBackgroundFetch: (p) => c.#isBackgroundFetch(p), backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context), moveToTail: (index) => c.#moveToTail(index), indexes: (options) => c.#indexes(options), rindexes: (options) => c.#rindexes(options), isStale: (index) => c.#isStale(index) }; } // Protected read-only members /** * {@link LRUCache.OptionsBase.max} (read-only) */ get max() { return this.#max; } /** * {@link LRUCache.OptionsBase.maxSize} (read-only) */ get maxSize() { return this.#maxSize; } /** * The total computed size of items in the cache (read-only) */ get calculatedSize() { return this.#calculatedSize; } /** * The number of items stored in the cache (read-only) */ get size() { return this.#size; } /** * {@link LRUCache.OptionsBase.fetchMethod} (read-only) */ get fetchMethod() { return this.#fetchMethod; } get memoMethod() { return this.#memoMethod; } /** * {@link LRUCache.OptionsBase.dispose} (read-only) */ get dispose() { return this.#dispose; } /** * {@link LRUCache.OptionsBase.disposeAfter} (read-only) */ get disposeAfter() { return this.#disposeAfter; } constructor(options) { const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort } = options; if (max !== 0 && !isPosInt(max)) { throw new TypeError("max option must be a nonnegative integer"); } const UintArray = max ? 
getUintArray(max) : Array; if (!UintArray) { throw new Error("invalid max value: " + max); } this.#max = max; this.#maxSize = maxSize; this.maxEntrySize = maxEntrySize || this.#maxSize; this.sizeCalculation = sizeCalculation; if (this.sizeCalculation) { if (!this.#maxSize && !this.maxEntrySize) { throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize"); } if (typeof this.sizeCalculation !== "function") { throw new TypeError("sizeCalculation set to non-function"); } } if (memoMethod !== void 0 && typeof memoMethod !== "function") { throw new TypeError("memoMethod must be a function if defined"); } this.#memoMethod = memoMethod; if (fetchMethod !== void 0 && typeof fetchMethod !== "function") { throw new TypeError("fetchMethod must be a function if specified"); } this.#fetchMethod = fetchMethod; this.#hasFetchMethod = !!fetchMethod; this.#keyMap = /* @__PURE__ */ new Map(); this.#keyList = new Array(max).fill(void 0); this.#valList = new Array(max).fill(void 0); this.#next = new UintArray(max); this.#prev = new UintArray(max); this.#head = 0; this.#tail = 0; this.#free = Stack.create(max); this.#size = 0; this.#calculatedSize = 0; if (typeof dispose === "function") { this.#dispose = dispose; } if (typeof disposeAfter === "function") { this.#disposeAfter = disposeAfter; this.#disposed = []; } else { this.#disposeAfter = void 0; this.#disposed = void 0; } this.#hasDispose = !!this.#dispose; this.#hasDisposeAfter = !!this.#disposeAfter; this.noDisposeOnSet = !!noDisposeOnSet; this.noUpdateTTL = !!noUpdateTTL; this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection; this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection; this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort; this.ignoreFetchAbort = !!ignoreFetchAbort; if (this.maxEntrySize !== 0) { if (this.#maxSize !== 0) { if (!isPosInt(this.#maxSize)) { throw new TypeError("maxSize must be a positive integer if specified"); } } if (!isPosInt(this.maxEntrySize)) { throw new TypeError("maxEntrySize must be a positive integer if specified"); } this.#initializeSizeTracking(); } this.allowStale = !!allowStale; this.noDeleteOnStaleGet = !!noDeleteOnStaleGet; this.updateAgeOnGet = !!updateAgeOnGet; this.updateAgeOnHas = !!updateAgeOnHas; this.ttlResolution = isPosInt(ttlResolution) || ttlResolution === 0 ? ttlResolution : 1; this.ttlAutopurge = !!ttlAutopurge; this.ttl = ttl || 0; if (this.ttl) { if (!isPosInt(this.ttl)) { throw new TypeError("ttl must be a positive integer if specified"); } this.#initializeTTLTracking(); } if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) { throw new TypeError("At least one of max, maxSize, or ttl is required"); } if (!this.ttlAutopurge && !this.#max && !this.#maxSize) { const code = "LRU_CACHE_UNBOUNDED"; if (shouldWarn(code)) { warned.add(code); const msg = "TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption."; emitWarning(msg, "UnboundedCacheWarning", code, _LRUCache); } } } /** * Return the number of ms left in the item's TTL. If item is not in cache, * returns `0`. Returns `Infinity` if item is in cache without a defined TTL. */ getRemainingTTL(key) { return this.#keyMap.has(key) ? Infinity : 0; } #initializeTTLTracking() { const ttls = new ZeroArray(this.#max); const starts = new ZeroArray(this.#max); this.#ttls = ttls; this.#starts = starts; this.#setItemTTL = (index, ttl, start = perf.now()) => { starts[index] = ttl !== 0 ? 
start : 0; ttls[index] = ttl; if (ttl !== 0 && this.ttlAutopurge) { const t = setTimeout(() => { if (this.#isStale(index)) { this.#delete(this.#keyList[index], "expire"); } }, ttl + 1); if (t.unref) { t.unref(); } } }; this.#updateItemAge = (index) => { starts[index] = ttls[index] !== 0 ? perf.now() : 0; }; this.#statusTTL = (status, index) => { if (ttls[index]) { const ttl = ttls[index]; const start = starts[index]; if (!ttl || !start) return; status.ttl = ttl; status.start = start; status.now = cachedNow || getNow(); const age = status.now - start; status.remainingTTL = ttl - age; } }; let cachedNow = 0; const getNow = () => { const n = perf.now(); if (this.ttlResolution > 0) { cachedNow = n; const t = setTimeout(() => cachedNow = 0, this.ttlResolution); if (t.unref) { t.unref(); } } return n; }; this.getRemainingTTL = (key) => { const index = this.#keyMap.get(key); if (index === void 0) { return 0; } const ttl = ttls[index]; const start = starts[index]; if (!ttl || !start) { return Infinity; } const age = (cachedNow || getNow()) - start; return ttl - age; }; this.#isStale = (index) => { const s = starts[index]; const t = ttls[index]; return !!t && !!s && (cachedNow || getNow()) - s > t; }; } // conditionally set private methods related to TTL #updateItemAge = () => { }; #statusTTL = () => { }; #setItemTTL = () => { }; /* c8 ignore stop */ #isStale = () => false; #initializeSizeTracking() { const sizes = new ZeroArray(this.#max); this.#calculatedSize = 0; this.#sizes = sizes; this.#removeItemSize = (index) => { this.#calculatedSize -= sizes[index]; sizes[index] = 0; }; this.#requireSize = (k, v, size, sizeCalculation) => { if (this.#isBackgroundFetch(v)) { return 0; } if (!isPosInt(size)) { if (sizeCalculation) { if (typeof sizeCalculation !== "function") { throw new TypeError("sizeCalculation must be a function"); } size = sizeCalculation(v, k); if (!isPosInt(size)) { throw new TypeError("sizeCalculation return invalid (expect positive integer)"); } } else { throw new TypeError("invalid size value (must be positive integer). When maxSize or maxEntrySize is used, sizeCalculation or size must be set."); } } return size; }; this.#addItemSize = (index, size, status) => { sizes[index] = size; if (this.#maxSize) { const maxSize = this.#maxSize - sizes[index]; while (this.#calculatedSize > maxSize) { this.#evict(true); } } this.#calculatedSize += sizes[index]; if (status) { status.entrySize = size; status.totalCalculatedSize = this.#calculatedSize; } }; } #removeItemSize = (_i) => { }; #addItemSize = (_i, _s, _st) => { }; #requireSize = (_k, _v, size, sizeCalculation) => { if (size || sizeCalculation) { throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache"); } return 0; }; *#indexes({ allowStale = this.allowStale } = {}) { if (this.#size) { for (let i = this.#tail; true; ) { if (!this.#isValidIndex(i)) { break; } if (allowStale || !this.#isStale(i)) { yield i; } if (i === this.#head) { break; } else { i = this.#prev[i]; } } } } *#rindexes({ allowStale = this.allowStale } = {}) { if (this.#size) { for (let i = this.#head; true; ) { if (!this.#isValidIndex(i)) { break; } if (allowStale || !this.#isStale(i)) { yield i; } if (i === this.#tail) { break; } else { i = this.#next[i]; } } } } #isValidIndex(index) { return index !== void 0 && this.#keyMap.get(this.#keyList[index]) === index; } /** * Return a generator yielding `[key, value]` pairs, * in order from most recently used to least recently used. 
*/ *entries() { for (const i of this.#indexes()) { if (this.#valList[i] !== void 0 && this.#keyList[i] !== void 0 && !this.#isBackgroundFetch(this.#valList[i])) { yield [this.#keyList[i], this.#valList[i]]; } } } /** * Inverse order version of {@link LRUCache.entries} * * Return a generator yielding `[key, value]` pairs, * in order from least recently used to most recently used. */ *rentries() { for (const i of this.#rindexes()) { if (this.#valList[i] !== void 0 && this.#keyList[i] !== void 0 && !this.#isBackgroundFetch(this.#valList[i])) { yield [this.#keyList[i], this.#valList[i]]; } } } /** * Return a generator yielding the keys in the cache, * in order from most recently used to least recently used. */ *keys() { for (const i of this.#indexes()) { const k = this.#keyList[i]; if (k !== void 0 && !this.#isBackgroundFetch(this.#valList[i])) { yield k; } } } /** * Inverse order version of {@link LRUCache.keys} * * Return a generator yielding the keys in the cache, * in order from least recently used to most recently used. */ *rkeys() { for (const i of this.#rindexes()) { const k = this.#keyList[i]; if (k !== void 0 && !this.#isBackgroundFetch(this.#valList[i])) { yield k; } } } /** * Return a generator yielding the values in the cache, * in order from most recently used to least recently used. */ *values() { for (const i of this.#indexes()) { const v = this.#valList[i]; if (v !== void 0 && !this.#isBackgroundFetch(this.#valList[i])) { yield this.#valList[i]; } } } /** * Inverse order version of {@link LRUCache.values} * * Return a generator yielding the values in the cache, * in order from least recently used to most recently used. */ *rvalues() { for (const i of this.#rindexes()) { const v = this.#valList[i]; if (v !== void 0 && !this.#isBackgroundFetch(this.#valList[i])) { yield this.#valList[i]; } } } /** * Iterating over the cache itself yields the same results as * {@link LRUCache.entries} */ [Symbol.iterator]() { return this.entries(); } /** * A String value that is used in the creation of the default string * description of an object. Called by the built-in method * `Object.prototype.toString`. */ [Symbol.toStringTag] = "LRUCache"; /** * Find a value for which the supplied fn method returns a truthy value, * similar to `Array.find()`. fn is called as `fn(value, key, cache)`. */ find(fn, getOptions = {}) { for (const i of this.#indexes()) { const v = this.#valList[i]; const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; if (value === void 0) continue; if (fn(value, this.#keyList[i], this)) { return this.get(this.#keyList[i], getOptions); } } } /** * Call the supplied function on each item in the cache, in order from most * recently used to least recently used. * * `fn` is called as `fn(value, key, cache)`. * * If `thisp` is provided, function will be called in the `this`-context of * the provided object, or the cache if no `thisp` object is provided. * * Does not update age or recenty of use, or iterate over stale values. */ forEach(fn, thisp = this) { for (const i of this.#indexes()) { const v = this.#valList[i]; const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; if (value === void 0) continue; fn.call(thisp, value, this.#keyList[i], this); } } /** * The same as {@link LRUCache.forEach} but items are iterated over in * reverse order. (ie, less recently used items are iterated over first.) */ rforEach(fn, thisp = this) { for (const i of this.#rindexes()) { const v = this.#valList[i]; const value = this.#isBackgroundFetch(v) ? 
v.__staleWhileFetching : v; if (value === void 0) continue; fn.call(thisp, value, this.#keyList[i], this); } } /** * Delete any stale entries. Returns true if anything was removed, * false otherwise. */ purgeStale() { let deleted = false; for (const i of this.#rindexes({ allowStale: true })) { if (this.#isStale(i)) { this.#delete(this.#keyList[i], "expire"); deleted = true; } } return deleted; } /** * Get the extended info about a given entry, to get its value, size, and * TTL info simultaneously. Returns `undefined` if the key is not present. * * Unlike {@link LRUCache#dump}, which is designed to be portable and survive * serialization, the `start` value is always the current timestamp, and the * `ttl` is a calculated remaining time to live (negative if expired). * * Always returns stale values, if their info is found in the cache, so be * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl}) * if relevant. */ info(key) { const i = this.#keyMap.get(key); if (i === void 0) return void 0; const v = this.#valList[i]; const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; if (value === void 0) return void 0; const entry = { value }; if (this.#ttls && this.#starts) { const ttl = this.#ttls[i]; const start = this.#starts[i]; if (ttl && start) { const remain = ttl - (perf.now() - start); entry.ttl = remain; entry.start = Date.now(); } } if (this.#sizes) { entry.size = this.#sizes[i]; } return entry; } /** * Return an array of [key, {@link LRUCache.Entry}] tuples which can be * passed to {@link LRLUCache#load}. * * The `start` fields are calculated relative to a portable `Date.now()` * timestamp, even if `performance.now()` is available. * * Stale entries are always included in the `dump`, even if * {@link LRUCache.OptionsBase.allowStale} is false. * * Note: this returns an actual array, not a generator, so it can be more * easily passed around. */ dump() { const arr = []; for (const i of this.#indexes({ allowStale: true })) { const key = this.#keyList[i]; const v = this.#valList[i]; const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; if (value === void 0 || key === void 0) continue; const entry = { value }; if (this.#ttls && this.#starts) { entry.ttl = this.#ttls[i]; const age = perf.now() - this.#starts[i]; entry.start = Math.floor(Date.now() - age); } if (this.#sizes) { entry.size = this.#sizes[i]; } arr.unshift([key, entry]); } return arr; } /** * Reset the cache and load in the items in entries in the order listed. * * The shape of the resulting cache may be different if the same options are * not used in both caches. * * The `start` fields are assumed to be calculated relative to a portable * `Date.now()` timestamp, even if `performance.now()` is available. */ load(arr) { this.clear(); for (const [key, entry] of arr) { if (entry.start) { const age = Date.now() - entry.start; entry.start = perf.now() - age; } this.set(key, entry.value, entry); } } /** * Add a value to the cache. * * Note: if `undefined` is specified as a value, this is an alias for * {@link LRUCache#delete} * * Fields on the {@link LRUCache.SetOptions} options param will override * their corresponding values in the constructor options for the scope * of this single `set()` operation. * * If `start` is provided, then that will set the effective start * time for the TTL calculation. Note that this must be a previous * value of `performance.now()` if supported, or a previous value of * `Date.now()` if not. 
* * Options object may also include `size`, which will prevent * calling the `sizeCalculation` function and just use the specified * number if it is a positive integer, and `noDisposeOnSet` which * will prevent calling a `dispose` function in the case of * overwrites. * * If the `size` (or return value of `sizeCalculation`) for a given * entry is greater than `maxEntrySize`, then the item will not be * added to the cache. * * Will update the recency of the entry. * * If the value is `undefined`, then this is an alias for * `cache.delete(key)`. `undefined` is never stored in the cache. */ set(k, v, setOptions = {}) { if (v === void 0) { this.delete(k); return this; } const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status } = setOptions; let { noUpdateTTL = this.noUpdateTTL } = setOptions; const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation); if (this.maxEntrySize && size > this.maxEntrySize) { if (status) { status.set = "miss"; status.maxEntrySizeExceeded = true; } this.#delete(k, "set"); return this; } let index = this.#size === 0 ? void 0 : this.#keyMap.get(k); if (index === void 0) { index = this.#size === 0 ? this.#tail : this.#free.length !== 0 ? this.#free.pop() : this.#size === this.#max ? this.#evict(false) : this.#size; this.#keyList[index] = k; this.#valList[index] = v; this.#keyMap.set(k, index); this.#next[this.#tail] = index; this.#prev[index] = this.#tail; this.#tail = index; this.#size++; this.#addItemSize(index, size, status); if (status) status.set = "add"; noUpdateTTL = false; } else { this.#moveToTail(index); const oldVal = this.#valList[index]; if (v !== oldVal) { if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) { oldVal.__abortController.abort(new Error("replaced")); const { __staleWhileFetching: s } = oldVal; if (s !== void 0 && !noDisposeOnSet) { if (this.#hasDispose) { this.#dispose?.(s, k, "set"); } if (this.#hasDisposeAfter) { this.#disposed?.push([s, k, "set"]); } } } else if (!noDisposeOnSet) { if (this.#hasDispose) { this.#dispose?.(oldVal, k, "set"); } if (this.#hasDisposeAfter) { this.#disposed?.push([oldVal, k, "set"]); } } this.#removeItemSize(index); this.#addItemSize(index, size, status); this.#valList[index] = v; if (status) { status.set = "replace"; const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ? oldVal.__staleWhileFetching : oldVal; if (oldValue !== void 0) status.oldValue = oldValue; } } else if (status) { status.set = "update"; } } if (ttl !== 0 && !this.#ttls) { this.#initializeTTLTracking(); } if (this.#ttls) { if (!noUpdateTTL) { this.#setItemTTL(index, ttl, start); } if (status) this.#statusTTL(status, index); } if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) { const dt = this.#disposed; let task; while (task = dt?.shift()) { this.#disposeAfter?.(...task); } } return this; } /** * Evict the least recently used item, returning its value or * `undefined` if cache is empty. 
*/ pop() { try { while (this.#size) { const val = this.#valList[this.#head]; this.#evict(true); if (this.#isBackgroundFetch(val)) { if (val.__staleWhileFetching) { return val.__staleWhileFetching; } } else if (val !== void 0) { return val; } } } finally { if (this.#hasDisposeAfter && this.#disposed) { const dt = this.#disposed; let task; while (task = dt?.shift()) { this.#disposeAfter?.(...task); } } } } #evict(free) { const head = this.#head; const k = this.#keyList[head]; const v = this.#valList[head]; if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) { v.__abortController.abort(new Error("evicted")); } else if (this.#hasDispose || this.#hasDisposeAfter) { if (this.#hasDispose) { this.#dispose?.(v, k, "evict"); } if (this.#hasDisposeAfter) { this.#disposed?.push([v, k, "evict"]); } } this.#removeItemSize(head); if (free) { this.#keyList[head] = void 0; this.#valList[head] = void 0; this.#free.push(head); } if (this.#size === 1) { this.#head = this.#tail = 0; this.#free.length = 0; } else { this.#head = this.#next[head]; } this.#keyMap.delete(k); this.#size--; return head; } /** * Check if a key is in the cache, without updating the recency of use. * Will return false if the item is stale, even though it is technically * in the cache. * * Check if a key is in the cache, without updating the recency of * use. Age is updated if {@link LRUCache.OptionsBase.updateAgeOnHas} is set * to `true` in either the options or the constructor. * * Will return `false` if the item is stale, even though it is technically in * the cache. The difference can be determined (if it matters) by using a * `status` argument, and inspecting the `has` field. * * Will not update item age unless * {@link LRUCache.OptionsBase.updateAgeOnHas} is set. */ has(k, hasOptions = {}) { const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions; const index = this.#keyMap.get(k); if (index !== void 0) { const v = this.#valList[index]; if (this.#isBackgroundFetch(v) && v.__staleWhileFetching === void 0) { return false; } if (!this.#isStale(index)) { if (updateAgeOnHas) { this.#updateItemAge(index); } if (status) { status.has = "hit"; this.#statusTTL(status, index); } return true; } else if (status) { status.has = "stale"; this.#statusTTL(status, index); } } else if (status) { status.has = "miss"; } return false; } /** * Like {@link LRUCache#get} but doesn't update recency or delete stale * items. * * Returns `undefined` if the item is stale, unless * {@link LRUCache.OptionsBase.allowStale} is set. */ peek(k, peekOptions = {}) { const { allowStale = this.allowStale } = peekOptions; const index = this.#keyMap.get(k); if (index === void 0 || !allowStale && this.#isStale(index)) { return; } const v = this.#valList[index]; return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; } #backgroundFetch(k, index, options, context) { const v = index === void 0 ? 
void 0 : this.#valList[index]; if (this.#isBackgroundFetch(v)) { return v; } const ac = new AC(); const { signal } = options; signal?.addEventListener("abort", () => ac.abort(signal.reason), { signal: ac.signal }); const fetchOpts = { signal: ac.signal, options, context }; const cb = (v2, updateCache = false) => { const { aborted } = ac.signal; const ignoreAbort = options.ignoreFetchAbort && v2 !== void 0; if (options.status) { if (aborted && !updateCache) { options.status.fetchAborted = true; options.status.fetchError = ac.signal.reason; if (ignoreAbort) options.status.fetchAbortIgnored = true; } else { options.status.fetchResolved = true; } } if (aborted && !ignoreAbort && !updateCache) { return fetchFail(ac.signal.reason); } const bf2 = p; if (this.#valList[index] === p) { if (v2 === void 0) { if (bf2.__staleWhileFetching) { this.#valList[index] = bf2.__staleWhileFetching; } else { this.#delete(k, "fetch"); } } else { if (options.status) options.status.fetchUpdated = true; this.set(k, v2, fetchOpts.options); } } return v2; }; const eb = (er) => { if (options.status) { options.status.fetchRejected = true; options.status.fetchError = er; } return fetchFail(er); }; const fetchFail = (er) => { const { aborted } = ac.signal; const allowStaleAborted = aborted && options.allowStaleOnFetchAbort; const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection; const noDelete = allowStale || options.noDeleteOnFetchRejection; const bf2 = p; if (this.#valList[index] === p) { const del = !noDelete || bf2.__staleWhileFetching === void 0; if (del) { this.#delete(k, "fetch"); } else if (!allowStaleAborted) { this.#valList[index] = bf2.__staleWhileFetching; } } if (allowStale) { if (options.status && bf2.__staleWhileFetching !== void 0) { options.status.returnedStale = true; } return bf2.__staleWhileFetching; } else if (bf2.__returned === bf2) { throw er; } }; const pcall = (res, rej) => { const fmp = this.#fetchMethod?.(k, v, fetchOpts); if (fmp && fmp instanceof Promise) { fmp.then((v2) => res(v2 === void 0 ? 
void 0 : v2), rej); } ac.signal.addEventListener("abort", () => { if (!options.ignoreFetchAbort || options.allowStaleOnFetchAbort) { res(void 0); if (options.allowStaleOnFetchAbort) { res = (v2) => cb(v2, true); } } }); }; if (options.status) options.status.fetchDispatched = true; const p = new Promise(pcall).then(cb, eb); const bf = Object.assign(p, { __abortController: ac, __staleWhileFetching: v, __returned: void 0 }); if (index === void 0) { this.set(k, bf, { ...fetchOpts.options, status: void 0 }); index = this.#keyMap.get(k); } else { this.#valList[index] = bf; } return bf; } #isBackgroundFetch(p) { if (!this.#hasFetchMethod) return false; const b = p; return !!b && b instanceof Promise && b.hasOwnProperty("__staleWhileFetching") && b.__abortController instanceof AC; } async fetch(k, fetchOptions = {}) { const { // get options allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, // set options ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, // fetch exclusive options noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal } = fetchOptions; if (!this.#hasFetchMethod) { if (status) status.fetch = "get"; return this.get(k, { allowStale, updateAgeOnGet, noDeleteOnStaleGet, status }); } const options = { allowStale, updateAgeOnGet, noDeleteOnStaleGet, ttl, noDisposeOnSet, size, sizeCalculation, noUpdateTTL, noDeleteOnFetchRejection, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, status, signal }; let index = this.#keyMap.get(k); if (index === void 0) { if (status) status.fetch = "miss"; const p = this.#backgroundFetch(k, index, options, context); return p.__returned = p; } else { const v = this.#valList[index]; if (this.#isBackgroundFetch(v)) { const stale = allowStale && v.__staleWhileFetching !== void 0; if (status) { status.fetch = "inflight"; if (stale) status.returnedStale = true; } return stale ? v.__staleWhileFetching : v.__returned = v; } const isStale = this.#isStale(index); if (!forceRefresh && !isStale) { if (status) status.fetch = "hit"; this.#moveToTail(index); if (updateAgeOnGet) { this.#updateItemAge(index); } if (status) this.#statusTTL(status, index); return v; } const p = this.#backgroundFetch(k, index, options, context); const hasStale = p.__staleWhileFetching !== void 0; const staleVal = hasStale && allowStale; if (status) { status.fetch = isStale ? "stale" : "refresh"; if (staleVal && isStale) status.returnedStale = true; } return staleVal ? p.__staleWhileFetching : p.__returned = p; } } async forceFetch(k, fetchOptions = {}) { const v = await this.fetch(k, fetchOptions); if (v === void 0) throw new Error("fetch() returned undefined"); return v; } memo(k, memoOptions = {}) { const memoMethod = this.#memoMethod; if (!memoMethod) { throw new Error("no memoMethod provided to constructor"); } const { context, forceRefresh, ...options } = memoOptions; const v = this.get(k, options); if (!forceRefresh && v !== void 0) return v; const vv = memoMethod(k, v, { options, context }); this.set(k, vv, options); return vv; } /** * Return a value from the cache. Will update the recency of the cache * entry found. * * If the key is not found, get() will return `undefined`. 
*/ get(k, getOptions = {}) { const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status } = getOptions; const index = this.#keyMap.get(k); if (index !== void 0) { const value = this.#valList[index]; const fetching = this.#isBackgroundFetch(value); if (status) this.#statusTTL(status, index); if (this.#isStale(index)) { if (status) status.get = "stale"; if (!fetching) { if (!noDeleteOnStaleGet) { this.#delete(k, "expire"); } if (status && allowStale) status.returnedStale = true; return allowStale ? value : void 0; } else { if (status && allowStale && value.__staleWhileFetching !== void 0) { status.returnedStale = true; } return allowStale ? value.__staleWhileFetching : void 0; } } else { if (status) status.get = "hit"; if (fetching) { return value.__staleWhileFetching; } this.#moveToTail(index); if (updateAgeOnGet) { this.#updateItemAge(index); } return value; } } else if (status) { status.get = "miss"; } } #connect(p, n) { this.#prev[n] = p; this.#next[p] = n; } #moveToTail(index) { if (index !== this.#tail) { if (index === this.#head) { this.#head = this.#next[index]; } else { this.#connect(this.#prev[index], this.#next[index]); } this.#connect(this.#tail, index); this.#tail = index; } } /** * Deletes a key out of the cache. * * Returns true if the key was deleted, false otherwise. */ delete(k) { return this.#delete(k, "delete"); } #delete(k, reason) { let deleted = false; if (this.#size !== 0) { const index = this.#keyMap.get(k); if (index !== void 0) { deleted = true; if (this.#size === 1) { this.#clear(reason); } else { this.#removeItemSize(index); const v = this.#valList[index]; if (this.#isBackgroundFetch(v)) { v.__abortController.abort(new Error("deleted")); } else if (this.#hasDispose || this.#hasDisposeAfter) { if (this.#hasDispose) { this.#dispose?.(v, k, reason); } if (this.#hasDisposeAfter) { this.#disposed?.push([v, k, reason]); } } this.#keyMap.delete(k); this.#keyList[index] = void 0; this.#valList[index] = void 0; if (index === this.#tail) { this.#tail = this.#prev[index]; } else if (index === this.#head) { this.#head = this.#next[index]; } else { const pi = this.#prev[index]; this.#next[pi] = this.#next[index]; const ni = this.#next[index]; this.#prev[ni] = this.#prev[index]; } this.#size--; this.#free.push(index); } } } if (this.#hasDisposeAfter && this.#disposed?.length) { const dt = this.#disposed; let task; while (task = dt?.shift()) { this.#disposeAfter?.(...task); } } return deleted; } /** * Clear the cache entirely, throwing away all values. 
*/ clear() { return this.#clear("delete"); } #clear(reason) { for (const index of this.#rindexes({ allowStale: true })) { const v = this.#valList[index]; if (this.#isBackgroundFetch(v)) { v.__abortController.abort(new Error("deleted")); } else { const k = this.#keyList[index]; if (this.#hasDispose) { this.#dispose?.(v, k, reason); } if (this.#hasDisposeAfter) { this.#disposed?.push([v, k, reason]); } } } this.#keyMap.clear(); this.#valList.fill(void 0); this.#keyList.fill(void 0); if (this.#ttls && this.#starts) { this.#ttls.fill(0); this.#starts.fill(0); } if (this.#sizes) { this.#sizes.fill(0); } this.#head = 0; this.#tail = 0; this.#free.length = 0; this.#calculatedSize = 0; this.#size = 0; if (this.#hasDisposeAfter && this.#disposed) { const dt = this.#disposed; let task; while (task = dt?.shift()) { this.#disposeAfter?.(...task); } } } }; // src/run/storage/request-scoped-in-memory-cache.cts var import_blob_types = require("../../shared/blob-types.cjs"); var import_request_context = require("../handlers/request-context.cjs"); var import_tracer = require("../handlers/tracer.cjs"); var NullValue = Symbol.for("null-value"); var IN_MEMORY_CACHE_MAX_SIZE = Symbol.for("nf-in-memory-cache-max-size"); var IN_MEMORY_LRU_CACHE = Symbol.for("nf-in-memory-lru-cache"); var extendedGlobalThis = globalThis; var DEFAULT_FALLBACK_MAX_SIZE = 50 * 1024 * 1024; function setInMemoryCacheMaxSizeFromNextConfig(size) { if (typeof size === "number") { extendedGlobalThis[IN_MEMORY_CACHE_MAX_SIZE] = size; } } var isPositiveNumber = (value) => { return typeof value === "number" && value > 0; }; var BASE_BLOB_SIZE = 25; var estimateBlobKnownTypeSize = (valueToStore) => { if (valueToStore === null || (0, import_types.isPromise)(valueToStore) || (0, import_blob_types.isTagManifest)(valueToStore)) { return BASE_BLOB_SIZE; } if ((0, import_blob_types.isHtmlBlob)(valueToStore)) { return BASE_BLOB_SIZE + valueToStore.html.length; } if (valueToStore.value?.kind === "FETCH") { return BASE_BLOB_SIZE + valueToStore.value.data.body.length; } if (valueToStore.value?.kind === "APP_PAGE") { return BASE_BLOB_SIZE + valueToStore.value.html.length + (valueToStore.value.rscData?.length ?? 0); } if (valueToStore.value?.kind === "PAGE" || valueToStore.value?.kind === "PAGES") { return BASE_BLOB_SIZE + valueToStore.value.html.length + JSON.stringify(valueToStore.value.pageData).length; } if (valueToStore.value?.kind === "ROUTE" || valueToStore.value?.kind === "APP_ROUTE") { return BASE_BLOB_SIZE + valueToStore.value.body.length; } }; var estimateBlobSize = (valueToStore) => { let estimatedKnownTypeSize; let estimateBlobKnownTypeSizeError; try { estimatedKnownTypeSize = estimateBlobKnownTypeSize(valueToStore); if (isPositiveNumber(estimatedKnownTypeSize)) { return estimatedKnownTypeSize; } } catch (error) { estimateBlobKnownTypeSizeError = error; } const calculatedSize = JSON.stringify(valueToStore).length; (0, import_tracer.recordWarning)( new Error( `Blob size calculation did fallback to JSON.stringify. EstimatedKnownTypeSize: ${estimatedKnownTypeSize}, CalculatedSize: ${calculatedSize}, ValueToStore: ${JSON.stringify(valueToStore)}`, estimateBlobKnownTypeSizeError ? { cause: estimateBlobKnownTypeSizeError } : void 0 ) ); return isPositiveNumber(calculatedSize) ? calculatedSize : BASE_BLOB_SIZE; }; function getInMemoryLRUCache() { if (typeof extendedGlobalThis[IN_MEMORY_LRU_CACHE] === "undefined") { const maxSize = typeof extendedGlobalThis[IN_MEMORY_CACHE_MAX_SIZE] === "number" ? 
extendedGlobalThis[IN_MEMORY_CACHE_MAX_SIZE] : DEFAULT_FALLBACK_MAX_SIZE; extendedGlobalThis[IN_MEMORY_LRU_CACHE] = maxSize === 0 ? null : new LRUCache({ max: 1e3, maxSize, sizeCalculation: (valueToStore) => { return estimateBlobSize(valueToStore === NullValue ? null : valueToStore); } }); } return extendedGlobalThis[IN_MEMORY_LRU_CACHE]; } var getRequestScopedInMemoryCache = () => { const requestContext = (0, import_request_context.getRequestContext)(); const inMemoryLRUCache = getInMemoryLRUCache(); return { get(key) { if (!requestContext) return; try { const value = inMemoryLRUCache?.get(`${requestContext.requestID}:${key}`); return value === NullValue ? null : value; } catch (error) { (0, import_tracer.recordWarning)(new Error("Failed to get value from memory cache", { cause: error })); } }, set(key, value) { if (!requestContext) return; try { inMemoryLRUCache?.set(`${requestContext?.requestID}:${key}`, value ?? NullValue); } catch (error) { (0, import_tracer.recordWarning)(new Error("Failed to store value in memory cache", { cause: error })); } } }; }; // Annotate the CommonJS export names for ESM import in node: 0 && (module.exports = { getRequestScopedInMemoryCache, setInMemoryCacheMaxSizeFromNextConfig });
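
A minimal usage sketch of the two exports above, getRequestScopedInMemoryCache and setInMemoryCacheMaxSizeFromNextConfig. This is not code from the plugin: the require specifier is illustrative, since this page does not show where the compiled .cjs file lives inside the package, and pageBlob is a hypothetical placeholder value. Both get() and set() return early unless getRequestContext() yields a request context, so the calls below only do real work inside a Netlify request.

// Usage sketch only; the require path and pageBlob are assumptions, not plugin code.
const {
  getRequestScopedInMemoryCache,
  setInMemoryCacheMaxSizeFromNextConfig,
} = require("@netlify/plugin-nextjs/dist/run/storage/request-scoped-in-memory-cache.cjs"); // hypothetical path

// Cap the shared LRU at 32 MiB instead of the 50 MiB fallback. This only takes
// effect before the LRU is first created, and passing 0 disables the in-memory
// cache entirely (the LRU is never constructed).
setInMemoryCacheMaxSizeFromNextConfig(32 * 1024 * 1024);

// Entries are keyed as `${requestID}:${key}`, so requests never see each
// other's values. Outside a request context, set() is a no-op and get()
// returns undefined.
const cache = getRequestScopedInMemoryCache();
const pageBlob = { html: "<html></html>" }; // hypothetical cached value
cache.set("my-cache-key", pageBlob);
const hit = cache.get("my-cache-key"); // pageBlob on a hit, undefined on a miss

// null survives a round trip: it is stored as an internal NullValue sentinel
// and converted back to null on read.
cache.set("empty-key", null);

The LRU behind this API is the inlined copy of lru-cache shown above, configured with max: 1000 entries, the configured maxSize, and a sizeCalculation that estimates each stored blob's footprint, falling back to JSON.stringify length (with a recorded warning) when the blob shape is not one of the known kinds.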