next (The React Framework)

/**
 * This is the default "use cache" handler; it defaults to an in-memory store.
 * In-memory caches are fragile and should not use stale-while-revalidate
 * semantics on the caches because it's not worth warming up an entry that's
 * likely going to get evicted before we get to use it anyway. However, we also
 * don't want to reuse a stale entry for too long, so stale entries should be
 * considered expired/missing in such cache handlers.
 */
import { LRUCache } from '../lru-cache';
import { isStale, tagsManifest } from '../incremental-cache/tags-manifest.external';

// LRU cache default to max 50 MB but in future track
const memoryCache = new LRUCache(50 * 1024 * 1024, (entry) => entry.size);
const pendingSets = new Map();

const debug = process.env.NEXT_PRIVATE_DEBUG_CACHE
  ? console.debug.bind(console, 'DefaultCacheHandler:')
  : undefined;

const DefaultCacheHandler = {
  async get(cacheKey) {
    // If a set for this key is still in flight, wait for it to settle first.
    const pendingPromise = pendingSets.get(cacheKey);
    if (pendingPromise) {
      debug?.('get', cacheKey, 'pending');
      await pendingPromise;
    }

    const privateEntry = memoryCache.get(cacheKey);
    if (!privateEntry) {
      debug?.('get', cacheKey, 'not found');
      return undefined;
    }

    const entry = privateEntry.entry;
    if (
      performance.timeOrigin + performance.now() >
      entry.timestamp + entry.revalidate * 1000
    ) {
      // In-memory caches should expire after revalidate time because it is
      // unlikely that a new entry will be able to be used before it is dropped
      // from the cache.
      debug?.('get', cacheKey, 'expired');
      return undefined;
    }

    if (isStale(entry.tags, entry.timestamp)) {
      debug?.('get', cacheKey, 'had stale tag');
      return undefined;
    }

    // Tee the stored stream so the cached copy stays readable for future gets.
    const [returnStream, newSaved] = entry.value.tee();
    entry.value = newSaved;

    debug?.('get', cacheKey, 'found', {
      tags: entry.tags,
      timestamp: entry.timestamp,
      revalidate: entry.revalidate,
      expire: entry.expire,
    });

    return {
      ...entry,
      value: returnStream,
    };
  },

  async set(cacheKey, pendingEntry) {
    debug?.('set', cacheKey, 'start');

    // Record a pending promise so concurrent gets for this key wait for the set.
    let resolvePending = () => {};
    const pendingPromise = new Promise((resolve) => {
      resolvePending = resolve;
    });
    pendingSets.set(cacheKey, pendingPromise);

    const entry = await pendingEntry;
    let size = 0;
    try {
      const [value, clonedValue] = entry.value.tee();
      entry.value = value;

      // Drain the cloned stream to measure the entry's size for the LRU.
      const reader = clonedValue.getReader();
      for (let chunk; !(chunk = await reader.read()).done; ) {
        size += Buffer.from(chunk.value).byteLength;
      }

      memoryCache.set(cacheKey, {
        entry,
        isErrored: false,
        errorRetryCount: 0,
        size,
      });
      debug?.('set', cacheKey, 'done');
    } catch (err) {
      // TODO: store partial buffer with error after we retry 3 times
      debug?.('set', cacheKey, 'failed', err);
    } finally {
      resolvePending();
      pendingSets.delete(cacheKey);
    }
  },

  async refreshTags() {
    // Nothing to do for an in-memory cache handler.
  },

  async getExpiration(...tags) {
    const expiration = Math.max(...tags.map((tag) => tagsManifest.get(tag) ?? 0));
    debug?.('getExpiration', { tags, expiration });
    return expiration;
  },

  async expireTags(...tags) {
    const timestamp = Math.round(performance.timeOrigin + performance.now());
    debug?.('expireTags', { tags, timestamp });
    for (const tag of tags) {
      // TODO: update file-system-cache?
      tagsManifest.set(tag, timestamp);
    }
  },
};

export default DefaultCacheHandler;
//# sourceMappingURL=default.js.map
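For context, here is a minimal usage sketch of the handler's get/set/expireTags surface. It is illustrative only: the entry shape (value, tags, timestamp, revalidate, expire) mirrors the fields the handler reads above, but the relative import path, the demo key and tag names, and the way Next.js actually constructs entries are assumptions, and the Blob/Response globals assume Node 18+.

// Illustrative sketch (not part of default.js); entry shape and import path
// are assumptions based on the fields the handler reads above.
import DefaultCacheHandler from './default.js';

async function demo() {
  // A ReadableStream payload; Blob and Response are globals in Node 18+.
  const body = new Blob(['hello']).stream();

  // `set` receives a promise of the entry; it tees the stream and buffers one
  // branch to measure the entry's size before storing it in the LRU.
  await DefaultCacheHandler.set(
    'demo-key',
    Promise.resolve({
      value: body,
      tags: ['demo-tag'],
      timestamp: performance.timeOrigin + performance.now(), // ms since epoch
      revalidate: 60, // seconds until the in-memory entry is treated as expired
      expire: 300,
    })
  );

  // `get` returns undefined for missing, expired, or tag-stale entries;
  // otherwise it tees the stored stream and returns one branch.
  const hit = await DefaultCacheHandler.get('demo-key');
  if (hit) {
    console.log(await new Response(hit.value).text()); // "hello"
  }

  // Expiring a tag only records a timestamp in the shared tagsManifest;
  // entries carrying that tag stay in the LRU until a later get sees them as stale.
  await DefaultCacheHandler.expireTags('demo-tag');
}

demo();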