UNPKG

next

Version:

The React Framework

968 lines (967 loc) • 67.5 kB
"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "cache", { enumerable: true, get: function() { return cache; } }); const _server = require("react-server-dom-webpack/server"); const _client = require("react-server-dom-webpack/client"); const _static = require("react-server-dom-webpack/static"); const _workasyncstorageexternal = require("../app-render/work-async-storage.external"); const _workunitasyncstorageexternal = require("../app-render/work-unit-async-storage.external"); const _dynamicrenderingutils = require("../dynamic-rendering-utils"); const _encryptionutils = require("../app-render/encryption-utils"); const _encryption = require("../app-render/encryption"); const _invarianterror = require("../../shared/lib/invariant-error"); const _createerrorhandler = require("../app-render/create-error-handler"); const _constants = require("./constants"); const _handlers = require("./handlers"); const _usecacheerrors = require("./use-cache-errors"); const _dynamicrendering = require("../app-render/dynamic-rendering"); const _searchparams = require("../request/search-params"); const _lazyresult = require("../lib/lazy-result"); const _dynamicaccessasyncstorageexternal = require("../app-render/dynamic-access-async-storage.external"); const _reactlargeshellerror = require("../app-render/react-large-shell-error"); const _stagedrendering = require("../app-render/staged-rendering"); const isEdgeRuntime = process.env.NEXT_RUNTIME === 'edge'; const debug = process.env.NEXT_PRIVATE_DEBUG_CACHE ? console.debug.bind(console, 'use-cache:') : undefined; const filterStackFrame = process.env.NODE_ENV !== 'production' ? require('../lib/source-maps').filterStackFrameDEV : undefined; const findSourceMapURL = process.env.NODE_ENV !== 'production' ? 
require('../lib/source-maps').findSourceMapURLDEV : undefined;
/**
 * Entry point for producing a new cache entry for a "use cache" function.
 * Runs the generation inside a clean AsyncLocalStorage snapshot (via
 * workStore.runInCleanSnapshot) so request-scoped context cannot leak in.
 *
 * @param workStore               The per-request WorkStore (restored inside the snapshot).
 * @param cacheContext            { kind: 'private' | 'public', outerWorkUnitStore } describing the enclosing scope.
 * @param clientReferenceManifest Manifest used to resolve client module references during serialization.
 * @param encodedArguments        Serialized cache-key parts (string or iterable of chunks).
 * @param fn                      The user's cached function.
 * @param timeoutError            Pre-built error to surface if generation times out.
 */
function generateCacheEntry(workStore, cacheContext, clientReferenceManifest, encodedArguments, fn, timeoutError) {
    // We need to run this inside a clean AsyncLocalStorage snapshot so that the cache
    // generation cannot read anything from the context we're currently executing which
    // might include request specific things like cookies() inside a React.cache().
    // Note: It is important that we await at least once before this because it lets us
    // pop out of any stack specific contexts as well - aka "Sync" Local Storage.
    return workStore.runInCleanSnapshot(generateCacheEntryWithRestoredWorkStore, workStore, cacheContext, clientReferenceManifest, encodedArguments, fn, timeoutError);
}
/**
 * Second stage of cache-entry generation: re-establishes the WorkStore inside
 * the clean snapshot, then delegates to generateCacheEntryWithCacheContext.
 * Parameters are identical to generateCacheEntry (passed through unchanged).
 */
function generateCacheEntryWithRestoredWorkStore(workStore, cacheContext, clientReferenceManifest, encodedArguments, fn, timeoutError) {
    // Since we cleared the AsyncLocalStorage we need to restore the workStore.
    // Note: We explicitly don't restore the RequestStore nor the PrerenderStore.
    // We don't want any request specific information leaking an we don't want to create a
    // bloated fake request mock for every cache call. So any feature that currently lives
    // in RequestStore but should be available to Caches need to move to WorkStore.
    // PrerenderStore is not needed inside the cache scope because the outer most one will
    // be the one to report its result to the outer Prerender.
    return _workasyncstorageexternal.workAsyncStorage.run(workStore, generateCacheEntryWithCacheContext, workStore, cacheContext, clientReferenceManifest, encodedArguments, fn, timeoutError);
}
/**
 * Builds the work-unit store object that is active while the cached function
 * body executes. Produces a 'private-cache' store for "use cache: private"
 * (which additionally carries rootParams/headers/cookies/draftMode from the
 * outer work-unit store) or a plain 'cache' store otherwise.
 *
 * The revalidate/expire/stale fields start at the default cacheLife profile;
 * explicit* fields start undefined and are only set by cacheLife() calls made
 * inside the cached function. tags starts null and is filled in during render.
 *
 * NOTE(review): shouldForceRevalidate is defined elsewhere in this module
 * (not visible in this chunk) — confirm its contract there.
 */
function createUseCacheStore(workStore, cacheContext, defaultCacheLife) {
    if (cacheContext.kind === 'private') {
        const outerWorkUnitStore = cacheContext.outerWorkUnitStore;
        return {
            type: 'private-cache',
            phase: 'render',
            implicitTags: outerWorkUnitStore == null ? void 0 : outerWorkUnitStore.implicitTags,
            revalidate: defaultCacheLife.revalidate,
            expire: defaultCacheLife.expire,
            stale: defaultCacheLife.stale,
            explicitRevalidate: undefined,
            explicitExpire: undefined,
            explicitStale: undefined,
            tags: null,
            hmrRefreshHash: (0, _workunitasyncstorageexternal.getHmrRefreshHash)(workStore, outerWorkUnitStore),
            isHmrRefresh: (0, _workunitasyncstorageexternal.isHmrRefresh)(workStore, outerWorkUnitStore),
            serverComponentsHmrCache: (0, _workunitasyncstorageexternal.getServerComponentsHmrCache)(workStore, outerWorkUnitStore),
            forceRevalidate: shouldForceRevalidate(workStore, outerWorkUnitStore),
            runtimeStagePromise: (0, _workunitasyncstorageexternal.getRuntimeStagePromise)(outerWorkUnitStore),
            draftMode: (0, _workunitasyncstorageexternal.getDraftModeProviderForCacheScope)(workStore, outerWorkUnitStore),
            // Private caches may read request-scoped data, so these are forwarded
            // from the outer work-unit store.
            rootParams: outerWorkUnitStore.rootParams,
            headers: outerWorkUnitStore.headers,
            cookies: outerWorkUnitStore.cookies
        };
    } else {
        let useCacheOrRequestStore;
        const outerWorkUnitStore = cacheContext.outerWorkUnitStore;
        if (outerWorkUnitStore) {
            // Only cache/private-cache/request outer stores carry HMR state that
            // the inner store should inherit; prerender-style stores do not.
            switch(outerWorkUnitStore == null ? void 0 : outerWorkUnitStore.type){
                case 'cache':
                case 'private-cache':
                case 'request':
                    useCacheOrRequestStore = outerWorkUnitStore;
                    break;
                case 'prerender-runtime':
                case 'prerender':
                case 'prerender-ppr':
                case 'prerender-legacy':
                case 'unstable-cache':
                    break;
                default:
                    // Exhaustiveness check (compiled-out TypeScript `never` assertion).
                    outerWorkUnitStore;
            }
        }
        return {
            type: 'cache',
            phase: 'render',
            implicitTags: outerWorkUnitStore == null ? void 0 : outerWorkUnitStore.implicitTags,
            revalidate: defaultCacheLife.revalidate,
            expire: defaultCacheLife.expire,
            stale: defaultCacheLife.stale,
            explicitRevalidate: undefined,
            explicitExpire: undefined,
            explicitStale: undefined,
            tags: null,
            hmrRefreshHash: outerWorkUnitStore && (0, _workunitasyncstorageexternal.getHmrRefreshHash)(workStore, outerWorkUnitStore),
            isHmrRefresh: (useCacheOrRequestStore == null ? void 0 : useCacheOrRequestStore.isHmrRefresh) ?? false,
            serverComponentsHmrCache: useCacheOrRequestStore == null ? void 0 : useCacheOrRequestStore.serverComponentsHmrCache,
            forceRevalidate: shouldForceRevalidate(workStore, outerWorkUnitStore),
            draftMode: outerWorkUnitStore && (0, _workunitasyncstorageexternal.getDraftModeProviderForCacheScope)(workStore, outerWorkUnitStore)
        };
    }
}
/**
 * Asserts that a default cacheLife profile exists and defines all three
 * durations. Throws InvariantError E750 otherwise. Uses `== null` on purpose
 * so that 0 is accepted but null/undefined are not.
 */
function assertDefaultCacheLife(defaultCacheLife) {
    if (!defaultCacheLife || defaultCacheLife.revalidate == null || defaultCacheLife.expire == null || defaultCacheLife.stale == null) {
        throw Object.defineProperty(new _invarianterror.InvariantError('A default cacheLife profile must always be provided.'), "__NEXT_ERROR_CODE", { value: "E750", enumerable: false, configurable: true });
    }
}
/**
 * Third stage: validates cacheLife configuration, builds the inner work-unit
 * store for the cache scope, and (continued below) runs the actual generation
 * inside that store.
 */
function generateCacheEntryWithCacheContext(workStore, cacheContext, clientReferenceManifest, encodedArguments, fn, timeoutError) {
    if (!workStore.cacheLifeProfiles) {
        throw Object.defineProperty(new _invarianterror.InvariantError('cacheLifeProfiles should always be provided.'), "__NEXT_ERROR_CODE", { value: "E748", enumerable: false, configurable: true });
    }
    const defaultCacheLife = workStore.cacheLifeProfiles['default'];
    assertDefaultCacheLife(defaultCacheLife);
    // Initialize the Store for this Cache entry.
    const cacheStore = createUseCacheStore(workStore, cacheContext, defaultCacheLife);
    // Run the generation with the inner cache store active, and with a fresh
    // dynamic-access AbortController so dynamic API usage inside the cached
    // function can abort this entry's prerender.
    return _workunitasyncstorageexternal.workUnitAsyncStorage.run(cacheStore, ()=>_dynamicaccessasyncstorageexternal.dynamicAccessAsyncStorage.run({ abortController: new AbortController() }, generateCacheEntryImpl, workStore, cacheContext, cacheStore, clientReferenceManifest, encodedArguments, fn, timeoutError));
}
/**
 * Merges a finished cache entry's tags into the given revalidate store and
 * tightens the store's stale/revalidate/expire values to the minimum of the
 * store's current values and the entry's (a shorter-lived inner cache must
 * shorten the outer scope's lifetime, never lengthen it).
 */
function propagateCacheLifeAndTagsToRevalidateStore(revalidateStore, entry) {
    const outerTags = revalidateStore.tags ??= [];
    for (const tag of entry.tags){
        if (!outerTags.includes(tag)) {
            outerTags.push(tag);
        }
    }
    if (revalidateStore.stale > entry.stale) {
        revalidateStore.stale = entry.stale;
    }
    if (revalidateStore.revalidate > entry.revalidate) {
        revalidateStore.revalidate = entry.revalidate;
    }
    if (revalidateStore.expire > entry.expire) {
        revalidateStore.expire = entry.expire;
    }
}
/**
 * Propagates an entry's cache life and tags to the outer work-unit store, but
 * only for outer store types that track revalidation state. Request-scoped
 * outer stores (and absent stores) are skipped.
 */
function propagateCacheLifeAndTags(cacheContext, entry) {
    if (cacheContext.kind === 'private') {
        switch(cacheContext.outerWorkUnitStore.type){
            case 'prerender-runtime':
            case 'private-cache':
                propagateCacheLifeAndTagsToRevalidateStore(cacheContext.outerWorkUnitStore, entry);
                break;
            case 'request':
            case undefined:
                break;
            default:
                // Exhaustiveness check (compiled-out TypeScript `never` assertion).
                cacheContext.outerWorkUnitStore;
        }
    } else {
        var _cacheContext_outerWorkUnitStore;
        switch((_cacheContext_outerWorkUnitStore = cacheContext.outerWorkUnitStore) == null ? void 0 : _cacheContext_outerWorkUnitStore.type){
            case 'cache':
            case 'private-cache':
            case 'prerender':
            case 'prerender-runtime':
            case 'prerender-ppr':
            case 'prerender-legacy':
                propagateCacheLifeAndTagsToRevalidateStore(cacheContext.outerWorkUnitStore, entry);
                break;
            case 'request':
            case 'unstable-cache':
            case undefined:
                break;
            default:
                // Exhaustiveness check (compiled-out TypeScript `never` assertion).
                cacheContext.outerWorkUnitStore;
        }
    }
}
/**
 * Drains the saved half of the rendered RSC stream into memory, then produces
 * the cache entry object (value stream + timestamp + resolved cache life +
 * tags) and propagates life/tags outward where appropriate.
 *
 * @param savedStream     One half of the tee'd render stream (fully buffered here).
 * @param workStore       The WorkStore; invalidDynamicUsageError on it errors the replay stream.
 * @param cacheContext    Context whose outerWorkUnitStore decides propagation and cacheSignal end-read.
 * @param innerCacheStore The store that accumulated tags and cacheLife() calls during render.
 * @param startTime       Timestamp (ms since epoch) when generation started; stored on the entry.
 * @param errors          Shared array of render errors; surfaced at the end of the replay stream.
 * @returns The cache entry with a replayable ReadableStream as its value.
 */
async function collectResult(savedStream, workStore, cacheContext, innerCacheStore, startTime, errors) {
    // We create a buffered stream that collects all chunks until the end to
    // ensure that RSC has finished rendering and therefore we have collected
    // all tags. In the future the RSC API might allow for the equivalent of
    // the allReady Promise that exists on SSR streams.
    //
    // If something errored or rejected anywhere in the render, we close
    // the stream as errored. This lets a CacheHandler choose to save the
    // partial result up until that point for future hits for a while to avoid
    // unnecessary retries or not to retry. We use the end of the stream for
    // this to avoid another complicated side-channel. A receiver has to consider
    // that the stream might also error for other reasons anyway such as losing
    // connection.
    const buffer = [];
    const reader = savedStream.getReader();
    try {
        for(let entry; !(entry = await reader.read()).done;){
            buffer.push(entry.value);
        }
    } catch (error) {
        // Render errors end up at the tail of the replay stream, not thrown here.
        errors.push(error);
    }
    let idx = 0;
    const bufferStream = new ReadableStream({
        pull (controller) {
            if (workStore.invalidDynamicUsageError) {
                controller.error(workStore.invalidDynamicUsageError);
            } else if (idx < buffer.length) {
                controller.enqueue(buffer[idx++]);
            } else if (errors.length > 0) {
                // TODO: Should we use AggregateError here?
                controller.error(errors[0]);
            } else {
                controller.close();
            }
        }
    });
    const collectedTags = innerCacheStore.tags;
    // If cacheLife() was used to set an explicit revalidate time we use that.
    // Otherwise, we use the lowest of all inner fetch()/unstable_cache() or nested "use cache".
    // If they're lower than our default.
    const collectedRevalidate = innerCacheStore.explicitRevalidate !== undefined ? innerCacheStore.explicitRevalidate : innerCacheStore.revalidate;
    const collectedExpire = innerCacheStore.explicitExpire !== undefined ? innerCacheStore.explicitExpire : innerCacheStore.expire;
    const collectedStale = innerCacheStore.explicitStale !== undefined ? innerCacheStore.explicitStale : innerCacheStore.stale;
    const entry = {
        value: bufferStream,
        timestamp: startTime,
        revalidate: collectedRevalidate,
        expire: collectedExpire,
        stale: collectedStale,
        tags: collectedTags === null ? [] : collectedTags
    };
    if (cacheContext.outerWorkUnitStore) {
        const outerWorkUnitStore = cacheContext.outerWorkUnitStore;
        // Propagate cache life & tags to the parent context if appropriate.
        switch(outerWorkUnitStore.type){
            case 'prerender':
            case 'prerender-runtime':
                {
                    break;
                }
            case 'request':
                {
                    if (process.env.NODE_ENV === 'development' && outerWorkUnitStore.cacheSignal) {
                        break;
                    }
                // fallthrough
                }
            case 'private-cache':
            case 'cache':
            case 'unstable-cache':
            case 'prerender-legacy':
            case 'prerender-ppr':
                {
                    propagateCacheLifeAndTags(cacheContext, entry);
                    break;
                }
            default:
                {
                    // Exhaustiveness check (compiled-out TypeScript `never` assertion).
                    outerWorkUnitStore;
                }
        }
        // Balance the beginRead() taken when this cache generation started.
        const cacheSignal = (0, _workunitasyncstorageexternal.getCacheSignal)(outerWorkUnitStore);
        if (cacheSignal) {
            cacheSignal.endRead();
        }
    }
    return entry;
}
/**
 * Core generation: decodes the serialized arguments, invokes the cached
 * function, and renders its result to an RSC stream (prerender() during
 * prerendering, renderToReadableStream() otherwise). Continued below.
 */
async function generateCacheEntryImpl(workStore, cacheContext, innerCacheStore, clientReferenceManifest, encodedArguments, fn, timeoutError) {
    const temporaryReferences = (0, _server.createTemporaryReferenceSet)();
    const outerWorkUnitStore = cacheContext.outerWorkUnitStore;
    // The third cache-key part holds the actual call arguments (parts 0 and 1
    // are buildId and action id — see the key construction in `cache` below is
    // not visible here; TODO confirm against the caller).
    const [, , args] = typeof encodedArguments === 'string' ?
    // String form: decode directly. Iterable form: replay the chunks, then —
    // during (runtime) prerenders only — keep the iterable open until the
    // render signal aborts, so hanging promises inside the encoded arguments
    // don't reject with "Connection closed".
    await (0, _server.decodeReply)(encodedArguments, (0, _encryptionutils.getServerModuleMap)(), { temporaryReferences }) : await (0, _server.decodeReplyFromAsyncIterable)({
        async *[Symbol.asyncIterator] () {
            for (const entry of encodedArguments){
                yield entry;
            }
            if (outerWorkUnitStore) {
                switch(outerWorkUnitStore.type){
                    case 'prerender-runtime':
                    case 'prerender':
                        // The encoded arguments might contain hanging promises. In
                        // this case we don't want to reject with "Error: Connection
                        // closed.", so we intentionally keep the iterable alive.
                        // This is similar to the halting trick that we do while
                        // rendering.
                        await new Promise((resolve)=>{
                            if (outerWorkUnitStore.renderSignal.aborted) {
                                resolve();
                            } else {
                                outerWorkUnitStore.renderSignal.addEventListener('abort', ()=>resolve(), { once: true });
                            }
                        });
                        break;
                    case 'prerender-ppr':
                    case 'prerender-legacy':
                    case 'request':
                    case 'cache':
                    case 'private-cache':
                    case 'unstable-cache':
                        break;
                    default:
                        // Exhaustiveness check (compiled-out TypeScript `never` assertion).
                        outerWorkUnitStore;
                }
            }
        }
    }, (0, _encryptionutils.getServerModuleMap)(), { temporaryReferences });
    // Track the timestamp when we started computing the result.
    const startTime = performance.timeOrigin + performance.now();
    // Invoke the inner function to load a new result. We delay the invocation
    // though, until React awaits the promise so that React's request store (ALS)
    // is available when the function is invoked. This allows us, for example, to
    // capture logs so that we can later replay them.
    const resultPromise = (0, _lazyresult.createLazyResult)(fn.bind(null, ...args));
    let errors = [];
    // In the "Cache" environment, we only need to make sure that the error
    // digests are handled correctly. Error formatting and reporting is not
    // necessary here; the errors are encoded in the stream, and will be reported
    // in the "Server" environment.
    const handleError = (error)=>{
        const digest = (0, _createerrorhandler.getDigestForWellKnownError)(error);
        if (digest) {
            return digest;
        }
        if ((0, _reactlargeshellerror.isReactLargeShellError)(error)) {
            // TODO: Aggregate
            console.error(error);
            return undefined;
        }
        if (process.env.NODE_ENV !== 'development') {
            // TODO: For now we're also reporting the error here, because in
            // production, the "Server" environment will only get the obfuscated
            // error (created by the Flight Client in the cache wrapper).
            console.error(error);
        }
        errors.push(error);
    };
    let stream;
    switch(outerWorkUnitStore == null ? void 0 : outerWorkUnitStore.type){
        case 'prerender-runtime':
        case 'prerender':
            var _dynamicAccessAsyncStorage_getStore;
            const timeoutAbortController = new AbortController();
            // If we're prerendering, we give you 50 seconds to fill a cache entry.
            // Otherwise we assume you stalled on hanging input and de-opt. This needs
            // to be lower than just the general timeout of 60 seconds.
            const timer = setTimeout(()=>{
                workStore.invalidDynamicUsageError = timeoutError;
                timeoutAbortController.abort(timeoutError);
            }, 50000);
            const dynamicAccessAbortSignal = (_dynamicAccessAsyncStorage_getStore = _dynamicaccessasyncstorageexternal.dynamicAccessAsyncStorage.getStore()) == null ? void 0 : _dynamicAccessAsyncStorage_getStore.abortController.signal;
            // Abort on whichever fires first: dynamic access, outer render abort,
            // or the 50 s timeout.
            const abortSignal = dynamicAccessAbortSignal ? AbortSignal.any([
                dynamicAccessAbortSignal,
                outerWorkUnitStore.renderSignal,
                timeoutAbortController.signal
            ]) : timeoutAbortController.signal;
            const { prelude } = await (0, _static.prerender)(resultPromise, clientReferenceManifest.clientModules, {
                environmentName: 'Cache',
                filterStackFrame,
                signal: abortSignal,
                temporaryReferences,
                onError (error) {
                    // The abort reason itself is expected, not a render error.
                    if (abortSignal.aborted && abortSignal.reason === error) {
                        return undefined;
                    }
                    return handleError(error);
                }
            });
            clearTimeout(timer);
            if (timeoutAbortController.signal.aborted) {
                // When the timeout is reached we always error the stream. Even for
                // fallback shell prerenders we don't want to return a hanging promise,
                // which would allow the function to become a dynamic hole. Because that
                // would mean that a non-empty shell could be generated which would be
                // subject to revalidation, and we don't want to create long
                // revalidation times.
                stream = new ReadableStream({
                    start (controller) {
                        controller.error(timeoutAbortController.signal.reason);
                    }
                });
            } else if (dynamicAccessAbortSignal == null ? void 0 : dynamicAccessAbortSignal.aborted) {
                // If the prerender is aborted because of dynamic access (e.g. reading
                // fallback params), we return a hanging promise. This essentially makes
                // the "use cache" function dynamic.
                const hangingPromise = (0, _dynamicrenderingutils.makeHangingPromise)(outerWorkUnitStore.renderSignal, workStore.route, 'dynamic "use cache"');
                if (outerWorkUnitStore.cacheSignal) {
                    outerWorkUnitStore.cacheSignal.endRead();
                }
                return { type: 'prerender-dynamic', hangingPromise };
            } else {
                stream = prelude;
            }
            break;
        case 'prerender-ppr':
        case 'prerender-legacy':
        case 'request':
        case 'cache':
        case 'private-cache':
        case 'unstable-cache':
        case undefined:
            stream = (0, _server.renderToReadableStream)(resultPromise, clientReferenceManifest.clientModules, {
                environmentName: 'Cache',
                filterStackFrame,
                temporaryReferences,
                onError: handleError
            });
            break;
        default:
            // Exhaustiveness check (compiled-out TypeScript `never` assertion).
            return outerWorkUnitStore;
    }
    // One half is returned to the caller immediately; the other is buffered
    // into the cache entry by collectResult.
    const [returnStream, savedStream] = stream.tee();
    const pendingCacheEntry = collectResult(savedStream, workStore, cacheContext, innerCacheStore, startTime, errors);
    if (process.env.NODE_ENV === 'development') {
        // Name the stream for React DevTools.
        // @ts-expect-error
        returnStream.name = 'use cache';
    }
    return {
        type: 'cached',
        // Return the stream as we're creating it. This means that if it ends up
        // erroring we cannot return a stale-if-error version but it allows
        // streaming back the result earlier.
        stream: returnStream,
        pendingCacheEntry
    };
}
/**
 * Splits a cache entry into two independently consumable entries by tee-ing
 * its value stream. NOTE: intentionally mutates the input entry (its value is
 * replaced with one half of the tee) and returns [originalEntry, clonedEntry].
 */
function cloneCacheEntry(entry) {
    const [streamA, streamB] = entry.value.tee();
    entry.value = streamA;
    const clonedEntry = {
        value: streamB,
        timestamp: entry.timestamp,
        revalidate: entry.revalidate,
        expire: entry.expire,
        stale: entry.stale,
        tags: entry.tags
    };
    return [
        entry,
        clonedEntry
    ];
}
/** Awaits a pending cache entry and clones it (see cloneCacheEntry). */
async function clonePendingCacheEntry(pendingCacheEntry) {
    const entry = await pendingCacheEntry;
    return cloneCacheEntry(entry);
}
/** Resolves a promised pair (from clonePendingCacheEntry) and picks element i. */
async function getNthCacheEntry(split, i) {
    return (await split)[i];
}
/**
 * Serializes a FormData into a single string suitable for use as a cache key.
 * Each key/value is written as <hexLength>:<text> so that no delimiter
 * escaping is needed and different structures can never collide.
 */
async function encodeFormData(formData) {
    let result = '';
    for (let [key, value] of formData){
        // We don't need this key to be serializable but from a security perspective it should not be
        // possible to generate a string that looks the same from a different structure. To ensure this
        // we need a delimiter between fields but just using a delimiter is not enough since a string
        // might contain that delimiter. We use the length of each field as the delimiter to avoid
        // escaping the values.
        result += key.length.toString(16) + ':' + key;
        let stringValue;
        if (typeof value === 'string') {
            stringValue = value;
        } else {
            // The FormData might contain binary data that is not valid UTF-8 so this cache
            // key may generate a UCS-2 string. Passing this to another service needs to be
            // aware that the key might not be compatible.
            const arrayBuffer = await value.arrayBuffer();
            if (arrayBuffer.byteLength % 2 === 0) {
                stringValue = String.fromCodePoint(...new Uint16Array(arrayBuffer));
            } else {
                // Odd byte length: encode all but the last byte as 16-bit units,
                // then append the trailing byte as its own code point.
                stringValue = String.fromCodePoint(...new Uint16Array(arrayBuffer, 0, (arrayBuffer.byteLength - 1) / 2)) + String.fromCodePoint(new Uint8Array(arrayBuffer, arrayBuffer.byteLength - 1, 1)[0]);
            }
        }
        result += stringValue.length.toString(16) + ':' + stringValue;
    }
    return result;
}
/**
 * Wraps a ReadableStream so that cacheSignal.endRead() is called exactly when
 * the stream is fully consumed, balancing an earlier beginRead().
 */
function createTrackedReadableStream(stream, cacheSignal) {
    const reader = stream.getReader();
    return new ReadableStream({
        async pull (controller) {
            const { done, value } = await reader.read();
            if (done) {
                controller.close();
                cacheSignal.endRead();
            } else {
                controller.enqueue(value);
            }
        }
    });
}
/**
 * The exported "use cache" wrapper. Resolves the cache handler for `kind`,
 * builds the cache key from the bound arguments, and serves or generates the
 * entry. (Function body continues beyond this chunk.)
 */
async function cache(kind, id, boundArgsLength, originalFn, argsObj) {
    let args = Array.prototype.slice.call(argsObj);
    const isPrivate = kind === 'private';
    // Private caches are currently only stored in the Resume Data Cache (RDC),
    // and not in cache handlers.
    const cacheHandler = isPrivate ? undefined : (0, _handlers.getCacheHandler)(kind);
    if (!isPrivate && !cacheHandler) {
        throw Object.defineProperty(new Error('Unknown cache handler: ' + kind), "__NEXT_ERROR_CODE", { value: "E248", enumerable: false, configurable: true });
    }
    // Created eagerly so a later timeout carries a stack that points at the caller.
    const timeoutError = new _usecacheerrors.UseCacheTimeoutError();
    Error.captureStackTrace(timeoutError, cache);
    const wrapAsInvalidDynamicUsageError = (error, workStore)=>{
        Error.captureStackTrace(error, cache);
        workStore.invalidDynamicUsageError ??= error;
        return error;
    };
    const workStore = _workasyncstorageexternal.workAsyncStorage.getStore();
    if (workStore === undefined) { throw Object.defineProperty(new Error('"use cache" cannot be used outside of App Router. 
Expected a WorkStore.'), "__NEXT_ERROR_CODE", { value: "E279", enumerable: false, configurable: true }); } const workUnitStore = _workunitasyncstorageexternal.workUnitAsyncStorage.getStore(); const name = originalFn.name; let fn = originalFn; let cacheContext; if (isPrivate) { const expression = '"use cache: private"'; switch(workUnitStore == null ? void 0 : workUnitStore.type){ // "use cache: private" is dynamic in prerendering contexts. case 'prerender': return (0, _dynamicrenderingutils.makeHangingPromise)(workUnitStore.renderSignal, workStore.route, expression); case 'prerender-ppr': return (0, _dynamicrendering.postponeWithTracking)(workStore.route, expression, workUnitStore.dynamicTracking); case 'prerender-legacy': return (0, _dynamicrendering.throwToInterruptStaticGeneration)(expression, workStore, workUnitStore); case 'prerender-client': throw Object.defineProperty(new _invarianterror.InvariantError(`${expression} must not be used within a client component. Next.js should be preventing ${expression} from being allowed in client components statically, but did not in this case.`), "__NEXT_ERROR_CODE", { value: "E741", enumerable: false, configurable: true }); case 'unstable-cache': { throw wrapAsInvalidDynamicUsageError(Object.defineProperty(new Error(// TODO: Add a link to an error documentation page when we have one. `${expression} must not be used within \`unstable_cache()\`.`), "__NEXT_ERROR_CODE", { value: "E744", enumerable: false, configurable: true }), workStore); } case 'cache': { throw wrapAsInvalidDynamicUsageError(Object.defineProperty(new Error(// TODO: Add a link to an error documentation page when we have one. `${expression} must not be used within "use cache". 
It can only be nested inside of another ${expression}.`), "__NEXT_ERROR_CODE", { value: "E735", enumerable: false, configurable: true }), workStore); } case 'request': case 'prerender-runtime': case 'private-cache': cacheContext = { kind: 'private', outerWorkUnitStore: workUnitStore }; break; case undefined: throw wrapAsInvalidDynamicUsageError(Object.defineProperty(new Error(// TODO: Add a link to an error documentation page when we have one. `${expression} cannot be used outside of a request context.`), "__NEXT_ERROR_CODE", { value: "E754", enumerable: false, configurable: true }), workStore); default: workUnitStore; // This is dead code, but without throwing an error here, TypeScript // will assume that cacheContext is used before being assigned. throw Object.defineProperty(new _invarianterror.InvariantError(`Unexpected work unit store.`), "__NEXT_ERROR_CODE", { value: "E737", enumerable: false, configurable: true }); } } else { switch(workUnitStore == null ? void 0 : workUnitStore.type){ case 'prerender-client': const expression = '"use cache"'; throw Object.defineProperty(new _invarianterror.InvariantError(`${expression} must not be used within a client component. Next.js should be preventing ${expression} from being allowed in client components statically, but did not in this case.`), "__NEXT_ERROR_CODE", { value: "E741", enumerable: false, configurable: true }); case 'prerender': case 'prerender-runtime': case 'prerender-ppr': case 'prerender-legacy': case 'request': case 'cache': case 'private-cache': // TODO: We should probably forbid nesting "use cache" inside // unstable_cache. (fallthrough) case 'unstable-cache': case undefined: cacheContext = { kind: 'public', outerWorkUnitStore: workUnitStore }; break; default: workUnitStore; // This is dead code, but without throwing an error here, TypeScript // will assume that cacheContext is used before being assigned. 
throw Object.defineProperty(new _invarianterror.InvariantError(`Unexpected work unit store.`), "__NEXT_ERROR_CODE", { value: "E737", enumerable: false, configurable: true }); } } // Get the clientReferenceManifest while we're still in the outer Context. // In case getClientReferenceManifestSingleton is implemented using AsyncLocalStorage. const clientReferenceManifest = (0, _encryptionutils.getClientReferenceManifestForRsc)(); // Because the Action ID is not yet unique per implementation of that Action we can't // safely reuse the results across builds yet. In the meantime we add the buildId to the // arguments as a seed to ensure they're not reused. Remove this once Action IDs hash // the implementation. const buildId = workStore.buildId; // In dev mode, when the HMR refresh hash is set, we include it in the // cache key. This ensures that cache entries are not reused when server // components have been edited. This is a very coarse approach. But it's // also only a temporary solution until Action IDs are unique per // implementation. Remove this once Action IDs hash the implementation. const hmrRefreshHash = workUnitStore && (0, _workunitasyncstorageexternal.getHmrRefreshHash)(workStore, workUnitStore); const hangingInputAbortSignal = workUnitStore ? (0, _dynamicrendering.createHangingInputAbortSignal)(workUnitStore) : undefined; if (cacheContext.kind === 'private') { const { outerWorkUnitStore } = cacheContext; switch(outerWorkUnitStore.type){ case 'prerender-runtime': { // In a runtime prerender, we have to make sure that APIs that would hang during a static prerender // are resolved with a delay, in the runtime stage. Private caches are one of these. if (outerWorkUnitStore.runtimeStagePromise) { await outerWorkUnitStore.runtimeStagePromise; } break; } case 'request': { if (process.env.NODE_ENV === 'development') { // Similar to runtime prerenders, private caches should not resolve in the static stage // of a dev request, so we delay them. 
await (0, _dynamicrenderingutils.makeDevtoolsIOAwarePromise)(undefined, outerWorkUnitStore, _stagedrendering.RenderStage.Runtime); } break; } case 'private-cache': break; default: { outerWorkUnitStore; } } } let isPageOrLayoutSegmentFunction = false; // For page and layout segment functions (i.e. the page/layout component, // or generateMetadata/generateViewport), the cache function is // overwritten, which allows us to apply special handling for params and // searchParams. For pages and layouts we're using the outer params prop, // and not the inner one that was serialized/deserialized. While it's not // generally true for "use cache" args, in the case of `params` the inner // and outer object are essentially equivalent, so this is safe to do // (including fallback params that are hanging promises). It allows us to // avoid waiting for the timeout, when prerendering a fallback shell of a // cached page or layout that awaits params. if (isPageSegmentFunction(args)) { isPageOrLayoutSegmentFunction = true; const [{ params: outerParams, searchParams: outerSearchParams }, ...otherOuterArgs] = args; const props = { params: outerParams }; if (isPrivate) { // Private caches allow accessing search params. We need to include // them in the serialized args and when generating the cache key. props.searchParams = outerSearchParams; } args = [ props, ...otherOuterArgs ]; fn = ({ [name]: async ({ params: _innerParams, searchParams: innerSearchParams }, ...otherInnerArgs)=>originalFn.apply(null, [ { params: outerParams, searchParams: innerSearchParams ?? // For public caches, search params are omitted from the cache // key (and the serialized args) to avoid mismatches between // prerendering and resuming a cached page that does not // access search params. This is also the reason why we're not // using a hanging promise for search params. For cached pages // that do access them, which is an invalid dynamic usage, we // need to ensure that an error is shown. 
(0, _searchparams.makeErroringSearchParamsForUseCache)(workStore) }, ...otherInnerArgs ]) })[name]; } else if (isLayoutSegmentFunction(args)) { isPageOrLayoutSegmentFunction = true; const [{ params: outerParams, $$isLayout, ...outerSlots }, ...otherOuterArgs] = args; // Overwrite the props to omit $$isLayout. Note that slots are only // passed to the layout component (if any are defined), and not to // generateMetadata nor generateViewport. For those functions, // outerSlots/innerSlots is an empty object, which is fine because we're // just spreading it into the props. args = [ { params: outerParams, ...outerSlots }, ...otherOuterArgs ]; fn = ({ [name]: async ({ params: _innerParams, ...innerSlots }, ...otherInnerArgs)=>originalFn.apply(null, [ { params: outerParams, ...innerSlots }, ...otherInnerArgs ]) })[name]; } if (boundArgsLength > 0) { if (args.length === 0) { throw Object.defineProperty(new _invarianterror.InvariantError(`Expected the "use cache" function ${JSON.stringify(fn.name)} to receive its encrypted bound arguments as the first argument.`), "__NEXT_ERROR_CODE", { value: "E524", enumerable: false, configurable: true }); } const encryptedBoundArgs = args.shift(); const boundArgs = await (0, _encryption.decryptActionBoundArgs)(id, encryptedBoundArgs); if (!Array.isArray(boundArgs)) { throw Object.defineProperty(new _invarianterror.InvariantError(`Expected the bound arguments of "use cache" function ${JSON.stringify(fn.name)} to deserialize into an array, got ${typeof boundArgs} instead.`), "__NEXT_ERROR_CODE", { value: "E581", enumerable: false, configurable: true }); } if (boundArgsLength !== boundArgs.length) { throw Object.defineProperty(new _invarianterror.InvariantError(`Expected the "use cache" function ${JSON.stringify(fn.name)} to receive ${boundArgsLength} bound arguments, got ${boundArgs.length} instead.`), "__NEXT_ERROR_CODE", { value: "E559", enumerable: false, configurable: true }); } args.unshift(boundArgs); } const temporaryReferences = 
(0, _client.createTemporaryReferenceSet)(); // For private caches, which are allowed to read cookies, we still don't // need to include the cookies in the cache key. This is because we don't // store the cache entries in a cache handler, but only in the Resume Data // Cache (RDC). Private caches are only used during dynamic requests and // runtime prefetches. For dynamic requests, the RDC is immutable, so it // does not include any private caches. For runtime prefetches, the RDC is // mutable, but only lives as long as the request, so the key does not // need to include cookies. const cacheKeyParts = hmrRefreshHash ? [ buildId, id, args, hmrRefreshHash ] : [ buildId, id, args ]; const encodeCacheKeyParts = ()=>(0, _client.encodeReply)(cacheKeyParts, { temporaryReferences, signal: hangingInputAbortSignal }); let encodedCacheKeyParts; switch(workUnitStore == null ? void 0 : workUnitStore.type){ case 'prerender-runtime': // We're currently only using `dynamicAccessAsyncStorage` for params, // which are always available in a runtime prerender, so they will never hang, // effectively making the tracking below a no-op. // However, a runtime prerender shares a lot of the semantics with a static prerender, // and might need to follow this codepath in the future // if we start using `dynamicAccessAsyncStorage` for other APIs. // // fallthrough case 'prerender': if (!isPageOrLayoutSegmentFunction) { // If the "use cache" function is not a page or layout segment // function, we need to track dynamic access already when encoding // the arguments. If params are passed explicitly into a "use cache" // function (as opposed to receiving them automatically in a page or // layout), we assume that the params are also accessed. This allows // us to abort early, and treat the function as dynamic, instead of // waiting for the timeout to be reached. 
const dynamicAccessAbortController = new AbortController(); encodedCacheKeyParts = await _dynamicaccessasyncstorageexternal.dynamicAccessAsyncStorage.run({ abortController: dynamicAccessAbortController }, encodeCacheKeyParts); if (dynamicAccessAbortController.signal.aborted) { return (0, _dynamicrenderingutils.makeHangingPromise)(workUnitStore.renderSignal, workStore.route, 'dynamic "use cache"'); } break; } // fallthrough case 'prerender-ppr': case 'prerender-legacy': case 'request': // TODO(restart-on-cache-miss): We need to handle params/searchParams on page components. // the promises will be tasky, so `encodeCacheKeyParts` will not resolve in the static stage. // We have not started a cache read at this point, so we might just miss the cache completely. // fallthrough case 'cache': case 'private-cache': case 'unstable-cache': case undefined: encodedCacheKeyParts = await encodeCacheKeyParts(); break; default: return workUnitStore; } const serializedCacheKey = typeof encodedCacheKeyParts === 'string' ? // Convert it to an ArrayBuffer if it wants to. encodedCacheKeyParts : await encodeFormData(encodedCacheKeyParts); let stream = undefined; // Get an immutable and mutable versions of the resume data cache. const prerenderResumeDataCache = workUnitStore ? (0, _workunitasyncstorageexternal.getPrerenderResumeDataCache)(workUnitStore) : null; const renderResumeDataCache = workUnitStore ? (0, _workunitasyncstorageexternal.getRenderResumeDataCache)(workUnitStore) : null; if (renderResumeDataCache) { const cacheSignal = workUnitStore ? 
(0, _workunitasyncstorageexternal.getCacheSignal)(workUnitStore) : null; if (cacheSignal) { cacheSignal.beginRead(); } const cachedEntry = renderResumeDataCache.cache.get(serializedCacheKey); if (cachedEntry !== undefined) { const existingEntry = await cachedEntry; if (workUnitStore !== undefined && existingEntry !== undefined) { if (existingEntry.revalidate === 0 || existingEntry.expire < _constants.DYNAMIC_EXPIRE) { switch(workUnitStore.type){ case 'prerender': // In a Dynamic I/O prerender, if the cache entry has // revalidate: 0 or if the expire time is under 5 minutes, // then we consider this cache entry dynamic as it's not worth // generating static pages for such data. It's better to leave // a dynamic hole that can be filled in during the resume with // a potentially cached entry. if (cacheSignal) { cacheSignal.endRead(); } return (0, _dynamicrenderingutils.makeHangingPromise)(workUnitStore.renderSignal, workStore.route, 'dynamic "use cache"'); case 'prerender-runtime': { // In the final phase of a runtime prerender, we have to make // sure that APIs that would hang during a static prerender // are resolved with a delay, in the runtime stage. if (workUnitStore.runtimeStagePromise) { await workUnitStore.runtimeStagePromise; } break; } case 'request': { if (process.env.NODE_ENV === 'development') { // We delay the cache here so that it doesn't resolve in the static task -- // in a regular static prerender, it'd be a hanging promise, and we need to reflect that, // so it has to resolve later. // TODO(restart-on-cache-miss): Optimize this to avoid unnecessary restarts. // We don't end the cache read here, so this will always appear as a cache miss in the static stage, // and thus will cause a restart even if all caches are filled. 
await (0, _dynamicrenderingutils.makeDevtoolsIOAwarePromise)(undefined, workUnitStore, _stagedrendering.RenderStage.Runtime); } break; } case 'prerender-ppr': case 'prerender-legacy': case 'cache': case 'private-cache': case 'unstable-cache': break; default: workUnitStore; } } if (existingEntry.stale < _constants.RUNTIME_PREFETCH_DYNAMIC_STALE) { switch(workUnitStore.type){ case 'prerender-runtime': // In a runtime prerender, if the cache entry will become // stale in less then 30 seconds, we consider this cache entry // dynamic as it's not worth prefetching. It's better to leave // a dynamic hole that can be filled during the navigation. if (cacheSignal) { cacheSignal.endRead(); } return (0, _dynamicrenderingutils.makeHangingPromise)(workUnitStore.renderSignal, workStore.route, 'dynamic "use cache"'); case 'request': { if (process.env.NODE_ENV === 'development') { // We delay the cache here so that it doesn't resolve in the runtime phase -- // in a regular runtime prerender, it'd be a hanging promise, and we need to reflect that, // so it has to resolve later. // TODO(restart-on-cache-miss): Optimize this to avoid unnecessary restarts. // We don't end the cache read here, so this will always appear as a cache miss in the runtime stage, // and thus will cause a restart even if all caches are filled. await (0, _dynamicrenderingutils.makeDevtoolsIOAwarePromise)(undefined, workUnitStore, _stagedrendering.RenderStage.Dynamic); } break; } case 'prerender': case 'prerender-ppr': case 'prerender-legacy': case 'cache': case 'private-cache': case 'unstable-cache': break; default: workUnitStore; } } } // We want to make sure we only propagate cache life & tags if the // entry was *not* omitted from the prerender. So we only do this // after the above early returns. 
propagateCacheLifeAndTags(cacheContext, existingEntry); const [streamA, streamB] = existingEntry.value.tee(); existingEntry.value = streamB; if (cacheSignal) { // When we have a cacheSignal we need to block on reading the cache // entry before ending the read. stream = createTrackedReadableStream(streamA, cacheSignal); } else { stream = streamA; } } else { if (cacheSignal) { cacheSignal.endRead(); } if (workUnitStore) { switch(workUnitStore.type){ case 'prerender': // If `allowEmptyStaticShell` is true, and thus a prefilled // resume data cache was provided, then a cache miss means that // params were part of the cache key. In this case, we can make // this cache function a dynamic hole in the shell (or produce // an empty shell if there's no parent suspense boundary). // Currently, this also includes layouts and pages that don't // read params, which will be improved when we implement // NAR-136. Otherwise, we assume that if params are passed