// next — The React Framework
// (npm registry listing header: "Version: … 1,000 lines • 84.7 kB • JavaScript"
// — page-scrape residue retained here as a comment so the file stays valid JS.)
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "cache", {
enumerable: true,
get: function() {
return cache;
}
});
const _server = require("react-server-dom-webpack/server");
const _client = require("react-server-dom-webpack/client");
const _static = require("react-server-dom-webpack/static");
const _workasyncstorageexternal = require("../app-render/work-async-storage.external");
const _workunitasyncstorageexternal = require("../app-render/work-unit-async-storage.external");
const _dynamicrenderingutils = require("../dynamic-rendering-utils");
const _manifestssingleton = require("../app-render/manifests-singleton");
const _encryption = require("../app-render/encryption");
const _invarianterror = require("../../shared/lib/invariant-error");
const _createerrorhandler = require("../app-render/create-error-handler");
const _constants = require("./constants");
const _constants1 = require("../../lib/constants");
const _handlers = require("./handlers");
const _usecacheerrors = require("./use-cache-errors");
const _dynamicrendering = require("../app-render/dynamic-rendering");
const _searchparams = require("../request/search-params");
const _lazyresult = require("../lib/lazy-result");
const _dynamicaccessasyncstorageexternal = require("../app-render/dynamic-access-async-storage.external");
const _stagedrendering = require("../app-render/staged-rendering");
const _log = /*#__PURE__*/ _interop_require_wildcard(require("../../build/output/log"));
function _getRequireWildcardCache(nodeInterop) {
    // Environments without WeakMap get no caching at all.
    if (typeof WeakMap !== "function") return null;
    // Two separate caches, one per interop flavor. They are created lazily on
    // the first call, after which this function replaces itself with a plain
    // lookup so subsequent calls skip the setup.
    const babelInteropCache = new WeakMap();
    const nodeInteropCache = new WeakMap();
    _getRequireWildcardCache = function(flavor) {
        return flavor ? nodeInteropCache : babelInteropCache;
    };
    return _getRequireWildcardCache(nodeInterop);
}
function _interop_require_wildcard(obj, nodeInterop) {
    // Already a Babel-style ES module namespace: use it as-is.
    if (!nodeInterop && obj && obj.__esModule) {
        return obj;
    }
    // Primitives and null are wrapped as a default-only namespace.
    if (obj === null || (typeof obj !== "object" && typeof obj !== "function")) {
        return { default: obj };
    }
    // Reuse a previously built namespace object for the same module, if any.
    const cache = _getRequireWildcardCache(nodeInterop);
    if (cache && cache.has(obj)) {
        return cache.get(obj);
    }
    const newObj = { __proto__: null };
    const hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor;
    for (const key in obj) {
        if (key === "default" || !Object.prototype.hasOwnProperty.call(obj, key)) {
            continue;
        }
        // Preserve accessors instead of snapshotting their current value.
        const desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null;
        if (desc && (desc.get || desc.set)) {
            Object.defineProperty(newObj, key, desc);
        } else {
            newObj[key] = obj[key];
        }
    }
    newObj.default = obj;
    if (cache) {
        cache.set(obj, newObj);
    }
    return newObj;
}
// Whether this code is executing in the Edge runtime.
const isEdgeRuntime = process.env.NEXT_RUNTIME === 'edge';
// Optional debug logger, enabled via NEXT_PRIVATE_DEBUG_CACHE. Left undefined
// when disabled so call sites can guard with a truthiness check.
const debug = process.env.NEXT_PRIVATE_DEBUG_CACHE ? console.debug.bind(console, 'use-cache:') : undefined;
// Dev-only source-map helpers for readable stack traces in RSC errors; omitted
// entirely in production builds.
const filterStackFrame = process.env.NODE_ENV !== 'production' ? require('../lib/source-maps').filterStackFrameDEV : undefined;
const findSourceMapURL = process.env.NODE_ENV !== 'production' ? require('../lib/source-maps').findSourceMapURLDEV : undefined;
// Error messages for invalid nesting: a short-lived "use cache" nested inside
// an outer "use cache" that has no explicit cacheLife during prerendering.
const nestedCacheZeroRevalidateErrorMessage = `A "use cache" with zero \`revalidate\` is nested inside another "use cache" ` + `that has no explicit \`cacheLife\`, which is not allowed during ` + `prerendering. Add \`cacheLife()\` to the outer \`"use cache"\` to choose ` + `whether it should be prerendered (with non-zero \`revalidate\`) or remain ` + `dynamic (with zero \`revalidate\`). Read more: ` + `https://nextjs.org/docs/messages/nested-use-cache-no-explicit-cachelife`;
const nestedCacheShortExpireErrorMessage = `A "use cache" with short \`expire\` (under 5 minutes) is nested inside ` + `another "use cache" that has no explicit \`cacheLife\`, which is not ` + `allowed during prerendering. Add \`cacheLife()\` to the outer \`"use cache"\` ` + `to choose whether it should be prerendered (with longer \`expire\`) or remain ` + `dynamic (with short \`expire\`). Read more: ` + `https://nextjs.org/docs/messages/nested-use-cache-no-explicit-cachelife`;
// Tracks which root params each cache function has historically read. Used to
// compute the specific cache key upfront on subsequent invocations. In-memory
// only — after server restart, the coarse-key redirect entry in the cache
// handler provides fallback.
const knownRootParamsByFunctionId = new Map();
// Records the given root param names as read by the cache function `id`, and
// returns the accumulated (union) set for that function.
function addKnownRootParamNames(id, names) {
    let knownNames = knownRootParamsByFunctionId.get(id);
    if (knownNames === undefined) {
        // First sighting of this function: start a fresh set.
        knownNames = new Set(names);
        knownRootParamsByFunctionId.set(id, knownNames);
        return knownNames;
    }
    for (const name of names) {
        knownNames.add(name);
    }
    return knownNames;
}
// Builds the cache key suffix encoding the values of the given root params.
// Returns '' when no root params were read, so the coarse key is used as-is.
function computeRootParamsCacheKeySuffix(rootParams, paramNames) {
    if (paramNames.size === 0) {
        return '';
    }
    // Sort the names so the suffix is stable regardless of read order.
    const sortedNames = Array.from(paramNames).sort();
    const pairs = sortedNames.map((paramName) => [paramName, rootParams[paramName]]);
    return JSON.stringify(pairs);
}
// Stores one branch of the pending cache result in the resume data cache (if
// one is active) and returns the other branch for further use. Without an RDC
// the pending result is passed through untouched.
function saveToResumeDataCache(prerenderResumeDataCache, serializedCacheKey, pendingCacheResult) {
    if (!prerenderResumeDataCache) {
        return pendingCacheResult;
    }
    const splitResults = clonePendingCacheResult(pendingCacheResult);
    const savedCacheResult = getNthCacheResult(splitResults, 0);
    const resumeDataCacheResult = getNthCacheResult(splitResults, 1);
    // The RDC is per-page and root params are fixed within a page, so we always
    // use the coarse key (without root param suffix). Unlike the cache handler,
    // the RDC doesn't need root-param-specific keys for isolation.
    prerenderResumeDataCache.cache.set(serializedCacheKey, resumeDataCacheResult);
    return savedCacheResult;
}
// Persists a collected cache result via the cache handler. If the cached
// function read root params, the full entry is written under a root-param-
// specific key and a lightweight "redirect" entry (whose tags encode the root
// param names) is written under the coarse key instead. All handler writes are
// queued on workStore.pendingRevalidateWrites.
function saveToCacheHandler(cacheHandler, workStore, id, serializedCacheKey, savedCacheResult, rootParams) {
const pendingCoarseEntry = savedCacheResult.then((collectedResult)=>{
const { entry: fullEntry, readRootParamNames } = collectedResult;
// Use the combined set (union of all historically observed reads) for both
// the specific key and the redirect entry's tags. The read path computes
// cacheHandlerKey from this same union (knownRootParamsByFunctionId), so
// the write path must use the identical set to land on the same specific
// key. If we used only the current invocation's reads, a function that
// conditionally reads different root params across invocations would
// scatter entries across different specific keys, making previous entries
// unreachable from the read path's union-based lookup.
const rootParamNames = readRootParamNames ? addKnownRootParamNames(id, readRootParamNames) : knownRootParamsByFunctionId.get(id);
if (rootParamNames && rootParamNames.size > 0 && rootParams) {
// Write the full entry under the root-param-specific key.
const specificKey = serializedCacheKey + computeRootParamsCacheKeySuffix(rootParams, rootParamNames);
const specificSetPromise = cacheHandler.set(specificKey, Promise.resolve(fullEntry));
workStore.pendingRevalidateWrites ??= [];
workStore.pendingRevalidateWrites.push(specificSetPromise);
// Return a redirect entry for the coarse key. On a cold server (empty
// knownRootParamsByFunctionId), this entry's tags tell us which root
// params to include in the specific key for the follow-up lookup.
const rootParamTags = [
...rootParamNames
].map((paramName)=>_constants1.NEXT_CACHE_ROOT_PARAM_TAG_ID + paramName);
return {
value: new ReadableStream({
start (controller) {
// Single byte so the entry has non-zero size in LRU caches.
controller.enqueue(new Uint8Array([
0
]));
controller.close();
}
}),
tags: [
...fullEntry.tags,
...rootParamTags
],
stale: fullEntry.stale,
timestamp: fullEntry.timestamp,
expire: fullEntry.expire,
revalidate: fullEntry.revalidate
};
}
// No root params were read: the full entry itself goes under the coarse key.
return fullEntry;
});
const promise = cacheHandler.set(serializedCacheKey, pendingCoarseEntry);
workStore.pendingRevalidateWrites ??= [];
workStore.pendingRevalidateWrites.push(promise);
}
// Entry point for generating a fresh cache entry for a "use cache" function.
// Delegates to generateCacheEntryWithRestoredWorkStore via a clean ALS
// snapshot (see below).
function generateCacheEntry(workStore, cacheContext, clientReferenceManifest, encodedArguments, fn, timeoutError) {
// We need to run this inside a clean AsyncLocalStorage snapshot so that the cache
// generation cannot read anything from the context we're currently executing which
// might include request specific things like cookies() inside a React.cache().
// Note: It is important that we await at least once before this because it lets us
// pop out of any stack specific contexts as well - aka "Sync" Local Storage.
return workStore.runInCleanSnapshot(generateCacheEntryWithRestoredWorkStore, workStore, cacheContext, clientReferenceManifest, encodedArguments, fn, timeoutError);
}
// Re-establishes the WorkStore (only) inside the clean snapshot, then proceeds
// to set up the cache context.
function generateCacheEntryWithRestoredWorkStore(workStore, cacheContext, clientReferenceManifest, encodedArguments, fn, timeoutError) {
// Since we cleared the AsyncLocalStorage we need to restore the workStore.
// Note: We explicitly don't restore the RequestStore nor the PrerenderStore.
// We don't want any request specific information leaking and we don't want to create a
// bloated fake request mock for every cache call. So any feature that currently lives
// in RequestStore but should be available to Caches need to move to WorkStore.
// PrerenderStore is not needed inside the cache scope because the outer most one will
// be the one to report its result to the outer Prerender.
return _workasyncstorageexternal.workAsyncStorage.run(workStore, generateCacheEntryWithCacheContext, workStore, cacheContext, clientReferenceManifest, encodedArguments, fn, timeoutError);
}
// Creates the inner work unit store that the cached function executes under.
// Private caches ("use cache: private") keep access to request-scoped data
// (headers, cookies, draft mode, root params) from the outer store; public
// caches get a request-independent 'cache' store and track which root params
// are read via readRootParamNames.
function createUseCacheStore(workStore, cacheContext, defaultCacheLife) {
if (cacheContext.kind === 'private') {
const outerWorkUnitStore = cacheContext.outerWorkUnitStore;
return {
type: 'private-cache',
phase: 'render',
implicitTags: outerWorkUnitStore == null ? void 0 : outerWorkUnitStore.implicitTags,
// Cache life starts at the default profile; inner calls may lower these.
revalidate: defaultCacheLife.revalidate,
expire: defaultCacheLife.expire,
stale: defaultCacheLife.stale,
explicitRevalidate: undefined,
explicitExpire: undefined,
explicitStale: undefined,
tags: null,
hmrRefreshHash: (0, _workunitasyncstorageexternal.getHmrRefreshHash)(outerWorkUnitStore),
isHmrRefresh: (0, _workunitasyncstorageexternal.isHmrRefresh)(outerWorkUnitStore),
serverComponentsHmrCache: (0, _workunitasyncstorageexternal.getServerComponentsHmrCache)(outerWorkUnitStore),
forceRevalidate: shouldForceRevalidate(workStore, outerWorkUnitStore),
draftMode: (0, _workunitasyncstorageexternal.getDraftModeProviderForCacheScope)(workStore, outerWorkUnitStore),
rootParams: outerWorkUnitStore.rootParams,
headers: outerWorkUnitStore.headers,
cookies: outerWorkUnitStore.cookies
};
} else {
// HMR refresh state is only inherited from enclosing cache/request stores.
let useCacheOrRequestStore;
const outerWorkUnitStore = cacheContext.outerWorkUnitStore;
switch(outerWorkUnitStore.type){
case 'cache':
case 'private-cache':
case 'request':
useCacheOrRequestStore = outerWorkUnitStore;
break;
case 'prerender-runtime':
case 'prerender':
case 'prerender-ppr':
case 'prerender-legacy':
case 'unstable-cache':
case 'generate-static-params':
break;
default:
// Exhaustiveness check (no runtime effect).
outerWorkUnitStore;
}
return {
type: 'cache',
phase: 'render',
implicitTags: outerWorkUnitStore.implicitTags,
revalidate: defaultCacheLife.revalidate,
expire: defaultCacheLife.expire,
stale: defaultCacheLife.stale,
explicitRevalidate: undefined,
explicitExpire: undefined,
explicitStale: undefined,
tags: null,
hmrRefreshHash: (0, _workunitasyncstorageexternal.getHmrRefreshHash)(outerWorkUnitStore),
isHmrRefresh: (useCacheOrRequestStore == null ? void 0 : useCacheOrRequestStore.isHmrRefresh) ?? false,
serverComponentsHmrCache: useCacheOrRequestStore == null ? void 0 : useCacheOrRequestStore.serverComponentsHmrCache,
forceRevalidate: shouldForceRevalidate(workStore, outerWorkUnitStore),
draftMode: (0, _workunitasyncstorageexternal.getDraftModeProviderForCacheScope)(workStore, outerWorkUnitStore),
rootParams: outerWorkUnitStore.rootParams,
// Collects the names of root params this cache function actually reads.
readRootParamNames: new Set()
};
}
}
// Asserts that a complete default cacheLife profile (revalidate, expire, and
// stale all present) was provided; throws an InvariantError otherwise.
function assertDefaultCacheLife(defaultCacheLife) {
    const isComplete = defaultCacheLife && defaultCacheLife.revalidate != null && defaultCacheLife.expire != null && defaultCacheLife.stale != null;
    if (isComplete) {
        return;
    }
    throw Object.defineProperty(new _invarianterror.InvariantError('A default cacheLife profile must always be provided.'), "__NEXT_ERROR_CODE", {
        value: "E750",
        enumerable: false,
        configurable: true
    });
}
// Validates cacheLife configuration, creates the inner cache store, and runs
// the actual entry generation under both the work-unit ALS (the cache store)
// and a fresh dynamic-access ALS scope (used to abort on dynamic API access).
function generateCacheEntryWithCacheContext(workStore, cacheContext, clientReferenceManifest, encodedArguments, fn, timeoutError) {
if (!workStore.cacheLifeProfiles) {
throw Object.defineProperty(new _invarianterror.InvariantError('cacheLifeProfiles should always be provided.'), "__NEXT_ERROR_CODE", {
value: "E748",
enumerable: false,
configurable: true
});
}
const defaultCacheLife = workStore.cacheLifeProfiles['default'];
assertDefaultCacheLife(defaultCacheLife);
// Initialize the Store for this Cache entry.
const cacheStore = createUseCacheStore(workStore, cacheContext, defaultCacheLife);
return _workunitasyncstorageexternal.workUnitAsyncStorage.run(cacheStore, ()=>_dynamicaccessasyncstorageexternal.dynamicAccessAsyncStorage.run({
abortController: new AbortController()
}, generateCacheEntryImpl, workStore, cacheContext, cacheStore, clientReferenceManifest, encodedArguments, fn, timeoutError));
}
// Merges a cache entry's tags into the outer revalidate store (deduplicated)
// and lowers the store's stale/revalidate/expire values to the entry's where
// the entry's are smaller (the outer scope's cache life is the minimum across
// all nested entries).
function propagateCacheLifeAndTagsToRevalidateStore(revalidateStore, entry) {
    const outerTags = revalidateStore.tags ?? (revalidateStore.tags = []);
    for (const tag of entry.tags) {
        if (outerTags.includes(tag)) continue;
        outerTags.push(tag);
    }
    if (entry.stale < revalidateStore.stale) {
        revalidateStore.stale = entry.stale;
    }
    if (entry.revalidate < revalidateStore.revalidate) {
        revalidateStore.revalidate = entry.revalidate;
    }
    if (entry.expire < revalidateStore.expire) {
        revalidateStore.expire = entry.expire;
    }
}
// Lowers the request store's stale time to the entry's, but only when the
// request store has a stale time set at all.
function propagateCacheStaleTimeToRequestStore(requestStore, entry) {
    const currentStale = requestStore.stale;
    if (currentStale === undefined) return;
    if (entry.stale < currentStale) {
        requestStore.stale = entry.stale;
    }
}
// Propagates a finished entry's metadata (tags, cacheLife values, and — for an
// enclosing "use cache" scope — the root param names that were read) to the
// outer work unit store, dispatched on the outer store's type.
function propagateCacheEntryMetadata(cacheContext, entry, readRootParamNames) {
if (cacheContext.kind === 'private') {
switch(cacheContext.outerWorkUnitStore.type){
case 'prerender-runtime':
case 'private-cache':
propagateCacheLifeAndTagsToRevalidateStore(cacheContext.outerWorkUnitStore, entry);
break;
case 'request':
propagateCacheStaleTimeToRequestStore(cacheContext.outerWorkUnitStore, entry);
break;
case undefined:
break;
default:
// Exhaustiveness check (no runtime effect).
cacheContext.outerWorkUnitStore;
}
} else {
switch(cacheContext.outerWorkUnitStore.type){
case 'cache':
// Nested "use cache": forward the root param names read by the inner
// scope so the outer scope's cache key can account for them too.
if (readRootParamNames) {
for (const paramName of readRootParamNames){
cacheContext.outerWorkUnitStore.readRootParamNames.add(paramName);
}
}
// fallthrough
case 'private-cache':
case 'prerender':
case 'prerender-runtime':
case 'prerender-ppr':
case 'prerender-legacy':
propagateCacheLifeAndTagsToRevalidateStore(cacheContext.outerWorkUnitStore, entry);
break;
case 'request':
propagateCacheStaleTimeToRequestStore(cacheContext.outerWorkUnitStore, entry);
break;
case 'unstable-cache':
case 'generate-static-params':
break;
default:
// Exhaustiveness check (no runtime effect).
cacheContext.outerWorkUnitStore;
}
}
}
/**
 * Conditionally propagates cache life, tags, and root param names to the outer
 * context. During prerenders (`prerender` / `prerender-runtime`) and dev
 * cache-filling requests, propagation is deferred because the entry might be
 * omitted from the final prerender due to short expire/stale times. If omitted,
 * it should not affect the prerender. The final decision happens when the entry
 * is read from the resume data cache in the final render phase — at that point
 * `propagateCacheEntryMetadata` is called unconditionally (after the omission
 * checks have already filtered out short-lived entries).
 *
 * Note: Root param names are only propagated when the outer context is a
 * `cache` store (i.e. an enclosing `"use cache"` function), which is never
 * deferred. For prerender contexts, root param names are tracked separately
 * via `addKnownRootParamNames` in the resume data cache read path.
 */ function maybePropagateCacheEntryMetadata(cacheContext, entry, readRootParamNames) {
const outerWorkUnitStore = cacheContext.outerWorkUnitStore;
switch(outerWorkUnitStore.type){
case 'prerender':
case 'prerender-runtime':
{
// Deferred: see the omission rationale in the doc comment above.
break;
}
case 'request':
{
if (process.env.NODE_ENV === 'development' && outerWorkUnitStore.cacheSignal) {
// Dev cache-filling request: deferred, same as a prerender.
break;
}
// fallthrough
}
case 'private-cache':
case 'cache':
case 'unstable-cache':
case 'prerender-legacy':
case 'prerender-ppr':
{
propagateCacheEntryMetadata(cacheContext, entry, readRootParamNames);
break;
}
case 'generate-static-params':
break;
default:
{
// Exhaustiveness check (no runtime effect).
outerWorkUnitStore;
}
}
}
// Buffers the saved RSC stream to completion, then assembles the final cache
// entry: a replayable value stream, the start timestamp, the collected
// cacheLife values, and the tags gathered in the inner cache store. Unless
// propagation is skipped, the entry's metadata is also (maybe) propagated to
// the outer context and the outer cache signal's read is ended.
async function collectResult(savedStream, workStore, cacheContext, innerCacheStore, startTime, errors) {
// We create a buffered stream that collects all chunks until the end to
// ensure that RSC has finished rendering and therefore we have collected
// all tags. In the future the RSC API might allow for the equivalent of
// the allReady Promise that exists on SSR streams.
//
// If something errored or rejected anywhere in the render, we close
// the stream as errored. This lets a CacheHandler choose to save the
// partial result up until that point for future hits for a while to avoid
// unnecessary retries or not to retry. We use the end of the stream for
// this to avoid another complicated side-channel. A receiver has to consider
// that the stream might also error for other reasons anyway such as losing
// connection.
const buffer = [];
const reader = savedStream.getReader();
try {
for(let entry; !(entry = await reader.read()).done;){
buffer.push(entry.value);
}
} catch (error) {
errors.push(error);
}
// Replay the buffered chunks as a fresh stream; after the chunks, surface
// any invalid-dynamic-usage error or render error, otherwise close cleanly.
let idx = 0;
const bufferStream = new ReadableStream({
pull (controller) {
if (workStore.invalidDynamicUsageError) {
controller.error(workStore.invalidDynamicUsageError);
} else if (idx < buffer.length) {
controller.enqueue(buffer[idx++]);
} else if (errors.length > 0) {
// TODO: Should we use AggregateError here?
controller.error(errors[0]);
} else {
controller.close();
}
}
});
const collectedTags = innerCacheStore.tags;
// If cacheLife() was used to set an explicit revalidate time we use that.
// Otherwise, we use the lowest of all inner fetch()/unstable_cache() or nested "use cache".
// If they're lower than our default.
const collectedRevalidate = innerCacheStore.explicitRevalidate !== undefined ? innerCacheStore.explicitRevalidate : innerCacheStore.revalidate;
const collectedExpire = innerCacheStore.explicitExpire !== undefined ? innerCacheStore.explicitExpire : innerCacheStore.expire;
const collectedStale = innerCacheStore.explicitStale !== undefined ? innerCacheStore.explicitStale : innerCacheStore.stale;
const entry = {
value: bufferStream,
timestamp: startTime,
revalidate: collectedRevalidate,
expire: collectedExpire,
stale: collectedStale,
tags: collectedTags === null ? [] : collectedTags
};
if (!cacheContext.skipPropagation) {
maybePropagateCacheEntryMetadata(cacheContext, entry, innerCacheStore.type === 'cache' ? innerCacheStore.readRootParamNames : undefined);
const cacheSignal = (0, _workunitasyncstorageexternal.getCacheSignal)(cacheContext.outerWorkUnitStore);
if (cacheSignal) {
cacheSignal.endRead();
}
}
return {
entry,
hasExplicitRevalidate: innerCacheStore.explicitRevalidate !== undefined,
hasExplicitExpire: innerCacheStore.explicitExpire !== undefined,
readRootParamNames: innerCacheStore.type === 'cache' ? innerCacheStore.readRootParamNames : undefined
};
}
// Runs the cached function inside the prepared cache scope: decodes the
// encoded arguments, renders the function's result to an RSC stream — via
// prerender() during prerenders (with a 50s timeout and dynamic-access abort
// handling) or renderToReadableStream() otherwise — then tees the stream into
// one half returned to the caller and one half collected into the cache entry.
async function generateCacheEntryImpl(workStore, cacheContext, innerCacheStore, clientReferenceManifest, encodedArguments, fn, timeoutError) {
const temporaryReferences = (0, _server.createTemporaryReferenceSet)();
const outerWorkUnitStore = cacheContext.outerWorkUnitStore;
// Decode the serialized arguments. String form decodes directly; otherwise
// the chunks are replayed through an async iterable.
const [, , args] = typeof encodedArguments === 'string' ? await (0, _server.decodeReply)(encodedArguments, (0, _manifestssingleton.getServerModuleMap)(), {
temporaryReferences
}) : await (0, _server.decodeReplyFromAsyncIterable)({
async *[Symbol.asyncIterator] () {
for (const entry of encodedArguments){
yield entry;
}
switch(outerWorkUnitStore.type){
case 'prerender-runtime':
case 'prerender':
// The encoded arguments might contain hanging promises. In
// this case we don't want to reject with "Error: Connection
// closed.", so we intentionally keep the iterable alive. This
// is similar to the halting trick that we do while rendering.
await new Promise((resolve)=>{
if (outerWorkUnitStore.renderSignal.aborted) {
resolve();
} else {
outerWorkUnitStore.renderSignal.addEventListener('abort', ()=>resolve(), {
once: true
});
}
});
break;
case 'prerender-ppr':
case 'prerender-legacy':
case 'request':
case 'cache':
case 'private-cache':
case 'unstable-cache':
case 'generate-static-params':
break;
default:
// Exhaustiveness check (no runtime effect).
outerWorkUnitStore;
}
}
}, (0, _manifestssingleton.getServerModuleMap)(), {
temporaryReferences
});
// Track the timestamp when we started computing the result.
const startTime = performance.timeOrigin + performance.now();
// Invoke the inner function to load a new result. We delay the invocation
// though, until React awaits the promise so that React's request store (ALS)
// is available when the function is invoked. This allows us, for example, to
// capture logs so that we can later replay them.
const resultPromise = (0, _lazyresult.createLazyResult)(fn.bind(null, ...args));
// Errors observed during rendering; surfaced at the end of the buffered
// stream in collectResult.
let errors = [];
// In the "Cache" environment, we only need to make sure that the error
// digests are handled correctly. Error formatting and reporting is not
// necessary here; the errors are encoded in the stream, and will be reported
// in the "Server" environment.
const handleError = (0, _createerrorhandler.createReactServerErrorHandler)(process.env.NODE_ENV === 'development', workStore.isBuildTimePrerendering ?? false, workStore.reactServerErrorsByDigest, (error)=>{
// In production, we log the original error here. It gets a digest that
// can be used to associate the error with the obfuscated error that might
// be logged if the error is caught. In development, we prefer logging the
// transported error in the server environment. It's not obfuscated and
// also includes the (dev-only) environment name.
if (process.env.NODE_ENV === 'production') {
_log.error(error);
}
errors.push(error);
});
let stream;
switch(outerWorkUnitStore.type){
case 'prerender-runtime':
case 'prerender':
var _dynamicAccessAsyncStorage_getStore;
const timeoutAbortController = new AbortController();
// If we're prerendering, we give you 50 seconds to fill a cache entry.
// Otherwise we assume you stalled on hanging input and de-opt. This needs
// to be lower than just the general timeout of 60 seconds.
const timer = setTimeout(()=>{
workStore.invalidDynamicUsageError = timeoutError;
timeoutAbortController.abort(timeoutError);
}, 50000);
const dynamicAccessAbortSignal = (_dynamicAccessAsyncStorage_getStore = _dynamicaccessasyncstorageexternal.dynamicAccessAsyncStorage.getStore()) == null ? void 0 : _dynamicAccessAsyncStorage_getStore.abortController.signal;
// Abort on whichever comes first: dynamic access, the outer render being
// aborted, or the timeout.
const abortSignal = dynamicAccessAbortSignal ? AbortSignal.any([
dynamicAccessAbortSignal,
outerWorkUnitStore.renderSignal,
timeoutAbortController.signal
]) : timeoutAbortController.signal;
const { prelude } = await (0, _static.prerender)(resultPromise, clientReferenceManifest.clientModules, {
environmentName: 'Cache',
filterStackFrame,
signal: abortSignal,
temporaryReferences,
onError (error) {
// The abort reason itself is not a render error; ignore it.
if (abortSignal.aborted && abortSignal.reason === error) {
return undefined;
}
return handleError(error);
}
});
clearTimeout(timer);
if (timeoutAbortController.signal.aborted) {
// When the timeout is reached we always error the stream. Even for
// fallback shell prerenders we don't want to return a hanging promise,
// which would allow the function to become a dynamic hole. Because that
// would mean that a non-empty shell could be generated which would be
// subject to revalidation, and we don't want to create long
// revalidation times.
stream = new ReadableStream({
start (controller) {
controller.error(timeoutAbortController.signal.reason);
}
});
} else if (dynamicAccessAbortSignal == null ? void 0 : dynamicAccessAbortSignal.aborted) {
// If the prerender is aborted because of dynamic access (e.g. reading
// fallback params), we return a hanging promise. This essentially makes
// the "use cache" function dynamic.
const hangingPromise = (0, _dynamicrenderingutils.makeHangingPromise)(outerWorkUnitStore.renderSignal, workStore.route, 'dynamic "use cache"');
if (outerWorkUnitStore.cacheSignal) {
outerWorkUnitStore.cacheSignal.endRead();
}
return {
type: 'prerender-dynamic',
hangingPromise
};
} else {
stream = prelude;
}
break;
case 'request':
// If we're filling caches for a staged render, make sure that
// it takes at least a task, so we'll always notice a cache miss between stages.
//
// TODO(restart-on-cache-miss): This is suboptimal.
// Ideally we wouldn't need to restart for microtasky caches,
// but the current logic for omitting short-lived caches only works correctly
// if we do a second render, so that's the best we can do until we refactor that.
if (process.env.NODE_ENV === 'development' && outerWorkUnitStore.cacheSignal) {
await new Promise((resolve)=>setTimeout(resolve));
}
// fallthrough
case 'prerender-ppr':
case 'prerender-legacy':
case 'cache':
case 'private-cache':
case 'unstable-cache':
case 'generate-static-params':
stream = (0, _server.renderToReadableStream)(resultPromise, clientReferenceManifest.clientModules, {
environmentName: 'Cache',
filterStackFrame,
temporaryReferences,
onError: handleError
});
break;
default:
// Unreachable for known store types; satisfies exhaustiveness.
return outerWorkUnitStore;
}
// Split the stream: one half goes back to the caller immediately, the other
// is buffered into the cache entry by collectResult.
const [returnStream, savedStream] = stream.tee();
const pendingCacheResult = collectResult(savedStream, workStore, cacheContext, innerCacheStore, startTime, errors);
if (process.env.NODE_ENV === 'development') {
// Name the stream for React DevTools.
// @ts-expect-error
returnStream.name = 'use cache';
}
return {
type: 'cached',
// Return the stream as we're creating it. This means that if it ends up
// erroring we cannot return a stale-if-error version but it allows
// streaming back the result earlier.
stream: returnStream,
pendingCacheResult
};
}
// Tees the entry's value stream so it can be consumed twice. The original
// entry is mutated in place to hold one branch; a shallow copy holding the
// other branch is returned alongside it.
function cloneCacheEntry(entry) {
    const [originalBranch, clonedBranch] = entry.value.tee();
    entry.value = originalBranch;
    const clonedEntry = {
        value: clonedBranch,
        timestamp: entry.timestamp,
        revalidate: entry.revalidate,
        expire: entry.expire,
        stale: entry.stale,
        tags: entry.tags
    };
    return [entry, clonedEntry];
}
// Splits a collected cache result into two results sharing the same metadata,
// each holding one branch of the teed entry stream.
function cloneCacheResult(result) {
    const [entryA, entryB] = cloneCacheEntry(result.entry);
    const withEntry = (entry) => ({
        entry,
        hasExplicitRevalidate: result.hasExplicitRevalidate,
        hasExplicitExpire: result.hasExplicitExpire,
        readRootParamNames: result.readRootParamNames
    });
    return [withEntry(entryA), withEntry(entryB)];
}
// Awaits a pending cache result and splits it into two independent results.
async function clonePendingCacheResult(pendingCacheResult) {
    const collectedResult = await pendingCacheResult;
    return cloneCacheResult(collectedResult);
}
// Resolves the pending split and returns its i-th element.
async function getNthCacheResult(split, i) {
    const results = await split;
    return results[i];
}
// Encodes a FormData instance into a cache-key string using length-prefixed
// fields: each key and value is emitted as "<hex length>:<text>". The length
// prefix acts as the delimiter, so no escaping is needed and two different
// structures can never produce the same encoded string.
async function encodeFormData(formData) {
    const parts = [];
    for (const [key, value] of formData) {
        parts.push(key.length.toString(16), ':', key);
        let stringValue;
        if (typeof value === 'string') {
            stringValue = value;
        } else {
            // Binary entries may not be valid UTF-8, so the bytes are packed
            // into UTF-16 code units instead (a trailing odd byte is appended
            // as its own code point). The resulting key may therefore be a
            // UCS-2 string; any service this key is passed to must be aware
            // that it might not be UTF-8 compatible.
            const arrayBuffer = await value.arrayBuffer();
            const byteLength = arrayBuffer.byteLength;
            if (byteLength % 2 === 0) {
                stringValue = String.fromCodePoint(...new Uint16Array(arrayBuffer));
            } else {
                const evenCodeUnits = new Uint16Array(arrayBuffer, 0, (byteLength - 1) / 2);
                const trailingByte = new Uint8Array(arrayBuffer, byteLength - 1, 1)[0];
                stringValue = String.fromCodePoint(...evenCodeUnits) + String.fromCodePoint(trailingByte);
            }
        }
        parts.push(stringValue.length.toString(16), ':', stringValue);
    }
    return parts.join('');
}
// Wraps a stream so that cacheSignal.endRead() is invoked once the underlying
// stream has been fully consumed, letting the cache signal track outstanding
// reads.
function createTrackedReadableStream(stream, cacheSignal) {
    const reader = stream.getReader();
    return new ReadableStream({
        async pull (controller) {
            const result = await reader.read();
            if (result.done) {
                controller.close();
                cacheSignal.endRead();
                return;
            }
            controller.enqueue(result.value);
        }
    });
}
async function cache(kind, id, boundArgsLength, originalFn, args) {
var _workUnitStore_implicitTags;
const isPrivate = kind === 'private';
// Private caches are currently only stored in the Resume Data Cache (RDC),
// and not in cache handlers.
const cacheHandler = isPrivate ? undefined : (0, _handlers.getCacheHandler)(kind);
if (!isPrivate && !cacheHandler) {
throw Object.defineProperty(new Error('Unknown cache handler: ' + kind), "__NEXT_ERROR_CODE", {
value: "E248",
enumerable: false,
configurable: true
});
}
const timeoutError = new _usecacheerrors.UseCacheTimeoutError();
Error.captureStackTrace(timeoutError, cache);
const wrapAsInvalidDynamicUsageError = (error, workStore)=>{
Error.captureStackTrace(error, cache);
workStore.invalidDynamicUsageError ??= error;
return error;
};
const workStore = _workasyncstorageexternal.workAsyncStorage.getStore();
if (workStore === undefined) {
throw Object.defineProperty(new Error('"use cache" cannot be used outside of App Router. Expected a WorkStore.'), "__NEXT_ERROR_CODE", {
value: "E279",
enumerable: false,
configurable: true
});
}
const workUnitStore = _workunitasyncstorageexternal.workUnitAsyncStorage.getStore();
if (workUnitStore === undefined) {
throw Object.defineProperty(new _invarianterror.InvariantError('"use cache" cannot be used outside of App Router. Expected a WorkUnitStore.'), "__NEXT_ERROR_CODE", {
value: "E1135",
enumerable: false,
configurable: true
});
}
const name = originalFn.name;
let fn = originalFn;
let cacheContext;
if (isPrivate) {
const expression = '"use cache: private"';
switch(workUnitStore.type){
// "use cache: private" is dynamic in prerendering contexts.
case 'prerender':
// (tail of a switch on workUnitStore.type, started above this window)
// Hang forever: during a static prerender, dynamic expressions resolve as
// promises that never settle, aborted via the render signal.
return (0, _dynamicrenderingutils.makeHangingPromise)(workUnitStore.renderSignal, workStore.route, expression);
case 'prerender-ppr':
// Partial Prerendering: record the dynamic access and postpone this subtree.
return (0, _dynamicrendering.postponeWithTracking)(workStore.route, expression, workUnitStore.dynamicTracking);
case 'prerender-legacy':
// Legacy prerender: bail out of static generation with a thrown error.
return (0, _dynamicrendering.throwToInterruptStaticGeneration)(expression, workStore, workUnitStore);
case 'prerender-client':
case 'validation-client':
// Reaching this from a client-component render should be statically
// impossible; treat it as an internal invariant violation.
throw Object.defineProperty(new _invarianterror.InvariantError(`${expression} must not be used within a client component. Next.js should be preventing ${expression} from being allowed in client components statically, but did not in this case.`), "__NEXT_ERROR_CODE", {
value: "E1020",
enumerable: false,
configurable: true
});
case 'unstable-cache':
{
// Dynamic expressions are forbidden inside unstable_cache().
throw wrapAsInvalidDynamicUsageError(Object.defineProperty(new Error(// TODO: Add a link to an error documentation page when we have one.
`${expression} must not be used within \`unstable_cache()\`.`), "__NEXT_ERROR_CODE", {
value: "E1016",
enumerable: false,
configurable: true
}), workStore);
}
case 'cache':
{
// Inside a (public) "use cache" scope the expression is only valid when
// nested within another instance of the same expression.
throw wrapAsInvalidDynamicUsageError(Object.defineProperty(new Error(// TODO: Add a link to an error documentation page when we have one.
`${expression} must not be used within "use cache". It can only be nested inside of another ${expression}.`), "__NEXT_ERROR_CODE", {
value: "E1001",
enumerable: false,
configurable: true
}), workStore);
}
case 'request':
case 'prerender-runtime':
case 'private-cache':
// These contexts may read request data, so the cache entry must be
// treated as private (request-scoped) rather than shared.
cacheContext = {
kind: 'private',
outerWorkUnitStore: workUnitStore,
skipPropagation: false
};
break;
case 'generate-static-params':
// generateStaticParams runs outside of any request; request-bound
// expressions are invalid here.
throw wrapAsInvalidDynamicUsageError(Object.defineProperty(new Error(// TODO: Add a link to an error documentation page when we have one.
`${expression} cannot be used outside of a request context.`), "__NEXT_ERROR_CODE", {
value: "E1008",
enumerable: false,
configurable: true
}), workStore);
default:
// Exhaustiveness check left over from the TypeScript source (no-op at
// runtime); the throw below keeps cacheContext definitely assigned.
workUnitStore;
// This is dead code, but without throwing an error here, TypeScript
// will assume that cacheContext is used before being assigned.
throw Object.defineProperty(new _invarianterror.InvariantError(`Unexpected work unit store.`), "__NEXT_ERROR_CODE", {
value: "E737",
enumerable: false,
configurable: true
});
}
} else {
// Non-private path: everything except client prerenders gets a shared
// ("public") cache context.
switch(workUnitStore.type){
case 'prerender-client':
case 'validation-client':
const expression = '"use cache"';
throw Object.defineProperty(new _invarianterror.InvariantError(`${expression} must not be used within a client component. Next.js should be preventing ${expression} from being allowed in client components statically, but did not in this case.`), "__NEXT_ERROR_CODE", {
value: "E1038",
enumerable: false,
configurable: true
});
case 'prerender':
case 'prerender-runtime':
case 'prerender-ppr':
case 'prerender-legacy':
case 'request':
case 'cache':
case 'private-cache':
// TODO: We should probably forbid nesting "use cache" inside
// unstable_cache. (fallthrough)
case 'unstable-cache':
case 'generate-static-params':
cacheContext = {
kind: 'public',
outerWorkUnitStore: workUnitStore,
skipPropagation: false
};
break;
default:
// Exhaustiveness check left over from the TypeScript source (no-op at
// runtime).
workUnitStore;
// This is dead code, but without throwing an error here, TypeScript
// will assume that cacheContext is used before being assigned.
throw Object.defineProperty(new _invarianterror.InvariantError(`Unexpected work unit store.`), "__NEXT_ERROR_CODE", {
value: "E737",
enumerable: false,
configurable: true
});
}
}
// Get the clientReferenceManifest while we're still in the outer Context.
// In case getClientReferenceManifestSingleton is implemented using AsyncLocalStorage.
const clientReferenceManifest = (0, _manifestssingleton.getClientReferenceManifest)();
// Because the Action ID is not yet unique per implementation of that Action we can't
// safely reuse the results across builds yet. In the meantime we add the buildId to the
// arguments as a seed to ensure they're not reused. Remove this once Action IDs hash
// the implementation.
const buildId = workStore.buildId;
// In dev mode, when the HMR refresh hash is set, we include it in the
// cache key. This ensures that cache entries are not reused when server
// components have been edited. This is a very coarse approach. But it's
// also only a temporary solution until Action IDs are unique per
// implementation. Remove this once Action IDs hash the implementation.
const hmrRefreshHash = (0, _workunitasyncstorageexternal.getHmrRefreshHash)(workUnitStore);
// Abort signal used for inputs that would otherwise hang indefinitely
// during a prerender (derived from the outer work unit store).
const hangingInputAbortSignal = (0, _dynamicrendering.createHangingInputAbortSignal)(workUnitStore);
// Private caches must not resolve during the static stage; depending on the
// outer context, delay until the appropriate runtime stage first.
if (cacheContext.kind === 'private') {
const { outerWorkUnitStore } = cacheContext;
switch(outerWorkUnitStore.type){
case 'prerender-runtime':
{
// In a runtime prerender, we have to make sure that APIs that would hang during a static prerender
// are resolved with a delay, in the appropriate runtime stage. Private caches read from
// Segments not using runtime prefetch resolve at EarlyRuntime,
// while runtime-prefetchable segments resolve at Runtime.
const stagedRendering = outerWorkUnitStore.stagedRendering;
if (stagedRendering) {
await stagedRendering.waitForStage((0, _dynamicrenderingutils.getRuntimeStage)(stagedRendering));
}
break;
}
case 'request':
{
if (process.env.NODE_ENV === 'development') {
// Similar to runtime prerenders, private caches should not resolve in the static stage
// of a dev request, so we delay them. We pick the appropriate runtime stage based on
// whether we're in the early or late stages.
const stagedRendering = outerWorkUnitStore.stagedRendering;
const stage = stagedRendering ? (0, _dynamicrenderingutils.getRuntimeStage)(stagedRendering) : _stagedrendering.RenderStage.Runtime;
await (0, _dynamicrenderingutils.makeDevtoolsIOAwarePromise)(undefined, outerWorkUnitStore, stage);
}
break;
}
case 'private-cache':
// Nested inside another private cache: the outer cache already
// applied any required staging delay.
break;
default:
{
// Exhaustiveness check (no-op at runtime) — cacheContext.kind is
// 'private' only for the store types handled above.
outerWorkUnitStore;
}
}
}
let isPageOrLayoutSegmentFunction = false;
// For page and layout segment functions (i.e. the page/layout component,
// or generateMetadata/generateViewport), the cache function is
// overwritten, which allows us to apply special handling for params and
// searchParams. For pages and layouts we're using the outer params prop,
// and not the inner one that was serialized/deserialized. While it's not
// generally true for "use cache" args, in the case of `params` the inner
// and outer object are essentially equivalent, so this is safe to do
// (including fallback params that are hanging promises). It allows us to
// avoid waiting for the timeout, when prerendering a fallback shell of a
// cached page or layout that awaits params.
if (isPageSegmentFunction(args)) {
isPageOrLayoutSegmentFunction = true;
// First arg is the page props object: { params, searchParams }.
const [{ params: outerParams, searchParams: outerSearchParams }, ...otherOuterArgs] = args;
const props = {
params: outerParams
};
if (isPrivate) {
// Private caches allow accessing search params. We need to include
// them in the serialized args and when generating the cache key.
props.searchParams = outerSearchParams;
}
args = [
props,
...otherOuterArgs
];
// Wrap the original function, substituting the outer params for the
// deserialized inner ones. The computed-property-and-index trick
// preserves the original function's name on the wrapper.
fn = ({
[name]: async ({ params: _innerParams, searchParams: innerSearchParams }, ...otherInnerArgs)=>originalFn.apply(null, [
{
params: outerParams,
searchParams: innerSearchParams ?? // For public caches, search params are omitted from the cache
// key (and the serialized args) to avoid mismatches between
// prerendering and resuming a cached page that does not
// access search params. This is also the reason why we're not
// using a hanging promise for search params. For cached pages
// that do access them, which is an invalid dynamic usage, we
// need to ensure that an error is shown.
(0, _searchparams.makeErroringSearchParamsForUseCache)()
},
...otherInnerArgs
])
})[name];
} else if (isLayoutSegmentFunction(args)) {
isPageOrLayoutSegmentFunction = true;
// First arg is the layout props object: { params, $$isLayout, ...slots }.
const [{ params: outerParams, $$isLayout, ...outerSlots }, ...otherOuterArgs] = args;
// Overwrite the props to omit $$isLayout. Note that slots are only
// passed to the layout component (if any are defined), and not to
// generateMetadata nor generateViewport. For those functions,
// outerSlots/innerSlots is an empty object, which is fine because we're
// just spreading it into the props.
args = [
{
params: outerParams,
...outerSlots
},
...otherOuterArgs
];
// As above: swap in the outer params, keep the deserialized slots.
fn = ({
[name]: async ({ params: _innerParams, ...innerSlots }, ...otherInnerArgs)=>originalFn.apply(null, [
{
params: outerParams,
...innerSlots
},
...otherInnerArgs
])
})[name];
}
// For closures ("use cache" functions with bound variables), the first
// argument is an encrypted payload of the bound values. Decrypt it,
// validate its shape, and put the plaintext array back at the front of
// `args` so it participates in the cache key and the inner call.
if (boundArgsLength > 0) {
if (args.length === 0) {
throw Object.defineProperty(new _invarianterror.InvariantError(`Expected the "use cache" function ${JSON.stringify(fn.name)} to receive its encrypted bound arguments as the first argument.`), "__NEXT_ERROR_CODE", {
value: "E524",
enumerable: false,
configurable: true
});
}
const encryptedBoundArgs = args.shift();
const boundArgs = await (0, _encryption.decryptActionBoundArgs)(id, encryptedBoundArgs);
if (!Array.isArray(boundArgs)) {
throw Object.defineProperty(new _invarianterror.InvariantError(`Expected the bound arguments of "use cache" function ${JSON.stringify(fn.name)} to deserialize into an array, got ${typeof boundArgs} instead.`), "__NEXT_ERROR_CODE", {
value: "E581",
enumerable: false,
configurable: true
});
}
// The expected count is baked in at build time; a mismatch indicates a
// corrupted or mismatched payload.
if (boundArgsLength !== boundArgs.length) {
throw Object.defineProperty(new _invarianterror.InvariantError(`Expected the "use cache" function ${JSON.stringify(fn.name)} to receive ${boundArgsLength} bound arguments, got ${boundArgs.length} instead.`), "__NEXT_ERROR_CODE", {
value: "E559",
enumerable: false,
configurable: true
});
}
args.unshift(boundArgs);
}
// Temporary reference set for serializing args that can't be serialized
// by value (used below when encoding the cache key via encodeReply).
const temporaryReferences = (0, _client.createTemporaryReferenceSet)();
// For private caches, which are allowed to read cookies, we still don't
// need to include the cookies in the cache key. This is because we don't
// store the cache entries in a cache handler, but only in the Resume Data
// Cache (RDC). Private caches are only used during dynamic requests and
// runtime prefetches. For dynamic requests, the RDC is immutable, so it
// does not include any private caches. For runtime prefetches, the RDC is
// mutable, but only lives as long as the request, so the key does not
// need to include cookies.
// Cache key = buildId + action id + args, plus the HMR refresh hash when
// one is present (dev only — see the comment where it's computed above).
const cacheKeyParts = hmrRefreshHash ? [
buildId,
id,
args,
hmrRefreshHash
] : [
buildId,
id,
args
];
const encodeCacheKeyParts = ()=>(0, _client.encodeReply)(cacheKeyPart