// next — The React Framework
// (source listing metadata: 934 lines, 71.1 kB, JavaScript)
import { HasLoadingBoundary } from '../../../shared/lib/app-router-types';
import { NEXT_DID_POSTPONE_HEADER, NEXT_ROUTER_PREFETCH_HEADER, NEXT_ROUTER_SEGMENT_PREFETCH_HEADER, NEXT_ROUTER_STALE_TIME_HEADER, NEXT_ROUTER_STATE_TREE_HEADER, NEXT_URL, RSC_CONTENT_TYPE_HEADER, RSC_HEADER } from '../app-router-headers';
import { createFetch, createFromNextReadableStream } from '../router-reducer/fetch-server-response';
import { pingPrefetchTask, isPrefetchTaskDirty, startRevalidationCooldown } from './scheduler';
import { getAppBuildId } from '../../app-build-id';
import { createHrefFromUrl } from '../router-reducer/create-href-from-url';
// TODO: Rename this module to avoid confusion with other types of cache keys
import { createCacheKey as createPrefetchRequestKey } from './cache-key';
import { doesStaticSegmentAppearInURL, getCacheKeyForDynamicParam, getParamValueFromCacheKey, getRenderedPathname, getRenderedSearch, parseDynamicParamFromURLPart } from '../../route-params';
import { createCacheMap, getFromCacheMap, setInCacheMap, setSizeInCacheMap, deleteFromCacheMap, isValueExpired, Fallback } from './cache-map';
import { appendSegmentCacheKeyPart, appendSegmentRequestKeyPart, convertSegmentPathToStaticExportFilename, createSegmentCacheKeyPart, createSegmentRequestKeyPart, ROOT_SEGMENT_CACHE_KEY, ROOT_SEGMENT_REQUEST_KEY } from '../../../shared/lib/segment-cache/segment-value-encoding';
import { normalizeFlightData, prepareFlightRouterStateForRequest } from '../../flight-data-helpers';
import { STATIC_STALETIME_MS } from '../router-reducer/reducers/navigate-reducer';
import { pingVisibleLinks } from '../links';
import { PAGE_SEGMENT_KEY } from '../../../shared/lib/segment';
import { DOC_PREFETCH_RANGE_HEADER_VALUE, doesExportedHtmlMatchBuildId } from '../../../shared/lib/segment-cache/output-export-prefetch-encoding';
import { FetchStrategy } from '../segment-cache';
import { createPromiseWithResolvers } from '../../../shared/lib/promise-with-resolvers';
/**
 * Status of a cache entry as it progresses from no data (Empty), through
 * waiting on the server (Pending), to finished (Fulfilled on success or
 * Rejected on failure).
 *
 * Compiled-TS-style numeric enum: maps name -> number and number -> name.
 */ export var EntryStatus = /*#__PURE__*/ (()=>{
    const names = [
        'Empty',
        'Pending',
        'Fulfilled',
        'Rejected'
    ];
    const enumObject = {};
    names.forEach((name, value)=>{
        enumObject[name] = value;
        enumObject[value] = name;
    });
    return enumObject;
})();
// True when this bundle was built for `output: "export"` in production;
// changes how prefetch requests are issued (URL-encoded instead of headers).
const isOutputExportMode = process.env.NODE_ENV === 'production' && process.env.__NEXT_CONFIG_OUTPUT === 'export';
/**
 * Converts a server-sent stale time (seconds) to milliseconds, clamped to a
 * 30 second minimum so an overly short stale time cannot prevent anything
 * from being prefetched at all.
 */ function getStaleTimeMs(staleTimeSeconds) {
    const clampedSeconds = Math.max(staleTimeSeconds, 30);
    return clampedSeconds * 1000;
}
// Top-level cache maps: one for route trees (keyed by URL parts), one for
// per-segment data (keyed by segment cache key).
let routeCacheMap = createCacheMap();
let segmentCacheMap = createCacheMap();
// All invalidation listeners for the whole cache are tracked in a single set.
// Since we don't yet support tag or path-based invalidation, there's no point
// tracking them any more granularly than this. Once we add granular
// invalidation, that may change, though generally the model is to just notify
// the listeners and allow the caller to poll the prefetch cache with a new
// prefetch task if desired.
let invalidationListeners = null;
// Incrementing counter used to track cache invalidations. Entries carry the
// version current at the time of their request; reads compare against this
// counter, so bumping it lazily evicts all older entries.
let currentCacheVersion = 0;
// Returns the current global cache version (see `currentCacheVersion` above).
export function getCurrentCacheVersion() {
    return currentCacheVersion;
}
/**
 * Clears the client prefetch cache when a server action calls revalidatePath
 * or revalidateTag. Eventually only the affected segments will be cleared,
 * but the server needs more work before the client can do that correctly.
 */ export function revalidateEntireCache(nextUrl, tree) {
    // Bumping the version lazily evicts everything: entries are versioned and
    // the version is checked on every read, so nothing needs to be removed
    // eagerly. Lazy eviction is deliberate — future revalidateTag/refresh
    // work will evict only a subset of entries.
    currentCacheVersion += 1;
    // Give CDN caches time to propagate before re-prefetching.
    startRevalidationCooldown();
    // Refill the cache for all links currently visible on screen.
    pingVisibleLinks(nextUrl, tree);
    // Likewise notify router.prefetch(onInvalidate) listeners so they can
    // trigger a fresh prefetch if desired.
    pingInvalidationListeners(nextUrl, tree);
}
function attachInvalidationListener(task) {
    // Called whenever a prefetch task reads a cache entry. Tasks created via
    // router.prefetch(onInvalidate) carry a callback that must fire if any
    // entry the task read is later invalidated, so register such tasks in
    // the module-level listener set.
    if (task.onInvalidate === null) {
        return;
    }
    if (invalidationListeners === null) {
        invalidationListeners = new Set([
            task
        ]);
    } else {
        invalidationListeners.add(task);
    }
}
function notifyInvalidationListener(task) {
    // Invokes the task's onInvalidate callback at most once.
    const onInvalidate = task.onInvalidate;
    if (onInvalidate === null) {
        return;
    }
    // Null out the callback before invoking so it can never fire twice.
    task.onInvalidate = null;
    try {
        // User-space code; guard against exceptions so cache bookkeeping
        // is not interrupted.
        onInvalidate();
    } catch (error) {
        // Prefer the standard reportError global when available.
        if (typeof reportError === 'function') {
            reportError(error);
        } else {
            console.error(error);
        }
    }
}
export function pingInvalidationListeners(nextUrl, tree) {
    // The rough equivalent of pingVisibleLinks, but for onInvalidate
    // callbacks. Called when the Next-Url or the base tree changes (either
    // may affect a prefetch task's result) and after a cache invalidation.
    if (invalidationListeners === null) {
        return;
    }
    // Take ownership of the set before notifying, so re-registrations during
    // notification land in a fresh set.
    const tasks = invalidationListeners;
    invalidationListeners = null;
    for (const task of tasks){
        if (isPrefetchTaskDirty(task, nextUrl, tree)) {
            notifyInvalidationListener(task);
        }
    }
}
// Reads a route entry (non-revalidation slot). Route entries are keyed by
// pathname, search string, and Next-Url.
export function readRouteCacheEntry(now, key) {
    const keypath = [
        key.pathname,
        key.search,
        key.nextUrl
    ];
    return getFromCacheMap(now, getCurrentCacheVersion(), routeCacheMap, keypath, false);
}
export function getCanonicalSegmentKeypath(route, cacheKey) {
    // The actual (fully specific) keypath for a segment, omitting no params.
    // Only page segments can vary on the rendered search string; every other
    // segment uses the Fallback sentinel in that position.
    const isPageSegment = cacheKey.endsWith('/' + PAGE_SEGMENT_KEY);
    return [
        cacheKey,
        isPageSegment ? route.renderedSearch : Fallback
    ];
}
export function getGenericSegmentKeypathFromFetchStrategy(fetchStrategy, route, cacheKey) {
    // The most generic possible keypath for a segment, based on how it was
    // (or will be) fetched: static/PPR versus runtime prefetching.
    //
    // Used when _writing_ to the cache, so the entry can be reused as widely
    // as possible. After a response arrives we may be able to re-key to
    // something even more generic (e.g. if the server says the response does
    // not vary on a particular param), but the fetch strategy alone already
    // tells us some of this before the request is sent.
    //
    // Only page segments ever include search params, and only runtime
    // prefetches (Full / PPRRuntime) include them in the result; static
    // prefetches never do, so they are reusable across all search values.
    const isPageSegment = cacheKey.endsWith('/' + PAGE_SEGMENT_KEY);
    const isRuntimePrefetch = fetchStrategy === FetchStrategy.Full || fetchStrategy === FetchStrategy.PPRRuntime;
    const doesVaryOnSearchParams = isPageSegment && isRuntimePrefetch;
    return [
        cacheKey,
        doesVaryOnSearchParams ? route.renderedSearch : Fallback
    ];
}
// Reads a segment entry from the canonical (non-revalidation) slot.
export function readSegmentCacheEntry(now, keypath) {
    return getFromCacheMap(now, getCurrentCacheVersion(), segmentCacheMap, keypath, false);
}
// Reads a segment entry from the special revalidation slot at this keypath.
function readRevalidatingSegmentCacheEntry(now, keypath) {
    return getFromCacheMap(now, getCurrentCacheVersion(), segmentCacheMap, keypath, true);
}
export function waitForSegmentCacheEntry(pendingEntry) {
    // The entry is Pending, so a request is already in flight. Lazily attach
    // a promise that settles when the server responds — resolving with the
    // fulfilled entry, or with null if the entry is rejected.
    if (pendingEntry.promise === null) {
        pendingEntry.promise = createPromiseWithResolvers();
    }
    return pendingEntry.promise.promise;
}
/**
 * Returns the cached route entry for `key` if one exists. Otherwise creates
 * an empty (status Empty) entry, inserts it into the route cache map, and
 * returns it for the caller to populate.
 */ export function readOrCreateRouteCacheEntry(now, task, key) {
    attachInvalidationListener(task);
    const existingEntry = readRouteCacheEntry(now, key);
    if (existingEntry !== null) {
        return existingEntry;
    }
    // Cache miss: build an empty entry that the fetch will later populate.
    const pendingEntry = {
        canonicalUrl: null,
        status: 0,
        blockedTasks: null,
        tree: null,
        head: null,
        isHeadPartial: true,
        // We can't know whether the route could be intercepted until the
        // server responds, so start with the conservative assumption.
        couldBeIntercepted: true,
        // Likewise, assume no PPR support until the response says otherwise.
        isPPREnabled: false,
        renderedSearch: null,
        TODO_metadataStatus: 0,
        TODO_isHeadDynamic: false,
        // Fields used by the cache map itself
        ref: null,
        size: 0,
        // Empty entries are never evicted by staleness; a real `staleAt` is
        // assigned once data arrives.
        staleAt: Infinity,
        version: getCurrentCacheVersion()
    };
    setInCacheMap(routeCacheMap, [
        key.pathname,
        key.search,
        key.nextUrl
    ], pendingEntry, false);
    return pendingEntry;
}
/**
 * Attempts to construct an "optimistic" route entry for a navigation to
 * `requestedUrl` when no matching route tree exists in the prefetch cache,
 * by patching a cached entry for the same pathname with an empty search
 * string. Returns the synthetic Fulfilled entry, or null when no suitable
 * base entry exists (the caller then falls back to a blocking navigation).
 * The result is deliberately never inserted into the cache.
 */ export function requestOptimisticRouteCacheEntry(now, requestedUrl, nextUrl) {
    // This function is called during a navigation when there was no matching
    // route tree in the prefetch cache. Before de-opting to a blocking,
    // unprefetched navigation, we will first attempt to construct an "optimistic"
    // route tree by checking the cache for similar routes.
    //
    // Check if there's a route with the same pathname, but with different
    // search params. We can then base our optimistic route tree on this entry.
    //
    // Conceptually, we are simulating what would happen if we did perform a
    // prefetch the requested URL, under the assumption that the server will
    // not redirect or rewrite the request in a different manner than the
    // base route tree. This assumption might not hold, in which case we'll have
    // to recover when we perform the dynamic navigation request. However, this
    // is what would happen if a route were dynamically rewritten/redirected
    // in between the prefetch and the navigation. So the logic needs to exist
    // to handle this case regardless.
    // Look for a route with the same pathname, but with an empty search string.
    // TODO: There's nothing inherently special about the empty search string;
    // it's chosen somewhat arbitrarily, with the rationale that it's the most
    // likely one to exist. But we should update this to match _any_ search
    // string. The plan is to generalize this logic alongside other improvements
    // related to "fallback" cache entries.
    const requestedSearch = requestedUrl.search;
    if (requestedSearch === '') {
        // The caller would have already checked if a route with an empty search
        // string is in the cache. So we can bail out here.
        return null;
    }
    const urlWithoutSearchParams = new URL(requestedUrl);
    urlWithoutSearchParams.search = '';
    const routeWithNoSearchParams = readRouteCacheEntry(now, createPrefetchRequestKey(urlWithoutSearchParams.href, nextUrl));
    if (routeWithNoSearchParams === null || routeWithNoSearchParams.status !== 2) {
        // No Fulfilled base entry to patch. Bail out of constructing an
        // optimistic route tree. This will result in a blocking,
        // unprefetched navigation.
        return null;
    }
    // Now we have a base route tree we can "patch" with our optimistic values.
    const TODO_isHeadDynamic = routeWithNoSearchParams.TODO_isHeadDynamic;
    let head;
    let isHeadPartial;
    let TODO_metadataStatus;
    if (TODO_isHeadDynamic) {
        // If the head was fetched via dynamic request, then we don't know
        // whether it accessed search params. So we must be conservative — we
        // cannot reuse it. The head will stream in during the dynamic navigation.
        // TODO: When Cache Components is enabled, we should track whether the
        // head varied on search params.
        // TODO: Because we're rendering a `null` viewport as the partial state, the
        // viewport will not block the navigation; it will stream in later,
        // alongside the metadata. Viewport is supposed to be blocking. This is
        // a subtle bug in the old implementation that we've preserved here. It's
        // rare enough that we're not going to fix it for apps that don't enable
        // Cache Components; when Cache Components is enabled, though, we should
        // use an infinite promise here to block the navigation. But only if the
        // entry actually varies on search params.
        head = [
            null,
            null
        ];
        // Setting this to `true` ensures that on navigation, the head is requested.
        isHeadPartial = true;
        TODO_metadataStatus = 0;
    } else {
        // The head was fetched via a static/PPR request. So it's guaranteed to
        // not contain search params. We can reuse it.
        head = routeWithNoSearchParams.head;
        isHeadPartial = routeWithNoSearchParams.isHeadPartial;
        TODO_metadataStatus = 0;
    }
    // Optimistically assume that redirects for the requested pathname do
    // not vary on the search string. Therefore, if the base route was
    // redirected to a different search string, then the optimistic route
    // should be redirected to the same search string. Otherwise, we use
    // the requested search string.
    const canonicalUrlForRouteWithNoSearchParams = new URL(routeWithNoSearchParams.canonicalUrl, requestedUrl.origin);
    const optimisticCanonicalSearch = canonicalUrlForRouteWithNoSearchParams.search !== '' ? canonicalUrlForRouteWithNoSearchParams.search : requestedSearch;
    // Similarly, optimistically assume that rewrites for the requested
    // pathname do not vary on the search string. Therefore, if the base
    // route was rewritten to a different search string, then the optimistic
    // route should be rewritten to the same search string. Otherwise, we use
    // the requested search string.
    const optimisticRenderedSearch = routeWithNoSearchParams.renderedSearch !== '' ? routeWithNoSearchParams.renderedSearch : requestedSearch;
    const optimisticUrl = new URL(routeWithNoSearchParams.canonicalUrl, location.origin);
    optimisticUrl.search = optimisticCanonicalSearch;
    const optimisticCanonicalUrl = createHrefFromUrl(optimisticUrl);
    // Clone the base route tree, and override the relevant fields with our
    // optimistic values.
    const optimisticEntry = {
        canonicalUrl: optimisticCanonicalUrl,
        status: 2,
        // This isn't cloned because it's instance-specific
        blockedTasks: null,
        tree: routeWithNoSearchParams.tree,
        head,
        isHeadPartial,
        couldBeIntercepted: routeWithNoSearchParams.couldBeIntercepted,
        isPPREnabled: routeWithNoSearchParams.isPPREnabled,
        // Override the rendered search with the optimistic value.
        renderedSearch: optimisticRenderedSearch,
        TODO_metadataStatus,
        TODO_isHeadDynamic,
        // Map-related fields
        ref: null,
        size: 0,
        staleAt: routeWithNoSearchParams.staleAt,
        version: routeWithNoSearchParams.version
    };
    // Do not insert this entry into the cache. It only exists so we can
    // perform the current navigation. Just return it to the caller.
    return optimisticEntry;
}
/**
 * Returns the cached segment entry at its canonical keypath if one exists.
 * Otherwise creates an empty entry, inserts it at the most generic keypath
 * the fetch strategy allows (so it can be shared as widely as possible),
 * and returns it.
 */ export function readOrCreateSegmentCacheEntry(now, fetchStrategy, route, cacheKey) {
    const existingEntry = readSegmentCacheEntry(now, getCanonicalSegmentKeypath(route, cacheKey));
    if (existingEntry !== null) {
        return existingEntry;
    }
    // Cache miss: insert an empty pending entry at the generic keypath.
    const pendingEntry = createDetachedSegmentCacheEntry(route.staleAt);
    const genericKeypath = getGenericSegmentKeypathFromFetchStrategy(fetchStrategy, route, cacheKey);
    setInCacheMap(segmentCacheMap, genericKeypath, pendingEntry, false);
    return pendingEntry;
}
export function readOrCreateRevalidatingSegmentEntry(now, fetchStrategy, route, cacheKey) {
    // Used when a segment is already cached but we want to issue another
    // request anyway, in case the server returns fresher or more complete
    // data. Whether the result replaces the existing entry is decided
    // elsewhere; this function only provides an entry used to track (and
    // dedupe) the revalidation request.
    //
    // Revalidations are stored in the cache so concurrent revalidation
    // requests are deduped, but they must not overwrite a "normal" entry at
    // the same keypath. So each internal cache location has a dedicated
    // "revalidation" slot used solely for this purpose. Conceptually, it's
    // as if revalidation entries lived in a separate cache map and were
    // transferred into the canonical map when the request completes; storing
    // both in one data structure is simply more efficient.
    //
    // TODO: Once Fallback behavior for params re-keys entries based on
    // response data, the revalidating entry's keypath could be less generic
    // than the previous entry's. Not a concern today, because keypaths are
    // derived solely from the prefetch strategy, never from response data.
    const existingEntry = readRevalidatingSegmentCacheEntry(now, getCanonicalSegmentKeypath(route, cacheKey));
    if (existingEntry !== null) {
        return existingEntry;
    }
    // No revalidation in progress: insert a fresh pending entry into the
    // revalidation slot at the generic keypath.
    const pendingEntry = createDetachedSegmentCacheEntry(route.staleAt);
    const genericKeypath = getGenericSegmentKeypathFromFetchStrategy(fetchStrategy, route, cacheKey);
    setInCacheMap(segmentCacheMap, genericKeypath, pendingEntry, true);
    return pendingEntry;
}
export function overwriteRevalidatingSegmentCacheEntry(fetchStrategy, route, cacheKey) {
    // The caller has already decided to replace the existing revalidation
    // entry: unconditionally write a fresh pending entry over it.
    const pendingEntry = createDetachedSegmentCacheEntry(route.staleAt);
    const genericKeypath = getGenericSegmentKeypathFromFetchStrategy(fetchStrategy, route, cacheKey);
    setInCacheMap(segmentCacheMap, genericKeypath, pendingEntry, true);
    return pendingEntry;
}
/**
 * Inserts `candidateEntry` (a fulfilled entry not yet in the cache) at
 * `keypath`, but only if it takes precedence over any existing entry there.
 * Returns the inserted entry, or null if the candidate was discarded
 * (expired, or less complete than what's already cached).
 */ export function upsertSegmentEntry(now, keypath, candidateEntry) {
    // We have a new entry that has not yet been inserted into the cache. Before
    // we do so, we need to confirm whether it takes precedence over the existing
    // entry (if one exists).
    // TODO: We should not upsert an entry if its key was invalidated in the time
    // since the request was made. We can do that by passing the "owner" entry to
    // this function and confirming it's the same as `existingEntry`.
    if (isValueExpired(now, getCurrentCacheVersion(), candidateEntry)) {
        // The entry is expired. We cannot upsert it.
        return null;
    }
    const existingEntry = readSegmentCacheEntry(now, keypath);
    if (existingEntry !== null) {
        // Don't replace a more specific segment with a less-specific one. A case where this
        // might happen is if the existing segment was fetched via
        // `<Link prefetch={true}>`.
        if (// The new segment was fetched with a different, less capable fetch
        // strategy than the cached one, so it can't have more content…
        candidateEntry.fetchStrategy !== existingEntry.fetchStrategy && !canNewFetchStrategyProvideMoreContent(existingEntry.fetchStrategy, candidateEntry.fetchStrategy) || // …or the existing entry isn't partial, but the new one is.
        // (TODO: can this be true if `candidateEntry.fetchStrategy >= existingEntry.fetchStrategy`?)
        !existingEntry.isPartial && candidateEntry.isPartial) {
            // We're going to leave revalidating entry in the cache so that it doesn't
            // get revalidated again unnecessarily. Downgrade the Fulfilled entry to
            // Rejected and null out the data so it can be garbage collected. We leave
            // `staleAt` intact to prevent subsequent revalidation attempts only until
            // the entry expires.
            const rejectedEntry = candidateEntry;
            rejectedEntry.status = 3;
            rejectedEntry.loading = null;
            rejectedEntry.rsc = null;
            return null;
        }
        // The candidate wins: evict the existing entry from the cache.
        deleteFromCacheMap(existingEntry);
    }
    const isRevalidation = false;
    setInCacheMap(segmentCacheMap, keypath, candidateEntry, isRevalidation);
    return candidateEntry;
}
/**
 * Creates a fresh Empty segment entry that has not yet been inserted into
 * any cache map ("detached").
 */ export function createDetachedSegmentCacheEntry(staleAt) {
    return {
        status: 0,
        // Default to assuming a PPR fetch strategy; updated when a fetch is
        // actually initiated.
        fetchStrategy: FetchStrategy.PPR,
        rsc: null,
        loading: null,
        isPartial: true,
        promise: null,
        // Fields used by the cache map itself
        ref: null,
        size: 0,
        staleAt,
        version: 0
    };
}
export function upgradeToPendingSegment(emptyEntry, fetchStrategy) {
    // Transition an Empty entry to Pending, right before its request is sent.
    // The version is stamped here — pre-request, not on response — so that
    // the stamp is guaranteed to precede any data read on the server; the
    // next global version bump then correctly evicts this entry.
    return Object.assign(emptyEntry, {
        status: 1,
        fetchStrategy,
        version: getCurrentCacheVersion()
    });
}
function pingBlockedTasks(entry) {
    // Wake every prefetch task that was blocked on this entry, then clear
    // the set so each task is pinged only once.
    const blockedTasks = entry.blockedTasks;
    if (blockedTasks !== null) {
        blockedTasks.forEach((blockedTask)=>pingPrefetchTask(blockedTask));
        entry.blockedTasks = null;
    }
}
function fulfillRouteCacheEntry(entry, tree, head, isHeadPartial, staleAt, couldBeIntercepted, canonicalUrl, renderedSearch, isPPREnabled, isHeadDynamic) {
    // Populate the route entry with server data, mark it Fulfilled, and wake
    // any prefetch tasks blocked waiting on it.
    const fulfilledEntry = Object.assign(entry, {
        status: 2,
        tree,
        head,
        isHeadPartial,
        staleAt,
        couldBeIntercepted,
        canonicalUrl,
        renderedSearch,
        isPPREnabled,
        TODO_isHeadDynamic: isHeadDynamic
    });
    pingBlockedTasks(entry);
    return fulfilledEntry;
}
function fulfillSegmentCacheEntry(segmentCacheEntry, rsc, loading, staleAt, isPartial) {
    // Populate the segment entry with server data and mark it Fulfilled.
    const fulfilledEntry = Object.assign(segmentCacheEntry, {
        status: 2,
        rsc,
        loading,
        staleAt,
        isPartial
    });
    // Resolve any listeners waiting on this data, then drop the promise so
    // it can be garbage collected.
    if (segmentCacheEntry.promise !== null) {
        segmentCacheEntry.promise.resolve(fulfilledEntry);
        fulfilledEntry.promise = null;
    }
    return fulfilledEntry;
}
function rejectRouteCacheEntry(entry, staleAt) {
    // Mark the entry Rejected (kept until `staleAt` so we don't immediately
    // retry) and wake any tasks blocked on it.
    Object.assign(entry, {
        status: 3,
        staleAt
    });
    pingBlockedTasks(entry);
}
function rejectSegmentCacheEntry(entry, staleAt) {
    // Mark the entry Rejected, keeping it until `staleAt`.
    Object.assign(entry, {
        status: 3,
        staleAt
    });
    // Wake any waiters with `null`. NOTE: the reason the prefetch was
    // canceled is not currently propagated, though a `reason` argument
    // could be added for that.
    const pendingPromise = entry.promise;
    if (pendingPromise !== null) {
        pendingPromise.resolve(null);
        entry.promise = null;
    }
}
function convertRootTreePrefetchToRouteTree(rootTree, renderedPathname) {
    // Split the rendered pathname into URL parts, dropping the empty strings
    // produced by leading/trailing slashes, then seed the recursion with the
    // root segment keys and index 0.
    const pathnameParts = renderedPathname.split('/').filter((part)=>part !== '');
    return convertTreePrefetchToRouteTree(rootTree.tree, ROOT_SEGMENT_CACHE_KEY, null, ROOT_SEGMENT_REQUEST_KEY, ROOT_SEGMENT_CACHE_KEY, pathnameParts, 0);
}
/**
 * Recursively converts one node of the server-sent prefetch tree into the
 * cache's RouteTree format, computing a cache key and request key for every
 * segment and parsing dynamic param values out of the rendered pathname.
 */ function convertTreePrefetchToRouteTree(prefetch, segment, param, requestKey, cacheKey, pathnameParts, pathnamePartsIndex) {
    // Converts the route tree sent by the server into the format used by the
    // cache. The cached version of the tree includes additional fields, such as a
    // cache key for each segment. Since this is frequently accessed, we compute
    // it once instead of on every access. This same cache key is also used to
    // request the segment from the server.
    let slots = null;
    const prefetchSlots = prefetch.slots;
    if (prefetchSlots !== null) {
        slots = {};
        for(let parallelRouteKey in prefetchSlots){
            const childPrefetch = prefetchSlots[parallelRouteKey];
            const childParamName = childPrefetch.name;
            const childParamType = childPrefetch.paramType;
            const childServerSentParamKey = childPrefetch.paramKey;
            let childDoesAppearInURL;
            let childParam = null;
            let childSegment;
            if (childParamType !== null) {
                // This segment is parameterized. Get the param from the pathname.
                const childParamValue = parseDynamicParamFromURLPart(childParamType, pathnameParts, pathnamePartsIndex);
                // Assign a cache key to the segment, based on the param value. In the
                // pre-Segment Cache implementation, the server computes this and sends
                // it in the body of the response. In the Segment Cache implementation,
                // the server sends an empty string and we fill it in here.
                // TODO: We're intentionally not adding the search param to page
                // segments here; it's tracked separately and added back during a read.
                // This would be clearer if we waited to construct the segment until
                // it's read from the cache, since that's effectively what we're
                // doing anyway.
                const renderedSearch = '';
                // The server omits `paramKey` from the prefetch response when
                // cacheComponents is enabled, in which case we derive it from
                // the parsed param value.
                const childParamKey = childServerSentParamKey !== null ? childServerSentParamKey : getCacheKeyForDynamicParam(childParamValue, renderedSearch);
                childParam = {
                    name: childParamName,
                    value: childParamValue,
                    type: childParamType
                };
                childSegment = [
                    childParamName,
                    childParamKey,
                    childParamType
                ];
                childDoesAppearInURL = true;
            } else {
                // Static segment: the "name" is the segment itself.
                childSegment = childParamName;
                childDoesAppearInURL = doesStaticSegmentAppearInURL(childParamName);
            }
            // Only increment the index if the segment appears in the URL. If it's a
            // "virtual" segment, like a route group, it remains the same.
            const childPathnamePartsIndex = childDoesAppearInURL ? pathnamePartsIndex + 1 : pathnamePartsIndex;
            const childRequestKeyPart = createSegmentRequestKeyPart(childSegment);
            const childRequestKey = appendSegmentRequestKeyPart(requestKey, parallelRouteKey, childRequestKeyPart);
            const childCacheKey = appendSegmentCacheKeyPart(cacheKey, parallelRouteKey, createSegmentCacheKeyPart(childRequestKeyPart, childSegment));
            slots[parallelRouteKey] = convertTreePrefetchToRouteTree(childPrefetch, childSegment, childParam, childRequestKey, childCacheKey, pathnameParts, childPathnamePartsIndex);
        }
    }
    return {
        cacheKey,
        requestKey,
        segment,
        param,
        slots,
        isRootLayout: prefetch.isRootLayout,
        // This field is only relevant to dynamic routes. For a PPR/static route,
        // there's always some partial loading state we can fetch.
        hasLoadingBoundary: HasLoadingBoundary.SegmentHasLoadingBoundary,
        hasRuntimePrefetch: prefetch.hasRuntimePrefetch
    };
}
// Entry point for converting a server-sent FlightRouterState into the
// cache's RouteTree format, seeding the recursion with the root segment keys.
function convertRootFlightRouterStateToRouteTree(flightRouterState) {
    return convertFlightRouterStateToRouteTree(flightRouterState, ROOT_SEGMENT_CACHE_KEY, ROOT_SEGMENT_REQUEST_KEY);
}
/**
 * Recursively converts one FlightRouterState node (a tuple of
 * [segment, parallelRoutes, ...]) into the cache's RouteTree format,
 * computing a cache key and request key for every segment.
 */ function convertFlightRouterStateToRouteTree(flightRouterState, cacheKey, requestKey) {
    let slots = null;
    // Index 1 of the tuple holds the child routes keyed by parallel route name.
    const parallelRoutes = flightRouterState[1];
    for(let parallelRouteKey in parallelRoutes){
        const childRouterState = parallelRoutes[parallelRouteKey];
        const childSegment = childRouterState[0];
        // TODO: Eventually, the param values will not be included in the response
        // from the server. We'll instead fill them in on the client by parsing
        // the URL. This is where we'll do that.
        const childRequestKeyPart = createSegmentRequestKeyPart(childSegment);
        const childRequestKey = appendSegmentRequestKeyPart(requestKey, parallelRouteKey, childRequestKeyPart);
        const childCacheKey = appendSegmentCacheKeyPart(cacheKey, parallelRouteKey, createSegmentCacheKeyPart(childRequestKeyPart, childSegment));
        const childTree = convertFlightRouterStateToRouteTree(childRouterState, childCacheKey, childRequestKey);
        if (slots === null) {
            slots = {
                [parallelRouteKey]: childTree
            };
        } else {
            slots[parallelRouteKey] = childTree;
        }
    }
    const originalSegment = flightRouterState[0];
    let segment;
    let param = null;
    if (Array.isArray(originalSegment)) {
        // Dynamic segment: a [name, paramCacheKey, paramType] tuple. Recover
        // the param value from its cache key.
        const paramCacheKey = originalSegment[1];
        const paramType = originalSegment[2];
        const paramValue = getParamValueFromCacheKey(paramCacheKey, paramType);
        param = {
            name: originalSegment[0],
            value: paramValue === undefined ? null : paramValue,
            type: originalSegment[2]
        };
        segment = originalSegment;
    } else {
        // The navigation implementation expects the search params to be included
        // in the segment. However, in the case of a static response, the search
        // params are omitted. So the client needs to add them back in when reading
        // from the Segment Cache.
        //
        // For consistency, we'll do this for dynamic responses, too.
        //
        // TODO: We should move search params out of FlightRouterState and handle
        // them entirely on the client, similar to our plan for dynamic params.
        segment = typeof originalSegment === 'string' && originalSegment.startsWith(PAGE_SEGMENT_KEY) ? PAGE_SEGMENT_KEY : originalSegment;
    }
    return {
        cacheKey,
        requestKey,
        segment,
        param,
        slots,
        isRootLayout: flightRouterState[4] === true,
        hasLoadingBoundary: flightRouterState[5] !== undefined ? flightRouterState[5] : HasLoadingBoundary.SubtreeHasNoLoadingBoundary,
        // Non-static tree responses are only used by apps that haven't adopted
        // Cache Components. So this is always false.
        hasRuntimePrefetch: false
    };
}
/**
 * Recursively converts a cached RouteTree back into the FlightRouterState
 * tuple format ([segment, parallelRoutes, null, null, isRootLayout]) used by
 * the router reducer.
 */ export function convertRouteTreeToFlightRouterState(routeTree) {
    const parallelRoutes = {};
    const slots = routeTree.slots;
    if (slots !== null) {
        for (const parallelRouteKey of Object.keys(slots)){
            parallelRoutes[parallelRouteKey] = convertRouteTreeToFlightRouterState(slots[parallelRouteKey]);
        }
    }
    return [
        routeTree.segment,
        parallelRoutes,
        null,
        null,
        routeTree.isRootLayout
    ];
}
/**
 * Issues the network request for a route tree when the route cache misses.
 *
 * The result is written directly into `entry` — either fulfilled with the
 * decoded route tree, or rejected with a timestamp (Date.now() + 10s) after
 * which the fetch may be retried. Nothing is returned to the caller except a
 * `closed` promise used by the scheduler to track concurrent connections.
 *
 * @param entry - The pending route cache entry to fulfill or reject.
 * @param task - The prefetch task that triggered this fetch (used only on the
 *   non-PPR path when writing the dynamic tree response into the cache).
 * @param key - Request key carrying pathname, search, and the Next-Url value.
 * @returns `{ value: null, closed }` on success, where `closed` resolves when
 *   the network connection closes; `null` on any error or cache miss.
 */
export async function fetchRouteOnCacheMiss(entry, task, key) {
    // This function is allowed to use async/await because it contains the actual
    // fetch that gets issued on a cache miss. Notice it writes the result to the
    // cache entry directly, rather than return data that is then written by
    // the caller.
    const pathname = key.pathname;
    const search = key.search;
    const nextUrl = key.nextUrl;
    // '/_tree' requests the route tree, as opposed to an individual segment.
    const segmentPath = '/_tree';
    const headers = {
        [RSC_HEADER]: '1',
        [NEXT_ROUTER_PREFETCH_HEADER]: '1',
        [NEXT_ROUTER_SEGMENT_PREFETCH_HEADER]: segmentPath
    };
    if (nextUrl !== null) {
        headers[NEXT_URL] = nextUrl;
    }
    try {
        const url = new URL(pathname + search, location.origin);
        let response;
        let urlAfterRedirects;
        if (isOutputExportMode) {
            // In output: "export" mode, we can't use headers to request a particular
            // segment. Instead, we encode the extra request information into the URL.
            // This is not part of the "public" interface of the app; it's an internal
            // Next.js implementation detail that the app developer should not need to
            // concern themselves with.
            //
            // For example, to request a segment:
            //
            //   Path passed to <Link>: /path/to/page
            //   Path passed to fetch: /path/to/page/__next-segments/_tree
            //
            // (This is not the exact protocol, just an illustration.)
            //
            // Before we do that, though, we need to account for redirects. Even in
            // output: "export" mode, a proxy might redirect the page to a different
            // location, but we shouldn't assume or expect that they also redirect all
            // the segment files, too.
            //
            // To check whether the page is redirected, we perform a range request of
            // the first N bytes of the HTML document. The canonical URL is determined
            // from the response.
            //
            // Then we can use the canonical URL to request the route tree.
            //
            // NOTE: We could embed the route tree into the HTML document, to avoid
            // a second request. We're not doing that currently because it would make
            // the HTML document larger and affect normal page loads.
            const htmlResponse = await fetch(url, {
                headers: {
                    Range: DOC_PREFETCH_RANGE_HEADER_VALUE
                }
            });
            const partialHtml = await htmlResponse.text();
            if (!doesExportedHtmlMatchBuildId(partialHtml, getAppBuildId())) {
                // The target page is not part of this app, or it belongs to a
                // different build. Reject with a 10-second retry window.
                rejectRouteCacheEntry(entry, Date.now() + 10 * 1000);
                return null;
            }
            urlAfterRedirects = htmlResponse.redirected ? new URL(htmlResponse.url) : url;
            response = await fetchPrefetchResponse(addSegmentPathToUrlInOutputExportMode(urlAfterRedirects, segmentPath), headers);
        } else {
            // "Server" mode. We can use request headers instead of the pathname.
            // TODO: The eventual plan is to get rid of our custom request headers and
            // encode everything into the URL, using a similar strategy to the
            // "output: export" block above.
            response = await fetchPrefetchResponse(url, headers);
            urlAfterRedirects = response !== null && response.redirected ? new URL(response.url) : url;
        }
        if (!response || !response.ok || // 204 is a Cache miss. Though theoretically this shouldn't happen when
        // PPR is enabled, because we always respond to route tree requests, even
        // if it needs to be blockingly generated on demand.
        response.status === 204 || !response.body) {
            // Server responded with an error, or with a miss. We should still cache
            // the response, but we can try again after 10 seconds.
            rejectRouteCacheEntry(entry, Date.now() + 10 * 1000);
            return null;
        }
        // TODO: The canonical URL is the href without the origin. I think
        // historically the reason for this is because the initial canonical URL
        // gets passed as a prop to the top-level React component, which means it
        // needs to be computed during SSR. If it were to include the origin, it
        // would need to always be same as location.origin on the client, to prevent
        // a hydration mismatch. To sidestep this complexity, we omit the origin.
        //
        // However, since this is neither a native URL object nor a fully qualified
        // URL string, we need to be careful about how we use it. To prevent subtle
        // mistakes, we should create a special type for it, instead of just string.
        // Or, we should just use a (readonly) URL object instead. The type of the
        // prop that we pass to seed the initial state does not need to be the same
        // type as the state itself.
        const canonicalUrl = createHrefFromUrl(urlAfterRedirects);
        // Check whether the response varies based on the Next-Url header.
        const varyHeader = response.headers.get('vary');
        const couldBeIntercepted = varyHeader !== null && varyHeader.includes(NEXT_URL);
        // Track when the network connection closes.
        const closed = createPromiseWithResolvers();
        // This checks whether the response was served from the per-segment cache,
        // rather than the old prefetching flow. If it fails, it implies that PPR
        // is disabled on this route.
        const routeIsPPREnabled = response.headers.get(NEXT_DID_POSTPONE_HEADER) === '2' || // In output: "export" mode, we can't rely on response headers. But if we
        // receive a well-formed response, we can assume it's a static response,
        // because all data is static in this mode.
        isOutputExportMode;
        // Regardless of the type of response, we will never receive dynamic
        // metadata as part of this prefetch request.
        const isHeadDynamic = false;
        if (routeIsPPREnabled) {
            // Wrap the body so we resolve `closed` when the stream finishes and
            // report incremental response size back into the cache map.
            const prefetchStream = createPrefetchResponseStream(response.body, closed.resolve, function onResponseSizeUpdate(size) {
                setSizeInCacheMap(entry, size);
            });
            const serverData = await createFromNextReadableStream(prefetchStream, headers);
            if (serverData.buildId !== getAppBuildId()) {
                // The server build does not match the client. Treat as a 404. During
                // an actual navigation, the router will trigger an MPA navigation.
                // TODO: Consider moving the build ID to a response header so we can check
                // it before decoding the response, and so there's one way of checking
                // across all response types.
                // TODO: We should cache the fact that this is an MPA navigation.
                rejectRouteCacheEntry(entry, Date.now() + 10 * 1000);
                return null;
            }
            // Get the params that were used to render the target page. These may
            // be different from the params in the request URL, if the page
            // was rewritten.
            const renderedPathname = getRenderedPathname(response);
            const renderedSearch = getRenderedSearch(response);
            const routeTree = convertRootTreePrefetchToRouteTree(serverData, renderedPathname);
            const staleTimeMs = getStaleTimeMs(serverData.staleTime);
            fulfillRouteCacheEntry(entry, routeTree, serverData.head, serverData.isHeadPartial, Date.now() + staleTimeMs, couldBeIntercepted, canonicalUrl, renderedSearch, routeIsPPREnabled, isHeadDynamic);
        } else {
            // PPR is not enabled for this route. The server responds with a
            // different format (FlightRouterState) that we need to convert.
            // TODO: We will unify the responses eventually. I'm keeping the types
            // separate for now because FlightRouterState has so many
            // overloaded concerns.
            const prefetchStream = createPrefetchResponseStream(response.body, closed.resolve, function onResponseSizeUpdate(size) {
                setSizeInCacheMap(entry, size);
            });
            const serverData = await createFromNextReadableStream(prefetchStream, headers);
            // NOTE: the non-PPR payload abbreviates the build id field as `b`,
            // unlike the PPR payload's `buildId` above.
            if (serverData.b !== getAppBuildId()) {
                // The server build does not match the client. Treat as a 404. During
                // an actual navigation, the router will trigger an MPA navigation.
                // TODO: Consider moving the build ID to a response header so we can check
                // it before decoding the response, and so there's one way of checking
                // across all response types.
                // TODO: We should cache the fact that this is an MPA navigation.
                rejectRouteCacheEntry(entry, Date.now() + 10 * 1000);
                return null;
            }
            writeDynamicTreeResponseIntoCache(Date.now(), task, // The non-PPR response format is what we'd get if we prefetched these segments
            // using the LoadingBoundary fetch strategy, so mark their cache entries accordingly.
            FetchStrategy.LoadingBoundary, response, serverData, entry, couldBeIntercepted, canonicalUrl, routeIsPPREnabled);
        }
        if (!couldBeIntercepted) {
            // This route will never be intercepted. So we can use this entry for all
            // requests to this route, regardless of the Next-Url header. This works
            // because when reading the cache we always check for a valid
            // non-intercepted entry first.
            // Re-key the entry. The `set` implementation handles removing it from
            // its previous position in the cache. We don't need to do anything to
            // update the LRU, because the entry is already in it.
            // TODO: Treat this as an upsert — should check if an entry already
            // exists at the new keypath, and if so, whether we should keep that
            // one instead.
            const newKeypath = [
                pathname,
                search,
                Fallback
            ];
            const isRevalidation = false;
            setInCacheMap(routeCacheMap, newKeypath, entry, isRevalidation);
        }
        // Return a promise that resolves when the network connection closes, so
        // the scheduler can track the number of concurrent network connections.
        return {
            value: null,
            closed: closed.promise
        };
    } catch (error) {
        // Either the connection itself failed, or something bad happened while
        // decoding the response.
        // Intentionally swallowed: failures are surfaced by rejecting the cache
        // entry (with a 10-second retry window) rather than by rethrowing, so a
        // failed prefetch never propagates to the scheduler.
        rejectRouteCacheEntry(entry, Date.now() + 10 * 1000);
        return null;
    }
}
export async function fetchSegmentOnCacheMiss(route, segmentCacheEntry, routeKey, tree) {
// This function is allowed to use async/await because it contains the actual
// fetch that gets issued on a cache miss. Notice it writes the result to the
// cache entry directly, rather than return data that is then written by
// the caller.
//
// Segment fetches are non-blocking so we don't need to ping the scheduler
// on completion.
// Use the canonical URL to request the segment, not the original URL. These
// are usually the same, but the canonical URL will be different if the route
// tree response was redirected. To avoid an extra waterfall on every segment
// request, we pass the redirected URL instead of the original one.
const url = new URL(route.canonicalUrl, location.origin);
const nextUrl = routeKey.nextUrl;
const requestKey = tree.requestKey;
const normalizedRequestKey = requestKey === ROOT_SEGMENT_REQUEST_KEY ? // handling of these requests, we encode the root segment path as
// `_index` instead of as an empty string. This should be treated as
// an implementation detail and not as a stable part of the protocol.
// It just needs to match the equivalent logic that happens when
// prerendering the responses. It should not leak outside of Next.js.
'/_index' : requestKey;
const headers = {
[RSC_HEADER]: '1',
[NEXT_ROUTER_PREFETCH_HEADER]: '1',
[NEXT_ROUTER_SEGMENT_PREFETCH_HEADER]: normalizedRequestKey
};
if (nextUrl !== null) {
headers[NEXT_URL] = nextUrl;
}
const requestUrl = isOutputExportMode ? addSegmentPathToUrlInOutputExportMode(url, normalizedRequestKey) : url;
try {
const response = await fetchPrefetchResponse(requestUrl, headers);
if (!response || !response.ok || response.status === 204 || // Cache miss
// This checks whether the response was served from the per-segment cache,
// rather than the old prefetching flow. If it fails, it implies that PPR
// is disabled on this route. Theoretically this should never happen
// because we only issue requests for segments once we've verified that
// the route supports PPR.
response.headers.get(NEXT_DID_POSTPONE_HEADER) !== '2' && // In output: "export" mode, we can't rely on response headers. But if
// we receive a well-formed response, we can assume it's a static
// response, because all data is static in this mode.
!isOutputExportMode || !response.body) {
// Server responded with an error, or with a miss. We should still cache
// the response, but we can try again after 10 seconds.
rejectSegmentCacheEntry(segmentCacheEntry, Date.now() + 10 * 1000);
return null;
}
// Track when the network connection closes.
const closed = createPromiseWithResolvers();
// Wrap the original stream in a new stream that never closes. That way the
// Flight client doesn't error if there's a hanging promise.
const prefetchStream = createPrefetchResponseStream(response.body, closed.resolve, function onResponseSizeUpdate(size) {
setSizeInCacheMap(segmentCacheEntry, size);
});
const serverData = await createFromNextReadableStream(prefetchStream, headers);
if (serverData.buildId !== getAppBuildId()) {
// The server build does not match the client. Treat as a 404. During
// an actual navigation, the router will trigger an MPA navigation.
// TODO: Consider moving the build ID to a response header so we can check
// it before decoding the response, and so there's one way of checking
// across all response types.
rejectSegmentCacheEntry(segmentCacheEntry, Date.now() + 10 * 1000);
return null;
}
return {
value: fulfillSegmentCacheEntry(segmentCacheEntry, serverData.rsc, serverData.loading, // TODO: The server does not currently provi