From e379e5e882b455899cb791282b5ab2ff8e71ccdd Mon Sep 17 00:00:00 2001
From: Andrew Clark
Date: Sat, 16 Nov 2024 17:16:30 -0500
Subject: [PATCH] [Segment Cache] Send <head> during route prefetch

The <head> does not belong to any particular segment; it represents the
entire page. Except in the case where two URLs rewrite to the same
result, it's very unlikely to benefit from deduplication, so there's no
benefit to caching it separately from the route tree. So we'll send it
in the same response.

Since the head may contain dynamic data, the route tree response may now
contain hanging promises, so we'll need to use the same AbortController
trick that we use for the segments.
---
 .../app-render/collect-segment-data.tsx      | 48 +++++++++----------
 1 file changed, 22 insertions(+), 26 deletions(-)

diff --git a/packages/next/src/server/app-render/collect-segment-data.tsx b/packages/next/src/server/app-render/collect-segment-data.tsx
index dc21003e487ea1..c26a0a9a306e21 100644
--- a/packages/next/src/server/app-render/collect-segment-data.tsx
+++ b/packages/next/src/server/app-render/collect-segment-data.tsx
@@ -23,6 +23,7 @@ import type { LoadingModuleData } from '../../shared/lib/app-router-context.shar
 // it can fetch any actual segment data.
 type RootTreePrefetch = {
   tree: TreePrefetch
+  head: React.ReactNode | null
   staleTime: number
 }

@@ -86,7 +87,7 @@ export async function collectSegmentData(
   // The promises for these tasks are pushed to a mutable array that we will
   // await once the route tree is fully rendered.
   const segmentTasks: Array<Promise<void>> = []
-  const { prelude: treeStream } = await prerender(
+  const { prelude: treeStream } = await prerenderCachedData(
     // RootTreePrefetch is not a valid return type for a React component, but
     // we need to use a component so that when we decode the original stream
     // inside of it, the side effects are transferred to the new stream.
@@ -98,17 +99,7 @@ export async function collectSegmentData(
       staleTime={staleTime}
       segmentTasks={segmentTasks}
     />,
-    clientModules,
-    {
-      // Unlike when rendering the segment streams, we do not pass an abort
-      // controller here. There's nothing dynamic in the prefetch metadata; we
-      // will always render the result. We do still have to account for hanging
-      // promises, but we use a different strategy. See PrefetchTreeData.
-      onError() {
-        // Ignore any errors. These would have already been reported when
-        // we created the full page data.
-      },
-    }
+    clientModules
   )

   // Write the route tree to a special `/_tree` segment.
@@ -161,6 +152,7 @@ async function PrefetchTreeData({
   }
   const flightRouterState: FlightRouterState = flightDataPaths[0][0]
   const seedData: CacheNodeSeedData = flightDataPaths[0][1]
+  const head: React.ReactNode | null = flightDataPaths[0][2]

   // Compute the route metadata tree by traversing the FlightRouterState. As we
   // walk the tree, we will also spawn a task to produce a prefetch response for
@@ -179,6 +171,7 @@ async function PrefetchTreeData({
   // Render the route tree to a special `/_tree` segment.
   const treePrefetch: RootTreePrefetch = {
     tree,
+    head,
     staleTime,
   }
   return treePrefetch
@@ -275,21 +268,9 @@ async function renderSegmentPrefetch(
     rsc,
     loading,
   }
-  // Since all we're doing is decoding and re-encoding a cached prerender, if
-  // it takes longer than a microtask, it must because of hanging promises
-  // caused by dynamic data. Abort the stream at the end of the current task.
-  const abortController = new AbortController()
-  waitAtLeastOneReactRenderTask().then(() => abortController.abort())
-  const { prelude: segmentStream } = await prerender(
+  const { prelude: segmentStream } = await prerenderCachedData(
     segmentPrefetch,
-    clientModules,
-    {
-      signal: abortController.signal,
-      onError() {
-        // Ignore any errors. These would have already been reported when
-        // we created the full page data.
-      },
-    }
+    clientModules
   )
   const segmentBuffer = await streamToBuffer(segmentStream)
   // Add the buffer to the result map.
@@ -453,3 +434,18 @@ function createUnclosingPrefetchStream(
     },
   })
 }
+
+async function prerenderCachedData(model: any, clientModules: ManifestNode) {
+  // Since all we're doing is re-encoding a cached prerender, if it takes longer
+  // than a microtask, it must be because of hanging promises caused by dynamic
+  // data. Abort the stream at the end of the current task.
+  const abortController = new AbortController()
+  waitAtLeastOneReactRenderTask().then(() => abortController.abort())
+  return await prerender(model, clientModules, {
+    signal: abortController.signal,
+    onError() {
+      // Ignore any errors. These would have already been reported when
+      // we created the full page data.
+    },
+  })
+}
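
A note for reviewers on the abort trick the commit message refers to. The sketch
below is an illustration only, not code from this patch: waitAtLeastOneTask,
encodeWithTaskDeadline, cachedSegment, and hangingDynamicData are hypothetical
stand-ins (the real helper is waitAtLeastOneReactRenderTask, and the real work
happens inside prerenderCachedData via React's prerender). It shows the invariant
the patch relies on: re-encoding data that is already cached settles within a
microtask, so anything still pending after one task must be a hanging promise
backed by dynamic data, and it is safe to abort at that point instead of waiting.

// Minimal sketch of "abort after one task" with plain promises (TypeScript).
// All names below are illustrative; none of them exist in Next.js.

// A value that is already resolved (a cached prerender) and one that will
// never resolve (dynamic data that was intentionally left out of the cache).
const cachedSegment = Promise.resolve('cached segment payload')
const hangingDynamicData = new Promise<string>(() => {})

function waitAtLeastOneTask(): Promise<void> {
  // setTimeout(0) runs after every microtask queued in the current task,
  // which is the deadline waitAtLeastOneReactRenderTask() models.
  return new Promise((resolve) => setTimeout(resolve, 0))
}

async function encodeWithTaskDeadline<T>(
  source: Promise<T>,
  fallback: T
): Promise<T> {
  const abortController = new AbortController()
  // Schedule the abort for the end of the current task. Cached data wins the
  // race; dynamic data does not, and resolves to the fallback marker instead.
  waitAtLeastOneTask().then(() => abortController.abort())

  const abortedResult = new Promise<T>((resolve) => {
    abortController.signal.addEventListener('abort', () => resolve(fallback))
  })
  return Promise.race([source, abortedResult])
}

async function main() {
  // Resolves before the deadline: "cached segment payload"
  console.log(await encodeWithTaskDeadline(cachedSegment, '<dynamic hole>'))
  // Still pending at the deadline, so the abort cuts it off: "<dynamic hole>"
  console.log(await encodeWithTaskDeadline(hangingDynamicData, '<dynamic hole>'))
}

main()

In the patch the same deadline is expressed by passing abortController.signal to
React's prerender inside prerenderCachedData, so the prefetch stream closes with
whatever resolved in time; dynamic data is intentionally omitted from the
prefetch response.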