[Segment Cache] Send <head> during route prefetch
The <head> does not belong to any particular segment; it represents the
entire page. Except in the case where two URLs rewrite to the same
result, it's very unlikely to benefit from deduplication, so there's
no advantage to caching it separately from the route tree. Instead, we'll
send it in the same response as the route tree.

Since the head may contain dynamic data, the route tree response may
now contain hanging promises, so we'll need to use the same
AbortController trick that we use for the segments.
acdlite committed Nov 16, 2024
1 parent 359d4bb commit e379e5e
Showing 1 changed file with 22 additions and 26 deletions.
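
As a rough illustration of the approach described in the commit message, here is a minimal TypeScript sketch of the two ideas: the `/_tree` payload now carries the head alongside the route tree, and a cached prerender is re-encoded under an AbortController that fires after one task so hanging promises from dynamic data don't block the stream. The `TreePrefetch` shape, the `encode` callback, and the `prerenderCached` helper below are hypothetical stand-ins for illustration, not the actual Next.js implementation.

import type { ReactNode } from 'react'
import { setTimeout as afterTimeout } from 'node:timers/promises'

// Hypothetical shape for illustration; the real TreePrefetch lives in the
// Next.js source.
type TreePrefetch = {
  segment: string
  slots: { [parallelRouteKey: string]: TreePrefetch } | null
}

// After this change, the `/_tree` response carries the head along with the
// route tree instead of caching it as a separate entry.
type RootTreePrefetch = {
  tree: TreePrefetch
  head: ReactNode | null
  staleTime: number
}

// The "AbortController trick": we are only re-encoding data that was already
// prerendered, so anything still pending after one task must be a hanging
// promise backed by dynamic data. Abort it instead of waiting on it.
async function prerenderCached(
  model: RootTreePrefetch,
  encode: (model: RootTreePrefetch, signal: AbortSignal) => Promise<Uint8Array>
): Promise<Uint8Array> {
  const controller = new AbortController()
  // Wait at least one (macro)task, then abort whatever is still hanging.
  afterTimeout(0).then(() => controller.abort())
  return encode(model, controller.signal)
}

This mirrors the new prerenderCachedData helper added at the bottom of the diff below.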
48 changes: 22 additions & 26 deletions packages/next/src/server/app-render/collect-segment-data.tsx
@@ -23,6 +23,7 @@ import type { LoadingModuleData } from '../../shared/lib/app-router-context.shar
 // it can fetch any actual segment data.
 type RootTreePrefetch = {
   tree: TreePrefetch
+  head: React.ReactNode | null
   staleTime: number
 }

@@ -86,7 +87,7 @@ export async function collectSegmentData(
   // The promises for these tasks are pushed to a mutable array that we will
   // await once the route tree is fully rendered.
   const segmentTasks: Array<Promise<[string, Buffer]>> = []
-  const { prelude: treeStream } = await prerender(
+  const { prelude: treeStream } = await prerenderCachedData(
     // RootTreePrefetch is not a valid return type for a React component, but
     // we need to use a component so that when we decode the original stream
     // inside of it, the side effects are transferred to the new stream.
@@ -98,17 +99,7 @@
       staleTime={staleTime}
       segmentTasks={segmentTasks}
     />,
-    clientModules,
-    {
-      // Unlike when rendering the segment streams, we do not pass an abort
-      // controller here. There's nothing dynamic in the prefetch metadata; we
-      // will always render the result. We do still have to account for hanging
-      // promises, but we use a different strategy. See PrefetchTreeData.
-      onError() {
-        // Ignore any errors. These would have already been reported when
-        // we created the full page data.
-      },
-    }
+    clientModules
   )

   // Write the route tree to a special `/_tree` segment.
@@ -161,6 +152,7 @@ async function PrefetchTreeData({
   }
   const flightRouterState: FlightRouterState = flightDataPaths[0][0]
   const seedData: CacheNodeSeedData = flightDataPaths[0][1]
+  const head: React.ReactNode | null = flightDataPaths[0][2]

   // Compute the route metadata tree by traversing the FlightRouterState. As we
   // walk the tree, we will also spawn a task to produce a prefetch response for
@@ -179,6 +171,7 @@
   // Render the route tree to a special `/_tree` segment.
   const treePrefetch: RootTreePrefetch = {
     tree,
+    head,
     staleTime,
   }
   return treePrefetch
@@ -275,21 +268,9 @@ async function renderSegmentPrefetch(
     rsc,
     loading,
   }
-  // Since all we're doing is decoding and re-encoding a cached prerender, if
-  // it takes longer than a microtask, it must because of hanging promises
-  // caused by dynamic data. Abort the stream at the end of the current task.
-  const abortController = new AbortController()
-  waitAtLeastOneReactRenderTask().then(() => abortController.abort())
-  const { prelude: segmentStream } = await prerender(
+  const { prelude: segmentStream } = await prerenderCachedData(
     segmentPrefetch,
-    clientModules,
-    {
-      signal: abortController.signal,
-      onError() {
-        // Ignore any errors. These would have already been reported when
-        // we created the full page data.
-      },
-    }
+    clientModules
   )
   const segmentBuffer = await streamToBuffer(segmentStream)
   // Add the buffer to the result map.
@@ -453,3 +434,18 @@ function createUnclosingPrefetchStream(
     },
   })
 }
+
+async function prerenderCachedData(model: any, clientModules: ManifestNode) {
+  // Since all we're doing is re-encoding a cached prerender, if it takes longer
+  // than a microtask, it must because of hanging promises caused by dynamic
+  // data. Abort the stream at the end of the current task.
+  const abortController = new AbortController()
+  waitAtLeastOneReactRenderTask().then(() => abortController.abort())
+  return await prerender(model, clientModules, {
+    signal: abortController.signal,
+    onError() {
+      // Ignore any errors. These would have already been reported when
+      // we created the full page data.
+    },
+  })
+}
