
Commit 2f185ff

Retain in-progress caches on the root, per lane
All the data that loads as a result of a single transition/update should share the same cache. This includes nested content that gets progressively "filled in" after the initial shell is displayed.

If the shell itself were wrapped in a Cache boundary, such that the cache can commit without suspending, then this is easy: once the boundary mounts, the cache is attached to the React tree. The tricky part is when the shell does not include a cache boundary. In the naive approach, since the cache is not part of the initial tree, it does not get retained; during the retry, a fresh cache is created, leading to duplicate requests and possibly an infinite loop as requests are endlessly created then discarded. This is the essential problem we faced several years ago when building Simple Cache Provider (later the react-cache package).

Our solution is to retain in-flight caches on the root, associated by lane. The cache is cleared from the root once all of the lanes that depend on it finish rendering. Because progressively rendering nested boundaries ("retry" updates) uses a different lane from the update that spawned it, we must take extra care to transfer the cache to the new lane when scheduling the retry.
1 parent 299405f commit 2f185ff
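
As a rough orientation before the diffs, here is a small standalone model of the bookkeeping described in the commit message: a lane-indexed map of in-flight caches held on the root, a transfer step for retries, and cleanup once the dependent lanes finish. Everything in this snippet (ModelRoot, retainCache, finishLanes) is invented for illustration and is not React source; the real implementation is the ReactFiberLane changes below.

// Standalone illustration only — not React source. Lanes are bits in a
// 31-bit mask; each bit indexes one slot in a per-root lane map.
const TotalLanes = 31;

function createLaneMap(initial) {
  return new Array(TotalLanes).fill(initial);
}

function laneToIndex(lane) {
  // Index of the (single) set bit in `lane`.
  return 31 - Math.clz32(lane);
}

class ModelRoot {
  constructor() {
    // One in-flight cache slot per lane, mirroring root.caches in the diff.
    this.caches = createLaneMap(null);
  }

  // Associate an in-flight cache with every lane in the `lanes` bitmask,
  // so a later render of those lanes reuses it instead of refetching.
  retainCache(lanes, cache) {
    while (lanes > 0) {
      const index = 31 - Math.clz32(lanes); // highest set bit
      const lane = 1 << index;
      this.caches[index] = cache;
      lanes &= ~lane;
    }
  }

  // A nested boundary retries on a different lane than the update that
  // spawned it; move the cache over so the retry sees the same data.
  transferToSpawnedLane(cache, retryLane) {
    const index = laneToIndex(retryLane);
    if (this.caches[index] === null) {
      this.caches[index] = cache;
    }
  }

  // Once the lanes that depend on a cache finish rendering, release it so
  // subsequent loads in those lanes get a fresh cache.
  finishLanes(finishedLanes) {
    while (finishedLanes > 0) {
      const index = 31 - Math.clz32(finishedLanes);
      const lane = 1 << index;
      this.caches[index] = null;
      finishedLanes &= ~lane;
    }
  }
}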

10 files changed: +564 −61 lines

packages/react-reconciler/src/ReactFiberCommitWork.new.js

Lines changed: 23 additions & 7 deletions
@@ -24,6 +24,7 @@ import type {FunctionComponentUpdateQueue} from './ReactFiberHooks.new';
 import type {Wakeable} from 'shared/ReactTypes';
 import type {ReactPriorityLevel} from './ReactInternalTypes';
 import type {OffscreenState} from './ReactFiberOffscreenComponent';
+import type {Cache} from './ReactFiberCacheComponent';

 import {unstable_wrap as Schedule_tracing_wrap} from 'scheduler/tracing';
 import {
@@ -55,6 +56,7 @@ import {
   ScopeComponent,
   OffscreenComponent,
   LegacyHiddenComponent,
+  CacheComponent,
 } from './ReactWorkTags';
 import {
   invokeGuardedCallback,
@@ -1488,7 +1490,11 @@ function commitDeletion(
   }
 }

-function commitWork(current: Fiber | null, finishedWork: Fiber): void {
+function commitWork(
+  current: Fiber | null,
+  finishedWork: Fiber,
+  cache: Cache | null,
+): void {
   if (!supportsMutation) {
     switch (finishedWork.tag) {
       case FunctionComponent:
@@ -1524,11 +1530,11 @@ function commitWork(current: Fiber | null, finishedWork: Fiber): void {
       }
       case SuspenseComponent: {
         commitSuspenseComponent(finishedWork);
-        attachSuspenseRetryListeners(finishedWork);
+        attachSuspenseRetryListeners(finishedWork, cache);
         return;
       }
       case SuspenseListComponent: {
-        attachSuspenseRetryListeners(finishedWork);
+        attachSuspenseRetryListeners(finishedWork, cache);
         return;
       }
       case HostRoot: {
@@ -1639,11 +1645,11 @@ function commitWork(current: Fiber | null, finishedWork: Fiber): void {
     }
     case SuspenseComponent: {
       commitSuspenseComponent(finishedWork);
-      attachSuspenseRetryListeners(finishedWork);
+      attachSuspenseRetryListeners(finishedWork, cache);
       return;
     }
     case SuspenseListComponent: {
-      attachSuspenseRetryListeners(finishedWork);
+      attachSuspenseRetryListeners(finishedWork, cache);
       return;
     }
     case IncompleteClassComponent: {
@@ -1672,6 +1678,8 @@ function commitWork(current: Fiber | null, finishedWork: Fiber): void {
       hideOrUnhideAllChildren(finishedWork, isHidden);
       return;
     }
+    case CacheComponent:
+      return;
   }
   invariant(
     false,
@@ -1747,7 +1755,10 @@ function commitSuspenseHydrationCallbacks(
   }
 }

-function attachSuspenseRetryListeners(finishedWork: Fiber) {
+function attachSuspenseRetryListeners(
+  finishedWork: Fiber,
+  cache: Cache | null,
+) {
   // If this boundary just timed out, then it will have a set of wakeables.
   // For each wakeable, attach a listener so that when it resolves, React
   // attempts to re-render the boundary in the primary (pre-timeout) state.
@@ -1760,7 +1771,12 @@ function attachSuspenseRetryListeners(finishedWork: Fiber) {
     }
     wakeables.forEach(wakeable => {
       // Memoize using the boundary fiber to prevent redundant listeners.
-      let retry = resolveRetryWakeable.bind(null, finishedWork, wakeable);
+      let retry = resolveRetryWakeable.bind(
+        null,
+        finishedWork,
+        wakeable,
+        cache,
+      );
       if (!retryCache.has(wakeable)) {
         if (enableSchedulerTracing) {
           if (wakeable.__reactDoNotTraceInteractions !== true) {

packages/react-reconciler/src/ReactFiberCommitWork.old.js

Lines changed: 23 additions & 7 deletions
@@ -24,6 +24,7 @@ import type {FunctionComponentUpdateQueue} from './ReactFiberHooks.old';
 import type {Wakeable} from 'shared/ReactTypes';
 import type {ReactPriorityLevel} from './ReactInternalTypes';
 import type {OffscreenState} from './ReactFiberOffscreenComponent';
+import type {Cache} from './ReactFiberCacheComponent';

 import {unstable_wrap as Schedule_tracing_wrap} from 'scheduler/tracing';
 import {
@@ -56,6 +57,7 @@ import {
   ScopeComponent,
   OffscreenComponent,
   LegacyHiddenComponent,
+  CacheComponent,
 } from './ReactWorkTags';
 import {
   invokeGuardedCallback,
@@ -1489,7 +1491,11 @@ function commitDeletion(
   }
 }

-function commitWork(current: Fiber | null, finishedWork: Fiber): void {
+function commitWork(
+  current: Fiber | null,
+  finishedWork: Fiber,
+  cache: Cache | null,
+): void {
   if (!supportsMutation) {
     switch (finishedWork.tag) {
       case FunctionComponent:
@@ -1525,11 +1531,11 @@ function commitWork(current: Fiber | null, finishedWork: Fiber): void {
       }
       case SuspenseComponent: {
         commitSuspenseComponent(finishedWork);
-        attachSuspenseRetryListeners(finishedWork);
+        attachSuspenseRetryListeners(finishedWork, cache);
         return;
       }
       case SuspenseListComponent: {
-        attachSuspenseRetryListeners(finishedWork);
+        attachSuspenseRetryListeners(finishedWork, cache);
         return;
       }
       case HostRoot: {
@@ -1640,11 +1646,11 @@ function commitWork(current: Fiber | null, finishedWork: Fiber): void {
     }
     case SuspenseComponent: {
       commitSuspenseComponent(finishedWork);
-      attachSuspenseRetryListeners(finishedWork);
+      attachSuspenseRetryListeners(finishedWork, cache);
       return;
     }
     case SuspenseListComponent: {
-      attachSuspenseRetryListeners(finishedWork);
+      attachSuspenseRetryListeners(finishedWork, cache);
       return;
     }
     case IncompleteClassComponent: {
@@ -1673,6 +1679,8 @@ function commitWork(current: Fiber | null, finishedWork: Fiber): void {
       hideOrUnhideAllChildren(finishedWork, isHidden);
       return;
     }
+    case CacheComponent:
+      return;
   }
   invariant(
     false,
@@ -1748,7 +1756,10 @@ function commitSuspenseHydrationCallbacks(
   }
 }

-function attachSuspenseRetryListeners(finishedWork: Fiber) {
+function attachSuspenseRetryListeners(
+  finishedWork: Fiber,
+  cache: Cache | null,
+) {
   // If this boundary just timed out, then it will have a set of wakeables.
   // For each wakeable, attach a listener so that when it resolves, React
   // attempts to re-render the boundary in the primary (pre-timeout) state.
@@ -1761,7 +1772,12 @@ function attachSuspenseRetryListeners(finishedWork: Fiber) {
     }
     wakeables.forEach(wakeable => {
       // Memoize using the boundary fiber to prevent redundant listeners.
-      let retry = resolveRetryWakeable.bind(null, finishedWork, wakeable);
+      let retry = resolveRetryWakeable.bind(
+        null,
+        finishedWork,
+        wakeable,
+        cache,
+      );
       if (!retryCache.has(wakeable)) {
         if (enableSchedulerTracing) {
           if (wakeable.__reactDoNotTraceInteractions !== true) {

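In both copies of ReactFiberCommitWork above, the cache is bound into the retry callback via resolveRetryWakeable.bind(null, finishedWork, wakeable, cache). The ping side of that callback lives in ReactFiberWorkLoop and is not part of this excerpt. As a rough, hypothetical sketch of how the commit message's "transfer the cache to the new lane" could be wired up there (requestRetryLane, requestEventTime, and scheduleUpdateOnFiber are existing work-loop helpers, but this exact call sequence is an assumption rather than a quote of the diff):

// Hypothetical sketch only — not part of this commit's diff.
// When a suspended wakeable resolves, the boundary retries on a retry lane.
// Per the commit message, the in-flight cache must follow it to that lane.
function retryBoundaryWithCache(root, boundaryFiber, cache) {
  // Pick a lane for the retry render (retries use their own lanes).
  const retryLane = requestRetryLane(boundaryFiber);
  if (cache !== null) {
    // transferCacheToSpawnedLane is added in ReactFiberLane.new.js below:
    // it associates the existing cache with the retry lane so the retry
    // reuses it instead of creating a fresh one.
    transferCacheToSpawnedLane(root, cache, retryLane);
  }
  // Schedule the retry render on that lane.
  scheduleUpdateOnFiber(boundaryFiber, retryLane, requestEventTime());
}
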
packages/react-reconciler/src/ReactFiberLane.new.js

Lines changed: 134 additions & 14 deletions
@@ -742,6 +742,7 @@ export function markRootFinished(root: FiberRoot, remainingLanes: Lanes) {
   const entanglements = root.entanglements;
   const eventTimes = root.eventTimes;
   const expirationTimes = root.expirationTimes;
+  const pooledCache = root.pooledCache;

   // Clear the lanes that no longer have pending work
   let lanes = noLongerPendingLanes;
@@ -753,15 +754,31 @@ export function markRootFinished(root: FiberRoot, remainingLanes: Lanes) {
     eventTimes[index] = NoTimestamp;
     expirationTimes[index] = NoTimestamp;

-    lanes &= ~lane;
-  }
+    if (enableCache) {
+      // Subsequent loads in this lane should use a fresh cache.
+      // TODO: If a cache is no longer associated with any lane, we should issue
+      // an abort signal.
+      const caches = root.caches;
+      if (caches !== null) {
+        if (remainingLanes === 0) {
+          // Fast path. Clear all caches at once.
+          root.caches = createLaneMap(null);
+          root.pooledCache = null;
+        } else {
+          const cache = caches[index];
+          if (cache !== null) {
+            caches[index] = null;
+            if (cache === pooledCache) {
+              // The pooled cache is now part of the committed tree. We'll now
+              // clear it so that the next transition gets a fresh cache.
+              root.pooledCache = null;
+            }
+          }
+        }
+      }
+    }

-  if (enableCache) {
-    // Clear the pooled cache so subsequent updates get fresh data.
-    // TODO: This is very naive and only works if the shell of a cache boundary
-    // doesn't suspend. The next, key feature is to preserve caches across
-    // multiple attempts (suspend -> ping) to render a new tree.
-    root.pooledCache = null;
+    lanes &= ~lane;
   }
 }

@@ -785,12 +802,62 @@ export function requestFreshCache(root: FiberRoot, renderLanes: Lanes): Cache {
     return (null: any);
   }

-  // Check if there's a pooled cache. This is really just a batching heuristic
-  // so that two transitions that happen in a similar timeframe can share the
-  // same cache.
-  const pooledCache = root.pooledCache;
-  if (pooledCache !== null) {
-    return pooledCache;
+  // 1. Check if the currently rendering lanes already have a pending cache
+  // associated with them. If so, use this cache. If for some reason two or
+  // more lanes have different caches, pick the highest priority one.
+  // 2. Otherwise, check the root's `pooledCache`. This the oldest cache
+  // that has not yet been committed. This is really just a batching
+  // heuristic so that two transitions that happen in a similar timeframe can
+  // share the same cache. If it exists, use this cache.
+  // 3. If there's no pooled cache, create a fresh cache. This is now the
+  // pooled cache.
+
+  let caches = root.caches;
+
+  // TODO: There should be a primary render lane, and we should use whatever
+  // cache is associated with that one.
+  if (caches === null) {
+    caches = root.caches = createLaneMap(null);
+  } else {
+    let lanes = renderLanes;
+    while (lanes > 0) {
+      const lane = getHighestPriorityLanes(lanes);
+      const index = laneToIndex(lane);
+      const inProgressCache: Cache | null = caches[index];
+      if (inProgressCache !== null) {
+        // This render lane already has a cache associated with it. Reuse it.
+
+        // If the other render lanes are not already associated with a cache,
+        // associate them with this one.
+        let otherRenderLanes = renderLanes & ~lane;
+        while (otherRenderLanes > 0) {
+          const otherIndex = pickArbitraryLaneIndex(otherRenderLanes);
+          const otherLane = 1 << otherIndex;
+          // We shouldn't overwrite a cache that already exists, since that could
+          // lead to dropped requests or data, i.e. if the current render suspends.
+          if (caches[otherIndex] === null) {
+            caches[otherIndex] = inProgressCache;
+          }
+          otherRenderLanes &= ~otherLane;
+        }
+        return inProgressCache;
+      }
+      lanes &= ~lane;
+    }
+    // There are no in-progress caches associated with the current render. Check
+    // if there's a pooled cache.
+    const pooledCache = root.pooledCache;
+    if (pooledCache !== null) {
+      // Associate the pooled cache with each of the render lanes.
+      lanes = renderLanes;
+      while (lanes > 0) {
+        const index = pickArbitraryLaneIndex(lanes);
+        const lane = 1 << index;
+        caches[index] = pooledCache;
+        lanes &= ~lane;
+      }
+      return pooledCache;
+    }
   }

   // Create a fresh cache.
@@ -801,8 +868,61 @@ export function requestFreshCache(root: FiberRoot, renderLanes: Lanes): Cache {

   // This is now the pooled cache.
   root.pooledCache = freshCache;
+
+  // Associate the new cache with each of the render lanes.
+  let lanes = renderLanes;
+  while (lanes > 0) {
+    const index = pickArbitraryLaneIndex(lanes);
+    const lane = 1 << index;
+    caches[index] = freshCache;
+    lanes &= ~lane;
+  }
+
   return freshCache;
 }
+
+export function getWorkInProgressCache(
+  root: FiberRoot,
+  renderLanes: Lanes,
+): Cache | null {
+  if (enableCache) {
+    // TODO: There should be a primary render lane, and we should use whatever
+    // cache is associated with that one.
+    const caches = root.caches;
+    if (caches !== null) {
+      let lanes = renderLanes;
+      while (lanes > 0) {
+        const lane = getHighestPriorityLanes(lanes);
+        const index = laneToIndex(lane);
+        const inProgressCache: Cache | null = caches[index];
+        if (inProgressCache !== null) {
+          return inProgressCache;
+        }
+        lanes &= ~lane;
+      }
+    }
+  }
+  return null;
+}
+
+export function transferCacheToSpawnedLane(
+  root: FiberRoot,
+  cache: Cache,
+  lane: Lane,
+) {
+  const index = laneToIndex(lane);
+  let caches = root.caches;
+  if (caches !== null) {
+    const existingCache: Cache | null = caches[index];
+    if (existingCache === null) {
+      caches[index] = cache;
+    }
+  } else {
+    caches = root.caches = createLaneMap(null);
+    caches[index] = cache;
+  }
+}
+
 export function getBumpedLaneForHydration(
   root: FiberRoot,
   renderLanes: Lanes,

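A reading note for the loops above: renderLanes is a bitmask, and each `while (lanes > 0)` loop visits every set bit (lane) exactly once; React's pickArbitraryLaneIndex is implemented as 31 - clz32(lanes), the index of the highest set bit. A self-contained version of that pattern, for reference (forEachLane is a made-up name for illustration):

// Visit every lane (set bit) in a lanes bitmask exactly once.
function forEachLane(lanes, callback) {
  while (lanes > 0) {
    const index = 31 - Math.clz32(lanes); // like pickArbitraryLaneIndex
    const lane = 1 << index;
    callback(lane, index);
    lanes &= ~lane; // clear the bit we just visited
  }
}

// Example: 0b1010 has lanes at bit indices 3 and 1.
forEachLane(0b1010, (lane, index) => {
  console.log(index, lane); // logs "3 8", then "1 2"
});
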
0 commit comments