10
10
11
11
// Suite-scope bindings shared by every test in this file. They are
// deliberately `let` (not `const`) because they are (re)assigned in the
// setup block below via require(), so each test observes freshly loaded
// modules rather than state leaked from a previous test.
let React;
let ReactNoop;
// Mock scheduler; tests record entries with Scheduler.log(...).
let Scheduler;
let act;
// Drains the Scheduler log and asserts its exact contents/order.
let assertLog;
let useState;
let useMemoCache;
// Sentinel marking "slot never written": Symbol.for('react.memo_cache_sentinel').
let MemoCacheSentinel;
@@ -22,7 +24,9 @@ describe('useMemoCache()', () => {
// Setup: (re)load the renderer, scheduler, and test utilities, and grab
// the hooks under test. Assigning into the suite-scope `let` bindings
// keeps each test isolated from module state mutated by earlier tests.
React = require('react');
ReactNoop = require('react-noop-renderer');
Scheduler = require('scheduler');
act = require('internal-test-utils').act;
assertLog = require('internal-test-utils').assertLog;
useState = React.useState;
// NOTE(review): unstable/experimental API — the compiler-facing memo cache hook.
useMemoCache = React.unstable_useMemoCache;
MemoCacheSentinel = Symbol.for('react.memo_cache_sentinel');
@@ -363,4 +367,258 @@ describe('useMemoCache()', () => {
363
367
expect ( Text ) . toBeCalledTimes ( 3 ) ;
364
368
expect ( data ) . toBe ( data1 ) ; // confirm that the cache persisted across renders
365
369
} ) ;
370
+
371
// @gate enableUseMemoCacheHook
test('reuses computations from suspended/interrupted render attempts during an update', async () => {
  // This test demonstrates the benefit of a shared memo cache. By "shared" I
  // mean multiple concurrent render attempts of the same component/hook use
  // the same cache. (When the feature flag is off, we don't do this — the
  // cache is copy-on-write.)
  //
  // If an update is interrupted, either because it suspended or because of
  // another update, we can reuse the memoized computations from the previous
  // attempt. We can do this because the React Compiler performs atomic writes
  // to the memo cache, i.e. it will not record the inputs to a memoization
  // without also recording its output.
  //
  // This gives us a form of "resuming" within components and hooks.
  //
  // This only works when updating a component that already mounted. It has no
  // impact during initial render, because the memo cache is stored on the
  // fiber, and since we have not implemented resuming for fibers, it's always
  // a fresh memo cache, anyway.
  //
  // However, this alone is pretty useful — it happens whenever you update the
  // UI with fresh data after a mutation/action, which is extremely common in
  // a Suspense-driven (e.g. RSC or Relay) app. That's the scenario that this
  // test simulates.
  //
  // So the impact of this feature is faster data mutations/actions.

  // Logs each invocation so assertLog can count how many times the
  // "expensive" work actually ran across render attempts.
  function someExpensiveProcessing(t) {
    Scheduler.log(`Some expensive processing... [${t}]`);
    return t;
  }

  // Wraps React.use so that a suspension on `t` leaves a labeled entry in
  // the Scheduler log before rethrowing the thenable/promise.
  function useWithLog(t, msg) {
    try {
      return React.use(t);
    } catch (x) {
      Scheduler.log(`Suspend! [${msg}]`);
      throw x;
    }
  }

  // Original code:
  //
  // function Data({chunkA, chunkB}) {
  //   const a = someExpensiveProcessing(useWithLog(chunkA, 'chunkA'));
  //   const b = useWithLog(chunkB, 'chunkB');
  //   return (
  //     <>
  //       {a}
  //       {b}
  //     </>
  //   );
  // }
  //
  // function Input() {
  //   const [input, _setText] = useState('');
  //   return input;
  // }
  //
  // function App({chunkA, chunkB}) {
  //   return (
  //     <>
  //       <div>
  //         Input: <Input />
  //       </div>
  //       <div>
  //         Data: <Data chunkA={chunkA} chunkB={chunkB} />
  //       </div>
  //     </>
  //   );
  // }

  // Hand-written equivalent of the React Compiler output for Data above:
  // slots $[0..1] memoize the expensive processing keyed on chunkA's value,
  // slots $[2..4] memoize the JSX keyed on (a, b).
  function Data(t0) {
    const $ = useMemoCache(5);
    const {chunkA, chunkB} = t0;
    const t1 = useWithLog(chunkA, 'chunkA');
    let t2;

    if ($[0] !== t1) {
      t2 = someExpensiveProcessing(t1);
      // Atomic write: input ($[0]) and output ($[1]) recorded together, so
      // an aborted attempt never leaves a half-written cache entry.
      $[0] = t1;
      $[1] = t2;
    } else {
      t2 = $[1];
    }

    const a = t2;
    const b = useWithLog(chunkB, 'chunkB');
    let t3;

    if ($[2] !== a || $[3] !== b) {
      t3 = (
        <>
          {a}
          {b}
        </>
      );
      $[2] = a;
      $[3] = b;
      $[4] = t3;
    } else {
      t3 = $[4];
    }

    return t3;
  }

  // Exposes the state setter so the test can interrupt the transition with
  // an unrelated (sync-priority) update.
  let setInput;
  function Input() {
    const [input, _set] = useState('');
    setInput = _set;
    return input;
  }

  // Compiler-output-style App: $[0] memoizes the static <Input/> subtree
  // once (sentinel check), $[1..3] memoize the rest keyed on the chunks.
  function App(t0) {
    const $ = useMemoCache(4);
    const {chunkA, chunkB} = t0;
    let t1;

    if ($[0] === Symbol.for('react.memo_cache_sentinel')) {
      t1 = (
        <div>
          Input: <Input />
        </div>
      );
      $[0] = t1;
    } else {
      t1 = $[0];
    }

    let t2;

    if ($[1] !== chunkA || $[2] !== chunkB) {
      t2 = (
        <>
          {t1}
          <div>
            Data: <Data chunkA={chunkA} chunkB={chunkB} />
          </div>
        </>
      );
      $[1] = chunkA;
      $[2] = chunkB;
      $[3] = t2;
    } else {
      t2 = $[3];
    }

    return t2;
  }

  // Already-fulfilled thenable in the shape React.use understands
  // (status/value), so the initial render does not suspend.
  function createInstrumentedResolvedPromise(value) {
    return {
      then() {},
      status: 'fulfilled',
      value,
    };
  }

  // Promise with its resolve function attached, so the test can resolve it
  // later to simulate a chunk arriving on a stream.
  function createDeferred() {
    let resolve;
    const p = new Promise(res => {
      resolve = res;
    });
    p.resolve = resolve;
    return p;
  }

  // Initial render. We pass the data in as two separate "chunks" to simulate
  // a stream (e.g. RSC).
  const root = ReactNoop.createRoot();
  const initialChunkA = createInstrumentedResolvedPromise('A1');
  const initialChunkB = createInstrumentedResolvedPromise('B1');
  await act(() =>
    root.render(<App chunkA={initialChunkA} chunkB={initialChunkB} />),
  );
  assertLog(['Some expensive processing... [A1]']);
  expect(root).toMatchRenderedOutput(
    <>
      <div>Input: </div>
      <div>Data: A1B1</div>
    </>,
  );

  // Update the UI in a transition. This would happen after a data mutation.
  const updatedChunkA = createDeferred();
  const updatedChunkB = createDeferred();
  await act(() => {
    React.startTransition(() => {
      root.render(<App chunkA={updatedChunkA} chunkB={updatedChunkB} />);
    });
  });
  assertLog(['Suspend! [chunkA]']);

  // The data starts to stream in. Loading the data in the first chunk
  // triggers an expensive computation in the UI. Later, we'll test whether
  // this computation is reused.
  await act(() => updatedChunkA.resolve('A2'));
  assertLog(['Some expensive processing... [A2]', 'Suspend! [chunkB]']);

  // The second chunk hasn't loaded yet, so we're still showing the
  // initial UI.
  expect(root).toMatchRenderedOutput(
    <>
      <div>Input: </div>
      <div>Data: A1B1</div>
    </>,
  );

  // While waiting for the data to finish loading, update a different part of
  // the screen. This interrupts the refresh transition.
  //
  // In a real app, this might be an input or hover event.
  await act(() => setInput('hi!'));

  // Once the input has updated, we go back to rendering the transition.
  if (gate(flags => flags.enableNoCloningMemoCache)) {
    // We did not have to process the first chunk again. We reused the
    // computation from the earlier attempt.
    assertLog(['Suspend! [chunkB]']);
  } else {
    // Because we clone/reset the memo cache after every aborted attempt, we
    // must process the first chunk again.
    assertLog(['Some expensive processing... [A2]', 'Suspend! [chunkB]']);
  }

  expect(root).toMatchRenderedOutput(
    <>
      <div>Input: hi!</div>
      <div>Data: A1B1</div>
    </>,
  );

  // Finish loading the data.
  await act(() => updatedChunkB.resolve('B2'));
  if (gate(flags => flags.enableNoCloningMemoCache)) {
    // We did not have to process the first chunk again. We reused the
    // computation from the earlier attempt.
    assertLog([]);
  } else {
    // Because we clone/reset the memo cache after every aborted attempt, we
    // must process the first chunk again.
    //
    // That's three total times we've processed the first chunk, compared to
    // just once when enableNoCloningMemoCache is on.
    assertLog(['Some expensive processing... [A2]']);
  }
  expect(root).toMatchRenderedOutput(
    <>
      <div>Input: hi!</div>
      <div>Data: A2B2</div>
    </>,
  );
});
366
624
} ) ;