@@ -24,6 +24,7 @@ import {
   completeWriting,
   flushBuffered,
   close,
+  closeWithError,
   processModelChunk,
   processModuleChunk,
   processSymbolChunk,
@@ -83,16 +84,20 @@ export type Request = {
   completedErrorChunks: Array<Chunk>,
   writtenSymbols: Map<Symbol, number>,
   writtenModules: Map<ModuleKey, number>,
+  onError: (error: mixed) => void,
   flowing: boolean,
   toJSON: (key: string, value: ReactModel) => ReactJSONValue,
 };
 
 const ReactCurrentDispatcher = ReactSharedInternals.ReactCurrentDispatcher;
 
+function defaultErrorHandler() {}
+
 export function createRequest(
   model: ReactModel,
   destination: Destination,
   bundlerConfig: BundlerConfig,
+  onError: (error: mixed) => void = defaultErrorHandler,
 ): Request {
   const pingedSegments = [];
   const request = {
@@ -107,6 +112,7 @@ export function createRequest(
     completedErrorChunks: [],
     writtenSymbols: new Map(),
     writtenModules: new Map(),
+    onError,
     flowing: false,
     toJSON: function(key: string, value: ReactModel): ReactJSONValue {
       return resolveModelToJSON(request, this, key, value);
@@ -413,6 +419,7 @@ export function resolveModelToJSON(
         x.then(ping, ping);
         return serializeByRefID(newSegment.id);
       } else {
+        reportError(request, x);
         // Something errored. We'll still send everything we have up until this point.
         // We'll replace this element with a lazy reference that throws on the client
         // once it gets rendered.
@@ -589,6 +596,15 @@ export function resolveModelToJSON(
   );
 }
 
+function reportError(request: Request, error: mixed): void {
+  request.onError(error);
+}
+
+function fatalError(request: Request, error: mixed): void {
+  // This is called outside error handling code such as if an error happens in React internals.
+  closeWithError(request.destination, error);
+}
+
 function emitErrorChunk(request: Request, id: number, error: mixed): void {
   // TODO: We should not leak error messages to the client in prod.
   // Give this an error code instead and log on the server.
@@ -654,6 +670,7 @@ function retrySegment(request: Request, segment: Segment): void {
       x.then(ping, ping);
       return;
     } else {
+      reportError(request, x);
       // This errored, we need to serialize this error to the
       emitErrorChunk(request, segment.id, x);
     }
@@ -666,18 +683,23 @@ function performWork(request: Request): void {
   ReactCurrentDispatcher.current = Dispatcher;
   currentCache = request.cache;
 
-  const pingedSegments = request.pingedSegments;
-  request.pingedSegments = [];
-  for (let i = 0; i < pingedSegments.length; i++) {
-    const segment = pingedSegments[i];
-    retrySegment(request, segment);
-  }
-  if (request.flowing) {
-    flushCompletedChunks(request);
+  try {
+    const pingedSegments = request.pingedSegments;
+    request.pingedSegments = [];
+    for (let i = 0; i < pingedSegments.length; i++) {
+      const segment = pingedSegments[i];
+      retrySegment(request, segment);
+    }
+    if (request.flowing) {
+      flushCompletedChunks(request);
+    }
+  } catch (error) {
+    reportError(request, error);
+    fatalError(request, error);
+  } finally {
+    ReactCurrentDispatcher.current = prevDispatcher;
+    currentCache = prevCache;
   }
-
-  ReactCurrentDispatcher.current = prevDispatcher;
-  currentCache = prevCache;
 }
 
 let reentrant = false;
@@ -749,7 +771,12 @@ export function startWork(request: Request): void {
 
 export function startFlowing(request: Request): void {
   request.flowing = true;
-  flushCompletedChunks(request);
+  try {
+    flushCompletedChunks(request);
+  } catch (error) {
+    reportError(request, error);
+    fatalError(request, error);
+  }
 }
 
 function unsupportedHook(): void {
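
Taken together, the diff splits server-side failures into two paths: errors caught while resolving a model or retrying a segment are passed to the new `onError` callback (which defaults to a no-op) and the stream keeps going, while errors that escape `performWork` or `startFlowing` are reported and then treated as fatal, closing the destination via `closeWithError`. The sketch below illustrates that split in isolation; `createSketchRequest` and the plain-object `destination` are illustrative stand-ins, not React's actual API.

```js
// Minimal sketch (not the React source): a request-like object with a
// user-supplied onError callback defaulting to a no-op, and the two
// error paths the commit distinguishes. All names here are hypothetical.
function createSketchRequest(onError = () => {}) {
  return {
    onError,
    destination: {closed: false, error: null},
  };
}

// Recoverable path: notify the caller, then keep streaming; in the real
// code this is followed by emitErrorChunk so the client still gets a chunk.
function reportError(request, error) {
  request.onError(error);
}

// Fatal path: something failed outside normal error handling (for example
// while flushing), so the stream itself is torn down with the error attached.
function fatalError(request, error) {
  request.destination.closed = true;
  request.destination.error = error;
}

// Usage: the callback observes every reported error on the server.
const request = createSketchRequest(err => console.error('onError:', err));
try {
  throw new Error('render failed');
} catch (x) {
  reportError(request, x); // recoverable: reported, stream keeps going
}
fatalError(request, new Error('destination broke')); // fatal: stream closed
```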