@@ -124,10 +124,7 @@ pub struct Scope<'tcx> {
     /// The cache for drop chain on "generator drop" exit.
     cached_generator_drop: Option<BasicBlock>,

-    /// The cache for drop chain on "unwind" exit. This block
-    /// contains code to run the current drop and all the preceding
-    /// drops (i.e., those having lower index in Drop’s Scope drop
-    /// array)
+    /// The cache for drop chain on "unwind" exit.
     cached_unwind: CachedBlock,
 }

@@ -144,7 +141,21 @@ struct DropData<'tcx> {
 }

 #[derive(Debug, Default, Clone, Copy)]
-pub(crate) struct CachedBlock(Option<BasicBlock>);
+pub(crate) struct CachedBlock {
+    /// The cached block for the cleanups-on-diverge path. This block
+    /// contains code to run the current drop and all the preceding
+    /// drops (i.e., those having lower index in Drop’s Scope drop
+    /// array)
+    unwind: Option<BasicBlock>,
+
+    /// The cached block for unwinds during cleanups-on-generator-drop path
+    ///
+    /// This is split from the standard unwind path here to prevent drop
+    /// elaboration from creating drop flags that would have to be captured
+    /// by the generator. I'm not sure how important this optimization is,
+    /// but it is here.
+    generator_drop: Option<BasicBlock>,
+}

 #[derive(Debug)]
 pub(crate) enum DropKind {
@@ -170,15 +181,24 @@ pub struct BreakableScope<'tcx> {

 impl CachedBlock {
     fn invalidate(&mut self) {
-        self.0 = None;
+        self.generator_drop = None;
+        self.unwind = None;
     }

-    fn get(&self) -> Option<BasicBlock> {
-        self.0
+    fn get(&self, generator_drop: bool) -> Option<BasicBlock> {
+        if generator_drop {
+            self.generator_drop
+        } else {
+            self.unwind
+        }
     }

-    fn ref_mut(&mut self) -> &mut Option<BasicBlock> {
-        &mut self.0
+    fn ref_mut(&mut self, generator_drop: bool) -> &mut Option<BasicBlock> {
+        if generator_drop {
+            &mut self.generator_drop
+        } else {
+            &mut self.unwind
+        }
     }
 }

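Note (not part of the patch): a minimal, self-contained sketch of how the reworked two-slot cache behaves after this change. Each path keeps its own cached block, so reading or filling one slot never touches the other. `BasicBlock` is reduced here to a plain index, and `main` is purely illustrative.

// Sketch only: `BasicBlock` stands in for rustc's real basic-block index.
type BasicBlock = usize;

#[derive(Debug, Default, Clone, Copy)]
struct CachedBlock {
    /// Cached entry block of the drop chain for the regular unwind path.
    unwind: Option<BasicBlock>,
    /// Cached entry block for the generator-drop path, kept separate so the
    /// two paths never share blocks (and hence drop flags).
    generator_drop: Option<BasicBlock>,
}

impl CachedBlock {
    /// Forget both cached chains.
    fn invalidate(&mut self) {
        self.generator_drop = None;
        self.unwind = None;
    }

    /// Read the slot selected by `generator_drop`.
    fn get(&self, generator_drop: bool) -> Option<BasicBlock> {
        if generator_drop { self.generator_drop } else { self.unwind }
    }

    /// Mutably borrow the slot selected by `generator_drop`.
    fn ref_mut(&mut self, generator_drop: bool) -> &mut Option<BasicBlock> {
        if generator_drop { &mut self.generator_drop } else { &mut self.unwind }
    }
}

fn main() {
    let mut cache = CachedBlock::default();
    // Cache a block for the unwind path only.
    *cache.ref_mut(false) = Some(7);
    assert_eq!(cache.get(false), Some(7));
    // The generator-drop slot is untouched, so that path must be built separately.
    assert_eq!(cache.get(true), None);
    cache.invalidate();
    assert_eq!(cache.get(false), None);
}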
@@ -358,7 +378,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
         assert_eq!(scope.region_scope, region_scope.0);

         let unwind_to = self.scopes.last().and_then(|next_scope| {
-            next_scope.cached_unwind.get()
+            next_scope.cached_unwind.get(false)
         }).unwrap_or_else(|| self.resume_block());

         unpack!(block = build_scope_drops(
@@ -367,6 +387,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
             block,
             unwind_to,
             self.arg_count,
+            false,
         ));

         block.unit()
@@ -421,7 +442,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
                 }
             };

-            let unwind_to = next_scope.cached_unwind.get().unwrap_or_else(|| {
+            let unwind_to = next_scope.cached_unwind.get(false).unwrap_or_else(|| {
                 debug_assert!(!may_panic, "cached block not present?");
                 START_BLOCK
             });
@@ -432,6 +453,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
                 block,
                 unwind_to,
                 self.arg_count,
+                false,
             ));

             scope = next_scope;
@@ -448,7 +470,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
     /// None indicates there’s no cleanup to do at this point.
     pub fn generator_drop_cleanup(&mut self) -> Option<BasicBlock> {
         // Fill in the cache for unwinds
-        self.diverge_cleanup_gen();
+        self.diverge_cleanup_gen(true);

         let src_info = self.scopes[0].source_info(self.fn_span);
         let resume_block = self.resume_block();
@@ -474,7 +496,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
             };

             let unwind_to = scopes.peek().as_ref().map(|scope| {
-                scope.cached_unwind.get().unwrap_or_else(|| {
+                scope.cached_unwind.get(true).unwrap_or_else(|| {
                     span_bug!(src_info.span, "cached block not present?")
                 })
             }).unwrap_or(resume_block);
@@ -485,6 +507,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
                 block,
                 unwind_to,
                 self.arg_count,
+                true,
             ));
         }

@@ -737,7 +760,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
     /// This path terminates in Resume. Returns the start of the path.
     /// See module comment for more details.
     pub fn diverge_cleanup(&mut self) -> BasicBlock {
-        self.diverge_cleanup_gen()
+        self.diverge_cleanup_gen(false)
     }

     fn resume_block(&mut self) -> BasicBlock {
@@ -756,7 +779,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
         }
     }

-    fn diverge_cleanup_gen(&mut self) -> BasicBlock {
+    fn diverge_cleanup_gen(&mut self, generator_drop: bool) -> BasicBlock {
         // Build up the drops in **reverse** order. The end result will
         // look like:
         //
@@ -770,15 +793,15 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {

         // Find the last cached block
         let (mut target, first_uncached) = if let Some(cached_index) = self.scopes.iter()
-            .rposition(|scope| scope.cached_unwind.get().is_some()) {
-            (self.scopes[cached_index].cached_unwind.get().unwrap(), cached_index + 1)
+            .rposition(|scope| scope.cached_unwind.get(generator_drop).is_some()) {
+            (self.scopes[cached_index].cached_unwind.get(generator_drop).unwrap(), cached_index + 1)
         } else {
             (self.resume_block(), 0)
         };

         for scope in self.scopes[first_uncached..].iter_mut() {
             target = build_diverge_scope(&mut self.cfg, scope.region_scope_span,
-                                         scope, target, self.is_generator);
+                                         scope, target, generator_drop, self.is_generator);
         }

         target
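Note (not part of the patch): a rough sketch, under simplifying assumptions, of the caching strategy in the hunk above. The builder reuses the innermost scope whose cache for the requested path is already filled and only builds, then re-caches, blocks for the scopes past it. The names `Scope`, `fresh_block`, and `diverge_cleanup_sketch` below are invented for the example; the real block construction happens in `build_diverge_scope`.

// Sketch only: `BasicBlock` is a plain index and `fresh_block` stands in for
// the real cleanup-block construction.
type BasicBlock = usize;

#[derive(Default)]
struct Scope {
    cached_unwind: Option<BasicBlock>,
    cached_generator_drop: Option<BasicBlock>,
}

impl Scope {
    fn cached(&self, generator_drop: bool) -> Option<BasicBlock> {
        if generator_drop { self.cached_generator_drop } else { self.cached_unwind }
    }
    fn cached_mut(&mut self, generator_drop: bool) -> &mut Option<BasicBlock> {
        if generator_drop { &mut self.cached_generator_drop } else { &mut self.cached_unwind }
    }
}

fn diverge_cleanup_sketch(
    scopes: &mut [Scope],
    resume_block: BasicBlock,
    generator_drop: bool,
    mut fresh_block: impl FnMut(BasicBlock) -> BasicBlock,
) -> BasicBlock {
    // Find the innermost scope that already has a cached block for this
    // path; its chain (and everything outside it) is reused verbatim.
    let (mut target, first_uncached) = if let Some(cached_index) = scopes
        .iter()
        .rposition(|scope| scope.cached(generator_drop).is_some())
    {
        (scopes[cached_index].cached(generator_drop).unwrap(), cached_index + 1)
    } else {
        (resume_block, 0)
    };

    // Build one new block per still-uncached scope, chain it onto `target`,
    // and remember it in that scope's cache for the next call.
    for scope in scopes[first_uncached..].iter_mut() {
        target = fresh_block(target);
        *scope.cached_mut(generator_drop) = Some(target);
    }
    target
}

fn main() {
    let mut scopes: Vec<Scope> = (0..3).map(|_| Scope::default()).collect();
    let mut next = 100;
    let mut fresh = |_chained_onto: BasicBlock| { next += 1; next };

    let a = diverge_cleanup_sketch(&mut scopes, 0, false, &mut fresh);
    // Same path again: the whole chain is found in the cache and reused.
    let b = diverge_cleanup_sketch(&mut scopes, 0, false, &mut fresh);
    assert_eq!(a, b);
    // The generator-drop path keeps its own chain, built separately.
    let c = diverge_cleanup_sketch(&mut scopes, 0, true, &mut fresh);
    assert_ne!(a, c);
}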
@@ -858,6 +881,7 @@ fn build_scope_drops<'tcx>(
     mut block: BasicBlock,
     last_unwind_to: BasicBlock,
     arg_count: usize,
+    generator_drop: bool,
 ) -> BlockAnd<()> {
     debug!("build_scope_drops({:?} -> {:?}", block, scope);

@@ -878,7 +902,7 @@ fn build_scope_drops<'tcx>(

     let mut unwind_blocks = scope.drops.iter().rev().filter_map(|drop_data| {
         if let DropKind::Value { cached_block } = drop_data.kind {
-            Some(cached_block.get().unwrap_or_else(|| {
+            Some(cached_block.get(generator_drop).unwrap_or_else(|| {
                 span_bug!(drop_data.span, "cached block not present?")
             }))
         } else {
@@ -922,12 +946,13 @@
     block.unit()
 }

-fn build_diverge_scope(cfg: &mut CFG<'tcx>,
-                       span: Span,
-                       scope: &mut Scope<'tcx>,
-                       mut target: BasicBlock,
-                       is_generator: bool)
-                       -> BasicBlock
+fn build_diverge_scope<'tcx>(cfg: &mut CFG<'tcx>,
+                             span: Span,
+                             scope: &mut Scope<'tcx>,
+                             mut target: BasicBlock,
+                             generator_drop: bool,
+                             is_generator: bool)
+                             -> BasicBlock
 {
     // Build up the drops in **reverse** order. The end result will
     // look like:
@@ -979,7 +1004,7 @@ fn build_diverge_scope(cfg: &mut CFG<'tcx>,
             }
             DropKind::Storage => {}
             DropKind::Value { ref mut cached_block } => {
-                let cached_block = cached_block.ref_mut();
+                let cached_block = cached_block.ref_mut(generator_drop);
                 target = if let Some(cached_block) = *cached_block {
                     storage_deads.clear();
                     target_built_by_us = false;
@@ -1002,7 +1027,7 @@ fn build_diverge_scope(cfg: &mut CFG<'tcx>,
         };
     }
     push_storage_deads(cfg, &mut target, &mut storage_deads, target_built_by_us, source_scope);
-    *scope.cached_unwind.ref_mut() = Some(target);
+    *scope.cached_unwind.ref_mut(generator_drop) = Some(target);

     assert!(storage_deads.is_empty());
     debug!("build_diverge_scope({:?}, {:?}) = {:?}", scope, span, target);