@@ -353,8 +353,10 @@ impl<'a> CoverageSpansGenerator<'a> {

         let prev = self.take_prev();
         debug!("    AT END, adding last prev={prev:?}");
-        let pending_dups = self.pending_dups.split_off(0);
-        for dup in pending_dups {
+
+        // Take `pending_dups` so that we can drain it while calling self methods.
+        // It is never used as a field after this point.
+        for dup in std::mem::take(&mut self.pending_dups) {
             debug!("    ...adding at least one pending dup={:?}", dup);
             self.push_refined_span(dup);
         }
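
For readers unfamiliar with this pattern, here is a minimal standalone sketch (with made-up types, not the real `CoverageSpansGenerator`) of why the vector is moved out of `self` before the loop: iterating over `&self.pending_dups` directly would keep `self` borrowed, so the `&mut self` call inside the loop would not compile, whereas `std::mem::take` swaps the field with an empty `Vec` and hands the old contents to the loop by value.

    // Hypothetical stand-ins for the coverage types, only to show the borrow pattern.
    struct Span(u32, u32);

    struct Generator {
        pending_dups: Vec<Span>,
        refined: Vec<Span>,
    }

    impl Generator {
        fn push_refined_span(&mut self, span: Span) {
            self.refined.push(span);
        }

        fn flush_pending_dups(&mut self) {
            // `for dup in &self.pending_dups { self.push_refined_span(...) }` would not
            // compile: the loop borrows the field while the call needs `&mut self`.
            // Moving the vector out of the field first ends that borrow.
            for dup in std::mem::take(&mut self.pending_dups) {
                self.push_refined_span(dup);
            }
        }
    }

    fn main() {
        let mut g = Generator { pending_dups: vec![Span(1, 2), Span(3, 4)], refined: vec![] };
        g.flush_pending_dups();
        assert!(g.pending_dups.is_empty());
        assert_eq!(g.refined.len(), 2);
    }
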
@@ -474,11 +476,16 @@ impl<'a> CoverageSpansGenerator<'a> {
             previous iteration, or prev started a new disjoint span"
         );
         if last_dup.span.hi() <= self.curr().span.lo() {
-            let pending_dups = self.pending_dups.split_off(0);
-            for dup in pending_dups.into_iter() {
+            // Temporarily steal `pending_dups` into a local, so that we can
+            // drain it while calling other self methods.
+            let mut pending_dups = std::mem::take(&mut self.pending_dups);
+            for dup in pending_dups.drain(..) {
                 debug!("    ...adding at least one pending={:?}", dup);
                 self.push_refined_span(dup);
             }
+            // The list of dups is now empty, but we can recycle its capacity.
+            assert!(pending_dups.is_empty() && self.pending_dups.is_empty());
+            self.pending_dups = pending_dups;
         } else {
             self.pending_dups.clear();
         }
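
As a rough illustration of the `drain`-then-restore idea used here (hypothetical names, assuming the only goal is to keep the field's allocation alive): `std::mem::take` leaves the field as a zero-capacity `Vec`, while the drained local still owns its buffer, so assigning it back lets later pushes reuse that buffer instead of reallocating.

    // Standalone sketch of "steal, drain, put back"; not the compiler's real types.
    struct Holder {
        pending: Vec<u32>,
        flushed: Vec<u32>,
    }

    impl Holder {
        fn consume(&mut self, x: u32) {
            self.flushed.push(x);
        }

        fn flush(&mut self) {
            // Steal the field so `&mut self` methods can be called inside the loop.
            let mut pending = std::mem::take(&mut self.pending);
            for x in pending.drain(..) {
                self.consume(x);
            }
            // `drain` emptied the local but kept its buffer; hand it back so the field
            // recovers that capacity instead of the empty Vec left by `mem::take`.
            assert!(pending.is_empty() && self.pending.is_empty());
            self.pending = pending;
        }
    }

    fn main() {
        let mut h = Holder { pending: (0..16).collect(), flushed: vec![] };
        h.flush();
        assert_eq!(h.flushed.len(), 16);
        assert!(h.pending.capacity() >= 16); // capacity survived the round trip
    }
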
@@ -526,7 +533,10 @@ impl<'a> CoverageSpansGenerator<'a> {
         let has_pre_closure_span = prev.span.lo() < right_cutoff;
         let has_post_closure_span = prev.span.hi() > right_cutoff;
 
-        let mut pending_dups = self.pending_dups.split_off(0);
+        // Temporarily steal `pending_dups` into a local, so that we can
+        // mutate and/or drain it while calling other self methods.
+        let mut pending_dups = std::mem::take(&mut self.pending_dups);
+
         if has_pre_closure_span {
             let mut pre_closure = self.prev().clone();
             pre_closure.span = pre_closure.span.with_hi(left_cutoff);
@@ -540,6 +550,7 @@ impl<'a> CoverageSpansGenerator<'a> {
             }
             self.push_refined_span(pre_closure);
         }
+
         if has_post_closure_span {
             // Mutate `prev.span()` to start after the closure (and discard curr).
             // (**NEVER** update `prev_original_span` because it affects the assumptions
@@ -550,12 +561,15 @@ impl<'a> CoverageSpansGenerator<'a> {
                 debug!("    ...and at least one overlapping dup={:?}", dup);
                 dup.span = dup.span.with_lo(right_cutoff);
             }
-            self.pending_dups.append(&mut pending_dups);
             let closure_covspan = self.take_curr(); // Prevent this curr from becoming prev.
             self.push_refined_span(closure_covspan); // since self.prev() was already updated
         } else {
             pending_dups.clear();
         }
+
+        // Restore the modified post-closure spans, or the empty vector's capacity.
+        assert!(self.pending_dups.is_empty());
+        self.pending_dups = pending_dups;
     }
 
     /// Called if `curr.span` equals `prev_original_span` (and potentially equal to all
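
Finally, a compressed sketch of the shape the closure-handling code takes after this change (again with illustrative names and simplified spans, not the real ones): the field is stolen once up front, the local copy is either mutated or cleared while other `&mut self` calls happen, and it is unconditionally reinstated at the end, which also preserves the allocation in the cleared case.

    // Standalone sketch of "steal up front, mutate or clear, reinstate at the end".
    struct Carver {
        pending_dups: Vec<(u32, u32)>, // (lo, hi) stand-ins for spans
        refined: Vec<(u32, u32)>,
    }

    impl Carver {
        fn push_refined_span(&mut self, span: (u32, u32)) {
            self.refined.push(span);
        }

        fn carve(&mut self, right_cutoff: u32, has_post_closure_span: bool) {
            // Steal the field so the borrow checker allows the `&mut self` calls below.
            let mut pending_dups = std::mem::take(&mut self.pending_dups);

            if has_post_closure_span {
                // Adjust the stolen spans in place to start after the cutoff.
                for dup in pending_dups.iter_mut() {
                    dup.0 = right_cutoff;
                }
                self.push_refined_span((right_cutoff, right_cutoff + 1));
            } else {
                pending_dups.clear();
            }

            // Reinstate either the modified spans or the empty-but-allocated vector.
            assert!(self.pending_dups.is_empty());
            self.pending_dups = pending_dups;
        }
    }

    fn main() {
        let mut c = Carver { pending_dups: vec![(1, 9), (2, 9)], refined: vec![] };
        c.carve(5, true);
        assert_eq!(c.pending_dups, vec![(5, 9), (5, 9)]);
    }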