@@ -613,7 +613,6 @@ impl<Tag> DerefMut for Relocations<Tag> {
 ////////////////////////////////////////////////////////////////////////////////
 
 type Block = u64;
-const BLOCK_SIZE: u64 = 64;
 
 #[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
 pub struct UndefMask {
@@ -624,6 +623,8 @@ pub struct UndefMask {
 impl_stable_hash_for!(struct mir::interpret::UndefMask { blocks, len });
 
 impl UndefMask {
+    pub const BLOCK_SIZE: u64 = 64;
+
     pub fn new(size: Size) -> Self {
         let mut m = UndefMask {
             blocks: vec![],
@@ -643,6 +644,7 @@ impl UndefMask {
             return Err(self.len);
         }
 
+        // FIXME(oli-obk): optimize this for allocations larger than a block.
         let idx = (start.bytes()..end.bytes())
             .map(|i| Size::from_bytes(i))
             .find(|&i| !self.get(i));
@@ -662,8 +664,31 @@ impl UndefMask {
     }
 
     pub fn set_range_inbounds(&mut self, start: Size, end: Size, new_state: bool) {
-        for i in start.bytes()..end.bytes() {
-            self.set(Size::from_bytes(i), new_state);
+        let (blocka, bita) = bit_index(start);
+        let (blockb, bitb) = bit_index(end);
+        if blocka == blockb {
+            // within a single block
+            for i in bita .. bitb {
+                self.set_bit(blocka, i, new_state);
+            }
+            return;
+        }
+        // across block boundaries
+        for i in bita .. Self::BLOCK_SIZE as usize {
+            self.set_bit(blocka, i, new_state);
+        }
+        for i in 0 .. bitb {
+            self.set_bit(blockb, i, new_state);
+        }
+        // fill in all the other blocks (much faster than one bit at a time)
+        if new_state {
+            for block in (blocka + 1) .. blockb {
+                self.blocks[block] = 0xFFFF_FFFF_FFFF_FFFF;
+            }
+        } else {
+            for block in (blocka + 1) .. blockb {
+                self.blocks[block] = 0;
+            }
         }
     }
 
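The hunk above replaces the old byte-at-a-time loop with a block-wise fast path. Below is a minimal standalone sketch of the same idea using hypothetical names (`ToyMask`, `set_range`), not the actual `UndefMask` API: bits in the first and last partially covered blocks are set individually, while every block fully covered by the range is written with a single whole-word store.

    // Standalone sketch (hypothetical, not part of the patch) of block-wise range setting.
    const BLOCK_SIZE: usize = 64;

    struct ToyMask {
        blocks: Vec<u64>,
    }

    impl ToyMask {
        fn new(bits: usize) -> Self {
            ToyMask { blocks: vec![0; (bits + BLOCK_SIZE - 1) / BLOCK_SIZE] }
        }

        fn set_bit(&mut self, block: usize, bit: usize, new_state: bool) {
            if new_state {
                self.blocks[block] |= 1 << bit;
            } else {
                self.blocks[block] &= !(1 << bit);
            }
        }

        // Set all bits in `start..end` (end-exclusive) to `new_state`.
        fn set_range(&mut self, start: usize, end: usize, new_state: bool) {
            let (blocka, bita) = (start / BLOCK_SIZE, start % BLOCK_SIZE);
            let (blockb, bitb) = (end / BLOCK_SIZE, end % BLOCK_SIZE);
            if blocka == blockb {
                // Range is contained in a single block.
                for i in bita..bitb {
                    self.set_bit(blocka, i, new_state);
                }
                return;
            }
            // Head: remaining bits of the first block, set one at a time.
            for i in bita..BLOCK_SIZE {
                self.set_bit(blocka, i, new_state);
            }
            // Tail: leading bits of the last block.
            for i in 0..bitb {
                self.set_bit(blockb, i, new_state);
            }
            // Body: whole blocks in between, one store per block.
            let fill = if new_state { u64::MAX } else { 0 };
            for block in (blocka + 1)..blockb {
                self.blocks[block] = fill;
            }
        }

        fn get(&self, i: usize) -> bool {
            self.blocks[i / BLOCK_SIZE] & (1 << (i % BLOCK_SIZE)) != 0
        }
    }

    fn main() {
        let mut m = ToyMask::new(256);
        m.set_range(10, 200, true);
        assert!(!m.get(9) && m.get(10) && m.get(199) && !m.get(200));
    }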
@@ -676,6 +701,11 @@ impl UndefMask {
     #[inline]
     pub fn set(&mut self, i: Size, new_state: bool) {
         let (block, bit) = bit_index(i);
+        self.set_bit(block, bit, new_state);
+    }
+
+    #[inline]
+    fn set_bit(&mut self, block: usize, bit: usize, new_state: bool) {
         if new_state {
             self.blocks[block] |= 1 << bit;
         } else {
@@ -684,11 +714,12 @@ impl UndefMask {
     }
 
     pub fn grow(&mut self, amount: Size, new_state: bool) {
-        let unused_trailing_bits = self.blocks.len() as u64 * BLOCK_SIZE - self.len.bytes();
+        let unused_trailing_bits = self.blocks.len() as u64 * Self::BLOCK_SIZE - self.len.bytes();
         if amount.bytes() > unused_trailing_bits {
-            let additional_blocks = amount.bytes() / BLOCK_SIZE + 1;
+            let additional_blocks = amount.bytes() / Self::BLOCK_SIZE + 1;
             assert_eq!(additional_blocks as usize as u64, additional_blocks);
             self.blocks.extend(
+                // FIXME(oli-obk): optimize this by repeating `new_state as Block`
                 iter::repeat(0).take(additional_blocks as usize),
             );
         }
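The FIXME added to `grow` suggests repeating `new_state as Block` instead of always pushing zeroed blocks. A hedged sketch of one way that suggestion could look, with a hypothetical free function; the patch itself keeps `iter::repeat(0)` and relies on the bit-setting afterwards.

    use std::iter;

    // Pre-fill newly grown blocks according to `new_state` instead of zeroing
    // them first. `(true as u64).wrapping_neg()` is all ones, `0` stays zero.
    fn extend_blocks(blocks: &mut Vec<u64>, additional_blocks: usize, new_state: bool) {
        let fill = (new_state as u64).wrapping_neg();
        blocks.extend(iter::repeat(fill).take(additional_blocks));
    }

    fn main() {
        let mut blocks = vec![0u64; 2];
        extend_blocks(&mut blocks, 3, true);
        assert_eq!(blocks.len(), 5);
        assert!(blocks[2..].iter().all(|&b| b == u64::MAX));
    }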
@@ -701,8 +732,8 @@ impl UndefMask {
 #[inline]
 fn bit_index(bits: Size) -> (usize, usize) {
     let bits = bits.bytes();
-    let a = bits / BLOCK_SIZE;
-    let b = bits % BLOCK_SIZE;
+    let a = bits / UndefMask::BLOCK_SIZE;
+    let b = bits % UndefMask::BLOCK_SIZE;
     assert_eq!(a as usize as u64, a);
     assert_eq!(b as usize as u64, b);
     (a as usize, b as usize)
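For reference, a small illustrative check (not from the patch) of the decomposition `bit_index` performs: a byte offset maps to `(offset / BLOCK_SIZE, offset % BLOCK_SIZE)`, so with 64-bit blocks, offset 70 addresses bit 6 of block 1.

    // Hypothetical standalone version of the div/mod split used by bit_index.
    fn bit_index(offset: u64) -> (usize, usize) {
        const BLOCK_SIZE: u64 = 64; // mirrors UndefMask::BLOCK_SIZE
        ((offset / BLOCK_SIZE) as usize, (offset % BLOCK_SIZE) as usize)
    }

    fn main() {
        assert_eq!(bit_index(0), (0, 0));
        assert_eq!(bit_index(63), (0, 63));
        assert_eq!(bit_index(64), (1, 0));
        assert_eq!(bit_index(70), (1, 6));
    }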