@@ -10,7 +10,7 @@ use rustc_data_structures::sorted_map::SortedMap;
 use rustc_target::abi::{Align, HasDataLayout, Size};
 
 use super::{
-    read_target_uint, write_target_uint, AllocId, InterpResult, Pointer, Scalar, ScalarMaybeUndef,
+    read_target_uint, write_target_uint, AllocId, InterpResult, Pointer, Scalar, ScalarMaybeUninit,
 };
 
 #[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
@@ -25,7 +25,7 @@ pub struct Allocation<Tag = (), Extra = ()> {
     /// at the given offset.
     relocations: Relocations<Tag>,
     /// Denotes which part of this allocation is initialized.
-    undef_mask: UndefMask,
+    init_mask: InitMask,
     /// The size of the allocation. Currently, must always equal `bytes.len()`.
     pub size: Size,
     /// The alignment of the allocation to detect unaligned reads.
@@ -92,7 +92,7 @@ impl<Tag> Allocation<Tag> {
         Self {
             bytes,
             relocations: Relocations::new(),
-            undef_mask: UndefMask::new(size, true),
+            init_mask: InitMask::new(size, true),
             size,
             align,
             mutability: Mutability::Not,
@@ -108,7 +108,7 @@ impl<Tag> Allocation<Tag> {
         Allocation {
             bytes: vec![0; size.bytes_usize()],
             relocations: Relocations::new(),
-            undef_mask: UndefMask::new(size, false),
+            init_mask: InitMask::new(size, false),
             size,
             align,
             mutability: Mutability::Mut,
@@ -138,7 +138,7 @@ impl Allocation<(), ()> {
                 })
                 .collect(),
             ),
-            undef_mask: self.undef_mask,
+            init_mask: self.init_mask,
             align: self.align,
             mutability: self.mutability,
             extra,
@@ -160,9 +160,9 @@ impl<Tag, Extra> Allocation<Tag, Extra> {
         &self.bytes[range]
     }
 
-    /// Returns the undef mask.
-    pub fn undef_mask(&self) -> &UndefMask {
-        &self.undef_mask
+    /// Returns the mask indicating which bytes are initialized.
+    pub fn init_mask(&self) -> &InitMask {
+        &self.init_mask
     }
 
     /// Returns the relocation list.
@@ -358,15 +358,15 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
         cx: &impl HasDataLayout,
         ptr: Pointer<Tag>,
         size: Size,
-    ) -> InterpResult<'tcx, ScalarMaybeUndef<Tag>> {
+    ) -> InterpResult<'tcx, ScalarMaybeUninit<Tag>> {
         // `get_bytes_unchecked` tests relocation edges.
         let bytes = self.get_bytes_with_undef_and_ptr(cx, ptr, size)?;
-        // Undef check happens *after* we established that the alignment is correct.
+        // Uninit check happens *after* we established that the alignment is correct.
         // We must not return `Ok()` for unaligned pointers!
         if self.is_defined(ptr, size).is_err() {
-            // This inflates undefined bytes to the entire scalar, even if only a few
-            // bytes are undefined.
-            return Ok(ScalarMaybeUndef::Undef);
+            // This inflates uninitialized bytes to the entire scalar, even if only a few
+            // bytes are uninitialized.
+            return Ok(ScalarMaybeUninit::Uninit);
         }
         // Now we do the actual reading.
         let bits = read_target_uint(cx.data_layout().endian, bytes).unwrap();
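The renamed return path above keeps an important semantic of the interpreter's reads: if any byte in the requested range is uninitialized, the whole scalar is reported as uninitialized rather than as a partially-known value. A minimal standalone sketch of that inflation rule, with hypothetical names (`MaybeUninitScalar`, `read_scalar_simple`) standing in for the real `ScalarMaybeUninit` machinery:

// Hypothetical simplification: the real `ScalarMaybeUninit<Tag>` wraps a
// `Scalar<Tag>`, which may also be a pointer; a plain integer suffices here.
#[derive(Clone, Copy, Debug, PartialEq)]
enum MaybeUninitScalar {
    Scalar(u128),
    Uninit,
}

// If even one byte is uninitialized, the entire scalar reads as `Uninit`.
fn read_scalar_simple(bytes: &[u8], init: &[bool]) -> MaybeUninitScalar {
    assert_eq!(bytes.len(), init.len());
    if init.iter().any(|&b| !b) {
        return MaybeUninitScalar::Uninit;
    }
    // Assemble the little-endian bytes into an integer (most significant first).
    let bits = bytes.iter().rev().fold(0u128, |acc, &b| (acc << 8) | u128::from(b));
    MaybeUninitScalar::Scalar(bits)
}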
@@ -377,11 +377,11 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
         } else {
             if let Some(&(tag, alloc_id)) = self.relocations.get(&ptr.offset) {
                 let ptr = Pointer::new_with_tag(alloc_id, Size::from_bytes(bits), tag);
-                return Ok(ScalarMaybeUndef::Scalar(ptr.into()));
+                return Ok(ScalarMaybeUninit::Scalar(ptr.into()));
             }
         }
         // We don't. Just return the bits.
-        Ok(ScalarMaybeUndef::Scalar(Scalar::from_uint(bits, size)))
+        Ok(ScalarMaybeUninit::Scalar(Scalar::from_uint(bits, size)))
     }
 
     /// Reads a pointer-sized scalar.
@@ -392,7 +392,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
         &self,
         cx: &impl HasDataLayout,
         ptr: Pointer<Tag>,
-    ) -> InterpResult<'tcx, ScalarMaybeUndef<Tag>> {
+    ) -> InterpResult<'tcx, ScalarMaybeUninit<Tag>> {
         self.read_scalar(cx, ptr, cx.data_layout().pointer_size)
     }
 
@@ -409,12 +409,12 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
         &mut self,
         cx: &impl HasDataLayout,
         ptr: Pointer<Tag>,
-        val: ScalarMaybeUndef<Tag>,
+        val: ScalarMaybeUninit<Tag>,
         type_size: Size,
     ) -> InterpResult<'tcx> {
         let val = match val {
-            ScalarMaybeUndef::Scalar(scalar) => scalar,
-            ScalarMaybeUndef::Undef => {
+            ScalarMaybeUninit::Scalar(scalar) => scalar,
+            ScalarMaybeUninit::Uninit => {
                 self.mark_definedness(ptr, type_size, false);
                 return Ok(());
             }
@@ -445,7 +445,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
         &mut self,
         cx: &impl HasDataLayout,
         ptr: Pointer<Tag>,
-        val: ScalarMaybeUndef<Tag>,
+        val: ScalarMaybeUninit<Tag>,
     ) -> InterpResult<'tcx> {
         let ptr_size = cx.data_layout().pointer_size;
         self.write_scalar(cx, ptr, val, ptr_size)
@@ -514,10 +514,10 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
         // Mark parts of the outermost relocations as undefined if they partially fall outside the
         // given range.
         if first < start {
-            self.undef_mask.set_range(first, start, false);
+            self.init_mask.set_range(first, start, false);
         }
         if last > end {
-            self.undef_mask.set_range(end, last, false);
+            self.init_mask.set_range(end, last, false);
         }
 
         // Forget all the relocations.
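For intuition on the two `set_range` calls in this hunk: `first` is the start of the outermost relocation overlapping the cleared range, and `last` is one past the end of the outermost one on the other side, so a stored pointer that straddles a boundary loses some of its bytes and the surviving bytes must become uninitialized. A hypothetical worked example, assuming 8-byte pointers (all offsets are made up for illustration):

fn main() {
    let ptr_size = 8u64;
    let (start, end) = (8u64, 16u64); // the byte range being overwritten
    // A relocation (stored pointer) at offset 6 covers bytes 6..14; another at
    // offset 12 covers bytes 12..20. Both straddle the cleared range.
    let first = 6u64;            // start of the outermost overlapping relocation
    let last = 12u64 + ptr_size; // one past the end of the outermost one
    if first < start {
        println!("mark {}..{} uninitialized", first, start); // bytes 6..8
    }
    if last > end {
        println!("mark {}..{} uninitialized", end, last); // bytes 16..20
    }
}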
@@ -548,21 +548,21 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
     /// Returns `Ok(())` if it's defined. Otherwise returns the index of the byte
     /// at which the first undefined access begins.
     fn is_defined(&self, ptr: Pointer<Tag>, size: Size) -> Result<(), Size> {
-        self.undef_mask.is_range_defined(ptr.offset, ptr.offset + size) // `Size` addition
+        self.init_mask.is_range_initialized(ptr.offset, ptr.offset + size) // `Size` addition
     }
 
     /// Checks that a range of bytes is defined. If not, returns the `ReadUndefBytes`
     /// error which will report the first byte which is undefined.
     fn check_defined(&self, ptr: Pointer<Tag>, size: Size) -> InterpResult<'tcx> {
         self.is_defined(ptr, size)
-            .or_else(|idx| throw_ub!(InvalidUndefBytes(Some(Pointer::new(ptr.alloc_id, idx)))))
+            .or_else(|idx| throw_ub!(InvalidUninitBytes(Some(Pointer::new(ptr.alloc_id, idx)))))
     }
 
     pub fn mark_definedness(&mut self, ptr: Pointer<Tag>, size: Size, new_state: bool) {
         if size.bytes() == 0 {
             return;
         }
-        self.undef_mask.set_range(ptr.offset, ptr.offset + size, new_state);
+        self.init_mask.set_range(ptr.offset, ptr.offset + size, new_state);
     }
 }
@@ -601,13 +601,13 @@ impl<Tag, Extra> Allocation<Tag, Extra> {
         // where each element toggles the state.
 
         let mut ranges = smallvec::SmallVec::<[u64; 1]>::new();
-        let initial = self.undef_mask.get(src.offset);
+        let initial = self.init_mask.get(src.offset);
         let mut cur_len = 1;
         let mut cur = initial;
 
         for i in 1..size.bytes() {
             // FIXME: optimize to bitshift the current undef block's bits and read the top bit.
-            if self.undef_mask.get(src.offset + Size::from_bytes(i)) == cur {
+            if self.init_mask.get(src.offset + Size::from_bytes(i)) == cur {
                 cur_len += 1;
             } else {
                 ranges.push(cur_len);
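The loop above (together with a final `ranges.push(cur_len)` just past this hunk) builds the run-length encoding described in the comment: the first state plus a list of run lengths, where each stored length toggles the state. A minimal sketch of the same scheme over a plain `&[bool]` instead of the bit-packed mask:

// Returns the initial state plus toggling run lengths,
// e.g. [true, true, false, true] -> (true, vec![2, 1, 1]).
fn compress_runs(mask: &[bool]) -> (bool, Vec<u64>) {
    let initial = mask[0];
    let (mut cur, mut cur_len) = (initial, 1u64);
    let mut ranges = Vec::new();
    for &bit in &mask[1..] {
        if bit == cur {
            cur_len += 1;
        } else {
            ranges.push(cur_len);
            cur = bit;
            cur_len = 1;
        }
    }
    ranges.push(cur_len);
    (initial, ranges)
}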
@@ -632,7 +632,7 @@ impl<Tag, Extra> Allocation<Tag, Extra> {
         // An optimization where we can just overwrite an entire range of definedness bits if
         // they are going to be uniformly `1` or `0`.
         if defined.ranges.len() <= 1 {
-            self.undef_mask.set_range_inbounds(
+            self.init_mask.set_range_inbounds(
                 dest.offset,
                 dest.offset + size * repeat, // `Size` operations
                 defined.initial,
@@ -647,7 +647,7 @@ impl<Tag, Extra> Allocation<Tag, Extra> {
         for range in &defined.ranges {
             let old_j = j;
             j += range;
-            self.undef_mask.set_range_inbounds(
+            self.init_mask.set_range_inbounds(
                 Size::from_bytes(old_j),
                 Size::from_bytes(j),
                 cur,
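This hunk is the decompression side: replay the stored runs once per copy, toggling the state at every run boundary. Continuing the `compress_runs` sketch above, with a `bool` slice standing in for `set_range_inbounds`:

fn apply_runs(dest: &mut [bool], initial: bool, ranges: &[u64], repeat: u64) {
    let mut j = 0usize;
    for _ in 0..repeat {
        let mut cur = initial;
        for &range in ranges {
            let old_j = j;
            j += range as usize;
            dest[old_j..j].fill(cur); // stand-in for `init_mask.set_range_inbounds`
            cur = !cur; // each run toggles the state
        }
    }
}

With `(initial, ranges) = (true, vec![2, 1, 1])` and `repeat = 2`, this fills an 8-byte mask with `[t, t, f, t, t, t, f, t]`.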
@@ -739,29 +739,29 @@ impl<Tag: Copy, Extra> Allocation<Tag, Extra> {
 type Block = u64;
 
 /// A bitmask where each bit refers to the byte with the same index. If the bit is `true`, the byte
-/// is defined. If it is `false` the byte is undefined.
+/// is initialized. If it is `false` the byte is uninitialized.
 #[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
 #[derive(HashStable)]
-pub struct UndefMask {
+pub struct InitMask {
     blocks: Vec<Block>,
     len: Size,
 }
 
-impl UndefMask {
+impl InitMask {
     pub const BLOCK_SIZE: u64 = 64;
 
     pub fn new(size: Size, state: bool) -> Self {
-        let mut m = UndefMask { blocks: vec![], len: Size::ZERO };
+        let mut m = InitMask { blocks: vec![], len: Size::ZERO };
         m.grow(size, state);
         m
     }
 
-    /// Checks whether the range `start..end` (end-exclusive) is entirely defined.
+    /// Checks whether the range `start..end` (end-exclusive) is entirely initialized.
     ///
-    /// Returns `Ok(())` if it's defined. Otherwise returns the index of the byte
-    /// at which the first undefined access begins.
+    /// Returns `Ok(())` if it's initialized. Otherwise returns the index of the byte
+    /// at which the first uninitialized access begins.
     #[inline]
-    pub fn is_range_defined(&self, start: Size, end: Size) -> Result<(), Size> {
+    pub fn is_range_initialized(&self, start: Size, end: Size) -> Result<(), Size> {
         if end > self.len {
             return Err(self.len);
         }
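To make the renamed type concrete: a self-contained, simplified model of `InitMask` (the name `SimpleInitMask` is hypothetical; the real type measures lengths in `Size`, grows lazily via `grow`, and scans whole 64-bit blocks rather than individual bits):

struct SimpleInitMask {
    blocks: Vec<u64>, // one bit per byte, 64 bytes tracked per block
    len: u64,
}

impl SimpleInitMask {
    const BLOCK_SIZE: u64 = 64;

    fn new(len: u64, state: bool) -> Self {
        let nblocks = ((len + Self::BLOCK_SIZE - 1) / Self::BLOCK_SIZE) as usize;
        SimpleInitMask { blocks: vec![if state { !0 } else { 0 }; nblocks], len }
    }

    fn get(&self, i: u64) -> bool {
        let (block, bit) = (i / Self::BLOCK_SIZE, i % Self::BLOCK_SIZE);
        self.blocks[block as usize] & (1u64 << bit) != 0
    }

    // Same contract as `is_range_initialized`: `Ok(())` or the first bad index.
    fn is_range_initialized(&self, start: u64, end: u64) -> Result<(), u64> {
        if end > self.len {
            return Err(self.len);
        }
        (start..end).find(|&i| !self.get(i)).map_or(Ok(()), Err)
    }
}

For example, `SimpleInitMask::new(100, true).is_range_initialized(0, 100)` returns `Ok(())`, while a mask created with `state = false` reports `Err(0)`.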
@@ -870,7 +870,7 @@ impl UndefMask {
     #[inline]
     fn bit_index(bits: Size) -> (usize, usize) {
         let bits = bits.bytes();
-        let a = bits / UndefMask::BLOCK_SIZE;
-        let b = bits % UndefMask::BLOCK_SIZE;
+        let a = bits / InitMask::BLOCK_SIZE;
+        let b = bits % InitMask::BLOCK_SIZE;
         (usize::try_from(a).unwrap(), usize::try_from(b).unwrap())
     }
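A quick sanity check on the split above: with `BLOCK_SIZE = 64`, byte offset 150 lands in block `150 / 64 = 2` at bit `150 % 64 = 22`, i.e. the mask bit for byte 150 is bit 22 of `blocks[2]`:

fn main() {
    const BLOCK_SIZE: u64 = 64;
    let offset = 150u64;
    assert_eq!((offset / BLOCK_SIZE, offset % BLOCK_SIZE), (2, 22));
}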