Skip to content

Commit 7196556

Browse files
committed May 10, 2022
tighten sanity checks around Scalar and ScalarPair
1 parent d53f1e8 commit 7196556

File tree

3 files changed

+44
-44
lines changed

3 files changed

+44
-44
lines changed
 

Diff for: ‎compiler/rustc_const_eval/src/interpret/operand.rs

+30-15
Original file line numberDiff line numberDiff line change
@@ -284,8 +284,10 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
284284
Abi::Scalar(s) if force => Some(s.primitive()),
285285
_ => None,
286286
};
287-
if let Some(_) = scalar_layout {
288-
let scalar = alloc.read_scalar(alloc_range(Size::ZERO, mplace.layout.size))?;
287+
if let Some(s) = scalar_layout {
288+
let size = s.size(self);
289+
assert_eq!(size, mplace.layout.size, "abi::Scalar size does not match layout size");
290+
let scalar = alloc.read_scalar(alloc_range(Size::ZERO, size))?;
289291
return Ok(Some(ImmTy { imm: scalar.into(), layout: mplace.layout }));
290292
}
291293
let scalar_pair_layout = match mplace.layout.abi {
@@ -302,7 +304,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
302304
// which `ptr.offset(b_offset)` cannot possibly fail to satisfy.
303305
let (a_size, b_size) = (a.size(self), b.size(self));
304306
let b_offset = a_size.align_to(b.align(self).abi);
305-
assert!(b_offset.bytes() > 0); // we later use the offset to tell apart the fields
307+
assert!(b_offset.bytes() > 0); // in `operand_field` we use the offset to tell apart the fields
306308
let a_val = alloc.read_scalar(alloc_range(Size::ZERO, a_size))?;
307309
let b_val = alloc.read_scalar(alloc_range(b_offset, b_size))?;
308310
return Ok(Some(ImmTy {
@@ -394,28 +396,41 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
394396
Err(value) => value,
395397
};
396398

397-
let field_layout = op.layout.field(self, field);
399+
let field_layout = base.layout.field(self, field);
398400
if field_layout.is_zst() {
399401
let immediate = Scalar::ZST.into();
400402
return Ok(OpTy { op: Operand::Immediate(immediate), layout: field_layout });
401403
}
402-
let offset = op.layout.fields.offset(field);
403-
let immediate = match *base {
404+
405+
let offset = base.layout.fields.offset(field);
406+
// This makes several assumptions about what layouts we will encounter; we match what
407+
// codegen does as well as we can (see `extract_field` in `rustc_codegen_ssa/src/mir/operand.rs`).
408+
let field_val = match (*base, base.layout.abi) {
404409
// the field covers the entire type
405-
_ if offset.bytes() == 0 && field_layout.size == op.layout.size => *base,
410+
_ if field_layout.size == base.layout.size => {
411+
assert!(offset.bytes() == 0);
412+
*base
413+
}
406414
// extract fields from types with `ScalarPair` ABI
407-
Immediate::ScalarPair(a, b) => {
408-
let val = if offset.bytes() == 0 { a } else { b };
409-
Immediate::from(val)
415+
(Immediate::ScalarPair(a_val, b_val), Abi::ScalarPair(a, b)) => {
416+
Immediate::from(if offset.bytes() == 0 {
417+
assert_eq!(field_layout.size, a.size(self));
418+
a_val
419+
} else {
420+
assert_eq!(offset, a.size(self).align_to(b.align(self).abi));
421+
assert_eq!(field_layout.size, b.size(self));
422+
b_val
423+
})
410424
}
411-
Immediate::Scalar(val) => span_bug!(
425+
_ => span_bug!(
412426
self.cur_span(),
413-
"field access on non aggregate {:#?}, {:#?}",
414-
val,
415-
op.layout
427+
"invalid field access on immediate {}, layout {:#?}",
428+
base,
429+
base.layout
416430
),
417431
};
418-
Ok(OpTy { op: Operand::Immediate(immediate), layout: field_layout })
432+
433+
Ok(OpTy { op: Operand::Immediate(field_val), layout: field_layout })
419434
}
420435

421436
pub fn operand_index(

Diff for: ‎compiler/rustc_const_eval/src/interpret/place.rs

+9-25
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@ use rustc_target::abi::{HasDataLayout, Size, VariantIdx, Variants};
1616
use super::{
1717
alloc_range, mir_assign_valid_types, AllocId, AllocRef, AllocRefMut, CheckInAllocMsg,
1818
ConstAlloc, ImmTy, Immediate, InterpCx, InterpResult, LocalValue, Machine, MemoryKind, OpTy,
19-
Operand, Pointer, PointerArithmetic, Provenance, Scalar, ScalarMaybeUninit,
19+
Operand, Pointer, Provenance, Scalar, ScalarMaybeUninit,
2020
};
2121

2222
#[derive(Copy, Clone, Hash, PartialEq, Eq, HashStable, Debug)]
@@ -700,24 +700,7 @@ where
700700
src: Immediate<M::PointerTag>,
701701
dest: &PlaceTy<'tcx, M::PointerTag>,
702702
) -> InterpResult<'tcx> {
703-
if cfg!(debug_assertions) {
704-
// This is a very common path, avoid some checks in release mode
705-
assert!(!dest.layout.is_unsized(), "Cannot write unsized data");
706-
match src {
707-
Immediate::Scalar(ScalarMaybeUninit::Scalar(Scalar::Ptr(..))) => assert_eq!(
708-
self.pointer_size(),
709-
dest.layout.size,
710-
"Size mismatch when writing pointer"
711-
),
712-
Immediate::Scalar(ScalarMaybeUninit::Scalar(Scalar::Int(int))) => {
713-
assert_eq!(int.size(), dest.layout.size, "Size mismatch when writing bits")
714-
}
715-
Immediate::Scalar(ScalarMaybeUninit::Uninit) => {} // uninit can have any size
716-
Immediate::ScalarPair(_, _) => {
717-
// FIXME: Can we check anything here?
718-
}
719-
}
720-
}
703+
assert!(!dest.layout.is_unsized(), "Cannot write unsized data");
721704
trace!("write_immediate: {:?} <- {:?}: {}", *dest, src, dest.layout.ty);
722705

723706
// See if we can avoid an allocation. This is the counterpart to `read_immediate_raw`,
@@ -769,15 +752,15 @@ where
769752
// cover all the bytes!
770753
match value {
771754
Immediate::Scalar(scalar) => {
772-
match dest.layout.abi {
773-
Abi::Scalar(_) => {} // fine
774-
_ => span_bug!(
755+
let Abi::Scalar(s) = dest.layout.abi else { span_bug!(
775756
self.cur_span(),
776757
"write_immediate_to_mplace: invalid Scalar layout: {:#?}",
777758
dest.layout
778-
),
779-
}
780-
alloc.write_scalar(alloc_range(Size::ZERO, dest.layout.size), scalar)
759+
)
760+
};
761+
let size = s.size(&tcx);
762+
assert_eq!(dest.layout.size, size, "abi::Scalar size does not match layout size");
763+
alloc.write_scalar(alloc_range(Size::ZERO, size), scalar)
781764
}
782765
Immediate::ScalarPair(a_val, b_val) => {
783766
// We checked `ptr_align` above, so all fields will have the alignment they need.
@@ -791,6 +774,7 @@ where
791774
};
792775
let (a_size, b_size) = (a.size(&tcx), b.size(&tcx));
793776
let b_offset = a_size.align_to(b.align(&tcx).abi);
777+
assert!(b_offset.bytes() > 0); // in `operand_field` we use the offset to tell apart the fields
794778

795779
// It is tempting to verify `b_offset` against `layout.fields.offset(1)`,
796780
// but that does not work: We could be a newtype around a pair, then the

Diff for: ‎compiler/rustc_const_eval/src/interpret/validity.rs

+5-4
Original file line numberDiff line numberDiff line change
@@ -645,17 +645,18 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
645645
// i.e. that we go over the `check_init` below.
646646
let size = scalar_layout.size(self.ecx);
647647
let is_full_range = match scalar_layout {
648-
ScalarAbi::Initialized { valid_range, .. } => {
648+
ScalarAbi::Initialized { .. } => {
649649
if M::enforce_number_validity(self.ecx) {
650650
false // not "full" since uninit is not accepted
651651
} else {
652-
valid_range.is_full_for(size)
652+
scalar_layout.is_always_valid(self.ecx)
653653
}
654654
}
655655
ScalarAbi::Union { .. } => true,
656656
};
657657
if is_full_range {
658-
// Nothing to check
658+
// Nothing to check. Crucially we don't even `read_scalar` until here, since that would
659+
// fail for `Union` scalars!
659660
return Ok(());
660661
}
661662
// We have something to check: it must at least be initialized.
@@ -688,7 +689,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
688689
} else {
689690
return Ok(());
690691
}
691-
} else if scalar_layout.valid_range(self.ecx).is_full_for(size) {
692+
} else if scalar_layout.is_always_valid(self.ecx) {
692693
// Easy. (This is reachable if `enforce_number_validity` is set.)
693694
return Ok(());
694695
} else {

0 commit comments

Comments
 (0)
Please sign in to comment.