From 719655658a5ba40224143b120865865c08a90732 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Tue, 19 Apr 2022 14:12:21 -0400 Subject: [PATCH 1/6] tighten sanity checks around Scalar and ScalarPair --- .../rustc_const_eval/src/interpret/operand.rs | 45 ++++++++++++------- .../rustc_const_eval/src/interpret/place.rs | 34 ++++---------- .../src/interpret/validity.rs | 9 ++-- 3 files changed, 44 insertions(+), 44 deletions(-) diff --git a/compiler/rustc_const_eval/src/interpret/operand.rs b/compiler/rustc_const_eval/src/interpret/operand.rs index a8a5ac2f9d95d..254c6dd64653f 100644 --- a/compiler/rustc_const_eval/src/interpret/operand.rs +++ b/compiler/rustc_const_eval/src/interpret/operand.rs @@ -284,8 +284,10 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { Abi::Scalar(s) if force => Some(s.primitive()), _ => None, }; - if let Some(_) = scalar_layout { - let scalar = alloc.read_scalar(alloc_range(Size::ZERO, mplace.layout.size))?; + if let Some(s) = scalar_layout { + let size = s.size(self); + assert_eq!(size, mplace.layout.size, "abi::Scalar size does not match layout size"); + let scalar = alloc.read_scalar(alloc_range(Size::ZERO, size))?; return Ok(Some(ImmTy { imm: scalar.into(), layout: mplace.layout })); } let scalar_pair_layout = match mplace.layout.abi { @@ -302,7 +304,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { // which `ptr.offset(b_offset)` cannot possibly fail to satisfy. 
let (a_size, b_size) = (a.size(self), b.size(self)); let b_offset = a_size.align_to(b.align(self).abi); - assert!(b_offset.bytes() > 0); // we later use the offset to tell apart the fields + assert!(b_offset.bytes() > 0); // in `operand_field` we use the offset to tell apart the fields let a_val = alloc.read_scalar(alloc_range(Size::ZERO, a_size))?; let b_val = alloc.read_scalar(alloc_range(b_offset, b_size))?; return Ok(Some(ImmTy { @@ -394,28 +396,41 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { Err(value) => value, }; - let field_layout = op.layout.field(self, field); + let field_layout = base.layout.field(self, field); if field_layout.is_zst() { let immediate = Scalar::ZST.into(); return Ok(OpTy { op: Operand::Immediate(immediate), layout: field_layout }); } - let offset = op.layout.fields.offset(field); - let immediate = match *base { + + let offset = base.layout.fields.offset(field); + // This makes several assumptions about what layouts we will encounter; we match what + // codegen does as good as we can (see `extract_field` in `rustc_codegen_ssa/src/mir/operand.rs`). 
+ let field_val = match (*base, base.layout.abi) { // the field covers the entire type - _ if offset.bytes() == 0 && field_layout.size == op.layout.size => *base, + _ if field_layout.size == base.layout.size => { + assert!(offset.bytes() == 0); + *base + } // extract fields from types with `ScalarPair` ABI - Immediate::ScalarPair(a, b) => { - let val = if offset.bytes() == 0 { a } else { b }; - Immediate::from(val) + (Immediate::ScalarPair(a_val, b_val), Abi::ScalarPair(a, b)) => { + Immediate::from(if offset.bytes() == 0 { + assert_eq!(field_layout.size, a.size(self)); + a_val + } else { + assert_eq!(offset, a.size(self).align_to(b.align(self).abi)); + assert_eq!(field_layout.size, b.size(self)); + b_val + }) } - Immediate::Scalar(val) => span_bug!( + _ => span_bug!( self.cur_span(), - "field access on non aggregate {:#?}, {:#?}", - val, - op.layout + "invalid field access on immediate {}, layout {:#?}", + base, + base.layout ), }; - Ok(OpTy { op: Operand::Immediate(immediate), layout: field_layout }) + + Ok(OpTy { op: Operand::Immediate(field_val), layout: field_layout }) } pub fn operand_index( diff --git a/compiler/rustc_const_eval/src/interpret/place.rs b/compiler/rustc_const_eval/src/interpret/place.rs index df6e05bb13cde..600316fcbc0f7 100644 --- a/compiler/rustc_const_eval/src/interpret/place.rs +++ b/compiler/rustc_const_eval/src/interpret/place.rs @@ -16,7 +16,7 @@ use rustc_target::abi::{HasDataLayout, Size, VariantIdx, Variants}; use super::{ alloc_range, mir_assign_valid_types, AllocId, AllocRef, AllocRefMut, CheckInAllocMsg, ConstAlloc, ImmTy, Immediate, InterpCx, InterpResult, LocalValue, Machine, MemoryKind, OpTy, - Operand, Pointer, PointerArithmetic, Provenance, Scalar, ScalarMaybeUninit, + Operand, Pointer, Provenance, Scalar, ScalarMaybeUninit, }; #[derive(Copy, Clone, Hash, PartialEq, Eq, HashStable, Debug)] @@ -700,24 +700,7 @@ where src: Immediate, dest: &PlaceTy<'tcx, M::PointerTag>, ) -> InterpResult<'tcx> { - if cfg!(debug_assertions) { - 
// This is a very common path, avoid some checks in release mode - assert!(!dest.layout.is_unsized(), "Cannot write unsized data"); - match src { - Immediate::Scalar(ScalarMaybeUninit::Scalar(Scalar::Ptr(..))) => assert_eq!( - self.pointer_size(), - dest.layout.size, - "Size mismatch when writing pointer" - ), - Immediate::Scalar(ScalarMaybeUninit::Scalar(Scalar::Int(int))) => { - assert_eq!(int.size(), dest.layout.size, "Size mismatch when writing bits") - } - Immediate::Scalar(ScalarMaybeUninit::Uninit) => {} // uninit can have any size - Immediate::ScalarPair(_, _) => { - // FIXME: Can we check anything here? - } - } - } + assert!(!dest.layout.is_unsized(), "Cannot write unsized data"); trace!("write_immediate: {:?} <- {:?}: {}", *dest, src, dest.layout.ty); // See if we can avoid an allocation. This is the counterpart to `read_immediate_raw`, @@ -769,15 +752,15 @@ where // cover all the bytes! match value { Immediate::Scalar(scalar) => { - match dest.layout.abi { - Abi::Scalar(_) => {} // fine - _ => span_bug!( + let Abi::Scalar(s) = dest.layout.abi else { span_bug!( self.cur_span(), "write_immediate_to_mplace: invalid Scalar layout: {:#?}", dest.layout - ), - } - alloc.write_scalar(alloc_range(Size::ZERO, dest.layout.size), scalar) + ) + }; + let size = s.size(&tcx); + assert_eq!(dest.layout.size, size, "abi::Scalar size does not match layout size"); + alloc.write_scalar(alloc_range(Size::ZERO, size), scalar) } Immediate::ScalarPair(a_val, b_val) => { // We checked `ptr_align` above, so all fields will have the alignment they need. 
@@ -791,6 +774,7 @@ where }; let (a_size, b_size) = (a.size(&tcx), b.size(&tcx)); let b_offset = a_size.align_to(b.align(&tcx).abi); + assert!(b_offset.bytes() > 0); // in `operand_field` we use the offset to tell apart the fields // It is tempting to verify `b_offset` against `layout.fields.offset(1)`, // but that does not work: We could be a newtype around a pair, then the diff --git a/compiler/rustc_const_eval/src/interpret/validity.rs b/compiler/rustc_const_eval/src/interpret/validity.rs index 92e3ac04dc418..2dab9ff89868f 100644 --- a/compiler/rustc_const_eval/src/interpret/validity.rs +++ b/compiler/rustc_const_eval/src/interpret/validity.rs @@ -645,17 +645,18 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, ' // i.e. that we go over the `check_init` below. let size = scalar_layout.size(self.ecx); let is_full_range = match scalar_layout { - ScalarAbi::Initialized { valid_range, .. } => { + ScalarAbi::Initialized { .. } => { if M::enforce_number_validity(self.ecx) { false // not "full" since uninit is not accepted } else { - valid_range.is_full_for(size) + scalar_layout.is_always_valid(self.ecx) } } ScalarAbi::Union { .. } => true, }; if is_full_range { - // Nothing to check + // Nothing to check. Crucially, we don't even `read_scalar` until here, since that would + // fail for `Union` scalars! return Ok(()); } // We have something to check: it must at least be initialized. @@ -688,7 +689,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, ' } else { return Ok(()); } - } else if scalar_layout.valid_range(self.ecx).is_full_for(size) { + } else if scalar_layout.is_always_valid(self.ecx) { // Easy. (This is reachable if `enforce_number_validity` is set.) 
return Ok(()); } else { From 79c169d5cfab584ac3f9b5c3264d876c9f95db71 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Tue, 10 May 2022 13:16:00 +0200 Subject: [PATCH 2/6] disable one check for now until #96185 is fixed --- compiler/rustc_const_eval/src/interpret/operand.rs | 7 ++++--- compiler/rustc_const_eval/src/interpret/place.rs | 2 +- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/compiler/rustc_const_eval/src/interpret/operand.rs b/compiler/rustc_const_eval/src/interpret/operand.rs index 254c6dd64653f..83fb83fd3c160 100644 --- a/compiler/rustc_const_eval/src/interpret/operand.rs +++ b/compiler/rustc_const_eval/src/interpret/operand.rs @@ -284,9 +284,10 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { Abi::Scalar(s) if force => Some(s.primitive()), _ => None, }; - if let Some(s) = scalar_layout { - let size = s.size(self); - assert_eq!(size, mplace.layout.size, "abi::Scalar size does not match layout size"); + if let Some(_s) = scalar_layout { + //FIXME(#96185): let size = s.size(self); + //FIXME(#96185): assert_eq!(size, mplace.layout.size, "abi::Scalar size does not match layout size"); + let size = mplace.layout.size; //FIXME(#96185): remove this line let scalar = alloc.read_scalar(alloc_range(Size::ZERO, size))?; return Ok(Some(ImmTy { imm: scalar.into(), layout: mplace.layout })); } diff --git a/compiler/rustc_const_eval/src/interpret/place.rs b/compiler/rustc_const_eval/src/interpret/place.rs index 600316fcbc0f7..1a378de139b27 100644 --- a/compiler/rustc_const_eval/src/interpret/place.rs +++ b/compiler/rustc_const_eval/src/interpret/place.rs @@ -759,7 +759,7 @@ where ) }; let size = s.size(&tcx); - assert_eq!(dest.layout.size, size, "abi::Scalar size does not match layout size"); + //FIXME(#96185): assert_eq!(dest.layout.size, size, "abi::Scalar size does not match layout size"); alloc.write_scalar(alloc_range(Size::ZERO, size), scalar) } Immediate::ScalarPair(a_val, b_val) => { From 
600d9602610d1e6a79f98a2a07a660e945e3d22a Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Tue, 10 May 2022 13:24:39 +0200 Subject: [PATCH 3/6] even tighter checks for layouts on immediate field projections --- .../rustc_const_eval/src/interpret/operand.rs | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/compiler/rustc_const_eval/src/interpret/operand.rs b/compiler/rustc_const_eval/src/interpret/operand.rs index 83fb83fd3c160..1b32d88736785 100644 --- a/compiler/rustc_const_eval/src/interpret/operand.rs +++ b/compiler/rustc_const_eval/src/interpret/operand.rs @@ -398,22 +398,27 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { }; let field_layout = base.layout.field(self, field); - if field_layout.is_zst() { - let immediate = Scalar::ZST.into(); - return Ok(OpTy { op: Operand::Immediate(immediate), layout: field_layout }); - } - let offset = base.layout.fields.offset(field); // This makes several assumptions about what layouts we will encounter; we match what // codegen does as good as we can (see `extract_field` in `rustc_codegen_ssa/src/mir/operand.rs`). 
- let field_val = match (*base, base.layout.abi) { + let field_val: Immediate<_> = match (*base, base.layout.abi) { + // the field contains no information + _ if field_layout.is_zst() => { + Scalar::ZST.into() + } // the field covers the entire type _ if field_layout.size == base.layout.size => { + assert!(match (base.layout.abi, field_layout.abi) { + (Abi::Scalar(..), Abi::Scalar(..)) => true, + (Abi::ScalarPair(..), Abi::ScalarPair(..)) => true, + _ => false, + }); assert!(offset.bytes() == 0); *base } // extract fields from types with `ScalarPair` ABI (Immediate::ScalarPair(a_val, b_val), Abi::ScalarPair(a, b)) => { + assert!(matches!(field_layout.abi, Abi::Scalar(..))); Immediate::from(if offset.bytes() == 0 { assert_eq!(field_layout.size, a.size(self)); a_val From aef8a9306d62fbad04a70c369e8472e47519f25d Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Tue, 10 May 2022 13:28:22 +0200 Subject: [PATCH 4/6] update/remove some old comments --- compiler/rustc_const_eval/src/interpret/place.rs | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/compiler/rustc_const_eval/src/interpret/place.rs b/compiler/rustc_const_eval/src/interpret/place.rs index 1a378de139b27..2c85a61035269 100644 --- a/compiler/rustc_const_eval/src/interpret/place.rs +++ b/compiler/rustc_const_eval/src/interpret/place.rs @@ -736,20 +736,16 @@ where dest: &MPlaceTy<'tcx, M::PointerTag>, ) -> InterpResult<'tcx> { // Note that it is really important that the type here is the right one, and matches the - // type things are read at. In case `src_val` is a `ScalarPair`, we don't do any magic here + // type things are read at. In case `value` is a `ScalarPair`, we don't do any magic here // to handle padding properly, which is only correct if we never look at this data with the // wrong type. - // Invalid places are a thing: the return place of a diverging function let tcx = *self.tcx; let Some(mut alloc) = self.get_place_alloc_mut(dest)? 
else { // zero-sized access return Ok(()); }; - // FIXME: We should check that there are dest.layout.size many bytes available in - // memory. The code below is not sufficient, with enough padding it might not - // cover all the bytes! match value { Immediate::Scalar(scalar) => { let Abi::Scalar(s) = dest.layout.abi else { span_bug!( From 761077e19e4f59e4e68f39ec875b3f3799bf8a4f Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Tue, 10 May 2022 14:38:00 +0200 Subject: [PATCH 5/6] fmt --- compiler/rustc_const_eval/src/interpret/operand.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/compiler/rustc_const_eval/src/interpret/operand.rs b/compiler/rustc_const_eval/src/interpret/operand.rs index 1b32d88736785..62a95fa57cfa9 100644 --- a/compiler/rustc_const_eval/src/interpret/operand.rs +++ b/compiler/rustc_const_eval/src/interpret/operand.rs @@ -403,9 +403,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { // codegen does as good as we can (see `extract_field` in `rustc_codegen_ssa/src/mir/operand.rs`). 
let field_val: Immediate<_> = match (*base, base.layout.abi) { // the field contains no information - _ if field_layout.is_zst() => { - Scalar::ZST.into() - } + _ if field_layout.is_zst() => Scalar::ZST.into(), // the field covers the entire type _ if field_layout.size == base.layout.size => { assert!(match (base.layout.abi, field_layout.abi) { From 14f6daf9359c920da3da6ccce41fa910dc2074a3 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Wed, 11 May 2022 13:32:19 +0200 Subject: [PATCH 6/6] avoid computing Scalar size/align in debug builds --- compiler/rustc_const_eval/src/interpret/operand.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/compiler/rustc_const_eval/src/interpret/operand.rs b/compiler/rustc_const_eval/src/interpret/operand.rs index 62a95fa57cfa9..b18948ce926c0 100644 --- a/compiler/rustc_const_eval/src/interpret/operand.rs +++ b/compiler/rustc_const_eval/src/interpret/operand.rs @@ -418,11 +418,11 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { (Immediate::ScalarPair(a_val, b_val), Abi::ScalarPair(a, b)) => { assert!(matches!(field_layout.abi, Abi::Scalar(..))); Immediate::from(if offset.bytes() == 0 { - assert_eq!(field_layout.size, a.size(self)); + debug_assert_eq!(field_layout.size, a.size(self)); a_val } else { - assert_eq!(offset, a.size(self).align_to(b.align(self).abi)); - assert_eq!(field_layout.size, b.size(self)); + debug_assert_eq!(offset, a.size(self).align_to(b.align(self).abi)); + debug_assert_eq!(field_layout.size, b.size(self)); b_val }) }