diff --git a/Cargo.lock b/Cargo.lock
index 93abab8469a7b..367636575cb35 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -3468,11 +3468,9 @@ name = "rustc_codegen_ssa"
 version = "0.0.0"
 dependencies = [
  "ar_archive_writer",
- "arrayvec",
  "bitflags",
  "bstr",
  "cc",
- "either",
  "itertools",
  "libc",
  "object 0.37.0",
diff --git a/compiler/rustc_codegen_ssa/Cargo.toml b/compiler/rustc_codegen_ssa/Cargo.toml
index e9c4c255bce08..cfae1b3ec98e5 100644
--- a/compiler/rustc_codegen_ssa/Cargo.toml
+++ b/compiler/rustc_codegen_ssa/Cargo.toml
@@ -6,13 +6,11 @@ edition = "2024"
 [dependencies]
 # tidy-alphabetical-start
 ar_archive_writer = "0.4.2"
-arrayvec = { version = "0.7", default-features = false }
 bitflags = "2.4.1"
 bstr = "1.11.3"
 # Pinned so `cargo update` bumps don't cause breakage. Please also update the
 # `cc` in `rustc_llvm` if you update the `cc` here.
 cc = "=1.2.16"
-either = "1.5.0"
 itertools = "0.12"
 pathdiff = "0.2.0"
 regex = "1.4"
diff --git a/compiler/rustc_codegen_ssa/src/mir/operand.rs b/compiler/rustc_codegen_ssa/src/mir/operand.rs
index e9389ddf93b3f..99957c6770845 100644
--- a/compiler/rustc_codegen_ssa/src/mir/operand.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/operand.rs
@@ -1,9 +1,9 @@
 use std::fmt;
 
-use arrayvec::ArrayVec;
-use either::Either;
 use rustc_abi as abi;
-use rustc_abi::{Align, BackendRepr, FIRST_VARIANT, Primitive, Size, TagEncoding, Variants};
+use rustc_abi::{
+    Align, BackendRepr, FIRST_VARIANT, FieldIdx, Primitive, Size, TagEncoding, VariantIdx, Variants,
+};
 use rustc_middle::mir::interpret::{Pointer, Scalar, alloc_range};
 use rustc_middle::mir::{self, ConstValue};
 use rustc_middle::ty::Ty;
@@ -13,6 +13,7 @@ use rustc_session::config::OptLevel;
 use tracing::{debug, instrument};
 
 use super::place::{PlaceRef, PlaceValue};
+use super::rvalue::transmute_immediate;
 use super::{FunctionCx, LocalRef};
 use crate::common::IntPredicate;
 use crate::traits::*;
@@ -69,31 +70,6 @@ pub enum OperandValue<V> {
 }
 
 impl<V: CodegenObject> OperandValue<V> {
-    /// If this is ZeroSized/Immediate/Pair, return an array of the 0/1/2 values.
-    /// If this is Ref, return the place.
-    #[inline]
-    pub(crate) fn immediates_or_place(self) -> Either<ArrayVec<V, 2>, PlaceValue<V>> {
-        match self {
-            OperandValue::ZeroSized => Either::Left(ArrayVec::new()),
-            OperandValue::Immediate(a) => Either::Left(ArrayVec::from_iter([a])),
-            OperandValue::Pair(a, b) => Either::Left([a, b].into()),
-            OperandValue::Ref(p) => Either::Right(p),
-        }
-    }
-
-    /// Given an array of 0/1/2 immediate values, return ZeroSized/Immediate/Pair.
-    #[inline]
-    pub(crate) fn from_immediates(immediates: ArrayVec<V, 2>) -> Self {
-        let mut it = immediates.into_iter();
-        let Some(a) = it.next() else {
-            return OperandValue::ZeroSized;
-        };
-        let Some(b) = it.next() else {
-            return OperandValue::Immediate(a);
-        };
-        OperandValue::Pair(a, b)
-    }
-
     /// Treat this value as a pointer and return the data pointer and
     /// optional metadata as backend values.
     ///
@@ -595,6 +571,105 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
             }
         }
     }
+
+    /// Creates an incomplete operand containing the [`abi::Scalar`]s expected based
+    /// on the `layout` passed. This is for use with [`OperandRef::insert_field`]
+    /// later to set the necessary immediate(s).
+    ///
+    /// Returns `None` for `layout`s which cannot be built this way.
+    pub(crate) fn builder(
+        layout: TyAndLayout<'tcx>,
+    ) -> Option<OperandRef<'tcx, Result<V, abi::Scalar>>> {
+        let val = match layout.backend_repr {
+            BackendRepr::Memory { .. } if layout.is_zst() => OperandValue::ZeroSized,
+            BackendRepr::Scalar(s) => OperandValue::Immediate(Err(s)),
+            BackendRepr::ScalarPair(a, b) => OperandValue::Pair(Err(a), Err(b)),
+            BackendRepr::Memory { .. } | BackendRepr::SimdVector { .. } => return None,
+        };
+        Some(OperandRef { val, layout })
+    }
+}
+
+impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, Result<V, abi::Scalar>> {
+    pub(crate) fn insert_field<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
+        &mut self,
+        bx: &mut Bx,
+        v: VariantIdx,
+        f: FieldIdx,
+        operand: OperandRef<'tcx, V>,
+    ) {
+        let (expect_zst, is_zero_offset) = if let abi::FieldsShape::Primitive = self.layout.fields {
+            // The other branch looking at field layouts ICEs for primitives,
+            // so we need to handle them separately.
+            // Multiple fields is possible for cases such as aggregating
+            // a thin pointer, where the second field is the unit.
+            assert!(!self.layout.is_zst());
+            assert_eq!(v, FIRST_VARIANT);
+            let first_field = f == FieldIdx::ZERO;
+            (!first_field, first_field)
+        } else {
+            let variant_layout = self.layout.for_variant(bx.cx(), v);
+            let field_layout = variant_layout.field(bx.cx(), f.as_usize());
+            let field_offset = variant_layout.fields.offset(f.as_usize());
+            (field_layout.is_zst(), field_offset == Size::ZERO)
+        };
+
+        let mut update = |tgt: &mut Result<V, abi::Scalar>, src, from_scalar| {
+            let from_bty = bx.cx().type_from_scalar(from_scalar);
+            let to_scalar = tgt.unwrap_err();
+            let to_bty = bx.cx().type_from_scalar(to_scalar);
+            let imm = transmute_immediate(bx, src, from_scalar, from_bty, to_scalar, to_bty);
+            *tgt = Ok(imm);
+        };
+
+        match (operand.val, operand.layout.backend_repr) {
+            (OperandValue::ZeroSized, _) if expect_zst => {}
+            (OperandValue::Immediate(v), BackendRepr::Scalar(from_scalar)) => match &mut self.val {
+                OperandValue::Immediate(val @ Err(_)) if is_zero_offset => {
+                    update(val, v, from_scalar);
+                }
+                OperandValue::Pair(fst @ Err(_), _) if is_zero_offset => {
+                    update(fst, v, from_scalar);
+                }
+                OperandValue::Pair(_, snd @ Err(_)) if !is_zero_offset => {
+                    update(snd, v, from_scalar);
+                }
+                _ => bug!("Tried to insert {operand:?} into {v:?}.{f:?} of {self:?}"),
+            },
+            (OperandValue::Pair(a, b), BackendRepr::ScalarPair(from_sa, from_sb)) => {
+                match &mut self.val {
+                    OperandValue::Pair(fst @ Err(_), snd @ Err(_)) => {
+                        update(fst, a, from_sa);
+                        update(snd, b, from_sb);
+                    }
+                    _ => bug!("Tried to insert {operand:?} into {v:?}.{f:?} of {self:?}"),
+                }
+            }
+            _ => bug!("Unsupported operand {operand:?} inserting into {v:?}.{f:?} of {self:?}"),
+        }
+    }
+
+    /// After having set all necessary fields, this converts the
+    /// `OperandValue<Result<V, _>>` (as obtained from [`OperandRef::builder`])
+    /// to the normal `OperandValue<V>`.
+    ///
+    /// ICEs if any required fields were not set.
+    pub fn build(&self) -> OperandRef<'tcx, V> {
+        let OperandRef { val, layout } = *self;
+
+        let unwrap = |r: Result<V, abi::Scalar>| match r {
+            Ok(v) => v,
+            Err(_) => bug!("OperandRef::build called while fields are missing {self:?}"),
+        };
+
+        let val = match val {
+            OperandValue::ZeroSized => OperandValue::ZeroSized,
+            OperandValue::Immediate(v) => OperandValue::Immediate(unwrap(v)),
+            OperandValue::Pair(a, b) => OperandValue::Pair(unwrap(a), unwrap(b)),
+            OperandValue::Ref(_) => bug!(),
+        };
+        OperandRef { val, layout }
+    }
 }
 
 impl<'a, 'tcx, V: CodegenObject> OperandValue<V> {
diff --git a/compiler/rustc_codegen_ssa/src/mir/rvalue.rs b/compiler/rustc_codegen_ssa/src/mir/rvalue.rs
index b62ac89661f26..e1d8b7546cf42 100644
--- a/compiler/rustc_codegen_ssa/src/mir/rvalue.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/rvalue.rs
@@ -1,7 +1,6 @@
 use std::assert_matches::assert_matches;
 
-use arrayvec::ArrayVec;
-use rustc_abi::{self as abi, FIRST_VARIANT, FieldIdx};
+use rustc_abi::{self as abi, FIRST_VARIANT};
 use rustc_middle::ty::adjustment::PointerCoercion;
 use rustc_middle::ty::layout::{HasTyCtxt, HasTypingEnv, LayoutOf, TyAndLayout};
 use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
@@ -708,38 +707,15 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
 
                 // `rvalue_creates_operand` has arranged that we only get here if
                 // we can build the aggregate immediate from the field immediates.
-                let mut inputs = ArrayVec::<Bx::Value, 2>::new();
-                let mut input_scalars = ArrayVec::<abi::Scalar, 2>::new();
-                for field_idx in layout.fields.index_by_increasing_offset() {
-                    let field_idx = FieldIdx::from_usize(field_idx);
-                    let op = self.codegen_operand(bx, &fields[field_idx]);
-                    let values = op.val.immediates_or_place().left_or_else(|p| {
-                        bug!("Field {field_idx:?} is {p:?} making {layout:?}");
-                    });
-                    let scalars = self.value_kind(op.layout).scalars().unwrap();
-                    assert_eq!(values.len(), scalars.len());
-                    inputs.extend(values);
-                    input_scalars.extend(scalars);
+                let Some(mut builder) = OperandRef::builder(layout) else {
+                    bug!("Cannot use type in operand builder: {layout:?}")
+                };
+                for (field_idx, field) in fields.iter_enumerated() {
+                    let op = self.codegen_operand(bx, field);
+                    builder.insert_field(bx, FIRST_VARIANT, field_idx, op);
                 }
 
-                let output_scalars = self.value_kind(layout).scalars().unwrap();
-                itertools::izip!(&mut inputs, input_scalars, output_scalars).for_each(
-                    |(v, in_s, out_s)| {
-                        if in_s != out_s {
-                            // We have to be really careful about bool here, because
-                            // `(bool,)` stays i1 but `Cell<bool>` becomes i8.
-                            *v = bx.from_immediate(*v);
-                            *v = bx.to_immediate_scalar(*v, out_s);
-                        }
-                    },
-                );
-
-                let val = OperandValue::from_immediates(inputs);
-                assert!(
-                    val.is_expected_variant_for_type(self.cx, layout),
-                    "Made wrong variant {val:?} for type {layout:?}",
-                );
-                OperandRef { val, layout }
+                builder.build()
             }
             mir::Rvalue::ShallowInitBox(ref operand, content_ty) => {
                 let operand = self.codegen_operand(bx, operand);
@@ -1082,10 +1058,10 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             mir::AggregateKind::Coroutine(..) | mir::AggregateKind::CoroutineClosure(..) => false,
         };
         allowed_kind && {
-            let ty = rvalue.ty(self.mir, self.cx.tcx());
-            let ty = self.monomorphize(ty);
+            let ty = rvalue.ty(self.mir, self.cx.tcx());
+            let ty = self.monomorphize(ty);
             let layout = self.cx.spanned_layout_of(ty, span);
-            !self.cx.is_backend_ref(layout)
+            OperandRef::<Bx::Value>::builder(layout).is_some()
         }
     }
 }
@@ -1129,23 +1105,12 @@ enum OperandValueKind {
     ZeroSized,
 }
 
-impl OperandValueKind {
-    fn scalars(self) -> Option<ArrayVec<abi::Scalar, 2>> {
-        Some(match self {
-            OperandValueKind::ZeroSized => ArrayVec::new(),
-            OperandValueKind::Immediate(a) => ArrayVec::from_iter([a]),
-            OperandValueKind::Pair(a, b) => [a, b].into(),
-            OperandValueKind::Ref => return None,
-        })
-    }
-}
-
 /// Transmutes one of the immediates from an [`OperandValue::Immediate`]
 /// or an [`OperandValue::Pair`] to an immediate of the target type.
 ///
 /// `to_backend_ty` must be the *non*-immediate backend type (so it will be
 /// `i8`, not `i1`, for `bool`-like types.)
-fn transmute_immediate<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
+pub(super) fn transmute_immediate<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
     bx: &mut Bx,
     mut imm: Bx::Value,
     from_scalar: abi::Scalar,
diff --git a/compiler/rustc_codegen_ssa/src/traits/type_.rs b/compiler/rustc_codegen_ssa/src/traits/type_.rs
index c3fc21a92854a..79d15a03972bf 100644
--- a/compiler/rustc_codegen_ssa/src/traits/type_.rs
+++ b/compiler/rustc_codegen_ssa/src/traits/type_.rs
@@ -1,4 +1,4 @@
-use rustc_abi::{AddressSpace, Float, Integer, Reg};
+use rustc_abi::{AddressSpace, Float, Integer, Primitive, Reg, Scalar};
 use rustc_middle::bug;
 use rustc_middle::ty::Ty;
 use rustc_middle::ty::layout::{HasTyCtxt, HasTypingEnv, TyAndLayout};
@@ -84,6 +84,24 @@ pub trait DerivedTypeCodegenMethods<'tcx>:
     fn type_is_freeze(&self, ty: Ty<'tcx>) -> bool {
         ty.is_freeze(self.tcx(), self.typing_env())
     }
+
+    fn type_from_primitive(&self, p: Primitive) -> Self::Type {
+        use Primitive::*;
+        match p {
+            Int(i, _) => self.type_from_integer(i),
+            Float(f) => self.type_from_float(f),
+            Pointer(address_space) => self.type_ptr_ext(address_space),
+        }
+    }
+
+    fn type_from_scalar(&self, s: Scalar) -> Self::Type {
+        // `MaybeUninit` being `repr(transparent)` somewhat implies that the type
+        // of a scalar has to be the type of its primitive (which is true in LLVM,
+        // where noundef is a parameter attribute or metadata) but if we ever get
+        // a backend where that's no longer true, every use of this will need to
+        // be carefully scrutinized and re-evaluated.
+        self.type_from_primitive(s.primitive())
+    }
 }
 
 impl<'tcx, T> DerivedTypeCodegenMethods<'tcx> for T where
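
For reviewers, a minimal sketch of how the three new pieces (`OperandRef::builder`, `insert_field`, `build`) fit together. It simply mirrors the new `mir::Rvalue::Aggregate` arm in rvalue.rs above; `bx`, `layout`, `fields`, and `self` stand for the locals already in scope there, and the snippet is illustrative only, not an additional change in this patch.

```rust
// Ask for a builder: each immediate starts out as Err(abi::Scalar),
// recording which scalar type still has to be filled in. `builder()`
// returns None for SimdVector and non-ZST Memory layouts, which is
// exactly what `rvalue_creates_operand` now checks via `.is_some()`.
let Some(mut builder) = OperandRef::builder(layout) else {
    bug!("Cannot use type in operand builder: {layout:?}")
};

// Insert each field; `insert_field` transmutes the field's immediate(s)
// into the aggregate's scalar type(s) through `transmute_immediate`, so
// the old from_immediate/to_immediate_scalar fixup loop is not needed.
for (field_idx, field) in fields.iter_enumerated() {
    let op = self.codegen_operand(bx, field);
    builder.insert_field(bx, FIRST_VARIANT, field_idx, op);
}

// Unwrap the Result<V, Scalar> immediates back to plain backend values;
// this ICEs if any required field was never inserted.
let operand = builder.build();
```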