// Source: rustc_codegen_ssa/src/mir/place.rs

1use std::ops::Deref as _;
2
3use rustc_abi::{
4    Align, BackendRepr, FieldIdx, FieldsShape, Size, TagEncoding, VariantIdx, Variants,
5};
6use rustc_middle::mir::PlaceTy;
7use rustc_middle::mir::interpret::Scalar;
8use rustc_middle::ty::layout::{HasTyCtxt, HasTypingEnv, LayoutOf, TyAndLayout};
9use rustc_middle::ty::{self, Ty};
10use rustc_middle::{bug, mir};
11use tracing::{debug, instrument};
12
13use super::operand::OperandValue;
14use super::{FunctionCx, LocalRef};
15use crate::common::IntPredicate;
16use crate::size_of_val;
17use crate::traits::*;
18
19/// The location and extra runtime properties of the place.
20///
21/// Typically found in a [`PlaceRef`] or an [`OperandValue::Ref`].
22///
23/// As a location in memory, this has no specific type. If you want to
24/// load or store it using a typed operation, use [`Self::with_type`].
25#[derive(#[automatically_derived]
impl<V: ::core::marker::Copy> ::core::marker::Copy for PlaceValue<V> { }Copy, #[automatically_derived]
impl<V: ::core::clone::Clone> ::core::clone::Clone for PlaceValue<V> {
    #[inline]
    fn clone(&self) -> PlaceValue<V> {
        PlaceValue {
            llval: ::core::clone::Clone::clone(&self.llval),
            llextra: ::core::clone::Clone::clone(&self.llextra),
            align: ::core::clone::Clone::clone(&self.align),
        }
    }
}Clone, #[automatically_derived]
impl<V: ::core::fmt::Debug> ::core::fmt::Debug for PlaceValue<V> {
    #[inline]
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        ::core::fmt::Formatter::debug_struct_field3_finish(f, "PlaceValue",
            "llval", &self.llval, "llextra", &self.llextra, "align",
            &&self.align)
    }
}Debug)]
26pub struct PlaceValue<V> {
27    /// A pointer to the contents of the place.
28    pub llval: V,
29
30    /// This place's extra data if it is unsized, or `None` if null.
31    pub llextra: Option<V>,
32
33    /// The alignment we know for this place.
34    pub align: Align,
35}
36
37impl<V: CodegenObject> PlaceValue<V> {
38    /// Constructor for the ordinary case of `Sized` types.
39    ///
40    /// Sets `llextra` to `None`.
41    pub fn new_sized(llval: V, align: Align) -> PlaceValue<V> {
42        PlaceValue { llval, llextra: None, align }
43    }
44
45    /// Allocates a stack slot in the function for a value
46    /// of the specified size and alignment.
47    ///
48    /// The allocation itself is untyped.
49    pub fn alloca<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx, Value = V>>(
50        bx: &mut Bx,
51        size: Size,
52        align: Align,
53    ) -> PlaceValue<V> {
54        let llval = bx.alloca(size, align);
55        PlaceValue::new_sized(llval, align)
56    }
57
58    /// Creates a `PlaceRef` to this location with the given type.
59    pub fn with_type<'tcx>(self, layout: TyAndLayout<'tcx>) -> PlaceRef<'tcx, V> {
60        if !(layout.is_unsized() || layout.is_uninhabited() || self.llextra.is_none())
    {
    {
        ::core::panicking::panic_fmt(format_args!("Had pointer metadata {0:?} for sized type {1:?}",
                self.llextra, layout));
    }
};assert!(
61            layout.is_unsized() || layout.is_uninhabited() || self.llextra.is_none(),
62            "Had pointer metadata {:?} for sized type {layout:?}",
63            self.llextra,
64        );
65        PlaceRef { val: self, layout }
66    }
67
68    /// Gets the pointer to this place as an [`OperandValue::Immediate`]
69    /// or, for those needing metadata, an [`OperandValue::Pair`].
70    ///
71    /// This is the inverse of [`OperandValue::deref`].
72    pub fn address(self) -> OperandValue<V> {
73        if let Some(llextra) = self.llextra {
74            OperandValue::Pair(self.llval, llextra)
75        } else {
76            OperandValue::Immediate(self.llval)
77        }
78    }
79}
80
81#[derive(#[automatically_derived]
impl<'tcx, V: ::core::marker::Copy> ::core::marker::Copy for PlaceRef<'tcx, V>
    {
}Copy, #[automatically_derived]
impl<'tcx, V: ::core::clone::Clone> ::core::clone::Clone for PlaceRef<'tcx, V>
    {
    #[inline]
    fn clone(&self) -> PlaceRef<'tcx, V> {
        PlaceRef {
            val: ::core::clone::Clone::clone(&self.val),
            layout: ::core::clone::Clone::clone(&self.layout),
        }
    }
}Clone, #[automatically_derived]
impl<'tcx, V: ::core::fmt::Debug> ::core::fmt::Debug for PlaceRef<'tcx, V> {
    #[inline]
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        ::core::fmt::Formatter::debug_struct_field2_finish(f, "PlaceRef",
            "val", &self.val, "layout", &&self.layout)
    }
}Debug)]
82pub struct PlaceRef<'tcx, V> {
83    /// The location and extra runtime properties of the place.
84    pub val: PlaceValue<V>,
85
86    /// The monomorphized type of this place, including variant information.
87    ///
88    /// You probably shouldn't use the alignment from this layout;
89    /// rather you should use the `.val.align` of the actual place,
90    /// which might be different from the type's normal alignment.
91    pub layout: TyAndLayout<'tcx>,
92}
93
94impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
95    pub fn new_sized(llval: V, layout: TyAndLayout<'tcx>) -> PlaceRef<'tcx, V> {
96        PlaceRef::new_sized_aligned(llval, layout, layout.align.abi)
97    }
98
99    pub fn new_sized_aligned(
100        llval: V,
101        layout: TyAndLayout<'tcx>,
102        align: Align,
103    ) -> PlaceRef<'tcx, V> {
104        if !layout.is_sized() {
    ::core::panicking::panic("assertion failed: layout.is_sized()")
};assert!(layout.is_sized());
105        PlaceValue::new_sized(llval, align).with_type(layout)
106    }
107
108    // FIXME(eddyb) pass something else for the name so no work is done
109    // unless LLVM IR names are turned on (e.g. for `--emit=llvm-ir`).
110    pub fn alloca<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
111        bx: &mut Bx,
112        layout: TyAndLayout<'tcx>,
113    ) -> Self {
114        if layout.deref().is_scalable_vector() {
115            Self::alloca_scalable(bx, layout)
116        } else {
117            Self::alloca_size(bx, layout.size, layout)
118        }
119    }
120
121    pub fn alloca_size<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
122        bx: &mut Bx,
123        size: Size,
124        layout: TyAndLayout<'tcx>,
125    ) -> Self {
126        if !layout.is_sized() {
    {
        ::core::panicking::panic_fmt(format_args!("tried to statically allocate unsized place"));
    }
};assert!(layout.is_sized(), "tried to statically allocate unsized place");
127        PlaceValue::alloca(bx, size, layout.align.abi).with_type(layout)
128    }
129
130    /// Returns a place for an indirect reference to an unsized place.
131    // FIXME(eddyb) pass something else for the name so no work is done
132    // unless LLVM IR names are turned on (e.g. for `--emit=llvm-ir`).
133    pub fn alloca_unsized_indirect<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
134        bx: &mut Bx,
135        layout: TyAndLayout<'tcx>,
136    ) -> Self {
137        if !layout.is_unsized() {
    {
        ::core::panicking::panic_fmt(format_args!("tried to allocate indirect place for sized values"));
    }
};assert!(layout.is_unsized(), "tried to allocate indirect place for sized values");
138        let ptr_ty = Ty::new_mut_ptr(bx.cx().tcx(), layout.ty);
139        let ptr_layout = bx.cx().layout_of(ptr_ty);
140        Self::alloca(bx, ptr_layout)
141    }
142
143    pub fn len<Cx: ConstCodegenMethods<Value = V>>(&self, cx: &Cx) -> V {
144        if let FieldsShape::Array { count, .. } = self.layout.fields {
145            if self.layout.is_unsized() {
146                match (&count, &0) {
    (left_val, right_val) => {
        if !(*left_val == *right_val) {
            let kind = ::core::panicking::AssertKind::Eq;
            ::core::panicking::assert_failed(kind, &*left_val, &*right_val,
                ::core::option::Option::None);
        }
    }
};assert_eq!(count, 0);
147                self.val.llextra.unwrap()
148            } else {
149                cx.const_usize(count)
150            }
151        } else {
152            ::rustc_middle::util::bug::bug_fmt(format_args!("unexpected layout `{0:#?}` in PlaceRef::len",
        self.layout))bug!("unexpected layout `{:#?}` in PlaceRef::len", self.layout)
153        }
154    }
155
156    fn alloca_scalable<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
157        bx: &mut Bx,
158        layout: TyAndLayout<'tcx>,
159    ) -> Self {
160        PlaceValue::new_sized(bx.alloca_with_ty(layout), layout.align.abi).with_type(layout)
161    }
162}
163
164impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
165    /// Access a field, at a point when the value's case is known.
166    pub fn project_field<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
167        self,
168        bx: &mut Bx,
169        ix: usize,
170    ) -> Self {
171        let field = self.layout.field(bx.cx(), ix);
172        let offset = self.layout.fields.offset(ix);
173        let effective_field_align = self.val.align.restrict_for_offset(offset);
174
175        // `simple` is called when we don't need to adjust the offset to
176        // the dynamic alignment of the field.
177        let mut simple = || {
178            let llval = if offset.bytes() == 0 {
179                self.val.llval
180            } else {
181                bx.inbounds_ptradd(self.val.llval, bx.const_usize(offset.bytes()))
182            };
183            let val = PlaceValue {
184                llval,
185                llextra: if bx.cx().tcx().type_has_metadata(field.ty, bx.cx().typing_env()) {
186                    self.val.llextra
187                } else {
188                    None
189                },
190                align: effective_field_align,
191            };
192            val.with_type(field)
193        };
194
195        // Simple cases, which don't need DST adjustment:
196        //   * known alignment - sized types, `[T]`, `str`
197        //   * offset 0 -- rounding up to alignment cannot change the offset
198        // Note that looking at `field.align` is incorrect since that is not necessarily equal
199        // to the dynamic alignment of the type.
200        match field.ty.kind() {
201            _ if field.is_sized() => return simple(),
202            ty::Slice(..) | ty::Str => return simple(),
203            _ if offset.bytes() == 0 => return simple(),
204            _ => {}
205        }
206
207        // We need to get the pointer manually now.
208        // We do this by casting to a `*i8`, then offsetting it by the appropriate amount.
209        // We do this instead of, say, simply adjusting the pointer from the result of a GEP
210        // because the field may have an arbitrary alignment in the LLVM representation.
211        //
212        // To demonstrate:
213        //
214        //     struct Foo<T: ?Sized> {
215        //         x: u16,
216        //         y: T
217        //     }
218        //
219        // The type `Foo<Foo<Trait>>` is represented in LLVM as `{ u16, { u16, u8 }}`, meaning that
220        // the `y` field has 16-bit alignment.
221
222        let meta = self.val.llextra;
223
224        let unaligned_offset = bx.cx().const_usize(offset.bytes());
225
226        // Get the alignment of the field
227        let (_, mut unsized_align) = size_of_val::size_and_align_of_dst(bx, field.ty, meta);
228
229        // For packed types, we need to cap alignment.
230        if let ty::Adt(def, _) = self.layout.ty.kind()
231            && let Some(packed) = def.repr().pack
232        {
233            let packed = bx.const_usize(packed.bytes());
234            let cmp = bx.icmp(IntPredicate::IntULT, unsized_align, packed);
235            unsized_align = bx.select(cmp, unsized_align, packed)
236        }
237
238        // Bump the unaligned offset up to the appropriate alignment
239        let offset = round_up_const_value_to_alignment(bx, unaligned_offset, unsized_align);
240
241        {
    use ::tracing::__macro_support::Callsite as _;
    static __CALLSITE: ::tracing::callsite::DefaultCallsite =
        {
            static META: ::tracing::Metadata<'static> =
                {
                    ::tracing_core::metadata::Metadata::new("event compiler/rustc_codegen_ssa/src/mir/place.rs:241",
                        "rustc_codegen_ssa::mir::place", ::tracing::Level::DEBUG,
                        ::tracing_core::__macro_support::Option::Some("compiler/rustc_codegen_ssa/src/mir/place.rs"),
                        ::tracing_core::__macro_support::Option::Some(241u32),
                        ::tracing_core::__macro_support::Option::Some("rustc_codegen_ssa::mir::place"),
                        ::tracing_core::field::FieldSet::new(&["message"],
                            ::tracing_core::callsite::Identifier(&__CALLSITE)),
                        ::tracing::metadata::Kind::EVENT)
                };
            ::tracing::callsite::DefaultCallsite::new(&META)
        };
    let enabled =
        ::tracing::Level::DEBUG <= ::tracing::level_filters::STATIC_MAX_LEVEL
                &&
                ::tracing::Level::DEBUG <=
                    ::tracing::level_filters::LevelFilter::current() &&
            {
                let interest = __CALLSITE.interest();
                !interest.is_never() &&
                    ::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
                        interest)
            };
    if enabled {
        (|value_set: ::tracing::field::ValueSet|
                    {
                        let meta = __CALLSITE.metadata();
                        ::tracing::Event::dispatch(meta, &value_set);
                        ;
                    })({
                #[allow(unused_imports)]
                use ::tracing::field::{debug, display, Value};
                let mut iter = __CALLSITE.metadata().fields().iter();
                __CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
                                    ::tracing::__macro_support::Option::Some(&format_args!("struct_field_ptr: DST field offset: {0:?}",
                                                    offset) as &dyn Value))])
            });
    } else { ; }
};debug!("struct_field_ptr: DST field offset: {:?}", offset);
242
243        // Adjust pointer.
244        let ptr = bx.inbounds_ptradd(self.val.llval, offset);
245        let val =
246            PlaceValue { llval: ptr, llextra: self.val.llextra, align: effective_field_align };
247        val.with_type(field)
248    }
249
250    /// Sets the discriminant for a new value of the given case of the given
251    /// representation.
252    pub fn codegen_set_discr<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
253        &self,
254        bx: &mut Bx,
255        variant_index: VariantIdx,
256    ) {
257        match codegen_tag_value(bx.cx(), variant_index, self.layout) {
258            Err(UninhabitedVariantError) => {
259                // We play it safe by using a well-defined `abort`, but we could go for immediate UB
260                // if that turns out to be helpful.
261                bx.abort();
262            }
263            Ok(Some((tag_field, imm))) => {
264                let tag_place = self.project_field(bx, tag_field.as_usize());
265                OperandValue::Immediate(imm).store(bx, tag_place);
266            }
267            Ok(None) => {}
268        }
269    }
270
271    pub fn project_index<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
272        &self,
273        bx: &mut Bx,
274        llindex: V,
275    ) -> Self {
276        // Statically compute the offset if we can, otherwise just use the element size,
277        // as this will yield the lowest alignment.
278        let layout = self.layout.field(bx, 0);
279        let offset = if let Some(llindex) = bx.const_to_opt_uint(llindex) {
280            layout.size.checked_mul(llindex, bx).unwrap_or(layout.size)
281        } else {
282            layout.size
283        };
284
285        let llval = bx.inbounds_nuw_gep(bx.cx().backend_type(layout), self.val.llval, &[llindex]);
286        let align = self.val.align.restrict_for_offset(offset);
287        PlaceValue::new_sized(llval, align).with_type(layout)
288    }
289
290    pub fn project_downcast<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
291        &self,
292        bx: &mut Bx,
293        variant_index: VariantIdx,
294    ) -> Self {
295        let mut downcast = *self;
296        downcast.layout = self.layout.for_variant(bx.cx(), variant_index);
297        downcast
298    }
299
300    pub fn project_type<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
301        &self,
302        bx: &mut Bx,
303        ty: Ty<'tcx>,
304    ) -> Self {
305        let mut downcast = *self;
306        downcast.layout = bx.cx().layout_of(ty);
307        downcast
308    }
309
310    pub fn storage_live<Bx: BuilderMethods<'a, 'tcx, Value = V>>(&self, bx: &mut Bx) {
311        bx.lifetime_start(self.val.llval, self.layout.size);
312    }
313
314    pub fn storage_dead<Bx: BuilderMethods<'a, 'tcx, Value = V>>(&self, bx: &mut Bx) {
315        bx.lifetime_end(self.val.llval, self.layout.size);
316    }
317}
318
319impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
320    #[allow(clippy :: suspicious_else_formatting)]
{
    let __tracing_attr_span;
    let __tracing_attr_guard;
    if ::tracing::Level::TRACE <= ::tracing::level_filters::STATIC_MAX_LEVEL
                &&
                ::tracing::Level::TRACE <=
                    ::tracing::level_filters::LevelFilter::current() ||
            { false } {
        __tracing_attr_span =
            {
                use ::tracing::__macro_support::Callsite as _;
                static __CALLSITE: ::tracing::callsite::DefaultCallsite =
                    {
                        static META: ::tracing::Metadata<'static> =
                            {
                                ::tracing_core::metadata::Metadata::new("codegen_place",
                                    "rustc_codegen_ssa::mir::place", ::tracing::Level::TRACE,
                                    ::tracing_core::__macro_support::Option::Some("compiler/rustc_codegen_ssa/src/mir/place.rs"),
                                    ::tracing_core::__macro_support::Option::Some(320u32),
                                    ::tracing_core::__macro_support::Option::Some("rustc_codegen_ssa::mir::place"),
                                    ::tracing_core::field::FieldSet::new(&["place_ref"],
                                        ::tracing_core::callsite::Identifier(&__CALLSITE)),
                                    ::tracing::metadata::Kind::SPAN)
                            };
                        ::tracing::callsite::DefaultCallsite::new(&META)
                    };
                let mut interest = ::tracing::subscriber::Interest::never();
                if ::tracing::Level::TRACE <=
                                    ::tracing::level_filters::STATIC_MAX_LEVEL &&
                                ::tracing::Level::TRACE <=
                                    ::tracing::level_filters::LevelFilter::current() &&
                            { interest = __CALLSITE.interest(); !interest.is_never() }
                        &&
                        ::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
                            interest) {
                    let meta = __CALLSITE.metadata();
                    ::tracing::Span::new(meta,
                        &{
                                #[allow(unused_imports)]
                                use ::tracing::field::{debug, display, Value};
                                let mut iter = meta.fields().iter();
                                meta.fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
                                                    ::tracing::__macro_support::Option::Some(&::tracing::field::debug(&place_ref)
                                                            as &dyn Value))])
                            })
                } else {
                    let span =
                        ::tracing::__macro_support::__disabled_span(__CALLSITE.metadata());
                    {};
                    span
                }
            };
        __tracing_attr_guard = __tracing_attr_span.enter();
    }

    #[warn(clippy :: suspicious_else_formatting)]
    {

        #[allow(unknown_lints, unreachable_code, clippy ::
        diverging_sub_expression, clippy :: empty_loop, clippy ::
        let_unit_value, clippy :: let_with_type_underscore, clippy ::
        needless_return, clippy :: unreachable)]
        if false {
            let __tracing_attr_fake_return: PlaceRef<'tcx, Bx::Value> =
                loop {};
            return __tracing_attr_fake_return;
        }
        {
            let cx = self.cx;
            let tcx = self.cx.tcx();
            let mut base = 0;
            let mut cg_base =
                match self.locals[place_ref.local] {
                    LocalRef::Place(place) => place,
                    LocalRef::UnsizedPlace(place) =>
                        bx.load_operand(place).deref(cx),
                    LocalRef::Operand(..) => {
                        if place_ref.is_indirect_first_projection() {
                            base = 1;
                            let cg_base =
                                self.codegen_consume(bx,
                                    mir::PlaceRef {
                                        projection: &place_ref.projection[..0],
                                        ..place_ref
                                    });
                            cg_base.deref(bx.cx())
                        } else {
                            ::rustc_middle::util::bug::bug_fmt(format_args!("using operand local {0:?} as place",
                                    place_ref));
                        }
                    }
                    LocalRef::PendingOperand => {
                        ::rustc_middle::util::bug::bug_fmt(format_args!("using still-pending operand local {0:?} as place",
                                place_ref));
                    }
                };
            for elem in place_ref.projection[base..].iter() {
                cg_base =
                    match *elem {
                        mir::ProjectionElem::Deref =>
                            bx.load_operand(cg_base).deref(bx.cx()),
                        mir::ProjectionElem::Field(ref field, _) => {
                            if !!cg_base.layout.ty.is_any_ptr() {
                                {
                                    ::core::panicking::panic_fmt(format_args!("Bad PlaceRef: destructing pointers should use cast/PtrMetadata, but tried to access field {0:?} of pointer {1:?}",
                                            field, cg_base));
                                }
                            };
                            cg_base.project_field(bx, field.index())
                        }
                        mir::ProjectionElem::OpaqueCast(ty) => {
                            ::rustc_middle::util::bug::bug_fmt(format_args!("encountered OpaqueCast({0}) in codegen",
                                    ty))
                        }
                        mir::ProjectionElem::UnwrapUnsafeBinder(ty) => {
                            cg_base.project_type(bx, self.monomorphize(ty))
                        }
                        mir::ProjectionElem::Index(index) => {
                            let index = &mir::Operand::Copy(mir::Place::from(index));
                            let index = self.codegen_operand(bx, index);
                            let llindex = index.immediate();
                            cg_base.project_index(bx, llindex)
                        }
                        mir::ProjectionElem::ConstantIndex {
                            offset, from_end: false, min_length: _ } => {
                            let lloffset = bx.cx().const_usize(offset);
                            cg_base.project_index(bx, lloffset)
                        }
                        mir::ProjectionElem::ConstantIndex {
                            offset, from_end: true, min_length: _ } => {
                            let lloffset = bx.cx().const_usize(offset);
                            let lllen = cg_base.len(bx.cx());
                            let llindex = bx.sub(lllen, lloffset);
                            cg_base.project_index(bx, llindex)
                        }
                        mir::ProjectionElem::Subslice { from, to, from_end } => {
                            let mut subslice =
                                cg_base.project_index(bx, bx.cx().const_usize(from));
                            let projected_ty =
                                PlaceTy::from_ty(cg_base.layout.ty).projection_ty(tcx,
                                        *elem).ty;
                            subslice.layout =
                                bx.cx().layout_of(self.monomorphize(projected_ty));
                            if subslice.layout.is_unsized() {
                                if !from_end {
                                    {
                                        ::core::panicking::panic_fmt(format_args!("slice subslices should be `from_end`"));
                                    }
                                };
                                subslice.val.llextra =
                                    Some(bx.sub(cg_base.val.llextra.unwrap(),
                                            bx.cx().const_usize(from + to)));
                            }
                            subslice
                        }
                        mir::ProjectionElem::Downcast(_, v) =>
                            cg_base.project_downcast(bx, v),
                    };
            }
            {
                use ::tracing::__macro_support::Callsite as _;
                static __CALLSITE: ::tracing::callsite::DefaultCallsite =
                    {
                        static META: ::tracing::Metadata<'static> =
                            {
                                ::tracing_core::metadata::Metadata::new("event compiler/rustc_codegen_ssa/src/mir/place.rs:400",
                                    "rustc_codegen_ssa::mir::place", ::tracing::Level::DEBUG,
                                    ::tracing_core::__macro_support::Option::Some("compiler/rustc_codegen_ssa/src/mir/place.rs"),
                                    ::tracing_core::__macro_support::Option::Some(400u32),
                                    ::tracing_core::__macro_support::Option::Some("rustc_codegen_ssa::mir::place"),
                                    ::tracing_core::field::FieldSet::new(&["message"],
                                        ::tracing_core::callsite::Identifier(&__CALLSITE)),
                                    ::tracing::metadata::Kind::EVENT)
                            };
                        ::tracing::callsite::DefaultCallsite::new(&META)
                    };
                let enabled =
                    ::tracing::Level::DEBUG <=
                                ::tracing::level_filters::STATIC_MAX_LEVEL &&
                            ::tracing::Level::DEBUG <=
                                ::tracing::level_filters::LevelFilter::current() &&
                        {
                            let interest = __CALLSITE.interest();
                            !interest.is_never() &&
                                ::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
                                    interest)
                        };
                if enabled {
                    (|value_set: ::tracing::field::ValueSet|
                                {
                                    let meta = __CALLSITE.metadata();
                                    ::tracing::Event::dispatch(meta, &value_set);
                                    ;
                                })({
                            #[allow(unused_imports)]
                            use ::tracing::field::{debug, display, Value};
                            let mut iter = __CALLSITE.metadata().fields().iter();
                            __CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
                                                ::tracing::__macro_support::Option::Some(&format_args!("codegen_place(place={0:?}) => {1:?}",
                                                                place_ref, cg_base) as &dyn Value))])
                        });
                } else { ; }
            };
            cg_base
        }
    }
}#[instrument(level = "trace", skip(self, bx))]
321    pub fn codegen_place(
322        &mut self,
323        bx: &mut Bx,
324        place_ref: mir::PlaceRef<'tcx>,
325    ) -> PlaceRef<'tcx, Bx::Value> {
326        let cx = self.cx;
327        let tcx = self.cx.tcx();
328
329        let mut base = 0;
330        let mut cg_base = match self.locals[place_ref.local] {
331            LocalRef::Place(place) => place,
332            LocalRef::UnsizedPlace(place) => bx.load_operand(place).deref(cx),
333            LocalRef::Operand(..) => {
334                if place_ref.is_indirect_first_projection() {
335                    base = 1;
336                    let cg_base = self.codegen_consume(
337                        bx,
338                        mir::PlaceRef { projection: &place_ref.projection[..0], ..place_ref },
339                    );
340                    cg_base.deref(bx.cx())
341                } else {
342                    bug!("using operand local {:?} as place", place_ref);
343                }
344            }
345            LocalRef::PendingOperand => {
346                bug!("using still-pending operand local {:?} as place", place_ref);
347            }
348        };
349        for elem in place_ref.projection[base..].iter() {
350            cg_base = match *elem {
351                mir::ProjectionElem::Deref => bx.load_operand(cg_base).deref(bx.cx()),
352                mir::ProjectionElem::Field(ref field, _) => {
353                    assert!(
354                        !cg_base.layout.ty.is_any_ptr(),
355                        "Bad PlaceRef: destructing pointers should use cast/PtrMetadata, \
356                         but tried to access field {field:?} of pointer {cg_base:?}",
357                    );
358                    cg_base.project_field(bx, field.index())
359                }
360                mir::ProjectionElem::OpaqueCast(ty) => {
361                    bug!("encountered OpaqueCast({ty}) in codegen")
362                }
363                mir::ProjectionElem::UnwrapUnsafeBinder(ty) => {
364                    cg_base.project_type(bx, self.monomorphize(ty))
365                }
366                mir::ProjectionElem::Index(index) => {
367                    let index = &mir::Operand::Copy(mir::Place::from(index));
368                    let index = self.codegen_operand(bx, index);
369                    let llindex = index.immediate();
370                    cg_base.project_index(bx, llindex)
371                }
372                mir::ProjectionElem::ConstantIndex { offset, from_end: false, min_length: _ } => {
373                    let lloffset = bx.cx().const_usize(offset);
374                    cg_base.project_index(bx, lloffset)
375                }
376                mir::ProjectionElem::ConstantIndex { offset, from_end: true, min_length: _ } => {
377                    let lloffset = bx.cx().const_usize(offset);
378                    let lllen = cg_base.len(bx.cx());
379                    let llindex = bx.sub(lllen, lloffset);
380                    cg_base.project_index(bx, llindex)
381                }
382                mir::ProjectionElem::Subslice { from, to, from_end } => {
383                    let mut subslice = cg_base.project_index(bx, bx.cx().const_usize(from));
384                    let projected_ty =
385                        PlaceTy::from_ty(cg_base.layout.ty).projection_ty(tcx, *elem).ty;
386                    subslice.layout = bx.cx().layout_of(self.monomorphize(projected_ty));
387
388                    if subslice.layout.is_unsized() {
389                        assert!(from_end, "slice subslices should be `from_end`");
390                        subslice.val.llextra = Some(
391                            bx.sub(cg_base.val.llextra.unwrap(), bx.cx().const_usize(from + to)),
392                        );
393                    }
394
395                    subslice
396                }
397                mir::ProjectionElem::Downcast(_, v) => cg_base.project_downcast(bx, v),
398            };
399        }
400        debug!("codegen_place(place={:?}) => {:?}", place_ref, cg_base);
401        cg_base
402    }
403
404    pub fn monomorphized_place_ty(&self, place_ref: mir::PlaceRef<'tcx>) -> Ty<'tcx> {
405        let tcx = self.cx.tcx();
406        let place_ty = place_ref.ty(self.mir, tcx);
407        self.monomorphize(place_ty.ty)
408    }
409}
410
411fn round_up_const_value_to_alignment<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
412    bx: &mut Bx,
413    value: Bx::Value,
414    align: Bx::Value,
415) -> Bx::Value {
416    // In pseudo code:
417    //
418    //     if value & (align - 1) == 0 {
419    //         value
420    //     } else {
421    //         (value & !(align - 1)) + align
422    //     }
423    //
424    // Usually this is written without branches as
425    //
426    //     (value + align - 1) & !(align - 1)
427    //
428    // But this formula cannot take advantage of constant `value`. E.g. if `value` is known
429    // at compile time to be `1`, this expression should be optimized to `align`. However,
430    // optimization only holds if `align` is a power of two. Since the optimizer doesn't know
431    // that `align` is a power of two, it cannot perform this optimization.
432    //
433    // Instead we use
434    //
435    //     value + (-value & (align - 1))
436    //
437    // Since `align` is used only once, the expression can be optimized. For `value = 0`
438    // its optimized to `0` even in debug mode.
439    //
440    // NB: The previous version of this code used
441    //
442    //     (value + align - 1) & -align
443    //
444    // Even though `-align == !(align - 1)`, LLVM failed to optimize this even for
445    // `value = 0`. Bug report: https://bugs.llvm.org/show_bug.cgi?id=48559
446    let one = bx.const_usize(1);
447    let align_minus_1 = bx.sub(align, one);
448    let neg_value = bx.neg(value);
449    let offset = bx.and(neg_value, align_minus_1);
450    bx.add(value, offset)
451}
452
453/// Calculates the value that needs to be stored to mark the discriminant.
454///
455/// This might be `None` for a `struct` or a niched variant (like `Some(&3)`).
456///
457/// If it's `Some`, it returns the value to store and the field in which to
458/// store it. Note that this value is *not* the same as the discriminant, in
459/// general, as it might be a niche value or have a different size.
460///
461/// It might also be an `Err` because the variant is uninhabited.
462pub(super) fn codegen_tag_value<'tcx, V>(
463    cx: &impl CodegenMethods<'tcx, Value = V>,
464    variant_index: VariantIdx,
465    layout: TyAndLayout<'tcx>,
466) -> Result<Option<(FieldIdx, V)>, UninhabitedVariantError> {
467    // By checking uninhabited-ness first we don't need to worry about types
468    // like `(u32, !)` which are single-variant but weird.
469    if layout.for_variant(cx, variant_index).is_uninhabited() {
470        return Err(UninhabitedVariantError);
471    }
472
473    Ok(match layout.variants {
474        Variants::Empty => {
    ::core::panicking::panic_fmt(format_args!("internal error: entered unreachable code: {0}",
            format_args!("we already handled uninhabited types")));
}unreachable!("we already handled uninhabited types"),
475        Variants::Single { index } => {
476            match (&index, &variant_index) {
    (left_val, right_val) => {
        if !(*left_val == *right_val) {
            let kind = ::core::panicking::AssertKind::Eq;
            ::core::panicking::assert_failed(kind, &*left_val, &*right_val,
                ::core::option::Option::None);
        }
    }
};assert_eq!(index, variant_index);
477            None
478        }
479
480        Variants::Multiple { tag_encoding: TagEncoding::Direct, tag_field, .. } => {
481            let discr = layout.ty.discriminant_for_variant(cx.tcx(), variant_index);
482            let to = discr.unwrap().val;
483            let tag_layout = layout.field(cx, tag_field.as_usize());
484            let tag_llty = cx.immediate_backend_type(tag_layout);
485            let imm = cx.const_uint_big(tag_llty, to);
486            Some((tag_field, imm))
487        }
488        Variants::Multiple {
489            tag_encoding: TagEncoding::Niche { untagged_variant, ref niche_variants, niche_start },
490            tag_field,
491            ..
492        } => {
493            if variant_index != untagged_variant {
494                let niche_layout = layout.field(cx, tag_field.as_usize());
495                let niche_llty = cx.immediate_backend_type(niche_layout);
496                let BackendRepr::Scalar(scalar) = niche_layout.backend_repr else {
497                    ::rustc_middle::util::bug::bug_fmt(format_args!("expected a scalar placeref for the niche"));bug!("expected a scalar placeref for the niche");
498                };
499                // We are supposed to compute `niche_value.wrapping_add(niche_start)` wrapping
500                // around the `niche`'s type.
501                // The easiest way to do that is to do wrapping arithmetic on `u128` and then
502                // masking off any extra bits that occur because we did the arithmetic with too many bits.
503                let niche_value = variant_index.as_u32() - niche_variants.start().as_u32();
504                let niche_value = (niche_value as u128).wrapping_add(niche_start);
505                let niche_value = niche_value & niche_layout.size.unsigned_int_max();
506
507                let niche_llval = cx.scalar_to_backend(
508                    Scalar::from_uint(niche_value, niche_layout.size),
509                    scalar,
510                    niche_llty,
511                );
512                Some((tag_field, niche_llval))
513            } else {
514                None
515            }
516        }
517    })
518}
519
520#[derive(#[automatically_derived]
impl ::core::fmt::Debug for UninhabitedVariantError {
    #[inline]
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        ::core::fmt::Formatter::write_str(f, "UninhabitedVariantError")
    }
}Debug)]
521pub(super) struct UninhabitedVariantError;