
rustc_codegen_ssa/mir/place.rs

use rustc_abi::{
    Align, BackendRepr, FieldIdx, FieldsShape, Size, TagEncoding, VariantIdx, Variants,
};
use rustc_middle::mir::PlaceTy;
use rustc_middle::mir::interpret::Scalar;
use rustc_middle::ty::layout::{HasTyCtxt, HasTypingEnv, LayoutOf, TyAndLayout};
use rustc_middle::ty::{self, Ty};
use rustc_middle::{bug, mir};
use tracing::{debug, instrument};

use super::operand::OperandValue;
use super::{FunctionCx, LocalRef};
use crate::common::IntPredicate;
use crate::size_of_val;
use crate::traits::*;

/// The location and extra runtime properties of the place.
///
/// Typically found in a [`PlaceRef`] or an [`OperandValue::Ref`].
///
/// As a location in memory, this has no specific type. If you want to
/// load or store it using a typed operation, use [`Self::with_type`].
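///
/// For illustration (a sketch, assuming a builder `bx` and a sized `layout`):
/// an untyped stack slot can be given a type afterwards, e.g.
/// `PlaceValue::alloca(bx, layout.size, layout.align.abi).with_type(layout)`,
/// which is what [`PlaceRef::alloca_size`] below does.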
#[derive(Copy, Clone, Debug)]
pub struct PlaceValue<V> {
    /// A pointer to the contents of the place.
    pub llval: V,

    /// This place's extra data if it is unsized, or `None` if it is sized.
    pub llextra: Option<V>,

    /// The alignment we know for this place.
    pub align: Align,
}

impl<V: CodegenObject> PlaceValue<V> {
    /// Constructor for the ordinary case of `Sized` types.
    ///
    /// Sets `llextra` to `None`.
    pub fn new_sized(llval: V, align: Align) -> PlaceValue<V> {
        PlaceValue { llval, llextra: None, align }
    }

    /// Allocates a stack slot in the function for a value
    /// of the specified size and alignment.
    ///
    /// The allocation itself is untyped.
    pub fn alloca<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        size: Size,
        align: Align,
    ) -> PlaceValue<V> {
        let llval = bx.alloca(size, align);
        PlaceValue::new_sized(llval, align)
    }

    /// Creates a `PlaceRef` to this location with the given type.
    pub fn with_type<'tcx>(self, layout: TyAndLayout<'tcx>) -> PlaceRef<'tcx, V> {
        assert!(
            layout.is_unsized() || layout.is_uninhabited() || self.llextra.is_none(),
            "Had pointer metadata {:?} for sized type {layout:?}",
            self.llextra,
        );
        PlaceRef { val: self, layout }
    }

    /// Gets the pointer to this place as an [`OperandValue::Immediate`]
    /// or, for those needing metadata, an [`OperandValue::Pair`].
    ///
    /// This is the inverse of [`OperandValue::deref`].
    pub fn address(self) -> OperandValue<V> {
        if let Some(llextra) = self.llextra {
            OperandValue::Pair(self.llval, llextra)
        } else {
            OperandValue::Immediate(self.llval)
        }
    }
}

#[derive(Copy, Clone, Debug)]
pub struct PlaceRef<'tcx, V> {
    /// The location and extra runtime properties of the place.
    pub val: PlaceValue<V>,

    /// The monomorphized type of this place, including variant information.
    ///
    /// You probably shouldn't use the alignment from this layout;
    /// rather you should use the `.val.align` of the actual place,
    /// which might be different from the type's normal alignment.
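    ///
    /// (Illustrative: for a field of a `#[repr(packed)]` struct, `.val.align`
    /// can be 1 even though this layout's own alignment is larger.)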
    pub layout: TyAndLayout<'tcx>,
}

impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
    pub fn new_sized(llval: V, layout: TyAndLayout<'tcx>) -> PlaceRef<'tcx, V> {
        PlaceRef::new_sized_aligned(llval, layout, layout.align.abi)
    }

    pub fn new_sized_aligned(
        llval: V,
        layout: TyAndLayout<'tcx>,
        align: Align,
    ) -> PlaceRef<'tcx, V> {
        assert!(layout.is_sized());
        PlaceValue::new_sized(llval, align).with_type(layout)
    }

    // FIXME(eddyb) pass something else for the name so no work is done
    // unless LLVM IR names are turned on (e.g. for `--emit=llvm-ir`).
    pub fn alloca<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        layout: TyAndLayout<'tcx>,
    ) -> Self {
        if layout.is_runtime_sized() {
            Self::alloca_runtime_sized(bx, layout)
        } else {
            Self::alloca_size(bx, layout.size, layout)
        }
    }

    pub fn alloca_size<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        size: Size,
        layout: TyAndLayout<'tcx>,
    ) -> Self {
        assert!(layout.is_sized(), "tried to statically allocate unsized place");
        PlaceValue::alloca(bx, size, layout.align.abi).with_type(layout)
    }

    /// Returns a place for an indirect reference to an unsized place.
    // FIXME(eddyb) pass something else for the name so no work is done
    // unless LLVM IR names are turned on (e.g. for `--emit=llvm-ir`).
    pub fn alloca_unsized_indirect<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        layout: TyAndLayout<'tcx>,
    ) -> Self {
        assert!(layout.is_unsized(), "tried to allocate indirect place for sized values");
        let ptr_ty = Ty::new_mut_ptr(bx.cx().tcx(), layout.ty);
        let ptr_layout = bx.cx().layout_of(ptr_ty);
        Self::alloca(bx, ptr_layout)
    }

    pub fn len<Cx: ConstCodegenMethods<Value = V>>(&self, cx: &Cx) -> V {
        if let FieldsShape::Array { count, .. } = self.layout.fields {
            if self.layout.is_unsized() {
                assert_eq!(count, 0);
                self.val.llextra.unwrap()
            } else {
                cx.const_usize(count)
            }
        } else {
            bug!("unexpected layout `{:#?}` in PlaceRef::len", self.layout)
        }
    }

    fn alloca_runtime_sized<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        layout: TyAndLayout<'tcx>,
    ) -> Self {
        let (element_count, ty) = layout.ty.scalable_vector_element_count_and_type(bx.tcx());
        PlaceValue::new_sized(
            bx.scalable_alloca(element_count as u64, layout.align.abi, ty),
            layout.align.abi,
        )
        .with_type(layout)
    }
}

impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
    /// Access a field, at a point when the value's case is known.
    pub fn project_field<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        ix: usize,
    ) -> Self {
        let field = self.layout.field(bx.cx(), ix);
        let offset = self.layout.fields.offset(ix);
        let effective_field_align = self.val.align.restrict_for_offset(offset);

        // `simple` is called when we don't need to adjust the offset to
        // the dynamic alignment of the field.
        let mut simple = || {
            let llval = if offset.bytes() == 0 {
                self.val.llval
            } else {
                bx.inbounds_ptradd(self.val.llval, bx.const_usize(offset.bytes()))
            };
            let val = PlaceValue {
                llval,
                llextra: if bx.cx().tcx().type_has_metadata(field.ty, bx.cx().typing_env()) {
                    self.val.llextra
                } else {
                    None
                },
                align: effective_field_align,
            };
            val.with_type(field)
        };

        // Simple cases, which don't need DST adjustment:
        //   * known alignment - sized types, `[T]`, `str`
        //   * offset 0 -- rounding up to alignment cannot change the offset
        // Note that looking at `field.align` is incorrect since that is not necessarily equal
        // to the dynamic alignment of the type.
        match field.ty.kind() {
            _ if field.is_sized() => return simple(),
            ty::Slice(..) | ty::Str => return simple(),
            _ if offset.bytes() == 0 => return simple(),
            _ => {}
        }

        // We need to get the pointer manually now.
        // We do this by casting to a `*i8`, then offsetting it by the appropriate amount.
        // We do this instead of, say, simply adjusting the pointer from the result of a GEP
        // because the field may have an arbitrary alignment in the LLVM representation.
        //
        // To demonstrate:
        //
        //     struct Foo<T: ?Sized> {
        //         x: u16,
        //         y: T
        //     }
        //
        // The type `Foo<Foo<Trait>>` is represented in LLVM as `{ u16, { u16, u8 }}`, meaning that
        // the `y` field has 16-bit alignment.

        let meta = self.val.llextra;

        let unaligned_offset = bx.cx().const_usize(offset.bytes());

        // Get the alignment of the field
        let (_, mut unsized_align) = size_of_val::size_and_align_of_dst(bx, field.ty, meta);

        // For packed types, we need to cap alignment.
        if let ty::Adt(def, _) = self.layout.ty.kind()
            && let Some(packed) = def.repr().pack
        {
            let packed = bx.const_usize(packed.bytes());
            let cmp = bx.icmp(IntPredicate::IntULT, unsized_align, packed);
            unsized_align = bx.select(cmp, unsized_align, packed)
        }

        // Bump the unaligned offset up to the appropriate alignment
        let offset = round_up_const_value_to_alignment(bx, unaligned_offset, unsized_align);

        debug!("struct_field_ptr: DST field offset: {:?}", offset);

        // Adjust pointer.
        let ptr = bx.inbounds_ptradd(self.val.llval, offset);
        let val =
            PlaceValue { llval: ptr, llextra: self.val.llextra, align: effective_field_align };
        val.with_type(field)
    }

    /// Sets the discriminant for a new value of the given case of the given
    /// representation.
    pub fn codegen_set_discr<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        &self,
        bx: &mut Bx,
        variant_index: VariantIdx,
    ) {
        match codegen_tag_value(bx.cx(), variant_index, self.layout) {
            Err(UninhabitedVariantError) => {
                // We play it safe by using a well-defined `abort`, but we could go for immediate UB
                // if that turns out to be helpful.
                bx.abort();
            }
            Ok(Some((tag_field, imm))) => {
                let tag_place = self.project_field(bx, tag_field.as_usize());
                OperandValue::Immediate(imm).store(bx, tag_place);
            }
            Ok(None) => {}
        }
    }

    pub fn project_index<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        &self,
        bx: &mut Bx,
        llindex: V,
    ) -> Self {
        // Statically compute the offset if we can, otherwise just use the element size,
        // as this will yield the lowest alignment.
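        // (Worked example, illustrative: indexing a `[u16]` at constant index 3
        // gives an offset of 6 bytes, so `restrict_for_offset` below caps the
        // element's known alignment at 2 even if the base is aligned to 8.)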
        let layout = self.layout.field(bx, 0);
        let offset = if let Some(llindex) = bx.const_to_opt_uint(llindex) {
            layout.size.checked_mul(llindex, bx).unwrap_or(layout.size)
        } else {
            layout.size
        };

        let llval = bx.inbounds_nuw_gep(bx.cx().backend_type(layout), self.val.llval, &[llindex]);
        let align = self.val.align.restrict_for_offset(offset);
        PlaceValue::new_sized(llval, align).with_type(layout)
    }

    pub fn project_downcast<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        &self,
        bx: &mut Bx,
        variant_index: VariantIdx,
    ) -> Self {
        let mut downcast = *self;
        downcast.layout = self.layout.for_variant(bx.cx(), variant_index);
        downcast
    }

    pub fn project_type<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        &self,
        bx: &mut Bx,
        ty: Ty<'tcx>,
    ) -> Self {
        let mut downcast = *self;
        downcast.layout = bx.cx().layout_of(ty);
        downcast
    }

    pub fn storage_live<Bx: BuilderMethods<'a, 'tcx, Value = V>>(&self, bx: &mut Bx) {
        bx.lifetime_start(self.val.llval, self.layout.size);
    }

    pub fn storage_dead<Bx: BuilderMethods<'a, 'tcx, Value = V>>(&self, bx: &mut Bx) {
        bx.lifetime_end(self.val.llval, self.layout.size);
    }
}

impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
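    /// Lowers a MIR place (a local plus a chain of projections) to a
    /// [`PlaceRef`] by walking the projections left to right.
    ///
    /// For illustration (a sketch, assuming `_1` is a reference living in a
    /// stack slot): the place `(*_1).1` is handled by first loading and
    /// dereferencing the pointer (`ProjectionElem::Deref`), then projecting
    /// to field 1 (`ProjectionElem::Field`), i.e. roughly
    /// `bx.load_operand(base).deref(bx.cx()).project_field(bx, 1)`.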
    #[instrument(level = "trace", skip(self, bx))]
    pub fn codegen_place(
        &mut self,
        bx: &mut Bx,
        place_ref: mir::PlaceRef<'tcx>,
    ) -> PlaceRef<'tcx, Bx::Value> {
        let cx = self.cx;
        let tcx = self.cx.tcx();

        let mut base = 0;
        let mut cg_base = match self.locals[place_ref.local] {
            LocalRef::Place(place) => place,
            LocalRef::UnsizedPlace(place) => bx.load_operand(place).deref(cx),
            LocalRef::Operand(..) => {
                if place_ref.is_indirect_first_projection() {
                    base = 1;
                    let cg_base = self.codegen_consume(
                        bx,
                        mir::PlaceRef { projection: &place_ref.projection[..0], ..place_ref },
                    );
                    cg_base.deref(bx.cx())
                } else {
                    bug!("using operand local {:?} as place", place_ref);
                }
            }
            LocalRef::PendingOperand => {
                bug!("using still-pending operand local {:?} as place", place_ref);
            }
        };
        for elem in place_ref.projection[base..].iter() {
            cg_base = match *elem {
                mir::ProjectionElem::Deref => bx.load_operand(cg_base).deref(bx.cx()),
                mir::ProjectionElem::Field(ref field, _) => {
                    assert!(
                        !cg_base.layout.ty.is_any_ptr(),
                        "Bad PlaceRef: destructuring pointers should use cast/PtrMetadata, \
                         but tried to access field {field:?} of pointer {cg_base:?}",
                    );
                    cg_base.project_field(bx, field.index())
                }
                mir::ProjectionElem::OpaqueCast(ty) => {
                    bug!("encountered OpaqueCast({ty}) in codegen")
                }
                mir::ProjectionElem::UnwrapUnsafeBinder(ty) => {
                    cg_base.project_type(bx, self.monomorphize(ty))
                }
                mir::ProjectionElem::Index(index) => {
                    let index = &mir::Operand::Copy(mir::Place::from(index));
                    let index = self.codegen_operand(bx, index);
                    let llindex = index.immediate();
                    cg_base.project_index(bx, llindex)
                }
                mir::ProjectionElem::ConstantIndex { offset, from_end: false, min_length: _ } => {
                    let lloffset = bx.cx().const_usize(offset);
                    cg_base.project_index(bx, lloffset)
                }
                mir::ProjectionElem::ConstantIndex { offset, from_end: true, min_length: _ } => {
                    let lloffset = bx.cx().const_usize(offset);
                    let lllen = cg_base.len(bx.cx());
                    let llindex = bx.sub(lllen, lloffset);
                    cg_base.project_index(bx, llindex)
                }
                mir::ProjectionElem::Subslice { from, to, from_end } => {
                    let mut subslice = cg_base.project_index(bx, bx.cx().const_usize(from));
                    let projected_ty =
                        PlaceTy::from_ty(cg_base.layout.ty).projection_ty(tcx, *elem).ty;
                    subslice.layout = bx.cx().layout_of(self.monomorphize(projected_ty));

                    if subslice.layout.is_unsized() {
                        assert!(from_end, "slice subslices should be `from_end`");
                        subslice.val.llextra = Some(
                            bx.sub(cg_base.val.llextra.unwrap(), bx.cx().const_usize(from + to)),
                        );
                    }

                    subslice
                }
                mir::ProjectionElem::Downcast(_, v) => cg_base.project_downcast(bx, v),
            };
        }
        debug!("codegen_place(place={:?}) => {:?}", place_ref, cg_base);
        cg_base
    }

    pub fn monomorphized_place_ty(&self, place_ref: mir::PlaceRef<'tcx>) -> Ty<'tcx> {
        let tcx = self.cx.tcx();
        let place_ty = place_ref.ty(self.mir, tcx);
        self.monomorphize(place_ty.ty)
    }
}

fn round_up_const_value_to_alignment<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    bx: &mut Bx,
    value: Bx::Value,
    align: Bx::Value,
) -> Bx::Value {
    // In pseudo code:
    //
    //     if value & (align - 1) == 0 {
    //         value
    //     } else {
    //         (value & !(align - 1)) + align
    //     }
    //
    // Usually this is written without branches as
    //
    //     (value + align - 1) & !(align - 1)
    //
    // But this formula cannot take advantage of constant `value`. E.g. if `value` is known
    // at compile time to be `1`, this expression should be optimized to `align`. However,
    // the optimization only holds if `align` is a power of two. Since the optimizer doesn't know
    // that `align` is a power of two, it cannot perform this optimization.
    //
    // Instead we use
    //
    //     value + (-value & (align - 1))
    //
    // Since `align` is used only once, the expression can be optimized. For `value = 0`
    // it's optimized to `0` even in debug mode.
    //
    // NB: The previous version of this code used
    //
    //     (value + align - 1) & -align
    //
    // Even though `-align == !(align - 1)`, LLVM failed to optimize this even for
    // `value = 0`. Bug report: https://bugs.llvm.org/show_bug.cgi?id=48559
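    //
    // Worked example (illustrative): for `value = 5` and `align = 8`,
    // `-5 & (8 - 1) == 3`, so the result is `5 + 3 == 8`; for `value = 0`
    // the masked term is `0` and the result stays `0`.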
    let one = bx.const_usize(1);
    let align_minus_1 = bx.sub(align, one);
    let neg_value = bx.neg(value);
    let offset = bx.and(neg_value, align_minus_1);
    bx.add(value, offset)
}

/// Calculates the value that needs to be stored to mark the discriminant.
///
/// This might be `None` for a `struct` or a niched variant (like `Some(&3)`).
///
/// If it's `Some`, it returns the value to store and the field in which to
/// store it. Note that this value is *not* the same as the discriminant, in
/// general, as it might be a niche value or have a different size.
///
/// It might also be an `Err` because the variant is uninhabited.
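///
/// For example (illustrative, assuming the usual niche layout of `Option<&T>`):
/// setting the `Some` variant returns `Ok(None)`, since it is the untagged
/// variant and nothing needs to be stored, while setting `None` returns the
/// niche value to write into the pointer field (its null representation).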
pub(super) fn codegen_tag_value<'tcx, V>(
    cx: &impl CodegenMethods<'tcx, Value = V>,
    variant_index: VariantIdx,
    layout: TyAndLayout<'tcx>,
) -> Result<Option<(FieldIdx, V)>, UninhabitedVariantError> {
    // By checking uninhabited-ness first we don't need to worry about types
    // like `(u32, !)` which are single-variant but weird.
    if layout.for_variant(cx, variant_index).is_uninhabited() {
        return Err(UninhabitedVariantError);
    }

    Ok(match layout.variants {
        Variants::Empty => unreachable!("we already handled uninhabited types"),
        Variants::Single { index } => {
            assert_eq!(index, variant_index);
            None
        }

        Variants::Multiple { tag_encoding: TagEncoding::Direct, tag_field, .. } => {
            let discr = layout.ty.discriminant_for_variant(cx.tcx(), variant_index);
            let to = discr.unwrap().val;
            let tag_layout = layout.field(cx, tag_field.as_usize());
            let tag_llty = cx.immediate_backend_type(tag_layout);
            let imm = cx.const_uint_big(tag_llty, to);
            Some((tag_field, imm))
        }
        Variants::Multiple {
            tag_encoding: TagEncoding::Niche { untagged_variant, ref niche_variants, niche_start },
            tag_field,
            ..
        } => {
            if variant_index != untagged_variant {
                let niche_layout = layout.field(cx, tag_field.as_usize());
                let niche_llty = cx.immediate_backend_type(niche_layout);
                let BackendRepr::Scalar(scalar) = niche_layout.backend_repr else {
                    bug!("expected a scalar placeref for the niche");
                };
                // We are supposed to compute `niche_value.wrapping_add(niche_start)` wrapping
                // around the `niche`'s type.
                // The easiest way to do that is to do wrapping arithmetic on `u128` and then
                // masking off any extra bits that occur because we did the arithmetic with too many bits.
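                //
                // Worked example (illustrative): with a one-byte niche and
                // `niche_start = 254`, the second niche variant has
                // `niche_value = 1`, so the stored value is
                // `(1 + 254) & 0xFF == 255`.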
                let niche_value = variant_index.as_u32() - niche_variants.start().as_u32();
                let niche_value = (niche_value as u128).wrapping_add(niche_start);
                let niche_value = niche_value & niche_layout.size.unsigned_int_max();

                let niche_llval = cx.scalar_to_backend(
                    Scalar::from_uint(niche_value, niche_layout.size),
                    scalar,
                    niche_llty,
                );
                Some((tag_field, niche_llval))
            } else {
                None
            }
        }
    })
}

#[derive(Debug)]
pub(super) struct UninhabitedVariantError;