rustc_codegen_ssa/mir/operand.rs

use std::fmt;

use itertools::Either;
use rustc_abi as abi;
use rustc_abi::{
    Align, BackendRepr, FIRST_VARIANT, FieldIdx, Primitive, Size, TagEncoding, VariantIdx, Variants,
};
use rustc_hir::LangItem;
use rustc_middle::mir::interpret::{Pointer, Scalar, alloc_range};
use rustc_middle::mir::{self, ConstValue};
use rustc_middle::ty::layout::{LayoutOf, TyAndLayout};
use rustc_middle::ty::{self, Ty};
use rustc_middle::{bug, span_bug};
use rustc_session::config::{AnnotateMoves, DebugInfo, OptLevel};
use tracing::{debug, instrument};

use super::place::{PlaceRef, PlaceValue};
use super::rvalue::transmute_scalar;
use super::{FunctionCx, LocalRef};
use crate::MemFlags;
use crate::common::IntPredicate;
use crate::traits::*;

/// The representation of a Rust value. The enum variant is in fact
/// uniquely determined by the value's type, but is kept as a
/// safety check.
#[derive(Copy, Clone, Debug)]
pub enum OperandValue<V> {
    /// A reference to the actual operand. The data is guaranteed
    /// to be valid for the operand's lifetime.
    /// The second value, if any, is the extra data (vtable or length)
    /// which indicates that it refers to an unsized rvalue.
    ///
    /// An `OperandValue` *must* be this variant for any type for which
    /// [`LayoutTypeCodegenMethods::is_backend_ref`] returns `true`.
    /// (That basically amounts to "isn't one of the other variants".)
    ///
    /// This holds a [`PlaceValue`] (like a [`PlaceRef`] does) with a pointer
    /// to the location holding the value. The type behind that pointer is the
    /// one returned by [`LayoutTypeCodegenMethods::backend_type`].
    Ref(PlaceValue<V>),
    /// A single LLVM immediate value.
    ///
    /// An `OperandValue` *must* be this variant for any type for which
    /// [`LayoutTypeCodegenMethods::is_backend_immediate`] returns `true`.
    /// The backend value in this variant must be the *immediate* backend type,
    /// as returned by [`LayoutTypeCodegenMethods::immediate_backend_type`].
    Immediate(V),
    /// A pair of immediate LLVM values. Used by wide pointers too.
    ///
    /// # Invariants
    /// - For `Pair(a, b)`, `a` is always at offset 0, though it may have a
    ///   `FieldIdx` of 1 or more if earlier fields are 1-ZSTs.
    /// - `b` is never at offset 0, because `V` is not a 1-ZST type.
    /// - `a` and `b` always have different `FieldIdx`s, but `b`'s may be the
    ///   lower one, and they need not be adjacent: any number of 1-ZST fields
    ///   may be interleaved without affecting the shape of the data, which is
    ///   what determines whether `Pair` is used.
    /// - An `OperandValue` *must* be this variant for any type for which
    ///   [`LayoutTypeCodegenMethods::is_backend_scalar_pair`] returns `true`.
    /// - The backend values in this variant must be the *immediate* backend
    ///   types, as returned by
    ///   [`LayoutTypeCodegenMethods::scalar_pair_element_backend_type`]
    ///   with `immediate: true`.
    Pair(V, V),
    /// A value taking no bytes, and which therefore needs no LLVM value at all.
    ///
    /// If you ever need a `V` to pass to something, get a fresh poison value
    /// from [`ConstCodegenMethods::const_poison`].
    ///
    /// An `OperandValue` *must* be this variant for any type for which
    /// `is_zst` on its `Layout` returns `true`. Note however that
    /// these values can still require alignment.
    ZeroSized,
}

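// A quick illustration of how types map to these variants (an illustrative
// sketch, not part of the original sources): on a typical 64-bit target,
//
//     u32        -> OperandValue::Immediate(..)   // single scalar
//     &[u8]      -> OperandValue::Pair(ptr, len)  // wide pointer
//     ()         -> OperandValue::ZeroSized       // ZST
//     [u64; 32]  -> OperandValue::Ref(..)         // aggregate kept in memory
//
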
impl<V: CodegenObject> OperandValue<V> {
    /// Return the data pointer and optional metadata as backend values
    /// if this value can be treated as a pointer.
    pub(crate) fn try_pointer_parts(self) -> Option<(V, Option<V>)> {
        match self {
            OperandValue::Immediate(llptr) => Some((llptr, None)),
            OperandValue::Pair(llptr, llextra) => Some((llptr, Some(llextra))),
            OperandValue::Ref(_) | OperandValue::ZeroSized => None,
        }
    }

    /// Treat this value as a pointer and return the data pointer and
    /// optional metadata as backend values.
    ///
    /// If you're making a place, use [`Self::deref`] instead.
    pub(crate) fn pointer_parts(self) -> (V, Option<V>) {
        self.try_pointer_parts()
            .unwrap_or_else(|| bug!("OperandValue cannot be a pointer: {self:?}"))
    }

    /// Treat this value as a pointer and return the place to which it points.
    ///
    /// The pointer immediate doesn't inherently know its alignment,
    /// so you need to pass it in. If you want to get it from a type's ABI
    /// alignment, then maybe you want [`OperandRef::deref`] instead.
    ///
    /// This is the inverse of [`PlaceValue::address`].
    pub(crate) fn deref(self, align: Align) -> PlaceValue<V> {
        let (llval, llextra) = self.pointer_parts();
        PlaceValue { llval, llextra, align }
    }

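    // Sketch of the inverse relationship noted above (illustrative comment,
    // not from the original sources): for a pointer-shaped operand value `v`,
    //
    //     let place = v.deref(align);    // pointer value -> place
    //     let ptr = place.address();     // place -> pointer value again
    //
    // where `align` is whatever alignment the caller knows for the pointee.
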
    pub(crate) fn is_expected_variant_for_type<'tcx, Cx: LayoutTypeCodegenMethods<'tcx>>(
        &self,
        cx: &Cx,
        ty: TyAndLayout<'tcx>,
    ) -> bool {
        match self {
            OperandValue::ZeroSized => ty.is_zst(),
            OperandValue::Immediate(_) => cx.is_backend_immediate(ty),
            OperandValue::Pair(_, _) => cx.is_backend_scalar_pair(ty),
            OperandValue::Ref(_) => cx.is_backend_ref(ty),
        }
    }
}

/// An `OperandRef` is an "SSA" reference to a Rust value, along with
/// its type.
///
/// NOTE: unless you know a value's type exactly, you should not
/// generate LLVM opcodes acting on it and instead act via methods,
/// to avoid nasty edge cases. In particular, using `Builder::store`
/// directly is sure to cause problems -- use `OperandRef::store`
/// instead.
#[derive(Copy, Clone)]
pub struct OperandRef<'tcx, V> {
    /// The value.
    pub val: OperandValue<V>,

    /// The layout of the value, based on its Rust type.
    pub layout: TyAndLayout<'tcx>,

    /// Annotation for profiler visibility of move/copy operations.
    /// When set, the store operation should appear as an inlined call to this function.
    pub move_annotation: Option<ty::Instance<'tcx>>,
}

impl<V: CodegenObject> fmt::Debug for OperandRef<'_, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "OperandRef({:?} @ {:?})", self.val, self.layout)
    }
}

impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
    pub fn zero_sized(layout: TyAndLayout<'tcx>) -> OperandRef<'tcx, V> {
        assert!(layout.is_zst());
        OperandRef { val: OperandValue::ZeroSized, layout, move_annotation: None }
    }

    pub(crate) fn from_const<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        val: mir::ConstValue,
        ty: Ty<'tcx>,
    ) -> Self {
        let layout = bx.layout_of(ty);

        let val = match val {
            ConstValue::Scalar(x) => {
                let BackendRepr::Scalar(scalar) = layout.backend_repr else {
                    bug!("from_const: invalid ByVal layout: {:#?}", layout);
                };
                let llval = bx.scalar_to_backend(x, scalar, bx.immediate_backend_type(layout));
                OperandValue::Immediate(llval)
            }
            ConstValue::ZeroSized => return OperandRef::zero_sized(layout),
            ConstValue::Slice { alloc_id, meta } => {
                let BackendRepr::ScalarPair(a_scalar, _) = layout.backend_repr else {
                    bug!("from_const: invalid ScalarPair layout: {:#?}", layout);
                };
                let a = Scalar::from_pointer(Pointer::new(alloc_id.into(), Size::ZERO), &bx.tcx());
                let a_llval = bx.scalar_to_backend(
                    a,
                    a_scalar,
                    bx.scalar_pair_element_backend_type(layout, 0, true),
                );
                let b_llval = bx.const_usize(meta);
                OperandValue::Pair(a_llval, b_llval)
            }
            ConstValue::Indirect { alloc_id, offset } => {
                let alloc = bx.tcx().global_alloc(alloc_id).unwrap_memory();
                return Self::from_const_alloc(bx, layout, alloc, offset);
            }
        };

        OperandRef { val, layout, move_annotation: None }
    }

    fn from_const_alloc<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        layout: TyAndLayout<'tcx>,
        alloc: rustc_middle::mir::interpret::ConstAllocation<'tcx>,
        offset: Size,
    ) -> Self {
        let alloc_align = alloc.inner().align;
        assert!(alloc_align >= layout.align.abi, "{alloc_align:?} < {:?}", layout.align.abi);

        let read_scalar = |start, size, s: abi::Scalar, ty| {
            match alloc.0.read_scalar(
                bx,
                alloc_range(start, size),
                /*read_provenance*/ matches!(s.primitive(), abi::Primitive::Pointer(_)),
            ) {
                Ok(val) => bx.scalar_to_backend(val, s, ty),
                Err(_) => bx.const_poison(ty),
            }
        };

        // It may seem like all types with `Scalar` or `ScalarPair` ABI are fair game at this point.
        // However, `MaybeUninit<u64>` is considered a `Scalar` as far as its layout is concerned --
        // and yet cannot be represented by an interpreter `Scalar`, since we have to handle the
        // case where some of the bytes are initialized and others are not. So, we need an extra
        // check that walks over the type of `mplace` to make sure it is truly correct to treat this
        // like a `Scalar` (or `ScalarPair`).
        match layout.backend_repr {
            BackendRepr::Scalar(s @ abi::Scalar::Initialized { .. }) => {
                let size = s.size(bx);
                assert_eq!(size, layout.size, "abi::Scalar size does not match layout size");
                let val = read_scalar(offset, size, s, bx.immediate_backend_type(layout));
                OperandRef { val: OperandValue::Immediate(val), layout, move_annotation: None }
            }
            BackendRepr::ScalarPair(
                a @ abi::Scalar::Initialized { .. },
                b @ abi::Scalar::Initialized { .. },
            ) => {
                let (a_size, b_size) = (a.size(bx), b.size(bx));
                let b_offset = (offset + a_size).align_to(b.align(bx).abi);
                assert!(b_offset.bytes() > 0);
                let a_val = read_scalar(
                    offset,
                    a_size,
                    a,
                    bx.scalar_pair_element_backend_type(layout, 0, true),
                );
                let b_val = read_scalar(
                    b_offset,
                    b_size,
                    b,
                    bx.scalar_pair_element_backend_type(layout, 1, true),
                );
                OperandRef { val: OperandValue::Pair(a_val, b_val), layout, move_annotation: None }
            }
            _ if layout.is_zst() => OperandRef::zero_sized(layout),
            _ => {
                // Neither a scalar nor scalar pair. Load from a place
                let base_addr = bx.static_addr_of(alloc, None);

                let llval = bx.const_ptr_byte_offset(base_addr, offset);
                bx.load_operand(PlaceRef::new_sized(llval, layout))
            }
        }
    }

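    // Illustration of the subtlety documented in `from_const_alloc` (comment
    // only, not from the original sources): `MaybeUninit<u64>` has a `Scalar`
    // backend repr, but a constant of that type may be only partially
    // initialized, so `read_scalar` can fail; the `Err(_)` arm above then
    // yields `const_poison` instead of fabricating bytes.
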
    /// Asserts that this operand refers to a scalar and returns
    /// a reference to its value.
    pub fn immediate(self) -> V {
        match self.val {
            OperandValue::Immediate(s) => s,
            _ => bug!("not immediate: {:?}", self),
        }
    }

    /// Asserts that this operand is a pointer (or reference) and returns
    /// the place to which it points. (This requires no code to be emitted
    /// as we represent places using the pointer to the place.)
    ///
    /// This uses [`Ty::builtin_deref`] to include the type of the place and
    /// assumes the place is aligned to the pointee's usual ABI alignment.
    ///
    /// If you don't need the type, see [`OperandValue::pointer_parts`]
    /// or [`OperandValue::deref`].
    pub fn deref<Cx: CodegenMethods<'tcx>>(self, cx: &Cx) -> PlaceRef<'tcx, V> {
        if self.layout.ty.is_box() {
            // Derefer should have removed all Box derefs
            bug!("dereferencing {:?} in codegen", self.layout.ty);
        }

        let projected_ty = self
            .layout
            .ty
            .builtin_deref(true)
            .unwrap_or_else(|| bug!("deref of non-pointer {:?}", self));

        let layout = cx.layout_of(projected_ty);
        self.val.deref(layout.align.abi).with_type(layout)
    }

    /// Store this operand into a place, applying move/copy annotation if present.
    ///
    /// This is the preferred method for storing operands, as it automatically
    /// applies profiler annotations for tracked move/copy operations.
    pub fn store_with_annotation<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        dest: PlaceRef<'tcx, V>,
    ) {
        if let Some(instance) = self.move_annotation {
            bx.with_move_annotation(instance, |bx| self.val.store(bx, dest))
        } else {
            self.val.store(bx, dest)
        }
    }

    /// If this operand is a `Pair`, we return an aggregate with the two values.
    /// For other cases, see `immediate`.
    pub fn immediate_or_packed_pair<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
    ) -> V {
        if let OperandValue::Pair(a, b) = self.val {
            let llty = bx.cx().immediate_backend_type(self.layout);
            debug!("Operand::immediate_or_packed_pair: packing {:?} into {:?}", self, llty);
            // Reconstruct the immediate aggregate.
            let mut llpair = bx.cx().const_poison(llty);
            llpair = bx.insert_value(llpair, a, 0);
            llpair = bx.insert_value(llpair, b, 1);
            llpair
        } else {
            self.immediate()
        }
    }

    /// If the type is a pair, we return a `Pair`; otherwise, an `Immediate`.
    pub fn from_immediate_or_packed_pair<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        llval: V,
        layout: TyAndLayout<'tcx>,
    ) -> Self {
        let val = if let BackendRepr::ScalarPair(..) = layout.backend_repr {
            debug!("Operand::from_immediate_or_packed_pair: unpacking {:?} @ {:?}", llval, layout);

            // Deconstruct the immediate aggregate.
            let a_llval = bx.extract_value(llval, 0);
            let b_llval = bx.extract_value(llval, 1);
            OperandValue::Pair(a_llval, b_llval)
        } else {
            OperandValue::Immediate(llval)
        };
        OperandRef { val, layout, move_annotation: None }
    }

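    // Round-trip note (illustrative, not from the original sources): for a
    // `ScalarPair` layout, `immediate_or_packed_pair` packs `Pair(a, b)` into
    // an aggregate via `insert_value` at indices 0 and 1, and
    // `from_immediate_or_packed_pair` undoes that with `extract_value` at the
    // same indices, recovering the original `OperandValue::Pair(a, b)`.
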
    pub(crate) fn extract_field<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        &self,
        fx: &mut FunctionCx<'a, 'tcx, Bx>,
        bx: &mut Bx,
        i: usize,
    ) -> Self {
        let field = self.layout.field(bx.cx(), i);
        let offset = self.layout.fields.offset(i);

        if !bx.is_backend_ref(self.layout) && bx.is_backend_ref(field) {
            // Part of https://github.com/rust-lang/compiler-team/issues/838
            span_bug!(
                fx.mir.span,
                "Non-ref type {self:?} cannot project to ref field type {field:?}",
            );
        }

        let val = if field.is_zst() {
            OperandValue::ZeroSized
        } else if field.size == self.layout.size {
            assert_eq!(offset.bytes(), 0);
            fx.codegen_transmute_operand(bx, *self, field)
        } else {
            let (in_scalar, imm) = match (self.val, self.layout.backend_repr) {
                // Extract a scalar component from a pair.
                (OperandValue::Pair(a_llval, b_llval), BackendRepr::ScalarPair(a, b)) => {
                    if offset.bytes() == 0 {
                        assert_eq!(field.size, a.size(bx.cx()));
                        (Some(a), a_llval)
                    } else {
                        assert_eq!(offset, a.size(bx.cx()).align_to(b.align(bx.cx()).abi));
                        assert_eq!(field.size, b.size(bx.cx()));
                        (Some(b), b_llval)
                    }
                }

                _ => {
                    span_bug!(fx.mir.span, "OperandRef::extract_field({:?}): not applicable", self)
                }
            };
            OperandValue::Immediate(match field.backend_repr {
                BackendRepr::SimdVector { .. } => imm,
                BackendRepr::Scalar(out_scalar) => {
                    let Some(in_scalar) = in_scalar else {
                        span_bug!(
                            fx.mir.span,
                            "OperandRef::extract_field({:?}): missing input scalar for output scalar",
                            self
                        )
                    };
                    if in_scalar != out_scalar {
                        // If the backend and backend_immediate types might differ,
                        // flip back to the backend type and then to the new immediate.
                        // This avoids no-op truncations, but still handles things like
                        // bools in union fields that need to be truncated.
                        let backend = bx.from_immediate(imm);
                        bx.to_immediate_scalar(backend, out_scalar)
                    } else {
                        imm
                    }
                }
                BackendRepr::ScalarPair(_, _)
                | BackendRepr::Memory { .. }
                | BackendRepr::ScalableVector { .. } => bug!(),
            })
        };

        OperandRef { val, layout: field, move_annotation: None }
    }

    /// Obtain the actual discriminant of a value.
    #[instrument(level = "trace", skip(fx, bx))]
    pub fn codegen_get_discr<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        fx: &mut FunctionCx<'a, 'tcx, Bx>,
        bx: &mut Bx,
        cast_to: Ty<'tcx>,
    ) -> V {
        let dl = &bx.tcx().data_layout;
        let cast_to_layout = bx.cx().layout_of(cast_to);
        let cast_to = bx.cx().immediate_backend_type(cast_to_layout);

        // We check uninhabitedness separately because a type like
        // `enum Foo { Bar(i32, !) }` is still reported as `Variants::Single`,
        // *not* as `Variants::Empty`.
        if self.layout.is_uninhabited() {
            return bx.cx().const_poison(cast_to);
        }

        let (tag_scalar, tag_encoding, tag_field) = match self.layout.variants {
            Variants::Empty => unreachable!("we already handled uninhabited types"),
            Variants::Single { index } => {
                let discr_val =
                    if let Some(discr) = self.layout.ty.discriminant_for_variant(bx.tcx(), index) {
                        discr.val
                    } else {
                        // This arm is for types which are neither enums nor coroutines,
                        // and thus for which the only possible "variant" should be the first one.
                        assert_eq!(index, FIRST_VARIANT);
                        // There's thus no actual discriminant to return, so we return
                        // what it would have been if this was a single-variant enum.
                        0
                    };
                return bx.cx().const_uint_big(cast_to, discr_val);
            }
            Variants::Multiple { tag, ref tag_encoding, tag_field, .. } => {
                (tag, tag_encoding, tag_field)
            }
        };

        // Read the tag/niche-encoded discriminant from memory.
        let tag_op = match self.val {
            OperandValue::ZeroSized => bug!(),
            OperandValue::Immediate(_) | OperandValue::Pair(_, _) => {
                self.extract_field(fx, bx, tag_field.as_usize())
            }
            OperandValue::Ref(place) => {
                let tag = place.with_type(self.layout).project_field(bx, tag_field.as_usize());
                bx.load_operand(tag)
            }
        };
        let tag_imm = tag_op.immediate();

        // Decode the discriminant (specifically if it's niche-encoded).
        match *tag_encoding {
            TagEncoding::Direct => {
                let signed = match tag_scalar.primitive() {
                    // We use `i1` for bytes that are always `0` or `1`,
                    // e.g., `#[repr(i8)] enum E { A, B }`, but we can't
                    // let LLVM interpret the `i1` as signed, because
                    // then `i1 1` (i.e., `E::B`) is effectively `i8 -1`.
                    Primitive::Int(_, signed) => !tag_scalar.is_bool() && signed,
                    _ => false,
                };
                bx.intcast(tag_imm, cast_to, signed)
            }
            TagEncoding::Niche { untagged_variant, ref niche_variants, niche_start } => {
                // Cast to an integer so we don't have to treat a pointer as a
                // special case.
                let (tag, tag_llty) = match tag_scalar.primitive() {
                    // FIXME(erikdesjardins): handle non-default addrspace ptr sizes
                    Primitive::Pointer(_) => {
                        let t = bx.type_from_integer(dl.ptr_sized_integer());
                        let tag = bx.ptrtoint(tag_imm, t);
                        (tag, t)
                    }
                    _ => (tag_imm, bx.cx().immediate_backend_type(tag_op.layout)),
                };

                // `layout_sanity_check` ensures that we only get here for cases where the discriminant
                // value and the variant index match, since that's all `Niche` can encode.

                let relative_max = niche_variants.end().as_u32() - niche_variants.start().as_u32();
                let niche_start_const = bx.cx().const_uint_big(tag_llty, niche_start);

                // We have a subrange `niche_start..=niche_end` inside `range`.
                // If the value of the tag is inside this subrange, it's a
                // "niche value", an increment of the discriminant. Otherwise it
                // indicates the untagged variant.
                // A general algorithm to extract the discriminant from the tag
                // is:
                // relative_tag = tag - niche_start
                // is_niche = relative_tag <= (ule) relative_max
                // discr = if is_niche {
                //     cast(relative_tag) + niche_variants.start()
                // } else {
                //     untagged_variant
                // }
                // However, we will likely be able to emit simpler code.
                let (is_niche, tagged_discr, delta) = if relative_max == 0 {
                    // Best case scenario: only one tagged variant. This will
                    // likely become just a comparison and a jump.
                    // The algorithm is:
                    // is_niche = tag == niche_start
                    // discr = if is_niche {
                    //     niche_start
                    // } else {
                    //     untagged_variant
                    // }
                    let is_niche = bx.icmp(IntPredicate::IntEQ, tag, niche_start_const);
                    let tagged_discr =
                        bx.cx().const_uint(cast_to, niche_variants.start().as_u32() as u64);
                    (is_niche, tagged_discr, 0)
                } else {
                    // Thanks to parameter attributes and load metadata, LLVM already knows
                    // the general valid range of the tag. It's possible, though, for there
                    // to be an impossible value *in the middle*, which those ranges don't
                    // communicate, so it's worth an `assume` to let the optimizer know.
                    // Most importantly, this means when optimizing a variant test like
                    // `SELECT(is_niche, complex, CONST) == CONST` it's ok to simplify that
                    // to `!is_niche` because the `complex` part can't possibly match.
                    //
                    // This was previously asserted on `tagged_discr` below, where the
                    // impossible value is more obvious, but that caused an intermediate
                    // value to become multi-use and thus not optimize, so instead this
                    // assumes on the original input which is always multi-use. See
                    // <https://github.com/llvm/llvm-project/issues/134024#issuecomment-3131782555>
                    //
                    // FIXME: If we ever get range assume operand bundles in LLVM (so we
                    // don't need the `icmp`s in the instruction stream any more), it
                    // might be worth moving this back to being on the switch argument
                    // where it's more obviously applicable.
                    if niche_variants.contains(&untagged_variant)
                        && bx.cx().sess().opts.optimize != OptLevel::No
                    {
                        let impossible = niche_start
                            .wrapping_add(u128::from(untagged_variant.as_u32()))
                            .wrapping_sub(u128::from(niche_variants.start().as_u32()));
                        let impossible = bx.cx().const_uint_big(tag_llty, impossible);
                        let ne = bx.icmp(IntPredicate::IntNE, tag, impossible);
                        bx.assume(ne);
                    }

                    // With multiple niched variants we'll have to actually compute
                    // the variant index from the stored tag.
                    //
                    // However, there's still one small optimization we can often do for
                    // determining *whether* a tag value is a natural value or a niched
                    // variant. The general algorithm involves a subtraction that often
                    // wraps in practice, making it tricky to analyse. However, in cases
                    // where there are few enough possible values of the tag that it doesn't
                    // need to wrap around, we can instead just look for the contiguous
                    // tag values on the end of the range with a single comparison.
                    //
                    // For example, take the type `enum Demo { A, B, Untagged(bool) }`.
                    // The `bool` is {0, 1}, and the two other variants are given the
                    // tags {2, 3} respectively. That means the `tag_range` is
                    // `[0, 3]`, which doesn't wrap as unsigned (nor as signed), so
                    // we can test for the niched variants with just `>= 2`.
                    //
                    // That means we're looking either for the niche values *above*
                    // the natural values of the untagged variant:
                    //
                    //             niche_start                  niche_end
                    //                  |                           |
                    //                  v                           v
                    // MIN -------------+---------------------------+---------- MAX
                    //         ^        |         is niche          |
                    //         |        +---------------------------+
                    //         |                                    |
                    //   tag_range.start                      tag_range.end
                    //
                    // Or *below* the natural values:
                    //
                    //    niche_start              niche_end
                    //         |                       |
                    //         v                       v
                    // MIN ----+-----------------------+---------------------- MAX
                    //         |       is niche        |           ^
                    //         +-----------------------+           |
                    //         |                                   |
                    //   tag_range.start                      tag_range.end
                    //
                    // With those two options and having the flexibility to choose
                    // between a signed or unsigned comparison on the tag, that
                    // covers most realistic scenarios. The tests have a (contrived)
                    // example of a 1-byte enum with over 128 niched variants which
                    // wraps both as signed and as unsigned, though, and for something
                    // like that we're stuck with the general algorithm.

                    let tag_range = tag_scalar.valid_range(&dl);
                    let tag_size = tag_scalar.size(&dl);
                    let niche_end = u128::from(relative_max).wrapping_add(niche_start);
                    let niche_end = tag_size.truncate(niche_end);

                    let relative_discr = bx.sub(tag, niche_start_const);
                    let cast_tag = bx.intcast(relative_discr, cast_to, false);
                    let is_niche = if tag_range.no_unsigned_wraparound(tag_size) == Ok(true) {
                        if niche_start == tag_range.start {
                            let niche_end_const = bx.cx().const_uint_big(tag_llty, niche_end);
                            bx.icmp(IntPredicate::IntULE, tag, niche_end_const)
                        } else {
                            assert_eq!(niche_end, tag_range.end);
                            bx.icmp(IntPredicate::IntUGE, tag, niche_start_const)
                        }
                    } else if tag_range.no_signed_wraparound(tag_size) == Ok(true) {
                        if niche_start == tag_range.start {
                            let niche_end_const = bx.cx().const_uint_big(tag_llty, niche_end);
                            bx.icmp(IntPredicate::IntSLE, tag, niche_end_const)
                        } else {
                            assert_eq!(niche_end, tag_range.end);
                            bx.icmp(IntPredicate::IntSGE, tag, niche_start_const)
                        }
                    } else {
                        bx.icmp(
                            IntPredicate::IntULE,
                            relative_discr,
                            bx.cx().const_uint(tag_llty, relative_max as u64),
                        )
                    };

                    (is_niche, cast_tag, niche_variants.start().as_u32() as u128)
                };

                let tagged_discr = if delta == 0 {
                    tagged_discr
                } else {
                    bx.add(tagged_discr, bx.cx().const_uint_big(cast_to, delta))
                };

                let untagged_variant_const =
                    bx.cx().const_uint(cast_to, u64::from(untagged_variant.as_u32()));

                let discr = bx.select(is_niche, tagged_discr, untagged_variant_const);

                // In principle we could insert assumes on the possible range of `discr`, but
                // currently in LLVM this isn't worth it because the original `tag` will
                // have either a `range` parameter attribute or `!range` metadata,
                // or come from a `transmute` that already `assume`d it.

                discr
            }
        }
    }
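
    // Worked example for the niche decoding above (illustrative comment, not
    // from the original sources): for `Option<&u8>` the tag is the pointer
    // itself and `None` is the single niched variant with `niche_start == 0`,
    // so `relative_max == 0` and the emitted code reduces to
    // `is_niche = (tag == 0)` followed by a select between the niched
    // discriminant and the untagged (`Some`) variant index.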
}

660/// Each of these variants starts out as `Either::Right` when it's uninitialized,
661/// then setting the field changes that to `Either::Left` with the backend value.
662#[derive(Debug, Copy, Clone)]
663enum OperandValueBuilder<V> {
664    ZeroSized,
665    Immediate(Either<V, abi::Scalar>),
666    Pair(Either<V, abi::Scalar>, Either<V, abi::Scalar>),
667    /// `repr(simd)` types need special handling because each has a non-empty
668    /// array field (which uses [`OperandValue::Ref`]) even though the SIMD type
669    /// itself uses [`OperandValue::Immediate`]; for any other kind of type that
670    /// would mean its one non-ZST field was also [`OperandValue::Immediate`].
671    Vector(Either<V, ()>),
672}
673
674/// Allows building up an `OperandRef` by setting fields one at a time.
675#[derive(Debug, Copy, Clone)]
676pub(super) struct OperandRefBuilder<'tcx, V> {
677    val: OperandValueBuilder<V>,
678    layout: TyAndLayout<'tcx>,
679}
680
681impl<'a, 'tcx, V: CodegenObject> OperandRefBuilder<'tcx, V> {
682    /// Creates an uninitialized builder for an instance of the `layout`.
683    ///
684    /// ICEs for [`BackendRepr::Memory`] types (other than ZSTs), which should
685    /// be built up inside a [`PlaceRef`] instead as they need an allocated place
686    /// into which to write the values of the fields.
687    pub(super) fn new(layout: TyAndLayout<'tcx>) -> Self {
688        let val = match layout.backend_repr {
689            BackendRepr::Memory { .. } if layout.is_zst() => OperandValueBuilder::ZeroSized,
690            BackendRepr::Scalar(s) => OperandValueBuilder::Immediate(Either::Right(s)),
691            BackendRepr::ScalarPair(a, b) => {
692                OperandValueBuilder::Pair(Either::Right(a), Either::Right(b))
693            }
694            BackendRepr::SimdVector { .. } | BackendRepr::ScalableVector { .. } => {
695                OperandValueBuilder::Vector(Either::Right(()))
696            }
697            BackendRepr::Memory { .. } => {
698                bug!("Cannot use non-ZST Memory-ABI type in operand builder: {layout:?}");
699            }
700        };
701        OperandRefBuilder { val, layout }
702    }
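
    // A sketch of typical use (illustrative pseudo-usage; `bx` is any
    // `BuilderMethods` implementation and `pair_layout` must not be a
    // non-ZST `Memory` layout):
    //
    //     let mut builder = OperandRefBuilder::new(pair_layout);
    //     builder.insert_field(bx, FIRST_VARIANT, FieldIdx::ZERO, first_operand);
    //     builder.insert_field(bx, FIRST_VARIANT, FieldIdx::from_u32(1), second_operand);
    //     let operand = builder.build(bx.cx());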
703
704    pub(super) fn insert_field<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
705        &mut self,
706        bx: &mut Bx,
707        variant: VariantIdx,
708        field: FieldIdx,
709        field_operand: OperandRef<'tcx, V>,
710    ) {
711        if let OperandValue::ZeroSized = field_operand.val {
712            // A ZST never adds any state, so just ignore it.
713            // This special-casing is worth it because of things like
714            // `Result<!, !>` where `Ok(never)` is legal to write,
715            // but the type shows as `FieldsShape::Primitive` so we can't
716            // actually look at the layout for the field being set.
717            return;
718        }
719
720        let is_zero_offset = if let abi::FieldsShape::Primitive = self.layout.fields {
721            // The other branch looking at field layouts ICEs for primitives,
722            // so we need to handle them separately.
723            // Because we handled ZSTs above (like the metadata in a thin pointer),
724            // the only possibility is that we're setting the one-and-only field.
725            assert!(!self.layout.is_zst());
726            assert_eq!(variant, FIRST_VARIANT);
727            assert_eq!(field, FieldIdx::ZERO);
728            true
729        } else {
730            let variant_layout = self.layout.for_variant(bx.cx(), variant);
731            let field_offset = variant_layout.fields.offset(field.as_usize());
732            field_offset == Size::ZERO
733        };
734
735        let mut update = |tgt: &mut Either<V, abi::Scalar>, src, from_scalar| {
736            let to_scalar = tgt.unwrap_right();
737            // We transmute here (rather than just `from_immediate`) because in
738            // `Result<usize, *const ()>` the field of the `Ok` is an integer,
739            // but the corresponding scalar in the enum is a pointer.
740            let imm = transmute_scalar(bx, src, from_scalar, to_scalar);
741            *tgt = Either::Left(imm);
742        };
743
744        match (field_operand.val, field_operand.layout.backend_repr) {
745            (OperandValue::ZeroSized, _) => unreachable!("Handled above"),
746            (OperandValue::Immediate(v), BackendRepr::Scalar(from_scalar)) => match &mut self.val {
747                OperandValueBuilder::Immediate(val @ Either::Right(_)) if is_zero_offset => {
748                    update(val, v, from_scalar);
749                }
750                OperandValueBuilder::Pair(fst @ Either::Right(_), _) if is_zero_offset => {
751                    update(fst, v, from_scalar);
752                }
753                OperandValueBuilder::Pair(_, snd @ Either::Right(_)) if !is_zero_offset => {
754                    update(snd, v, from_scalar);
755                }
756                _ => {
757                    bug!("Tried to insert {field_operand:?} into {variant:?}.{field:?} of {self:?}")
758                }
759            },
760            (OperandValue::Immediate(v), BackendRepr::SimdVector { .. }) => match &mut self.val {
761                OperandValueBuilder::Vector(val @ Either::Right(())) if is_zero_offset => {
762                    *val = Either::Left(v);
763                }
764                _ => {
765                    bug!("Tried to insert {field_operand:?} into {variant:?}.{field:?} of {self:?}")
766                }
767            },
768            (OperandValue::Pair(a, b), BackendRepr::ScalarPair(from_sa, from_sb)) => {
769                match &mut self.val {
770                    OperandValueBuilder::Pair(fst @ Either::Right(_), snd @ Either::Right(_)) => {
771                        update(fst, a, from_sa);
772                        update(snd, b, from_sb);
773                    }
774                    _ => bug!(
775                        "Tried to insert {field_operand:?} into {variant:?}.{field:?} of {self:?}"
776                    ),
777                }
778            }
779            (OperandValue::Ref(place), BackendRepr::Memory { .. }) => match &mut self.val {
780                OperandValueBuilder::Vector(val @ Either::Right(())) => {
781                    let ibty = bx.cx().immediate_backend_type(self.layout);
782                    let simd = bx.load_from_place(ibty, place);
783                    *val = Either::Left(simd);
784                }
785                _ => {
786                    bug!("Tried to insert {field_operand:?} into {variant:?}.{field:?} of {self:?}")
787                }
788            },
789            _ => bug!("Operand cannot be used with `insert_field`: {field_operand:?}"),
790        }
791    }
792
793    /// Insert the immediate value `imm` for field `f` in the *type itself*,
794    /// rather than into one of the variants.
795    ///
796    /// Most things want [`Self::insert_field`] instead, but this one is
797    /// necessary for writing things like enum tags that aren't in any variant.
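    ///
    /// For example (illustrative): for a tagged enum whose tag lives at field 0
    /// of the enum layout itself, the discriminant is written with
    /// `insert_imm(FieldIdx::ZERO, tag_imm)`, while each variant's payload
    /// fields go through [`Self::insert_field`].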
798    pub(super) fn insert_imm(&mut self, f: FieldIdx, imm: V) {
799        let field_offset = self.layout.fields.offset(f.as_usize());
800        let is_zero_offset = field_offset == Size::ZERO;
801        match &mut self.val {
802            OperandValueBuilder::Immediate(val @ Either::Right(_)) if is_zero_offset => {
803                *val = Either::Left(imm);
804            }
805            OperandValueBuilder::Pair(fst @ Either::Right(_), _) if is_zero_offset => {
806                *fst = Either::Left(imm);
807            }
808            OperandValueBuilder::Pair(_, snd @ Either::Right(_)) if !is_zero_offset => {
809                *snd = Either::Left(imm);
810            }
811            _ => bug!("Tried to insert {imm:?} into field {f:?} of {self:?}"),
812        }
813    }
814
815    /// After having set all necessary fields, this converts the builder back
816    /// to the normal `OperandRef`.
817    ///
818    /// ICEs if any required fields were not set.
819    pub(super) fn build(&self, cx: &impl CodegenMethods<'tcx, Value = V>) -> OperandRef<'tcx, V> {
820        let OperandRefBuilder { val, layout } = *self;
821
822        // For something like `Option::<u32>::None`, it's expected that the
823        // payload scalar will not actually have been set, so this converts
824        // unset scalars to corresponding `undef` values so long as the scalar
825        // from the layout allows uninit.
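        // For example (illustrative): building `None::<u32>` sets only the tag
        // half of the pair, so `build` yields something like
        // `Pair(tag_imm, undef)` rather than ICEing on the unset payload.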
826        let unwrap = |r: Either<V, abi::Scalar>| match r {
827            Either::Left(v) => v,
828            Either::Right(s) if s.is_uninit_valid() => {
829                let bty = cx.type_from_scalar(s);
830                cx.const_undef(bty)
831            }
832            Either::Right(_) => bug!("OperandRef::build called while fields are missing {self:?}"),
833        };
834
835        let val = match val {
836            OperandValueBuilder::ZeroSized => OperandValue::ZeroSized,
837            OperandValueBuilder::Immediate(v) => OperandValue::Immediate(unwrap(v)),
838            OperandValueBuilder::Pair(a, b) => OperandValue::Pair(unwrap(a), unwrap(b)),
839            OperandValueBuilder::Vector(v) => match v {
840                Either::Left(v) => OperandValue::Immediate(v),
841                Either::Right(())
842                    if let BackendRepr::SimdVector { element, .. } = layout.backend_repr
843                        && element.is_uninit_valid() =>
844                {
845                    let bty = cx.immediate_backend_type(layout);
846                    OperandValue::Immediate(cx.const_undef(bty))
847                }
848                Either::Right(()) => {
849                    bug!("OperandRef::build called while fields are missing {self:?}")
850                }
851            },
852        };
853        OperandRef { val, layout, move_annotation: None }
854    }
855}
856
857/// Default size limit for move/copy annotations (in bytes). 64 bytes is a common cache-line
858/// size, and the assumption is that anything of that size or below is very cheap to move/copy,
859/// so only copies strictly larger than 64 bytes (hence the `65` below) are annotated.
860const MOVE_ANNOTATION_DEFAULT_LIMIT: u64 = 65;
861
862impl<'a, 'tcx, V: CodegenObject> OperandValue<V> {
863    /// Returns an `OperandValue` that's generally UB to use in any way.
864    ///
865    /// Depending on the `layout`, returns `ZeroSized` for ZSTs, an `Immediate` or
866    /// `Pair` containing poison value(s), or a `Ref` containing a poison pointer.
867    ///
868    /// Supports sized types only.
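    ///
    /// For example (illustrative): `u32` yields `Immediate(poison)`,
    /// `(u32, u64)` (a scalar pair) yields `Pair(poison, poison)`, and a
    /// by-memory type such as `[u8; 100]` yields a `Ref` holding a poison
    /// pointer.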
869    pub fn poison<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
870        bx: &mut Bx,
871        layout: TyAndLayout<'tcx>,
872    ) -> OperandValue<V> {
873        assert!(layout.is_sized());
874        if layout.is_zst() {
875            OperandValue::ZeroSized
876        } else if bx.cx().is_backend_immediate(layout) {
877            let ibty = bx.cx().immediate_backend_type(layout);
878            OperandValue::Immediate(bx.const_poison(ibty))
879        } else if bx.cx().is_backend_scalar_pair(layout) {
880            let ibty0 = bx.cx().scalar_pair_element_backend_type(layout, 0, true);
881            let ibty1 = bx.cx().scalar_pair_element_backend_type(layout, 1, true);
882            OperandValue::Pair(bx.const_poison(ibty0), bx.const_poison(ibty1))
883        } else {
884            let ptr = bx.cx().type_ptr();
885            OperandValue::Ref(PlaceValue::new_sized(bx.const_poison(ptr), layout.align.abi))
886        }
887    }
888
889    pub fn store<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
890        self,
891        bx: &mut Bx,
892        dest: PlaceRef<'tcx, V>,
893    ) {
894        self.store_with_flags(bx, dest, MemFlags::empty());
895    }
896
897    pub fn volatile_store<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
898        self,
899        bx: &mut Bx,
900        dest: PlaceRef<'tcx, V>,
901    ) {
902        self.store_with_flags(bx, dest, MemFlags::VOLATILE);
903    }
904
905    pub fn unaligned_volatile_store<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
906        self,
907        bx: &mut Bx,
908        dest: PlaceRef<'tcx, V>,
909    ) {
910        self.store_with_flags(bx, dest, MemFlags::VOLATILE | MemFlags::UNALIGNED);
911    }
912
913    pub fn nontemporal_store<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
914        self,
915        bx: &mut Bx,
916        dest: PlaceRef<'tcx, V>,
917    ) {
918        self.store_with_flags(bx, dest, MemFlags::NONTEMPORAL);
919    }
920
921    pub(crate) fn store_with_flags<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
922        self,
923        bx: &mut Bx,
924        dest: PlaceRef<'tcx, V>,
925        flags: MemFlags,
926    ) {
927        debug!("OperandRef::store: operand={:?}, dest={:?}", self, dest);
928        match self {
929            OperandValue::ZeroSized => {
930                // Avoid generating stores of zero-sized values, because the only way to have a
931                // zero-sized value is through `undef`/`poison`, and the store itself is useless.
932            }
933            OperandValue::Ref(val) => {
934                assert!(dest.layout.is_sized(), "cannot directly store unsized values");
935                if val.llextra.is_some() {
936                    bug!("cannot directly store unsized values");
937                }
938                bx.typed_place_copy_with_flags(dest.val, val, dest.layout, flags);
939            }
940            OperandValue::Immediate(s) => {
941                let val = bx.from_immediate(s);
942                bx.store_with_flags(val, dest.val.llval, dest.val.align, flags);
943            }
944            OperandValue::Pair(a, b) => {
945                let BackendRepr::ScalarPair(a_scalar, b_scalar) = dest.layout.backend_repr else {
946                    bug!("store_with_flags: invalid ScalarPair layout: {:#?}", dest.layout);
947                };
948                let b_offset = a_scalar.size(bx).align_to(b_scalar.align(bx).abi);
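                // e.g. for an `(i8, i32)` pair: `a` occupies 1 byte and `b`
                // needs 4-byte alignment, so `b_offset` is 1 rounded up to the
                // next multiple of 4, i.e. 4 bytes.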
949
950                let val = bx.from_immediate(a);
951                let align = dest.val.align;
952                bx.store_with_flags(val, dest.val.llval, align, flags);
953
954                let llptr = bx.inbounds_ptradd(dest.val.llval, bx.const_usize(b_offset.bytes()));
955                let val = bx.from_immediate(b);
956                let align = dest.val.align.restrict_for_offset(b_offset);
957                bx.store_with_flags(val, llptr, align, flags);
958            }
959        }
960    }
961}
962
963impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
964    fn maybe_codegen_consume_direct(
965        &mut self,
966        bx: &mut Bx,
967        place_ref: mir::PlaceRef<'tcx>,
968    ) -> Option<OperandRef<'tcx, Bx::Value>> {
969        debug!("maybe_codegen_consume_direct(place_ref={:?})", place_ref);
970
971        match self.locals[place_ref.local] {
972            LocalRef::Operand(mut o) => {
973                // We only need to handle the projections that
974                // `LocalAnalyzer::process_place` lets through to here.
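                // For example (illustrative), a place like `(_1 as Some).0`
                // arrives here as a `Downcast` projection followed by a
                // `Field` projection.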
975                for elem in place_ref.projection {
976                    match *elem {
977                        mir::ProjectionElem::Field(f, _) => {
978                            assert!(
979                                !o.layout.ty.is_any_ptr(),
980                                "Bad PlaceRef: destructing pointers should use cast/PtrMetadata, \
981                                 but tried to access field {f:?} of pointer {o:?}",
982                            );
983                            o = o.extract_field(self, bx, f.index());
984                        }
985                        mir::PlaceElem::Downcast(_, vidx) => {
986                            debug_assert_eq!(
987                                o.layout.variants,
988                                abi::Variants::Single { index: vidx },
989                            );
990                            let layout = o.layout.for_variant(bx.cx(), vidx);
991                            o = OperandRef { layout, ..o }
992                        }
993                        _ => return None,
994                    }
995                }
996
997                Some(o)
998            }
999            LocalRef::PendingOperand => {
1000                bug!("use of {:?} before def", place_ref);
1001            }
1002            LocalRef::Place(..) | LocalRef::UnsizedPlace(..) => {
1003                // watch out for locals that do not have an
1004                // alloca; they are handled somewhat differently
1005                None
1006            }
1007        }
1008    }
1009
1010    pub fn codegen_consume(
1011        &mut self,
1012        bx: &mut Bx,
1013        place_ref: mir::PlaceRef<'tcx>,
1014    ) -> OperandRef<'tcx, Bx::Value> {
1015        debug!("codegen_consume(place_ref={:?})", place_ref);
1016
1017        let ty = self.monomorphized_place_ty(place_ref);
1018        let layout = bx.cx().layout_of(ty);
1019
1020        // ZSTs don't require any actual memory access.
1021        if layout.is_zst() {
1022            return OperandRef::zero_sized(layout);
1023        }
1024
1025        if let Some(o) = self.maybe_codegen_consume_direct(bx, place_ref) {
1026            return o;
1027        }
1028
1029        // For most places, consuming the value simply means loading it
1030        // out of its home location.
1031        let place = self.codegen_place(bx, place_ref);
1032        bx.load_operand(place)
1033    }
1034
1035    pub fn codegen_operand(
1036        &mut self,
1037        bx: &mut Bx,
1038        operand: &mir::Operand<'tcx>,
1039    ) -> OperandRef<'tcx, Bx::Value> {
1040        debug!("codegen_operand(operand={:?})", operand);
1041
1042        match *operand {
1043            mir::Operand::Copy(ref place) | mir::Operand::Move(ref place) => {
1044                let kind = match operand {
1045                    mir::Operand::Move(_) => LangItem::CompilerMove,
1046                    mir::Operand::Copy(_) => LangItem::CompilerCopy,
1047                    _ => unreachable!(),
1048                };
1049
1050                // Check if we should annotate this move/copy for profiling
1051                let move_annotation = self.move_copy_annotation_instance(bx, place.as_ref(), kind);
1052
1053                OperandRef { move_annotation, ..self.codegen_consume(bx, place.as_ref()) }
1054            }
1055
1056            mir::Operand::RuntimeChecks(checks) => {
1057                let layout = bx.layout_of(bx.tcx().types.bool);
1058                let BackendRepr::Scalar(scalar) = layout.backend_repr else {
1059                    bug!("from_const: invalid ByVal layout: {:#?}", layout);
1060                };
1061                let x = Scalar::from_bool(checks.value(bx.tcx().sess));
1062                let llval = bx.scalar_to_backend(x, scalar, bx.immediate_backend_type(layout));
1063                let val = OperandValue::Immediate(llval);
1064                OperandRef { val, layout, move_annotation: None }
1065            }
1066
1067            mir::Operand::Constant(ref constant) => {
1068                let constant_ty = self.monomorphize(constant.ty());
1069                // Most SIMD vector constants should be passed as immediates.
1070                // (In particular, some intrinsics really rely on this.)
1071                if constant_ty.is_simd() {
1072                    // However, some SIMD types do not actually use the vector ABI
1073                    // (in particular, packed SIMD types do not). Ensure we exclude those.
1074                    //
1075                    // We also have to exclude vectors of pointers because `immediate_const_vector`
1076                    // does not work for those.
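                    // For example (illustrative): a `#[repr(simd)]` wrapper
                    // around `[u32; 4]` takes the immediate path below, while
                    // a packed SIMD type or a vector of pointers falls through
                    // to `eval_mir_constant_to_operand`.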
1077                    let layout = bx.layout_of(constant_ty);
1078                    let (_, element_ty) = constant_ty.simd_size_and_type(bx.tcx());
1079                    if let BackendRepr::SimdVector { .. } = layout.backend_repr
1080                        && element_ty.is_numeric()
1081                    {
1082                        let (llval, ty) = self.immediate_const_vector(bx, constant);
1083                        return OperandRef {
1084                            val: OperandValue::Immediate(llval),
1085                            layout: bx.layout_of(ty),
1086                            move_annotation: None,
1087                        };
1088                    }
1089                }
1090                self.eval_mir_constant_to_operand(bx, constant)
1091            }
1092        }
1093    }
1094
1095    /// Creates an `Instance` for annotating a move/copy operation at codegen time.
1096    ///
1097    /// Returns `Some(instance)` if the operation should be annotated with debug info, `None`
1098    /// otherwise. The instance represents a monomorphized `compiler_move<T, SIZE>` or
1099    /// `compiler_copy<T, SIZE>` function that can be used to create debug scopes.
1100    ///
1101    /// There are a number of conditions that must be met for an annotation to be created, but aside
1102    /// from the basics (annotation is enabled, we're generating debuginfo), the primary concern is
1103    /// moves/copies which could result in a real `memcpy`. So we check for the size limit, but also
1104    /// that the underlying representation of the type is in memory.
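    ///
    /// For example (illustrative): with the default limit, moving a
    /// `[u8; 4096]` (a `Memory`-repr type well over the limit) is annotated,
    /// while moving a `(usize, usize)` is not, since it is a `ScalarPair`
    /// rather than a by-memory value.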
1105    fn move_copy_annotation_instance(
1106        &self,
1107        bx: &Bx,
1108        place: mir::PlaceRef<'tcx>,
1109        kind: LangItem,
1110    ) -> Option<ty::Instance<'tcx>> {
1111        let tcx = bx.tcx();
1112        let sess = tcx.sess;
1113
1114        // Skip if we're not generating debuginfo
1115        if sess.opts.debuginfo == DebugInfo::None {
1116            return None;
1117        }
1118
1119        // Check if annotation is enabled and get size limit (otherwise skip)
1120        let size_limit = match sess.opts.unstable_opts.annotate_moves {
1121            AnnotateMoves::Disabled => return None,
1122            AnnotateMoves::Enabled(None) => MOVE_ANNOTATION_DEFAULT_LIMIT,
1123            AnnotateMoves::Enabled(Some(limit)) => limit,
1124        };
1125
1126        let ty = self.monomorphized_place_ty(place);
1127        let layout = bx.cx().layout_of(ty);
1128        let ty_size = layout.size.bytes();
1129
1130        // Only annotate if type has a memory representation and exceeds size limit (and has a
1131        // non-zero size)
1132        if layout.is_zst()
1133            || ty_size < size_limit
1134            || !matches!(layout.backend_repr, BackendRepr::Memory { .. })
1135        {
1136            return None;
1137        }
1138
1139        // Look up the DefId for compiler_move or compiler_copy lang item
1140        let def_id = tcx.lang_items().get(kind)?;
1141
1142        // Create generic args: compiler_move<T, SIZE> or compiler_copy<T, SIZE>
1143        let size_const = ty::Const::from_target_usize(tcx, ty_size);
1144        let generic_args = tcx.mk_args(&[ty.into(), size_const.into()]);
1145
1146        // Create the Instance
1147        let typing_env = self.mir.typing_env(tcx);
1148        let instance = ty::Instance::expect_resolve(
1149            tcx,
1150            typing_env,
1151            def_id,
1152            generic_args,
1153            rustc_span::DUMMY_SP, // span only used for error messages
1154        );
1155
1156        Some(instance)
1157    }
1158}