rustc_mir_transform/gvn.rs

1//! Global value numbering.
2//!
3//! MIR may contain repeated and/or redundant computations. The objective of this pass is to detect
4//! such redundancies and re-use the already-computed result when possible.
5//!
6//! We traverse all assignments `x = rvalue` and operands.
7//!
8//! For each SSA one, we compute a symbolic representation of the value that is assigned to the
9//! SSA local. This symbolic representation is defined by the `Value` enum. Each produced instance
10//! of `Value` is interned as a `VnIndex`, which allows us to cheaply detect identical values.
11//!
12//! From those assignments, we construct a mapping `VnIndex -> Vec<(Local, Location)>` of
13//! available values, the locals in which they are stored, and the assignment location.
14//!
15//! For each non-SSA one, we compute the `VnIndex` of the rvalue. If this `VnIndex` is associated
16//! to a constant, we replace the rvalue/operand by that constant. Otherwise, if there is an SSA
17//! local `y` associated to this `VnIndex`, and if its definition location strictly dominates the
18//! assignment to `x`, we replace the assignment by `x = y`.
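//!
//! For instance (a schematic sketch, not MIR from a real crate), if `_2` is an SSA local whose
//! assignment dominates a later assignment computing the same value, we get:
//! ```ignore (MIR)
//! _2 = Add(copy _1, const 1_u8); // interned as VnIndex i
//! // some MIR
//! _3 = Add(copy _1, const 1_u8); // also VnIndex i, so rewritten to `_3 = copy _2`
//! ```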
20//!
21//! Opportunistically, this pass also simplifies some `Rvalue`s based on the accumulated knowledge.
22//!
23//! # Operational semantics
24//!
25//! Operationally, this pass attempts to prove bitwise equality between locals. Given this MIR:
26//! ```ignore (MIR)
27//! _a = some value // has VnIndex i
28//! // some MIR
29//! _b = some other value // also has VnIndex i
30//! ```
31//!
32//! We consider it to be replaceable by:
33//! ```ignore (MIR)
34//! _a = some value // has VnIndex i
35//! // some MIR
36//! _c = some other value // also has VnIndex i
37//! assume(_a bitwise equal to _c) // follows from having the same VnIndex
38//! _b = _a // follows from the `assume`
39//! ```
40//!
41//! This simplifies to:
42//! ```ignore (MIR)
43//! _a = some value // has VnIndex i
44//! // some MIR
45//! _b = _a
46//! ```
47//!
48//! # Handling of references
49//!
50//! We handle references by assigning a different "provenance" index to each Ref/RawPtr rvalue.
51//! This ensures that we do not spuriously merge borrows that should not be merged. Meanwhile, we
52//! consider all the derefs of an immutable reference to a freeze type to give the same value:
53//! ```ignore (MIR)
54//! _a = *_b // _b is &Freeze
55//! _c = *_b // replaced by _c = _a
56//! ```
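//!
//! Conversely, each `Ref`/`RawPtr` rvalue receives a fresh provenance index, so two borrows of the
//! same place are *not* unified (schematic sketch):
//! ```ignore (MIR)
//! _a = &_1
//! _b = &_1 // not replaced by `_b = _a`
//! ```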
57//!
58//! # Determinism of constant propagation
59//!
60//! When registering a new `Value`, we attempt to opportunistically evaluate it as a constant.
61//! The evaluated form is inserted in `evaluated` as an `OpTy` or `None` if evaluation failed.
62//!
63//! The difficulty is non-deterministic evaluation of MIR constants. Some `Const`s can have
64//! different runtime values each time they are evaluated. This is the case for
65//! `Const::Slice`, which gets a new pointer each time it is evaluated, and for constants that
66//! contain a fn pointer (an `AllocId` pointing to a `GlobalAlloc::Function`), which may point to
67//! a different symbol in each codegen unit.
68//!
69//! Meanwhile, we want to be able to read indirect constants. For instance:
70//! ```
71//! static A: &'static &'static u8 = &&63;
72//! fn foo() -> u8 {
73//!     **A // We want to replace by 63.
74//! }
75//! fn bar() -> u8 {
76//!     b"abc"[1] // We want to replace by 'b'.
77//! }
78//! ```
79//!
80//! The `Value::Constant` variant stores a possibly unevaluated constant. Evaluating that constant
81//! may be non-deterministic. When that happens, we assign a disambiguator to ensure that we do not
82//! merge the constants. See `duplicate_slice` test in `gvn.rs`.
83//!
84//! Second, when writing constants back into MIR, we do not emit `Const::Slice` or `Const`s
85//! that contain `AllocId`s.
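//!
//! For instance (an illustrative sketch, not a test from this pass), the two slice constants below
//! are not guaranteed to share an address, so their values must not be unified:
//! ```
//! fn slice_addrs() -> (*const u8, *const u8) {
//!     let a: &[u8] = b"abc"; // each mention may produce a fresh allocation
//!     let b: &[u8] = b"abc";
//!     (a.as_ptr(), b.as_ptr())
//! }
//! ```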
86
87use std::borrow::Cow;
88use std::hash::{Hash, Hasher};
89
90use either::Either;
91use itertools::Itertools as _;
92use rustc_abi::{self as abi, BackendRepr, FIRST_VARIANT, FieldIdx, Primitive, Size, VariantIdx};
93use rustc_arena::DroplessArena;
94use rustc_const_eval::const_eval::DummyMachine;
95use rustc_const_eval::interpret::{
96    ImmTy, Immediate, InterpCx, MemPlaceMeta, MemoryKind, OpTy, Projectable, Scalar,
97    intern_const_alloc_for_constprop,
98};
99use rustc_data_structures::fx::FxHasher;
100use rustc_data_structures::graph::dominators::Dominators;
101use rustc_data_structures::hash_table::{Entry, HashTable};
102use rustc_hir::def::DefKind;
103use rustc_index::bit_set::DenseBitSet;
104use rustc_index::{IndexVec, newtype_index};
105use rustc_middle::bug;
106use rustc_middle::mir::interpret::GlobalAlloc;
107use rustc_middle::mir::visit::*;
108use rustc_middle::mir::*;
109use rustc_middle::ty::layout::HasTypingEnv;
110use rustc_middle::ty::{self, Ty, TyCtxt};
111use rustc_span::DUMMY_SP;
112use smallvec::SmallVec;
113use tracing::{debug, instrument, trace};
114
115use crate::ssa::SsaLocals;
116
117pub(super) struct GVN;
118
119impl<'tcx> crate::MirPass<'tcx> for GVN {
120    fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
121        sess.mir_opt_level() >= 2
122    }
123
124    #[instrument(level = "trace", skip(self, tcx, body))]
125    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
126        debug!(def_id = ?body.source.def_id());
127
128        let typing_env = body.typing_env(tcx);
129        let ssa = SsaLocals::new(tcx, body, typing_env);
130        // Clone dominators because we need them while mutating the body.
131        let dominators = body.basic_blocks.dominators().clone();
132
133        let arena = DroplessArena::default();
134        let mut state =
135            VnState::new(tcx, body, typing_env, &ssa, dominators, &body.local_decls, &arena);
136
137        for local in body.args_iter().filter(|&local| ssa.is_ssa(local)) {
138            let opaque = state.new_argument(body.local_decls[local].ty);
139            state.assign(local, opaque);
140        }
141
142        let reverse_postorder = body.basic_blocks.reverse_postorder().to_vec();
143        for bb in reverse_postorder {
144            let data = &mut body.basic_blocks.as_mut_preserves_cfg()[bb];
145            state.visit_basic_block_data(bb, data);
146        }
147
148        // For each local that is reused (`y` above), we remove its storage statements to avoid any
149        // difficulty. Those locals are SSA, so they should be easy for LLVM to optimize even without
150        // storage statements.
151        StorageRemover { tcx, reused_locals: state.reused_locals }.visit_body_preserves_cfg(body);
152    }
153
154    fn is_required(&self) -> bool {
155        false
156    }
157}
158
159newtype_index! {
160    /// This represents a `Value` in the symbolic execution.
161    #[debug_format = "_v{}"]
162    struct VnIndex {}
163}
164
165/// Marker type to forbid hashing and comparing opaque values.
166/// This struct should only be constructed by `ValueSet::insert_unique` to ensure we use that
167/// method to create non-unifiable values. It will ICE if used in `ValueSet::insert`.
168#[derive(Copy, Clone, Debug, Eq)]
169struct VnOpaque;
170impl PartialEq for VnOpaque {
171    fn eq(&self, _: &VnOpaque) -> bool {
172        // ICE if we try to compare unique values
173        unreachable!()
174    }
175}
176impl Hash for VnOpaque {
177    fn hash<T: Hasher>(&self, _: &mut T) {
178        // ICE if we try to hash unique values
179        unreachable!()
180    }
181}
182
183#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
184enum AddressKind {
185    Ref(BorrowKind),
186    Address(RawPtrKind),
187}
188
189#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
190enum AddressBase {
191    /// This address is based on this local.
192    Local(Local),
193    /// This address is based on the deref of this pointer.
194    Deref(VnIndex),
195}
196
197#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
198enum Value<'a, 'tcx> {
199    // Root values.
200    /// Used to represent values we know nothing about.
201    Opaque(VnOpaque),
202    /// The value is a function argument.
203    Argument(VnOpaque),
204    /// Evaluated or unevaluated constant value.
205    Constant {
206        value: Const<'tcx>,
207        /// Some constants do not have a deterministic value. To avoid merging two instances of the
208        /// same `Const`, we assign them an additional integer index.
209        // `disambiguator` is `None` iff the constant is deterministic.
210        disambiguator: Option<VnOpaque>,
211    },
212
213    // Aggregates.
214    /// An aggregate value, either tuple/closure/struct/enum.
215    /// This does not contain unions, as we cannot reason with the value.
216    Aggregate(VariantIdx, &'a [VnIndex]),
217    /// A union aggregate value.
218    Union(FieldIdx, VnIndex),
219    /// A raw pointer aggregate built from a thin pointer and metadata.
220    RawPtr {
221        /// Thin pointer component. This is field 0 in MIR.
222        pointer: VnIndex,
223        /// Metadata component. This is field 1 in MIR.
224        metadata: VnIndex,
225    },
226    /// This corresponds to a `[value; count]` expression.
227    Repeat(VnIndex, ty::Const<'tcx>),
228    /// The address of a place.
229    Address {
230        base: AddressBase,
231        // We do not use a plain `Place` as we want to be able to reason about indices.
232        // This does not contain any `Deref` projection.
233        projection: &'a [ProjectionElem<VnIndex, Ty<'tcx>>],
234        kind: AddressKind,
235        /// Give each borrow and pointer a different provenance, so we don't merge them.
236        provenance: VnOpaque,
237    },
238
239    // Extractions.
240    /// This is the *value* obtained by projecting another value.
241    Projection(VnIndex, ProjectionElem<VnIndex, ()>),
242    /// Discriminant of the given value.
243    Discriminant(VnIndex),
244
245    // Operations.
246    RuntimeChecks(RuntimeChecks),
247    UnaryOp(UnOp, VnIndex),
248    BinaryOp(BinOp, VnIndex, VnIndex),
249    Cast {
250        kind: CastKind,
251        value: VnIndex,
252    },
253}
254
255/// Stores and deduplicates pairs of `(Value, Ty)` as `VnIndex`-numbered values.
256///
257/// This data structure is mostly a partial reimplementation of `FxIndexMap<VnIndex, (Value, Ty)>`.
258/// We do not use a regular `FxIndexMap` to skip hashing values that are unique by construction,
259/// like opaque values, addresses with provenance, and non-deterministic constants.
260struct ValueSet<'a, 'tcx> {
261    indices: HashTable<VnIndex>,
262    hashes: IndexVec<VnIndex, u64>,
263    values: IndexVec<VnIndex, Value<'a, 'tcx>>,
264    types: IndexVec<VnIndex, Ty<'tcx>>,
265}
266
267impl<'a, 'tcx> ValueSet<'a, 'tcx> {
268    fn new(num_values: usize) -> ValueSet<'a, 'tcx> {
269        ValueSet {
270            indices: HashTable::with_capacity(num_values),
271            hashes: IndexVec::with_capacity(num_values),
272            values: IndexVec::with_capacity(num_values),
273            types: IndexVec::with_capacity(num_values),
274        }
275    }
276
277    /// Insert a `(Value, Ty)` pair without hashing or deduplication.
278    /// This always creates a new `VnIndex`.
279    #[inline]
280    fn insert_unique(
281        &mut self,
282        ty: Ty<'tcx>,
283        value: impl FnOnce(VnOpaque) -> Value<'a, 'tcx>,
284    ) -> VnIndex {
285        let value = value(VnOpaque);
286
287        debug_assert!(match value {
288            Value::Opaque(_) | Value::Argument(_) | Value::Address { .. } => true,
289            Value::Constant { disambiguator, .. } => disambiguator.is_some(),
290            _ => false,
291        });
292
293        let index = self.hashes.push(0);
294        let _index = self.types.push(ty);
295        debug_assert_eq!(index, _index);
296        let _index = self.values.push(value);
297        debug_assert_eq!(index, _index);
298        index
299    }
300
301    /// Insert a `(Value, Ty)` pair to be deduplicated.
302    /// Returns `true` as the second tuple field if this value did not exist previously.
303    #[allow(rustc::pass_by_value)] // closures take `&VnIndex`
304    fn insert(&mut self, ty: Ty<'tcx>, value: Value<'a, 'tcx>) -> (VnIndex, bool) {
305        debug_assert!(match value {
306            Value::Opaque(_) | Value::Address { .. } => false,
307            Value::Constant { disambiguator, .. } => disambiguator.is_none(),
308            _ => true,
309        });
310
311        let hash: u64 = {
312            let mut h = FxHasher::default();
313            value.hash(&mut h);
314            ty.hash(&mut h);
315            h.finish()
316        };
317
318        let eq = |index: &VnIndex| self.values[*index] == value && self.types[*index] == ty;
319        let hasher = |index: &VnIndex| self.hashes[*index];
320        match self.indices.entry(hash, eq, hasher) {
321            Entry::Occupied(entry) => {
322                let index = *entry.get();
323                (index, false)
324            }
325            Entry::Vacant(entry) => {
326                let index = self.hashes.push(hash);
327                entry.insert(index);
328                let _index = self.values.push(value);
329                debug_assert_eq!(index, _index);
330                let _index = self.types.push(ty);
331                debug_assert_eq!(index, _index);
332                (index, true)
333            }
334        }
335    }
336
337    /// Return the `Value` associated with the given `VnIndex`.
338    #[inline]
339    fn value(&self, index: VnIndex) -> Value<'a, 'tcx> {
340        self.values[index]
341    }
342
343    /// Return the type associated with the given `VnIndex`.
344    #[inline]
345    fn ty(&self, index: VnIndex) -> Ty<'tcx> {
346        self.types[index]
347    }
348}
349
350struct VnState<'body, 'a, 'tcx> {
351    tcx: TyCtxt<'tcx>,
352    ecx: InterpCx<'tcx, DummyMachine>,
353    local_decls: &'body LocalDecls<'tcx>,
354    is_coroutine: bool,
355    /// Value stored in each local.
356    locals: IndexVec<Local, Option<VnIndex>>,
357    /// Locals that are assigned that value.
358    // This vector does not hold all the values of `VnIndex` that we create.
359    rev_locals: IndexVec<VnIndex, SmallVec<[Local; 1]>>,
360    values: ValueSet<'a, 'tcx>,
361    /// Values evaluated as constants if possible.
362    /// - `None` are values not computed yet;
363    /// - `Some(None)` are values for which computation has failed;
364    /// - `Some(Some(op))` are successful computations.
365    evaluated: IndexVec<VnIndex, Option<Option<&'a OpTy<'tcx>>>>,
366    ssa: &'body SsaLocals,
367    dominators: Dominators<BasicBlock>,
368    reused_locals: DenseBitSet<Local>,
369    arena: &'a DroplessArena,
370}
371
372impl<'body, 'a, 'tcx> VnState<'body, 'a, 'tcx> {
373    fn new(
374        tcx: TyCtxt<'tcx>,
375        body: &Body<'tcx>,
376        typing_env: ty::TypingEnv<'tcx>,
377        ssa: &'body SsaLocals,
378        dominators: Dominators<BasicBlock>,
379        local_decls: &'body LocalDecls<'tcx>,
380        arena: &'a DroplessArena,
381    ) -> Self {
382        // Compute a rough estimate of the number of values in the body from the number of
383        // statements. This is meant to reduce the number of allocations, but it's all right if
384        // we miss the exact amount. We estimate based on 2 values per statement (one in LHS and
385        // one in RHS) and 4 values per terminator (for call operands).
386        let num_values =
387            2 * body.basic_blocks.iter().map(|bbdata| bbdata.statements.len()).sum::<usize>()
388                + 4 * body.basic_blocks.len();
389        VnState {
390            tcx,
391            ecx: InterpCx::new(tcx, DUMMY_SP, typing_env, DummyMachine),
392            local_decls,
393            is_coroutine: body.coroutine.is_some(),
394            locals: IndexVec::from_elem(None, local_decls),
395            rev_locals: IndexVec::with_capacity(num_values),
396            values: ValueSet::new(num_values),
397            evaluated: IndexVec::with_capacity(num_values),
398            ssa,
399            dominators,
400            reused_locals: DenseBitSet::new_empty(local_decls.len()),
401            arena,
402        }
403    }
404
405    fn typing_env(&self) -> ty::TypingEnv<'tcx> {
406        self.ecx.typing_env()
407    }
408
409    fn insert_unique(
410        &mut self,
411        ty: Ty<'tcx>,
412        value: impl FnOnce(VnOpaque) -> Value<'a, 'tcx>,
413    ) -> VnIndex {
414        let index = self.values.insert_unique(ty, value);
415        let _index = self.evaluated.push(None);
416        debug_assert_eq!(index, _index);
417        let _index = self.rev_locals.push(SmallVec::new());
418        debug_assert_eq!(index, _index);
419        index
420    }
421
422    #[instrument(level = "trace", skip(self), ret)]
423    fn insert(&mut self, ty: Ty<'tcx>, value: Value<'a, 'tcx>) -> VnIndex {
424        let (index, new) = self.values.insert(ty, value);
425        if new {
426            // Grow `evaluated` and `rev_locals` here to amortize the allocations.
427            let _index = self.evaluated.push(None);
428            debug_assert_eq!(index, _index);
429            let _index = self.rev_locals.push(SmallVec::new());
430            debug_assert_eq!(index, _index);
431        }
432        index
433    }
434
435    /// Create a new `Value` for which we have no information at all, except that it is distinct
436    /// from all the others.
437    #[instrument(level = "trace", skip(self), ret)]
438    fn new_opaque(&mut self, ty: Ty<'tcx>) -> VnIndex {
439        let index = self.insert_unique(ty, Value::Opaque);
440        self.evaluated[index] = Some(None);
441        index
442    }
443
444    #[instrument(level = "trace", skip(self), ret)]
445    fn new_argument(&mut self, ty: Ty<'tcx>) -> VnIndex {
446        let index = self.insert_unique(ty, Value::Argument);
447        self.evaluated[index] = Some(None);
448        index
449    }
450
451    /// Create a new `Value::Address` distinct from all the others.
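    /// For example (schematic): `&raw mut (*_1).0` yields `base = AddressBase::Deref(value of _1)`
    /// with a `Field(0)` projection, while `&_2.0` for an SSA local `_2` yields
    /// `base = AddressBase::Local(_2)`. Places based on a non-SSA, non-indirect local are rejected.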
452    #[instrument(level = "trace", skip(self), ret)]
453    fn new_pointer(&mut self, place: Place<'tcx>, kind: AddressKind) -> Option<VnIndex> {
454        let pty = place.ty(self.local_decls, self.tcx).ty;
455        let ty = match kind {
456            AddressKind::Ref(bk) => {
457                Ty::new_ref(self.tcx, self.tcx.lifetimes.re_erased, pty, bk.to_mutbl_lossy())
458            }
459            AddressKind::Address(mutbl) => Ty::new_ptr(self.tcx, pty, mutbl.to_mutbl_lossy()),
460        };
461
462        let mut projection = place.projection.iter();
463        let base = if place.is_indirect_first_projection() {
464            let base = self.locals[place.local]?;
465            // Skip the initial `Deref`.
466            projection.next();
467            AddressBase::Deref(base)
468        } else if self.ssa.is_ssa(place.local) {
469            // Only propagate the pointer of the SSA local.
470            AddressBase::Local(place.local)
471        } else {
472            return None;
473        };
474        // Do not try to evaluate the `Index` operands here; `simplify_place_projection` has already done that.
475        let projection =
476            projection.map(|proj| proj.try_map(|index| self.locals[index], |ty| ty).ok_or(()));
477        let projection = self.arena.try_alloc_from_iter(projection).ok()?;
478
479        let index = self.insert_unique(ty, |provenance| Value::Address {
480            base,
481            projection,
482            kind,
483            provenance,
484        });
485        Some(index)
486    }
487
488    #[instrument(level = "trace", skip(self), ret)]
489    fn insert_constant(&mut self, value: Const<'tcx>) -> VnIndex {
490        if value.is_deterministic() {
491            // The constant is deterministic, no need to disambiguate.
492            let constant = Value::Constant { value, disambiguator: None };
493            self.insert(value.ty(), constant)
494        } else {
495            // Multiple mentions of this constant will yield different values,
496            // so assign a different `disambiguator` to ensure they do not get the same `VnIndex`.
497            self.insert_unique(value.ty(), |disambiguator| Value::Constant {
498                value,
499                disambiguator: Some(disambiguator),
500            })
501        }
502    }
503
504    #[inline]
505    fn get(&self, index: VnIndex) -> Value<'a, 'tcx> {
506        self.values.value(index)
507    }
508
509    #[inline]
510    fn ty(&self, index: VnIndex) -> Ty<'tcx> {
511        self.values.ty(index)
512    }
513
514    /// Record that `local` is assigned `value`. `local` must be SSA.
515    #[instrument(level = "trace", skip(self))]
516    fn assign(&mut self, local: Local, value: VnIndex) {
517        debug_assert!(self.ssa.is_ssa(local));
518        self.locals[local] = Some(value);
519        self.rev_locals[value].push(local);
520    }
521
522    fn insert_bool(&mut self, flag: bool) -> VnIndex {
523        // Booleans are deterministic.
524        let value = Const::from_bool(self.tcx, flag);
525        debug_assert!(value.is_deterministic());
526        self.insert(self.tcx.types.bool, Value::Constant { value, disambiguator: None })
527    }
528
529    fn insert_scalar(&mut self, ty: Ty<'tcx>, scalar: Scalar) -> VnIndex {
530        // Scalars are deterministic.
531        let value = Const::from_scalar(self.tcx, scalar, ty);
532        debug_assert!(value.is_deterministic());
533        self.insert(ty, Value::Constant { value, disambiguator: None })
534    }
535
536    fn insert_tuple(&mut self, ty: Ty<'tcx>, values: &[VnIndex]) -> VnIndex {
537        self.insert(ty, Value::Aggregate(VariantIdx::ZERO, self.arena.alloc_slice(values)))
538    }
539
540    #[instrument(level = "trace", skip(self), ret)]
541    fn eval_to_const_inner(&mut self, value: VnIndex) -> Option<OpTy<'tcx>> {
542        use Value::*;
543        let ty = self.ty(value);
544        // Avoid computing layouts inside a coroutine, as that can cause cycles.
545        let ty = if !self.is_coroutine || ty.is_scalar() {
546            self.ecx.layout_of(ty).ok()?
547        } else {
548            return None;
549        };
550        let op = match self.get(value) {
551            _ if ty.is_zst() => ImmTy::uninit(ty).into(),
552
553            Opaque(_) | Argument(_) => return None,
554            // Keep runtime check constants as symbolic.
555            RuntimeChecks(..) => return None,
556
557            // In general, evaluating repeat expressions just consumes a lot of memory.
558            // But in the special case that the element is just Immediate::Uninit, we can evaluate
559            // it without extra memory! If we don't propagate uninit values like this, LLVM can get
560            // very confused: https://github.com/rust-lang/rust/issues/139355
561            Repeat(value, _count) => {
562                let value = self.eval_to_const(value)?;
563                if value.is_immediate_uninit() {
564                    ImmTy::uninit(ty).into()
565                } else {
566                    return None;
567                }
568            }
569            Constant { ref value, disambiguator: _ } => {
570                self.ecx.eval_mir_constant(value, DUMMY_SP, None).discard_err()?
571            }
572            Aggregate(variant, ref fields) => {
573                let fields =
574                    fields.iter().map(|&f| self.eval_to_const(f)).collect::<Option<Vec<_>>>()?;
575                let variant = if ty.ty.is_enum() { Some(variant) } else { None };
576                let (BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..)) = ty.backend_repr
577                else {
578                    return None;
579                };
580                let dest = self.ecx.allocate(ty, MemoryKind::Stack).discard_err()?;
581                let variant_dest = if let Some(variant) = variant {
582                    self.ecx.project_downcast(&dest, variant).discard_err()?
583                } else {
584                    dest.clone()
585                };
586                for (field_index, op) in fields.into_iter().enumerate() {
587                    let field_dest = self
588                        .ecx
589                        .project_field(&variant_dest, FieldIdx::from_usize(field_index))
590                        .discard_err()?;
591                    self.ecx.copy_op(op, &field_dest).discard_err()?;
592                }
593                self.ecx
594                    .write_discriminant(variant.unwrap_or(FIRST_VARIANT), &dest)
595                    .discard_err()?;
596                self.ecx
597                    .alloc_mark_immutable(dest.ptr().provenance.unwrap().alloc_id())
598                    .discard_err()?;
599                dest.into()
600            }
601            Union(active_field, field) => {
602                let field = self.eval_to_const(field)?;
603                if field.layout.layout.is_zst() {
604                    ImmTy::from_immediate(Immediate::Uninit, ty).into()
605                } else if matches!(
606                    ty.backend_repr,
607                    BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..)
608                ) {
609                    let dest = self.ecx.allocate(ty, MemoryKind::Stack).discard_err()?;
610                    let field_dest = self.ecx.project_field(&dest, active_field).discard_err()?;
611                    self.ecx.copy_op(field, &field_dest).discard_err()?;
612                    self.ecx
613                        .alloc_mark_immutable(dest.ptr().provenance.unwrap().alloc_id())
614                        .discard_err()?;
615                    dest.into()
616                } else {
617                    return None;
618                }
619            }
620            RawPtr { pointer, metadata } => {
621                let pointer = self.eval_to_const(pointer)?;
622                let metadata = self.eval_to_const(metadata)?;
623
624                // Pointers don't have fields, so don't `project_field` them.
625                let data = self.ecx.read_pointer(pointer).discard_err()?;
626                let meta = if metadata.layout.is_zst() {
627                    MemPlaceMeta::None
628                } else {
629                    MemPlaceMeta::Meta(self.ecx.read_scalar(metadata).discard_err()?)
630                };
631                let ptr_imm = Immediate::new_pointer_with_meta(data, meta, &self.ecx);
632                ImmTy::from_immediate(ptr_imm, ty).into()
633            }
634
635            Projection(base, elem) => {
636                let base = self.eval_to_const(base)?;
637                // `Index` by constants should have been replaced by `ConstantIndex` by
638                // `simplify_place_projection`.
639                let elem = elem.try_map(|_| None, |()| ty.ty)?;
640                self.ecx.project(base, elem).discard_err()?
641            }
642            Address { base, projection, .. } => {
643                debug_assert!(!projection.contains(&ProjectionElem::Deref));
644                let pointer = match base {
645                    AddressBase::Deref(pointer) => self.eval_to_const(pointer)?,
646                    // We have no stack to point to.
647                    AddressBase::Local(_) => return None,
648                };
649                let mut mplace = self.ecx.deref_pointer(pointer).discard_err()?;
650                for elem in projection {
651                    // `Index` by constants should have been replaced by `ConstantIndex` by
652                    // `simplify_place_projection`.
653                    let elem = elem.try_map(|_| None, |ty| ty)?;
654                    mplace = self.ecx.project(&mplace, elem).discard_err()?;
655                }
656                let pointer = mplace.to_ref(&self.ecx);
657                ImmTy::from_immediate(pointer, ty).into()
658            }
659
660            Discriminant(base) => {
661                let base = self.eval_to_const(base)?;
662                let variant = self.ecx.read_discriminant(base).discard_err()?;
663                let discr_value =
664                    self.ecx.discriminant_for_variant(base.layout.ty, variant).discard_err()?;
665                discr_value.into()
666            }
667            UnaryOp(un_op, operand) => {
668                let operand = self.eval_to_const(operand)?;
669                let operand = self.ecx.read_immediate(operand).discard_err()?;
670                let val = self.ecx.unary_op(un_op, &operand).discard_err()?;
671                val.into()
672            }
673            BinaryOp(bin_op, lhs, rhs) => {
674                let lhs = self.eval_to_const(lhs)?;
675                let rhs = self.eval_to_const(rhs)?;
676                let lhs = self.ecx.read_immediate(lhs).discard_err()?;
677                let rhs = self.ecx.read_immediate(rhs).discard_err()?;
678                let val = self.ecx.binary_op(bin_op, &lhs, &rhs).discard_err()?;
679                val.into()
680            }
681            Cast { kind, value } => match kind {
682                CastKind::IntToInt | CastKind::IntToFloat => {
683                    let value = self.eval_to_const(value)?;
684                    let value = self.ecx.read_immediate(value).discard_err()?;
685                    let res = self.ecx.int_to_int_or_float(&value, ty).discard_err()?;
686                    res.into()
687                }
688                CastKind::FloatToFloat | CastKind::FloatToInt => {
689                    let value = self.eval_to_const(value)?;
690                    let value = self.ecx.read_immediate(value).discard_err()?;
691                    let res = self.ecx.float_to_float_or_int(&value, ty).discard_err()?;
692                    res.into()
693                }
694                CastKind::Transmute | CastKind::Subtype => {
695                    let value = self.eval_to_const(value)?;
696                    // `offset` for immediates generally only supports projections that match the
697                    // type of the immediate. However, as a HACK, we exploit that it can also do
698                    // limited transmutes: it only works between types with the same layout, and
699                    // cannot transmute pointers to integers.
700                    if value.as_mplace_or_imm().is_right() {
701                        let can_transmute = match (value.layout.backend_repr, ty.backend_repr) {
702                            (BackendRepr::Scalar(s1), BackendRepr::Scalar(s2)) => {
703                                s1.size(&self.ecx) == s2.size(&self.ecx)
704                                    && !matches!(s1.primitive(), Primitive::Pointer(..))
705                            }
706                            (BackendRepr::ScalarPair(a1, b1), BackendRepr::ScalarPair(a2, b2)) => {
707                                a1.size(&self.ecx) == a2.size(&self.ecx)
708                                    && b1.size(&self.ecx) == b2.size(&self.ecx)
709                                    // The alignment of the second component determines its offset, so that also needs to match.
710                                    && b1.align(&self.ecx) == b2.align(&self.ecx)
711                                    // None of the inputs may be a pointer.
712                                    && !matches!(a1.primitive(), Primitive::Pointer(..))
713                                    && !matches!(b1.primitive(), Primitive::Pointer(..))
714                            }
715                            _ => false,
716                        };
717                        if !can_transmute {
718                            return None;
719                        }
720                    }
721                    value.offset(Size::ZERO, ty, &self.ecx).discard_err()?
722                }
723                CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _) => {
724                    let src = self.eval_to_const(value)?;
725                    let dest = self.ecx.allocate(ty, MemoryKind::Stack).discard_err()?;
726                    self.ecx.unsize_into(src, ty, &dest).discard_err()?;
727                    self.ecx
728                        .alloc_mark_immutable(dest.ptr().provenance.unwrap().alloc_id())
729                        .discard_err()?;
730                    dest.into()
731                }
732                CastKind::FnPtrToPtr | CastKind::PtrToPtr => {
733                    let src = self.eval_to_const(value)?;
734                    let src = self.ecx.read_immediate(src).discard_err()?;
735                    let ret = self.ecx.ptr_to_ptr(&src, ty).discard_err()?;
736                    ret.into()
737                }
738                CastKind::PointerCoercion(ty::adjustment::PointerCoercion::UnsafeFnPointer, _) => {
739                    let src = self.eval_to_const(value)?;
740                    let src = self.ecx.read_immediate(src).discard_err()?;
741                    ImmTy::from_immediate(*src, ty).into()
742                }
743                _ => return None,
744            },
745        };
746        Some(op)
747    }
748
749    fn eval_to_const(&mut self, index: VnIndex) -> Option<&'a OpTy<'tcx>> {
750        if let Some(op) = self.evaluated[index] {
751            return op;
752        }
753        let op = self.eval_to_const_inner(index);
754        self.evaluated[index] = Some(self.arena.alloc(op).as_ref());
755        self.evaluated[index].unwrap()
756    }
757
758    /// Represent the *value* we obtain by dereferencing an `Address` value.
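    /// For example (schematic): dereferencing `Address { base: Local(_1), projection: [Field(0)] }`
    /// yields the value stored in `_1.0` (when the value of `_1` is known), and
    /// `Address { base: Deref(p), projection: [] }` yields the value read through the pointer `p`.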
759    #[instrument(level = "trace", skip(self), ret)]
760    fn dereference_address(
761        &mut self,
762        base: AddressBase,
763        projection: &[ProjectionElem<VnIndex, Ty<'tcx>>],
764    ) -> Option<VnIndex> {
765        let (mut place_ty, mut value) = match base {
766            // The base is a local, so we take the local's value and project from it.
767            AddressBase::Local(local) => {
768                let local = self.locals[local]?;
769                let place_ty = PlaceTy::from_ty(self.ty(local));
770                (place_ty, local)
771            }
772            // The base is a pointer's deref, so we introduce the implicit deref.
773            AddressBase::Deref(reborrow) => {
774                let place_ty = PlaceTy::from_ty(self.ty(reborrow));
775                self.project(place_ty, reborrow, ProjectionElem::Deref)?
776            }
777        };
778        for &proj in projection {
779            (place_ty, value) = self.project(place_ty, value, proj)?;
780        }
781        Some(value)
782    }
783
784    #[instrument(level = "trace", skip(self), ret)]
785    fn project(
786        &mut self,
787        place_ty: PlaceTy<'tcx>,
788        value: VnIndex,
789        proj: ProjectionElem<VnIndex, Ty<'tcx>>,
790    ) -> Option<(PlaceTy<'tcx>, VnIndex)> {
791        let projection_ty = place_ty.projection_ty(self.tcx, proj);
792        let proj = match proj {
793            ProjectionElem::Deref => {
794                if let Some(Mutability::Not) = place_ty.ty.ref_mutability()
795                    && projection_ty.ty.is_freeze(self.tcx, self.typing_env())
796                {
797                    if let Value::Address { base, projection, .. } = self.get(value)
798                        && let Some(value) = self.dereference_address(base, projection)
799                    {
800                        return Some((projection_ty, value));
801                    }
802                    // DO NOT reason about the pointer value itself.
803                    // We cannot unify two references obtained by dereferencing the same local,
804                    // because they may have different lifetimes.
805                    // ```
806                    // let b: &T = *a;
807                    // // `a` may be modified here, and `b` and `c` have different borrow
808                    // // lifetimes; unifying them would extend the lifetime of `b`.
809                    // let c: &T = *a;
810                    // ```
811                    if projection_ty.ty.is_ref() {
812                        return None;
813                    }
814
815                    // An immutable borrow `_x` always points to the same value for the
816                    // lifetime of the borrow, so we can merge all instances of `*_x`.
817                    let deref = self
818                        .insert(projection_ty.ty, Value::Projection(value, ProjectionElem::Deref));
819                    return Some((projection_ty, deref));
820                } else {
821                    return None;
822                }
823            }
824            ProjectionElem::Downcast(name, index) => ProjectionElem::Downcast(name, index),
825            ProjectionElem::Field(f, _) => match self.get(value) {
826                Value::Aggregate(_, fields) => return Some((projection_ty, fields[f.as_usize()])),
827                Value::Union(active, field) if active == f => return Some((projection_ty, field)),
828                Value::Projection(outer_value, ProjectionElem::Downcast(_, read_variant))
829                    if let Value::Aggregate(written_variant, fields) = self.get(outer_value)
830                    // This pass is not aware of control-flow, so we do not know whether the
831                    // replacement we are doing is actually reachable. We could be in any arm of
832                    // ```
833                    // match Some(x) {
834                    //     Some(y) => /* stuff */,
835                    //     None => /* other */,
836                    // }
837                    // ```
838                    //
839                    // In surface rust, the current statement would be unreachable.
840                    //
841                    // However, from the reference chapter on enums and RFC 2195,
842                    // accessing the wrong variant is not UB if the enum has repr.
843                    // So it's not impossible for a series of MIR opts to generate
844                    // a downcast to an inactive variant.
845                    && written_variant == read_variant =>
846                {
847                    return Some((projection_ty, fields[f.as_usize()]));
848                }
849                _ => ProjectionElem::Field(f, ()),
850            },
851            ProjectionElem::Index(idx) => {
852                if let Value::Repeat(inner, _) = self.get(value) {
853                    return Some((projection_ty, inner));
854                }
855                ProjectionElem::Index(idx)
856            }
857            ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
858                match self.get(value) {
859                    Value::Repeat(inner, _) => {
860                        return Some((projection_ty, inner));
861                    }
862                    Value::Aggregate(_, operands) => {
863                        let offset = if from_end {
864                            operands.len() - offset as usize
865                        } else {
866                            offset as usize
867                        };
868                        let value = operands.get(offset).copied()?;
869                        return Some((projection_ty, value));
870                    }
871                    _ => {}
872                };
873                ProjectionElem::ConstantIndex { offset, min_length, from_end }
874            }
875            ProjectionElem::Subslice { from, to, from_end } => {
876                ProjectionElem::Subslice { from, to, from_end }
877            }
878            ProjectionElem::OpaqueCast(_) => ProjectionElem::OpaqueCast(()),
879            ProjectionElem::UnwrapUnsafeBinder(_) => ProjectionElem::UnwrapUnsafeBinder(()),
880        };
881
882        let value = self.insert(projection_ty.ty, Value::Projection(value, proj));
883        Some((projection_ty, value))
884    }
885
886    /// Simplify the projection chain if we know better.
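    /// For example (schematic MIR), if the index local `_2` is known to be the constant `3`, the
    /// place `(*_1)[_2]` is rewritten to `(*_1)[3 of 4]` (a `ConstantIndex`); otherwise `_2` may
    /// simply be replaced by another local holding the same value.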
887    #[instrument(level = "trace", skip(self))]
888    fn simplify_place_projection(&mut self, place: &mut Place<'tcx>, location: Location) {
889        // If the projection is indirect, we treat the local as a value, so we can replace it with
890        // another local.
891        if place.is_indirect_first_projection()
892            && let Some(base) = self.locals[place.local]
893            && let Some(new_local) = self.try_as_local(base, location)
894            && place.local != new_local
895        {
896            place.local = new_local;
897            self.reused_locals.insert(new_local);
898        }
899
900        let mut projection = Cow::Borrowed(&place.projection[..]);
901
902        for i in 0..projection.len() {
903            let elem = projection[i];
904            if let ProjectionElem::Index(idx_local) = elem
905                && let Some(idx) = self.locals[idx_local]
906            {
907                if let Some(offset) = self.eval_to_const(idx)
908                    && let Some(offset) = self.ecx.read_target_usize(offset).discard_err()
909                    && let Some(min_length) = offset.checked_add(1)
910                {
911                    projection.to_mut()[i] =
912                        ProjectionElem::ConstantIndex { offset, min_length, from_end: false };
913                } else if let Some(new_idx_local) = self.try_as_local(idx, location)
914                    && idx_local != new_idx_local
915                {
916                    projection.to_mut()[i] = ProjectionElem::Index(new_idx_local);
917                    self.reused_locals.insert(new_idx_local);
918                }
919            }
920        }
921
922        if Cow::is_owned(&projection) {
923            place.projection = self.tcx.mk_place_elems(&projection);
924        }
925
926        trace!(?place);
927    }
928
929    /// Represent the *value* which would be read from `place`. If we succeed, return it.
930    /// If we fail, return a `PlaceRef` that contains the same value.
931    #[instrument(level = "trace", skip(self), ret)]
932    fn compute_place_value(
933        &mut self,
934        place: Place<'tcx>,
935        location: Location,
936    ) -> Result<VnIndex, PlaceRef<'tcx>> {
937        // Invariant: `place` and `place_ref` point to the same value, even if they point to
938        // different memory locations.
939        let mut place_ref = place.as_ref();
940
941        // Invariant: `value` holds the value of the place up to (but excluding) the `index`th projection.
942        let Some(mut value) = self.locals[place.local] else { return Err(place_ref) };
943        // Invariant: `value` has type `place_ty`, with optional downcast variant if needed.
944        let mut place_ty = PlaceTy::from_ty(self.local_decls[place.local].ty);
945        for (index, proj) in place.projection.iter().enumerate() {
946            if let Some(local) = self.try_as_local(value, location) {
947            // Both `local` and `Place { local: place.local, projection: projection[..index] }`
948            // hold the same value. Therefore, the following place holds the same value as the
949            // original `place`.
950                place_ref = PlaceRef { local, projection: &place.projection[index..] };
951            }
952
953            let Some(proj) = proj.try_map(|value| self.locals[value], |ty| ty) else {
954                return Err(place_ref);
955            };
956            let Some(ty_and_value) = self.project(place_ty, value, proj) else {
957                return Err(place_ref);
958            };
959            (place_ty, value) = ty_and_value;
960        }
961
962        Ok(value)
963    }
964
965    /// Represent the *value* which would be read from `place`, and point `place` to a preexisting
966    /// place with the same value (if that already exists).
967    #[instrument(level = "trace", skip(self), ret)]
968    fn simplify_place_value(
969        &mut self,
970        place: &mut Place<'tcx>,
971        location: Location,
972    ) -> Option<VnIndex> {
973        self.simplify_place_projection(place, location);
974
975        match self.compute_place_value(*place, location) {
976            Ok(value) => {
977                if let Some(new_place) = self.try_as_place(value, location, true)
978                    && (new_place.local != place.local
979                        || new_place.projection.len() < place.projection.len())
980                {
981                    *place = new_place;
982                    self.reused_locals.insert(new_place.local);
983                }
984                Some(value)
985            }
986            Err(place_ref) => {
987                if place_ref.local != place.local
988                    || place_ref.projection.len() < place.projection.len()
989                {
990                    // By the invariant on `place_ref`.
991                    *place = place_ref.project_deeper(&[], self.tcx);
992                    self.reused_locals.insert(place_ref.local);
993                }
994                None
995            }
996        }
997    }
998
999    #[instrument(level = "trace", skip(self), ret)]
1000    fn simplify_operand(
1001        &mut self,
1002        operand: &mut Operand<'tcx>,
1003        location: Location,
1004    ) -> Option<VnIndex> {
1005        match *operand {
1006            Operand::RuntimeChecks(c) => {
1007                Some(self.insert(self.tcx.types.bool, Value::RuntimeChecks(c)))
1008            }
1009            Operand::Constant(ref constant) => Some(self.insert_constant(constant.const_)),
1010            Operand::Copy(ref mut place) | Operand::Move(ref mut place) => {
1011                let value = self.simplify_place_value(place, location)?;
1012                if let Some(const_) = self.try_as_constant(value) {
1013                    *operand = Operand::Constant(Box::new(const_));
1014                } else if let Value::RuntimeChecks(c) = self.get(value) {
1015                    *operand = Operand::RuntimeChecks(c);
1016                }
1017                Some(value)
1018            }
1019        }
1020    }
1021
1022    #[instrument(level = "trace", skip(self), ret)]
1023    fn simplify_rvalue(
1024        &mut self,
1025        lhs: &Place<'tcx>,
1026        rvalue: &mut Rvalue<'tcx>,
1027        location: Location,
1028    ) -> Option<VnIndex> {
1029        let value = match *rvalue {
1030            // Forward values.
1031            Rvalue::Use(ref mut operand) => return self.simplify_operand(operand, location),
1032
1033            // Roots.
1034            Rvalue::Repeat(ref mut op, amount) => {
1035                let op = self.simplify_operand(op, location)?;
1036                Value::Repeat(op, amount)
1037            }
1038            Rvalue::Aggregate(..) => return self.simplify_aggregate(rvalue, location),
1039            Rvalue::Ref(_, borrow_kind, ref mut place) => {
1040                self.simplify_place_projection(place, location);
1041                return self.new_pointer(*place, AddressKind::Ref(borrow_kind));
1042            }
1043            Rvalue::RawPtr(mutbl, ref mut place) => {
1044                self.simplify_place_projection(place, location);
1045                return self.new_pointer(*place, AddressKind::Address(mutbl));
1046            }
1047            Rvalue::WrapUnsafeBinder(ref mut op, _) => {
1048                let value = self.simplify_operand(op, location)?;
1049                Value::Cast { kind: CastKind::Transmute, value }
1050            }
1051
1052            // Operations.
1053            Rvalue::Cast(ref mut kind, ref mut value, to) => {
1054                return self.simplify_cast(kind, value, to, location);
1055            }
1056            Rvalue::BinaryOp(op, box (ref mut lhs, ref mut rhs)) => {
1057                return self.simplify_binary(op, lhs, rhs, location);
1058            }
1059            Rvalue::UnaryOp(op, ref mut arg_op) => {
1060                return self.simplify_unary(op, arg_op, location);
1061            }
1062            Rvalue::Discriminant(ref mut place) => {
1063                let place = self.simplify_place_value(place, location)?;
1064                if let Some(discr) = self.simplify_discriminant(place) {
1065                    return Some(discr);
1066                }
1067                Value::Discriminant(place)
1068            }
1069
1070            // Unsupported values.
1071            Rvalue::ThreadLocalRef(..) => return None,
1072            Rvalue::CopyForDeref(_) | Rvalue::ShallowInitBox(..) => {
1073                bug!("forbidden in runtime MIR: {rvalue:?}")
1074            }
1075        };
1076        let ty = rvalue.ty(self.local_decls, self.tcx);
1077        Some(self.insert(ty, value))
1078    }
1079
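    /// If the value is a known enum aggregate, read the discriminant of its variant directly
    /// instead of keeping a symbolic `Discriminant` value (e.g., schematically,
    /// `discriminant(Option::<T>::Some(_))` becomes the constant discriminant of `Some`).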
1080    fn simplify_discriminant(&mut self, place: VnIndex) -> Option<VnIndex> {
1081        let enum_ty = self.ty(place);
1082        if enum_ty.is_enum()
1083            && let Value::Aggregate(variant, _) = self.get(place)
1084        {
1085            let discr = self.ecx.discriminant_for_variant(enum_ty, variant).discard_err()?;
1086            return Some(self.insert_scalar(discr.layout.ty, discr.to_scalar()));
1087        }
1088
1089        None
1090    }
1091
1092    fn try_as_place_elem(
1093        &mut self,
1094        ty: Ty<'tcx>,
1095        proj: ProjectionElem<VnIndex, ()>,
1096        loc: Location,
1097    ) -> Option<PlaceElem<'tcx>> {
1098        proj.try_map(
1099            |value| {
1100                let local = self.try_as_local(value, loc)?;
1101                self.reused_locals.insert(local);
1102                Some(local)
1103            },
1104            |()| ty,
1105        )
1106    }
1107
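    /// Detect an aggregate that is rebuilt field-by-field from another value of the same type, so
    /// that (schematically) `S { 0: copy _1.0, 1: copy _1.1 }` can be replaced by a copy of `_1`.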
1108    fn simplify_aggregate_to_copy(
1109        &mut self,
1110        ty: Ty<'tcx>,
1111        variant_index: VariantIdx,
1112        fields: &[VnIndex],
1113    ) -> Option<VnIndex> {
1114        let Some(&first_field) = fields.first() else { return None };
1115        let Value::Projection(copy_from_value, _) = self.get(first_field) else { return None };
1116
1117        // All fields must correspond one-to-one and come from the same aggregate value.
1118        if fields.iter().enumerate().any(|(index, &v)| {
1119            if let Value::Projection(pointer, ProjectionElem::Field(from_index, _)) = self.get(v)
1120                && copy_from_value == pointer
1121                && from_index.index() == index
1122            {
1123                return false;
1124            }
1125            true
1126        }) {
1127            return None;
1128        }
1129
1130        let mut copy_from_local_value = copy_from_value;
1131        if let Value::Projection(pointer, proj) = self.get(copy_from_value)
1132            && let ProjectionElem::Downcast(_, read_variant) = proj
1133        {
1134            if variant_index == read_variant {
1135                // When copying a variant, there is no need to downcast.
1136                copy_from_local_value = pointer;
1137            } else {
1138                // The copied variant must be identical.
1139                return None;
1140            }
1141        }
1142
1143        // Both must be variants of the same type.
1144        if self.ty(copy_from_local_value) == ty { Some(copy_from_local_value) } else { None }
1145    }
1146
1147    fn simplify_aggregate(
1148        &mut self,
1149        rvalue: &mut Rvalue<'tcx>,
1150        location: Location,
1151    ) -> Option<VnIndex> {
1152        let tcx = self.tcx;
1153        let ty = rvalue.ty(self.local_decls, tcx);
1154
1155        let Rvalue::Aggregate(box ref kind, ref mut field_ops) = *rvalue else { bug!() };
1156
1157        if field_ops.is_empty() {
1158            let is_zst = match *kind {
1159                AggregateKind::Array(..)
1160                | AggregateKind::Tuple
1161                | AggregateKind::Closure(..)
1162                | AggregateKind::CoroutineClosure(..) => true,
1163                // Only enums can be non-ZST.
1164                AggregateKind::Adt(did, ..) => tcx.def_kind(did) != DefKind::Enum,
1165                // Coroutines are never ZST, as they at least contain the implicit states.
1166                AggregateKind::Coroutine(..) => false,
1167                AggregateKind::RawPtr(..) => bug!("MIR for RawPtr aggregate must have 2 fields"),
1168            };
1169
1170            if is_zst {
1171                return Some(self.insert_constant(Const::zero_sized(ty)));
1172            }
1173        }
1174
1175        let fields = self.arena.alloc_from_iter(field_ops.iter_mut().map(|op| {
1176            self.simplify_operand(op, location)
1177                .unwrap_or_else(|| self.new_opaque(op.ty(self.local_decls, self.tcx)))
1178        }));
1179
1180        let variant_index = match *kind {
1181            AggregateKind::Array(..) | AggregateKind::Tuple => {
1182                assert!(!field_ops.is_empty());
1183                FIRST_VARIANT
1184            }
1185            AggregateKind::Closure(..)
1186            | AggregateKind::CoroutineClosure(..)
1187            | AggregateKind::Coroutine(..) => FIRST_VARIANT,
1188            AggregateKind::Adt(_, variant_index, _, _, None) => variant_index,
1189            // Do not track unions.
1190            AggregateKind::Adt(_, _, _, _, Some(active_field)) => {
1191                let field = *fields.first()?;
1192                return Some(self.insert(ty, Value::Union(active_field, field)));
1193            }
1194            AggregateKind::RawPtr(..) => {
1195                assert_eq!(field_ops.len(), 2);
1196                let [mut pointer, metadata] = fields.try_into().unwrap();
1197
1198                // Any thin pointer of matching mutability is fine as the data pointer.
1199                let mut was_updated = false;
1200                while let Value::Cast { kind: CastKind::PtrToPtr, value: cast_value } =
1201                    self.get(pointer)
1202                    && let ty::RawPtr(from_pointee_ty, from_mtbl) = self.ty(cast_value).kind()
1203                    && let ty::RawPtr(_, output_mtbl) = ty.kind()
1204                    && from_mtbl == output_mtbl
1205                    && from_pointee_ty.is_sized(self.tcx, self.typing_env())
1206                {
1207                    pointer = cast_value;
1208                    was_updated = true;
1209                }
1210
1211                if was_updated && let Some(op) = self.try_as_operand(pointer, location) {
1212                    field_ops[FieldIdx::ZERO] = op;
1213                }
1214
1215                return Some(self.insert(ty, Value::RawPtr { pointer, metadata }));
1216            }
1217        };
1218
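        // An array literal with more than 4 identical elements, e.g. `[_1, _1, _1, _1, _1]`, is
        // treated (and rewritten) as the repeat expression `[_1; 5]`.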
1219        if ty.is_array()
1220            && fields.len() > 4
1221            && let Ok(&first) = fields.iter().all_equal_value()
1222        {
1223            let len = ty::Const::from_target_usize(self.tcx, fields.len().try_into().unwrap());
1224            if let Some(op) = self.try_as_operand(first, location) {
1225                *rvalue = Rvalue::Repeat(op, len);
1226            }
1227            return Some(self.insert(ty, Value::Repeat(first, len)));
1228        }
1229
1230        if let Some(value) = self.simplify_aggregate_to_copy(ty, variant_index, &fields) {
1231            if let Some(place) = self.try_as_place(value, location, true) {
1232                self.reused_locals.insert(place.local);
1233                *rvalue = Rvalue::Use(Operand::Copy(place));
1234            }
1235            return Some(value);
1236        }
1237
1238        Some(self.insert(ty, Value::Aggregate(variant_index, fields)))
1239    }
1240
1241    #[instrument(level = "trace", skip(self), ret)]
1242    fn simplify_unary(
1243        &mut self,
1244        op: UnOp,
1245        arg_op: &mut Operand<'tcx>,
1246        location: Location,
1247    ) -> Option<VnIndex> {
1248        let mut arg_index = self.simplify_operand(arg_op, location)?;
1249        let arg_ty = self.ty(arg_index);
1250        let ret_ty = op.ty(self.tcx, arg_ty);
1251
1252        // PtrMetadata doesn't care about *const vs *mut vs & vs &mut,
1253        // so start by removing those distinctions so we can update the `Operand`
1254        if op == UnOp::PtrMetadata {
1255            let mut was_updated = false;
1256            loop {
1257                arg_index = match self.get(arg_index) {
1258                    // Pointer casts that preserve metadata, such as
1259                    // `*const [i32]` <-> `*mut [i32]` <-> `*mut [f32]`.
1260                    // It's critical that this not eliminate cases like
1261                    // `*const [T]` -> `*const T` which remove metadata.
1262                    // We run on potentially-generic MIR, though, so unlike codegen
1263                    // we can't always know exactly what the metadata are.
1264                    // To allow things like `*mut (?A, ?T)` <-> `*mut (?B, ?T)`,
1265                    // it's fine to get a projection as the type.
1266                    Value::Cast { kind: CastKind::PtrToPtr, value: inner }
1267                        if self.pointers_have_same_metadata(self.ty(inner), arg_ty) =>
1268                    {
1269                        inner
1270                    }
1271
1272                    // We have an unsizing cast, which assigns the length to wide pointer metadata.
1273                    Value::Cast {
1274                        kind: CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _),
1275                        value: from,
1276                    } if let Some(from) = self.ty(from).builtin_deref(true)
1277                        && let ty::Array(_, len) = from.kind()
1278                        && let Some(to) = self.ty(arg_index).builtin_deref(true)
1279                        && let ty::Slice(..) = to.kind() =>
1280                    {
1281                        return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len)));
1282                    }
1283
1284                    // `&mut *p`, `&raw *p`, etc don't change metadata.
1285                    Value::Address { base: AddressBase::Deref(reborrowed), projection, .. }
1286                        if projection.is_empty() =>
1287                    {
1288                        reborrowed
1289                    }
1290
1291                    _ => break,
1292                };
1293                was_updated = true;
1294            }
1295
1296            if was_updated && let Some(op) = self.try_as_operand(arg_index, location) {
1297                *arg_op = op;
1298            }
1299        }
1300
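        // Algebraic rewrites on the symbolic value (illustrative, not exhaustive):
        //   Not(Not(x)) => x,  Neg(Neg(x)) => x
        //   Not(Eq(a, b)) => Ne(a, b),  Not(Ne(a, b)) => Eq(a, b)
        //   PtrMetadata of a `RawPtr { pointer, metadata }` value => metadata
        //   PtrMetadata of an unsize cast from a pointer to `[T; N]` => the constant `N`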
1301        let value = match (op, self.get(arg_index)) {
1302            (UnOp::Not, Value::UnaryOp(UnOp::Not, inner)) => return Some(inner),
1303            (UnOp::Neg, Value::UnaryOp(UnOp::Neg, inner)) => return Some(inner),
1304            (UnOp::Not, Value::BinaryOp(BinOp::Eq, lhs, rhs)) => {
1305                Value::BinaryOp(BinOp::Ne, lhs, rhs)
1306            }
1307            (UnOp::Not, Value::BinaryOp(BinOp::Ne, lhs, rhs)) => {
1308                Value::BinaryOp(BinOp::Eq, lhs, rhs)
1309            }
1310            (UnOp::PtrMetadata, Value::RawPtr { metadata, .. }) => return Some(metadata),
1311            // We have an unsizing cast, which assigns the length to wide pointer metadata.
1312            (
1313                UnOp::PtrMetadata,
1314                Value::Cast {
1315                    kind: CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _),
1316                    value: inner,
1317                },
1318            ) if let ty::Slice(..) = arg_ty.builtin_deref(true).unwrap().kind()
1319                && let ty::Array(_, len) = self.ty(inner).builtin_deref(true).unwrap().kind() =>
1320            {
1321                return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len)));
1322            }
1323            _ => Value::UnaryOp(op, arg_index),
1324        };
1325        Some(self.insert(ret_ty, value))
1326    }
1327
1328    #[instrument(level = "trace", skip(self), ret)]
1329    fn simplify_binary(
1330        &mut self,
1331        op: BinOp,
1332        lhs_operand: &mut Operand<'tcx>,
1333        rhs_operand: &mut Operand<'tcx>,
1334        location: Location,
1335    ) -> Option<VnIndex> {
1336        let lhs = self.simplify_operand(lhs_operand, location);
1337        let rhs = self.simplify_operand(rhs_operand, location);
1338
1339        // Only short-circuit with `?` after `simplify_operand` has been called
1340        // on both operands, so that both calls happen for their side effects.
1341        let mut lhs = lhs?;
1342        let mut rhs = rhs?;
1343
1344        let lhs_ty = self.ty(lhs);
1345
1346        // If we're comparing pointers, remove `PtrToPtr` casts if the from
1347        // types of both casts and the metadata all match.
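        // Illustrative sketch (hypothetical MIR):
        //   _a = _p as *const u8 (PtrToPtr);   // _p: *const i32
        //   _b = _q as *const u8 (PtrToPtr);   // _q: *const i32
        //   _c = Eq(_a, _b);
        // is compared as `Eq(_p, _q)` instead, since both casts start from the same type
        // and neither side gains or loses metadata.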
1348        if let BinOp::Eq | BinOp::Ne | BinOp::Lt | BinOp::Le | BinOp::Gt | BinOp::Ge = op
1349            && lhs_ty.is_any_ptr()
1350            && let Value::Cast { kind: CastKind::PtrToPtr, value: lhs_value } = self.get(lhs)
1351            && let Value::Cast { kind: CastKind::PtrToPtr, value: rhs_value } = self.get(rhs)
1352            && let lhs_from = self.ty(lhs_value)
1353            && lhs_from == self.ty(rhs_value)
1354            && self.pointers_have_same_metadata(lhs_from, lhs_ty)
1355        {
1356            lhs = lhs_value;
1357            rhs = rhs_value;
1358            if let Some(lhs_op) = self.try_as_operand(lhs, location)
1359                && let Some(rhs_op) = self.try_as_operand(rhs, location)
1360            {
1361                *lhs_operand = lhs_op;
1362                *rhs_operand = rhs_op;
1363            }
1364        }
1365
1366        if let Some(value) = self.simplify_binary_inner(op, lhs_ty, lhs, rhs) {
1367            return Some(value);
1368        }
1369        let ty = op.ty(self.tcx, lhs_ty, self.ty(rhs));
1370        let value = Value::BinaryOp(op, lhs, rhs);
1371        Some(self.insert(ty, value))
1372    }
1373
1374    fn simplify_binary_inner(
1375        &mut self,
1376        op: BinOp,
1377        lhs_ty: Ty<'tcx>,
1378        lhs: VnIndex,
1379        rhs: VnIndex,
1380    ) -> Option<VnIndex> {
1381        // Floats are weird enough that none of the logic below applies.
1382        let reasonable_ty =
1383            lhs_ty.is_integral() || lhs_ty.is_bool() || lhs_ty.is_char() || lhs_ty.is_any_ptr();
1384        if !reasonable_ty {
1385            return None;
1386        }
1387
1388        let layout = self.ecx.layout_of(lhs_ty).ok()?;
1389
1390        let mut as_bits = |value: VnIndex| {
1391            let constant = self.eval_to_const(value)?;
1392            if layout.backend_repr.is_scalar() {
1393                let scalar = self.ecx.read_scalar(constant).discard_err()?;
1394                scalar.to_bits(constant.layout.size).discard_err()
1395            } else {
1396                // `constant` is a wide pointer. Do not evaluate to bits.
1397                None
1398            }
1399        };
1400
1401        // Represent the values as `Left(bits)` or `Right(VnIndex)`.
1402        use Either::{Left, Right};
1403        let a = as_bits(lhs).map_or(Right(lhs), Left);
1404        let b = as_bits(rhs).map_or(Right(rhs), Left);
1405
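        // The arms below apply standard algebraic identities (illustrative summary):
        //   x + 0, x - 0, x | 0, x ^ 0, x << 0, x >> 0, x * 1, x / 1  => x
        //   x * 0, x & 0, 0 / x, 0 % x, 0 << x, 0 >> x                => 0
        //   x - x, x ^ x                                              => 0
        //   x & ALL_ONES => x,  x | ALL_ONES => ALL_ONES
        //   x == x => true,  x != x => false,  plus bit-level comparison of two constants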
1406        let result = match (op, a, b) {
1407            // Neutral elements.
1408            (
1409                BinOp::Add
1410                | BinOp::AddWithOverflow
1411                | BinOp::AddUnchecked
1412                | BinOp::BitOr
1413                | BinOp::BitXor,
1414                Left(0),
1415                Right(p),
1416            )
1417            | (
1418                BinOp::Add
1419                | BinOp::AddWithOverflow
1420                | BinOp::AddUnchecked
1421                | BinOp::BitOr
1422                | BinOp::BitXor
1423                | BinOp::Sub
1424                | BinOp::SubWithOverflow
1425                | BinOp::SubUnchecked
1426                | BinOp::Offset
1427                | BinOp::Shl
1428                | BinOp::Shr,
1429                Right(p),
1430                Left(0),
1431            )
1432            | (BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked, Left(1), Right(p))
1433            | (
1434                BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked | BinOp::Div,
1435                Right(p),
1436                Left(1),
1437            ) => p,
1438            // Attempt to simplify `x & ALL_ONES` to `x`, with `ALL_ONES` depending on type size.
1439            (BinOp::BitAnd, Right(p), Left(ones)) | (BinOp::BitAnd, Left(ones), Right(p))
1440                if ones == layout.size.truncate(u128::MAX)
1441                    || (layout.ty.is_bool() && ones == 1) =>
1442            {
1443                p
1444            }
1445            // Absorbing elements.
1446            (
1447                BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked | BinOp::BitAnd,
1448                _,
1449                Left(0),
1450            )
1451            | (BinOp::Rem, _, Left(1))
1452            | (
1453                BinOp::Mul
1454                | BinOp::MulWithOverflow
1455                | BinOp::MulUnchecked
1456                | BinOp::Div
1457                | BinOp::Rem
1458                | BinOp::BitAnd
1459                | BinOp::Shl
1460                | BinOp::Shr,
1461                Left(0),
1462                _,
1463            ) => self.insert_scalar(lhs_ty, Scalar::from_uint(0u128, layout.size)),
1464            // Attempt to simplify `x | ALL_ONES` to `ALL_ONES`.
1465            (BinOp::BitOr, _, Left(ones)) | (BinOp::BitOr, Left(ones), _)
1466                if ones == layout.size.truncate(u128::MAX)
1467                    || (layout.ty.is_bool() && ones == 1) =>
1468            {
1469                self.insert_scalar(lhs_ty, Scalar::from_uint(ones, layout.size))
1470            }
1471            // Sub/Xor with itself.
1472            (BinOp::Sub | BinOp::SubWithOverflow | BinOp::SubUnchecked | BinOp::BitXor, a, b)
1473                if a == b =>
1474            {
1475                self.insert_scalar(lhs_ty, Scalar::from_uint(0u128, layout.size))
1476            }
1477            // Comparison:
1478            // - if both operands can be computed as bits, just compare the bits;
1479            // - if we proved that both operands have the same value, we can insert true/false;
1480            // - otherwise, do nothing, as we do not try to prove inequality.
1481            (BinOp::Eq, Left(a), Left(b)) => self.insert_bool(a == b),
1482            (BinOp::Eq, a, b) if a == b => self.insert_bool(true),
1483            (BinOp::Ne, Left(a), Left(b)) => self.insert_bool(a != b),
1484            (BinOp::Ne, a, b) if a == b => self.insert_bool(false),
1485            _ => return None,
1486        };
1487
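        // Overflowing ops produce a `(value, overflowed)` pair, so wrap the simplified
        // scalar accordingly. Illustrative: `AddWithOverflow(x, 0)` becomes `(x, false)`.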
1488        if op.is_overflowing() {
1489            let ty = Ty::new_tup(self.tcx, &[self.ty(result), self.tcx.types.bool]);
1490            let false_val = self.insert_bool(false);
1491            Some(self.insert_tuple(ty, &[result, false_val]))
1492        } else {
1493            Some(result)
1494        }
1495    }
1496
1497    fn simplify_cast(
1498        &mut self,
1499        initial_kind: &mut CastKind,
1500        initial_operand: &mut Operand<'tcx>,
1501        to: Ty<'tcx>,
1502        location: Location,
1503    ) -> Option<VnIndex> {
1504        use CastKind::*;
1505        use rustc_middle::ty::adjustment::PointerCoercion::*;
1506
1507        let mut kind = *initial_kind;
1508        let mut value = self.simplify_operand(initial_operand, location)?;
1509        let mut from = self.ty(value);
1510        if from == to {
1511            return Some(value);
1512        }
1513
1514        if let CastKind::PointerCoercion(ReifyFnPointer(_) | ClosureFnPointer(_), _) = kind {
1515            // Each reification of a generic fn may get a different pointer.
1516            // Do not try to merge them.
1517            return Some(self.new_opaque(to));
1518        }
1519
1520        let mut was_ever_updated = false;
1521        loop {
1522            let mut was_updated_this_iteration = false;
1523
1524            // Transmuting between raw pointers is just a pointer cast so long as
1525            // they have the same metadata type (like `*const i32` <=> `*mut u64`
1526            // or `*mut [i32]` <=> `*const [u64]`), including the common special
1527            // case of `*const T` <=> `*mut T`.
1528            if let Transmute = kind
1529                && from.is_raw_ptr()
1530                && to.is_raw_ptr()
1531                && self.pointers_have_same_metadata(from, to)
1532            {
1533                kind = PtrToPtr;
1534                was_updated_this_iteration = true;
1535            }
1536
1537            // If a cast just casts away the metadata again, then we can get it by
1538            // casting the original thin pointer passed to `from_raw_parts`
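            // Illustrative sketch (hypothetical MIR):
            //   _wide = *const [u8] from (_thin, _len);
            //   _back = _wide as *const u8 (PtrToPtr);
            // reuses `_thin`: directly if `_thin` already has type `*const u8`, otherwise
            // through a single `PtrToPtr` cast from `_thin`.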
1539            if let PtrToPtr = kind
1540                && let Value::RawPtr { pointer, .. } = self.get(value)
1541                && let ty::RawPtr(to_pointee, _) = to.kind()
1542                && to_pointee.is_sized(self.tcx, self.typing_env())
1543            {
1544                from = self.ty(pointer);
1545                value = pointer;
1546                was_updated_this_iteration = true;
1547                if from == to {
1548                    return Some(pointer);
1549                }
1550            }
1551
1552            // Aggregate-then-Transmute can just transmute the original field value,
1553            // so long as the bytes of the value come from only a single field.
1554            if let Transmute = kind
1555                && let Value::Aggregate(variant_idx, field_values) = self.get(value)
1556                && let Some((field_idx, field_ty)) =
1557                    self.value_is_all_in_one_field(from, variant_idx)
1558            {
1559                from = field_ty;
1560                value = field_values[field_idx.as_usize()];
1561                was_updated_this_iteration = true;
1562                if field_ty == to {
1563                    return Some(value);
1564                }
1565            }
1566
1567            // Various cast-then-cast cases can be simplified.
1568            if let Value::Cast { kind: inner_kind, value: inner_value } = self.get(value) {
1569                let inner_from = self.ty(inner_value);
1570                let new_kind = match (inner_kind, kind) {
1571                    // Even if there's a narrowing cast in here, that's fine, because
1572                    // things like `*mut [i32] -> *mut i32 -> *const i32` and
1573                    // `*mut [i32] -> *const [i32] -> *const i32` can skip the middle in MIR.
1574                    (PtrToPtr, PtrToPtr) => Some(PtrToPtr),
1575                    // PtrToPtr-then-Transmute is fine so long as the pointer cast is identity:
1576                    // `*const T -> *mut T -> NonNull<T>` is fine, but we need to check for narrowing
1577                    // to skip things like `*const [i32] -> *const i32 -> NonNull<T>`.
1578                    (PtrToPtr, Transmute) if self.pointers_have_same_metadata(inner_from, from) => {
1579                        Some(Transmute)
1580                    }
1581                    // Similarly, for Transmute-then-PtrToPtr. Note that we need to check different
1582                    // variables for their metadata, and thus this can't merge with the previous arm.
1583                    (Transmute, PtrToPtr) if self.pointers_have_same_metadata(from, to) => {
1584                        Some(Transmute)
1585                    }
1586                    // It would be legal to always do this, but we don't want to hide information
1587                    // from the backend that it'd otherwise be able to use for optimizations.
1588                    (Transmute, Transmute)
1589                        if !self.transmute_may_have_niche_of_interest_to_backend(
1590                            inner_from, from, to,
1591                        ) =>
1592                    {
1593                        Some(Transmute)
1594                    }
1595                    _ => None,
1596                };
1597                if let Some(new_kind) = new_kind {
1598                    kind = new_kind;
1599                    from = inner_from;
1600                    value = inner_value;
1601                    was_updated_this_iteration = true;
1602                    if inner_from == to {
1603                        return Some(inner_value);
1604                    }
1605                }
1606            }
1607
1608            if was_updated_this_iteration {
1609                was_ever_updated = true;
1610            } else {
1611                break;
1612            }
1613        }
1614
1615        if was_ever_updated && let Some(op) = self.try_as_operand(value, location) {
1616            *initial_operand = op;
1617            *initial_kind = kind;
1618        }
1619
1620        Some(self.insert(to, Value::Cast { kind, value }))
1621    }
1622
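    /// Whether two pointer types carry the same metadata type. Illustrative: `*const [u8]`
    /// and `*mut [i32]` both use `usize` metadata, while `*const u8` (no metadata) and
    /// `*const [u8]` do not match. If the metadata types are not syntactically equal, they
    /// are normalized before giving up, so partially generic pointees can still compare equal.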
1623    fn pointers_have_same_metadata(&self, left_ptr_ty: Ty<'tcx>, right_ptr_ty: Ty<'tcx>) -> bool {
1624        let left_meta_ty = left_ptr_ty.pointee_metadata_ty_or_projection(self.tcx);
1625        let right_meta_ty = right_ptr_ty.pointee_metadata_ty_or_projection(self.tcx);
1626        if left_meta_ty == right_meta_ty {
1627            true
1628        } else if let Ok(left) =
1629            self.tcx.try_normalize_erasing_regions(self.typing_env(), left_meta_ty)
1630            && let Ok(right) =
1631                self.tcx.try_normalize_erasing_regions(self.typing_env(), right_meta_ty)
1632        {
1633            left == right
1634        } else {
1635            false
1636        }
1637    }
1638
1639    /// Returns `false` if we're confident that the middle type doesn't have an
1640    /// interesting niche so we can skip that step when transmuting.
1641    ///
1642    /// The backend will emit `assume`s when transmuting between types with niches,
1643    /// so we want to preserve `i32 -> char -> u32` so that this data is around,
1644    /// but it's fine to skip whole-range-is-valid steps like `A -> u32 -> B`.
1645    fn transmute_may_have_niche_of_interest_to_backend(
1646        &self,
1647        from_ty: Ty<'tcx>,
1648        middle_ty: Ty<'tcx>,
1649        to_ty: Ty<'tcx>,
1650    ) -> bool {
1651        let Ok(middle_layout) = self.ecx.layout_of(middle_ty) else {
1652            // If it's too generic or something, then assume it might be interesting later.
1653            return true;
1654        };
1655
1656        if middle_layout.uninhabited {
1657            return true;
1658        }
1659
1660        match middle_layout.backend_repr {
1661            BackendRepr::Scalar(mid) => {
1662                if mid.is_always_valid(&self.ecx) {
1663                    // With no niche it's never interesting, so don't bother
1664                    // looking at the layout of the other two types.
1665                    false
1666                } else if let Ok(from_layout) = self.ecx.layout_of(from_ty)
1667                    && !from_layout.uninhabited
1668                    && from_layout.size == middle_layout.size
1669                    && let BackendRepr::Scalar(from_a) = from_layout.backend_repr
1670                    && let mid_range = mid.valid_range(&self.ecx)
1671                    && let from_range = from_a.valid_range(&self.ecx)
1672                    && mid_range.contains_range(from_range, middle_layout.size)
1673                {
1674                    // The `from_range` is a (non-strict) subset of `mid_range`
1675                    // such as if we're doing `bool` -> `ascii::Char` -> `_`,
1676                    // where `from_range: 0..=1` and `mid_range: 0..=127`,
1677                    // and thus the middle doesn't tell us anything we don't
1678                    // already know from the initial type.
1679                    false
1680                } else if let Ok(to_layout) = self.ecx.layout_of(to_ty)
1681                    && !to_layout.uninhabited
1682                    && to_layout.size == middle_layout.size
1683                    && let BackendRepr::Scalar(to_a) = to_layout.backend_repr
1684                    && let mid_range = mid.valid_range(&self.ecx)
1685                    && let to_range = to_a.valid_range(&self.ecx)
1686                    && mid_range.contains_range(to_range, middle_layout.size)
1687                {
1688                    // The `to_range` is a (non-strict) subset of `mid_range`
1689                    // such as if we're doing `_` -> `ascii::Char` -> `bool`,
1690                    // where `mid_range: 0..=127` and `to_range: 0..=1`,
1691                    // and thus the middle doesn't tell us anything we don't
1692                    // already know from the final type.
1693                    false
1694                } else {
1695                    true
1696                }
1697            }
1698            BackendRepr::ScalarPair(a, b) => {
1699                !a.is_always_valid(&self.ecx) || !b.is_always_valid(&self.ecx)
1700            }
1701            BackendRepr::SimdVector { .. }
1702            | BackendRepr::ScalableVector { .. }
1703            | BackendRepr::Memory { .. } => false,
1704        }
1705    }
1706
1707    fn value_is_all_in_one_field(
1708        &self,
1709        ty: Ty<'tcx>,
1710        variant: VariantIdx,
1711    ) -> Option<(FieldIdx, Ty<'tcx>)> {
1712        if let Ok(layout) = self.ecx.layout_of(ty)
1713            && let abi::Variants::Single { index } = layout.variants
1714            && index == variant
1715            && let Some((field_idx, field_layout)) = layout.non_1zst_field(&self.ecx)
1716            && layout.size == field_layout.size
1717        {
1718            // We needed to check the variant to avoid trying to read the tag
1719            // field from an enum where no variants have fields, since that tag
1720            // field isn't in the `Aggregate` from which we're getting values.
1721            Some((field_idx, field_layout.ty))
1722        } else if let ty::Adt(adt, args) = ty.kind()
1723            && adt.is_struct()
1724            && adt.repr().transparent()
1725            && let [single_field] = adt.non_enum_variant().fields.raw.as_slice()
1726        {
1727            Some((FieldIdx::ZERO, single_field.ty(self.tcx, args)))
1728        } else {
1729            None
1730        }
1731    }
1732}
1733
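/// Attempts to turn an interpreter value into a `ConstValue` that is cheap to embed in MIR:
/// ZSTs, scalars without provenance, and allocations that can be interned as-is. Returns
/// `None` for values that would be too large to duplicate or that contain provenance
/// (see the FIXMEs below).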
1734fn op_to_prop_const<'tcx>(
1735    ecx: &mut InterpCx<'tcx, DummyMachine>,
1736    op: &OpTy<'tcx>,
1737) -> Option<ConstValue> {
1738    // Do not attempt to propagate unsized locals.
1739    if op.layout.is_unsized() {
1740        return None;
1741    }
1742
1743    // This constant is a ZST, just return an empty value.
1744    if op.layout.is_zst() {
1745        return Some(ConstValue::ZeroSized);
1746    }
1747
1748    // Do not synthesize overly large constants. Codegen will just memcpy them, which we'd like to
1749    // avoid.
1750    // But we *do* want to synthesize a constant of any size if it is entirely uninit, because that
1751    // benefits codegen, which has special handling for uninit constants.
1752    if !op.is_immediate_uninit()
1753        && !matches!(op.layout.backend_repr, BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..))
1754    {
1755        return None;
1756    }
1757
1758    // If this constant has scalar ABI, return it as a `ConstValue::Scalar`.
1759    if let BackendRepr::Scalar(abi::Scalar::Initialized { .. }) = op.layout.backend_repr
1760        && let Some(scalar) = ecx.read_scalar(op).discard_err()
1761    {
1762        if scalar.try_to_scalar_int().is_err() {
1763            // Check that we do not leak a pointer.
1764            // Those pointers may lose part of their identity in codegen.
1765            // FIXME: remove this hack once https://github.com/rust-lang/rust/issues/79738 is fixed.
1766            return None;
1767        }
1768        return Some(ConstValue::Scalar(scalar));
1769    }
1770
1771    // If this constant is already represented as an `Allocation`,
1772    // try putting it into global memory to return it.
1773    if let Either::Left(mplace) = op.as_mplace_or_imm() {
1774        let (size, _align) = ecx.size_and_align_of_val(&mplace).discard_err()??;
1775
1776        // Do not try interning a value that contains provenance.
1777        // Due to https://github.com/rust-lang/rust/issues/79738, doing so could lead to bugs.
1778        // FIXME: remove this hack once that issue is fixed.
1779        let alloc_ref = ecx.get_ptr_alloc(mplace.ptr(), size).discard_err()??;
1780        if alloc_ref.has_provenance() {
1781            return None;
1782        }
1783
1784        let pointer = mplace.ptr().into_pointer_or_addr().ok()?;
1785        let (prov, offset) = pointer.prov_and_relative_offset();
1786        let alloc_id = prov.alloc_id();
1787        intern_const_alloc_for_constprop(ecx, alloc_id).discard_err()?;
1788
1789        // `alloc_id` may point to a static. Codegen will choke on an `Indirect` with anything
1790        // but `GlobalAlloc::Memory`, so do fall through to copying if needed.
1791        // FIXME: find a way to treat this more uniformly (probably by fixing codegen)
1792        if let GlobalAlloc::Memory(alloc) = ecx.tcx.global_alloc(alloc_id)
1793            // Transmuting a constant is just an offset in the allocation. If the alignment of the
1794            // allocation is not enough, fall back to copying into a properly aligned value.
1795            && alloc.inner().align >= op.layout.align.abi
1796        {
1797            return Some(ConstValue::Indirect { alloc_id, offset });
1798        }
1799    }
1800
1801    // Everything failed: create a new allocation to hold the data.
1802    let alloc_id =
1803        ecx.intern_with_temp_alloc(op.layout, |ecx, dest| ecx.copy_op(op, dest)).discard_err()?;
1804    let value = ConstValue::Indirect { alloc_id, offset: Size::ZERO };
1805
1806    // Check that we do not leak a pointer.
1807    // Those pointers may lose part of their identity in codegen.
1808    // FIXME: remove this hack once https://github.com/rust-lang/rust/issues/79738 is fixed.
1809    if ecx.tcx.global_alloc(alloc_id).unwrap_memory().inner().provenance().ptrs().is_empty() {
1810        return Some(value);
1811    }
1812
1813    None
1814}
1815
1816impl<'tcx> VnState<'_, '_, 'tcx> {
1817    /// If either [`Self::try_as_constant`] or [`Self::try_as_place`] succeeds,
1818    /// returns that result as an [`Operand`].
1819    fn try_as_operand(&mut self, index: VnIndex, location: Location) -> Option<Operand<'tcx>> {
1820        if let Some(const_) = self.try_as_constant(index) {
1821            Some(Operand::Constant(Box::new(const_)))
1822        } else if let Value::RuntimeChecks(c) = self.get(index) {
1823            Some(Operand::RuntimeChecks(c))
1824        } else if let Some(place) = self.try_as_place(index, location, false) {
1825            self.reused_locals.insert(place.local);
1826            Some(Operand::Copy(place))
1827        } else {
1828            None
1829        }
1830    }
1831
1832    /// If `index` is a `Value::Constant`, return the `Constant` to be put in the MIR.
1833    fn try_as_constant(&mut self, index: VnIndex) -> Option<ConstOperand<'tcx>> {
1834        // This was already constant in MIR, do not change it. If the constant is not
1835        // deterministic, adding an additional mention of it in MIR will not give the same value as
1836        // the former mention.
1837        if let Value::Constant { value, disambiguator: None } = self.get(index) {
1838            debug_assert!(value.is_deterministic());
1839            return Some(ConstOperand { span: DUMMY_SP, user_ty: None, const_: value });
1840        }
1841
1842        let op = self.eval_to_const(index)?;
1843        if op.layout.is_unsized() {
1844            // Do not attempt to propagate unsized locals.
1845            return None;
1846        }
1847
1848        let value = op_to_prop_const(&mut self.ecx, op)?;
1849
1850        // Check that we do not leak a pointer.
1851        // Those pointers may lose part of their identity in codegen.
1852        // FIXME: remove this hack once https://github.com/rust-lang/rust/issues/79738 is fixed.
1853        assert!(!value.may_have_provenance(self.tcx, op.layout.size));
1854
1855        let const_ = Const::Val(value, op.layout.ty);
1856        Some(ConstOperand { span: DUMMY_SP, user_ty: None, const_ })
1857    }
1858
1859    /// Construct a place which holds the same value as `index` and for which all locals strictly
1860    /// dominate `loc`. If you used this place, add its base local to `reused_locals` to remove
1861    /// storage statements.
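    /// Illustrative example (hypothetical): for a value `Projection(v, Field(0, _))` where
    /// `v` is held in local `_3`, this can return the place `(_3.0)`, provided the
    /// assignment to `_3` strictly dominates `loc`.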
1862    #[instrument(level = "trace", skip(self), ret)]
1863    fn try_as_place(
1864        &mut self,
1865        mut index: VnIndex,
1866        loc: Location,
1867        allow_complex_projection: bool,
1868    ) -> Option<Place<'tcx>> {
1869        let mut projection = SmallVec::<[PlaceElem<'tcx>; 1]>::new();
1870        loop {
1871            if let Some(local) = self.try_as_local(index, loc) {
1872                projection.reverse();
1873                let place =
1874                    Place { local, projection: self.tcx.mk_place_elems(projection.as_slice()) };
1875                return Some(place);
1876            } else if projection.last() == Some(&PlaceElem::Deref) {
1877                // `Deref` can only be the first projection in a place.
1878                // If we are here, we failed to find a local, and we already have a `Deref`.
1879                // Trying to add projections will only result in an ill-formed place.
1880                return None;
1881            } else if let Value::Projection(pointer, proj) = self.get(index)
1882                && (allow_complex_projection || proj.is_stable_offset())
1883                && let Some(proj) = self.try_as_place_elem(self.ty(index), proj, loc)
1884            {
1885                if proj == PlaceElem::Deref {
1886                    // We can introduce a new dereference if the source value cannot be changed in the body.
1887                    // Dereferencing an immutable argument always gives the same value in the body.
1888                    match self.get(pointer) {
1889                        Value::Argument(_)
1890                            if let Some(Mutability::Not) = self.ty(pointer).ref_mutability() => {}
1891                        _ => {
1892                            return None;
1893                        }
1894                    }
1895                }
1896                projection.push(proj);
1897                index = pointer;
1898            } else {
1899                return None;
1900            }
1901        }
1902    }
1903
1904    /// If there is a local which is assigned `index`, and its assignment strictly dominates `loc`,
1905    /// return it. If you used this local, add it to `reused_locals` to remove storage statements.
1906    fn try_as_local(&mut self, index: VnIndex, loc: Location) -> Option<Local> {
1907        let other = self.rev_locals.get(index)?;
1908        other
1909            .iter()
1910            .find(|&&other| self.ssa.assignment_dominates(&self.dominators, other, loc))
1911            .copied()
1912    }
1913}
1914
1915impl<'tcx> MutVisitor<'tcx> for VnState<'_, '_, 'tcx> {
1916    fn tcx(&self) -> TyCtxt<'tcx> {
1917        self.tcx
1918    }
1919
1920    fn visit_place(&mut self, place: &mut Place<'tcx>, context: PlaceContext, location: Location) {
1921        self.simplify_place_projection(place, location);
1922        self.super_place(place, context, location);
1923    }
1924
1925    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) {
1926        self.simplify_operand(operand, location);
1927        self.super_operand(operand, location);
1928    }
1929
1930    fn visit_assign(
1931        &mut self,
1932        lhs: &mut Place<'tcx>,
1933        rvalue: &mut Rvalue<'tcx>,
1934        location: Location,
1935    ) {
1936        self.simplify_place_projection(lhs, location);
1937
1938        let value = self.simplify_rvalue(lhs, rvalue, location);
1939        if let Some(value) = value {
1940            if let Some(const_) = self.try_as_constant(value) {
1941                *rvalue = Rvalue::Use(Operand::Constant(Box::new(const_)));
1942            } else if let Some(place) = self.try_as_place(value, location, false)
1943                && *rvalue != Rvalue::Use(Operand::Move(place))
1944                && *rvalue != Rvalue::Use(Operand::Copy(place))
1945            {
1946                *rvalue = Rvalue::Use(Operand::Copy(place));
1947                self.reused_locals.insert(place.local);
1948            }
1949        }
1950
1951        if let Some(local) = lhs.as_local()
1952            && self.ssa.is_ssa(local)
1953            && let rvalue_ty = rvalue.ty(self.local_decls, self.tcx)
1954            // FIXME(#112651) `rvalue` may have a type that is a subtype of `local`'s type.
1955            // We can only mark `local` as reusable if we have an exact type match.
1956            && self.local_decls[local].ty == rvalue_ty
1957        {
1958            let value = value.unwrap_or_else(|| self.new_opaque(rvalue_ty));
1959            self.assign(local, value);
1960        }
1961    }
1962
1963    fn visit_terminator(&mut self, terminator: &mut Terminator<'tcx>, location: Location) {
1964        if let Terminator { kind: TerminatorKind::Call { destination, .. }, .. } = terminator {
1965            if let Some(local) = destination.as_local()
1966                && self.ssa.is_ssa(local)
1967            {
1968                let ty = self.local_decls[local].ty;
1969                let opaque = self.new_opaque(ty);
1970                self.assign(local, opaque);
1971            }
1972        }
1973        self.super_terminator(terminator, location);
1974    }
1975}
1976
1977struct StorageRemover<'tcx> {
1978    tcx: TyCtxt<'tcx>,
1979    reused_locals: DenseBitSet<Local>,
1980}
1981
1982impl<'tcx> MutVisitor<'tcx> for StorageRemover<'tcx> {
1983    fn tcx(&self) -> TyCtxt<'tcx> {
1984        self.tcx
1985    }
1986
1987    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, _: Location) {
1988        if let Operand::Move(place) = *operand
1989            && !place.is_indirect_first_projection()
1990            && self.reused_locals.contains(place.local)
1991        {
1992            *operand = Operand::Copy(place);
1993        }
1994    }
1995
1996    fn visit_statement(&mut self, stmt: &mut Statement<'tcx>, loc: Location) {
1997        match stmt.kind {
1998            // When removing storage statements, we need to remove both `StorageLive` and `StorageDead` (#107511).
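            // Illustrative: if `_3` is in `reused_locals`, both `StorageLive(_3)` and
            // `StorageDead(_3)` become nops, since a reused copy of `_3` may now be read
            // outside its original live range.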
1999            StatementKind::StorageLive(l) | StatementKind::StorageDead(l)
2000                if self.reused_locals.contains(l) =>
2001            {
2002                stmt.make_nop(true)
2003            }
2004            _ => self.super_statement(stmt, loc),
2005        }
2006    }
2007}