rustc_mir_transform/
gvn.rs

1//! Global value numbering.
2//!
3//! MIR may contain repeated and/or redundant computations. The objective of this pass is to detect
4//! such redundancies and re-use the already-computed result when possible.
5//!
//! We traverse all assignments `x = rvalue` and operands.
//!
//! For each SSA one, we compute a symbolic representation of values that are assigned to SSA
//! locals. This symbolic representation is defined by the `Value` enum. Each produced instance of
//! `Value` is interned as a `VnIndex`, which allows us to cheaply detect identical values.
//!
//! From those assignments, we construct a mapping `VnIndex -> Vec<(Local, Location)>` of available
//! values, the locals in which they are stored, and the assignment location.
//!
//! For each non-SSA one, we compute the `VnIndex` of the rvalue. If this `VnIndex` is associated to
//! a constant, we replace the rvalue/operand by that constant. Otherwise, if there is an SSA local
//! `y` associated to this `VnIndex`, and if its definition location strictly dominates the
//! assignment to `x`, we replace the assignment by `x = y`.
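//!
//! For instance, assuming the assignment to `_2` dominates the assignment to `_3`:
//! ```ignore (MIR)
//! _2 = some rvalue; // `_2` is SSA; the rvalue is interned as VnIndex i
//! // some MIR
//! _3 = same rvalue; // also VnIndex i, so this assignment becomes `_3 = copy _2`
//! ```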
20//!
//! Opportunistically, this pass also simplifies some `Rvalue`s based on the accumulated knowledge.
22//!
//! # Operational semantics
24//!
25//! Operationally, this pass attempts to prove bitwise equality between locals. Given this MIR:
26//! ```ignore (MIR)
27//! _a = some value // has VnIndex i
28//! // some MIR
29//! _b = some other value // also has VnIndex i
30//! ```
31//!
32//! We consider it to be replaceable by:
33//! ```ignore (MIR)
34//! _a = some value // has VnIndex i
35//! // some MIR
36//! _c = some other value // also has VnIndex i
37//! assume(_a bitwise equal to _c) // follows from having the same VnIndex
38//! _b = _a // follows from the `assume`
39//! ```
40//!
41//! Which is simplifiable to:
42//! ```ignore (MIR)
43//! _a = some value // has VnIndex i
44//! // some MIR
45//! _b = _a
46//! ```
47//!
48//! # Handling of references
49//!
50//! We handle references by assigning a different "provenance" index to each Ref/RawPtr rvalue.
//! This ensures that we do not spuriously merge borrows that should not be merged. Meanwhile, we
52//! consider all the derefs of an immutable reference to a freeze type to give the same value:
53//! ```ignore (MIR)
54//! _a = *_b // _b is &Freeze
55//! _c = *_b // replaced by _c = _a
56//! ```
57//!
58//! # Determinism of constant propagation
59//!
60//! When registering a new `Value`, we attempt to opportunistically evaluate it as a constant.
61//! The evaluated form is inserted in `evaluated` as an `OpTy` or `None` if evaluation failed.
62//!
63//! The difficulty is non-deterministic evaluation of MIR constants. Some `Const` can have
//! different runtime values each time they are evaluated. This is the case for
//! `Const::Slice`, which gets a new pointer each time it is evaluated, and for constants that
//! contain a fn pointer (an `AllocId` pointing to a `GlobalAlloc::Function`), which may resolve to
//! a different symbol in each codegen unit.
68//!
69//! Meanwhile, we want to be able to read indirect constants. For instance:
70//! ```
71//! static A: &'static &'static u8 = &&63;
72//! fn foo() -> u8 {
73//!     **A // We want to replace by 63.
74//! }
75//! fn bar() -> u8 {
76//!     b"abc"[1] // We want to replace by 'b'.
77//! }
78//! ```
79//!
80//! The `Value::Constant` variant stores a possibly unevaluated constant. Evaluating that constant
81//! may be non-deterministic. When that happens, we assign a disambiguator to ensure that we do not
82//! merge the constants. See `duplicate_slice` test in `gvn.rs`.
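//!
//! For instance, the two mentions of the same slice constant below may evaluate to pointers into
//! different allocations, so we must not unify them:
//! ```ignore (MIR)
//! _1 = const "foo";
//! _2 = const "foo"; // NOT replaced by `_2 = copy _1`
//! ```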
83//!
//! As a second precaution, when writing constants back into MIR, we do not emit `Const::Slice`
//! or `Const`s that contain `AllocId`s.
86
87use std::borrow::Cow;
88use std::hash::{Hash, Hasher};
89
90use either::Either;
91use hashbrown::hash_table::{Entry, HashTable};
92use itertools::Itertools as _;
93use rustc_abi::{self as abi, BackendRepr, FIRST_VARIANT, FieldIdx, Primitive, Size, VariantIdx};
94use rustc_arena::DroplessArena;
95use rustc_const_eval::const_eval::DummyMachine;
96use rustc_const_eval::interpret::{
97    ImmTy, Immediate, InterpCx, MemPlaceMeta, MemoryKind, OpTy, Projectable, Scalar,
98    intern_const_alloc_for_constprop,
99};
100use rustc_data_structures::fx::FxHasher;
101use rustc_data_structures::graph::dominators::Dominators;
102use rustc_hir::def::DefKind;
103use rustc_index::bit_set::DenseBitSet;
104use rustc_index::{IndexVec, newtype_index};
105use rustc_middle::bug;
106use rustc_middle::mir::interpret::GlobalAlloc;
107use rustc_middle::mir::visit::*;
108use rustc_middle::mir::*;
109use rustc_middle::ty::layout::HasTypingEnv;
110use rustc_middle::ty::{self, Ty, TyCtxt};
111use rustc_span::DUMMY_SP;
112use smallvec::SmallVec;
113use tracing::{debug, instrument, trace};
114
115use crate::ssa::SsaLocals;
116
117pub(super) struct GVN;
118
119impl<'tcx> crate::MirPass<'tcx> for GVN {
120    fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
121        sess.mir_opt_level() >= 2
122    }
123
124    #[instrument(level = "trace", skip(self, tcx, body))]
125    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
126        debug!(def_id = ?body.source.def_id());
127
128        let typing_env = body.typing_env(tcx);
129        let ssa = SsaLocals::new(tcx, body, typing_env);
130        // Clone dominators because we need them while mutating the body.
131        let dominators = body.basic_blocks.dominators().clone();
132        let maybe_loop_headers = loops::maybe_loop_headers(body);
133
134        let arena = DroplessArena::default();
135        let mut state =
136            VnState::new(tcx, body, typing_env, &ssa, dominators, &body.local_decls, &arena);
137
138        for local in body.args_iter().filter(|&local| ssa.is_ssa(local)) {
139            let opaque = state.new_opaque(body.local_decls[local].ty);
140            state.assign(local, opaque);
141        }
142
143        let reverse_postorder = body.basic_blocks.reverse_postorder().to_vec();
144        for bb in reverse_postorder {
            // N.B. With loops, reverse postorder does not yield a valid topological order:
            // a statement or terminator inside the loop that has not been processed yet may have
            // performed an indirect write.
147            if maybe_loop_headers.contains(bb) {
148                state.invalidate_derefs();
149            }
150            let data = &mut body.basic_blocks.as_mut_preserves_cfg()[bb];
151            state.visit_basic_block_data(bb, data);
152        }
153
        // For each local that is reused (`y` above), we remove its storage statements to avoid any
        // difficulty. Those locals are SSA, so they should be easy for LLVM to optimize even
        // without storage statements.
157        StorageRemover { tcx, reused_locals: state.reused_locals }.visit_body_preserves_cfg(body);
158    }
159
160    fn is_required(&self) -> bool {
161        false
162    }
163}
164
165newtype_index! {
166    /// This represents a `Value` in the symbolic execution.
167    #[debug_format = "_v{}"]
168    struct VnIndex {}
169}
170
171/// Marker type to forbid hashing and comparing opaque values.
172/// This struct should only be constructed by `ValueSet::insert_unique` to ensure we use that
173/// method to create non-unifiable values. It will ICE if used in `ValueSet::insert`.
174#[derive(Copy, Clone, Debug, Eq)]
175struct VnOpaque;
176impl PartialEq for VnOpaque {
177    fn eq(&self, _: &VnOpaque) -> bool {
178        // ICE if we try to compare unique values
179        unreachable!()
180    }
181}
182impl Hash for VnOpaque {
183    fn hash<T: Hasher>(&self, _: &mut T) {
184        // ICE if we try to hash unique values
185        unreachable!()
186    }
187}
188
189#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
190enum AddressKind {
191    Ref(BorrowKind),
192    Address(RawPtrKind),
193}
194
195#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
196enum AddressBase {
197    /// This address is based on this local.
198    Local(Local),
199    /// This address is based on the deref of this pointer.
200    Deref(VnIndex),
201}
202
203#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
204enum Value<'a, 'tcx> {
205    // Root values.
206    /// Used to represent values we know nothing about.
207    /// The `usize` is a counter incremented by `new_opaque`.
208    Opaque(VnOpaque),
209    /// Evaluated or unevaluated constant value.
210    Constant {
211        value: Const<'tcx>,
212        /// Some constants do not have a deterministic value. To avoid merging two instances of the
        /// same `Const`, we assign them an additional opaque disambiguator.
214        // `disambiguator` is `None` iff the constant is deterministic.
215        disambiguator: Option<VnOpaque>,
216    },
217
218    // Aggregates.
    /// An aggregate value: a tuple, closure, struct or enum.
    /// This does not include unions, as we cannot soundly reason about their value.
221    Aggregate(VariantIdx, &'a [VnIndex]),
222    /// A union aggregate value.
223    Union(FieldIdx, VnIndex),
224    /// A raw pointer aggregate built from a thin pointer and metadata.
225    RawPtr {
226        /// Thin pointer component. This is field 0 in MIR.
227        pointer: VnIndex,
228        /// Metadata component. This is field 1 in MIR.
229        metadata: VnIndex,
230    },
231    /// This corresponds to a `[value; count]` expression.
232    Repeat(VnIndex, ty::Const<'tcx>),
233    /// The address of a place.
234    Address {
235        base: AddressBase,
236        // We do not use a plain `Place` as we want to be able to reason about indices.
237        // This does not contain any `Deref` projection.
238        projection: &'a [ProjectionElem<VnIndex, Ty<'tcx>>],
239        kind: AddressKind,
240        /// Give each borrow and pointer a different provenance, so we don't merge them.
241        provenance: VnOpaque,
242    },
243
244    // Extractions.
245    /// This is the *value* obtained by projecting another value.
246    Projection(VnIndex, ProjectionElem<VnIndex, ()>),
247    /// Discriminant of the given value.
248    Discriminant(VnIndex),
249
250    // Operations.
251    NullaryOp(NullOp<'tcx>, Ty<'tcx>),
252    UnaryOp(UnOp, VnIndex),
253    BinaryOp(BinOp, VnIndex, VnIndex),
254    Cast {
255        kind: CastKind,
256        value: VnIndex,
257    },
258}
259
/// Stores and deduplicates pairs of `(Value, Ty)` into `VnIndex`-numbered values.
261///
262/// This data structure is mostly a partial reimplementation of `FxIndexMap<VnIndex, (Value, Ty)>`.
263/// We do not use a regular `FxIndexMap` to skip hashing values that are unique by construction,
/// like opaque values, addresses with provenance, and non-deterministic constants.
265struct ValueSet<'a, 'tcx> {
266    indices: HashTable<VnIndex>,
267    hashes: IndexVec<VnIndex, u64>,
268    values: IndexVec<VnIndex, Value<'a, 'tcx>>,
269    types: IndexVec<VnIndex, Ty<'tcx>>,
270}
271
272impl<'a, 'tcx> ValueSet<'a, 'tcx> {
273    fn new(num_values: usize) -> ValueSet<'a, 'tcx> {
274        ValueSet {
275            indices: HashTable::with_capacity(num_values),
276            hashes: IndexVec::with_capacity(num_values),
277            values: IndexVec::with_capacity(num_values),
278            types: IndexVec::with_capacity(num_values),
279        }
280    }
281
282    /// Insert a `(Value, Ty)` pair without hashing or deduplication.
283    /// This always creates a new `VnIndex`.
284    #[inline]
285    fn insert_unique(
286        &mut self,
287        ty: Ty<'tcx>,
288        value: impl FnOnce(VnOpaque) -> Value<'a, 'tcx>,
289    ) -> VnIndex {
290        let value = value(VnOpaque);
291
292        debug_assert!(match value {
293            Value::Opaque(_) | Value::Address { .. } => true,
294            Value::Constant { disambiguator, .. } => disambiguator.is_some(),
295            _ => false,
296        });
297
298        let index = self.hashes.push(0);
299        let _index = self.types.push(ty);
300        debug_assert_eq!(index, _index);
301        let _index = self.values.push(value);
302        debug_assert_eq!(index, _index);
303        index
304    }
305
306    /// Insert a `(Value, Ty)` pair to be deduplicated.
    /// Returns `true` as the second tuple field if this value did not exist previously.
308    #[allow(rustc::pass_by_value)] // closures take `&VnIndex`
309    fn insert(&mut self, ty: Ty<'tcx>, value: Value<'a, 'tcx>) -> (VnIndex, bool) {
310        debug_assert!(match value {
311            Value::Opaque(_) | Value::Address { .. } => false,
312            Value::Constant { disambiguator, .. } => disambiguator.is_none(),
313            _ => true,
314        });
315
316        let hash: u64 = {
317            let mut h = FxHasher::default();
318            value.hash(&mut h);
319            ty.hash(&mut h);
320            h.finish()
321        };
322
323        let eq = |index: &VnIndex| self.values[*index] == value && self.types[*index] == ty;
324        let hasher = |index: &VnIndex| self.hashes[*index];
325        match self.indices.entry(hash, eq, hasher) {
326            Entry::Occupied(entry) => {
327                let index = *entry.get();
328                (index, false)
329            }
330            Entry::Vacant(entry) => {
331                let index = self.hashes.push(hash);
332                entry.insert(index);
333                let _index = self.values.push(value);
334                debug_assert_eq!(index, _index);
335                let _index = self.types.push(ty);
336                debug_assert_eq!(index, _index);
337                (index, true)
338            }
339        }
340    }
341
342    /// Return the `Value` associated with the given `VnIndex`.
343    #[inline]
344    fn value(&self, index: VnIndex) -> Value<'a, 'tcx> {
345        self.values[index]
346    }
347
348    /// Return the type associated with the given `VnIndex`.
349    #[inline]
350    fn ty(&self, index: VnIndex) -> Ty<'tcx> {
351        self.types[index]
352    }
353
354    /// Replace the value associated with `index` with an opaque value.
355    #[inline]
356    fn forget(&mut self, index: VnIndex) {
357        self.values[index] = Value::Opaque(VnOpaque);
358    }
359}
360
361struct VnState<'body, 'a, 'tcx> {
362    tcx: TyCtxt<'tcx>,
363    ecx: InterpCx<'tcx, DummyMachine>,
364    local_decls: &'body LocalDecls<'tcx>,
365    is_coroutine: bool,
366    /// Value stored in each local.
367    locals: IndexVec<Local, Option<VnIndex>>,
368    /// Locals that are assigned that value.
369    // This vector does not hold all the values of `VnIndex` that we create.
370    rev_locals: IndexVec<VnIndex, SmallVec<[Local; 1]>>,
371    values: ValueSet<'a, 'tcx>,
372    /// Values evaluated as constants if possible.
373    /// - `None` are values not computed yet;
374    /// - `Some(None)` are values for which computation has failed;
375    /// - `Some(Some(op))` are successful computations.
376    evaluated: IndexVec<VnIndex, Option<Option<&'a OpTy<'tcx>>>>,
377    /// Cache the deref values.
378    derefs: Vec<VnIndex>,
379    ssa: &'body SsaLocals,
380    dominators: Dominators<BasicBlock>,
381    reused_locals: DenseBitSet<Local>,
382    arena: &'a DroplessArena,
383}
384
385impl<'body, 'a, 'tcx> VnState<'body, 'a, 'tcx> {
386    fn new(
387        tcx: TyCtxt<'tcx>,
388        body: &Body<'tcx>,
389        typing_env: ty::TypingEnv<'tcx>,
390        ssa: &'body SsaLocals,
391        dominators: Dominators<BasicBlock>,
392        local_decls: &'body LocalDecls<'tcx>,
393        arena: &'a DroplessArena,
394    ) -> Self {
395        // Compute a rough estimate of the number of values in the body from the number of
396        // statements. This is meant to reduce the number of allocations, but it's all right if
397        // we miss the exact amount. We estimate based on 2 values per statement (one in LHS and
398        // one in RHS) and 4 values per terminator (for call operands).
399        let num_values =
400            2 * body.basic_blocks.iter().map(|bbdata| bbdata.statements.len()).sum::<usize>()
401                + 4 * body.basic_blocks.len();
402        VnState {
403            tcx,
404            ecx: InterpCx::new(tcx, DUMMY_SP, typing_env, DummyMachine),
405            local_decls,
406            is_coroutine: body.coroutine.is_some(),
407            locals: IndexVec::from_elem(None, local_decls),
408            rev_locals: IndexVec::with_capacity(num_values),
409            values: ValueSet::new(num_values),
410            evaluated: IndexVec::with_capacity(num_values),
411            derefs: Vec::new(),
412            ssa,
413            dominators,
414            reused_locals: DenseBitSet::new_empty(local_decls.len()),
415            arena,
416        }
417    }
418
419    fn typing_env(&self) -> ty::TypingEnv<'tcx> {
420        self.ecx.typing_env()
421    }
422
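    /// Intern `value` with type `ty` and return its `VnIndex`. For newly created values, also
    /// grow the `evaluated` and `rev_locals` side tables so they stay indexed in lockstep.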
423    #[instrument(level = "trace", skip(self), ret)]
424    fn insert(&mut self, ty: Ty<'tcx>, value: Value<'a, 'tcx>) -> VnIndex {
425        let (index, new) = self.values.insert(ty, value);
426        if new {
427            // Grow `evaluated` and `rev_locals` here to amortize the allocations.
428            let _index = self.evaluated.push(None);
429            debug_assert_eq!(index, _index);
430            let _index = self.rev_locals.push(SmallVec::new());
431            debug_assert_eq!(index, _index);
432        }
433        index
434    }
435
436    /// Create a new `Value` for which we have no information at all, except that it is distinct
437    /// from all the others.
438    #[instrument(level = "trace", skip(self), ret)]
439    fn new_opaque(&mut self, ty: Ty<'tcx>) -> VnIndex {
440        let index = self.values.insert_unique(ty, Value::Opaque);
441        let _index = self.evaluated.push(Some(None));
442        debug_assert_eq!(index, _index);
443        let _index = self.rev_locals.push(SmallVec::new());
444        debug_assert_eq!(index, _index);
445        index
446    }
447
448    /// Create a new `Value::Address` distinct from all the others.
449    #[instrument(level = "trace", skip(self), ret)]
450    fn new_pointer(&mut self, place: Place<'tcx>, kind: AddressKind) -> Option<VnIndex> {
451        let pty = place.ty(self.local_decls, self.tcx).ty;
452        let ty = match kind {
453            AddressKind::Ref(bk) => {
454                Ty::new_ref(self.tcx, self.tcx.lifetimes.re_erased, pty, bk.to_mutbl_lossy())
455            }
456            AddressKind::Address(mutbl) => Ty::new_ptr(self.tcx, pty, mutbl.to_mutbl_lossy()),
457        };
458
459        let mut projection = place.projection.iter();
460        let base = if place.is_indirect_first_projection() {
461            let base = self.locals[place.local]?;
462            // Skip the initial `Deref`.
463            projection.next();
464            AddressBase::Deref(base)
465        } else {
466            AddressBase::Local(place.local)
467        };
        // Do not try evaluating inside `Index`; this has already been done by `simplify_place_projection`.
469        let projection =
470            projection.map(|proj| proj.try_map(|index| self.locals[index], |ty| ty).ok_or(()));
471        let projection = self.arena.try_alloc_from_iter(projection).ok()?;
472
473        let index = self.values.insert_unique(ty, |provenance| Value::Address {
474            base,
475            projection,
476            kind,
477            provenance,
478        });
479        let _index = self.evaluated.push(None);
480        debug_assert_eq!(index, _index);
481        let _index = self.rev_locals.push(SmallVec::new());
482        debug_assert_eq!(index, _index);
483
484        Some(index)
485    }
486
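    /// Intern a MIR constant as a `Value::Constant`. Deterministic constants are deduplicated;
    /// non-deterministic ones get a fresh disambiguator so that separate mentions never unify.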
487    #[instrument(level = "trace", skip(self), ret)]
488    fn insert_constant(&mut self, value: Const<'tcx>) -> VnIndex {
489        let (index, new) = if value.is_deterministic() {
490            // The constant is deterministic, no need to disambiguate.
491            let constant = Value::Constant { value, disambiguator: None };
492            self.values.insert(value.ty(), constant)
493        } else {
494            // Multiple mentions of this constant will yield different values,
495            // so assign a different `disambiguator` to ensure they do not get the same `VnIndex`.
496            let index = self.values.insert_unique(value.ty(), |disambiguator| Value::Constant {
497                value,
498                disambiguator: Some(disambiguator),
499            });
500            (index, true)
501        };
502        if new {
503            let _index = self.evaluated.push(None);
504            debug_assert_eq!(index, _index);
505            let _index = self.rev_locals.push(SmallVec::new());
506            debug_assert_eq!(index, _index);
507        }
508        index
509    }
510
511    #[inline]
512    fn get(&self, index: VnIndex) -> Value<'a, 'tcx> {
513        self.values.value(index)
514    }
515
516    #[inline]
517    fn ty(&self, index: VnIndex) -> Ty<'tcx> {
518        self.values.ty(index)
519    }
520
521    /// Record that `local` is assigned `value`. `local` must be SSA.
522    #[instrument(level = "trace", skip(self))]
523    fn assign(&mut self, local: Local, value: VnIndex) {
524        debug_assert!(self.ssa.is_ssa(local));
525        self.locals[local] = Some(value);
526        self.rev_locals[value].push(local);
527    }
528
529    fn insert_bool(&mut self, flag: bool) -> VnIndex {
530        // Booleans are deterministic.
531        let value = Const::from_bool(self.tcx, flag);
532        debug_assert!(value.is_deterministic());
533        self.insert(self.tcx.types.bool, Value::Constant { value, disambiguator: None })
534    }
535
536    fn insert_scalar(&mut self, ty: Ty<'tcx>, scalar: Scalar) -> VnIndex {
537        // Scalars are deterministic.
538        let value = Const::from_scalar(self.tcx, scalar, ty);
539        debug_assert!(value.is_deterministic());
540        self.insert(ty, Value::Constant { value, disambiguator: None })
541    }
542
543    fn insert_tuple(&mut self, ty: Ty<'tcx>, values: &[VnIndex]) -> VnIndex {
544        self.insert(ty, Value::Aggregate(VariantIdx::ZERO, self.arena.alloc_slice(values)))
545    }
546
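    /// Intern the value `*value` and remember it in `derefs` so that it can be forgotten when an
    /// indirect write may have modified the pointed-to memory.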
547    fn insert_deref(&mut self, ty: Ty<'tcx>, value: VnIndex) -> VnIndex {
548        let value = self.insert(ty, Value::Projection(value, ProjectionElem::Deref));
549        self.derefs.push(value);
550        value
551    }
552
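    /// Forget all cached deref values: an indirect write may have modified memory, so previously
    /// recorded `*x` values can no longer be relied upon.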
553    fn invalidate_derefs(&mut self) {
554        for deref in std::mem::take(&mut self.derefs) {
555            self.values.forget(deref);
556        }
557    }
558
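    /// Try to evaluate the given value to a constant `OpTy` using the interpreter with
    /// `DummyMachine`. Returns `None` if evaluation fails or is not worthwhile
    /// (e.g. opaque values and repeat expressions).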
559    #[instrument(level = "trace", skip(self), ret)]
560    fn eval_to_const_inner(&mut self, value: VnIndex) -> Option<OpTy<'tcx>> {
561        use Value::*;
562        let ty = self.ty(value);
563        // Avoid computing layouts inside a coroutine, as that can cause cycles.
564        let ty = if !self.is_coroutine || ty.is_scalar() {
565            self.ecx.layout_of(ty).ok()?
566        } else {
567            return None;
568        };
569        let op = match self.get(value) {
570            _ if ty.is_zst() => ImmTy::uninit(ty).into(),
571
572            Opaque(_) => return None,
573            // Do not bother evaluating repeat expressions. This would uselessly consume memory.
574            Repeat(..) => return None,
575
576            Constant { ref value, disambiguator: _ } => {
577                self.ecx.eval_mir_constant(value, DUMMY_SP, None).discard_err()?
578            }
579            Aggregate(variant, ref fields) => {
580                let fields =
581                    fields.iter().map(|&f| self.eval_to_const(f)).collect::<Option<Vec<_>>>()?;
582                let variant = if ty.ty.is_enum() { Some(variant) } else { None };
583                if matches!(ty.backend_repr, BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..))
584                {
585                    let dest = self.ecx.allocate(ty, MemoryKind::Stack).discard_err()?;
586                    let variant_dest = if let Some(variant) = variant {
587                        self.ecx.project_downcast(&dest, variant).discard_err()?
588                    } else {
589                        dest.clone()
590                    };
591                    for (field_index, op) in fields.into_iter().enumerate() {
592                        let field_dest = self
593                            .ecx
594                            .project_field(&variant_dest, FieldIdx::from_usize(field_index))
595                            .discard_err()?;
596                        self.ecx.copy_op(op, &field_dest).discard_err()?;
597                    }
598                    self.ecx
599                        .write_discriminant(variant.unwrap_or(FIRST_VARIANT), &dest)
600                        .discard_err()?;
601                    self.ecx
602                        .alloc_mark_immutable(dest.ptr().provenance.unwrap().alloc_id())
603                        .discard_err()?;
604                    dest.into()
605                } else {
606                    return None;
607                }
608            }
609            Union(active_field, field) => {
610                let field = self.eval_to_const(field)?;
611                if matches!(ty.backend_repr, BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..))
612                {
613                    let dest = self.ecx.allocate(ty, MemoryKind::Stack).discard_err()?;
614                    let field_dest = self.ecx.project_field(&dest, active_field).discard_err()?;
615                    self.ecx.copy_op(field, &field_dest).discard_err()?;
616                    self.ecx
617                        .alloc_mark_immutable(dest.ptr().provenance.unwrap().alloc_id())
618                        .discard_err()?;
619                    dest.into()
620                } else {
621                    return None;
622                }
623            }
624            RawPtr { pointer, metadata } => {
625                let pointer = self.eval_to_const(pointer)?;
626                let metadata = self.eval_to_const(metadata)?;
627
628                // Pointers don't have fields, so don't `project_field` them.
629                let data = self.ecx.read_pointer(pointer).discard_err()?;
630                let meta = if metadata.layout.is_zst() {
631                    MemPlaceMeta::None
632                } else {
633                    MemPlaceMeta::Meta(self.ecx.read_scalar(metadata).discard_err()?)
634                };
635                let ptr_imm = Immediate::new_pointer_with_meta(data, meta, &self.ecx);
636                ImmTy::from_immediate(ptr_imm, ty).into()
637            }
638
639            Projection(base, elem) => {
640                let base = self.eval_to_const(base)?;
641                // `Index` by constants should have been replaced by `ConstantIndex` by
642                // `simplify_place_projection`.
643                let elem = elem.try_map(|_| None, |()| ty.ty)?;
644                self.ecx.project(base, elem).discard_err()?
645            }
646            Address { base, projection, .. } => {
647                debug_assert!(!projection.contains(&ProjectionElem::Deref));
648                let pointer = match base {
649                    AddressBase::Deref(pointer) => self.eval_to_const(pointer)?,
650                    // We have no stack to point to.
651                    AddressBase::Local(_) => return None,
652                };
653                let mut mplace = self.ecx.deref_pointer(pointer).discard_err()?;
654                for elem in projection {
655                    // `Index` by constants should have been replaced by `ConstantIndex` by
656                    // `simplify_place_projection`.
657                    let elem = elem.try_map(|_| None, |ty| ty)?;
658                    mplace = self.ecx.project(&mplace, elem).discard_err()?;
659                }
660                let pointer = mplace.to_ref(&self.ecx);
661                ImmTy::from_immediate(pointer, ty).into()
662            }
663
664            Discriminant(base) => {
665                let base = self.eval_to_const(base)?;
666                let variant = self.ecx.read_discriminant(base).discard_err()?;
667                let discr_value =
668                    self.ecx.discriminant_for_variant(base.layout.ty, variant).discard_err()?;
669                discr_value.into()
670            }
671            NullaryOp(null_op, arg_ty) => {
672                let arg_layout = self.ecx.layout_of(arg_ty).ok()?;
673                if let NullOp::SizeOf | NullOp::AlignOf = null_op
674                    && arg_layout.is_unsized()
675                {
676                    return None;
677                }
678                let val = match null_op {
679                    NullOp::SizeOf => arg_layout.size.bytes(),
680                    NullOp::AlignOf => arg_layout.align.bytes(),
681                    NullOp::OffsetOf(fields) => self
682                        .tcx
683                        .offset_of_subfield(self.typing_env(), arg_layout, fields.iter())
684                        .bytes(),
685                    NullOp::UbChecks => return None,
686                    NullOp::ContractChecks => return None,
687                };
688                ImmTy::from_uint(val, ty).into()
689            }
690            UnaryOp(un_op, operand) => {
691                let operand = self.eval_to_const(operand)?;
692                let operand = self.ecx.read_immediate(operand).discard_err()?;
693                let val = self.ecx.unary_op(un_op, &operand).discard_err()?;
694                val.into()
695            }
696            BinaryOp(bin_op, lhs, rhs) => {
697                let lhs = self.eval_to_const(lhs)?;
698                let rhs = self.eval_to_const(rhs)?;
699                let lhs = self.ecx.read_immediate(lhs).discard_err()?;
700                let rhs = self.ecx.read_immediate(rhs).discard_err()?;
701                let val = self.ecx.binary_op(bin_op, &lhs, &rhs).discard_err()?;
702                val.into()
703            }
704            Cast { kind, value } => match kind {
705                CastKind::IntToInt | CastKind::IntToFloat => {
706                    let value = self.eval_to_const(value)?;
707                    let value = self.ecx.read_immediate(value).discard_err()?;
708                    let res = self.ecx.int_to_int_or_float(&value, ty).discard_err()?;
709                    res.into()
710                }
711                CastKind::FloatToFloat | CastKind::FloatToInt => {
712                    let value = self.eval_to_const(value)?;
713                    let value = self.ecx.read_immediate(value).discard_err()?;
714                    let res = self.ecx.float_to_float_or_int(&value, ty).discard_err()?;
715                    res.into()
716                }
717                CastKind::Transmute | CastKind::Subtype => {
718                    let value = self.eval_to_const(value)?;
719                    // `offset` for immediates generally only supports projections that match the
720                    // type of the immediate. However, as a HACK, we exploit that it can also do
721                    // limited transmutes: it only works between types with the same layout, and
722                    // cannot transmute pointers to integers.
723                    if value.as_mplace_or_imm().is_right() {
724                        let can_transmute = match (value.layout.backend_repr, ty.backend_repr) {
725                            (BackendRepr::Scalar(s1), BackendRepr::Scalar(s2)) => {
726                                s1.size(&self.ecx) == s2.size(&self.ecx)
727                                    && !matches!(s1.primitive(), Primitive::Pointer(..))
728                            }
729                            (BackendRepr::ScalarPair(a1, b1), BackendRepr::ScalarPair(a2, b2)) => {
730                                a1.size(&self.ecx) == a2.size(&self.ecx)
731                                    && b1.size(&self.ecx) == b2.size(&self.ecx)
732                                    // The alignment of the second component determines its offset, so that also needs to match.
733                                    && b1.align(&self.ecx) == b2.align(&self.ecx)
734                                    // None of the inputs may be a pointer.
735                                    && !matches!(a1.primitive(), Primitive::Pointer(..))
736                                    && !matches!(b1.primitive(), Primitive::Pointer(..))
737                            }
738                            _ => false,
739                        };
740                        if !can_transmute {
741                            return None;
742                        }
743                    }
744                    value.offset(Size::ZERO, ty, &self.ecx).discard_err()?
745                }
746                CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _) => {
747                    let src = self.eval_to_const(value)?;
748                    let dest = self.ecx.allocate(ty, MemoryKind::Stack).discard_err()?;
749                    self.ecx.unsize_into(src, ty, &dest).discard_err()?;
750                    self.ecx
751                        .alloc_mark_immutable(dest.ptr().provenance.unwrap().alloc_id())
752                        .discard_err()?;
753                    dest.into()
754                }
755                CastKind::FnPtrToPtr | CastKind::PtrToPtr => {
756                    let src = self.eval_to_const(value)?;
757                    let src = self.ecx.read_immediate(src).discard_err()?;
758                    let ret = self.ecx.ptr_to_ptr(&src, ty).discard_err()?;
759                    ret.into()
760                }
761                CastKind::PointerCoercion(ty::adjustment::PointerCoercion::UnsafeFnPointer, _) => {
762                    let src = self.eval_to_const(value)?;
763                    let src = self.ecx.read_immediate(src).discard_err()?;
764                    ImmTy::from_immediate(*src, ty).into()
765                }
766                _ => return None,
767            },
768        };
769        Some(op)
770    }
771
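    /// Memoizing wrapper around `eval_to_const_inner`: each value is evaluated at most once, and
    /// the result is arena-allocated and cached in `evaluated`.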
772    fn eval_to_const(&mut self, index: VnIndex) -> Option<&'a OpTy<'tcx>> {
773        if let Some(op) = self.evaluated[index] {
774            return op;
775        }
776        let op = self.eval_to_const_inner(index);
777        self.evaluated[index] = Some(self.arena.alloc(op).as_ref());
778        self.evaluated[index].unwrap()
779    }
780
781    /// Represent the *value* we obtain by dereferencing an `Address` value.
782    #[instrument(level = "trace", skip(self), ret)]
783    fn dereference_address(
784        &mut self,
785        base: AddressBase,
786        projection: &[ProjectionElem<VnIndex, Ty<'tcx>>],
787    ) -> Option<VnIndex> {
788        let (mut place_ty, mut value) = match base {
789            // The base is a local, so we take the local's value and project from it.
790            AddressBase::Local(local) => {
791                let local = self.locals[local]?;
792                let place_ty = PlaceTy::from_ty(self.ty(local));
793                (place_ty, local)
794            }
795            // The base is a pointer's deref, so we introduce the implicit deref.
796            AddressBase::Deref(reborrow) => {
797                let place_ty = PlaceTy::from_ty(self.ty(reborrow));
798                self.project(place_ty, reborrow, ProjectionElem::Deref)?
799            }
800        };
801        for &proj in projection {
802            (place_ty, value) = self.project(place_ty, value, proj)?;
803        }
804        Some(value)
805    }
806
807    #[instrument(level = "trace", skip(self), ret)]
808    fn project(
809        &mut self,
810        place_ty: PlaceTy<'tcx>,
811        value: VnIndex,
812        proj: ProjectionElem<VnIndex, Ty<'tcx>>,
813    ) -> Option<(PlaceTy<'tcx>, VnIndex)> {
814        let projection_ty = place_ty.projection_ty(self.tcx, proj);
815        let proj = match proj {
816            ProjectionElem::Deref => {
817                if let Some(Mutability::Not) = place_ty.ty.ref_mutability()
818                    && projection_ty.ty.is_freeze(self.tcx, self.typing_env())
819                {
820                    if let Value::Address { base, projection, .. } = self.get(value)
821                        && let Some(value) = self.dereference_address(base, projection)
822                    {
823                        return Some((projection_ty, value));
824                    }
825
826                    // An immutable borrow `_x` always points to the same value for the
827                    // lifetime of the borrow, so we can merge all instances of `*_x`.
828                    return Some((projection_ty, self.insert_deref(projection_ty.ty, value)));
829                } else {
830                    return None;
831                }
832            }
833            ProjectionElem::Downcast(name, index) => ProjectionElem::Downcast(name, index),
834            ProjectionElem::Field(f, _) => match self.get(value) {
835                Value::Aggregate(_, fields) => return Some((projection_ty, fields[f.as_usize()])),
836                Value::Union(active, field) if active == f => return Some((projection_ty, field)),
837                Value::Projection(outer_value, ProjectionElem::Downcast(_, read_variant))
838                    if let Value::Aggregate(written_variant, fields) = self.get(outer_value)
839                    // This pass is not aware of control-flow, so we do not know whether the
840                    // replacement we are doing is actually reachable. We could be in any arm of
841                    // ```
842                    // match Some(x) {
843                    //     Some(y) => /* stuff */,
844                    //     None => /* other */,
845                    // }
846                    // ```
847                    //
848                    // In surface rust, the current statement would be unreachable.
849                    //
850                    // However, from the reference chapter on enums and RFC 2195,
851                    // accessing the wrong variant is not UB if the enum has repr.
852                    // So it's not impossible for a series of MIR opts to generate
853                    // a downcast to an inactive variant.
854                    && written_variant == read_variant =>
855                {
856                    return Some((projection_ty, fields[f.as_usize()]));
857                }
858                _ => ProjectionElem::Field(f, ()),
859            },
860            ProjectionElem::Index(idx) => {
861                if let Value::Repeat(inner, _) = self.get(value) {
862                    return Some((projection_ty, inner));
863                }
864                ProjectionElem::Index(idx)
865            }
866            ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
867                match self.get(value) {
868                    Value::Repeat(inner, _) => {
869                        return Some((projection_ty, inner));
870                    }
871                    Value::Aggregate(_, operands) => {
872                        let offset = if from_end {
873                            operands.len() - offset as usize
874                        } else {
875                            offset as usize
876                        };
877                        let value = operands.get(offset).copied()?;
878                        return Some((projection_ty, value));
879                    }
880                    _ => {}
881                };
882                ProjectionElem::ConstantIndex { offset, min_length, from_end }
883            }
884            ProjectionElem::Subslice { from, to, from_end } => {
885                ProjectionElem::Subslice { from, to, from_end }
886            }
887            ProjectionElem::OpaqueCast(_) => ProjectionElem::OpaqueCast(()),
888            ProjectionElem::UnwrapUnsafeBinder(_) => ProjectionElem::UnwrapUnsafeBinder(()),
889        };
890
891        let value = self.insert(projection_ty.ty, Value::Projection(value, proj));
892        Some((projection_ty, value))
893    }
894
895    /// Simplify the projection chain if we know better.
896    #[instrument(level = "trace", skip(self))]
897    fn simplify_place_projection(&mut self, place: &mut Place<'tcx>, location: Location) {
        // If the projection is indirect, we treat the local as a value, so we can replace it
        // with another local.
900        if place.is_indirect_first_projection()
901            && let Some(base) = self.locals[place.local]
902            && let Some(new_local) = self.try_as_local(base, location)
903            && place.local != new_local
904        {
905            place.local = new_local;
906            self.reused_locals.insert(new_local);
907        }
908
909        let mut projection = Cow::Borrowed(&place.projection[..]);
910
911        for i in 0..projection.len() {
912            let elem = projection[i];
913            if let ProjectionElem::Index(idx_local) = elem
914                && let Some(idx) = self.locals[idx_local]
915            {
916                if let Some(offset) = self.eval_to_const(idx)
917                    && let Some(offset) = self.ecx.read_target_usize(offset).discard_err()
918                    && let Some(min_length) = offset.checked_add(1)
919                {
920                    projection.to_mut()[i] =
921                        ProjectionElem::ConstantIndex { offset, min_length, from_end: false };
922                } else if let Some(new_idx_local) = self.try_as_local(idx, location)
923                    && idx_local != new_idx_local
924                {
925                    projection.to_mut()[i] = ProjectionElem::Index(new_idx_local);
926                    self.reused_locals.insert(new_idx_local);
927                }
928            }
929        }
930
931        if projection.is_owned() {
932            place.projection = self.tcx.mk_place_elems(&projection);
933        }
934
935        trace!(?place);
936    }
937
938    /// Represent the *value* which would be read from `place`. If we succeed, return it.
939    /// If we fail, return a `PlaceRef` that contains the same value.
940    #[instrument(level = "trace", skip(self), ret)]
941    fn compute_place_value(
942        &mut self,
943        place: Place<'tcx>,
944        location: Location,
945    ) -> Result<VnIndex, PlaceRef<'tcx>> {
946        // Invariant: `place` and `place_ref` point to the same value, even if they point to
947        // different memory locations.
948        let mut place_ref = place.as_ref();
949
950        // Invariant: `value` holds the value up-to the `index`th projection excluded.
951        let Some(mut value) = self.locals[place.local] else { return Err(place_ref) };
952        // Invariant: `value` has type `place_ty`, with optional downcast variant if needed.
953        let mut place_ty = PlaceTy::from_ty(self.local_decls[place.local].ty);
954        for (index, proj) in place.projection.iter().enumerate() {
955            if let Some(local) = self.try_as_local(value, location) {
956                // Both `local` and `Place { local: place.local, projection: projection[..index] }`
                // hold the same value. Therefore, the following place holds the value of the
                // original `place`.
959                place_ref = PlaceRef { local, projection: &place.projection[index..] };
960            }
961
962            let Some(proj) = proj.try_map(|value| self.locals[value], |ty| ty) else {
963                return Err(place_ref);
964            };
965            let Some(ty_and_value) = self.project(place_ty, value, proj) else {
966                return Err(place_ref);
967            };
968            (place_ty, value) = ty_and_value;
969        }
970
971        Ok(value)
972    }
973
974    /// Represent the *value* which would be read from `place`, and point `place` to a preexisting
975    /// place with the same value (if that already exists).
976    #[instrument(level = "trace", skip(self), ret)]
977    fn simplify_place_value(
978        &mut self,
979        place: &mut Place<'tcx>,
980        location: Location,
981    ) -> Option<VnIndex> {
982        self.simplify_place_projection(place, location);
983
984        match self.compute_place_value(*place, location) {
985            Ok(value) => {
986                if let Some(new_place) = self.try_as_place(value, location, true)
987                    && (new_place.local != place.local
988                        || new_place.projection.len() < place.projection.len())
989                {
990                    *place = new_place;
991                    self.reused_locals.insert(new_place.local);
992                }
993                Some(value)
994            }
995            Err(place_ref) => {
996                if place_ref.local != place.local
997                    || place_ref.projection.len() < place.projection.len()
998                {
999                    // By the invariant on `place_ref`.
1000                    *place = place_ref.project_deeper(&[], self.tcx);
1001                    self.reused_locals.insert(place_ref.local);
1002                }
1003                None
1004            }
1005        }
1006    }
1007
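    /// Compute the `VnIndex` of an operand. If the operand reads a place whose value is a known
    /// constant, the operand is replaced by that constant.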
1008    #[instrument(level = "trace", skip(self), ret)]
1009    fn simplify_operand(
1010        &mut self,
1011        operand: &mut Operand<'tcx>,
1012        location: Location,
1013    ) -> Option<VnIndex> {
1014        match *operand {
1015            Operand::Constant(ref constant) => Some(self.insert_constant(constant.const_)),
1016            Operand::Copy(ref mut place) | Operand::Move(ref mut place) => {
1017                let value = self.simplify_place_value(place, location)?;
1018                if let Some(const_) = self.try_as_constant(value) {
1019                    *operand = Operand::Constant(Box::new(const_));
1020                }
1021                Some(value)
1022            }
1023        }
1024    }
1025
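    /// Compute the `VnIndex` of `rvalue`, simplifying the rvalue in place when the accumulated
    /// knowledge allows it.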
1026    #[instrument(level = "trace", skip(self), ret)]
1027    fn simplify_rvalue(
1028        &mut self,
1029        lhs: &Place<'tcx>,
1030        rvalue: &mut Rvalue<'tcx>,
1031        location: Location,
1032    ) -> Option<VnIndex> {
1033        let value = match *rvalue {
1034            // Forward values.
1035            Rvalue::Use(ref mut operand) => return self.simplify_operand(operand, location),
1036
1037            // Roots.
1038            Rvalue::Repeat(ref mut op, amount) => {
1039                let op = self.simplify_operand(op, location)?;
1040                Value::Repeat(op, amount)
1041            }
1042            Rvalue::NullaryOp(op, ty) => Value::NullaryOp(op, ty),
1043            Rvalue::Aggregate(..) => return self.simplify_aggregate(lhs, rvalue, location),
1044            Rvalue::Ref(_, borrow_kind, ref mut place) => {
1045                self.simplify_place_projection(place, location);
1046                return self.new_pointer(*place, AddressKind::Ref(borrow_kind));
1047            }
1048            Rvalue::RawPtr(mutbl, ref mut place) => {
1049                self.simplify_place_projection(place, location);
1050                return self.new_pointer(*place, AddressKind::Address(mutbl));
1051            }
1052            Rvalue::WrapUnsafeBinder(ref mut op, _) => {
1053                let value = self.simplify_operand(op, location)?;
1054                Value::Cast { kind: CastKind::Transmute, value }
1055            }
1056
1057            // Operations.
1058            Rvalue::Cast(ref mut kind, ref mut value, to) => {
1059                return self.simplify_cast(kind, value, to, location);
1060            }
1061            Rvalue::BinaryOp(op, box (ref mut lhs, ref mut rhs)) => {
1062                return self.simplify_binary(op, lhs, rhs, location);
1063            }
1064            Rvalue::UnaryOp(op, ref mut arg_op) => {
1065                return self.simplify_unary(op, arg_op, location);
1066            }
1067            Rvalue::Discriminant(ref mut place) => {
1068                let place = self.simplify_place_value(place, location)?;
1069                if let Some(discr) = self.simplify_discriminant(place) {
1070                    return Some(discr);
1071                }
1072                Value::Discriminant(place)
1073            }
1074
1075            // Unsupported values.
1076            Rvalue::ThreadLocalRef(..) => return None,
1077            Rvalue::CopyForDeref(_) | Rvalue::ShallowInitBox(..) => {
1078                bug!("forbidden in runtime MIR: {rvalue:?}")
1079            }
1080        };
1081        let ty = rvalue.ty(self.local_decls, self.tcx);
1082        Some(self.insert(ty, value))
1083    }
1084
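    /// If `place` is known to be an aggregate whose variant is statically known, return its
    /// discriminant as a constant.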
1085    fn simplify_discriminant(&mut self, place: VnIndex) -> Option<VnIndex> {
1086        let enum_ty = self.ty(place);
1087        if enum_ty.is_enum()
1088            && let Value::Aggregate(variant, _) = self.get(place)
1089        {
1090            let discr = self.ecx.discriminant_for_variant(enum_ty, variant).discard_err()?;
1091            return Some(self.insert_scalar(discr.layout.ty, discr.to_scalar()));
1092        }
1093
1094        None
1095    }
1096
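    /// Turn an abstract projection element back into a `PlaceElem`, replacing an interned index
    /// value by a local that holds it.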
1097    fn try_as_place_elem(
1098        &mut self,
1099        ty: Ty<'tcx>,
1100        proj: ProjectionElem<VnIndex, ()>,
1101        loc: Location,
1102    ) -> Option<PlaceElem<'tcx>> {
1103        proj.try_map(
1104            |value| {
1105                let local = self.try_as_local(value, loc)?;
1106                self.reused_locals.insert(local);
1107                Some(local)
1108            },
1109            |()| ty,
1110        )
1111    }
1112
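    /// Detect an aggregate built by copying each field, in order, from a single existing value of
    /// the same type; return that value so the aggregate can be replaced by a copy of it.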
1113    fn simplify_aggregate_to_copy(
1114        &mut self,
1115        ty: Ty<'tcx>,
1116        variant_index: VariantIdx,
1117        fields: &[VnIndex],
1118    ) -> Option<VnIndex> {
1119        let Some(&first_field) = fields.first() else { return None };
1120        let Value::Projection(copy_from_value, _) = self.get(first_field) else { return None };
1121
1122        // All fields must correspond one-to-one and come from the same aggregate value.
1123        if fields.iter().enumerate().any(|(index, &v)| {
1124            if let Value::Projection(pointer, ProjectionElem::Field(from_index, _)) = self.get(v)
1125                && copy_from_value == pointer
1126                && from_index.index() == index
1127            {
1128                return false;
1129            }
1130            true
1131        }) {
1132            return None;
1133        }
1134
1135        let mut copy_from_local_value = copy_from_value;
1136        if let Value::Projection(pointer, proj) = self.get(copy_from_value)
1137            && let ProjectionElem::Downcast(_, read_variant) = proj
1138        {
1139            if variant_index == read_variant {
1140                // When copying a variant, there is no need to downcast.
1141                copy_from_local_value = pointer;
1142            } else {
1143                // The copied variant must be identical.
1144                return None;
1145            }
1146        }
1147
1148        // Both must be variants of the same type.
1149        if self.ty(copy_from_local_value) == ty { Some(copy_from_local_value) } else { None }
1150    }
1151
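    /// Compute the `VnIndex` of an aggregate rvalue, simplifying it to a zero-sized constant, a
    /// union value, a repeat expression or a whole-value copy when possible.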
1152    fn simplify_aggregate(
1153        &mut self,
1154        lhs: &Place<'tcx>,
1155        rvalue: &mut Rvalue<'tcx>,
1156        location: Location,
1157    ) -> Option<VnIndex> {
1158        let tcx = self.tcx;
1159        let ty = rvalue.ty(self.local_decls, tcx);
1160
1161        let Rvalue::Aggregate(box ref kind, ref mut field_ops) = *rvalue else { bug!() };
1162
1163        if field_ops.is_empty() {
1164            let is_zst = match *kind {
1165                AggregateKind::Array(..)
1166                | AggregateKind::Tuple
1167                | AggregateKind::Closure(..)
1168                | AggregateKind::CoroutineClosure(..) => true,
1169                // Only enums can be non-ZST.
1170                AggregateKind::Adt(did, ..) => tcx.def_kind(did) != DefKind::Enum,
1171                // Coroutines are never ZST, as they at least contain the implicit states.
1172                AggregateKind::Coroutine(..) => false,
1173                AggregateKind::RawPtr(..) => bug!("MIR for RawPtr aggregate must have 2 fields"),
1174            };
1175
1176            if is_zst {
1177                return Some(self.insert_constant(Const::zero_sized(ty)));
1178            }
1179        }
1180
1181        let fields = self.arena.alloc_from_iter(field_ops.iter_mut().map(|op| {
1182            self.simplify_operand(op, location)
1183                .unwrap_or_else(|| self.new_opaque(op.ty(self.local_decls, self.tcx)))
1184        }));
1185
1186        let variant_index = match *kind {
1187            AggregateKind::Array(..) | AggregateKind::Tuple => {
1188                assert!(!field_ops.is_empty());
1189                FIRST_VARIANT
1190            }
1191            AggregateKind::Closure(..)
1192            | AggregateKind::CoroutineClosure(..)
1193            | AggregateKind::Coroutine(..) => FIRST_VARIANT,
1194            AggregateKind::Adt(_, variant_index, _, _, None) => variant_index,
1195            // Do not track unions.
1196            AggregateKind::Adt(_, _, _, _, Some(active_field)) => {
1197                let field = *fields.first()?;
1198                return Some(self.insert(ty, Value::Union(active_field, field)));
1199            }
1200            AggregateKind::RawPtr(..) => {
1201                assert_eq!(field_ops.len(), 2);
1202                let [mut pointer, metadata] = fields.try_into().unwrap();
1203
1204                // Any thin pointer of matching mutability is fine as the data pointer.
1205                let mut was_updated = false;
1206                while let Value::Cast { kind: CastKind::PtrToPtr, value: cast_value } =
1207                    self.get(pointer)
1208                    && let ty::RawPtr(from_pointee_ty, from_mtbl) = self.ty(cast_value).kind()
1209                    && let ty::RawPtr(_, output_mtbl) = ty.kind()
1210                    && from_mtbl == output_mtbl
1211                    && from_pointee_ty.is_sized(self.tcx, self.typing_env())
1212                {
1213                    pointer = cast_value;
1214                    was_updated = true;
1215                }
1216
1217                if was_updated && let Some(op) = self.try_as_operand(pointer, location) {
1218                    field_ops[FieldIdx::ZERO] = op;
1219                }
1220
1221                return Some(self.insert(ty, Value::RawPtr { pointer, metadata }));
1222            }
1223        };
1224
1225        if ty.is_array()
1226            && fields.len() > 4
1227            && let Ok(&first) = fields.iter().all_equal_value()
1228        {
1229            let len = ty::Const::from_target_usize(self.tcx, fields.len().try_into().unwrap());
1230            if let Some(op) = self.try_as_operand(first, location) {
1231                *rvalue = Rvalue::Repeat(op, len);
1232            }
1233            return Some(self.insert(ty, Value::Repeat(first, len)));
1234        }
1235
1236        if let Some(value) = self.simplify_aggregate_to_copy(ty, variant_index, &fields) {
1237            // Allow introducing places with non-constant offsets, as those are still better than
1238            // reconstructing an aggregate. But avoid creating `*a = copy (*b)`, as they might be
1239            // aliases resulting in overlapping assignments.
1240            let allow_complex_projection =
1241                lhs.projection[..].iter().all(PlaceElem::is_stable_offset);
1242            if let Some(place) = self.try_as_place(value, location, allow_complex_projection) {
1243                self.reused_locals.insert(place.local);
1244                *rvalue = Rvalue::Use(Operand::Copy(place));
1245            }
1246            return Some(value);
1247        }
1248
1249        Some(self.insert(ty, Value::Aggregate(variant_index, fields)))
1250    }
1251
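    /// Compute the `VnIndex` of a unary operation, simplifying double negations, negated
    /// comparisons, and `PtrMetadata` applied to pointers whose metadata is known.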
1252    #[instrument(level = "trace", skip(self), ret)]
1253    fn simplify_unary(
1254        &mut self,
1255        op: UnOp,
1256        arg_op: &mut Operand<'tcx>,
1257        location: Location,
1258    ) -> Option<VnIndex> {
1259        let mut arg_index = self.simplify_operand(arg_op, location)?;
1260        let arg_ty = self.ty(arg_index);
1261        let ret_ty = op.ty(self.tcx, arg_ty);
1262
1263        // PtrMetadata doesn't care about *const vs *mut vs & vs &mut,
1264        // so start by removing those distinctions so we can update the `Operand`
1265        if op == UnOp::PtrMetadata {
1266            let mut was_updated = false;
1267            loop {
1268                arg_index = match self.get(arg_index) {
1269                    // Pointer casts that preserve metadata, such as
1270                    // `*const [i32]` <-> `*mut [i32]` <-> `*mut [f32]`.
1271                    // It's critical that this not eliminate cases like
1272                    // `*const [T]` -> `*const T` which remove metadata.
1273                    // We run on potentially-generic MIR, though, so unlike codegen
1274                    // we can't always know exactly what the metadata are.
1275                    // To allow things like `*mut (?A, ?T)` <-> `*mut (?B, ?T)`,
1276                    // it's fine to get a projection as the type.
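                        // E.g. with `_p: *const [i32]`, a hypothetical `PtrMetadata(_p as *mut [f32])`
                        // can read its metadata (the slice length) from `_p` directly.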
1277                    Value::Cast { kind: CastKind::PtrToPtr, value: inner }
1278                        if self.pointers_have_same_metadata(self.ty(inner), arg_ty) =>
1279                    {
1280                        inner
1281                    }
1282
1283                    // We have an unsizing cast, which assigns the length to wide pointer metadata.
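                        // E.g. for a hypothetical `_w = _a as *const [u8] (Unsize)` with
                        // `_a: *const [u8; 3]`, `PtrMetadata(_w)` is just the constant `3_usize`.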
1284                    Value::Cast {
1285                        kind: CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _),
1286                        value: from,
1287                    } if let Some(from) = self.ty(from).builtin_deref(true)
1288                        && let ty::Array(_, len) = from.kind()
1289                        && let Some(to) = self.ty(arg_index).builtin_deref(true)
1290                        && let ty::Slice(..) = to.kind() =>
1291                    {
1292                        return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len)));
1293                    }
1294
1295                    // `&mut *p`, `&raw *p`, etc don't change metadata.
1296                    Value::Address { base: AddressBase::Deref(reborrowed), projection, .. }
1297                        if projection.is_empty() =>
1298                    {
1299                        reborrowed
1300                    }
1301
1302                    _ => break,
1303                };
1304                was_updated = true;
1305            }
1306
1307            if was_updated && let Some(op) = self.try_as_operand(arg_index, location) {
1308                *arg_op = op;
1309            }
1310        }
1311
1312        let value = match (op, self.get(arg_index)) {
1313            (UnOp::Not, Value::UnaryOp(UnOp::Not, inner)) => return Some(inner),
1314            (UnOp::Neg, Value::UnaryOp(UnOp::Neg, inner)) => return Some(inner),
1315            (UnOp::Not, Value::BinaryOp(BinOp::Eq, lhs, rhs)) => {
1316                Value::BinaryOp(BinOp::Ne, lhs, rhs)
1317            }
1318            (UnOp::Not, Value::BinaryOp(BinOp::Ne, lhs, rhs)) => {
1319                Value::BinaryOp(BinOp::Eq, lhs, rhs)
1320            }
1321            (UnOp::PtrMetadata, Value::RawPtr { metadata, .. }) => return Some(metadata),
1322            // We have an unsizing cast, which assigns the length to wide pointer metadata.
1323            (
1324                UnOp::PtrMetadata,
1325                Value::Cast {
1326                    kind: CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _),
1327                    value: inner,
1328                },
1329            ) if let ty::Slice(..) = arg_ty.builtin_deref(true).unwrap().kind()
1330                && let ty::Array(_, len) = self.ty(inner).builtin_deref(true).unwrap().kind() =>
1331            {
1332                return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len)));
1333            }
1334            _ => Value::UnaryOp(op, arg_index),
1335        };
1336        Some(self.insert(ret_ty, value))
1337    }
1338
1339    #[instrument(level = "trace", skip(self), ret)]
1340    fn simplify_binary(
1341        &mut self,
1342        op: BinOp,
1343        lhs_operand: &mut Operand<'tcx>,
1344        rhs_operand: &mut Operand<'tcx>,
1345        location: Location,
1346    ) -> Option<VnIndex> {
1347        let lhs = self.simplify_operand(lhs_operand, location);
1348        let rhs = self.simplify_operand(rhs_operand, location);
1349
1350        // Only short-circuit after calling `simplify_operand` on both operands,
1351        // since we want its side effects (operand rewriting) on each, even if the other fails.
1352        let mut lhs = lhs?;
1353        let mut rhs = rhs?;
1354
1355        let lhs_ty = self.ty(lhs);
1356
1357        // If we're comparing pointers, remove `PtrToPtr` casts if the from
1358        // types of both casts and the metadata all match.
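            // E.g. comparing `(_a as *const ()) == (_b as *const ())` where `_a` and `_b` are
            // both `*const u8` can instead compare `_a == _b` directly.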
1359        if let BinOp::Eq | BinOp::Ne | BinOp::Lt | BinOp::Le | BinOp::Gt | BinOp::Ge = op
1360            && lhs_ty.is_any_ptr()
1361            && let Value::Cast { kind: CastKind::PtrToPtr, value: lhs_value } = self.get(lhs)
1362            && let Value::Cast { kind: CastKind::PtrToPtr, value: rhs_value } = self.get(rhs)
1363            && let lhs_from = self.ty(lhs_value)
1364            && lhs_from == self.ty(rhs_value)
1365            && self.pointers_have_same_metadata(lhs_from, lhs_ty)
1366        {
1367            lhs = lhs_value;
1368            rhs = rhs_value;
1369            if let Some(lhs_op) = self.try_as_operand(lhs, location)
1370                && let Some(rhs_op) = self.try_as_operand(rhs, location)
1371            {
1372                *lhs_operand = lhs_op;
1373                *rhs_operand = rhs_op;
1374            }
1375        }
1376
1377        if let Some(value) = self.simplify_binary_inner(op, lhs_ty, lhs, rhs) {
1378            return Some(value);
1379        }
1380        let ty = op.ty(self.tcx, lhs_ty, self.ty(rhs));
1381        let value = Value::BinaryOp(op, lhs, rhs);
1382        Some(self.insert(ty, value))
1383    }
1384
1385    fn simplify_binary_inner(
1386        &mut self,
1387        op: BinOp,
1388        lhs_ty: Ty<'tcx>,
1389        lhs: VnIndex,
1390        rhs: VnIndex,
1391    ) -> Option<VnIndex> {
1392        // Floats are weird enough that none of the logic below applies.
1393        let reasonable_ty =
1394            lhs_ty.is_integral() || lhs_ty.is_bool() || lhs_ty.is_char() || lhs_ty.is_any_ptr();
1395        if !reasonable_ty {
1396            return None;
1397        }
1398
1399        let layout = self.ecx.layout_of(lhs_ty).ok()?;
1400
1401        let mut as_bits = |value: VnIndex| {
1402            let constant = self.eval_to_const(value)?;
1403            if layout.backend_repr.is_scalar() {
1404                let scalar = self.ecx.read_scalar(constant).discard_err()?;
1405                scalar.to_bits(constant.layout.size).discard_err()
1406            } else {
1407                // `constant` is a wide pointer. Do not evaluate to bits.
1408                None
1409            }
1410        };
1411
1412        // Represent the values as `Left(bits)` or `Right(VnIndex)`.
1413        use Either::{Left, Right};
1414        let a = as_bits(lhs).map_or(Right(lhs), Left);
1415        let b = as_bits(rhs).map_or(Right(rhs), Left);
1416
1417        let result = match (op, a, b) {
1418            // Neutral elements.
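                // E.g. `x + 0`, `0 + x`, `x | 0`, `x ^ 0`, `x - 0`, `x << 0`, `x * 1`, and `x / 1`
                // all simplify to `x`.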
1419            (
1420                BinOp::Add
1421                | BinOp::AddWithOverflow
1422                | BinOp::AddUnchecked
1423                | BinOp::BitOr
1424                | BinOp::BitXor,
1425                Left(0),
1426                Right(p),
1427            )
1428            | (
1429                BinOp::Add
1430                | BinOp::AddWithOverflow
1431                | BinOp::AddUnchecked
1432                | BinOp::BitOr
1433                | BinOp::BitXor
1434                | BinOp::Sub
1435                | BinOp::SubWithOverflow
1436                | BinOp::SubUnchecked
1437                | BinOp::Offset
1438                | BinOp::Shl
1439                | BinOp::Shr,
1440                Right(p),
1441                Left(0),
1442            )
1443            | (BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked, Left(1), Right(p))
1444            | (
1445                BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked | BinOp::Div,
1446                Right(p),
1447                Left(1),
1448            ) => p,
1449            // Attempt to simplify `x & ALL_ONES` to `x`, with `ALL_ONES` depending on type size.
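                // E.g. `x & 0xFF` for a `u8`, `x & u32::MAX` for a `u32`, or `x & true` for a `bool`.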
1450            (BinOp::BitAnd, Right(p), Left(ones)) | (BinOp::BitAnd, Left(ones), Right(p))
1451                if ones == layout.size.truncate(u128::MAX)
1452                    || (layout.ty.is_bool() && ones == 1) =>
1453            {
1454                p
1455            }
1456            // Absorbing elements.
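                // E.g. `x * 0`, `x & 0`, `x % 1`, `0 / x`, and `0 >> x` all simplify to `0`.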
1457            (
1458                BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked | BinOp::BitAnd,
1459                _,
1460                Left(0),
1461            )
1462            | (BinOp::Rem, _, Left(1))
1463            | (
1464                BinOp::Mul
1465                | BinOp::MulWithOverflow
1466                | BinOp::MulUnchecked
1467                | BinOp::Div
1468                | BinOp::Rem
1469                | BinOp::BitAnd
1470                | BinOp::Shl
1471                | BinOp::Shr,
1472                Left(0),
1473                _,
1474            ) => self.insert_scalar(lhs_ty, Scalar::from_uint(0u128, layout.size)),
1475            // Attempt to simplify `x | ALL_ONES` to `ALL_ONES`.
1476            (BinOp::BitOr, _, Left(ones)) | (BinOp::BitOr, Left(ones), _)
1477                if ones == layout.size.truncate(u128::MAX)
1478                    || (layout.ty.is_bool() && ones == 1) =>
1479            {
1480                self.insert_scalar(lhs_ty, Scalar::from_uint(ones, layout.size))
1481            }
1482            // Sub/Xor with itself.
1483            (BinOp::Sub | BinOp::SubWithOverflow | BinOp::SubUnchecked | BinOp::BitXor, a, b)
1484                if a == b =>
1485            {
1486                self.insert_scalar(lhs_ty, Scalar::from_uint(0u128, layout.size))
1487            }
1488            // Comparison:
1489            // - if both operands can be computed as bits, just compare the bits;
1490            // - if we proved that both operands have the same value, we can insert true/false;
1491            // - otherwise, do nothing, as we do not try to prove inequality.
1492            (BinOp::Eq, Left(a), Left(b)) => self.insert_bool(a == b),
1493            (BinOp::Eq, a, b) if a == b => self.insert_bool(true),
1494            (BinOp::Ne, Left(a), Left(b)) => self.insert_bool(a != b),
1495            (BinOp::Ne, a, b) if a == b => self.insert_bool(false),
1496            _ => return None,
1497        };
1498
1499        if op.is_overflowing() {
1500            let ty = Ty::new_tup(self.tcx, &[self.ty(result), self.tcx.types.bool]);
1501            let false_val = self.insert_bool(false);
1502            Some(self.insert_tuple(ty, &[result, false_val]))
1503        } else {
1504            Some(result)
1505        }
1506    }
1507
1508    fn simplify_cast(
1509        &mut self,
1510        initial_kind: &mut CastKind,
1511        initial_operand: &mut Operand<'tcx>,
1512        to: Ty<'tcx>,
1513        location: Location,
1514    ) -> Option<VnIndex> {
1515        use CastKind::*;
1516        use rustc_middle::ty::adjustment::PointerCoercion::*;
1517
1518        let mut kind = *initial_kind;
1519        let mut value = self.simplify_operand(initial_operand, location)?;
1520        let mut from = self.ty(value);
1521        if from == to {
1522            return Some(value);
1523        }
1524
1525        if let CastKind::PointerCoercion(ReifyFnPointer | ClosureFnPointer(_), _) = kind {
1526            // Each reification of a generic fn may get a different pointer.
1527            // Do not try to merge them.
1528            return Some(self.new_opaque(to));
1529        }
1530
1531        let mut was_ever_updated = false;
1532        loop {
1533            let mut was_updated_this_iteration = false;
1534
1535            // Transmuting between raw pointers is just a pointer cast so long as
1536            // they have the same metadata type (like `*const i32` <=> `*mut u64`
1537            // or `*mut [i32]` <=> `*const [u64]`), including the common special
1538            // case of `*const T` <=> `*mut T`.
1539            if let Transmute = kind
1540                && from.is_raw_ptr()
1541                && to.is_raw_ptr()
1542                && self.pointers_have_same_metadata(from, to)
1543            {
1544                kind = PtrToPtr;
1545                was_updated_this_iteration = true;
1546            }
1547
1548            // If a cast just casts away the metadata again, then we can get it by
1549            // casting the original thin pointer passed to `from_raw_parts`
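                // E.g. (illustrative MIR) given `_w = *const [u8] from (_thin, _len)`, a later
                // `_w as *const u8 (PtrToPtr)` can be computed from `_thin` instead of `_w`.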
1550            if let PtrToPtr = kind
1551                && let Value::RawPtr { pointer, .. } = self.get(value)
1552                && let ty::RawPtr(to_pointee, _) = to.kind()
1553                && to_pointee.is_sized(self.tcx, self.typing_env())
1554            {
1555                from = self.ty(pointer);
1556                value = pointer;
1557                was_updated_this_iteration = true;
1558                if from == to {
1559                    return Some(pointer);
1560                }
1561            }
1562
1563            // Aggregate-then-Transmute can just transmute the original field value,
1564            // so long as the bytes of the value come from only a single field.
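                // E.g. transmuting a hypothetical `#[repr(transparent)]` newtype value
                // `Wrapper(x)` can instead transmute `x` itself.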
1565            if let Transmute = kind
1566                && let Value::Aggregate(variant_idx, field_values) = self.get(value)
1567                && let Some((field_idx, field_ty)) =
1568                    self.value_is_all_in_one_field(from, variant_idx)
1569            {
1570                from = field_ty;
1571                value = field_values[field_idx.as_usize()];
1572                was_updated_this_iteration = true;
1573                if field_ty == to {
1574                    return Some(value);
1575                }
1576            }
1577
1578            // Various cast-then-cast cases can be simplified.
1579            if let Value::Cast { kind: inner_kind, value: inner_value } = self.get(value) {
1580                let inner_from = self.ty(inner_value);
1581                let new_kind = match (inner_kind, kind) {
1582                    // Even if there's a narrowing cast in here, that's fine, because
1583                    // things like `*mut [i32] -> *mut i32 -> *const i32` and
1584                    // `*mut [i32] -> *const [i32] -> *const i32` can skip the middle in MIR.
1585                    (PtrToPtr, PtrToPtr) => Some(PtrToPtr),
1586                    // PtrToPtr-then-Transmute is fine so long as the pointer cast is identity:
1587                    // `*const T -> *mut T -> NonNull<T>` is fine, but we need to check for narrowing
1588                    // to skip things like `*const [i32] -> *const i32 -> NonNull<T>`.
1589                    (PtrToPtr, Transmute) if self.pointers_have_same_metadata(inner_from, from) => {
1590                        Some(Transmute)
1591                    }
1592                    // Similarly, for Transmute-then-PtrToPtr. Note that we need to check different
1593                    // variables for their metadata, and thus this can't merge with the previous arm.
1594                    (Transmute, PtrToPtr) if self.pointers_have_same_metadata(from, to) => {
1595                        Some(Transmute)
1596                    }
1597                    // It would be legal to always do this, but we don't want to hide information
1598                    // from the backend that it'd otherwise be able to use for optimizations.
1599                    (Transmute, Transmute)
1600                        if !self.type_may_have_niche_of_interest_to_backend(from) =>
1601                    {
1602                        Some(Transmute)
1603                    }
1604                    _ => None,
1605                };
1606                if let Some(new_kind) = new_kind {
1607                    kind = new_kind;
1608                    from = inner_from;
1609                    value = inner_value;
1610                    was_updated_this_iteration = true;
1611                    if inner_from == to {
1612                        return Some(inner_value);
1613                    }
1614                }
1615            }
1616
1617            if was_updated_this_iteration {
1618                was_ever_updated = true;
1619            } else {
1620                break;
1621            }
1622        }
1623
1624        if was_ever_updated && let Some(op) = self.try_as_operand(value, location) {
1625            *initial_operand = op;
1626            *initial_kind = kind;
1627        }
1628
1629        Some(self.insert(to, Value::Cast { kind, value }))
1630    }
1631
1632    fn pointers_have_same_metadata(&self, left_ptr_ty: Ty<'tcx>, right_ptr_ty: Ty<'tcx>) -> bool {
1633        let left_meta_ty = left_ptr_ty.pointee_metadata_ty_or_projection(self.tcx);
1634        let right_meta_ty = right_ptr_ty.pointee_metadata_ty_or_projection(self.tcx);
1635        if left_meta_ty == right_meta_ty {
1636            true
1637        } else if let Ok(left) =
1638            self.tcx.try_normalize_erasing_regions(self.typing_env(), left_meta_ty)
1639            && let Ok(right) =
1640                self.tcx.try_normalize_erasing_regions(self.typing_env(), right_meta_ty)
1641        {
1642            left == right
1643        } else {
1644            false
1645        }
1646    }
1647
1648    /// Returns `false` if we know for sure that this type has no interesting niche,
1649    /// and thus we can skip transmuting through it without worrying.
1650    ///
1651    /// The backend will emit `assume`s when transmuting between types with niches,
1652    /// so we want to preserve `i32 -> char -> u32` so that that data is around,
1653    /// but it's fine to skip whole-range-is-value steps like `A -> u32 -> B`.
1654    fn type_may_have_niche_of_interest_to_backend(&self, ty: Ty<'tcx>) -> bool {
1655        let Ok(layout) = self.ecx.layout_of(ty) else {
1656            // If it's too generic or something, then assume it might be interesting later.
1657            return true;
1658        };
1659
1660        if layout.uninhabited {
1661            return true;
1662        }
1663
1664        match layout.backend_repr {
1665            BackendRepr::Scalar(a) => !a.is_always_valid(&self.ecx),
1666            BackendRepr::ScalarPair(a, b) => {
1667                !a.is_always_valid(&self.ecx) || !b.is_always_valid(&self.ecx)
1668            }
1669            BackendRepr::SimdVector { .. } | BackendRepr::Memory { .. } => false,
1670        }
1671    }
1672
1673    fn value_is_all_in_one_field(
1674        &self,
1675        ty: Ty<'tcx>,
1676        variant: VariantIdx,
1677    ) -> Option<(FieldIdx, Ty<'tcx>)> {
1678        if let Ok(layout) = self.ecx.layout_of(ty)
1679            && let abi::Variants::Single { index } = layout.variants
1680            && index == variant
1681            && let Some((field_idx, field_layout)) = layout.non_1zst_field(&self.ecx)
1682            && layout.size == field_layout.size
1683        {
1684            // We needed to check the variant to avoid trying to read the tag
1685            // field from an enum where no fields have variants, since that tag
1686            // field isn't in the `Aggregate` from which we're getting values.
1687            Some((field_idx, field_layout.ty))
1688        } else if let ty::Adt(adt, args) = ty.kind()
1689            && adt.is_struct()
1690            && adt.repr().transparent()
1691            && let [single_field] = adt.non_enum_variant().fields.raw.as_slice()
1692        {
1693            Some((FieldIdx::ZERO, single_field.ty(self.tcx, args)))
1694        } else {
1695            None
1696        }
1697    }
1698}
1699
1700fn op_to_prop_const<'tcx>(
1701    ecx: &mut InterpCx<'tcx, DummyMachine>,
1702    op: &OpTy<'tcx>,
1703) -> Option<ConstValue> {
1704    // Do not attempt to propagate unsized locals.
1705    if op.layout.is_unsized() {
1706        return None;
1707    }
1708
1709    // This constant is a ZST, just return an empty value.
1710    if op.layout.is_zst() {
1711        return Some(ConstValue::ZeroSized);
1712    }
1713
1714    // Do not synthesize overly large constants. Codegen will just memcpy them, which we'd like
1715    // to avoid.
1716    if !matches!(op.layout.backend_repr, BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..)) {
1717        return None;
1718    }
1719
1720    // If this constant has scalar ABI, return it as a `ConstValue::Scalar`.
1721    if let BackendRepr::Scalar(abi::Scalar::Initialized { .. }) = op.layout.backend_repr
1722        && let Some(scalar) = ecx.read_scalar(op).discard_err()
1723    {
1724        if scalar.try_to_scalar_int().is_err() {
1725            // Check that we do not leak a pointer.
1726            // Those pointers may lose part of their identity in codegen.
1727            // FIXME: remove this hack once https://github.com/rust-lang/rust/issues/79738 is fixed.
1728            return None;
1729        }
1730        return Some(ConstValue::Scalar(scalar));
1731    }
1732
1733    // If this constant is already represented as an `Allocation`,
1734    // try putting it into global memory to return it.
1735    if let Either::Left(mplace) = op.as_mplace_or_imm() {
1736        let (size, _align) = ecx.size_and_align_of_val(&mplace).discard_err()??;
1737
1738        // Do not try interning a value that contains provenance.
1739        // Due to https://github.com/rust-lang/rust/issues/79738, doing so could lead to bugs.
1740        // FIXME: remove this hack once that issue is fixed.
1741        let alloc_ref = ecx.get_ptr_alloc(mplace.ptr(), size).discard_err()??;
1742        if alloc_ref.has_provenance() {
1743            return None;
1744        }
1745
1746        let pointer = mplace.ptr().into_pointer_or_addr().ok()?;
1747        let (prov, offset) = pointer.prov_and_relative_offset();
1748        let alloc_id = prov.alloc_id();
1749        intern_const_alloc_for_constprop(ecx, alloc_id).discard_err()?;
1750
1751        // `alloc_id` may point to a static. Codegen will choke on an `Indirect` with anything
1752        // but `GlobalAlloc::Memory`, so do fall through to copying if needed.
1753        // FIXME: find a way to treat this more uniformly (probably by fixing codegen)
1754        if let GlobalAlloc::Memory(alloc) = ecx.tcx.global_alloc(alloc_id)
1755            // Transmuting a constant is just an offset in the allocation. If the alignment of the
1756            // allocation is not enough, fall back to copying into a properly aligned value.
1757            && alloc.inner().align >= op.layout.align.abi
1758        {
1759            return Some(ConstValue::Indirect { alloc_id, offset });
1760        }
1761    }
1762
1763    // Everything failed: create a new allocation to hold the data.
1764    let alloc_id =
1765        ecx.intern_with_temp_alloc(op.layout, |ecx, dest| ecx.copy_op(op, dest)).discard_err()?;
1766    let value = ConstValue::Indirect { alloc_id, offset: Size::ZERO };
1767
1768    // Check that we do not leak a pointer.
1769    // Those pointers may lose part of their identity in codegen.
1770    // FIXME: remove this hack once https://github.com/rust-lang/rust/issues/79738 is fixed.
1771    if ecx.tcx.global_alloc(alloc_id).unwrap_memory().inner().provenance().ptrs().is_empty() {
1772        return Some(value);
1773    }
1774
1775    None
1776}
1777
1778impl<'tcx> VnState<'_, '_, 'tcx> {
1779    /// If either [`Self::try_as_constant`] or [`Self::try_as_place`] succeeds,
1780    /// returns that result as an [`Operand`].
1781    fn try_as_operand(&mut self, index: VnIndex, location: Location) -> Option<Operand<'tcx>> {
1782        if let Some(const_) = self.try_as_constant(index) {
1783            Some(Operand::Constant(Box::new(const_)))
1784        } else if let Some(place) = self.try_as_place(index, location, false) {
1785            self.reused_locals.insert(place.local);
1786            Some(Operand::Copy(place))
1787        } else {
1788            None
1789        }
1790    }
1791
1792    /// If `index` is a `Value::Constant`, return the `Constant` to be put in the MIR.
1793    fn try_as_constant(&mut self, index: VnIndex) -> Option<ConstOperand<'tcx>> {
1794        // This was already constant in MIR, do not change it. If the constant is not
1795        // deterministic, adding an additional mention of it in MIR will not give the same value as
1796        // the former mention.
1797        if let Value::Constant { value, disambiguator: None } = self.get(index) {
1798            debug_assert!(value.is_deterministic());
1799            return Some(ConstOperand { span: DUMMY_SP, user_ty: None, const_: value });
1800        }
1801
1802        let op = self.eval_to_const(index)?;
1803        if op.layout.is_unsized() {
1804            // Do not attempt to propagate unsized locals.
1805            return None;
1806        }
1807
1808        let value = op_to_prop_const(&mut self.ecx, op)?;
1809
1810        // Check that we do not leak a pointer.
1811        // Those pointers may lose part of their identity in codegen.
1812        // FIXME: remove this hack once https://github.com/rust-lang/rust/issues/79738 is fixed.
1813        assert!(!value.may_have_provenance(self.tcx, op.layout.size));
1814
1815        let const_ = Const::Val(value, op.layout.ty);
1816        Some(ConstOperand { span: DUMMY_SP, user_ty: None, const_ })
1817    }
1818
1819    /// Construct a place which holds the same value as `index` and for which all locals strictly
1820    /// dominate `loc`. If you use this place, add its base local to `reused_locals` to remove
1821    /// storage statements.
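        /// For instance (a hypothetical case), if `index` is the value of a field projection
        /// `_2.0` and no local holding it dominates `loc`, but `_2` does, this returns the place
        /// `_2.0`.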
1822    #[instrument(level = "trace", skip(self), ret)]
1823    fn try_as_place(
1824        &mut self,
1825        mut index: VnIndex,
1826        loc: Location,
1827        allow_complex_projection: bool,
1828    ) -> Option<Place<'tcx>> {
1829        let mut projection = SmallVec::<[PlaceElem<'tcx>; 1]>::new();
1830        loop {
1831            if let Some(local) = self.try_as_local(index, loc) {
1832                projection.reverse();
1833                let place =
1834                    Place { local, projection: self.tcx.mk_place_elems(projection.as_slice()) };
1835                return Some(place);
1836            } else if projection.last() == Some(&PlaceElem::Deref) {
1837                // `Deref` can only be the first projection in a place.
1838                // If we are here, we failed to find a local, and we already have a `Deref`.
1839                // Trying to add projections will only result in an ill-formed place.
1840                return None;
1841            } else if let Value::Projection(pointer, proj) = self.get(index)
1842                && (allow_complex_projection || proj.is_stable_offset())
1843                && let Some(proj) = self.try_as_place_elem(self.ty(index), proj, loc)
1844            {
1845                projection.push(proj);
1846                index = pointer;
1847            } else {
1848                return None;
1849            }
1850        }
1851    }
1852
1853    /// If there is a local which is assigned `index`, and its assignment strictly dominates `loc`,
1854    /// return it. If you use this local, add it to `reused_locals` to remove storage statements.
1855    fn try_as_local(&mut self, index: VnIndex, loc: Location) -> Option<Local> {
1856        let other = self.rev_locals.get(index)?;
1857        other
1858            .iter()
1859            .find(|&&other| self.ssa.assignment_dominates(&self.dominators, other, loc))
1860            .copied()
1861    }
1862}
1863
1864impl<'tcx> MutVisitor<'tcx> for VnState<'_, '_, 'tcx> {
1865    fn tcx(&self) -> TyCtxt<'tcx> {
1866        self.tcx
1867    }
1868
1869    fn visit_place(&mut self, place: &mut Place<'tcx>, context: PlaceContext, location: Location) {
1870        self.simplify_place_projection(place, location);
1871        if context.is_mutating_use() && place.is_indirect() {
1872            // A non-local mutation may invalidate derefs.
1873            self.invalidate_derefs();
1874        }
1875        self.super_place(place, context, location);
1876    }
1877
1878    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) {
1879        self.simplify_operand(operand, location);
1880        self.super_operand(operand, location);
1881    }
1882
1883    fn visit_assign(
1884        &mut self,
1885        lhs: &mut Place<'tcx>,
1886        rvalue: &mut Rvalue<'tcx>,
1887        location: Location,
1888    ) {
1889        self.simplify_place_projection(lhs, location);
1890
1891        let value = self.simplify_rvalue(lhs, rvalue, location);
1892        if let Some(value) = value {
1893            if let Some(const_) = self.try_as_constant(value) {
1894                *rvalue = Rvalue::Use(Operand::Constant(Box::new(const_)));
1895            } else if let Some(place) = self.try_as_place(value, location, false)
1896                && *rvalue != Rvalue::Use(Operand::Move(place))
1897                && *rvalue != Rvalue::Use(Operand::Copy(place))
1898            {
1899                *rvalue = Rvalue::Use(Operand::Copy(place));
1900                self.reused_locals.insert(place.local);
1901            }
1902        }
1903
1904        if lhs.is_indirect() {
1905            // A non-local mutation may invalidate derefs.
1906            self.invalidate_derefs();
1907        }
1908
1909        if let Some(local) = lhs.as_local()
1910            && self.ssa.is_ssa(local)
1911            && let rvalue_ty = rvalue.ty(self.local_decls, self.tcx)
1912            // FIXME(#112651) `rvalue` may have a subtype to `local`. We can only mark
1913            // `local` as reusable if we have an exact type match.
1914            && self.local_decls[local].ty == rvalue_ty
1915        {
1916            let value = value.unwrap_or_else(|| self.new_opaque(rvalue_ty));
1917            self.assign(local, value);
1918        }
1919    }
1920
1921    fn visit_terminator(&mut self, terminator: &mut Terminator<'tcx>, location: Location) {
1922        if let Terminator { kind: TerminatorKind::Call { destination, .. }, .. } = terminator {
1923            if let Some(local) = destination.as_local()
1924                && self.ssa.is_ssa(local)
1925            {
1926                let ty = self.local_decls[local].ty;
1927                let opaque = self.new_opaque(ty);
1928                self.assign(local, opaque);
1929            }
1930        }
1931        // Terminators that can write to memory may invalidate (nested) derefs.
1932        if terminator.kind.can_write_to_memory() {
1933            self.invalidate_derefs();
1934        }
1935        self.super_terminator(terminator, location);
1936    }
1937}
1938
1939struct StorageRemover<'tcx> {
1940    tcx: TyCtxt<'tcx>,
1941    reused_locals: DenseBitSet<Local>,
1942}
1943
1944impl<'tcx> MutVisitor<'tcx> for StorageRemover<'tcx> {
1945    fn tcx(&self) -> TyCtxt<'tcx> {
1946        self.tcx
1947    }
1948
1949    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, _: Location) {
1950        if let Operand::Move(place) = *operand
1951            && !place.is_indirect_first_projection()
1952            && self.reused_locals.contains(place.local)
1953        {
1954            *operand = Operand::Copy(place);
1955        }
1956    }
1957
1958    fn visit_statement(&mut self, stmt: &mut Statement<'tcx>, loc: Location) {
1959        match stmt.kind {
1960            // When removing storage statements, we need to remove both (#107511).
1961            StatementKind::StorageLive(l) | StatementKind::StorageDead(l)
1962                if self.reused_locals.contains(l) =>
1963            {
1964                stmt.make_nop(true)
1965            }
1966            _ => self.super_statement(stmt, loc),
1967        }
1968    }
1969}