// rustc_mir_transform/gvn.rs — Global value numbering pass.

//! Global value numbering.
//!
//! MIR may contain repeated and/or redundant computations. The objective of this pass is to detect
//! such redundancies and re-use the already-computed result when possible.
//!
//! From those assignments, we construct a mapping `VnIndex -> Vec<(Local, Location)>` of available
//! values, the locals in which they are stored, and the assignment location.
//!
//! We traverse all assignments `x = rvalue` and operands.
//!
//! For each SSA one, we compute a symbolic representation of values that are assigned to SSA
//! locals. This symbolic representation is defined by the `Value` enum. Each produced instance of
//! `Value` is interned as a `VnIndex`, which allows us to cheaply compute identical values.
//!
//! For each non-SSA one, we compute the `VnIndex` of the rvalue. If this `VnIndex` is associated
//! to a constant, we replace the rvalue/operand by that constant. Otherwise, if there is an SSA
//! local `y` associated to this `VnIndex`, and if its definition location strictly dominates the
//! assignment to `x`, we replace the assignment by `x = y`.
//!
//! Opportunistically, this pass simplifies some `Rvalue`s based on the accumulated knowledge.
//!
//! # Operational semantics
//!
//! Operationally, this pass attempts to prove bitwise equality between locals. Given this MIR:
//! ```ignore (MIR)
//! _a = some value // has VnIndex i
//! // some MIR
//! _b = some other value // also has VnIndex i
//! ```
//!
//! We consider it to be replaceable by:
//! ```ignore (MIR)
//! _a = some value // has VnIndex i
//! // some MIR
//! _c = some other value // also has VnIndex i
//! assume(_a bitwise equal to _c) // follows from having the same VnIndex
//! _b = _a // follows from the `assume`
//! ```
//!
//! Which is simplifiable to:
//! ```ignore (MIR)
//! _a = some value // has VnIndex i
//! // some MIR
//! _b = _a
//! ```
//!
//! # Handling of references
//!
//! We handle references by assigning a different "provenance" index to each Ref/RawPtr rvalue.
//! This ensures that we do not spuriously merge borrows that should not be merged. For instance:
//! ```ignore (MIR)
//! _x = &_a;
//! _a = 0;
//! _y = &_a; // cannot be turned into `_y = _x`!
//! ```
//!
//! On top of that, we consider all the derefs of an immutable reference to a freeze type to give
//! the same value:
//! ```ignore (MIR)
//! _a = *_b // _b is &Freeze
//! _c = *_b // replaced by _c = _a
//! ```
//!
//! # Determinism of constant propagation
//!
//! When registering a new `Value`, we attempt to opportunistically evaluate it as a constant.
//! The evaluated form is inserted in `evaluated` as an `OpTy` or `None` if evaluation failed.
//!
//! The difficulty is non-deterministic evaluation of MIR constants. Some `Const` can have
//! different runtime values each time they are evaluated. This happens with valtrees that
//! generate a new allocation each time they are used. This is checked by `is_deterministic`.
//!
//! Meanwhile, we want to be able to read indirect constants. For instance:
//! ```
//! static A: &'static &'static u8 = &&63;
//! fn foo() -> u8 {
//!     **A // We want to replace by 63.
//! }
//! fn bar() -> u8 {
//!     b"abc"[1] // We want to replace by 'b'.
//! }
//! ```
//!
//! The `Value::Constant` variant stores a possibly unevaluated constant. Evaluating that constant
//! may be non-deterministic. When that happens, we assign a disambiguator to ensure that we do not
//! merge the constants. See `duplicate_slice` test in `gvn.rs`.
//!
//! Conversely, some constants cannot cross function boundaries, which could happen because of
//! inlining. For instance, constants that contain a fn pointer (`AllocId` pointing to a
//! `GlobalAlloc::Function`) point to a different symbol in each codegen unit. To avoid this,
//! when writing constants in MIR, we do not write `Const`s that contain `AllocId`s. This is
//! checked by `may_have_provenance`. See <https://github.com/rust-lang/rust/issues/128775> for
//! more information.

96use std::borrow::Cow;
97use std::hash::{Hash, Hasher};
98
99use either::Either;
100use itertools::Itertools as _;
101use rustc_abi::{self as abi, BackendRepr, FIRST_VARIANT, FieldIdx, Primitive, Size, VariantIdx};
102use rustc_arena::DroplessArena;
103use rustc_const_eval::const_eval::DummyMachine;
104use rustc_const_eval::interpret::{
105    ImmTy, Immediate, InterpCx, MemPlaceMeta, MemoryKind, OpTy, Projectable, Scalar,
106    intern_const_alloc_for_constprop,
107};
108use rustc_data_structures::fx::FxHasher;
109use rustc_data_structures::graph::dominators::Dominators;
110use rustc_data_structures::hash_table::{Entry, HashTable};
111use rustc_hir::def::DefKind;
112use rustc_index::bit_set::DenseBitSet;
113use rustc_index::{IndexVec, newtype_index};
114use rustc_middle::bug;
115use rustc_middle::mir::interpret::{AllocRange, GlobalAlloc};
116use rustc_middle::mir::visit::*;
117use rustc_middle::mir::*;
118use rustc_middle::ty::layout::HasTypingEnv;
119use rustc_middle::ty::{self, Ty, TyCtxt};
120use rustc_span::DUMMY_SP;
121use smallvec::SmallVec;
122use tracing::{debug, instrument, trace};
123
124use crate::ssa::SsaLocals;
125
/// Zero-sized pass marker; all per-body state lives in `VnState`.
pub(super) struct GVN;
127
impl<'tcx> crate::MirPass<'tcx> for GVN {
    fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
        // Only run at mir-opt-level >= 2.
        sess.mir_opt_level() >= 2
    }

    #[instrument(level = "trace", skip(self, tcx, body))]
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        debug!(def_id = ?body.source.def_id());

        let typing_env = body.typing_env(tcx);
        let ssa = SsaLocals::new(tcx, body, typing_env);
        // Clone dominators because we need them while mutating the body.
        let dominators = body.basic_blocks.dominators().clone();

        let arena = DroplessArena::default();
        let mut state =
            VnState::new(tcx, body, typing_env, &ssa, dominators, &body.local_decls, &arena);

        // Seed each SSA function argument with a fresh, non-unifiable value.
        for local in body.args_iter().filter(|&local| ssa.is_ssa(local)) {
            let opaque = state.new_argument(body.local_decls[local].ty);
            state.assign(local, opaque);
        }

        // Visit blocks in reverse postorder, so dominating definitions are
        // processed before their uses.
        let reverse_postorder = body.basic_blocks.reverse_postorder().to_vec();
        for bb in reverse_postorder {
            let data = &mut body.basic_blocks.as_mut_preserves_cfg()[bb];
            state.visit_basic_block_data(bb, data);
        }

        // For each local that is reused (`y` above), we remove its storage statements to avoid any
        // difficulty. Those locals are SSA, so should be easy to optimize by LLVM without storage
        // statements.
        StorageRemover { tcx, reused_locals: state.reused_locals }.visit_body_preserves_cfg(body);
    }

    fn is_required(&self) -> bool {
        false
    }
}
167
newtype_index! {
    /// This represents a `Value` in the symbolic execution.
    /// Indices are dense, so they also key the `evaluated` and `rev_locals` side tables.
    #[debug_format = "_v{}"]
    struct VnIndex {}
}
173
/// Marker type to forbid hashing and comparing opaque values.
/// This struct should only be constructed by `ValueSet::insert_unique` to ensure we use that
/// method to create non-unifiable values. It will ICE if used in `ValueSet::insert`.
#[derive(Copy, Clone, Debug, Eq)]
struct VnOpaque;

impl PartialEq for VnOpaque {
    fn eq(&self, _other: &Self) -> bool {
        // Unique values must never be unified through equality: reaching this
        // comparison means one went through `ValueSet::insert`. ICE.
        unreachable!()
    }
}

impl Hash for VnOpaque {
    fn hash<H: Hasher>(&self, _state: &mut H) {
        // Unique values must never be hashed for deduplication: reaching this
        // means one went through `ValueSet::insert`. ICE.
        unreachable!()
    }
}
191
/// Distinguishes which kind of address-of operation created an `Address` value.
/// This is part of the value's identity.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
enum AddressKind {
    /// A borrow (`&`/`&mut`), with its precise `BorrowKind`.
    Ref(BorrowKind),
    /// A raw-pointer creation, with its precise `RawPtrKind`.
    Address(RawPtrKind),
}
197
/// Base of an `Address` value: what the projected place is rooted at.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
enum AddressBase {
    /// This address is based on this local.
    Local(Local),
    /// This address is based on the deref of this pointer.
    Deref(VnIndex),
}
205
/// Symbolic representation of a runtime value; two locals holding the same
/// `VnIndex` are considered bitwise-equal (see module docs).
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
enum Value<'a, 'tcx> {
    // Root values.
    /// Used to represent values we know nothing about.
    Opaque(VnOpaque),
    /// The value is an argument.
    Argument(VnOpaque),
    /// Evaluated or unevaluated constant value.
    Constant {
        value: Const<'tcx>,
        /// Some constants do not have a deterministic value. To avoid merging two instances of the
        /// same `Const`, we assign them an additional integer index.
        // `disambiguator` is `None` iff the constant is deterministic.
        disambiguator: Option<VnOpaque>,
    },

    // Aggregates.
    /// An aggregate value, either tuple/closure/struct/enum.
    /// This does not contain unions, as we cannot reason with the value.
    Aggregate(VariantIdx, &'a [VnIndex]),
    /// A union aggregate value: the active field index and that field's value.
    Union(FieldIdx, VnIndex),
    /// A raw pointer aggregate built from a thin pointer and metadata.
    RawPtr {
        /// Thin pointer component. This is field 0 in MIR.
        pointer: VnIndex,
        /// Metadata component. This is field 1 in MIR.
        metadata: VnIndex,
    },
    /// This corresponds to a `[value; count]` expression.
    Repeat(VnIndex, ty::Const<'tcx>),
    /// The address of a place.
    Address {
        base: AddressBase,
        // We do not use a plain `Place` as we want to be able to reason about indices.
        // This does not contain any `Deref` projection.
        projection: &'a [ProjectionElem<VnIndex, Ty<'tcx>>],
        kind: AddressKind,
        /// Give each borrow and pointer a different provenance, so we don't merge them.
        provenance: VnOpaque,
    },

    // Extractions.
    /// This is the *value* obtained by projecting another value.
    Projection(VnIndex, ProjectionElem<VnIndex, ()>),
    /// Discriminant of the given value.
    Discriminant(VnIndex),

    // Operations.
    /// A runtime-check operand; kept symbolic, never evaluated to a constant here.
    RuntimeChecks(RuntimeChecks),
    /// Unary operation applied to a value.
    UnaryOp(UnOp, VnIndex),
    /// Binary operation applied to two values.
    BinaryOp(BinOp, VnIndex, VnIndex),
    /// Cast of a value.
    Cast {
        kind: CastKind,
        value: VnIndex,
    },
}
263
/// Stores and deduplicates pairs of `(Value, Ty)` into `VnIndex` numbered values.
///
/// This data structure is mostly a partial reimplementation of `FxIndexMap<VnIndex, (Value, Ty)>`.
/// We do not use a regular `FxIndexMap` to skip hashing values that are unique by construction,
/// like opaque values, address with provenance and non-deterministic constants.
struct ValueSet<'a, 'tcx> {
    /// Deduplication table; only values inserted through `insert` appear here.
    indices: HashTable<VnIndex>,
    /// Hash of each interned `(Value, Ty)` pair (0 for unique values).
    hashes: IndexVec<VnIndex, u64>,
    /// The interned values themselves.
    values: IndexVec<VnIndex, Value<'a, 'tcx>>,
    /// The type of each interned value.
    types: IndexVec<VnIndex, Ty<'tcx>>,
}
275
276impl<'a, 'tcx> ValueSet<'a, 'tcx> {
277    fn new(num_values: usize) -> ValueSet<'a, 'tcx> {
278        ValueSet {
279            indices: HashTable::with_capacity(num_values),
280            hashes: IndexVec::with_capacity(num_values),
281            values: IndexVec::with_capacity(num_values),
282            types: IndexVec::with_capacity(num_values),
283        }
284    }
285
286    /// Insert a `(Value, Ty)` pair without hashing or deduplication.
287    /// This always creates a new `VnIndex`.
288    #[inline]
289    fn insert_unique(
290        &mut self,
291        ty: Ty<'tcx>,
292        value: impl FnOnce(VnOpaque) -> Value<'a, 'tcx>,
293    ) -> VnIndex {
294        let value = value(VnOpaque);
295
296        debug_assert!(match value {
297            Value::Opaque(_) | Value::Argument(_) | Value::Address { .. } => true,
298            Value::Constant { disambiguator, .. } => disambiguator.is_some(),
299            _ => false,
300        });
301
302        let index = self.hashes.push(0);
303        let _index = self.types.push(ty);
304        debug_assert_eq!(index, _index);
305        let _index = self.values.push(value);
306        debug_assert_eq!(index, _index);
307        index
308    }
309
310    /// Insert a `(Value, Ty)` pair to be deduplicated.
311    /// Returns `true` as second tuple field if this value did not exist previously.
312    #[allow(rustc::disallowed_pass_by_ref)] // closures take `&VnIndex`
313    fn insert(&mut self, ty: Ty<'tcx>, value: Value<'a, 'tcx>) -> (VnIndex, bool) {
314        debug_assert!(match value {
315            Value::Opaque(_) | Value::Address { .. } => false,
316            Value::Constant { disambiguator, .. } => disambiguator.is_none(),
317            _ => true,
318        });
319
320        let hash: u64 = {
321            let mut h = FxHasher::default();
322            value.hash(&mut h);
323            ty.hash(&mut h);
324            h.finish()
325        };
326
327        let eq = |index: &VnIndex| self.values[*index] == value && self.types[*index] == ty;
328        let hasher = |index: &VnIndex| self.hashes[*index];
329        match self.indices.entry(hash, eq, hasher) {
330            Entry::Occupied(entry) => {
331                let index = *entry.get();
332                (index, false)
333            }
334            Entry::Vacant(entry) => {
335                let index = self.hashes.push(hash);
336                entry.insert(index);
337                let _index = self.values.push(value);
338                debug_assert_eq!(index, _index);
339                let _index = self.types.push(ty);
340                debug_assert_eq!(index, _index);
341                (index, true)
342            }
343        }
344    }
345
346    /// Return the `Value` associated with the given `VnIndex`.
347    #[inline]
348    fn value(&self, index: VnIndex) -> Value<'a, 'tcx> {
349        self.values[index]
350    }
351
352    /// Return the type associated with the given `VnIndex`.
353    #[inline]
354    fn ty(&self, index: VnIndex) -> Ty<'tcx> {
355        self.types[index]
356    }
357}
358
/// Per-body state of the GVN pass.
struct VnState<'body, 'a, 'tcx> {
    tcx: TyCtxt<'tcx>,
    /// Interpreter used for opportunistic constant evaluation.
    ecx: InterpCx<'tcx, DummyMachine>,
    local_decls: &'body LocalDecls<'tcx>,
    /// Whether the body is a coroutine; layout queries are avoided there
    /// (see `eval_to_const_inner`) as they can cause cycles.
    is_coroutine: bool,
    /// Value stored in each local.
    locals: IndexVec<Local, Option<VnIndex>>,
    /// Locals that are assigned that value.
    // This vector does not hold all the values of `VnIndex` that we create.
    rev_locals: IndexVec<VnIndex, SmallVec<[Local; 1]>>,
    /// Interner mapping `(Value, Ty)` pairs to `VnIndex`es.
    values: ValueSet<'a, 'tcx>,
    /// Values evaluated as constants if possible.
    /// - `None` are values not computed yet;
    /// - `Some(None)` are values for which computation has failed;
    /// - `Some(Some(op))` are successful computations.
    evaluated: IndexVec<VnIndex, Option<Option<&'a OpTy<'tcx>>>>,
    ssa: &'body SsaLocals,
    /// Dominator tree, cloned from the body in `run_pass`.
    dominators: Dominators<BasicBlock>,
    /// Locals that replaced another occurrence; their storage statements are
    /// removed by `StorageRemover` at the end of the pass.
    reused_locals: DenseBitSet<Local>,
    /// Arena backing interned slices and evaluated constants.
    arena: &'a DroplessArena,
}
380
381impl<'body, 'a, 'tcx> VnState<'body, 'a, 'tcx> {
382    fn new(
383        tcx: TyCtxt<'tcx>,
384        body: &Body<'tcx>,
385        typing_env: ty::TypingEnv<'tcx>,
386        ssa: &'body SsaLocals,
387        dominators: Dominators<BasicBlock>,
388        local_decls: &'body LocalDecls<'tcx>,
389        arena: &'a DroplessArena,
390    ) -> Self {
391        // Compute a rough estimate of the number of values in the body from the number of
392        // statements. This is meant to reduce the number of allocations, but it's all right if
393        // we miss the exact amount. We estimate based on 2 values per statement (one in LHS and
394        // one in RHS) and 4 values per terminator (for call operands).
395        let num_values =
396            2 * body.basic_blocks.iter().map(|bbdata| bbdata.statements.len()).sum::<usize>()
397                + 4 * body.basic_blocks.len();
398        VnState {
399            tcx,
400            ecx: InterpCx::new(tcx, DUMMY_SP, typing_env, DummyMachine),
401            local_decls,
402            is_coroutine: body.coroutine.is_some(),
403            locals: IndexVec::from_elem(None, local_decls),
404            rev_locals: IndexVec::with_capacity(num_values),
405            values: ValueSet::new(num_values),
406            evaluated: IndexVec::with_capacity(num_values),
407            ssa,
408            dominators,
409            reused_locals: DenseBitSet::new_empty(local_decls.len()),
410            arena,
411        }
412    }
413
    /// The typing environment used for layout and const-eval queries.
    fn typing_env(&self) -> ty::TypingEnv<'tcx> {
        self.ecx.typing_env()
    }
417
418    fn insert_unique(
419        &mut self,
420        ty: Ty<'tcx>,
421        value: impl FnOnce(VnOpaque) -> Value<'a, 'tcx>,
422    ) -> VnIndex {
423        let index = self.values.insert_unique(ty, value);
424        let _index = self.evaluated.push(None);
425        debug_assert_eq!(index, _index);
426        let _index = self.rev_locals.push(SmallVec::new());
427        debug_assert_eq!(index, _index);
428        index
429    }
430
431    #[instrument(level = "trace", skip(self), ret)]
432    fn insert(&mut self, ty: Ty<'tcx>, value: Value<'a, 'tcx>) -> VnIndex {
433        let (index, new) = self.values.insert(ty, value);
434        if new {
435            // Grow `evaluated` and `rev_locals` here to amortize the allocations.
436            let _index = self.evaluated.push(None);
437            debug_assert_eq!(index, _index);
438            let _index = self.rev_locals.push(SmallVec::new());
439            debug_assert_eq!(index, _index);
440        }
441        index
442    }
443
444    /// Create a new `Value` for which we have no information at all, except that it is distinct
445    /// from all the others.
446    #[instrument(level = "trace", skip(self), ret)]
447    fn new_opaque(&mut self, ty: Ty<'tcx>) -> VnIndex {
448        let index = self.insert_unique(ty, Value::Opaque);
449        self.evaluated[index] = Some(None);
450        index
451    }
452
453    #[instrument(level = "trace", skip(self), ret)]
454    fn new_argument(&mut self, ty: Ty<'tcx>) -> VnIndex {
455        let index = self.insert_unique(ty, Value::Argument);
456        self.evaluated[index] = Some(None);
457        index
458    }
459
    /// Create a new `Value::Address` distinct from all the others.
    ///
    /// Returns `None` when the place cannot be represented symbolically:
    /// its base is a non-SSA local, or a projection index has no known value.
    #[instrument(level = "trace", skip(self), ret)]
    fn new_pointer(&mut self, place: Place<'tcx>, kind: AddressKind) -> Option<VnIndex> {
        let pty = place.ty(self.local_decls, self.tcx).ty;
        // Reconstruct the type of the borrow/raw pointer being created.
        let ty = match kind {
            AddressKind::Ref(bk) => {
                Ty::new_ref(self.tcx, self.tcx.lifetimes.re_erased, pty, bk.to_mutbl_lossy())
            }
            AddressKind::Address(mutbl) => Ty::new_ptr(self.tcx, pty, mutbl.to_mutbl_lossy()),
        };

        let mut projection = place.projection.iter();
        let base = if place.is_indirect_first_projection() {
            let base = self.locals[place.local]?;
            // Skip the initial `Deref`.
            projection.next();
            AddressBase::Deref(base)
        } else if self.ssa.is_ssa(place.local) {
            // Only propagate the pointer of the SSA local.
            AddressBase::Local(place.local)
        } else {
            return None;
        };
        // Do not try evaluating inside `Index`, this has been done by `simplify_place_projection`.
        // Any projection index without a known value aborts via the `ok_or(())`/`ok()?` chain.
        let projection =
            projection.map(|proj| proj.try_map(|index| self.locals[index], |ty| ty).ok_or(()));
        let projection = self.arena.try_alloc_from_iter(projection).ok()?;

        // `insert_unique` hands out a fresh provenance so distinct borrows never unify.
        let index = self.insert_unique(ty, |provenance| Value::Address {
            base,
            projection,
            kind,
            provenance,
        });
        Some(index)
    }
496
497    #[instrument(level = "trace", skip(self), ret)]
498    fn insert_constant(&mut self, value: Const<'tcx>) -> VnIndex {
499        if is_deterministic(value) {
500            // The constant is deterministic, no need to disambiguate.
501            let constant = Value::Constant { value, disambiguator: None };
502            self.insert(value.ty(), constant)
503        } else {
504            // Multiple mentions of this constant will yield different values,
505            // so assign a different `disambiguator` to ensure they do not get the same `VnIndex`.
506            self.insert_unique(value.ty(), |disambiguator| Value::Constant {
507                value,
508                disambiguator: Some(disambiguator),
509            })
510        }
511    }
512
    /// Return the `Value` associated with the given `VnIndex`.
    #[inline]
    fn get(&self, index: VnIndex) -> Value<'a, 'tcx> {
        self.values.value(index)
    }
517
    /// Return the type associated with the given `VnIndex`.
    #[inline]
    fn ty(&self, index: VnIndex) -> Ty<'tcx> {
        self.values.ty(index)
    }
522
523    /// Record that `local` is assigned `value`. `local` must be SSA.
524    #[instrument(level = "trace", skip(self))]
525    fn assign(&mut self, local: Local, value: VnIndex) {
526        debug_assert!(self.ssa.is_ssa(local));
527        self.locals[local] = Some(value);
528        self.rev_locals[value].push(local);
529    }
530
531    fn insert_bool(&mut self, flag: bool) -> VnIndex {
532        // Booleans are deterministic.
533        let value = Const::from_bool(self.tcx, flag);
534        debug_assert!(is_deterministic(value));
535        self.insert(self.tcx.types.bool, Value::Constant { value, disambiguator: None })
536    }
537
538    fn insert_scalar(&mut self, ty: Ty<'tcx>, scalar: Scalar) -> VnIndex {
539        // Scalars are deterministic.
540        let value = Const::from_scalar(self.tcx, scalar, ty);
541        debug_assert!(is_deterministic(value));
542        self.insert(ty, Value::Constant { value, disambiguator: None })
543    }
544
545    fn insert_tuple(&mut self, ty: Ty<'tcx>, values: &[VnIndex]) -> VnIndex {
546        self.insert(ty, Value::Aggregate(VariantIdx::ZERO, self.arena.alloc_slice(values)))
547    }
548
    /// Try to evaluate `value` to a concrete constant `OpTy`.
    ///
    /// Returns `None` when the value is symbolic (opaque, argument, runtime
    /// check) or when interpretation fails. Callers should go through
    /// `eval_to_const`, which memoizes the result per `VnIndex`.
    #[instrument(level = "trace", skip(self), ret)]
    fn eval_to_const_inner(&mut self, value: VnIndex) -> Option<OpTy<'tcx>> {
        use Value::*;
        let ty = self.ty(value);
        // Avoid computing layouts inside a coroutine, as that can cause cycles.
        let ty = if !self.is_coroutine || ty.is_scalar() {
            self.ecx.layout_of(ty).ok()?
        } else {
            return None;
        };
        let op = match self.get(value) {
            // A ZST has a single possible value, representable as uninit.
            _ if ty.is_zst() => ImmTy::uninit(ty).into(),

            Opaque(_) | Argument(_) => return None,
            // Keep runtime check constants as symbolic.
            RuntimeChecks(..) => return None,

            // In general, evaluating repeat expressions just consumes a lot of memory.
            // But in the special case that the element is just Immediate::Uninit, we can evaluate
            // it without extra memory! If we don't propagate uninit values like this, LLVM can get
            // very confused: https://github.com/rust-lang/rust/issues/139355
            Repeat(value, _count) => {
                let value = self.eval_to_const(value)?;
                if value.is_immediate_uninit() {
                    ImmTy::uninit(ty).into()
                } else {
                    return None;
                }
            }
            Constant { ref value, disambiguator: _ } => {
                self.ecx.eval_mir_constant(value, DUMMY_SP, None).discard_err()?
            }
            Aggregate(variant, ref fields) => {
                // All fields must themselves evaluate to constants.
                let fields =
                    fields.iter().map(|&f| self.eval_to_const(f)).collect::<Option<Vec<_>>>()?;
                let variant = if ty.ty.is_enum() { Some(variant) } else { None };
                // Only materialize aggregates with a scalar(-pair) representation.
                let (BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..)) = ty.backend_repr
                else {
                    return None;
                };
                let dest = self.ecx.allocate(ty, MemoryKind::Stack).discard_err()?;
                let variant_dest = if let Some(variant) = variant {
                    self.ecx.project_downcast(&dest, variant).discard_err()?
                } else {
                    dest.clone()
                };
                for (field_index, op) in fields.into_iter().enumerate() {
                    let field_dest = self
                        .ecx
                        .project_field(&variant_dest, FieldIdx::from_usize(field_index))
                        .discard_err()?;
                    self.ecx.copy_op(op, &field_dest).discard_err()?;
                }
                self.ecx
                    .write_discriminant(variant.unwrap_or(FIRST_VARIANT), &dest)
                    .discard_err()?;
                self.ecx
                    .alloc_mark_immutable(dest.ptr().provenance.unwrap().alloc_id())
                    .discard_err()?;
                dest.into()
            }
            Union(active_field, field) => {
                let field = self.eval_to_const(field)?;
                if field.layout.layout.is_zst() {
                    // A ZST active field leaves the whole union uninit.
                    ImmTy::from_immediate(Immediate::Uninit, ty).into()
                } else if matches!(
                    ty.backend_repr,
                    BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..)
                ) {
                    // Materialize the union by writing its active field.
                    let dest = self.ecx.allocate(ty, MemoryKind::Stack).discard_err()?;
                    let field_dest = self.ecx.project_field(&dest, active_field).discard_err()?;
                    self.ecx.copy_op(field, &field_dest).discard_err()?;
                    self.ecx
                        .alloc_mark_immutable(dest.ptr().provenance.unwrap().alloc_id())
                        .discard_err()?;
                    dest.into()
                } else {
                    return None;
                }
            }
            RawPtr { pointer, metadata } => {
                let pointer = self.eval_to_const(pointer)?;
                let metadata = self.eval_to_const(metadata)?;

                // Pointers don't have fields, so don't `project_field` them.
                let data = self.ecx.read_pointer(pointer).discard_err()?;
                let meta = if metadata.layout.is_zst() {
                    MemPlaceMeta::None
                } else {
                    MemPlaceMeta::Meta(self.ecx.read_scalar(metadata).discard_err()?)
                };
                let ptr_imm = Immediate::new_pointer_with_meta(data, meta, &self.ecx);
                ImmTy::from_immediate(ptr_imm, ty).into()
            }

            Projection(base, elem) => {
                let base = self.eval_to_const(base)?;
                // `Index` by constants should have been replaced by `ConstantIndex` by
                // `simplify_place_projection`.
                let elem = elem.try_map(|_| None, |()| ty.ty)?;
                self.ecx.project(base, elem).discard_err()?
            }
            Address { base, projection, .. } => {
                debug_assert!(!projection.contains(&ProjectionElem::Deref));
                let pointer = match base {
                    AddressBase::Deref(pointer) => self.eval_to_const(pointer)?,
                    // We have no stack to point to.
                    AddressBase::Local(_) => return None,
                };
                let mut mplace = self.ecx.deref_pointer(pointer).discard_err()?;
                for elem in projection {
                    // `Index` by constants should have been replaced by `ConstantIndex` by
                    // `simplify_place_projection`.
                    let elem = elem.try_map(|_| None, |ty| ty)?;
                    mplace = self.ecx.project(&mplace, elem).discard_err()?;
                }
                let pointer = mplace.to_ref(&self.ecx);
                ImmTy::from_immediate(pointer, ty).into()
            }

            Discriminant(base) => {
                let base = self.eval_to_const(base)?;
                let variant = self.ecx.read_discriminant(base).discard_err()?;
                let discr_value =
                    self.ecx.discriminant_for_variant(base.layout.ty, variant).discard_err()?;
                discr_value.into()
            }
            UnaryOp(un_op, operand) => {
                let operand = self.eval_to_const(operand)?;
                let operand = self.ecx.read_immediate(operand).discard_err()?;
                let val = self.ecx.unary_op(un_op, &operand).discard_err()?;
                val.into()
            }
            BinaryOp(bin_op, lhs, rhs) => {
                let lhs = self.eval_to_const(lhs)?;
                let rhs = self.eval_to_const(rhs)?;
                let lhs = self.ecx.read_immediate(lhs).discard_err()?;
                let rhs = self.ecx.read_immediate(rhs).discard_err()?;
                let val = self.ecx.binary_op(bin_op, &lhs, &rhs).discard_err()?;
                val.into()
            }
            Cast { kind, value } => match kind {
                CastKind::IntToInt | CastKind::IntToFloat => {
                    let value = self.eval_to_const(value)?;
                    let value = self.ecx.read_immediate(value).discard_err()?;
                    let res = self.ecx.int_to_int_or_float(&value, ty).discard_err()?;
                    res.into()
                }
                CastKind::FloatToFloat | CastKind::FloatToInt => {
                    let value = self.eval_to_const(value)?;
                    let value = self.ecx.read_immediate(value).discard_err()?;
                    let res = self.ecx.float_to_float_or_int(&value, ty).discard_err()?;
                    res.into()
                }
                CastKind::Transmute | CastKind::Subtype => {
                    let value = self.eval_to_const(value)?;
                    // `offset` for immediates generally only supports projections that match the
                    // type of the immediate. However, as a HACK, we exploit that it can also do
                    // limited transmutes: it only works between types with the same layout, and
                    // cannot transmute pointers to integers.
                    if value.as_mplace_or_imm().is_right() {
                        let can_transmute = match (value.layout.backend_repr, ty.backend_repr) {
                            (BackendRepr::Scalar(s1), BackendRepr::Scalar(s2)) => {
                                s1.size(&self.ecx) == s2.size(&self.ecx)
                                    && !matches!(s1.primitive(), Primitive::Pointer(..))
                            }
                            (BackendRepr::ScalarPair(a1, b1), BackendRepr::ScalarPair(a2, b2)) => {
                                a1.size(&self.ecx) == a2.size(&self.ecx)
                                    && b1.size(&self.ecx) == b2.size(&self.ecx)
                                    // The alignment of the second component determines its offset, so that also needs to match.
                                    && b1.align(&self.ecx) == b2.align(&self.ecx)
                                    // None of the inputs may be a pointer.
                                    && !matches!(a1.primitive(), Primitive::Pointer(..))
                                    && !matches!(b1.primitive(), Primitive::Pointer(..))
                            }
                            _ => false,
                        };
                        if !can_transmute {
                            return None;
                        }
                    }
                    value.offset(Size::ZERO, ty, &self.ecx).discard_err()?
                }
                CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _) => {
                    let src = self.eval_to_const(value)?;
                    let dest = self.ecx.allocate(ty, MemoryKind::Stack).discard_err()?;
                    self.ecx.unsize_into(src, ty, &dest).discard_err()?;
                    self.ecx
                        .alloc_mark_immutable(dest.ptr().provenance.unwrap().alloc_id())
                        .discard_err()?;
                    dest.into()
                }
                CastKind::FnPtrToPtr | CastKind::PtrToPtr => {
                    let src = self.eval_to_const(value)?;
                    let src = self.ecx.read_immediate(src).discard_err()?;
                    let ret = self.ecx.ptr_to_ptr(&src, ty).discard_err()?;
                    ret.into()
                }
                CastKind::PointerCoercion(ty::adjustment::PointerCoercion::UnsafeFnPointer, _) => {
                    // The representation is unchanged; only the type differs.
                    let src = self.eval_to_const(value)?;
                    let src = self.ecx.read_immediate(src).discard_err()?;
                    ImmTy::from_immediate(*src, ty).into()
                }
                _ => return None,
            },
        };
        Some(op)
    }
757
758    fn eval_to_const(&mut self, index: VnIndex) -> Option<&'a OpTy<'tcx>> {
759        if let Some(op) = self.evaluated[index] {
760            return op;
761        }
762        let op = self.eval_to_const_inner(index);
763        self.evaluated[index] = Some(self.arena.alloc(op).as_ref());
764        self.evaluated[index].unwrap()
765    }
766
767    /// Represent the *value* we obtain by dereferencing an `Address` value.
768    #[instrument(level = "trace", skip(self), ret)]
769    fn dereference_address(
770        &mut self,
771        base: AddressBase,
772        projection: &[ProjectionElem<VnIndex, Ty<'tcx>>],
773    ) -> Option<VnIndex> {
774        let (mut place_ty, mut value) = match base {
775            // The base is a local, so we take the local's value and project from it.
776            AddressBase::Local(local) => {
777                let local = self.locals[local]?;
778                let place_ty = PlaceTy::from_ty(self.ty(local));
779                (place_ty, local)
780            }
781            // The base is a pointer's deref, so we introduce the implicit deref.
782            AddressBase::Deref(reborrow) => {
783                let place_ty = PlaceTy::from_ty(self.ty(reborrow));
784                self.project(place_ty, reborrow, ProjectionElem::Deref)?
785            }
786        };
787        for &proj in projection {
788            (place_ty, value) = self.project(place_ty, value, proj)?;
789        }
790        Some(value)
791    }
792
    /// Compute the value obtained by applying one projection element `proj` on top of
    /// `value` (whose type, including an optional downcast variant, is `place_ty`).
    ///
    /// Returns the projected type together with the `VnIndex` of the projected value,
    /// or `None` if the projection cannot be value-numbered soundly.
    #[instrument(level = "trace", skip(self), ret)]
    fn project(
        &mut self,
        place_ty: PlaceTy<'tcx>,
        value: VnIndex,
        proj: ProjectionElem<VnIndex, Ty<'tcx>>,
    ) -> Option<(PlaceTy<'tcx>, VnIndex)> {
        let projection_ty = place_ty.projection_ty(self.tcx, proj);
        let proj = match proj {
            ProjectionElem::Deref => {
                // Only dereference shared references to freeze types: per the reasoning
                // below, only then is the pointee guaranteed not to change for the
                // lifetime of the borrow.
                if let Some(Mutability::Not) = place_ty.ty.ref_mutability()
                    && projection_ty.ty.is_freeze(self.tcx, self.typing_env())
                {
                    if let Value::Address { base, projection, .. } = self.get(value)
                        && let Some(value) = self.dereference_address(base, projection)
                    {
                        return Some((projection_ty, value));
                    }
                    // DO NOT reason about the pointer value itself.
                    // We cannot unify two references obtained by dereferencing the same
                    // pointer, because they may have different lifetimes:
                    // ```
                    // let b: &T = *a;
                    // // ... `a` is allowed to be modified here, so `b` and `c` have
                    // // different borrowing lifetimes. Unifying them would extend the
                    // // lifetime of `b`.
                    // let c: &T = *a;
                    // ```
                    if projection_ty.ty.is_ref() {
                        return None;
                    }

                    // An immutable borrow `_x` always points to the same value for the
                    // lifetime of the borrow, so we can merge all instances of `*_x`.
                    let deref = self
                        .insert(projection_ty.ty, Value::Projection(value, ProjectionElem::Deref));
                    return Some((projection_ty, deref));
                } else {
                    return None;
                }
            }
            ProjectionElem::Downcast(name, index) => ProjectionElem::Downcast(name, index),
            ProjectionElem::Field(f, _) => match self.get(value) {
                // Reading a field of a known aggregate yields that field's value.
                Value::Aggregate(_, fields) => return Some((projection_ty, fields[f.as_usize()])),
                // For unions, we only know the value of the active field.
                Value::Union(active, field) if active == f => return Some((projection_ty, field)),
                Value::Projection(outer_value, ProjectionElem::Downcast(_, read_variant))
                    if let Value::Aggregate(written_variant, fields) = self.get(outer_value)
                    // This pass is not aware of control-flow, so we do not know whether the
                    // replacement we are doing is actually reachable. We could be in any arm of
                    // ```
                    // match Some(x) {
                    //     Some(y) => /* stuff */,
                    //     None => /* other */,
                    // }
                    // ```
                    //
                    // In surface rust, the current statement would be unreachable.
                    //
                    // However, from the reference chapter on enums and RFC 2195,
                    // accessing the wrong variant is not UB if the enum has repr.
                    // So it's not impossible for a series of MIR opts to generate
                    // a downcast to an inactive variant.
                    && written_variant == read_variant =>
                {
                    return Some((projection_ty, fields[f.as_usize()]));
                }
                _ => ProjectionElem::Field(f, ()),
            },
            ProjectionElem::Index(idx) => {
                // Indexing into a repeat expression yields the repeated element,
                // whatever the index.
                if let Value::Repeat(inner, _) = self.get(value) {
                    return Some((projection_ty, inner));
                }
                ProjectionElem::Index(idx)
            }
            ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
                match self.get(value) {
                    Value::Repeat(inner, _) => {
                        return Some((projection_ty, inner));
                    }
                    Value::Aggregate(_, operands) => {
                        // Resolve `from_end` against the known number of elements.
                        let offset = if from_end {
                            operands.len() - offset as usize
                        } else {
                            offset as usize
                        };
                        let value = operands.get(offset).copied()?;
                        return Some((projection_ty, value));
                    }
                    _ => {}
                };
                ProjectionElem::ConstantIndex { offset, min_length, from_end }
            }
            ProjectionElem::Subslice { from, to, from_end } => {
                ProjectionElem::Subslice { from, to, from_end }
            }
            ProjectionElem::OpaqueCast(_) => ProjectionElem::OpaqueCast(()),
            ProjectionElem::UnwrapUnsafeBinder(_) => ProjectionElem::UnwrapUnsafeBinder(()),
        };

        // Fallback: record the projection as an opaque `Value::Projection`.
        let value = self.insert(projection_ty.ty, Value::Projection(value, proj));
        Some((projection_ty, value))
    }
894
895    /// Simplify the projection chain if we know better.
896    #[instrument(level = "trace", skip(self))]
897    fn simplify_place_projection(&mut self, place: &mut Place<'tcx>, location: Location) {
898        // If the projection is indirect, we treat the local as a value, so can replace it with
899        // another local.
900        if place.is_indirect_first_projection()
901            && let Some(base) = self.locals[place.local]
902            && let Some(new_local) = self.try_as_local(base, location)
903            && place.local != new_local
904        {
905            place.local = new_local;
906            self.reused_locals.insert(new_local);
907        }
908
909        let mut projection = Cow::Borrowed(&place.projection[..]);
910
911        for i in 0..projection.len() {
912            let elem = projection[i];
913            if let ProjectionElem::Index(idx_local) = elem
914                && let Some(idx) = self.locals[idx_local]
915            {
916                if let Some(offset) = self.eval_to_const(idx)
917                    && let Some(offset) = self.ecx.read_target_usize(offset).discard_err()
918                    && let Some(min_length) = offset.checked_add(1)
919                {
920                    projection.to_mut()[i] =
921                        ProjectionElem::ConstantIndex { offset, min_length, from_end: false };
922                } else if let Some(new_idx_local) = self.try_as_local(idx, location)
923                    && idx_local != new_idx_local
924                {
925                    projection.to_mut()[i] = ProjectionElem::Index(new_idx_local);
926                    self.reused_locals.insert(new_idx_local);
927                }
928            }
929        }
930
931        if Cow::is_owned(&projection) {
932            place.projection = self.tcx.mk_place_elems(&projection);
933        }
934
935        trace!(?place);
936    }
937
    /// Represent the *value* which would be read from `place`. If we succeed, return it.
    /// If we fail, return a `PlaceRef` that contains the same value.
    #[instrument(level = "trace", skip(self), ret)]
    fn compute_place_value(
        &mut self,
        place: Place<'tcx>,
        location: Location,
    ) -> Result<VnIndex, PlaceRef<'tcx>> {
        // Invariant: `place` and `place_ref` point to the same value, even if they point to
        // different memory locations.
        let mut place_ref = place.as_ref();

        // Invariant: `value` holds the value up-to the `index`th projection excluded.
        let Some(mut value) = self.locals[place.local] else { return Err(place_ref) };
        // Invariant: `value` has type `place_ty`, with optional downcast variant if needed.
        let mut place_ty = PlaceTy::from_ty(self.local_decls[place.local].ty);
        for (index, proj) in place.projection.iter().enumerate() {
            if let Some(local) = self.try_as_local(value, location) {
                // Both `local` and `Place { local: place.local, projection: projection[..index] }`
                // hold the same value. Therefore, the following place holds the value in the
                // original `place`.
                place_ref = PlaceRef { local, projection: &place.projection[index..] };
            }

            // Replace any local appearing in the projection element (e.g. an `Index`
            // local) by its value number; bail out if we do not know its value.
            let Some(proj) = proj.try_map(|value| self.locals[value], |ty| ty) else {
                return Err(place_ref);
            };
            // Apply the projection element onto the current value.
            let Some(ty_and_value) = self.project(place_ty, value, proj) else {
                return Err(place_ref);
            };
            (place_ty, value) = ty_and_value;
        }

        Ok(value)
    }
973
974    /// Represent the *value* which would be read from `place`, and point `place` to a preexisting
975    /// place with the same value (if that already exists).
976    #[instrument(level = "trace", skip(self), ret)]
977    fn simplify_place_value(
978        &mut self,
979        place: &mut Place<'tcx>,
980        location: Location,
981    ) -> Option<VnIndex> {
982        self.simplify_place_projection(place, location);
983
984        match self.compute_place_value(*place, location) {
985            Ok(value) => {
986                if let Some(new_place) = self.try_as_place(value, location, true)
987                    && (new_place.local != place.local
988                        || new_place.projection.len() < place.projection.len())
989                {
990                    *place = new_place;
991                    self.reused_locals.insert(new_place.local);
992                }
993                Some(value)
994            }
995            Err(place_ref) => {
996                if place_ref.local != place.local
997                    || place_ref.projection.len() < place.projection.len()
998                {
999                    // By the invariant on `place_ref`.
1000                    *place = place_ref.project_deeper(&[], self.tcx);
1001                    self.reused_locals.insert(place_ref.local);
1002                }
1003                None
1004            }
1005        }
1006    }
1007
1008    #[instrument(level = "trace", skip(self), ret)]
1009    fn simplify_operand(
1010        &mut self,
1011        operand: &mut Operand<'tcx>,
1012        location: Location,
1013    ) -> Option<VnIndex> {
1014        let value = match *operand {
1015            Operand::RuntimeChecks(c) => self.insert(self.tcx.types.bool, Value::RuntimeChecks(c)),
1016            Operand::Constant(ref constant) => self.insert_constant(constant.const_),
1017            Operand::Copy(ref mut place) | Operand::Move(ref mut place) => {
1018                self.simplify_place_value(place, location)?
1019            }
1020        };
1021        if let Some(const_) = self.try_as_constant(value) {
1022            *operand = Operand::Constant(Box::new(const_));
1023        } else if let Value::RuntimeChecks(c) = self.get(value) {
1024            *operand = Operand::RuntimeChecks(c);
1025        }
1026        Some(value)
1027    }
1028
    /// Compute the `VnIndex` for `rvalue`, simplifying it in place along the way
    /// (operands may be replaced by constants or cheaper equivalent forms).
    /// Returns `None` when the rvalue cannot be given a value number.
    ///
    /// NOTE(review): `lhs` is not read in this body; it appears to be captured only by
    /// the `#[instrument]` tracing attribute — confirm whether it is still needed.
    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_rvalue(
        &mut self,
        lhs: &Place<'tcx>,
        rvalue: &mut Rvalue<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let value = match *rvalue {
            // Forward values.
            Rvalue::Use(ref mut operand) => return self.simplify_operand(operand, location),

            // Roots.
            Rvalue::Repeat(ref mut op, amount) => {
                let op = self.simplify_operand(op, location)?;
                Value::Repeat(op, amount)
            }
            Rvalue::Aggregate(..) => return self.simplify_aggregate(rvalue, location),
            Rvalue::Ref(_, borrow_kind, ref mut place) => {
                self.simplify_place_projection(place, location);
                return self.new_pointer(*place, AddressKind::Ref(borrow_kind));
            }
            Rvalue::RawPtr(mutbl, ref mut place) => {
                self.simplify_place_projection(place, location);
                return self.new_pointer(*place, AddressKind::Address(mutbl));
            }
            // Wrapping in an unsafe binder is value-numbered as a transmute of the operand.
            Rvalue::WrapUnsafeBinder(ref mut op, _) => {
                let value = self.simplify_operand(op, location)?;
                Value::Cast { kind: CastKind::Transmute, value }
            }

            // Operations.
            Rvalue::Cast(ref mut kind, ref mut value, to) => {
                return self.simplify_cast(kind, value, to, location);
            }
            Rvalue::BinaryOp(op, box (ref mut lhs, ref mut rhs)) => {
                return self.simplify_binary(op, lhs, rhs, location);
            }
            Rvalue::UnaryOp(op, ref mut arg_op) => {
                return self.simplify_unary(op, arg_op, location);
            }
            Rvalue::Discriminant(ref mut place) => {
                let place = self.simplify_place_value(place, location)?;
                // If the written variant is known, the discriminant is a constant.
                if let Some(discr) = self.simplify_discriminant(place) {
                    return Some(discr);
                }
                Value::Discriminant(place)
            }

            // Unsupported values.
            Rvalue::ThreadLocalRef(..) => return None,
            Rvalue::CopyForDeref(_) => {
                bug!("forbidden in runtime MIR: {rvalue:?}")
            }
        };
        let ty = rvalue.ty(self.local_decls, self.tcx);
        Some(self.insert(ty, value))
    }
1086
1087    fn simplify_discriminant(&mut self, place: VnIndex) -> Option<VnIndex> {
1088        let enum_ty = self.ty(place);
1089        if enum_ty.is_enum()
1090            && let Value::Aggregate(variant, _) = self.get(place)
1091        {
1092            let discr = self.ecx.discriminant_for_variant(enum_ty, variant).discard_err()?;
1093            return Some(self.insert_scalar(discr.layout.ty, discr.to_scalar()));
1094        }
1095
1096        None
1097    }
1098
1099    fn try_as_place_elem(
1100        &mut self,
1101        ty: Ty<'tcx>,
1102        proj: ProjectionElem<VnIndex, ()>,
1103        loc: Location,
1104    ) -> Option<PlaceElem<'tcx>> {
1105        proj.try_map(
1106            |value| {
1107                let local = self.try_as_local(value, loc)?;
1108                self.reused_locals.insert(local);
1109                Some(local)
1110            },
1111            |()| ty,
1112        )
1113    }
1114
1115    fn simplify_aggregate_to_copy(
1116        &mut self,
1117        ty: Ty<'tcx>,
1118        variant_index: VariantIdx,
1119        fields: &[VnIndex],
1120    ) -> Option<VnIndex> {
1121        let Some(&first_field) = fields.first() else { return None };
1122        let Value::Projection(copy_from_value, _) = self.get(first_field) else { return None };
1123
1124        // All fields must correspond one-to-one and come from the same aggregate value.
1125        if fields.iter().enumerate().any(|(index, &v)| {
1126            if let Value::Projection(pointer, ProjectionElem::Field(from_index, _)) = self.get(v)
1127                && copy_from_value == pointer
1128                && from_index.index() == index
1129            {
1130                return false;
1131            }
1132            true
1133        }) {
1134            return None;
1135        }
1136
1137        let mut copy_from_local_value = copy_from_value;
1138        if let Value::Projection(pointer, proj) = self.get(copy_from_value)
1139            && let ProjectionElem::Downcast(_, read_variant) = proj
1140        {
1141            if variant_index == read_variant {
1142                // When copying a variant, there is no need to downcast.
1143                copy_from_local_value = pointer;
1144            } else {
1145                // The copied variant must be identical.
1146                return None;
1147            }
1148        }
1149
1150        // Both must be variants of the same type.
1151        if self.ty(copy_from_local_value) == ty { Some(copy_from_local_value) } else { None }
1152    }
1153
    /// Compute the `VnIndex` for an `Rvalue::Aggregate`, simplifying the rvalue in
    /// place when a cheaper equivalent form is found: a zero-sized constant, a
    /// `Repeat` of a single element, or a copy of the value the aggregate rebuilds.
    fn simplify_aggregate(
        &mut self,
        rvalue: &mut Rvalue<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let tcx = self.tcx;
        let ty = rvalue.ty(self.local_decls, tcx);

        let Rvalue::Aggregate(box ref kind, ref mut field_ops) = *rvalue else { bug!() };

        if field_ops.is_empty() {
            // An aggregate with no field operands may still carry data (an enum
            // discriminant, a coroutine's implicit state), so check the kind.
            let is_zst = match *kind {
                AggregateKind::Array(..)
                | AggregateKind::Tuple
                | AggregateKind::Closure(..)
                | AggregateKind::CoroutineClosure(..) => true,
                // Only enums can be non-ZST.
                AggregateKind::Adt(did, ..) => tcx.def_kind(did) != DefKind::Enum,
                // Coroutines are never ZST, as they at least contain the implicit states.
                AggregateKind::Coroutine(..) => false,
                AggregateKind::RawPtr(..) => bug!("MIR for RawPtr aggregate must have 2 fields"),
            };

            if is_zst {
                return Some(self.insert_constant(Const::zero_sized(ty)));
            }
        }

        // Value-number each field operand; fields we cannot simplify become fresh
        // opaque values.
        let fields = self.arena.alloc_from_iter(field_ops.iter_mut().map(|op| {
            self.simplify_operand(op, location)
                .unwrap_or_else(|| self.new_opaque(op.ty(self.local_decls, self.tcx)))
        }));

        let variant_index = match *kind {
            AggregateKind::Array(..) | AggregateKind::Tuple => {
                assert!(!field_ops.is_empty());
                FIRST_VARIANT
            }
            AggregateKind::Closure(..)
            | AggregateKind::CoroutineClosure(..)
            | AggregateKind::Coroutine(..) => FIRST_VARIANT,
            AggregateKind::Adt(_, variant_index, _, _, None) => variant_index,
            // Do not track unions.
            AggregateKind::Adt(_, _, _, _, Some(active_field)) => {
                let field = *fields.first()?;
                return Some(self.insert(ty, Value::Union(active_field, field)));
            }
            AggregateKind::RawPtr(..) => {
                assert_eq!(field_ops.len(), 2);
                let [mut pointer, metadata] = fields.try_into().unwrap();

                // Any thin pointer of matching mutability is fine as the data pointer.
                let mut was_updated = false;
                while let Value::Cast { kind: CastKind::PtrToPtr, value: cast_value } =
                    self.get(pointer)
                    && let ty::RawPtr(from_pointee_ty, from_mtbl) = self.ty(cast_value).kind()
                    && let ty::RawPtr(_, output_mtbl) = ty.kind()
                    && from_mtbl == output_mtbl
                    && from_pointee_ty.is_sized(self.tcx, self.typing_env())
                {
                    pointer = cast_value;
                    was_updated = true;
                }

                // If we looked through casts, rewrite the data-pointer operand too.
                if was_updated && let Some(op) = self.try_as_operand(pointer, location) {
                    field_ops[FieldIdx::ZERO] = op;
                }

                return Some(self.insert(ty, Value::RawPtr { pointer, metadata }));
            }
        };

        // An array of more than 4 identical elements is better expressed as `Repeat`.
        if ty.is_array()
            && fields.len() > 4
            && let Ok(&first) = fields.iter().all_equal_value()
        {
            let len = ty::Const::from_target_usize(self.tcx, fields.len().try_into().unwrap());
            if let Some(op) = self.try_as_operand(first, location) {
                *rvalue = Rvalue::Repeat(op, len);
            }
            return Some(self.insert(ty, Value::Repeat(first, len)));
        }

        // An aggregate that rebuilds an existing value field-by-field is just a copy.
        if let Some(value) = self.simplify_aggregate_to_copy(ty, variant_index, &fields) {
            if let Some(place) = self.try_as_place(value, location, true) {
                self.reused_locals.insert(place.local);
                *rvalue = Rvalue::Use(Operand::Copy(place));
            }
            return Some(value);
        }

        Some(self.insert(ty, Value::Aggregate(variant_index, fields)))
    }
1247
    /// Compute the `VnIndex` for `op` applied to `arg_op`, simplifying the operand in
    /// place and folding known identities (double `!`/`-`, negated comparisons, and
    /// `PtrMetadata` of pointers whose metadata is statically known).
    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_unary(
        &mut self,
        op: UnOp,
        arg_op: &mut Operand<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let mut arg_index = self.simplify_operand(arg_op, location)?;
        let arg_ty = self.ty(arg_index);
        let ret_ty = op.ty(self.tcx, arg_ty);

        // PtrMetadata doesn't care about *const vs *mut vs & vs &mut,
        // so start by removing those distinctions so we can update the `Operand`
        if op == UnOp::PtrMetadata {
            let mut was_updated = false;
            // Repeatedly peel metadata-preserving operations off the argument.
            loop {
                arg_index = match self.get(arg_index) {
                    // Pointer casts that preserve metadata, such as
                    // `*const [i32]` <-> `*mut [i32]` <-> `*mut [f32]`.
                    // It's critical that this not eliminate cases like
                    // `*const [T]` -> `*const T` which remove metadata.
                    // We run on potentially-generic MIR, though, so unlike codegen
                    // we can't always know exactly what the metadata are.
                    // To allow things like `*mut (?A, ?T)` <-> `*mut (?B, ?T)`,
                    // it's fine to get a projection as the type.
                    Value::Cast { kind: CastKind::PtrToPtr, value: inner }
                        if self.pointers_have_same_metadata(self.ty(inner), arg_ty) =>
                    {
                        inner
                    }

                    // We have an unsizing cast, which assigns the length to wide pointer metadata.
                    Value::Cast {
                        kind: CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _),
                        value: from,
                    } if let Some(from) = self.ty(from).builtin_deref(true)
                        && let ty::Array(_, len) = from.kind()
                        && let Some(to) = self.ty(arg_index).builtin_deref(true)
                        && let ty::Slice(..) = to.kind() =>
                    {
                        return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len)));
                    }

                    // `&mut *p`, `&raw *p`, etc don't change metadata.
                    Value::Address { base: AddressBase::Deref(reborrowed), projection, .. }
                        if projection.is_empty() =>
                    {
                        reborrowed
                    }

                    _ => break,
                };
                was_updated = true;
            }

            // If anything was peeled, rewrite the MIR operand to the peeled value.
            if was_updated && let Some(op) = self.try_as_operand(arg_index, location) {
                *arg_op = op;
            }
        }

        let value = match (op, self.get(arg_index)) {
            // Double negation (logical or arithmetic) is the identity.
            (UnOp::Not, Value::UnaryOp(UnOp::Not, inner)) => return Some(inner),
            (UnOp::Neg, Value::UnaryOp(UnOp::Neg, inner)) => return Some(inner),
            // `!` of an equality comparison flips the comparison.
            (UnOp::Not, Value::BinaryOp(BinOp::Eq, lhs, rhs)) => {
                Value::BinaryOp(BinOp::Ne, lhs, rhs)
            }
            (UnOp::Not, Value::BinaryOp(BinOp::Ne, lhs, rhs)) => {
                Value::BinaryOp(BinOp::Eq, lhs, rhs)
            }
            (UnOp::PtrMetadata, Value::RawPtr { metadata, .. }) => return Some(metadata),
            // We have an unsizing cast, which assigns the length to wide pointer metadata.
            (
                UnOp::PtrMetadata,
                Value::Cast {
                    kind: CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _),
                    value: inner,
                },
            ) if let ty::Slice(..) = arg_ty.builtin_deref(true).unwrap().kind()
                && let ty::Array(_, len) = self.ty(inner).builtin_deref(true).unwrap().kind() =>
            {
                return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len)));
            }
            _ => Value::UnaryOp(op, arg_index),
        };
        Some(self.insert(ret_ty, value))
    }
1334
    /// Compute the `VnIndex` for `op(lhs, rhs)`. Both operands are value-numbered (and
    /// possibly rewritten) first; matching `PtrToPtr` casts on both sides of a pointer
    /// comparison are peeled before attempting algebraic simplification.
    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_binary(
        &mut self,
        op: BinOp,
        lhs_operand: &mut Operand<'tcx>,
        rhs_operand: &mut Operand<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let lhs = self.simplify_operand(lhs_operand, location);
        let rhs = self.simplify_operand(rhs_operand, location);

        // Only short-circuit options after we called `simplify_operand`
        // on both operands for side effect.
        let mut lhs = lhs?;
        let mut rhs = rhs?;

        let lhs_ty = self.ty(lhs);

        // If we're comparing pointers, remove `PtrToPtr` casts if the from
        // types of both casts and the metadata all match.
        if let BinOp::Eq | BinOp::Ne | BinOp::Lt | BinOp::Le | BinOp::Gt | BinOp::Ge = op
            && lhs_ty.is_any_ptr()
            && let Value::Cast { kind: CastKind::PtrToPtr, value: lhs_value } = self.get(lhs)
            && let Value::Cast { kind: CastKind::PtrToPtr, value: rhs_value } = self.get(rhs)
            && let lhs_from = self.ty(lhs_value)
            && lhs_from == self.ty(rhs_value)
            && self.pointers_have_same_metadata(lhs_from, lhs_ty)
        {
            lhs = lhs_value;
            rhs = rhs_value;
            // Only rewrite the MIR operands if both peeled values can be materialized.
            if let Some(lhs_op) = self.try_as_operand(lhs, location)
                && let Some(rhs_op) = self.try_as_operand(rhs, location)
            {
                *lhs_operand = lhs_op;
                *rhs_operand = rhs_op;
            }
        }

        // Try algebraic identities (neutral/absorbing elements) before falling back
        // to an opaque `BinaryOp` value.
        if let Some(value) = self.simplify_binary_inner(op, lhs_ty, lhs, rhs) {
            return Some(value);
        }
        let ty = op.ty(self.tcx, lhs_ty, self.ty(rhs));
        let value = Value::BinaryOp(op, lhs, rhs);
        Some(self.insert(ty, value))
    }
1380
    /// Try to simplify `lhs op rhs` using algebraic identities on the interned values.
    ///
    /// Handles neutral elements (`x + 0`, `x * 1`, `x & ALL_ONES`, ...), absorbing
    /// elements (`x * 0`, `x | ALL_ONES`, ...), `x - x` / `x ^ x`, and `==` / `!=`
    /// between known bits or provably-equal values.
    ///
    /// Returns `Some(VnIndex)` of the simplified value, or `None` when no identity
    /// applies. For overflowing ops, the result is wrapped in a `(value, false)`
    /// tuple, since none of these identities can actually overflow.
    fn simplify_binary_inner(
        &mut self,
        op: BinOp,
        lhs_ty: Ty<'tcx>,
        lhs: VnIndex,
        rhs: VnIndex,
    ) -> Option<VnIndex> {
        // Floats are weird enough that none of the logic below applies.
        let reasonable_ty =
            lhs_ty.is_integral() || lhs_ty.is_bool() || lhs_ty.is_char() || lhs_ty.is_any_ptr();
        if !reasonable_ty {
            return None;
        }

        let layout = self.ecx.layout_of(lhs_ty).ok()?;

        // Evaluate an operand down to its raw bits, when it is a known scalar constant.
        let mut as_bits = |value: VnIndex| {
            let constant = self.eval_to_const(value)?;
            if layout.backend_repr.is_scalar() {
                let scalar = self.ecx.read_scalar(constant).discard_err()?;
                scalar.to_bits(constant.layout.size).discard_err()
            } else {
                // `constant` is a wide pointer. Do not evaluate to bits.
                None
            }
        };

        // Represent the values as `Left(bits)` or `Right(VnIndex)`.
        use Either::{Left, Right};
        let a = as_bits(lhs).map_or(Right(lhs), Left);
        let b = as_bits(rhs).map_or(Right(rhs), Left);

        let result = match (op, a, b) {
            // Neutral elements.
            (
                BinOp::Add
                | BinOp::AddWithOverflow
                | BinOp::AddUnchecked
                | BinOp::BitOr
                | BinOp::BitXor,
                Left(0),
                Right(p),
            )
            | (
                BinOp::Add
                | BinOp::AddWithOverflow
                | BinOp::AddUnchecked
                | BinOp::BitOr
                | BinOp::BitXor
                | BinOp::Sub
                | BinOp::SubWithOverflow
                | BinOp::SubUnchecked
                | BinOp::Offset
                | BinOp::Shl
                | BinOp::Shr,
                Right(p),
                Left(0),
            )
            | (BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked, Left(1), Right(p))
            | (
                BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked | BinOp::Div,
                Right(p),
                Left(1),
            ) => p,
            // Attempt to simplify `x & ALL_ONES` to `x`, with `ALL_ONES` depending on type size.
            (BinOp::BitAnd, Right(p), Left(ones)) | (BinOp::BitAnd, Left(ones), Right(p))
                if ones == layout.size.truncate(u128::MAX)
                    || (layout.ty.is_bool() && ones == 1) =>
            {
                p
            }
            // Absorbing elements.
            (
                BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked | BinOp::BitAnd,
                _,
                Left(0),
            )
            | (BinOp::Rem, _, Left(1))
            | (
                BinOp::Mul
                | BinOp::MulWithOverflow
                | BinOp::MulUnchecked
                | BinOp::Div
                | BinOp::Rem
                | BinOp::BitAnd
                | BinOp::Shl
                | BinOp::Shr,
                Left(0),
                _,
            ) => self.insert_scalar(lhs_ty, Scalar::from_uint(0u128, layout.size)),
            // Attempt to simplify `x | ALL_ONES` to `ALL_ONES`.
            (BinOp::BitOr, _, Left(ones)) | (BinOp::BitOr, Left(ones), _)
                if ones == layout.size.truncate(u128::MAX)
                    || (layout.ty.is_bool() && ones == 1) =>
            {
                self.insert_scalar(lhs_ty, Scalar::from_uint(ones, layout.size))
            }
            // Sub/Xor with itself.
            (BinOp::Sub | BinOp::SubWithOverflow | BinOp::SubUnchecked | BinOp::BitXor, a, b)
                if a == b =>
            {
                self.insert_scalar(lhs_ty, Scalar::from_uint(0u128, layout.size))
            }
            // Comparison:
            // - if both operands can be computed as bits, just compare the bits;
            // - if we proved that both operands have the same value, we can insert true/false;
            // - otherwise, do nothing, as we do not try to prove inequality.
            (BinOp::Eq, Left(a), Left(b)) => self.insert_bool(a == b),
            (BinOp::Eq, a, b) if a == b => self.insert_bool(true),
            (BinOp::Ne, Left(a), Left(b)) => self.insert_bool(a != b),
            (BinOp::Ne, a, b) if a == b => self.insert_bool(false),
            _ => return None,
        };

        if op.is_overflowing() {
            // None of the identities above can overflow, so the flag is always `false`.
            let ty = Ty::new_tup(self.tcx, &[self.ty(result), self.tcx.types.bool]);
            let false_val = self.insert_bool(false);
            Some(self.insert_tuple(ty, &[result, false_val]))
        } else {
            Some(result)
        }
    }
1503
    /// Simplify a cast of `initial_operand` to type `to`, iteratively peeling
    /// redundant cast-of-cast chains and aggregate/raw-pointer round-trips.
    ///
    /// When any peeling happened and the peeled value can be materialized as an
    /// operand, `initial_kind` and `initial_operand` are rewritten in place too.
    /// Returns the `VnIndex` of the (possibly simplified) cast result.
    fn simplify_cast(
        &mut self,
        initial_kind: &mut CastKind,
        initial_operand: &mut Operand<'tcx>,
        to: Ty<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        use CastKind::*;
        use rustc_middle::ty::adjustment::PointerCoercion::*;

        let mut kind = *initial_kind;
        let mut value = self.simplify_operand(initial_operand, location)?;
        let mut from = self.ty(value);
        if from == to {
            // Identity cast: nothing to do.
            return Some(value);
        }

        if let CastKind::PointerCoercion(ReifyFnPointer(_) | ClosureFnPointer(_), _) = kind {
            // Each reification of a generic fn may get a different pointer.
            // Do not try to merge them.
            return Some(self.new_opaque(to));
        }

        // Fixpoint loop: keep applying rewrite rules until none fires.
        // Each rule replaces (`kind`, `value`, `from`) by an equivalent cast.
        let mut was_ever_updated = false;
        loop {
            let mut was_updated_this_iteration = false;

            // Transmuting between raw pointers is just a pointer cast so long as
            // they have the same metadata type (like `*const i32` <=> `*mut u64`
            // or `*mut [i32]` <=> `*const [u64]`), including the common special
            // case of `*const T` <=> `*mut T`.
            if let Transmute = kind
                && from.is_raw_ptr()
                && to.is_raw_ptr()
                && self.pointers_have_same_metadata(from, to)
            {
                kind = PtrToPtr;
                was_updated_this_iteration = true;
            }

            // If a cast just casts away the metadata again, then we can get it by
            // casting the original thin pointer passed to `from_raw_parts`
            if let PtrToPtr = kind
                && let Value::RawPtr { pointer, .. } = self.get(value)
                && let ty::RawPtr(to_pointee, _) = to.kind()
                && to_pointee.is_sized(self.tcx, self.typing_env())
            {
                from = self.ty(pointer);
                value = pointer;
                was_updated_this_iteration = true;
                if from == to {
                    return Some(pointer);
                }
            }

            // Aggregate-then-Transmute can just transmute the original field value,
            // so long as the bytes of a value from only from a single field.
            if let Transmute = kind
                && let Value::Aggregate(variant_idx, field_values) = self.get(value)
                && let Some((field_idx, field_ty)) =
                    self.value_is_all_in_one_field(from, variant_idx)
            {
                from = field_ty;
                value = field_values[field_idx.as_usize()];
                was_updated_this_iteration = true;
                if field_ty == to {
                    return Some(value);
                }
            }

            // Various cast-then-cast cases can be simplified.
            if let Value::Cast { kind: inner_kind, value: inner_value } = self.get(value) {
                let inner_from = self.ty(inner_value);
                let new_kind = match (inner_kind, kind) {
                    // Even if there's a narrowing cast in here that's fine, because
                    // things like `*mut [i32] -> *mut i32 -> *const i32` and
                    // `*mut [i32] -> *const [i32] -> *const i32` can skip the middle in MIR.
                    (PtrToPtr, PtrToPtr) => Some(PtrToPtr),
                    // PtrToPtr-then-Transmute is fine so long as the pointer cast is identity:
                    // `*const T -> *mut T -> NonNull<T>` is fine, but we need to check for narrowing
                    // to skip things like `*const [i32] -> *const i32 -> NonNull<T>`.
                    (PtrToPtr, Transmute) if self.pointers_have_same_metadata(inner_from, from) => {
                        Some(Transmute)
                    }
                    // Similarly, for Transmute-then-PtrToPtr. Note that we need to check different
                    // variables for their metadata, and thus this can't merge with the previous arm.
                    (Transmute, PtrToPtr) if self.pointers_have_same_metadata(from, to) => {
                        Some(Transmute)
                    }
                    // It would be legal to always do this, but we don't want to hide information
                    // from the backend that it'd otherwise be able to use for optimizations.
                    (Transmute, Transmute)
                        if !self.transmute_may_have_niche_of_interest_to_backend(
                            inner_from, from, to,
                        ) =>
                    {
                        Some(Transmute)
                    }
                    _ => None,
                };
                if let Some(new_kind) = new_kind {
                    kind = new_kind;
                    from = inner_from;
                    value = inner_value;
                    was_updated_this_iteration = true;
                    if inner_from == to {
                        return Some(inner_value);
                    }
                }
            }

            if was_updated_this_iteration {
                was_ever_updated = true;
            } else {
                break;
            }
        }

        // Reflect the simplification back into the MIR when possible.
        if was_ever_updated && let Some(op) = self.try_as_operand(value, location) {
            *initial_operand = op;
            *initial_kind = kind;
        }

        Some(self.insert(to, Value::Cast { kind, value }))
    }
1629
1630    fn pointers_have_same_metadata(&self, left_ptr_ty: Ty<'tcx>, right_ptr_ty: Ty<'tcx>) -> bool {
1631        let left_meta_ty = left_ptr_ty.pointee_metadata_ty_or_projection(self.tcx);
1632        let right_meta_ty = right_ptr_ty.pointee_metadata_ty_or_projection(self.tcx);
1633        if left_meta_ty == right_meta_ty {
1634            true
1635        } else if let Ok(left) =
1636            self.tcx.try_normalize_erasing_regions(self.typing_env(), left_meta_ty)
1637            && let Ok(right) =
1638                self.tcx.try_normalize_erasing_regions(self.typing_env(), right_meta_ty)
1639        {
1640            left == right
1641        } else {
1642            false
1643        }
1644    }
1645
    /// Returns `false` if we're confident that the middle type doesn't have an
    /// interesting niche so we can skip that step when transmuting.
    ///
    /// The backend will emit `assume`s when transmuting between types with niches,
    /// so we want to preserve `i32 -> char -> u32` so that that data is around,
    /// but it's fine to skip whole-range-is-valid steps like `A -> u32 -> B`.
    ///
    /// Returning `true` is always sound; it only means the `from -> middle -> to`
    /// chain is kept as two transmutes instead of being collapsed into one.
    fn transmute_may_have_niche_of_interest_to_backend(
        &self,
        from_ty: Ty<'tcx>,
        middle_ty: Ty<'tcx>,
        to_ty: Ty<'tcx>,
    ) -> bool {
        let Ok(middle_layout) = self.ecx.layout_of(middle_ty) else {
            // If it's too generic or something, then assume it might be interesting later.
            return true;
        };

        // Passing through an uninhabited type is always of interest: keep it.
        if middle_layout.uninhabited {
            return true;
        }

        match middle_layout.backend_repr {
            BackendRepr::Scalar(mid) => {
                if mid.is_always_valid(&self.ecx) {
                    // With no niche it's never interesting, so don't bother
                    // looking at the layout of the other two types.
                    false
                } else if let Ok(from_layout) = self.ecx.layout_of(from_ty)
                    && !from_layout.uninhabited
                    && from_layout.size == middle_layout.size
                    && let BackendRepr::Scalar(from_a) = from_layout.backend_repr
                    && let mid_range = mid.valid_range(&self.ecx)
                    && let from_range = from_a.valid_range(&self.ecx)
                    && mid_range.contains_range(from_range, middle_layout.size)
                {
                    // The `from_range` is a (non-strict) subset of `mid_range`
                    // such as if we're doing `bool` -> `ascii::Char` -> `_`,
                    // where `from_range: 0..=1` and `mid_range: 0..=127`,
                    // and thus the middle doesn't tell us anything we don't
                    // already know from the initial type.
                    false
                } else if let Ok(to_layout) = self.ecx.layout_of(to_ty)
                    && !to_layout.uninhabited
                    && to_layout.size == middle_layout.size
                    && let BackendRepr::Scalar(to_a) = to_layout.backend_repr
                    && let mid_range = mid.valid_range(&self.ecx)
                    && let to_range = to_a.valid_range(&self.ecx)
                    && mid_range.contains_range(to_range, middle_layout.size)
                {
                    // The `to_range` is a (non-strict) subset of `mid_range`
                    // such as if we're doing `_` -> `ascii::Char` -> `bool`,
                    // where `mid_range: 0..=127` and `to_range: 0..=1`,
                    // and thus the middle doesn't tell us anything we don't
                    // already know from the final type.
                    false
                } else {
                    // Couldn't prove the middle's range redundant: keep it.
                    true
                }
            }
            BackendRepr::ScalarPair(a, b) => {
                // Interesting iff either half of the pair has a niche.
                !a.is_always_valid(&self.ecx) || !b.is_always_valid(&self.ecx)
            }
            // These reprs are treated as never carrying a niche of interest here.
            BackendRepr::SimdVector { .. }
            | BackendRepr::SimdScalableVector { .. }
            | BackendRepr::Memory { .. } => false,
        }
    }
1713
    /// If a value of type `ty` (in variant `variant`) consists entirely of the
    /// bytes of a single field, return that field's index and type.
    ///
    /// Used to peel `Aggregate`-then-`Transmute`: transmuting the whole aggregate
    /// is then equivalent to transmuting just that field's value.
    fn value_is_all_in_one_field(
        &self,
        ty: Ty<'tcx>,
        variant: VariantIdx,
    ) -> Option<(FieldIdx, Ty<'tcx>)> {
        if let Ok(layout) = self.ecx.layout_of(ty)
            && let abi::Variants::Single { index } = layout.variants
            && index == variant
            && let Some((field_idx, field_layout)) = layout.non_1zst_field(&self.ecx)
            // The single non-1-ZST field must account for the whole size.
            && layout.size == field_layout.size
        {
            // We needed to check the variant to avoid trying to read the tag
            // field from an enum where no fields have variants, since that tag
            // field isn't in the `Aggregate` from which we're getting values.
            Some((field_idx, field_layout.ty))
        } else if let ty::Adt(adt, args) = ty.kind()
            && adt.is_struct()
            && adt.repr().transparent()
            && let [single_field] = adt.non_enum_variant().fields.raw.as_slice()
        {
            // `#[repr(transparent)]` struct: by definition all in one field.
            Some((FieldIdx::ZERO, single_field.ty(self.tcx, args)))
        } else {
            None
        }
    }
1739}
1740
1741/// Return true if any evaluation of this constant in the same MIR body
1742/// always returns the same value, taking into account even pointer identity tests.
1743///
1744/// In other words, this answers: is "cloning" the `Const` ok?
1745///
1746/// This returns `false` for constants that synthesize new `AllocId` when they are instantiated.
1747/// It is `true` for anything else, since a given `AllocId` *does* have a unique runtime value
1748/// within the scope of a single MIR body.
1749fn is_deterministic(c: Const<'_>) -> bool {
1750    // Primitive types cannot contain provenance and always have the same value.
1751    if c.ty().is_primitive() {
1752        return true;
1753    }
1754
1755    match c {
1756        // Some constants may generate fresh allocations for pointers they contain,
1757        // so using the same constant twice can yield two different results.
1758        // Notably, valtrees purposefully generate new allocations.
1759        Const::Ty(..) => false,
1760        // We do not know the contents, so don't attempt to do anything clever.
1761        Const::Unevaluated(..) => false,
1762        // When an evaluated constant contains provenance, it is encoded as an `AllocId`.
1763        // Cloning the constant will reuse the same `AllocId`. If this is in the same MIR
1764        // body, this same `AllocId` will result in the same pointer in codegen.
1765        Const::Val(..) => true,
1766    }
1767}
1768
1769/// Check if a constant may contain provenance information.
1770/// Can return `true` even if there is no provenance.
1771fn may_have_provenance(tcx: TyCtxt<'_>, value: ConstValue, size: Size) -> bool {
1772    match value {
1773        ConstValue::ZeroSized | ConstValue::Scalar(Scalar::Int(_)) => return false,
1774        ConstValue::Scalar(Scalar::Ptr(..)) | ConstValue::Slice { .. } => return true,
1775        ConstValue::Indirect { alloc_id, offset } => !tcx
1776            .global_alloc(alloc_id)
1777            .unwrap_memory()
1778            .inner()
1779            .provenance()
1780            .range_empty(AllocRange::from(offset..offset + size), &tcx),
1781    }
1782}
1783
/// Attempt to turn an interpreter value `op` into a `ConstValue` suitable for
/// propagation into MIR.
///
/// Returns `None` when propagation would be unsound, too costly for codegen,
/// or could leak pointer identity (see #128775).
fn op_to_prop_const<'tcx>(
    ecx: &mut InterpCx<'tcx, DummyMachine>,
    op: &OpTy<'tcx>,
) -> Option<ConstValue> {
    // Do not attempt to propagate unsized locals.
    if op.layout.is_unsized() {
        return None;
    }

    // This constant is a ZST, just return an empty value.
    if op.layout.is_zst() {
        return Some(ConstValue::ZeroSized);
    }

    // Do not synthesize too large constants. Codegen will just memcpy them, which we'd like to
    // avoid.
    // But we *do* want to synthesize any size constant if it is entirely uninit because that
    // benefits codegen, which has special handling for them.
    if !op.is_immediate_uninit()
        && !matches!(op.layout.backend_repr, BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..))
    {
        return None;
    }

    // If this constant has scalar ABI, return it as a `ConstValue::Scalar`.
    if let BackendRepr::Scalar(abi::Scalar::Initialized { .. }) = op.layout.backend_repr
        && let Some(scalar) = ecx.read_scalar(op).discard_err()
    {
        if !scalar.try_to_scalar_int().is_ok() {
            // Check that we do not leak a pointer.
            // Those pointers may lose part of their identity in codegen.
            // FIXME: remove this hack once https://github.com/rust-lang/rust/issues/128775 is fixed.
            return None;
        }
        return Some(ConstValue::Scalar(scalar));
    }

    // If this constant is already represented as an `Allocation`,
    // try putting it into global memory to return it.
    if let Either::Left(mplace) = op.as_mplace_or_imm() {
        let (size, _align) = ecx.size_and_align_of_val(&mplace).discard_err()??;

        // Do not try interning a value that contains provenance.
        // Due to https://github.com/rust-lang/rust/issues/128775, doing so could lead to bugs.
        // FIXME: remove this hack once that issue is fixed.
        let alloc_ref = ecx.get_ptr_alloc(mplace.ptr(), size).discard_err()??;
        if alloc_ref.has_provenance() {
            return None;
        }

        let pointer = mplace.ptr().into_pointer_or_addr().ok()?;
        let (prov, offset) = pointer.prov_and_relative_offset();
        let alloc_id = prov.alloc_id();
        intern_const_alloc_for_constprop(ecx, alloc_id).discard_err()?;

        // `alloc_id` may point to a static. Codegen will choke on an `Indirect` with anything
        // but `GlobalAlloc::Memory`, so do fall through to copying if needed.
        // FIXME: find a way to treat this more uniformly (probably by fixing codegen)
        if let GlobalAlloc::Memory(alloc) = ecx.tcx.global_alloc(alloc_id)
            // Transmuting a constant is just an offset in the allocation. If the alignment of the
            // allocation is not enough, fallback to copying into a properly aligned value.
            && alloc.inner().align >= op.layout.align.abi
        {
            return Some(ConstValue::Indirect { alloc_id, offset });
        }
    }

    // Everything failed: create a new allocation to hold the data.
    let alloc_id =
        ecx.intern_with_temp_alloc(op.layout, |ecx, dest| ecx.copy_op(op, dest)).discard_err()?;
    Some(ConstValue::Indirect { alloc_id, offset: Size::ZERO })
}
1856
impl<'tcx> VnState<'_, '_, 'tcx> {
    /// If either [`Self::try_as_constant`] or [`Self::try_as_place`] succeeds,
    /// returns that result as an [`Operand`].
    fn try_as_operand(&mut self, index: VnIndex, location: Location) -> Option<Operand<'tcx>> {
        if let Some(const_) = self.try_as_constant(index) {
            Some(Operand::Constant(Box::new(const_)))
        } else if let Value::RuntimeChecks(c) = self.get(index) {
            Some(Operand::RuntimeChecks(c))
        } else if let Some(place) = self.try_as_place(index, location, false) {
            // Record the reuse so storage statements for this local get removed.
            self.reused_locals.insert(place.local);
            Some(Operand::Copy(place))
        } else {
            None
        }
    }

    /// If `index` is a `Value::Constant`, return the `Constant` to be put in the MIR.
    fn try_as_constant(&mut self, index: VnIndex) -> Option<ConstOperand<'tcx>> {
        let value = self.get(index);

        // This was already an *evaluated* constant in MIR, do not change it.
        if let Value::Constant { value, disambiguator: None } = value
            && let Const::Val(..) = value
        {
            return Some(ConstOperand { span: DUMMY_SP, user_ty: None, const_: value });
        }

        // Prefer an evaluated form when one can be produced soundly.
        if let Some(value) = self.try_as_evaluated_constant(index) {
            return Some(ConstOperand { span: DUMMY_SP, user_ty: None, const_: value });
        }

        // We failed to provide an evaluated form, fallback to using the unevaluated constant.
        if let Value::Constant { value, disambiguator: None } = value {
            return Some(ConstOperand { span: DUMMY_SP, user_ty: None, const_: value });
        }

        None
    }

    /// Evaluate `index` to a `Const` usable in MIR, if that is possible and
    /// sound to propagate (no unsized values, no leaked provenance).
    fn try_as_evaluated_constant(&mut self, index: VnIndex) -> Option<Const<'tcx>> {
        let op = self.eval_to_const(index)?;
        if op.layout.is_unsized() {
            // Do not attempt to propagate unsized locals.
            return None;
        }

        let value = op_to_prop_const(&mut self.ecx, op)?;

        // Check that we do not leak a pointer.
        // Those pointers may lose part of their identity in codegen.
        // FIXME: remove this hack once https://github.com/rust-lang/rust/issues/128775 is fixed.
        if may_have_provenance(self.tcx, value, op.layout.size) {
            return None;
        }

        Some(Const::Val(value, op.layout.ty))
    }

    /// Construct a place which holds the same value as `index` and for which all locals strictly
    /// dominate `loc`. If you used this place, add its base local to `reused_locals` to remove
    /// storage statements.
    #[instrument(level = "trace", skip(self), ret)]
    fn try_as_place(
        &mut self,
        mut index: VnIndex,
        loc: Location,
        allow_complex_projection: bool,
    ) -> Option<Place<'tcx>> {
        // Projections are collected innermost-first, then reversed once a base
        // local has been found.
        let mut projection = SmallVec::<[PlaceElem<'tcx>; 1]>::new();
        loop {
            if let Some(local) = self.try_as_local(index, loc) {
                projection.reverse();
                let place =
                    Place { local, projection: self.tcx.mk_place_elems(projection.as_slice()) };
                return Some(place);
            } else if projection.last() == Some(&PlaceElem::Deref) {
                // `Deref` can only be the first projection in a place.
                // If we are here, we failed to find a local, and we already have a `Deref`.
                // Trying to add projections will only result in an ill-formed place.
                return None;
            } else if let Value::Projection(pointer, proj) = self.get(index)
                && (allow_complex_projection || proj.is_stable_offset())
                && let Some(proj) = self.try_as_place_elem(self.ty(index), proj, loc)
            {
                if proj == PlaceElem::Deref {
                    // We can introduce a new dereference if the source value cannot be changed in the body.
                    // Dereferencing an immutable argument always gives the same value in the body.
                    match self.get(pointer) {
                        Value::Argument(_)
                            if let Some(Mutability::Not) = self.ty(pointer).ref_mutability() => {}
                        _ => {
                            return None;
                        }
                    }
                }
                projection.push(proj);
                index = pointer;
            } else {
                return None;
            }
        }
    }

    /// If there is a local which is assigned `index`, and its assignment strictly dominates `loc`,
    /// return it. If you used this local, add it to `reused_locals` to remove storage statements.
    fn try_as_local(&mut self, index: VnIndex, loc: Location) -> Option<Local> {
        let other = self.rev_locals.get(index)?;
        other
            .iter()
            .find(|&&other| self.ssa.assignment_dominates(&self.dominators, other, loc))
            .copied()
    }
}
1970
// The driving visitor: walks the body, simplifying places/operands/rvalues in
// place and recording value numbers for SSA assignments as it goes.
impl<'tcx> MutVisitor<'tcx> for VnState<'_, '_, 'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    fn visit_place(&mut self, place: &mut Place<'tcx>, context: PlaceContext, location: Location) {
        // Simplify the projection first, then recurse into the place's parts.
        self.simplify_place_projection(place, location);
        self.super_place(place, context, location);
    }

    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) {
        // May rewrite `operand` in place to a constant or a dominating local.
        self.simplify_operand(operand, location);
        self.super_operand(operand, location);
    }

    fn visit_assign(
        &mut self,
        lhs: &mut Place<'tcx>,
        rvalue: &mut Rvalue<'tcx>,
        location: Location,
    ) {
        self.simplify_place_projection(lhs, location);

        let value = self.simplify_rvalue(lhs, rvalue, location);
        if let Some(value) = value {
            // Prefer replacing the rvalue by a constant; otherwise by a place
            // holding the same value, unless that would be a self-assignment.
            if let Some(const_) = self.try_as_constant(value) {
                *rvalue = Rvalue::Use(Operand::Constant(Box::new(const_)));
            } else if let Some(place) = self.try_as_place(value, location, false)
                && *rvalue != Rvalue::Use(Operand::Move(place))
                && *rvalue != Rvalue::Use(Operand::Copy(place))
            {
                *rvalue = Rvalue::Use(Operand::Copy(place));
                self.reused_locals.insert(place.local);
            }
        }

        if let Some(local) = lhs.as_local()
            && self.ssa.is_ssa(local)
            && let rvalue_ty = rvalue.ty(self.local_decls, self.tcx)
            // FIXME(#112651) `rvalue` may have a subtype to `local`. We can only mark
            // `local` as reusable if we have an exact type match.
            && self.local_decls[local].ty == rvalue_ty
        {
            // Even when simplification failed, record an opaque value so later
            // identical rvalues can reuse this local.
            let value = value.unwrap_or_else(|| self.new_opaque(rvalue_ty));
            self.assign(local, value);
        }
    }

    fn visit_terminator(&mut self, terminator: &mut Terminator<'tcx>, location: Location) {
        if let Terminator { kind: TerminatorKind::Call { destination, .. }, .. } = terminator {
            // A call's result is unknown to this pass: number it as a fresh
            // opaque value so the destination local is still usable.
            if let Some(local) = destination.as_local()
                && self.ssa.is_ssa(local)
            {
                let ty = self.local_decls[local].ty;
                let opaque = self.new_opaque(ty);
                self.assign(local, opaque);
            }
        }
        self.super_terminator(terminator, location);
    }
}
2032
/// Post-pass visitor: for locals whose values GVN reused elsewhere, downgrade
/// `Move` operands to `Copy` and strip their storage markers.
struct StorageRemover<'tcx> {
    tcx: TyCtxt<'tcx>,
    /// Locals reused by GVN; their storage statements must be removed, and
    /// moving out of them is no longer allowed.
    reused_locals: DenseBitSet<Local>,
}
2037
2038impl<'tcx> MutVisitor<'tcx> for StorageRemover<'tcx> {
2039    fn tcx(&self) -> TyCtxt<'tcx> {
2040        self.tcx
2041    }
2042
2043    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, _: Location) {
2044        if let Operand::Move(place) = *operand
2045            && !place.is_indirect_first_projection()
2046            && self.reused_locals.contains(place.local)
2047        {
2048            *operand = Operand::Copy(place);
2049        }
2050    }
2051
2052    fn visit_statement(&mut self, stmt: &mut Statement<'tcx>, loc: Location) {
2053        match stmt.kind {
2054            // When removing storage statements, we need to remove both (#107511).
2055            StatementKind::StorageLive(l) | StatementKind::StorageDead(l)
2056                if self.reused_locals.contains(l) =>
2057            {
2058                stmt.make_nop(true)
2059            }
2060            _ => self.super_statement(stmt, loc),
2061        }
2062    }
2063}