rustc_mir_transform/
gvn.rs

//! Global value numbering.
//!
//! MIR may contain repeated and/or redundant computations. The objective of this pass is to detect
//! such redundancies and re-use the already-computed result when possible.
//!
//! We traverse all assignments `x = rvalue` and operands.
//!
//! For each SSA one, we compute a symbolic representation of the value that is assigned to the SSA
//! local. This symbolic representation is defined by the `Value` enum. Each produced instance of
//! `Value` is interned as a `VnIndex`, which allows us to cheaply detect identical values.
//!
//! From those assignments, we construct a mapping `VnIndex -> Vec<(Local, Location)>` of available
//! values, the locals in which they are stored, and the assignment location.
//!
//! For each non-SSA one, we compute the `VnIndex` of the rvalue. If this `VnIndex` is associated
//! to a constant, we replace the rvalue/operand by that constant. Otherwise, if there is an SSA
//! local `y` associated to this `VnIndex`, and if its definition location strictly dominates the
//! assignment to `x`, we replace the assignment by `x = y`.
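//!
//! For illustration, a sketch of that non-SSA replacement, in the same loose notation as the
//! examples below:
//! ```ignore (MIR)
//! _1 = some rvalue // `_1` is SSA, the rvalue gets VnIndex i
//! // some MIR
//! _2 = some rvalue // `_2` is not SSA, but the rvalue also gets VnIndex i
//! // Since the assignment to `_1` strictly dominates this point, this is replaced by:
//! _2 = _1
//! ```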
//!
//! Opportunistically, this pass also simplifies some `Rvalue`s based on the accumulated knowledge.
//!
//! # Operational semantics
//!
//! Operationally, this pass attempts to prove bitwise equality between locals. Given this MIR:
//! ```ignore (MIR)
//! _a = some value // has VnIndex i
//! // some MIR
//! _b = some other value // also has VnIndex i
//! ```
//!
//! We consider it to be replaceable by:
//! ```ignore (MIR)
//! _a = some value // has VnIndex i
//! // some MIR
//! _c = some other value // also has VnIndex i
//! assume(_a bitwise equal to _c) // follows from having the same VnIndex
//! _b = _a // follows from the `assume`
//! ```
//!
//! Which is simplifiable to:
//! ```ignore (MIR)
//! _a = some value // has VnIndex i
//! // some MIR
//! _b = _a
//! ```
//!
//! # Handling of references
//!
//! We handle references by assigning a different "provenance" index to each Ref/RawPtr rvalue.
//! This ensures that we do not spuriously merge borrows that should not be merged. Meanwhile, we
//! consider all the derefs of an immutable reference to a freeze type to give the same value:
//! ```ignore (MIR)
//! _a = *_b // _b is &Freeze
//! _c = *_b // replaced by _c = _a
//! ```
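//!
//! Conversely, since each borrow gets its own provenance index, two borrows of the same place are
//! not unified (illustrative sketch):
//! ```ignore (MIR)
//! _a = &_x // fresh provenance
//! _b = &_x // different provenance, so this is not replaced by _b = _a
//! ```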
//!
//! # Determinism of constant propagation
//!
//! When registering a new `Value`, we attempt to opportunistically evaluate it as a constant.
//! The evaluated form is inserted in `evaluated` as an `OpTy` or `None` if evaluation failed.
//!
//! The difficulty is the non-deterministic evaluation of MIR constants. Some `Const`s can have a
//! different runtime value each time they are evaluated. This is the case for `Const::Slice`,
//! which yields a new pointer each time it is evaluated, and for constants that contain a fn
//! pointer (an `AllocId` pointing to a `GlobalAlloc::Function`), which may resolve to a different
//! symbol in each codegen unit.
//!
//! Meanwhile, we want to be able to read indirect constants. For instance:
//! ```
//! static A: &'static &'static u8 = &&63;
//! fn foo() -> u8 {
//!     **A // We want to replace by 63.
//! }
//! fn bar() -> u8 {
//!     b"abc"[1] // We want to replace by 'b'.
//! }
//! ```
//!
//! The `Value::Constant` variant stores a possibly unevaluated constant. Evaluating that constant
//! may be non-deterministic. When that happens, we assign a disambiguator to ensure that we do not
//! merge the constants. See `duplicate_slice` test in `gvn.rs`.
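//!
//! As an illustrative sketch, two mentions of the same non-deterministic constant are kept
//! distinct, so they are never unified:
//! ```ignore (MIR)
//! _a = const "foo" // `Value::Constant` with its own disambiguator
//! _b = const "foo" // same `Const`, different disambiguator: not replaced by _b = _a
//! ```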
//!
//! Second, when writing constants in MIR, we do not write `Const::Slice` or `Const`s
//! that contain `AllocId`s.

use std::borrow::Cow;
use std::hash::{Hash, Hasher};

use either::Either;
use hashbrown::hash_table::{Entry, HashTable};
use itertools::Itertools as _;
use rustc_abi::{self as abi, BackendRepr, FIRST_VARIANT, FieldIdx, Primitive, Size, VariantIdx};
use rustc_const_eval::const_eval::DummyMachine;
use rustc_const_eval::interpret::{
    ImmTy, Immediate, InterpCx, MemPlaceMeta, MemoryKind, OpTy, Projectable, Scalar,
    intern_const_alloc_for_constprop,
};
use rustc_data_structures::fx::FxHasher;
use rustc_data_structures::graph::dominators::Dominators;
use rustc_hir::def::DefKind;
use rustc_index::bit_set::DenseBitSet;
use rustc_index::{IndexVec, newtype_index};
use rustc_middle::bug;
use rustc_middle::mir::interpret::GlobalAlloc;
use rustc_middle::mir::visit::*;
use rustc_middle::mir::*;
use rustc_middle::ty::layout::HasTypingEnv;
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_span::DUMMY_SP;
use smallvec::SmallVec;
use tracing::{debug, instrument, trace};

use crate::ssa::SsaLocals;

pub(super) struct GVN;

impl<'tcx> crate::MirPass<'tcx> for GVN {
    fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
        sess.mir_opt_level() >= 2
    }

    #[instrument(level = "trace", skip(self, tcx, body))]
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        debug!(def_id = ?body.source.def_id());

        let typing_env = body.typing_env(tcx);
        let ssa = SsaLocals::new(tcx, body, typing_env);
        // Clone dominators because we need them while mutating the body.
        let dominators = body.basic_blocks.dominators().clone();

        let mut state = VnState::new(tcx, body, typing_env, &ssa, dominators, &body.local_decls);

        for local in body.args_iter().filter(|&local| ssa.is_ssa(local)) {
            let opaque = state.new_opaque(body.local_decls[local].ty);
            state.assign(local, opaque);
        }

        let reverse_postorder = body.basic_blocks.reverse_postorder().to_vec();
        for bb in reverse_postorder {
            let data = &mut body.basic_blocks.as_mut_preserves_cfg()[bb];
            state.visit_basic_block_data(bb, data);
        }

        // For each local that is reused (`y` above), we remove its storage statements to avoid any
        // difficulty. Those locals are SSA, so they should be easy for LLVM to optimize without
        // storage statements.
        StorageRemover { tcx, reused_locals: state.reused_locals }.visit_body_preserves_cfg(body);
    }

    fn is_required(&self) -> bool {
        false
    }
}

newtype_index! {
    /// This represents a `Value` in the symbolic execution.
    #[debug_format = "_v{}"]
    struct VnIndex {}
}

/// Marker type to forbid hashing and comparing opaque values.
/// This struct should only be constructed by `ValueSet::insert_unique` to ensure we use that
/// method to create non-unifiable values. It will ICE if used in `ValueSet::insert`.
#[derive(Copy, Clone, Debug, Eq)]
struct VnOpaque;
impl PartialEq for VnOpaque {
    fn eq(&self, _: &VnOpaque) -> bool {
        // ICE if we try to compare unique values
        unreachable!()
    }
}
impl Hash for VnOpaque {
    fn hash<T: Hasher>(&self, _: &mut T) {
        // ICE if we try to hash unique values
        unreachable!()
    }
}

#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
enum AddressKind {
    Ref(BorrowKind),
    Address(RawPtrKind),
}

#[derive(Debug, PartialEq, Eq, Hash)]
enum Value<'tcx> {
    // Root values.
    /// Used to represent values we know nothing about.
    /// Each one is created by `new_opaque` through `ValueSet::insert_unique`, so it is never
    /// unified with any other value.
    Opaque(VnOpaque),
    /// Evaluated or unevaluated constant value.
    Constant {
        value: Const<'tcx>,
        /// Some constants do not have a deterministic value. To avoid merging two instances of the
        /// same `Const`, we assign them an additional disambiguator.
        // `disambiguator` is `None` iff the constant is deterministic.
        disambiguator: Option<VnOpaque>,
    },

    // Aggregates.
    /// An aggregate value, either tuple/closure/struct/enum.
    /// This does not contain unions, as we cannot reason with the value.
    Aggregate(VariantIdx, Vec<VnIndex>),
    /// A raw pointer aggregate built from a thin pointer and metadata.
    RawPtr {
        /// Thin pointer component. This is field 0 in MIR.
        pointer: VnIndex,
        /// Metadata component. This is field 1 in MIR.
        metadata: VnIndex,
    },
    /// This corresponds to a `[value; count]` expression.
    Repeat(VnIndex, ty::Const<'tcx>),
    /// The address of a place.
    Address {
        place: Place<'tcx>,
        kind: AddressKind,
        /// Give each borrow and pointer a different provenance, so we don't merge them.
        provenance: VnOpaque,
    },

    // Extractions.
    /// This is the *value* obtained by projecting another value.
    Projection(VnIndex, ProjectionElem<VnIndex, ()>),
    /// Discriminant of the given value.
    Discriminant(VnIndex),

    // Operations.
    NullaryOp(NullOp<'tcx>, Ty<'tcx>),
    UnaryOp(UnOp, VnIndex),
    BinaryOp(BinOp, VnIndex, VnIndex),
    Cast {
        kind: CastKind,
        value: VnIndex,
    },
}

/// Stores and deduplicates pairs of `(Value, Ty)` into `VnIndex`-numbered values.
///
/// This data structure is mostly a partial reimplementation of `FxIndexMap<VnIndex, (Value, Ty)>`.
/// We do not use a regular `FxIndexMap` so we can skip hashing values that are unique by
/// construction, like opaque values, addresses with provenance, and non-deterministic constants.
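///
/// A rough sketch of the intended usage (illustrative only, not a doctest; types elided):
/// ```ignore (illustrative)
/// let mut set = ValueSet::new(16);
/// // Deduplicated values: inserting the same `(Value, Ty)` pair twice yields the same `VnIndex`.
/// let (a, _) = set.insert(ty, Value::NullaryOp(NullOp::UbChecks, ty));
/// let (b, is_new) = set.insert(ty, Value::NullaryOp(NullOp::UbChecks, ty));
/// assert!(a == b && !is_new);
/// // Unique-by-construction values: each call returns a fresh `VnIndex`, without hashing.
/// let o1 = set.insert_unique(ty, Value::Opaque);
/// let o2 = set.insert_unique(ty, Value::Opaque);
/// assert!(o1 != o2);
/// ```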
struct ValueSet<'tcx> {
    indices: HashTable<VnIndex>,
    hashes: IndexVec<VnIndex, u64>,
    values: IndexVec<VnIndex, Value<'tcx>>,
    types: IndexVec<VnIndex, Ty<'tcx>>,
}

impl<'tcx> ValueSet<'tcx> {
    fn new(num_values: usize) -> ValueSet<'tcx> {
        ValueSet {
            indices: HashTable::with_capacity(num_values),
            hashes: IndexVec::with_capacity(num_values),
            values: IndexVec::with_capacity(num_values),
            types: IndexVec::with_capacity(num_values),
        }
    }

    /// Insert a `(Value, Ty)` pair without hashing or deduplication.
    /// This always creates a new `VnIndex`.
    #[inline]
    fn insert_unique(
        &mut self,
        ty: Ty<'tcx>,
        value: impl FnOnce(VnOpaque) -> Value<'tcx>,
    ) -> VnIndex {
        let value = value(VnOpaque);

        debug_assert!(match value {
            Value::Opaque(_) | Value::Address { .. } => true,
            Value::Constant { disambiguator, .. } => disambiguator.is_some(),
            _ => false,
        });

        let index = self.hashes.push(0);
        let _index = self.types.push(ty);
        debug_assert_eq!(index, _index);
        let _index = self.values.push(value);
        debug_assert_eq!(index, _index);
        index
    }

    /// Insert a `(Value, Ty)` pair to be deduplicated.
    /// Returns `true` as the second tuple field if this value did not exist previously.
    #[allow(rustc::pass_by_value)] // closures take `&VnIndex`
    fn insert(&mut self, ty: Ty<'tcx>, value: Value<'tcx>) -> (VnIndex, bool) {
        debug_assert!(match value {
            Value::Opaque(_) | Value::Address { .. } => false,
            Value::Constant { disambiguator, .. } => disambiguator.is_none(),
            _ => true,
        });

        let hash: u64 = {
            let mut h = FxHasher::default();
            value.hash(&mut h);
            ty.hash(&mut h);
            h.finish()
        };

        let eq = |index: &VnIndex| self.values[*index] == value && self.types[*index] == ty;
        let hasher = |index: &VnIndex| self.hashes[*index];
        match self.indices.entry(hash, eq, hasher) {
            Entry::Occupied(entry) => {
                let index = *entry.get();
                (index, false)
            }
            Entry::Vacant(entry) => {
                let index = self.hashes.push(hash);
                entry.insert(index);
                let _index = self.values.push(value);
                debug_assert_eq!(index, _index);
                let _index = self.types.push(ty);
                debug_assert_eq!(index, _index);
                (index, true)
            }
        }
    }

    /// Return the `Value` associated with the given `VnIndex`.
    #[inline]
    fn value(&self, index: VnIndex) -> &Value<'tcx> {
        &self.values[index]
    }

    /// Return the type associated with the given `VnIndex`.
    #[inline]
    fn ty(&self, index: VnIndex) -> Ty<'tcx> {
        self.types[index]
    }

    /// Replace the value associated with `index` with an opaque value.
    #[inline]
    fn forget(&mut self, index: VnIndex) {
        self.values[index] = Value::Opaque(VnOpaque);
    }
}

struct VnState<'body, 'tcx> {
    tcx: TyCtxt<'tcx>,
    ecx: InterpCx<'tcx, DummyMachine>,
    local_decls: &'body LocalDecls<'tcx>,
    is_coroutine: bool,
    /// Value stored in each local.
    locals: IndexVec<Local, Option<VnIndex>>,
    /// Locals that are assigned that value.
    // This vector does not hold all the values of `VnIndex` that we create.
    rev_locals: IndexVec<VnIndex, SmallVec<[Local; 1]>>,
    values: ValueSet<'tcx>,
    /// Values evaluated as constants if possible.
    evaluated: IndexVec<VnIndex, Option<OpTy<'tcx>>>,
    /// Cache the deref values.
    derefs: Vec<VnIndex>,
    ssa: &'body SsaLocals,
    dominators: Dominators<BasicBlock>,
    reused_locals: DenseBitSet<Local>,
}

impl<'body, 'tcx> VnState<'body, 'tcx> {
    fn new(
        tcx: TyCtxt<'tcx>,
        body: &Body<'tcx>,
        typing_env: ty::TypingEnv<'tcx>,
        ssa: &'body SsaLocals,
        dominators: Dominators<BasicBlock>,
        local_decls: &'body LocalDecls<'tcx>,
    ) -> Self {
        // Compute a rough estimate of the number of values in the body from the number of
        // statements. This is meant to reduce the number of allocations, but it's all right if
        // we miss the exact amount. We estimate based on 2 values per statement (one in LHS and
        // one in RHS) and 4 values per terminator (for call operands).
        let num_values =
            2 * body.basic_blocks.iter().map(|bbdata| bbdata.statements.len()).sum::<usize>()
                + 4 * body.basic_blocks.len();
        VnState {
            tcx,
            ecx: InterpCx::new(tcx, DUMMY_SP, typing_env, DummyMachine),
            local_decls,
            is_coroutine: body.coroutine.is_some(),
            locals: IndexVec::from_elem(None, local_decls),
            rev_locals: IndexVec::with_capacity(num_values),
            values: ValueSet::new(num_values),
            evaluated: IndexVec::with_capacity(num_values),
            derefs: Vec::new(),
            ssa,
            dominators,
            reused_locals: DenseBitSet::new_empty(local_decls.len()),
        }
    }

    fn typing_env(&self) -> ty::TypingEnv<'tcx> {
        self.ecx.typing_env()
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn insert(&mut self, ty: Ty<'tcx>, value: Value<'tcx>) -> VnIndex {
        let (index, new) = self.values.insert(ty, value);
        if new {
            // Grow `evaluated` and `rev_locals` here to amortize the allocations.
            let evaluated = self.eval_to_const(index);
            let _index = self.evaluated.push(evaluated);
            debug_assert_eq!(index, _index);
            let _index = self.rev_locals.push(SmallVec::new());
            debug_assert_eq!(index, _index);
        }
        index
    }

    /// Create a new `Value` for which we have no information at all, except that it is distinct
    /// from all the others.
    #[instrument(level = "trace", skip(self), ret)]
    fn new_opaque(&mut self, ty: Ty<'tcx>) -> VnIndex {
        let index = self.values.insert_unique(ty, Value::Opaque);
        let _index = self.evaluated.push(None);
        debug_assert_eq!(index, _index);
        let _index = self.rev_locals.push(SmallVec::new());
        debug_assert_eq!(index, _index);
        index
    }

    /// Create a new `Value::Address` distinct from all the others.
    #[instrument(level = "trace", skip(self), ret)]
    fn new_pointer(&mut self, place: Place<'tcx>, kind: AddressKind) -> VnIndex {
        let pty = place.ty(self.local_decls, self.tcx).ty;
        let ty = match kind {
            AddressKind::Ref(bk) => {
                Ty::new_ref(self.tcx, self.tcx.lifetimes.re_erased, pty, bk.to_mutbl_lossy())
            }
            AddressKind::Address(mutbl) => Ty::new_ptr(self.tcx, pty, mutbl.to_mutbl_lossy()),
        };
        let index =
            self.values.insert_unique(ty, |provenance| Value::Address { place, kind, provenance });
        let evaluated = self.eval_to_const(index);
        let _index = self.evaluated.push(evaluated);
        debug_assert_eq!(index, _index);
        let _index = self.rev_locals.push(SmallVec::new());
        debug_assert_eq!(index, _index);
        index
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn insert_constant(&mut self, value: Const<'tcx>) -> VnIndex {
        let (index, new) = if value.is_deterministic() {
            // The constant is deterministic, no need to disambiguate.
            let constant = Value::Constant { value, disambiguator: None };
            self.values.insert(value.ty(), constant)
        } else {
            // Multiple mentions of this constant will yield different values,
            // so assign a different `disambiguator` to ensure they do not get the same `VnIndex`.
            let index = self.values.insert_unique(value.ty(), |disambiguator| Value::Constant {
                value,
                disambiguator: Some(disambiguator),
            });
            (index, true)
        };
        if new {
            let evaluated = self.eval_to_const(index);
            let _index = self.evaluated.push(evaluated);
            debug_assert_eq!(index, _index);
            let _index = self.rev_locals.push(SmallVec::new());
            debug_assert_eq!(index, _index);
        }
        index
    }

    #[inline]
    fn get(&self, index: VnIndex) -> &Value<'tcx> {
        self.values.value(index)
    }

    #[inline]
    fn ty(&self, index: VnIndex) -> Ty<'tcx> {
        self.values.ty(index)
    }

    /// Record that `local` is assigned `value`. `local` must be SSA.
    #[instrument(level = "trace", skip(self))]
    fn assign(&mut self, local: Local, value: VnIndex) {
        debug_assert!(self.ssa.is_ssa(local));
        self.locals[local] = Some(value);
        self.rev_locals[value].push(local);
    }

    fn insert_bool(&mut self, flag: bool) -> VnIndex {
        // Booleans are deterministic.
        let value = Const::from_bool(self.tcx, flag);
        debug_assert!(value.is_deterministic());
        self.insert(self.tcx.types.bool, Value::Constant { value, disambiguator: None })
    }

    fn insert_scalar(&mut self, ty: Ty<'tcx>, scalar: Scalar) -> VnIndex {
        // Scalars are deterministic.
        let value = Const::from_scalar(self.tcx, scalar, ty);
        debug_assert!(value.is_deterministic());
        self.insert(ty, Value::Constant { value, disambiguator: None })
    }

    fn insert_tuple(&mut self, ty: Ty<'tcx>, values: Vec<VnIndex>) -> VnIndex {
        self.insert(ty, Value::Aggregate(VariantIdx::ZERO, values))
    }

    fn insert_deref(&mut self, ty: Ty<'tcx>, value: VnIndex) -> VnIndex {
        let value = self.insert(ty, Value::Projection(value, ProjectionElem::Deref));
        self.derefs.push(value);
        value
    }

    fn invalidate_derefs(&mut self) {
        for deref in std::mem::take(&mut self.derefs) {
            self.values.forget(deref);
        }
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn eval_to_const(&mut self, value: VnIndex) -> Option<OpTy<'tcx>> {
        use Value::*;
        let ty = self.ty(value);
        // Avoid computing layouts inside a coroutine, as that can cause cycles.
        let ty = if !self.is_coroutine || ty.is_scalar() {
            self.ecx.layout_of(ty).ok()?
        } else {
            return None;
        };
        let op = match *self.get(value) {
            _ if ty.is_zst() => ImmTy::uninit(ty).into(),

            Opaque(_) => return None,
            // Do not bother evaluating repeat expressions. This would uselessly consume memory.
            Repeat(..) => return None,

            Constant { ref value, disambiguator: _ } => {
                self.ecx.eval_mir_constant(value, DUMMY_SP, None).discard_err()?
            }
            Aggregate(variant, ref fields) => {
                let fields = fields
                    .iter()
                    .map(|&f| self.evaluated[f].as_ref())
                    .collect::<Option<Vec<_>>>()?;
                let variant = if ty.ty.is_enum() { Some(variant) } else { None };
                if matches!(ty.backend_repr, BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..))
                {
                    let dest = self.ecx.allocate(ty, MemoryKind::Stack).discard_err()?;
                    let variant_dest = if let Some(variant) = variant {
                        self.ecx.project_downcast(&dest, variant).discard_err()?
                    } else {
                        dest.clone()
                    };
                    for (field_index, op) in fields.into_iter().enumerate() {
                        let field_dest = self
                            .ecx
                            .project_field(&variant_dest, FieldIdx::from_usize(field_index))
                            .discard_err()?;
                        self.ecx.copy_op(op, &field_dest).discard_err()?;
                    }
                    self.ecx
                        .write_discriminant(variant.unwrap_or(FIRST_VARIANT), &dest)
                        .discard_err()?;
                    self.ecx
                        .alloc_mark_immutable(dest.ptr().provenance.unwrap().alloc_id())
                        .discard_err()?;
                    dest.into()
                } else {
                    return None;
                }
            }
            RawPtr { pointer, metadata } => {
                let pointer = self.evaluated[pointer].as_ref()?;
                let metadata = self.evaluated[metadata].as_ref()?;

                // Pointers don't have fields, so don't `project_field` them.
                let data = self.ecx.read_pointer(pointer).discard_err()?;
                let meta = if metadata.layout.is_zst() {
                    MemPlaceMeta::None
                } else {
                    MemPlaceMeta::Meta(self.ecx.read_scalar(metadata).discard_err()?)
                };
                let ptr_imm = Immediate::new_pointer_with_meta(data, meta, &self.ecx);
                ImmTy::from_immediate(ptr_imm, ty).into()
            }

            Projection(base, elem) => {
                let base = self.evaluated[base].as_ref()?;
                // `Index` by constants should have been replaced by `ConstantIndex` by
                // `simplify_place_projection`.
                let elem = elem.try_map(|_| None, |()| ty.ty)?;
                self.ecx.project(base, elem).discard_err()?
            }
            Address { place, kind: _, provenance: _ } => {
                if !place.is_indirect_first_projection() {
                    return None;
                }
                let local = self.locals[place.local]?;
                let pointer = self.evaluated[local].as_ref()?;
                let mut mplace = self.ecx.deref_pointer(pointer).discard_err()?;
                for elem in place.projection.iter().skip(1) {
                    // `Index` by constants should have been replaced by `ConstantIndex` by
                    // `simplify_place_projection`.
                    let elem = elem.try_map(|_| None, |ty| ty)?;
                    mplace = self.ecx.project(&mplace, elem).discard_err()?;
                }
                let pointer = mplace.to_ref(&self.ecx);
                ImmTy::from_immediate(pointer, ty).into()
            }

            Discriminant(base) => {
                let base = self.evaluated[base].as_ref()?;
                let variant = self.ecx.read_discriminant(base).discard_err()?;
                let discr_value =
                    self.ecx.discriminant_for_variant(base.layout.ty, variant).discard_err()?;
                discr_value.into()
            }
            NullaryOp(null_op, arg_ty) => {
                let arg_layout = self.ecx.layout_of(arg_ty).ok()?;
                if let NullOp::SizeOf | NullOp::AlignOf = null_op
                    && arg_layout.is_unsized()
                {
                    return None;
                }
                let val = match null_op {
                    NullOp::SizeOf => arg_layout.size.bytes(),
                    NullOp::AlignOf => arg_layout.align.bytes(),
                    NullOp::OffsetOf(fields) => self
                        .ecx
                        .tcx
                        .offset_of_subfield(self.typing_env(), arg_layout, fields.iter())
                        .bytes(),
                    NullOp::UbChecks => return None,
                    NullOp::ContractChecks => return None,
                };
                ImmTy::from_uint(val, ty).into()
            }
            UnaryOp(un_op, operand) => {
                let operand = self.evaluated[operand].as_ref()?;
                let operand = self.ecx.read_immediate(operand).discard_err()?;
                let val = self.ecx.unary_op(un_op, &operand).discard_err()?;
                val.into()
            }
            BinaryOp(bin_op, lhs, rhs) => {
                let lhs = self.evaluated[lhs].as_ref()?;
                let lhs = self.ecx.read_immediate(lhs).discard_err()?;
                let rhs = self.evaluated[rhs].as_ref()?;
                let rhs = self.ecx.read_immediate(rhs).discard_err()?;
                let val = self.ecx.binary_op(bin_op, &lhs, &rhs).discard_err()?;
                val.into()
            }
            Cast { kind, value } => match kind {
                CastKind::IntToInt | CastKind::IntToFloat => {
                    let value = self.evaluated[value].as_ref()?;
                    let value = self.ecx.read_immediate(value).discard_err()?;
                    let res = self.ecx.int_to_int_or_float(&value, ty).discard_err()?;
                    res.into()
                }
                CastKind::FloatToFloat | CastKind::FloatToInt => {
                    let value = self.evaluated[value].as_ref()?;
                    let value = self.ecx.read_immediate(value).discard_err()?;
                    let res = self.ecx.float_to_float_or_int(&value, ty).discard_err()?;
                    res.into()
                }
                CastKind::Transmute => {
                    let value = self.evaluated[value].as_ref()?;
                    // `offset` for immediates generally only supports projections that match the
                    // type of the immediate. However, as a HACK, we exploit that it can also do
                    // limited transmutes: it only works between types with the same layout, and
                    // cannot transmute pointers to integers.
                    if value.as_mplace_or_imm().is_right() {
                        let can_transmute = match (value.layout.backend_repr, ty.backend_repr) {
                            (BackendRepr::Scalar(s1), BackendRepr::Scalar(s2)) => {
                                s1.size(&self.ecx) == s2.size(&self.ecx)
                                    && !matches!(s1.primitive(), Primitive::Pointer(..))
                            }
                            (BackendRepr::ScalarPair(a1, b1), BackendRepr::ScalarPair(a2, b2)) => {
                                a1.size(&self.ecx) == a2.size(&self.ecx) &&
                                b1.size(&self.ecx) == b2.size(&self.ecx) &&
                                // The alignment of the second component determines its offset, so that also needs to match.
                                b1.align(&self.ecx) == b2.align(&self.ecx) &&
                                // None of the inputs may be a pointer.
                                !matches!(a1.primitive(), Primitive::Pointer(..))
                                    && !matches!(b1.primitive(), Primitive::Pointer(..))
                            }
                            _ => false,
                        };
                        if !can_transmute {
                            return None;
                        }
                    }
                    value.offset(Size::ZERO, ty, &self.ecx).discard_err()?
                }
                CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _) => {
                    let src = self.evaluated[value].as_ref()?;
                    let dest = self.ecx.allocate(ty, MemoryKind::Stack).discard_err()?;
                    self.ecx.unsize_into(src, ty, &dest).discard_err()?;
                    self.ecx
                        .alloc_mark_immutable(dest.ptr().provenance.unwrap().alloc_id())
                        .discard_err()?;
                    dest.into()
                }
                CastKind::FnPtrToPtr | CastKind::PtrToPtr => {
                    let src = self.evaluated[value].as_ref()?;
                    let src = self.ecx.read_immediate(src).discard_err()?;
                    let ret = self.ecx.ptr_to_ptr(&src, ty).discard_err()?;
                    ret.into()
                }
                CastKind::PointerCoercion(ty::adjustment::PointerCoercion::UnsafeFnPointer, _) => {
                    let src = self.evaluated[value].as_ref()?;
                    let src = self.ecx.read_immediate(src).discard_err()?;
                    ImmTy::from_immediate(*src, ty).into()
                }
                _ => return None,
            },
        };
        Some(op)
    }

    fn project(
        &mut self,
        place_ty: PlaceTy<'tcx>,
        value: VnIndex,
        proj: PlaceElem<'tcx>,
        from_non_ssa_index: &mut bool,
    ) -> Option<(PlaceTy<'tcx>, VnIndex)> {
        let projection_ty = place_ty.projection_ty(self.tcx, proj);
        let proj = match proj {
            ProjectionElem::Deref => {
                if let Some(Mutability::Not) = place_ty.ty.ref_mutability()
                    && projection_ty.ty.is_freeze(self.tcx, self.typing_env())
                {
                    // An immutable borrow `_x` always points to the same value for the
                    // lifetime of the borrow, so we can merge all instances of `*_x`.
                    return Some((projection_ty, self.insert_deref(projection_ty.ty, value)));
                } else {
                    return None;
                }
            }
            ProjectionElem::Downcast(name, index) => ProjectionElem::Downcast(name, index),
            ProjectionElem::Field(f, _) => {
                if let Value::Aggregate(_, fields) = self.get(value) {
                    return Some((projection_ty, fields[f.as_usize()]));
                } else if let Value::Projection(outer_value, ProjectionElem::Downcast(_, read_variant)) = self.get(value)
                    && let Value::Aggregate(written_variant, fields) = self.get(*outer_value)
                    // This pass is not aware of control-flow, so we do not know whether the
                    // replacement we are doing is actually reachable. We could be in any arm of
                    // ```
                    // match Some(x) {
                    //     Some(y) => /* stuff */,
                    //     None => /* other */,
                    // }
                    // ```
                    //
                    // In surface rust, the current statement would be unreachable.
                    //
                    // However, from the reference chapter on enums and RFC 2195,
                    // accessing the wrong variant is not UB if the enum has repr.
                    // So it's not impossible for a series of MIR opts to generate
                    // a downcast to an inactive variant.
                    && written_variant == read_variant
                {
                    return Some((projection_ty, fields[f.as_usize()]));
                }
                ProjectionElem::Field(f, ())
            }
            ProjectionElem::Index(idx) => {
                if let Value::Repeat(inner, _) = self.get(value) {
                    *from_non_ssa_index |= self.locals[idx].is_none();
                    return Some((projection_ty, *inner));
                }
                let idx = self.locals[idx]?;
                ProjectionElem::Index(idx)
            }
            ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
                match self.get(value) {
                    Value::Repeat(inner, _) => {
                        return Some((projection_ty, *inner));
                    }
                    Value::Aggregate(_, operands) => {
                        let offset = if from_end {
                            operands.len() - offset as usize
                        } else {
                            offset as usize
                        };
                        let value = operands.get(offset).copied()?;
                        return Some((projection_ty, value));
                    }
                    _ => {}
                };
                ProjectionElem::ConstantIndex { offset, min_length, from_end }
            }
            ProjectionElem::Subslice { from, to, from_end } => {
                ProjectionElem::Subslice { from, to, from_end }
            }
            ProjectionElem::OpaqueCast(_) => ProjectionElem::OpaqueCast(()),
            ProjectionElem::Subtype(_) => ProjectionElem::Subtype(()),
            ProjectionElem::UnwrapUnsafeBinder(_) => ProjectionElem::UnwrapUnsafeBinder(()),
        };

        let value = self.insert(projection_ty.ty, Value::Projection(value, proj));
        Some((projection_ty, value))
    }

    /// Simplify the projection chain if we know better.
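    ///
    /// For example (illustrative MIR): if the index local `_2` is known to hold the constant `3`,
    /// an `Index` projection such as `(*_1)[_2]` can be rewritten into the `ConstantIndex` form
    /// `(*_1)[3 of 4]`.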
    #[instrument(level = "trace", skip(self))]
    fn simplify_place_projection(&mut self, place: &mut Place<'tcx>, location: Location) {
        // If the projection is indirect, we treat the local as a value, so we can replace it with
        // another local.
        if place.is_indirect_first_projection()
            && let Some(base) = self.locals[place.local]
            && let Some(new_local) = self.try_as_local(base, location)
            && place.local != new_local
        {
            place.local = new_local;
            self.reused_locals.insert(new_local);
        }

        let mut projection = Cow::Borrowed(&place.projection[..]);

        for i in 0..projection.len() {
            let elem = projection[i];
            if let ProjectionElem::Index(idx_local) = elem
                && let Some(idx) = self.locals[idx_local]
            {
                if let Some(offset) = self.evaluated[idx].as_ref()
                    && let Some(offset) = self.ecx.read_target_usize(offset).discard_err()
                    && let Some(min_length) = offset.checked_add(1)
                {
                    projection.to_mut()[i] =
                        ProjectionElem::ConstantIndex { offset, min_length, from_end: false };
                } else if let Some(new_idx_local) = self.try_as_local(idx, location)
                    && idx_local != new_idx_local
                {
                    projection.to_mut()[i] = ProjectionElem::Index(new_idx_local);
                    self.reused_locals.insert(new_idx_local);
                }
            }
        }

        if projection.is_owned() {
            place.projection = self.tcx.mk_place_elems(&projection);
        }

        trace!(?place);
    }

    /// Represent the *value* which would be read from `place`, and point `place` to a preexisting
    /// place with the same value (if that already exists).
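    ///
    /// For example (illustrative MIR): if `_5 = copy (_1.0)` was seen earlier and `_5` is an SSA
    /// local whose definition dominates the current location, a later read of `(_1.0).1` can be
    /// redirected to read `(_5.1)` instead.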
    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_place_value(
        &mut self,
        place: &mut Place<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        self.simplify_place_projection(place, location);

        // Invariant: `place` and `place_ref` point to the same value, even if they point to
        // different memory locations.
        let mut place_ref = place.as_ref();

        // Invariant: `value` holds the value of `place` up to the `index`th projection, excluded.
        let mut value = self.locals[place.local]?;
        // Invariant: `value` has type `place_ty`, with optional downcast variant if needed.
        let mut place_ty = PlaceTy::from_ty(self.local_decls[place.local].ty);
        let mut from_non_ssa_index = false;
        for (index, proj) in place.projection.iter().enumerate() {
            if let Value::Projection(pointer, ProjectionElem::Deref) = *self.get(value)
                && let Value::Address { place: mut pointee, kind, .. } = *self.get(pointer)
                && let AddressKind::Ref(BorrowKind::Shared) = kind
                && let Some(v) = self.simplify_place_value(&mut pointee, location)
            {
                value = v;
                // `pointee` holds a `Place`, so `ProjectionElem::Index` holds a `Local`.
                // That local is SSA, but we otherwise have no guarantee on that local's value at
                // the current location compared to its value where `pointee` was borrowed.
                if pointee.projection.iter().all(|elem| !matches!(elem, ProjectionElem::Index(_))) {
                    place_ref =
                        pointee.project_deeper(&place.projection[index..], self.tcx).as_ref();
                }
            }
            if let Some(local) = self.try_as_local(value, location) {
                // Both `local` and `Place { local: place.local, projection: projection[..index] }`
                // hold the same value. Therefore, the following place holds the value of the
                // original `place`.
                place_ref = PlaceRef { local, projection: &place.projection[index..] };
            }

            (place_ty, value) = self.project(place_ty, value, proj, &mut from_non_ssa_index)?;
        }

        if let Value::Projection(pointer, ProjectionElem::Deref) = *self.get(value)
            && let Value::Address { place: mut pointee, kind, .. } = *self.get(pointer)
            && let AddressKind::Ref(BorrowKind::Shared) = kind
            && let Some(v) = self.simplify_place_value(&mut pointee, location)
        {
            value = v;
            // `pointee` holds a `Place`, so `ProjectionElem::Index` holds a `Local`.
            // That local is SSA, but we otherwise have no guarantee on that local's value at
            // the current location compared to its value where `pointee` was borrowed.
            if pointee.projection.iter().all(|elem| !matches!(elem, ProjectionElem::Index(_))) {
                place_ref = pointee.project_deeper(&[], self.tcx).as_ref();
            }
        }
        if let Some(new_local) = self.try_as_local(value, location) {
            place_ref = PlaceRef { local: new_local, projection: &[] };
        } else if from_non_ssa_index {
            // If access to non-SSA locals is unavoidable, bail out.
            return None;
        }

        if place_ref.local != place.local || place_ref.projection.len() < place.projection.len() {
            // By the invariant on `place_ref`.
            *place = place_ref.project_deeper(&[], self.tcx);
            self.reused_locals.insert(place_ref.local);
        }

        Some(value)
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_operand(
        &mut self,
        operand: &mut Operand<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        match *operand {
            Operand::Constant(ref constant) => Some(self.insert_constant(constant.const_)),
            Operand::Copy(ref mut place) | Operand::Move(ref mut place) => {
                let value = self.simplify_place_value(place, location)?;
                if let Some(const_) = self.try_as_constant(value) {
                    *operand = Operand::Constant(Box::new(const_));
                }
                Some(value)
            }
        }
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_rvalue(
        &mut self,
        lhs: &Place<'tcx>,
        rvalue: &mut Rvalue<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let value = match *rvalue {
            // Forward values.
            Rvalue::Use(ref mut operand) => return self.simplify_operand(operand, location),
            Rvalue::CopyForDeref(place) => {
                let mut operand = Operand::Copy(place);
                let val = self.simplify_operand(&mut operand, location);
                *rvalue = Rvalue::Use(operand);
                return val;
            }

            // Roots.
            Rvalue::Repeat(ref mut op, amount) => {
                let op = self.simplify_operand(op, location)?;
                Value::Repeat(op, amount)
            }
            Rvalue::NullaryOp(op, ty) => Value::NullaryOp(op, ty),
            Rvalue::Aggregate(..) => return self.simplify_aggregate(lhs, rvalue, location),
            Rvalue::Ref(_, borrow_kind, ref mut place) => {
                self.simplify_place_projection(place, location);
                return Some(self.new_pointer(*place, AddressKind::Ref(borrow_kind)));
            }
            Rvalue::RawPtr(mutbl, ref mut place) => {
                self.simplify_place_projection(place, location);
                return Some(self.new_pointer(*place, AddressKind::Address(mutbl)));
            }
            Rvalue::WrapUnsafeBinder(ref mut op, _) => {
                let value = self.simplify_operand(op, location)?;
                Value::Cast { kind: CastKind::Transmute, value }
            }

            // Operations.
            Rvalue::Cast(ref mut kind, ref mut value, to) => {
                return self.simplify_cast(kind, value, to, location);
            }
            Rvalue::BinaryOp(op, box (ref mut lhs, ref mut rhs)) => {
                return self.simplify_binary(op, lhs, rhs, location);
            }
            Rvalue::UnaryOp(op, ref mut arg_op) => {
                return self.simplify_unary(op, arg_op, location);
            }
            Rvalue::Discriminant(ref mut place) => {
                let place = self.simplify_place_value(place, location)?;
                if let Some(discr) = self.simplify_discriminant(place) {
                    return Some(discr);
                }
                Value::Discriminant(place)
            }

            // Unsupported values.
            Rvalue::ThreadLocalRef(..) | Rvalue::ShallowInitBox(..) => return None,
        };
        let ty = rvalue.ty(self.local_decls, self.tcx);
        Some(self.insert(ty, value))
    }

    fn simplify_discriminant(&mut self, place: VnIndex) -> Option<VnIndex> {
        let enum_ty = self.ty(place);
        if enum_ty.is_enum()
            && let Value::Aggregate(variant, _) = *self.get(place)
        {
            let discr = self.ecx.discriminant_for_variant(enum_ty, variant).discard_err()?;
            return Some(self.insert_scalar(discr.layout.ty, discr.to_scalar()));
        }

        None
    }

    fn try_as_place_elem(
        &mut self,
        ty: Ty<'tcx>,
        proj: ProjectionElem<VnIndex, ()>,
        loc: Location,
    ) -> Option<PlaceElem<'tcx>> {
        proj.try_map(
            |value| {
                let local = self.try_as_local(value, loc)?;
                self.reused_locals.insert(local);
                Some(local)
            },
            |()| ty,
        )
    }

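    /// Try to simplify an aggregate whose fields are all, in order, field projections of one and
    /// the same source value into a copy of that value: e.g. rebuilding `Foo { a: x.0, b: x.1 }`
    /// from the value of `x` when the types match.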
    fn simplify_aggregate_to_copy(
        &mut self,
        ty: Ty<'tcx>,
        variant_index: VariantIdx,
        fields: &[VnIndex],
    ) -> Option<VnIndex> {
        let Some(&first_field) = fields.first() else { return None };
        let Value::Projection(copy_from_value, _) = *self.get(first_field) else { return None };

        // All fields must correspond one-to-one and come from the same aggregate value.
        if fields.iter().enumerate().any(|(index, &v)| {
            if let Value::Projection(pointer, ProjectionElem::Field(from_index, _)) = *self.get(v)
                && copy_from_value == pointer
                && from_index.index() == index
            {
                return false;
            }
            true
        }) {
            return None;
        }

        let mut copy_from_local_value = copy_from_value;
        if let Value::Projection(pointer, proj) = *self.get(copy_from_value)
            && let ProjectionElem::Downcast(_, read_variant) = proj
        {
            if variant_index == read_variant {
                // When copying a variant, there is no need to downcast.
                copy_from_local_value = pointer;
            } else {
                // The copied variant must be identical.
                return None;
            }
        }

        // Both must be variants of the same type.
        if self.ty(copy_from_local_value) == ty { Some(copy_from_local_value) } else { None }
    }

    fn simplify_aggregate(
        &mut self,
        lhs: &Place<'tcx>,
        rvalue: &mut Rvalue<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let tcx = self.tcx;
        let ty = rvalue.ty(self.local_decls, tcx);

        let Rvalue::Aggregate(box ref kind, ref mut field_ops) = *rvalue else { bug!() };

        if field_ops.is_empty() {
            let is_zst = match *kind {
                AggregateKind::Array(..)
                | AggregateKind::Tuple
                | AggregateKind::Closure(..)
                | AggregateKind::CoroutineClosure(..) => true,
                // Only enums can be non-ZST.
                AggregateKind::Adt(did, ..) => tcx.def_kind(did) != DefKind::Enum,
                // Coroutines are never ZST, as they at least contain the implicit states.
                AggregateKind::Coroutine(..) => false,
                AggregateKind::RawPtr(..) => bug!("MIR for RawPtr aggregate must have 2 fields"),
            };

            if is_zst {
                return Some(self.insert_constant(Const::zero_sized(ty)));
            }
        }

        let fields: Vec<_> = field_ops
            .iter_mut()
            .map(|op| {
                self.simplify_operand(op, location)
                    .unwrap_or_else(|| self.new_opaque(op.ty(self.local_decls, self.tcx)))
            })
            .collect();

        let variant_index = match *kind {
            AggregateKind::Array(..) | AggregateKind::Tuple => {
                assert!(!field_ops.is_empty());
                FIRST_VARIANT
            }
            AggregateKind::Closure(..)
            | AggregateKind::CoroutineClosure(..)
            | AggregateKind::Coroutine(..) => FIRST_VARIANT,
            AggregateKind::Adt(_, variant_index, _, _, None) => variant_index,
            // Do not track unions.
            AggregateKind::Adt(_, _, _, _, Some(_)) => return None,
            AggregateKind::RawPtr(..) => {
                assert_eq!(field_ops.len(), 2);
                let [mut pointer, metadata] = fields.try_into().unwrap();

                // Any thin pointer of matching mutability is fine as the data pointer.
                let mut was_updated = false;
                while let Value::Cast { kind: CastKind::PtrToPtr, value: cast_value } =
                    self.get(pointer)
                    && let ty::RawPtr(from_pointee_ty, from_mtbl) = self.ty(*cast_value).kind()
                    && let ty::RawPtr(_, output_mtbl) = ty.kind()
                    && from_mtbl == output_mtbl
                    && from_pointee_ty.is_sized(self.tcx, self.typing_env())
                {
                    pointer = *cast_value;
                    was_updated = true;
                }

                if was_updated && let Some(op) = self.try_as_operand(pointer, location) {
                    field_ops[FieldIdx::ZERO] = op;
                }

                return Some(self.insert(ty, Value::RawPtr { pointer, metadata }));
            }
        };

        if ty.is_array()
            && fields.len() > 4
            && let Ok(&first) = fields.iter().all_equal_value()
        {
            let len = ty::Const::from_target_usize(self.tcx, fields.len().try_into().unwrap());
            if let Some(op) = self.try_as_operand(first, location) {
                *rvalue = Rvalue::Repeat(op, len);
            }
            return Some(self.insert(ty, Value::Repeat(first, len)));
        }

        if let Some(value) = self.simplify_aggregate_to_copy(ty, variant_index, &fields) {
            // Allow introducing places with non-constant offsets, as those are still better than
            // reconstructing an aggregate. But avoid creating `*a = copy (*b)`, as they might be
            // aliases resulting in overlapping assignments.
            let allow_complex_projection =
                lhs.projection[..].iter().all(PlaceElem::is_stable_offset);
            if let Some(place) = self.try_as_place(value, location, allow_complex_projection) {
                self.reused_locals.insert(place.local);
                *rvalue = Rvalue::Use(Operand::Copy(place));
            }
            return Some(value);
        }

        Some(self.insert(ty, Value::Aggregate(variant_index, fields)))
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_unary(
        &mut self,
        op: UnOp,
        arg_op: &mut Operand<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let mut arg_index = self.simplify_operand(arg_op, location)?;
        let arg_ty = self.ty(arg_index);
        let ret_ty = op.ty(self.tcx, arg_ty);

        // PtrMetadata doesn't care about *const vs *mut vs & vs &mut,
        // so start by removing those distinctions so we can update the `Operand`
        if op == UnOp::PtrMetadata {
            let mut was_updated = false;
            loop {
                arg_index = match self.get(arg_index) {
                    // Pointer casts that preserve metadata, such as
                    // `*const [i32]` <-> `*mut [i32]` <-> `*mut [f32]`.
                    // It's critical that this not eliminate cases like
                    // `*const [T]` -> `*const T` which remove metadata.
                    // We run on potentially-generic MIR, though, so unlike codegen
                    // we can't always know exactly what the metadata are.
                    // To allow things like `*mut (?A, ?T)` <-> `*mut (?B, ?T)`,
                    // it's fine to get a projection as the type.
                    Value::Cast { kind: CastKind::PtrToPtr, value: inner }
                        if self.pointers_have_same_metadata(self.ty(*inner), arg_ty) =>
                    {
                        *inner
                    }

                    // We have an unsizing cast, which assigns the length to wide pointer metadata.
                    Value::Cast {
                        kind: CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _),
                        value: from,
                    } if let Some(from) = self.ty(*from).builtin_deref(true)
                        && let ty::Array(_, len) = from.kind()
                        && let Some(to) = self.ty(arg_index).builtin_deref(true)
                        && let ty::Slice(..) = to.kind() =>
                    {
                        return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len)));
                    }

                    // `&mut *p`, `&raw *p`, etc don't change metadata.
                    Value::Address { place, kind: _, provenance: _ }
                        if let PlaceRef { local, projection: [PlaceElem::Deref] } =
                            place.as_ref()
                            && let Some(local_index) = self.locals[local] =>
                    {
                        local_index
                    }

                    _ => break,
                };
                was_updated = true;
            }

            if was_updated && let Some(op) = self.try_as_operand(arg_index, location) {
                *arg_op = op;
            }
        }

        let value = match (op, self.get(arg_index)) {
            (UnOp::Not, Value::UnaryOp(UnOp::Not, inner)) => return Some(*inner),
            (UnOp::Neg, Value::UnaryOp(UnOp::Neg, inner)) => return Some(*inner),
            (UnOp::Not, Value::BinaryOp(BinOp::Eq, lhs, rhs)) => {
                Value::BinaryOp(BinOp::Ne, *lhs, *rhs)
            }
            (UnOp::Not, Value::BinaryOp(BinOp::Ne, lhs, rhs)) => {
                Value::BinaryOp(BinOp::Eq, *lhs, *rhs)
            }
            (UnOp::PtrMetadata, Value::RawPtr { metadata, .. }) => return Some(*metadata),
            // We have an unsizing cast, which assigns the length to wide pointer metadata.
            (
                UnOp::PtrMetadata,
                Value::Cast {
                    kind: CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _),
                    value: inner,
                },
            ) if let ty::Slice(..) = arg_ty.builtin_deref(true).unwrap().kind()
                && let ty::Array(_, len) = self.ty(*inner).builtin_deref(true).unwrap().kind() =>
            {
                return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len)));
            }
            _ => Value::UnaryOp(op, arg_index),
        };
        Some(self.insert(ret_ty, value))
    }
1250
1251    #[instrument(level = "trace", skip(self), ret)]
1252    fn simplify_binary(
1253        &mut self,
1254        op: BinOp,
1255        lhs_operand: &mut Operand<'tcx>,
1256        rhs_operand: &mut Operand<'tcx>,
1257        location: Location,
1258    ) -> Option<VnIndex> {
1259        let lhs = self.simplify_operand(lhs_operand, location);
1260        let rhs = self.simplify_operand(rhs_operand, location);
1261
1262        // Only short-circuit the `Option`s after we have called `simplify_operand`
1263        // on both operands, for their side effects.
1264        let mut lhs = lhs?;
1265        let mut rhs = rhs?;
1266
1267        let lhs_ty = self.ty(lhs);
1268
1269        // If we're comparing pointers, remove `PtrToPtr` casts if the from
1270        // types of both casts and the metadata all match.
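        // E.g. (a sketch): with `_1: *const i32` and `_2: *const i32`, the comparison
        // `(_1 as *const u8) == (_2 as *const u8)` can be performed directly on `_1` and `_2`.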
1271        if let BinOp::Eq | BinOp::Ne | BinOp::Lt | BinOp::Le | BinOp::Gt | BinOp::Ge = op
1272            && lhs_ty.is_any_ptr()
1273            && let Value::Cast { kind: CastKind::PtrToPtr, value: lhs_value } = self.get(lhs)
1274            && let Value::Cast { kind: CastKind::PtrToPtr, value: rhs_value } = self.get(rhs)
1275            && let lhs_from = self.ty(*lhs_value)
1276            && lhs_from == self.ty(*rhs_value)
1277            && self.pointers_have_same_metadata(lhs_from, lhs_ty)
1278        {
1279            lhs = *lhs_value;
1280            rhs = *rhs_value;
1281            if let Some(lhs_op) = self.try_as_operand(lhs, location)
1282                && let Some(rhs_op) = self.try_as_operand(rhs, location)
1283            {
1284                *lhs_operand = lhs_op;
1285                *rhs_operand = rhs_op;
1286            }
1287        }
1288
1289        if let Some(value) = self.simplify_binary_inner(op, lhs_ty, lhs, rhs) {
1290            return Some(value);
1291        }
1292        let ty = op.ty(self.tcx, lhs_ty, self.ty(rhs));
1293        let value = Value::BinaryOp(op, lhs, rhs);
1294        Some(self.insert(ty, value))
1295    }
1296
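    /// Simplify a binary operation on already-interned operands using algebraic identities.
    /// A non-exhaustive sketch of the rewrites performed here: `Add(x, 0)` and `Mul(x, 1)`
    /// become `x`; `Mul(x, 0)` and `Sub(x, x)` become `0`; `Eq(x, x)` becomes `true`; equality
    /// of two known bit patterns is decided directly. For the overflowing variants, the
    /// simplified result is paired with a `false` overflow flag.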
1297    fn simplify_binary_inner(
1298        &mut self,
1299        op: BinOp,
1300        lhs_ty: Ty<'tcx>,
1301        lhs: VnIndex,
1302        rhs: VnIndex,
1303    ) -> Option<VnIndex> {
1304        // Floats are weird enough that none of the logic below applies.
1305        let reasonable_ty =
1306            lhs_ty.is_integral() || lhs_ty.is_bool() || lhs_ty.is_char() || lhs_ty.is_any_ptr();
1307        if !reasonable_ty {
1308            return None;
1309        }
1310
1311        let layout = self.ecx.layout_of(lhs_ty).ok()?;
1312
1313        let as_bits = |value: VnIndex| {
1314            let constant = self.evaluated[value].as_ref()?;
1315            if layout.backend_repr.is_scalar() {
1316                let scalar = self.ecx.read_scalar(constant).discard_err()?;
1317                scalar.to_bits(constant.layout.size).discard_err()
1318            } else {
1319                // `constant` is a wide pointer. Do not evaluate to bits.
1320                None
1321            }
1322        };
1323
1324        // Represent the values as `Left(bits)` or `Right(VnIndex)`.
1325        use Either::{Left, Right};
1326        let a = as_bits(lhs).map_or(Right(lhs), Left);
1327        let b = as_bits(rhs).map_or(Right(rhs), Left);
1328
1329        let result = match (op, a, b) {
1330            // Neutral elements.
1331            (
1332                BinOp::Add
1333                | BinOp::AddWithOverflow
1334                | BinOp::AddUnchecked
1335                | BinOp::BitOr
1336                | BinOp::BitXor,
1337                Left(0),
1338                Right(p),
1339            )
1340            | (
1341                BinOp::Add
1342                | BinOp::AddWithOverflow
1343                | BinOp::AddUnchecked
1344                | BinOp::BitOr
1345                | BinOp::BitXor
1346                | BinOp::Sub
1347                | BinOp::SubWithOverflow
1348                | BinOp::SubUnchecked
1349                | BinOp::Offset
1350                | BinOp::Shl
1351                | BinOp::Shr,
1352                Right(p),
1353                Left(0),
1354            )
1355            | (BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked, Left(1), Right(p))
1356            | (
1357                BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked | BinOp::Div,
1358                Right(p),
1359                Left(1),
1360            ) => p,
1361            // Attempt to simplify `x & ALL_ONES` to `x`, with `ALL_ONES` depending on type size.
1362            (BinOp::BitAnd, Right(p), Left(ones)) | (BinOp::BitAnd, Left(ones), Right(p))
1363                if ones == layout.size.truncate(u128::MAX)
1364                    || (layout.ty.is_bool() && ones == 1) =>
1365            {
1366                p
1367            }
1368            // Absorbing elements.
1369            (
1370                BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked | BinOp::BitAnd,
1371                _,
1372                Left(0),
1373            )
1374            | (BinOp::Rem, _, Left(1))
1375            | (
1376                BinOp::Mul
1377                | BinOp::MulWithOverflow
1378                | BinOp::MulUnchecked
1379                | BinOp::Div
1380                | BinOp::Rem
1381                | BinOp::BitAnd
1382                | BinOp::Shl
1383                | BinOp::Shr,
1384                Left(0),
1385                _,
1386            ) => self.insert_scalar(lhs_ty, Scalar::from_uint(0u128, layout.size)),
1387            // Attempt to simplify `x | ALL_ONES` to `ALL_ONES`.
1388            (BinOp::BitOr, _, Left(ones)) | (BinOp::BitOr, Left(ones), _)
1389                if ones == layout.size.truncate(u128::MAX)
1390                    || (layout.ty.is_bool() && ones == 1) =>
1391            {
1392                self.insert_scalar(lhs_ty, Scalar::from_uint(ones, layout.size))
1393            }
1394            // Sub/Xor with itself.
1395            (BinOp::Sub | BinOp::SubWithOverflow | BinOp::SubUnchecked | BinOp::BitXor, a, b)
1396                if a == b =>
1397            {
1398                self.insert_scalar(lhs_ty, Scalar::from_uint(0u128, layout.size))
1399            }
1400            // Comparison:
1401            // - if both operands can be computed as bits, just compare the bits;
1402            // - if we proved that both operands have the same value, we can insert true/false;
1403            // - otherwise, do nothing, as we do not try to prove inequality.
1404            (BinOp::Eq, Left(a), Left(b)) => self.insert_bool(a == b),
1405            (BinOp::Eq, a, b) if a == b => self.insert_bool(true),
1406            (BinOp::Ne, Left(a), Left(b)) => self.insert_bool(a != b),
1407            (BinOp::Ne, a, b) if a == b => self.insert_bool(false),
1408            _ => return None,
1409        };
1410
1411        if op.is_overflowing() {
1412            let ty = Ty::new_tup(self.tcx, &[self.ty(result), self.tcx.types.bool]);
1413            let false_val = self.insert_bool(false);
1414            Some(self.insert_tuple(ty, vec![result, false_val]))
1415        } else {
1416            Some(result)
1417        }
1418    }
1419
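    /// For example (a sketch), chained pointer casts can skip the intermediate type:
    /// ```ignore (MIR)
    /// _2 = copy _1 as *const i32 (PtrToPtr); // _1: *mut i32
    /// _3 = copy _2 as *const u8 (PtrToPtr);  // can be rewritten to cast `_1` directly
    /// ```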
1420    fn simplify_cast(
1421        &mut self,
1422        initial_kind: &mut CastKind,
1423        initial_operand: &mut Operand<'tcx>,
1424        to: Ty<'tcx>,
1425        location: Location,
1426    ) -> Option<VnIndex> {
1427        use CastKind::*;
1428        use rustc_middle::ty::adjustment::PointerCoercion::*;
1429
1430        let mut kind = *initial_kind;
1431        let mut value = self.simplify_operand(initial_operand, location)?;
1432        let mut from = self.ty(value);
1433        if from == to {
1434            return Some(value);
1435        }
1436
1437        if let CastKind::PointerCoercion(ReifyFnPointer | ClosureFnPointer(_), _) = kind {
1438            // Each reification of a generic fn may get a different pointer.
1439            // Do not try to merge them.
1440            return Some(self.new_opaque(to));
1441        }
1442
1443        let mut was_ever_updated = false;
1444        loop {
1445            let mut was_updated_this_iteration = false;
1446
1447            // Transmuting between raw pointers is just a pointer cast so long as
1448            // they have the same metadata type (like `*const i32` <=> `*mut u64`
1449            // or `*mut [i32]` <=> `*const [u64]`), including the common special
1450            // case of `*const T` <=> `*mut T`.
1451            if let Transmute = kind
1452                && from.is_raw_ptr()
1453                && to.is_raw_ptr()
1454                && self.pointers_have_same_metadata(from, to)
1455            {
1456                kind = PtrToPtr;
1457                was_updated_this_iteration = true;
1458            }
1459
1460            // If a cast just casts away the metadata again, then we can get it by
1461            // casting the original thin pointer passed to `from_raw_parts`
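            // E.g. (a sketch): a wide `*const [i32]` assembled from `(data, len)` and then
            // cast back to a thin `*const i32` is just `data`.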
1462            if let PtrToPtr = kind
1463                && let Value::RawPtr { pointer, .. } = self.get(value)
1464                && let ty::RawPtr(to_pointee, _) = to.kind()
1465                && to_pointee.is_sized(self.tcx, self.typing_env())
1466            {
1467                from = self.ty(*pointer);
1468                value = *pointer;
1469                was_updated_this_iteration = true;
1470                if from == to {
1471                    return Some(*pointer);
1472                }
1473            }
1474
1475            // Aggregate-then-Transmute can just transmute the original field value,
1476            // so long as all the bytes of the value come from a single field.
1477            if let Transmute = kind
1478                && let Value::Aggregate(variant_idx, field_values) = self.get(value)
1479                && let Some((field_idx, field_ty)) =
1480                    self.value_is_all_in_one_field(from, *variant_idx)
1481            {
1482                from = field_ty;
1483                value = field_values[field_idx.as_usize()];
1484                was_updated_this_iteration = true;
1485                if field_ty == to {
1486                    return Some(value);
1487                }
1488            }
1489
1490            // Various cast-then-cast cases can be simplified.
1491            if let Value::Cast { kind: inner_kind, value: inner_value } = *self.get(value) {
1492                let inner_from = self.ty(inner_value);
1493                let new_kind = match (inner_kind, kind) {
1494                    // Even if there's a narrowing cast in here that's fine, because
1495                    // things like `*mut [i32] -> *mut i32 -> *const i32` and
1496                    // `*mut [i32] -> *const [i32] -> *const i32` can skip the middle in MIR.
1497                    (PtrToPtr, PtrToPtr) => Some(PtrToPtr),
1498                    // PtrToPtr-then-Transmute is fine so long as the pointer cast is identity:
1499                    // `*const T -> *mut T -> NonNull<T>` is fine, but we need to check for narrowing
1500                    // to skip things like `*const [i32] -> *const i32 -> NonNull<T>`.
1501                    (PtrToPtr, Transmute) if self.pointers_have_same_metadata(inner_from, from) => {
1502                        Some(Transmute)
1503                    }
1504                    // Similarly, for Transmute-then-PtrToPtr. Note that we need to check different
1505                    // variables for their metadata, and thus this can't merge with the previous arm.
1506                    (Transmute, PtrToPtr) if self.pointers_have_same_metadata(from, to) => {
1507                        Some(Transmute)
1508                    }
1509                    // It would be legal to always do this, but we don't want to hide information
1510                    // from the backend that it'd otherwise be able to use for optimizations.
1511                    (Transmute, Transmute)
1512                        if !self.type_may_have_niche_of_interest_to_backend(from) =>
1513                    {
1514                        Some(Transmute)
1515                    }
1516                    _ => None,
1517                };
1518                if let Some(new_kind) = new_kind {
1519                    kind = new_kind;
1520                    from = inner_from;
1521                    value = inner_value;
1522                    was_updated_this_iteration = true;
1523                    if inner_from == to {
1524                        return Some(inner_value);
1525                    }
1526                }
1527            }
1528
1529            if was_updated_this_iteration {
1530                was_ever_updated = true;
1531            } else {
1532                break;
1533            }
1534        }
1535
1536        if was_ever_updated && let Some(op) = self.try_as_operand(value, location) {
1537            *initial_operand = op;
1538            *initial_kind = kind;
1539        }
1540
1541        Some(self.insert(to, Value::Cast { kind, value }))
1542    }
1543
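    /// Whether the two pointer types carry the same metadata type: e.g. `*const [i32]` and
    /// `*mut [f32]` both use `usize` metadata, while `*const i32` and `*mut u64` are both thin.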
1544    fn pointers_have_same_metadata(&self, left_ptr_ty: Ty<'tcx>, right_ptr_ty: Ty<'tcx>) -> bool {
1545        let left_meta_ty = left_ptr_ty.pointee_metadata_ty_or_projection(self.tcx);
1546        let right_meta_ty = right_ptr_ty.pointee_metadata_ty_or_projection(self.tcx);
1547        if left_meta_ty == right_meta_ty {
1548            true
1549        } else if let Ok(left) =
1550            self.tcx.try_normalize_erasing_regions(self.typing_env(), left_meta_ty)
1551            && let Ok(right) =
1552                self.tcx.try_normalize_erasing_regions(self.typing_env(), right_meta_ty)
1553        {
1554            left == right
1555        } else {
1556            false
1557        }
1558    }
1559
1560    /// Returns `false` if we know for sure that this type has no interesting niche,
1561    /// and thus we can skip transmuting through it without worrying.
1562    ///
1563    /// The backend will emit `assume`s when transmuting between types with niches,
1564    /// so we want to preserve `i32 -> char -> u32` so that that data is around,
1565    /// but it's fine to skip whole-range-is-valid steps like `A -> u32 -> B`.
1566    fn type_may_have_niche_of_interest_to_backend(&self, ty: Ty<'tcx>) -> bool {
1567        let Ok(layout) = self.ecx.layout_of(ty) else {
1568            // If it's too generic or something, then assume it might be interesting later.
1569            return true;
1570        };
1571
1572        if layout.uninhabited {
1573            return true;
1574        }
1575
1576        match layout.backend_repr {
1577            BackendRepr::Scalar(a) => !a.is_always_valid(&self.ecx),
1578            BackendRepr::ScalarPair(a, b) => {
1579                !a.is_always_valid(&self.ecx) || !b.is_always_valid(&self.ecx)
1580            }
1581            BackendRepr::SimdVector { .. } | BackendRepr::Memory { .. } => false,
1582        }
1583    }
1584
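    /// If all the bytes of a value of type `ty` (in variant `variant`) live in a single field,
    /// return that field and its type. E.g. (a sketch): a `#[repr(transparent)]` wrapper around
    /// a `u32`, or a struct whose other fields are all 1-ZSTs.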
1585    fn value_is_all_in_one_field(
1586        &self,
1587        ty: Ty<'tcx>,
1588        variant: VariantIdx,
1589    ) -> Option<(FieldIdx, Ty<'tcx>)> {
1590        if let Ok(layout) = self.ecx.layout_of(ty)
1591            && let abi::Variants::Single { index } = layout.variants
1592            && index == variant
1593            && let Some((field_idx, field_layout)) = layout.non_1zst_field(&self.ecx)
1594            && layout.size == field_layout.size
1595        {
1596            // We needed to check the variant to avoid trying to read the tag
1597            // field from an enum whose variants have no fields, since that tag
1598            // field isn't in the `Aggregate` from which we're getting values.
1599            Some((field_idx, field_layout.ty))
1600        } else if let ty::Adt(adt, args) = ty.kind()
1601            && adt.is_struct()
1602            && adt.repr().transparent()
1603            && let [single_field] = adt.non_enum_variant().fields.raw.as_slice()
1604        {
1605            Some((FieldIdx::ZERO, single_field.ty(self.tcx, args)))
1606        } else {
1607            None
1608        }
1609    }
1610}
1611
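/// Try to turn an interpreted value into a `ConstValue` suitable for propagation into MIR.
/// E.g. (a sketch): a `u32` scalar becomes `ConstValue::Scalar`, a ZST becomes
/// `ConstValue::ZeroSized`, while values containing pointers with provenance are rejected.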
1612fn op_to_prop_const<'tcx>(
1613    ecx: &mut InterpCx<'tcx, DummyMachine>,
1614    op: &OpTy<'tcx>,
1615) -> Option<ConstValue> {
1616    // Do not attempt to propagate unsized locals.
1617    if op.layout.is_unsized() {
1618        return None;
1619    }
1620
1621    // If this constant is a ZST, just return an empty value.
1622    if op.layout.is_zst() {
1623        return Some(ConstValue::ZeroSized);
1624    }
1625
1626    // Do not synthesize overly large constants. Codegen will just memcpy them, which we'd like to
1627    // avoid.
1628    if !matches!(op.layout.backend_repr, BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..)) {
1629        return None;
1630    }
1631
1632    // If this constant has scalar ABI, return it as a `ConstValue::Scalar`.
1633    if let BackendRepr::Scalar(abi::Scalar::Initialized { .. }) = op.layout.backend_repr
1634        && let Some(scalar) = ecx.read_scalar(op).discard_err()
1635    {
1636        if scalar.try_to_scalar_int().is_err() {
1637            // Check that we do not leak a pointer.
1638            // Those pointers may lose part of their identity in codegen.
1639            // FIXME: remove this hack once https://github.com/rust-lang/rust/issues/79738 is fixed.
1640            return None;
1641        }
1642        return Some(ConstValue::Scalar(scalar));
1643    }
1644
1645    // If this constant is already represented as an `Allocation`,
1646    // try putting it into global memory to return it.
1647    if let Either::Left(mplace) = op.as_mplace_or_imm() {
1648        let (size, _align) = ecx.size_and_align_of_val(&mplace).discard_err()??;
1649
1650        // Do not try interning a value that contains provenance.
1651        // Due to https://github.com/rust-lang/rust/issues/79738, doing so could lead to bugs.
1652        // FIXME: remove this hack once that issue is fixed.
1653        let alloc_ref = ecx.get_ptr_alloc(mplace.ptr(), size).discard_err()??;
1654        if alloc_ref.has_provenance() {
1655            return None;
1656        }
1657
1658        let pointer = mplace.ptr().into_pointer_or_addr().ok()?;
1659        let (prov, offset) = pointer.prov_and_relative_offset();
1660        let alloc_id = prov.alloc_id();
1661        intern_const_alloc_for_constprop(ecx, alloc_id).discard_err()?;
1662
1663        // `alloc_id` may point to a static. Codegen will choke on an `Indirect` with anything
1664        // but `GlobalAlloc::Memory`, so fall through to copying if needed.
1665        // FIXME: find a way to treat this more uniformly (probably by fixing codegen)
1666        if let GlobalAlloc::Memory(alloc) = ecx.tcx.global_alloc(alloc_id)
1667            // Transmuting a constant is just an offset in the allocation. If the alignment of the
1668            // allocation is not enough, fall back to copying into a properly aligned value.
1669            && alloc.inner().align >= op.layout.align.abi
1670        {
1671            return Some(ConstValue::Indirect { alloc_id, offset });
1672        }
1673    }
1674
1675    // Everything failed: create a new allocation to hold the data.
1676    let alloc_id =
1677        ecx.intern_with_temp_alloc(op.layout, |ecx, dest| ecx.copy_op(op, dest)).discard_err()?;
1678    let value = ConstValue::Indirect { alloc_id, offset: Size::ZERO };
1679
1680    // Check that we do not leak a pointer.
1681    // Those pointers may lose part of their identity in codegen.
1682    // FIXME: remove this hack once https://github.com/rust-lang/rust/issues/79738 is fixed.
1683    if ecx.tcx.global_alloc(alloc_id).unwrap_memory().inner().provenance().ptrs().is_empty() {
1684        return Some(value);
1685    }
1686
1687    None
1688}
1689
1690impl<'tcx> VnState<'_, 'tcx> {
1691    /// If either [`Self::try_as_constant`] or [`Self::try_as_place`] succeeds,
1692    /// returns that result as an [`Operand`].
1693    fn try_as_operand(&mut self, index: VnIndex, location: Location) -> Option<Operand<'tcx>> {
1694        if let Some(const_) = self.try_as_constant(index) {
1695            Some(Operand::Constant(Box::new(const_)))
1696        } else if let Some(place) = self.try_as_place(index, location, false) {
1697            self.reused_locals.insert(place.local);
1698            Some(Operand::Copy(place))
1699        } else {
1700            None
1701        }
1702    }
1703
1704    /// If `index` is a `Value::Constant`, return the `Constant` to be put in the MIR.
1705    fn try_as_constant(&mut self, index: VnIndex) -> Option<ConstOperand<'tcx>> {
1706        // This was already constant in MIR, do not change it. If the constant is not
1707        // deterministic, adding an additional mention of it in MIR will not give the same value as
1708        // the former mention.
1709        if let Value::Constant { value, disambiguator: None } = *self.get(index) {
1710            debug_assert!(value.is_deterministic());
1711            return Some(ConstOperand { span: DUMMY_SP, user_ty: None, const_: value });
1712        }
1713
1714        let op = self.evaluated[index].as_ref()?;
1715        if op.layout.is_unsized() {
1716            // Do not attempt to propagate unsized locals.
1717            return None;
1718        }
1719
1720        let value = op_to_prop_const(&mut self.ecx, op)?;
1721
1722        // Check that we do not leak a pointer.
1723        // Those pointers may lose part of their identity in codegen.
1724        // FIXME: remove this hack once https://github.com/rust-lang/rust/issues/79738 is fixed.
1725        assert!(!value.may_have_provenance(self.tcx, op.layout.size));
1726
1727        let const_ = Const::Val(value, op.layout.ty);
1728        Some(ConstOperand { span: DUMMY_SP, user_ty: None, const_ })
1729    }
1730
1731    /// Construct a place which holds the same value as `index` and for which all locals strictly
1732    /// dominate `loc`. If you use this place, add its base local to `reused_locals` to remove
1733    /// storage statements.
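    ///
    /// For example (a sketch): if `index` is the value of `_4.0` and `_4` holds the same value
    /// as `_1`, this can return the place `_1.0`, provided `_1`'s assignment strictly
    /// dominates `loc`.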
1734    #[instrument(level = "trace", skip(self), ret)]
1735    fn try_as_place(
1736        &mut self,
1737        mut index: VnIndex,
1738        loc: Location,
1739        allow_complex_projection: bool,
1740    ) -> Option<Place<'tcx>> {
1741        let mut projection = SmallVec::<[PlaceElem<'tcx>; 1]>::new();
1742        loop {
1743            if let Some(local) = self.try_as_local(index, loc) {
1744                projection.reverse();
1745                let place =
1746                    Place { local, projection: self.tcx.mk_place_elems(projection.as_slice()) };
1747                return Some(place);
1748            } else if projection.last() == Some(&PlaceElem::Deref) {
1749                // `Deref` can only be the first projection in a place.
1750                // If we are here, we failed to find a local, and we already have a `Deref`.
1751                // Trying to add projections will only result in an ill-formed place.
1752                return None;
1753            } else if let Value::Projection(pointer, proj) = *self.get(index)
1754                && (allow_complex_projection || proj.is_stable_offset())
1755                && let Some(proj) = self.try_as_place_elem(self.ty(index), proj, loc)
1756            {
1757                projection.push(proj);
1758                index = pointer;
1759            } else {
1760                return None;
1761            }
1762        }
1763    }
1764
1765    /// If there is a local which is assigned `index`, and its assignment strictly dominates `loc`,
1766    /// return it. If you use this local, add it to `reused_locals` to remove storage statements.
1767    fn try_as_local(&mut self, index: VnIndex, loc: Location) -> Option<Local> {
1768        let other = self.rev_locals.get(index)?;
1769        other
1770            .iter()
1771            .find(|&&other| self.ssa.assignment_dominates(&self.dominators, other, loc))
1772            .copied()
1773    }
1774}
1775
1776impl<'tcx> MutVisitor<'tcx> for VnState<'_, 'tcx> {
1777    fn tcx(&self) -> TyCtxt<'tcx> {
1778        self.tcx
1779    }
1780
1781    fn visit_place(&mut self, place: &mut Place<'tcx>, context: PlaceContext, location: Location) {
1782        self.simplify_place_projection(place, location);
1783        if context.is_mutating_use() && place.is_indirect() {
1784            // A non-local mutation may invalidate derefs.
1785            self.invalidate_derefs();
1786        }
1787        self.super_place(place, context, location);
1788    }
1789
1790    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) {
1791        self.simplify_operand(operand, location);
1792        self.super_operand(operand, location);
1793    }
1794
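    /// As a sketch of the overall effect on assignments:
    /// ```ignore (MIR)
    /// _2 = Add(copy _1, const 0_i32);     // can become `_2 = copy _1`
    /// _3 = Add(const 2_i32, const 3_i32); // can become `_3 = const 5_i32`
    /// ```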
1795    fn visit_assign(
1796        &mut self,
1797        lhs: &mut Place<'tcx>,
1798        rvalue: &mut Rvalue<'tcx>,
1799        location: Location,
1800    ) {
1801        self.simplify_place_projection(lhs, location);
1802
1803        let value = self.simplify_rvalue(lhs, rvalue, location);
1804        if let Some(value) = value {
1805            if let Some(const_) = self.try_as_constant(value) {
1806                *rvalue = Rvalue::Use(Operand::Constant(Box::new(const_)));
1807            } else if let Some(place) = self.try_as_place(value, location, false)
1808                && *rvalue != Rvalue::Use(Operand::Move(place))
1809                && *rvalue != Rvalue::Use(Operand::Copy(place))
1810            {
1811                *rvalue = Rvalue::Use(Operand::Copy(place));
1812                self.reused_locals.insert(place.local);
1813            }
1814        }
1815
1816        if lhs.is_indirect() {
1817            // A non-local mutation may invalidate derefs.
1818            self.invalidate_derefs();
1819        }
1820
1821        if let Some(local) = lhs.as_local()
1822            && self.ssa.is_ssa(local)
1823            && let rvalue_ty = rvalue.ty(self.local_decls, self.tcx)
1824            // FIXME(#112651): `rvalue`'s type may be a subtype of `local`'s type. We can only mark
1825            // `local` as reusable if we have an exact type match.
1826            && self.local_decls[local].ty == rvalue_ty
1827        {
1828            let value = value.unwrap_or_else(|| self.new_opaque(rvalue_ty));
1829            self.assign(local, value);
1830        }
1831    }
1832
1833    fn visit_terminator(&mut self, terminator: &mut Terminator<'tcx>, location: Location) {
1834        if let Terminator { kind: TerminatorKind::Call { destination, .. }, .. } = terminator {
1835            if let Some(local) = destination.as_local()
1836                && self.ssa.is_ssa(local)
1837            {
1838                let ty = self.local_decls[local].ty;
1839                let opaque = self.new_opaque(ty);
1840                self.assign(local, opaque);
1841            }
1842        }
1843        // Function calls and ASM may invalidate (nested) derefs. We must handle them carefully.
1844        // Currently, we only preserve derefs across trivial terminators like SwitchInt and Goto.
1845        let safe_to_preserve_derefs = matches!(
1846            terminator.kind,
1847            TerminatorKind::SwitchInt { .. } | TerminatorKind::Goto { .. }
1848        );
1849        if !safe_to_preserve_derefs {
1850            self.invalidate_derefs();
1851        }
1852        self.super_terminator(terminator, location);
1853    }
1854}
1855
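/// Visitor run after value numbering: for locals that are now reused across their original
/// live ranges, it turns `Move` operands into `Copy` and removes their storage statements.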
1856struct StorageRemover<'tcx> {
1857    tcx: TyCtxt<'tcx>,
1858    reused_locals: DenseBitSet<Local>,
1859}
1860
1861impl<'tcx> MutVisitor<'tcx> for StorageRemover<'tcx> {
1862    fn tcx(&self) -> TyCtxt<'tcx> {
1863        self.tcx
1864    }
1865
1866    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, _: Location) {
1867        if let Operand::Move(place) = *operand
1868            && !place.is_indirect_first_projection()
1869            && self.reused_locals.contains(place.local)
1870        {
1871            *operand = Operand::Copy(place);
1872        }
1873    }
1874
1875    fn visit_statement(&mut self, stmt: &mut Statement<'tcx>, loc: Location) {
1876        match stmt.kind {
1877            // When removing storage statements, we must remove both `StorageLive` and `StorageDead` (#107511).
1878            StatementKind::StorageLive(l) | StatementKind::StorageDead(l)
1879                if self.reused_locals.contains(l) =>
1880            {
1881                stmt.make_nop()
1882            }
1883            _ => self.super_statement(stmt, loc),
1884        }
1885    }
1886}