rustc_mir_transform/gvn.rs

//! Global value numbering.
//!
//! MIR may contain repeated and/or redundant computations. The objective of this pass is to detect
//! such redundancies and re-use the already-computed result when possible.
//!
//! In a first pass, we compute a symbolic representation of values that are assigned to SSA
//! locals. This symbolic representation is defined by the `Value` enum. Each produced instance of
//! `Value` is interned as a `VnIndex`, which allows us to cheaply detect identical values.
//!
//! From those assignments, we construct a mapping `VnIndex -> Vec<(Local, Location)>` of available
//! values, the locals in which they are stored, and the assignment location.
//!
//! In a second pass, we traverse all (non-SSA) assignments `x = rvalue` and operands. For each
//! one, we compute the `VnIndex` of the rvalue. If this `VnIndex` is associated to a constant, we
//! replace the rvalue/operand by that constant. Otherwise, if there is an SSA local `y`
//! associated to this `VnIndex`, and if its definition location strictly dominates the assignment
//! to `x`, we replace the assignment by `x = y`.
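//!
//! For example, a sketch (local names and the exact MIR rendering are illustrative):
//! ```ignore (MIR)
//! _2 = Add(copy _1, const 5_i32); // the rvalue gets `VnIndex` i; `_2` is SSA
//! // some MIR
//! _3 = Add(copy _1, const 5_i32); // same `VnIndex` i, and `_2`'s definition dominates
//!                                 // this point, so this is replaced by `_3 = copy _2;`
//! ```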
//!
//! Opportunistically, this pass also simplifies some `Rvalue`s based on the accumulated knowledge.
//!
//! # Operational semantics
//!
//! Operationally, this pass attempts to prove bitwise equality between locals. Given this MIR:
//! ```ignore (MIR)
//! _a = some value // has VnIndex i
//! // some MIR
//! _b = some other value // also has VnIndex i
//! ```
//!
//! We consider it to be replaceable by:
//! ```ignore (MIR)
//! _a = some value // has VnIndex i
//! // some MIR
//! _c = some other value // also has VnIndex i
//! assume(_a bitwise equal to _c) // follows from having the same VnIndex
//! _b = _a // follows from the `assume`
//! ```
//!
//! Which is simplifiable to:
//! ```ignore (MIR)
//! _a = some value // has VnIndex i
//! // some MIR
//! _b = _a
//! ```
//!
//! # Handling of references
//!
//! We handle references by assigning a different "provenance" index to each Ref/RawPtr rvalue.
//! This ensures that we do not spuriously merge borrows that should not be merged. Meanwhile, we
//! consider all the derefs of an immutable reference to a freeze type to give the same value:
//! ```ignore (MIR)
//! _a = *_b // _b is &Freeze
//! _c = *_b // replaced by _c = _a
//! ```
//!
//! # Determinism of constant propagation
//!
//! When registering a new `Value`, we attempt to opportunistically evaluate it as a constant.
//! The evaluated form is inserted in `evaluated` as an `OpTy`, or `None` if evaluation failed.
//!
//! The difficulty is non-deterministic evaluation of MIR constants. Some `Const`s can have
//! different runtime values each time they are evaluated. This is the case for `Const::Slice`,
//! which gets a new pointer each time it is evaluated, and for constants that contain a fn
//! pointer (an `AllocId` pointing to a `GlobalAlloc::Function`), which may point to a different
//! symbol in each codegen unit.
//!
//! Meanwhile, we want to be able to read indirect constants. For instance:
//! ```
//! static A: &'static &'static u8 = &&63;
//! fn foo() -> u8 {
//!     **A // We want to replace this by 63.
//! }
//! fn bar() -> u8 {
//!     b"abc"[1] // We want to replace this by b'b'.
//! }
//! ```
//!
//! The `Value::Constant` variant stores a possibly unevaluated constant. Evaluating that constant
//! may be non-deterministic. When that happens, we assign a disambiguator to ensure that we do not
//! merge the constants. See the `duplicate_slice` test in `gvn.rs`.
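//! A sketch of the kind of MIR involved (illustrative, not the actual test):
//! ```ignore (MIR)
//! _a = const "foo"; // `Const::Slice` is not deterministic: gets a fresh disambiguator
//! _b = const "foo"; // same `Const`, but a different disambiguator: not merged with `_a`
//! ```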
//!
//! Second, when writing constants in MIR, we do not write `Const::Slice` or `Const`s that
//! contain `AllocId`s.

use std::borrow::Cow;

use either::Either;
use rustc_abi::{self as abi, BackendRepr, FIRST_VARIANT, FieldIdx, Primitive, Size, VariantIdx};
use rustc_const_eval::const_eval::DummyMachine;
use rustc_const_eval::interpret::{
    ImmTy, Immediate, InterpCx, MemPlaceMeta, MemoryKind, OpTy, Projectable, Scalar,
    intern_const_alloc_for_constprop,
};
use rustc_data_structures::fx::FxIndexSet;
use rustc_data_structures::graph::dominators::Dominators;
use rustc_hir::def::DefKind;
use rustc_index::bit_set::DenseBitSet;
use rustc_index::{IndexVec, newtype_index};
use rustc_middle::bug;
use rustc_middle::mir::interpret::GlobalAlloc;
use rustc_middle::mir::visit::*;
use rustc_middle::mir::*;
use rustc_middle::ty::layout::{HasTypingEnv, LayoutOf};
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_span::DUMMY_SP;
use rustc_span::def_id::DefId;
use smallvec::SmallVec;
use tracing::{debug, instrument, trace};

use crate::ssa::{AssignedValue, SsaLocals};
pub(super) struct GVN;

impl<'tcx> crate::MirPass<'tcx> for GVN {
    fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
        sess.mir_opt_level() >= 2
    }

    #[instrument(level = "trace", skip(self, tcx, body))]
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        debug!(def_id = ?body.source.def_id());

        let typing_env = body.typing_env(tcx);
        let ssa = SsaLocals::new(tcx, body, typing_env);
        // Clone dominators because we need them while mutating the body.
        let dominators = body.basic_blocks.dominators().clone();

        let mut state = VnState::new(tcx, body, typing_env, &ssa, dominators, &body.local_decls);
        ssa.for_each_assignment_mut(
            body.basic_blocks.as_mut_preserves_cfg(),
            |local, value, location| {
                let value = match value {
                    // We do not know anything of this assigned value.
                    AssignedValue::Arg | AssignedValue::Terminator => None,
                    // Try to get some insight.
                    AssignedValue::Rvalue(rvalue) => {
                        let value = state.simplify_rvalue(rvalue, location);
                        // FIXME(#112651) `rvalue` may have a subtype of `local`'s type. We can
                        // only mark `local` as reusable if we have an exact type match.
                        if state.local_decls[local].ty != rvalue.ty(state.local_decls, tcx) {
                            return;
                        }
                        value
                    }
                };
                // `next_opaque` is `Some`, so `new_opaque` must return `Some`.
                let value = value.or_else(|| state.new_opaque()).unwrap();
                state.assign(local, value);
            },
        );

        // Stop creating opaques during replacement as it is useless.
        state.next_opaque = None;

        let reverse_postorder = body.basic_blocks.reverse_postorder().to_vec();
        for bb in reverse_postorder {
            let data = &mut body.basic_blocks.as_mut_preserves_cfg()[bb];
            state.visit_basic_block_data(bb, data);
        }

        // For each local that is reused (`y` above), we remove its storage statements to avoid
        // any difficulty. Those locals are SSA, so they should be easy for LLVM to optimize
        // without storage statements.
        StorageRemover { tcx, reused_locals: state.reused_locals }.visit_body_preserves_cfg(body);
    }

    fn is_required(&self) -> bool {
        false
    }
}

newtype_index! {
    struct VnIndex {}
}

/// Computing the aggregate's type can be quite slow, so we only keep the minimal amount of
/// information to reconstruct it when needed.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
enum AggregateTy<'tcx> {
    /// Invariant: this must not be used for an empty array.
    Array,
    Tuple,
    Def(DefId, ty::GenericArgsRef<'tcx>),
    RawPtr {
        /// Needed for cast propagation.
        data_pointer_ty: Ty<'tcx>,
        /// The data pointer can be anything thin, so it doesn't determine the output.
        output_pointer_ty: Ty<'tcx>,
    },
}

#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
enum AddressKind {
    Ref(BorrowKind),
    Address(RawPtrKind),
}

#[derive(Debug, PartialEq, Eq, Hash)]
enum Value<'tcx> {
    // Root values.
    /// Used to represent values we know nothing about.
    /// The `usize` is a counter incremented by `new_opaque`.
    Opaque(usize),
    /// Evaluated or unevaluated constant value.
    Constant {
        value: Const<'tcx>,
        /// Some constants do not have a deterministic value. To avoid merging two instances of
        /// the same `Const`, we assign them an additional integer index.
        // `disambiguator` is 0 iff the constant is deterministic.
        disambiguator: usize,
    },
    /// An aggregate value: a tuple, closure, struct or enum.
    /// This does not contain unions, as we cannot reason about their value.
    Aggregate(AggregateTy<'tcx>, VariantIdx, Vec<VnIndex>),
    /// This corresponds to a `[value; count]` expression.
    Repeat(VnIndex, ty::Const<'tcx>),
    /// The address of a place.
    Address {
        place: Place<'tcx>,
        kind: AddressKind,
        /// Give each borrow and pointer a different provenance, so we don't merge them.
        provenance: usize,
    },

    // Extractions.
    /// This is the *value* obtained by projecting another value.
    Projection(VnIndex, ProjectionElem<VnIndex, Ty<'tcx>>),
    /// Discriminant of the given value.
    Discriminant(VnIndex),
    /// Length of an array or slice.
    Len(VnIndex),

    // Operations.
    NullaryOp(NullOp<'tcx>, Ty<'tcx>),
    UnaryOp(UnOp, VnIndex),
    BinaryOp(BinOp, VnIndex, VnIndex),
    Cast {
        kind: CastKind,
        value: VnIndex,
        from: Ty<'tcx>,
        to: Ty<'tcx>,
    },
}

struct VnState<'body, 'tcx> {
    tcx: TyCtxt<'tcx>,
    ecx: InterpCx<'tcx, DummyMachine>,
    local_decls: &'body LocalDecls<'tcx>,
    /// Value stored in each local.
    locals: IndexVec<Local, Option<VnIndex>>,
    /// Locals that are assigned that value.
    // This vector does not hold all the values of `VnIndex` that we create.
    // It stops at the largest value created in the first phase of collecting assignments.
    rev_locals: IndexVec<VnIndex, SmallVec<[Local; 1]>>,
    values: FxIndexSet<Value<'tcx>>,
    /// Values evaluated as constants if possible.
    evaluated: IndexVec<VnIndex, Option<OpTy<'tcx>>>,
    /// Counter to generate different values.
    /// This is an `Option` so that we can stop creating opaques during replacement.
    next_opaque: Option<usize>,
    /// Cache the value of the `unsized_locals` feature, to avoid fetching it repeatedly in a loop.
    feature_unsized_locals: bool,
    ssa: &'body SsaLocals,
    dominators: Dominators<BasicBlock>,
    reused_locals: DenseBitSet<Local>,
}

impl<'body, 'tcx> VnState<'body, 'tcx> {
    fn new(
        tcx: TyCtxt<'tcx>,
        body: &Body<'tcx>,
        typing_env: ty::TypingEnv<'tcx>,
        ssa: &'body SsaLocals,
        dominators: Dominators<BasicBlock>,
        local_decls: &'body LocalDecls<'tcx>,
    ) -> Self {
        // Compute a rough estimate of the number of values in the body from the number of
        // statements. This is meant to reduce the number of allocations, but it's all right if
        // we miss the exact amount. We estimate based on 2 values per statement (one for the LHS
        // and one for the RHS) and 4 values per terminator (for call operands).
        let num_values =
            2 * body.basic_blocks.iter().map(|bbdata| bbdata.statements.len()).sum::<usize>()
                + 4 * body.basic_blocks.len();
        VnState {
            tcx,
            ecx: InterpCx::new(tcx, DUMMY_SP, typing_env, DummyMachine),
            local_decls,
            locals: IndexVec::from_elem(None, local_decls),
            rev_locals: IndexVec::with_capacity(num_values),
            values: FxIndexSet::with_capacity_and_hasher(num_values, Default::default()),
            evaluated: IndexVec::with_capacity(num_values),
            next_opaque: Some(1),
            feature_unsized_locals: tcx.features().unsized_locals(),
            ssa,
            dominators,
            reused_locals: DenseBitSet::new_empty(local_decls.len()),
        }
    }

    fn typing_env(&self) -> ty::TypingEnv<'tcx> {
        self.ecx.typing_env()
    }

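    /// Intern `value`, assigning it a `VnIndex`. On first insertion, opportunistically evaluate
    /// it to a constant and cache the result in `evaluated`.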
    #[instrument(level = "trace", skip(self), ret)]
    fn insert(&mut self, value: Value<'tcx>) -> VnIndex {
        let (index, new) = self.values.insert_full(value);
        let index = VnIndex::from_usize(index);
        if new {
            // Grow `evaluated` and `rev_locals` here to amortize the allocations.
            let evaluated = self.eval_to_const(index);
            let _index = self.evaluated.push(evaluated);
            debug_assert_eq!(index, _index);
            // No need to push to `rev_locals` if we finished listing assignments.
            if self.next_opaque.is_some() {
                let _index = self.rev_locals.push(SmallVec::new());
                debug_assert_eq!(index, _index);
            }
        }
        index
    }

    /// Create a new `Value` for which we have no information at all, except that it is distinct
    /// from all the others.
    #[instrument(level = "trace", skip(self), ret)]
    fn new_opaque(&mut self) -> Option<VnIndex> {
        let next_opaque = self.next_opaque.as_mut()?;
        let value = Value::Opaque(*next_opaque);
        *next_opaque += 1;
        Some(self.insert(value))
    }

    /// Create a new `Value::Address` distinct from all the others.
    #[instrument(level = "trace", skip(self), ret)]
    fn new_pointer(&mut self, place: Place<'tcx>, kind: AddressKind) -> Option<VnIndex> {
        let next_opaque = self.next_opaque.as_mut()?;
        let value = Value::Address { place, kind, provenance: *next_opaque };
        *next_opaque += 1;
        Some(self.insert(value))
    }

    fn get(&self, index: VnIndex) -> &Value<'tcx> {
        self.values.get_index(index.as_usize()).unwrap()
    }

    /// Record that `local` is assigned `value`. `local` must be SSA.
    #[instrument(level = "trace", skip(self))]
    fn assign(&mut self, local: Local, value: VnIndex) {
        self.locals[local] = Some(value);

        // Only register the value if its type is `Sized`, as we will emit copies of it.
        let is_sized = !self.feature_unsized_locals
            || self.local_decls[local].ty.is_sized(self.tcx, self.typing_env());
        if is_sized {
            self.rev_locals[value].push(local);
        }
    }

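    /// Intern a constant, assigning a fresh `disambiguator` if its evaluation is not
    /// deterministic. Returns `None` if a disambiguator is needed but we have stopped creating
    /// opaques.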
    fn insert_constant(&mut self, value: Const<'tcx>) -> Option<VnIndex> {
        let disambiguator = if value.is_deterministic() {
            // The constant is deterministic, no need to disambiguate.
            0
        } else {
            // Multiple mentions of this constant will yield different values,
            // so assign a different `disambiguator` to ensure they do not get the same `VnIndex`.
            let next_opaque = self.next_opaque.as_mut()?;
            let disambiguator = *next_opaque;
            *next_opaque += 1;
            // `disambiguator: 0` means deterministic.
            debug_assert_ne!(disambiguator, 0);
            disambiguator
        };
        Some(self.insert(Value::Constant { value, disambiguator }))
    }

    fn insert_bool(&mut self, flag: bool) -> VnIndex {
        // Booleans are deterministic.
        let value = Const::from_bool(self.tcx, flag);
        debug_assert!(value.is_deterministic());
        self.insert(Value::Constant { value, disambiguator: 0 })
    }

    fn insert_scalar(&mut self, scalar: Scalar, ty: Ty<'tcx>) -> VnIndex {
        // Scalars are deterministic.
        let value = Const::from_scalar(self.tcx, scalar, ty);
        debug_assert!(value.is_deterministic());
        self.insert(Value::Constant { value, disambiguator: 0 })
    }

    fn insert_tuple(&mut self, values: Vec<VnIndex>) -> VnIndex {
        self.insert(Value::Aggregate(AggregateTy::Tuple, VariantIdx::ZERO, values))
    }

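    /// Try to evaluate `value` to a constant `OpTy`, reusing the evaluation results already
    /// cached in `self.evaluated` for its operands.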
    #[instrument(level = "trace", skip(self), ret)]
    fn eval_to_const(&mut self, value: VnIndex) -> Option<OpTy<'tcx>> {
        use Value::*;
        let op = match *self.get(value) {
            Opaque(_) => return None,
            // Do not bother evaluating repeat expressions. This would uselessly consume memory.
            Repeat(..) => return None,

            Constant { ref value, disambiguator: _ } => {
                self.ecx.eval_mir_constant(value, DUMMY_SP, None).discard_err()?
            }
            Aggregate(kind, variant, ref fields) => {
                let fields = fields
                    .iter()
                    .map(|&f| self.evaluated[f].as_ref())
                    .collect::<Option<Vec<_>>>()?;
                let ty = match kind {
                    AggregateTy::Array => {
                        assert!(!fields.is_empty());
                        Ty::new_array(self.tcx, fields[0].layout.ty, fields.len() as u64)
                    }
                    AggregateTy::Tuple => {
                        Ty::new_tup_from_iter(self.tcx, fields.iter().map(|f| f.layout.ty))
                    }
                    AggregateTy::Def(def_id, args) => {
                        self.tcx.type_of(def_id).instantiate(self.tcx, args)
                    }
                    AggregateTy::RawPtr { output_pointer_ty, .. } => output_pointer_ty,
                };
                let variant = if ty.is_enum() { Some(variant) } else { None };
                let ty = self.ecx.layout_of(ty).ok()?;
                if ty.is_zst() {
                    ImmTy::uninit(ty).into()
                } else if matches!(kind, AggregateTy::RawPtr { .. }) {
                    // Pointers don't have fields, so don't `project_field` them.
                    let data = self.ecx.read_pointer(fields[0]).discard_err()?;
                    let meta = if fields[1].layout.is_zst() {
                        MemPlaceMeta::None
                    } else {
                        MemPlaceMeta::Meta(self.ecx.read_scalar(fields[1]).discard_err()?)
                    };
                    let ptr_imm = Immediate::new_pointer_with_meta(data, meta, &self.ecx);
                    ImmTy::from_immediate(ptr_imm, ty).into()
                } else if matches!(
                    ty.backend_repr,
                    BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..)
                ) {
                    let dest = self.ecx.allocate(ty, MemoryKind::Stack).discard_err()?;
                    let variant_dest = if let Some(variant) = variant {
                        self.ecx.project_downcast(&dest, variant).discard_err()?
                    } else {
                        dest.clone()
                    };
                    for (field_index, op) in fields.into_iter().enumerate() {
                        let field_dest =
                            self.ecx.project_field(&variant_dest, field_index).discard_err()?;
                        self.ecx.copy_op(op, &field_dest).discard_err()?;
                    }
                    self.ecx
                        .write_discriminant(variant.unwrap_or(FIRST_VARIANT), &dest)
                        .discard_err()?;
                    self.ecx
                        .alloc_mark_immutable(dest.ptr().provenance.unwrap().alloc_id())
                        .discard_err()?;
                    dest.into()
                } else {
                    return None;
                }
            }

            Projection(base, elem) => {
                let value = self.evaluated[base].as_ref()?;
                let elem = match elem {
                    ProjectionElem::Deref => ProjectionElem::Deref,
                    ProjectionElem::Downcast(name, read_variant) => {
                        ProjectionElem::Downcast(name, read_variant)
                    }
                    ProjectionElem::Field(f, ty) => ProjectionElem::Field(f, ty),
                    ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
                        ProjectionElem::ConstantIndex { offset, min_length, from_end }
                    }
                    ProjectionElem::Subslice { from, to, from_end } => {
                        ProjectionElem::Subslice { from, to, from_end }
                    }
                    ProjectionElem::OpaqueCast(ty) => ProjectionElem::OpaqueCast(ty),
                    ProjectionElem::Subtype(ty) => ProjectionElem::Subtype(ty),
                    ProjectionElem::UnwrapUnsafeBinder(ty) => {
                        ProjectionElem::UnwrapUnsafeBinder(ty)
                    }
                    // This should have been replaced by a `ConstantIndex` earlier.
                    ProjectionElem::Index(_) => return None,
                };
                self.ecx.project(value, elem).discard_err()?
            }
            Address { place, kind, provenance: _ } => {
                if !place.is_indirect_first_projection() {
                    return None;
                }
                let local = self.locals[place.local]?;
                let pointer = self.evaluated[local].as_ref()?;
                let mut mplace = self.ecx.deref_pointer(pointer).discard_err()?;
                for proj in place.projection.iter().skip(1) {
                    // We have no call stack to associate a local with a value, so we cannot
                    // interpret indexing.
                    if matches!(proj, ProjectionElem::Index(_)) {
                        return None;
                    }
                    mplace = self.ecx.project(&mplace, proj).discard_err()?;
                }
                let pointer = mplace.to_ref(&self.ecx);
                let ty = match kind {
                    AddressKind::Ref(bk) => Ty::new_ref(
                        self.tcx,
                        self.tcx.lifetimes.re_erased,
                        mplace.layout.ty,
                        bk.to_mutbl_lossy(),
                    ),
                    AddressKind::Address(mutbl) => {
                        Ty::new_ptr(self.tcx, mplace.layout.ty, mutbl.to_mutbl_lossy())
                    }
                };
                let layout = self.ecx.layout_of(ty).ok()?;
                ImmTy::from_immediate(pointer, layout).into()
            }

            Discriminant(base) => {
                let base = self.evaluated[base].as_ref()?;
                let variant = self.ecx.read_discriminant(base).discard_err()?;
                let discr_value =
                    self.ecx.discriminant_for_variant(base.layout.ty, variant).discard_err()?;
                discr_value.into()
            }
            Len(slice) => {
                let slice = self.evaluated[slice].as_ref()?;
                let usize_layout = self.ecx.layout_of(self.tcx.types.usize).unwrap();
                let len = slice.len(&self.ecx).discard_err()?;
                let imm = ImmTy::from_uint(len, usize_layout);
                imm.into()
            }
            NullaryOp(null_op, ty) => {
                let layout = self.ecx.layout_of(ty).ok()?;
                if let NullOp::SizeOf | NullOp::AlignOf = null_op
                    && layout.is_unsized()
                {
                    return None;
                }
                let val = match null_op {
                    NullOp::SizeOf => layout.size.bytes(),
                    NullOp::AlignOf => layout.align.abi.bytes(),
                    NullOp::OffsetOf(fields) => self
                        .ecx
                        .tcx
                        .offset_of_subfield(self.typing_env(), layout, fields.iter())
                        .bytes(),
                    NullOp::UbChecks => return None,
                    NullOp::ContractChecks => return None,
                };
                let usize_layout = self.ecx.layout_of(self.tcx.types.usize).unwrap();
                let imm = ImmTy::from_uint(val, usize_layout);
                imm.into()
            }
            UnaryOp(un_op, operand) => {
                let operand = self.evaluated[operand].as_ref()?;
                let operand = self.ecx.read_immediate(operand).discard_err()?;
                let val = self.ecx.unary_op(un_op, &operand).discard_err()?;
                val.into()
            }
            BinaryOp(bin_op, lhs, rhs) => {
                let lhs = self.evaluated[lhs].as_ref()?;
                let lhs = self.ecx.read_immediate(lhs).discard_err()?;
                let rhs = self.evaluated[rhs].as_ref()?;
                let rhs = self.ecx.read_immediate(rhs).discard_err()?;
                let val = self.ecx.binary_op(bin_op, &lhs, &rhs).discard_err()?;
                val.into()
            }
            Cast { kind, value, from: _, to } => match kind {
                CastKind::IntToInt | CastKind::IntToFloat => {
                    let value = self.evaluated[value].as_ref()?;
                    let value = self.ecx.read_immediate(value).discard_err()?;
                    let to = self.ecx.layout_of(to).ok()?;
                    let res = self.ecx.int_to_int_or_float(&value, to).discard_err()?;
                    res.into()
                }
                CastKind::FloatToFloat | CastKind::FloatToInt => {
                    let value = self.evaluated[value].as_ref()?;
                    let value = self.ecx.read_immediate(value).discard_err()?;
                    let to = self.ecx.layout_of(to).ok()?;
                    let res = self.ecx.float_to_float_or_int(&value, to).discard_err()?;
                    res.into()
                }
                CastKind::Transmute => {
                    let value = self.evaluated[value].as_ref()?;
                    let to = self.ecx.layout_of(to).ok()?;
                    // `offset` for immediates generally only supports projections that match the
                    // type of the immediate. However, as a HACK, we exploit that it can also do
                    // limited transmutes: it only works between types with the same layout, and
                    // cannot transmute pointers to integers.
                    if value.as_mplace_or_imm().is_right() {
                        let can_transmute = match (value.layout.backend_repr, to.backend_repr) {
                            (BackendRepr::Scalar(s1), BackendRepr::Scalar(s2)) => {
                                s1.size(&self.ecx) == s2.size(&self.ecx)
                                    && !matches!(s1.primitive(), Primitive::Pointer(..))
                            }
                            (BackendRepr::ScalarPair(a1, b1), BackendRepr::ScalarPair(a2, b2)) => {
                                a1.size(&self.ecx) == a2.size(&self.ecx) &&
                                b1.size(&self.ecx) == b2.size(&self.ecx) &&
                                // The alignment of the second component determines its offset,
                                // so that also needs to match.
                                b1.align(&self.ecx) == b2.align(&self.ecx) &&
                                // None of the inputs may be a pointer.
                                !matches!(a1.primitive(), Primitive::Pointer(..))
                                    && !matches!(b1.primitive(), Primitive::Pointer(..))
                            }
                            _ => false,
                        };
                        if !can_transmute {
                            return None;
                        }
                    }
                    value.offset(Size::ZERO, to, &self.ecx).discard_err()?
                }
                CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _) => {
                    let src = self.evaluated[value].as_ref()?;
                    let to = self.ecx.layout_of(to).ok()?;
                    let dest = self.ecx.allocate(to, MemoryKind::Stack).discard_err()?;
                    self.ecx.unsize_into(src, to, &dest.clone().into()).discard_err()?;
                    self.ecx
                        .alloc_mark_immutable(dest.ptr().provenance.unwrap().alloc_id())
                        .discard_err()?;
                    dest.into()
                }
                CastKind::FnPtrToPtr | CastKind::PtrToPtr => {
                    let src = self.evaluated[value].as_ref()?;
                    let src = self.ecx.read_immediate(src).discard_err()?;
                    let to = self.ecx.layout_of(to).ok()?;
                    let ret = self.ecx.ptr_to_ptr(&src, to).discard_err()?;
                    ret.into()
                }
                CastKind::PointerCoercion(ty::adjustment::PointerCoercion::UnsafeFnPointer, _) => {
                    let src = self.evaluated[value].as_ref()?;
                    let src = self.ecx.read_immediate(src).discard_err()?;
                    let to = self.ecx.layout_of(to).ok()?;
                    ImmTy::from_immediate(*src, to).into()
                }
                _ => return None,
            },
        };
        Some(op)
    }

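    /// Compute the value obtained by applying `proj` to `value`. `place` is the place whose value
    /// is `value`; it is only used to recover type information.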
    fn project(
        &mut self,
        place: PlaceRef<'tcx>,
        value: VnIndex,
        proj: PlaceElem<'tcx>,
    ) -> Option<VnIndex> {
        let proj = match proj {
            ProjectionElem::Deref => {
                let ty = place.ty(self.local_decls, self.tcx).ty;
                // unsound: https://github.com/rust-lang/rust/issues/130853
                if self.tcx.sess.opts.unstable_opts.unsound_mir_opts
                    && let Some(Mutability::Not) = ty.ref_mutability()
                    && let Some(pointee_ty) = ty.builtin_deref(true)
                    && pointee_ty.is_freeze(self.tcx, self.typing_env())
                {
                    // An immutable borrow `_x` always points to the same value for the
                    // lifetime of the borrow, so we can merge all instances of `*_x`.
                    ProjectionElem::Deref
                } else {
                    return None;
                }
            }
            ProjectionElem::Downcast(name, index) => ProjectionElem::Downcast(name, index),
            ProjectionElem::Field(f, ty) => {
                if let Value::Aggregate(_, _, fields) = self.get(value) {
                    return Some(fields[f.as_usize()]);
                } else if let Value::Projection(
                    outer_value,
                    ProjectionElem::Downcast(_, read_variant),
                ) = self.get(value)
                    && let Value::Aggregate(_, written_variant, fields) = self.get(*outer_value)
                    // This pass is not aware of control flow, so we do not know whether the
                    // replacement we are doing is actually reachable. We could be in any arm of
                    // ```
                    // match Some(x) {
                    //     Some(y) => /* stuff */,
                    //     None => /* other */,
                    // }
                    // ```
                    //
                    // In surface Rust, the current statement would be unreachable.
                    //
                    // However, from the reference chapter on enums and RFC 2195,
                    // accessing the wrong variant is not UB if the enum has repr.
                    // So it's not impossible for a series of MIR opts to generate
                    // a downcast to an inactive variant.
                    && written_variant == read_variant
                {
                    return Some(fields[f.as_usize()]);
                }
                ProjectionElem::Field(f, ty)
            }
            ProjectionElem::Index(idx) => {
                if let Value::Repeat(inner, _) = self.get(value) {
                    return Some(*inner);
                }
                let idx = self.locals[idx]?;
                ProjectionElem::Index(idx)
            }
            ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
                match self.get(value) {
                    Value::Repeat(inner, _) => {
                        return Some(*inner);
                    }
                    Value::Aggregate(AggregateTy::Array, _, operands) => {
                        let offset = if from_end {
                            operands.len() - offset as usize
                        } else {
                            offset as usize
                        };
                        return operands.get(offset).copied();
                    }
                    _ => {}
                };
                ProjectionElem::ConstantIndex { offset, min_length, from_end }
            }
            ProjectionElem::Subslice { from, to, from_end } => {
                ProjectionElem::Subslice { from, to, from_end }
            }
            ProjectionElem::OpaqueCast(ty) => ProjectionElem::OpaqueCast(ty),
            ProjectionElem::Subtype(ty) => ProjectionElem::Subtype(ty),
            ProjectionElem::UnwrapUnsafeBinder(ty) => ProjectionElem::UnwrapUnsafeBinder(ty),
        };

        Some(self.insert(Value::Projection(value, proj)))
    }

    /// Simplify the projection chain if we know better.
    #[instrument(level = "trace", skip(self))]
    fn simplify_place_projection(&mut self, place: &mut Place<'tcx>, location: Location) {
        // If the projection is indirect, we treat the local as a value, so we can replace it with
        // another local.
        if place.is_indirect_first_projection()
            && let Some(base) = self.locals[place.local]
            && let Some(new_local) = self.try_as_local(base, location)
            && place.local != new_local
        {
            place.local = new_local;
            self.reused_locals.insert(new_local);
        }

        let mut projection = Cow::Borrowed(&place.projection[..]);

        for i in 0..projection.len() {
            let elem = projection[i];
            if let ProjectionElem::Index(idx_local) = elem
                && let Some(idx) = self.locals[idx_local]
            {
                if let Some(offset) = self.evaluated[idx].as_ref()
                    && let Some(offset) = self.ecx.read_target_usize(offset).discard_err()
                    && let Some(min_length) = offset.checked_add(1)
                {
                    projection.to_mut()[i] =
                        ProjectionElem::ConstantIndex { offset, min_length, from_end: false };
                } else if let Some(new_idx_local) = self.try_as_local(idx, location)
                    && idx_local != new_idx_local
                {
                    projection.to_mut()[i] = ProjectionElem::Index(new_idx_local);
                    self.reused_locals.insert(new_idx_local);
                }
            }
        }

        if projection.is_owned() {
            place.projection = self.tcx.mk_place_elems(&projection);
        }

        trace!(?place);
    }

    /// Represent the *value* which would be read from `place`, and point `place` to a preexisting
    /// place with the same value (if one already exists).
    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_place_value(
        &mut self,
        place: &mut Place<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        self.simplify_place_projection(place, location);

        // Invariant: `place` and `place_ref` point to the same value, even if they point to
        // different memory locations.
        let mut place_ref = place.as_ref();

        // Invariant: `value` holds the value up to the `index`th projection excluded.
        let mut value = self.locals[place.local]?;
        for (index, proj) in place.projection.iter().enumerate() {
            if let Value::Projection(pointer, ProjectionElem::Deref) = *self.get(value)
                && let Value::Address { place: mut pointee, kind, .. } = *self.get(pointer)
                && let AddressKind::Ref(BorrowKind::Shared) = kind
                && let Some(v) = self.simplify_place_value(&mut pointee, location)
            {
                value = v;
                place_ref = pointee.project_deeper(&place.projection[index..], self.tcx).as_ref();
            }
            if let Some(local) = self.try_as_local(value, location) {
                // Both `local` and `Place { local: place.local, projection: projection[..index] }`
                // hold the same value. Therefore, the following place holds the value in the
                // original `place`.
                place_ref = PlaceRef { local, projection: &place.projection[index..] };
            }

            let base = PlaceRef { local: place.local, projection: &place.projection[..index] };
            value = self.project(base, value, proj)?;
        }

        if let Value::Projection(pointer, ProjectionElem::Deref) = *self.get(value)
            && let Value::Address { place: mut pointee, kind, .. } = *self.get(pointer)
            && let AddressKind::Ref(BorrowKind::Shared) = kind
            && let Some(v) = self.simplify_place_value(&mut pointee, location)
        {
            value = v;
            place_ref = pointee.project_deeper(&[], self.tcx).as_ref();
        }
        if let Some(new_local) = self.try_as_local(value, location) {
            place_ref = PlaceRef { local: new_local, projection: &[] };
        }

        if place_ref.local != place.local || place_ref.projection.len() < place.projection.len() {
            // By the invariant on `place_ref`.
            *place = place_ref.project_deeper(&[], self.tcx);
            self.reused_locals.insert(place_ref.local);
        }

        Some(value)
    }

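    /// Compute the value of `operand`. If it evaluates to a constant, replace the operand by that
    /// constant; places inside the operand are simplified by `simplify_place_value`.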
    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_operand(
        &mut self,
        operand: &mut Operand<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        match *operand {
            Operand::Constant(ref constant) => self.insert_constant(constant.const_),
            Operand::Copy(ref mut place) | Operand::Move(ref mut place) => {
                let value = self.simplify_place_value(place, location)?;
                if let Some(const_) = self.try_as_constant(value) {
                    *operand = Operand::Constant(Box::new(const_));
                }
                Some(value)
            }
        }
    }

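    /// Compute the value of `rvalue`, rewriting it in place when it can be expressed as a cheaper
    /// rvalue (e.g. a constant, a copy, or a `Repeat`).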
    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_rvalue(
        &mut self,
        rvalue: &mut Rvalue<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let value = match *rvalue {
            // Forward values.
            Rvalue::Use(ref mut operand) => return self.simplify_operand(operand, location),
            Rvalue::CopyForDeref(place) => {
                let mut operand = Operand::Copy(place);
                let val = self.simplify_operand(&mut operand, location);
                *rvalue = Rvalue::Use(operand);
                return val;
            }

            // Roots.
            Rvalue::Repeat(ref mut op, amount) => {
                let op = self.simplify_operand(op, location)?;
                Value::Repeat(op, amount)
            }
            Rvalue::NullaryOp(op, ty) => Value::NullaryOp(op, ty),
            Rvalue::Aggregate(..) => return self.simplify_aggregate(rvalue, location),
            Rvalue::Ref(_, borrow_kind, ref mut place) => {
                self.simplify_place_projection(place, location);
                return self.new_pointer(*place, AddressKind::Ref(borrow_kind));
            }
            Rvalue::RawPtr(mutbl, ref mut place) => {
                self.simplify_place_projection(place, location);
                return self.new_pointer(*place, AddressKind::Address(mutbl));
            }
            Rvalue::WrapUnsafeBinder(ref mut op, _) => {
                return self.simplify_operand(op, location);
            }

            // Operations.
            Rvalue::Len(ref mut place) => return self.simplify_len(place, location),
            Rvalue::Cast(ref mut kind, ref mut value, to) => {
                return self.simplify_cast(kind, value, to, location);
            }
            Rvalue::BinaryOp(op, box (ref mut lhs, ref mut rhs)) => {
                return self.simplify_binary(op, lhs, rhs, location);
            }
            Rvalue::UnaryOp(op, ref mut arg_op) => {
                return self.simplify_unary(op, arg_op, location);
            }
            Rvalue::Discriminant(ref mut place) => {
                let place = self.simplify_place_value(place, location)?;
                if let Some(discr) = self.simplify_discriminant(place) {
                    return Some(discr);
                }
                Value::Discriminant(place)
            }

            // Unsupported values.
            Rvalue::ThreadLocalRef(..) | Rvalue::ShallowInitBox(..) => return None,
        };
        debug!(?value);
        Some(self.insert(value))
    }

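    /// If `place` is a known enum aggregate, compute the discriminant of its variant directly.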
    fn simplify_discriminant(&mut self, place: VnIndex) -> Option<VnIndex> {
        if let Value::Aggregate(enum_ty, variant, _) = *self.get(place)
            && let AggregateTy::Def(enum_did, enum_args) = enum_ty
            && let DefKind::Enum = self.tcx.def_kind(enum_did)
        {
            let enum_ty = self.tcx.type_of(enum_did).instantiate(self.tcx, enum_args);
            let discr = self.ecx.discriminant_for_variant(enum_ty, variant).discard_err()?;
            return Some(self.insert_scalar(discr.to_scalar(), discr.layout.ty));
        }

        None
    }

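    /// Try to turn a value-based projection elem back into a place-based one, reusing a local for
    /// the index value if one is available.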
    fn try_as_place_elem(
        &mut self,
        proj: ProjectionElem<VnIndex, Ty<'tcx>>,
        loc: Location,
    ) -> Option<PlaceElem<'tcx>> {
        Some(match proj {
            ProjectionElem::Deref => ProjectionElem::Deref,
            ProjectionElem::Field(idx, ty) => ProjectionElem::Field(idx, ty),
            ProjectionElem::Index(idx) => {
                let local = self.try_as_local(idx, loc)?;
                self.reused_locals.insert(local);
                ProjectionElem::Index(local)
            }
            ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
                ProjectionElem::ConstantIndex { offset, min_length, from_end }
            }
            ProjectionElem::Subslice { from, to, from_end } => {
                ProjectionElem::Subslice { from, to, from_end }
            }
            ProjectionElem::Downcast(symbol, idx) => ProjectionElem::Downcast(symbol, idx),
            ProjectionElem::OpaqueCast(idx) => ProjectionElem::OpaqueCast(idx),
            ProjectionElem::Subtype(idx) => ProjectionElem::Subtype(idx),
            ProjectionElem::UnwrapUnsafeBinder(ty) => ProjectionElem::UnwrapUnsafeBinder(ty),
        })
    }

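    /// If this aggregate rebuilds, field by field, another value of the same type, replace the
    /// rvalue by a copy of the place holding that value.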
    fn simplify_aggregate_to_copy(
        &mut self,
        rvalue: &mut Rvalue<'tcx>,
        location: Location,
        fields: &[VnIndex],
        variant_index: VariantIdx,
    ) -> Option<VnIndex> {
        let Some(&first_field) = fields.first() else {
            return None;
        };
        let Value::Projection(copy_from_value, _) = *self.get(first_field) else {
            return None;
        };
        // All fields must correspond one-to-one and come from the same aggregate value.
        if fields.iter().enumerate().any(|(index, &v)| {
            if let Value::Projection(pointer, ProjectionElem::Field(from_index, _)) = *self.get(v)
                && copy_from_value == pointer
                && from_index.index() == index
            {
                return false;
            }
            true
        }) {
            return None;
        }

        let mut copy_from_local_value = copy_from_value;
        if let Value::Projection(pointer, proj) = *self.get(copy_from_value)
            && let ProjectionElem::Downcast(_, read_variant) = proj
        {
            if variant_index == read_variant {
                // When copying a variant, there is no need to downcast.
                copy_from_local_value = pointer;
            } else {
                // The copied variant must be identical.
                return None;
            }
        }

        let tcx = self.tcx;
        let mut projection = SmallVec::<[PlaceElem<'tcx>; 1]>::new();
        loop {
            if let Some(local) = self.try_as_local(copy_from_local_value, location) {
                projection.reverse();
                let place = Place { local, projection: tcx.mk_place_elems(projection.as_slice()) };
                if rvalue.ty(self.local_decls, tcx) == place.ty(self.local_decls, tcx).ty {
                    self.reused_locals.insert(local);
                    *rvalue = Rvalue::Use(Operand::Copy(place));
                    return Some(copy_from_value);
                }
                return None;
            } else if let Value::Projection(pointer, proj) = *self.get(copy_from_local_value)
                && let Some(proj) = self.try_as_place_elem(proj, location)
            {
                projection.push(proj);
                copy_from_local_value = pointer;
            } else {
                return None;
            }
        }
    }

    fn simplify_aggregate(
        &mut self,
        rvalue: &mut Rvalue<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let Rvalue::Aggregate(box ref kind, ref mut field_ops) = *rvalue else { bug!() };

        let tcx = self.tcx;
        if field_ops.is_empty() {
            let is_zst = match *kind {
                AggregateKind::Array(..)
                | AggregateKind::Tuple
                | AggregateKind::Closure(..)
                | AggregateKind::CoroutineClosure(..) => true,
                // Only enums can be non-ZST.
                AggregateKind::Adt(did, ..) => tcx.def_kind(did) != DefKind::Enum,
                // Coroutines are never ZST, as they at least contain the implicit states.
                AggregateKind::Coroutine(..) => false,
                AggregateKind::RawPtr(..) => bug!("MIR for RawPtr aggregate must have 2 fields"),
            };

            if is_zst {
                let ty = rvalue.ty(self.local_decls, tcx);
                return self.insert_constant(Const::zero_sized(ty));
            }
        }

        let (mut ty, variant_index) = match *kind {
            AggregateKind::Array(..) => {
                assert!(!field_ops.is_empty());
                (AggregateTy::Array, FIRST_VARIANT)
            }
            AggregateKind::Tuple => {
                assert!(!field_ops.is_empty());
                (AggregateTy::Tuple, FIRST_VARIANT)
            }
            AggregateKind::Closure(did, args)
            | AggregateKind::CoroutineClosure(did, args)
            | AggregateKind::Coroutine(did, args) => (AggregateTy::Def(did, args), FIRST_VARIANT),
            AggregateKind::Adt(did, variant_index, args, _, None) => {
                (AggregateTy::Def(did, args), variant_index)
            }
            // Do not track unions.
            AggregateKind::Adt(_, _, _, _, Some(_)) => return None,
            AggregateKind::RawPtr(pointee_ty, mtbl) => {
                assert_eq!(field_ops.len(), 2);
                let data_pointer_ty = field_ops[FieldIdx::ZERO].ty(self.local_decls, self.tcx);
                let output_pointer_ty = Ty::new_ptr(self.tcx, pointee_ty, mtbl);
                (AggregateTy::RawPtr { data_pointer_ty, output_pointer_ty }, FIRST_VARIANT)
            }
        };

        let fields: Option<Vec<_>> = field_ops
            .iter_mut()
            .map(|op| self.simplify_operand(op, location).or_else(|| self.new_opaque()))
            .collect();
        let mut fields = fields?;

        if let AggregateTy::RawPtr { data_pointer_ty, output_pointer_ty } = &mut ty {
            let mut was_updated = false;

            // Any thin pointer of matching mutability is fine as the data pointer.
            while let Value::Cast {
                kind: CastKind::PtrToPtr,
                value: cast_value,
                from: cast_from,
                to: _,
            } = self.get(fields[0])
                && let ty::RawPtr(from_pointee_ty, from_mtbl) = cast_from.kind()
                && let ty::RawPtr(_, output_mtbl) = output_pointer_ty.kind()
                && from_mtbl == output_mtbl
                && from_pointee_ty.is_sized(self.tcx, self.typing_env())
            {
                fields[0] = *cast_value;
                *data_pointer_ty = *cast_from;
                was_updated = true;
            }

            if was_updated && let Some(op) = self.try_as_operand(fields[0], location) {
                field_ops[FieldIdx::ZERO] = op;
            }
        }

        if let AggregateTy::Array = ty
            && fields.len() > 4
        {
            let first = fields[0];
            if fields.iter().all(|&v| v == first) {
                let len = ty::Const::from_target_usize(self.tcx, fields.len().try_into().unwrap());
                if let Some(op) = self.try_as_operand(first, location) {
                    *rvalue = Rvalue::Repeat(op, len);
                }
                return Some(self.insert(Value::Repeat(first, len)));
            }
        }

        // unsound: https://github.com/rust-lang/rust/issues/132353
        if tcx.sess.opts.unstable_opts.unsound_mir_opts
            && let AggregateTy::Def(_, _) = ty
            && let Some(value) =
                self.simplify_aggregate_to_copy(rvalue, location, &fields, variant_index)
        {
            return Some(value);
        }

        Some(self.insert(Value::Aggregate(ty, variant_index, fields)))
    }

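    /// Compute the value of a unary operation, folding e.g. double negation and negated
    /// comparisons, and normalizing the argument of `PtrMetadata` through metadata-preserving
    /// casts and borrows.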
    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_unary(
        &mut self,
        op: UnOp,
        arg_op: &mut Operand<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let mut arg_index = self.simplify_operand(arg_op, location)?;

        // PtrMetadata doesn't care about *const vs *mut vs & vs &mut,
        // so start by removing those distinctions so we can update the `Operand`.
        if op == UnOp::PtrMetadata {
            let mut was_updated = false;
            loop {
                match self.get(arg_index) {
                    // Pointer casts that preserve metadata, such as
                    // `*const [i32]` <-> `*mut [i32]` <-> `*mut [f32]`.
                    // It's critical that this not eliminate cases like
                    // `*const [T]` -> `*const T` which remove metadata.
                    // We run on potentially-generic MIR, though, so unlike codegen
                    // we can't always know exactly what the metadata are.
                    // To allow things like `*mut (?A, ?T)` <-> `*mut (?B, ?T)`,
                    // it's fine to get a projection as the type.
                    Value::Cast { kind: CastKind::PtrToPtr, value: inner, from, to }
                        if self.pointers_have_same_metadata(*from, *to) =>
                    {
                        arg_index = *inner;
                        was_updated = true;
                        continue;
                    }

                    // `&mut *p`, `&raw *p`, etc. don't change metadata.
                    Value::Address { place, kind: _, provenance: _ }
                        if let PlaceRef { local, projection: [PlaceElem::Deref] } =
                            place.as_ref()
                            && let Some(local_index) = self.locals[local] =>
                    {
                        arg_index = local_index;
                        was_updated = true;
                        continue;
                    }

                    _ => {
                        if was_updated && let Some(op) = self.try_as_operand(arg_index, location) {
                            *arg_op = op;
                        }
                        break;
                    }
                }
            }
        }

        let value = match (op, self.get(arg_index)) {
            (UnOp::Not, Value::UnaryOp(UnOp::Not, inner)) => return Some(*inner),
            (UnOp::Neg, Value::UnaryOp(UnOp::Neg, inner)) => return Some(*inner),
            (UnOp::Not, Value::BinaryOp(BinOp::Eq, lhs, rhs)) => {
                Value::BinaryOp(BinOp::Ne, *lhs, *rhs)
            }
            (UnOp::Not, Value::BinaryOp(BinOp::Ne, lhs, rhs)) => {
                Value::BinaryOp(BinOp::Eq, *lhs, *rhs)
            }
            (UnOp::PtrMetadata, Value::Aggregate(AggregateTy::RawPtr { .. }, _, fields)) => {
                return Some(fields[1]);
            }
            // We have an unsizing cast, which assigns the length to wide pointer metadata.
            (
                UnOp::PtrMetadata,
                Value::Cast {
                    kind: CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _),
                    from,
                    to,
                    ..
                },
            ) if let ty::Slice(..) = to.builtin_deref(true).unwrap().kind()
                && let ty::Array(_, len) = from.builtin_deref(true).unwrap().kind() =>
            {
                return self.insert_constant(Const::Ty(self.tcx.types.usize, *len));
            }
            _ => Value::UnaryOp(op, arg_index),
        };
        Some(self.insert(value))
    }

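    /// Compute the value of a binary operation, first stripping `PtrToPtr` casts from pointer
    /// comparisons when both sides come from the same source type, then trying algebraic
    /// simplifications in `simplify_binary_inner`.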
1199    #[instrument(level = "trace", skip(self), ret)]
1200    fn simplify_binary(
1201        &mut self,
1202        op: BinOp,
1203        lhs_operand: &mut Operand<'tcx>,
1204        rhs_operand: &mut Operand<'tcx>,
1205        location: Location,
1206    ) -> Option<VnIndex> {
1207        let lhs = self.simplify_operand(lhs_operand, location);
1208        let rhs = self.simplify_operand(rhs_operand, location);
        // Only short-circuit after we have called `simplify_operand`
        // on both operands, for its side effects.
        let mut lhs = lhs?;
        let mut rhs = rhs?;

        let lhs_ty = lhs_operand.ty(self.local_decls, self.tcx);

        // If we're comparing pointers, remove `PtrToPtr` casts if the from
        // types of both casts and the metadata all match.
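        // e.g. with `p, q: *const i32`, `(p as *const u8) == (q as *const u8)`
        // simplifies to `p == q`.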
        if let BinOp::Eq | BinOp::Ne | BinOp::Lt | BinOp::Le | BinOp::Gt | BinOp::Ge = op
            && lhs_ty.is_any_ptr()
            && let Value::Cast {
                kind: CastKind::PtrToPtr, value: lhs_value, from: lhs_from, ..
            } = self.get(lhs)
            && let Value::Cast {
                kind: CastKind::PtrToPtr, value: rhs_value, from: rhs_from, ..
            } = self.get(rhs)
            && lhs_from == rhs_from
            && self.pointers_have_same_metadata(*lhs_from, lhs_ty)
        {
            lhs = *lhs_value;
            rhs = *rhs_value;
            if let Some(lhs_op) = self.try_as_operand(lhs, location)
                && let Some(rhs_op) = self.try_as_operand(rhs, location)
            {
                *lhs_operand = lhs_op;
                *rhs_operand = rhs_op;
            }
        }

        if let Some(value) = self.simplify_binary_inner(op, lhs_ty, lhs, rhs) {
            return Some(value);
        }
        let value = Value::BinaryOp(op, lhs, rhs);
        Some(self.insert(value))
    }

    fn simplify_binary_inner(
        &mut self,
        op: BinOp,
        lhs_ty: Ty<'tcx>,
        lhs: VnIndex,
        rhs: VnIndex,
    ) -> Option<VnIndex> {
        // Floats are weird enough that none of the logic below applies.
        let reasonable_ty =
            lhs_ty.is_integral() || lhs_ty.is_bool() || lhs_ty.is_char() || lhs_ty.is_any_ptr();
        if !reasonable_ty {
            return None;
        }

        let layout = self.ecx.layout_of(lhs_ty).ok()?;

        let as_bits = |value| {
            let constant = self.evaluated[value].as_ref()?;
            if layout.backend_repr.is_scalar() {
                let scalar = self.ecx.read_scalar(constant).discard_err()?;
                scalar.to_bits(constant.layout.size).discard_err()
            } else {
                // `constant` is a wide pointer. Do not evaluate to bits.
                None
            }
        };

        // Represent the values as `Left(bits)` or `Right(VnIndex)`.
        use Either::{Left, Right};
        let a = as_bits(lhs).map_or(Right(lhs), Left);
        let b = as_bits(rhs).map_or(Right(rhs), Left);

        let result = match (op, a, b) {
            // Neutral elements.
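            // e.g. `0 + x`, `x - 0`, `x | 0`, `x ^ 0`, `x << 0`, `1 * x`, and `x / 1`
            // all simplify to `x`.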
            (
                BinOp::Add
                | BinOp::AddWithOverflow
                | BinOp::AddUnchecked
                | BinOp::BitOr
                | BinOp::BitXor,
                Left(0),
                Right(p),
            )
            | (
                BinOp::Add
                | BinOp::AddWithOverflow
                | BinOp::AddUnchecked
                | BinOp::BitOr
                | BinOp::BitXor
                | BinOp::Sub
                | BinOp::SubWithOverflow
                | BinOp::SubUnchecked
                | BinOp::Offset
                | BinOp::Shl
                | BinOp::Shr,
                Right(p),
                Left(0),
            )
            | (BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked, Left(1), Right(p))
            | (
                BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked | BinOp::Div,
                Right(p),
                Left(1),
            ) => p,
            // Attempt to simplify `x & ALL_ONES` to `x`, with `ALL_ONES` depending on type size.
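            // e.g. `x & u8::MAX` => `x` for `u8`, and `b & true` => `b` for `bool`.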
            (BinOp::BitAnd, Right(p), Left(ones)) | (BinOp::BitAnd, Left(ones), Right(p))
                if ones == layout.size.truncate(u128::MAX)
                    || (layout.ty.is_bool() && ones == 1) =>
            {
                p
            }
            // Absorbing elements.
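            // e.g. `x * 0` => `0`, `x & 0` => `0`, and `x % 1` => `0`.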
            (
                BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked | BinOp::BitAnd,
                _,
                Left(0),
            )
            | (BinOp::Rem, _, Left(1))
            | (
                BinOp::Mul
                | BinOp::MulWithOverflow
                | BinOp::MulUnchecked
                | BinOp::Div
                | BinOp::Rem
                | BinOp::BitAnd
                | BinOp::Shl
                | BinOp::Shr,
                Left(0),
                _,
            ) => self.insert_scalar(Scalar::from_uint(0u128, layout.size), lhs_ty),
            // Attempt to simplify `x | ALL_ONES` to `ALL_ONES`.
            (BinOp::BitOr, _, Left(ones)) | (BinOp::BitOr, Left(ones), _)
                if ones == layout.size.truncate(u128::MAX)
                    || (layout.ty.is_bool() && ones == 1) =>
            {
                self.insert_scalar(Scalar::from_uint(ones, layout.size), lhs_ty)
            }
            // Sub/Xor with itself.
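            // e.g. `x - x` => `0` and `x ^ x` => `0`.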
            (BinOp::Sub | BinOp::SubWithOverflow | BinOp::SubUnchecked | BinOp::BitXor, a, b)
                if a == b =>
            {
                self.insert_scalar(Scalar::from_uint(0u128, layout.size), lhs_ty)
            }
            // Comparison:
            // - if both operands can be computed as bits, just compare the bits;
            // - if we proved that both operands have the same value, we can insert true/false;
            // - otherwise, do nothing, as we do not try to prove inequality.
            (BinOp::Eq, Left(a), Left(b)) => self.insert_bool(a == b),
            (BinOp::Eq, a, b) if a == b => self.insert_bool(true),
            (BinOp::Ne, Left(a), Left(b)) => self.insert_bool(a != b),
            (BinOp::Ne, a, b) if a == b => self.insert_bool(false),
            _ => return None,
        };

        if op.is_overflowing() {
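            // None of the simplifications above can overflow,
            // so the overflow flag is always `false`.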
            let false_val = self.insert_bool(false);
            Some(self.insert_tuple(vec![result, false_val]))
        } else {
            Some(result)
        }
    }

    fn simplify_cast(
        &mut self,
        initial_kind: &mut CastKind,
        initial_operand: &mut Operand<'tcx>,
        to: Ty<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        use CastKind::*;
        use rustc_middle::ty::adjustment::PointerCoercion::*;

        let mut from = initial_operand.ty(self.local_decls, self.tcx);
        let mut kind = *initial_kind;
        let mut value = self.simplify_operand(initial_operand, location)?;
        if from == to {
            return Some(value);
        }

        if let CastKind::PointerCoercion(ReifyFnPointer | ClosureFnPointer(_), _) = kind {
            // Each reification of a generic fn may get a different pointer.
            // Do not try to merge them.
            return self.new_opaque();
        }

        let mut was_ever_updated = false;
        loop {
            let mut was_updated_this_iteration = false;

            // Transmuting between raw pointers is just a pointer cast so long as
            // they have the same metadata type (like `*const i32` <=> `*mut u64`
            // or `*mut [i32]` <=> `*const [u64]`), including the common special
            // case of `*const T` <=> `*mut T`.
            if let Transmute = kind
                && from.is_raw_ptr()
                && to.is_raw_ptr()
                && self.pointers_have_same_metadata(from, to)
            {
                kind = PtrToPtr;
                was_updated_this_iteration = true;
            }

            // If a cast just casts away the metadata again, then we can get it by
            // casting the original thin pointer passed to `from_raw_parts`.
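            // e.g. `ptr::from_raw_parts(data, meta) as *const u8` can just cast `data`.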
            if let PtrToPtr = kind
                && let Value::Aggregate(AggregateTy::RawPtr { data_pointer_ty, .. }, _, fields) =
                    self.get(value)
                && let ty::RawPtr(to_pointee, _) = to.kind()
                && to_pointee.is_sized(self.tcx, self.typing_env())
            {
                from = *data_pointer_ty;
                value = fields[0];
                was_updated_this_iteration = true;
                if *data_pointer_ty == to {
                    return Some(fields[0]);
                }
            }

            // Aggregate-then-Transmute can just transmute the original field value,
            // so long as the bytes of the value come from only a single field.
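            // e.g. transmuting `Wrapping(x)` into `u32` is just `x`.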
            if let Transmute = kind
                && let Value::Aggregate(_aggregate_ty, variant_idx, field_values) = self.get(value)
                && let Some((field_idx, field_ty)) =
                    self.value_is_all_in_one_field(from, *variant_idx)
            {
                from = field_ty;
                value = field_values[field_idx.as_usize()];
                was_updated_this_iteration = true;
                if field_ty == to {
                    return Some(value);
                }
            }

            // Various cast-then-cast cases can be simplified.
            if let Value::Cast {
                kind: inner_kind,
                value: inner_value,
                from: inner_from,
                to: inner_to,
            } = *self.get(value)
            {
                let new_kind = match (inner_kind, kind) {
                    // Even if there's a narrowing cast in here, that's fine, because
                    // things like `*mut [i32] -> *mut i32 -> *const i32` and
                    // `*mut [i32] -> *const [i32] -> *const i32` can skip the middle in MIR.
                    (PtrToPtr, PtrToPtr) => Some(PtrToPtr),
                    // PtrToPtr-then-Transmute is fine so long as the pointer cast is identity:
                    // `*const T -> *mut T -> NonNull<T>` is fine, but we need to check for narrowing
                    // to skip things like `*const [i32] -> *const i32 -> NonNull<T>`.
                    (PtrToPtr, Transmute)
                        if self.pointers_have_same_metadata(inner_from, inner_to) =>
                    {
                        Some(Transmute)
                    }
                    // Similarly, for Transmute-then-PtrToPtr. Note that we need to check different
                    // variables for their metadata, and thus this can't merge with the previous arm.
                    (Transmute, PtrToPtr) if self.pointers_have_same_metadata(from, to) => {
                        Some(Transmute)
                    }
                    // It would be legal to always do this, but we don't want to hide information
                    // from the backend that it'd otherwise be able to use for optimizations.
                    (Transmute, Transmute)
                        if !self.type_may_have_niche_of_interest_to_backend(inner_to) =>
                    {
                        Some(Transmute)
                    }
                    _ => None,
                };
                if let Some(new_kind) = new_kind {
                    kind = new_kind;
                    from = inner_from;
                    value = inner_value;
                    was_updated_this_iteration = true;
                    if inner_from == to {
                        return Some(inner_value);
                    }
                }
            }

            if was_updated_this_iteration {
                was_ever_updated = true;
            } else {
                break;
            }
        }

        if was_ever_updated && let Some(op) = self.try_as_operand(value, location) {
            *initial_operand = op;
            *initial_kind = kind;
        }

        Some(self.insert(Value::Cast { kind, value, from, to }))
    }

    fn simplify_len(&mut self, place: &mut Place<'tcx>, location: Location) -> Option<VnIndex> {
        // Trivial case: we are fetching a statically known length.
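        // e.g. `Len(x)` where `x: [u8; 4]` is the constant `4`.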
        let place_ty = place.ty(self.local_decls, self.tcx).ty;
        if let ty::Array(_, len) = place_ty.kind() {
            return self.insert_constant(Const::Ty(self.tcx.types.usize, *len));
        }

        let mut inner = self.simplify_place_value(place, location)?;

        // The length information is stored in the wide pointer.
        // Reborrowing copies length information from one pointer to the other.
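        // e.g. after `_b = &*_a`, the length behind `_b` is the one behind `_a`.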
        while let Value::Address { place: borrowed, .. } = self.get(inner)
            && let [PlaceElem::Deref] = borrowed.projection[..]
            && let Some(borrowed) = self.locals[borrowed.local]
        {
            inner = borrowed;
        }

        // We have an unsizing cast, which assigns the length to wide pointer metadata.
        if let Value::Cast { kind, from, to, .. } = self.get(inner)
            && let CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _) = kind
            && let Some(from) = from.builtin_deref(true)
            && let ty::Array(_, len) = from.kind()
            && let Some(to) = to.builtin_deref(true)
            && let ty::Slice(..) = to.kind()
        {
            return self.insert_constant(Const::Ty(self.tcx.types.usize, *len));
        }

        // Fallback: a symbolic `Len`.
        Some(self.insert(Value::Len(inner)))
    }

    fn pointers_have_same_metadata(&self, left_ptr_ty: Ty<'tcx>, right_ptr_ty: Ty<'tcx>) -> bool {
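        // Thin pointers have `()` metadata, slices and `str` have `usize`,
        // and trait objects have a `DynMetadata` type.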
        let left_meta_ty = left_ptr_ty.pointee_metadata_ty_or_projection(self.tcx);
        let right_meta_ty = right_ptr_ty.pointee_metadata_ty_or_projection(self.tcx);
        if left_meta_ty == right_meta_ty {
            true
        } else if let Ok(left) =
            self.tcx.try_normalize_erasing_regions(self.typing_env(), left_meta_ty)
            && let Ok(right) =
                self.tcx.try_normalize_erasing_regions(self.typing_env(), right_meta_ty)
        {
            left == right
        } else {
            false
        }
    }

    /// Returns `false` if we know for sure that this type has no interesting niche,
    /// and thus we can skip transmuting through it without worrying.
    ///
    /// The backend will emit `assume`s when transmuting between types with niches,
    /// so we want to preserve `i32 -> char -> u32` so that niche data is still around,
    /// but it's fine to skip whole-range-is-valid steps like `A -> u32 -> B`.
    fn type_may_have_niche_of_interest_to_backend(&self, ty: Ty<'tcx>) -> bool {
        let Ok(layout) = self.ecx.layout_of(ty) else {
            // If it's too generic or something, then assume it might be interesting later.
            return true;
        };

        match layout.backend_repr {
            BackendRepr::Uninhabited => true,
            BackendRepr::Scalar(a) => !a.is_always_valid(&self.ecx),
            BackendRepr::ScalarPair(a, b) => {
                !a.is_always_valid(&self.ecx) || !b.is_always_valid(&self.ecx)
            }
            BackendRepr::Vector { .. } | BackendRepr::Memory { .. } => false,
        }
    }

    fn value_is_all_in_one_field(
        &self,
        ty: Ty<'tcx>,
        variant: VariantIdx,
    ) -> Option<(FieldIdx, Ty<'tcx>)> {
        if let Ok(layout) = self.ecx.layout_of(ty)
            && let abi::Variants::Single { index } = layout.variants
            && index == variant
            && let Some((field_idx, field_layout)) = layout.non_1zst_field(&self.ecx)
            && layout.size == field_layout.size
        {
            // We needed to check the variant to avoid trying to read the tag
            // field from an enum whose variants have no fields, since that tag
            // field isn't in the `Aggregate` from which we're getting values.
            Some((FieldIdx::from_usize(field_idx), field_layout.ty))
        } else if let ty::Adt(adt, args) = ty.kind()
            && adt.is_struct()
            && adt.repr().transparent()
            && let [single_field] = adt.non_enum_variant().fields.raw.as_slice()
        {
            Some((FieldIdx::ZERO, single_field.ty(self.tcx, args)))
        } else {
            None
        }
    }
}

fn op_to_prop_const<'tcx>(
    ecx: &mut InterpCx<'tcx, DummyMachine>,
    op: &OpTy<'tcx>,
) -> Option<ConstValue<'tcx>> {
    // Do not attempt to propagate unsized locals.
    if op.layout.is_unsized() {
        return None;
    }

    // This constant is a ZST; just return an empty value.
    if op.layout.is_zst() {
        return Some(ConstValue::ZeroSized);
    }

    // Do not synthesize too large constants. Codegen will just memcpy them, which we'd like to
    // avoid.
    if !matches!(op.layout.backend_repr, BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..)) {
        return None;
    }

    // If this constant has scalar ABI, return it as a `ConstValue::Scalar`.
    if let BackendRepr::Scalar(abi::Scalar::Initialized { .. }) = op.layout.backend_repr
        && let Some(scalar) = ecx.read_scalar(op).discard_err()
    {
        if scalar.try_to_scalar_int().is_err() {
            // Check that we do not leak a pointer.
            // Those pointers may lose part of their identity in codegen.
            // FIXME: remove this hack once https://github.com/rust-lang/rust/issues/79738 is fixed.
            return None;
        }
        return Some(ConstValue::Scalar(scalar));
    }

    // If this constant is already represented as an `Allocation`,
    // try putting it into global memory to return it.
    if let Either::Left(mplace) = op.as_mplace_or_imm() {
        let (size, _align) = ecx.size_and_align_of_mplace(&mplace).discard_err()??;

        // Do not try interning a value that contains provenance.
        // Due to https://github.com/rust-lang/rust/issues/79738, doing so could lead to bugs.
        // FIXME: remove this hack once that issue is fixed.
        let alloc_ref = ecx.get_ptr_alloc(mplace.ptr(), size).discard_err()??;
        if alloc_ref.has_provenance() {
            return None;
        }

        let pointer = mplace.ptr().into_pointer_or_addr().ok()?;
        let (prov, offset) = pointer.into_parts();
        let alloc_id = prov.alloc_id();
        intern_const_alloc_for_constprop(ecx, alloc_id).discard_err()?;

        // `alloc_id` may point to a static. Codegen will choke on an `Indirect` with anything
        // but `GlobalAlloc::Memory`, so fall through to copying if needed.
        // FIXME: find a way to treat this more uniformly (probably by fixing codegen)
        if let GlobalAlloc::Memory(alloc) = ecx.tcx.global_alloc(alloc_id)
            // Transmuting a constant is just an offset in the allocation. If the alignment of the
            // allocation is not enough, fall back to copying into a properly aligned value.
            && alloc.inner().align >= op.layout.align.abi
        {
            return Some(ConstValue::Indirect { alloc_id, offset });
        }
    }

    // Everything failed: create a new allocation to hold the data.
    let alloc_id =
        ecx.intern_with_temp_alloc(op.layout, |ecx, dest| ecx.copy_op(op, dest)).discard_err()?;
    let value = ConstValue::Indirect { alloc_id, offset: Size::ZERO };

    // Check that we do not leak a pointer.
    // Those pointers may lose part of their identity in codegen.
    // FIXME: remove this hack once https://github.com/rust-lang/rust/issues/79738 is fixed.
    if ecx.tcx.global_alloc(alloc_id).unwrap_memory().inner().provenance().ptrs().is_empty() {
        return Some(value);
    }

    None
}

impl<'tcx> VnState<'_, 'tcx> {
    /// If either [`Self::try_as_constant`] or [`Self::try_as_local`] succeeds,
    /// returns that result as an [`Operand`].
    fn try_as_operand(&mut self, index: VnIndex, location: Location) -> Option<Operand<'tcx>> {
        if let Some(const_) = self.try_as_constant(index) {
            Some(Operand::Constant(Box::new(const_)))
        } else if let Some(local) = self.try_as_local(index, location) {
            self.reused_locals.insert(local);
            Some(Operand::Copy(local.into()))
        } else {
            None
        }
    }

    /// If `index` is a `Value::Constant`, return the `Constant` to be put in the MIR.
    fn try_as_constant(&mut self, index: VnIndex) -> Option<ConstOperand<'tcx>> {
        // This was already a constant in MIR; do not change it. If the constant is not
        // deterministic, adding an additional mention of it in MIR will not give the same value as
        // the former mention.
        if let Value::Constant { value, disambiguator: 0 } = *self.get(index) {
            debug_assert!(value.is_deterministic());
            return Some(ConstOperand { span: DUMMY_SP, user_ty: None, const_: value });
        }

        let op = self.evaluated[index].as_ref()?;
        if op.layout.is_unsized() {
            // Do not attempt to propagate unsized locals.
            return None;
        }

        let value = op_to_prop_const(&mut self.ecx, op)?;

        // Check that we do not leak a pointer.
        // Those pointers may lose part of their identity in codegen.
        // FIXME: remove this hack once https://github.com/rust-lang/rust/issues/79738 is fixed.
        assert!(!value.may_have_provenance(self.tcx, op.layout.size));

        let const_ = Const::Val(value, op.layout.ty);
        Some(ConstOperand { span: DUMMY_SP, user_ty: None, const_ })
    }

    /// If there is a local which is assigned `index`, and whose assignment strictly dominates
    /// `loc`, return it. If you use this local, add it to `reused_locals` so that its storage
    /// statements are removed.
    fn try_as_local(&mut self, index: VnIndex, loc: Location) -> Option<Local> {
        let other = self.rev_locals.get(index)?;
        other
            .iter()
            .find(|&&other| self.ssa.assignment_dominates(&self.dominators, other, loc))
            .copied()
    }
}

impl<'tcx> MutVisitor<'tcx> for VnState<'_, 'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    fn visit_place(&mut self, place: &mut Place<'tcx>, _: PlaceContext, location: Location) {
        self.simplify_place_projection(place, location);
    }

    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) {
        self.simplify_operand(operand, location);
    }

    fn visit_statement(&mut self, stmt: &mut Statement<'tcx>, location: Location) {
        if let StatementKind::Assign(box (ref mut lhs, ref mut rvalue)) = stmt.kind {
            self.simplify_place_projection(lhs, location);

            // Do not try to simplify a constant; it's already in canonical shape.
            if matches!(rvalue, Rvalue::Use(Operand::Constant(_))) {
                return;
            }

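            // An SSA assignment already has its value from the first pass;
            // otherwise, simplify the rvalue now.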
            let value = lhs
                .as_local()
                .and_then(|local| self.locals[local])
                .or_else(|| self.simplify_rvalue(rvalue, location));
            let Some(value) = value else { return };

            if let Some(const_) = self.try_as_constant(value) {
                *rvalue = Rvalue::Use(Operand::Constant(Box::new(const_)));
            } else if let Some(local) = self.try_as_local(value, location)
                && *rvalue != Rvalue::Use(Operand::Move(local.into()))
            {
                *rvalue = Rvalue::Use(Operand::Copy(local.into()));
                self.reused_locals.insert(local);
            }

            return;
        }
        self.super_statement(stmt, location);
    }
}

struct StorageRemover<'tcx> {
    tcx: TyCtxt<'tcx>,
    reused_locals: DenseBitSet<Local>,
}

impl<'tcx> MutVisitor<'tcx> for StorageRemover<'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, _: Location) {
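        // A reused local may have gained extra uses, so moving out of it is
        // no longer sound; demote moves of it to copies.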
        if let Operand::Move(place) = *operand
            && !place.is_indirect_first_projection()
            && self.reused_locals.contains(place.local)
        {
            *operand = Operand::Copy(place);
        }
    }

    fn visit_statement(&mut self, stmt: &mut Statement<'tcx>, loc: Location) {
        match stmt.kind {
            // When removing storage statements, we need to remove both (#107511).
            StatementKind::StorageLive(l) | StatementKind::StorageDead(l)
                if self.reused_locals.contains(l) =>
            {
                stmt.make_nop()
            }
            _ => self.super_statement(stmt, loc),
        }
    }
}