rustc_mir_transform/
gvn.rs

1//! Global value numbering.
2//!
3//! MIR may contain repeated and/or redundant computations. The objective of this pass is to detect
4//! such redundancies and re-use the already-computed result when possible.
5//!
//! We traverse all assignments `x = rvalue` and operands.
//!
//! For each SSA one, we compute a symbolic representation of values that are assigned to SSA
//! locals. This symbolic representation is defined by the `Value` enum. Each produced instance of
//! `Value` is interned as a `VnIndex`, which allows us to cheaply detect identical values.
//!
//! From those assignments, we construct a mapping `VnIndex -> Vec<(Local, Location)>` of available
//! values, the locals in which they are stored, and the assignment location.
//!
//! For each non-SSA one, we compute the `VnIndex` of the rvalue. If this `VnIndex` is associated
//! to a constant, we replace the rvalue/operand by that constant. Otherwise, if there is an SSA
//! local `y` associated to this `VnIndex`, and if its definition location strictly dominates the
//! assignment to `x`, we replace the assignment by `x = y`.
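//!
//! For instance, in this hypothetical MIR fragment (assuming `_2` is SSA and its assignment
//! dominates the assignment to `_3`):
//! ```ignore (MIR)
//! _2 = Add(copy _1, const 1_i32) // interned as VnIndex i
//! // some MIR
//! _3 = Add(copy _1, const 1_i32) // also VnIndex i; `_2` dominates, so this becomes `_3 = _2`
//! ```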
20//!
//! Opportunistically, this pass also simplifies some `Rvalue`s based on the accumulated knowledge.
22//!
//! # Operational semantics
24//!
25//! Operationally, this pass attempts to prove bitwise equality between locals. Given this MIR:
26//! ```ignore (MIR)
27//! _a = some value // has VnIndex i
28//! // some MIR
29//! _b = some other value // also has VnIndex i
30//! ```
31//!
32//! We consider it to be replaceable by:
33//! ```ignore (MIR)
34//! _a = some value // has VnIndex i
35//! // some MIR
36//! _c = some other value // also has VnIndex i
37//! assume(_a bitwise equal to _c) // follows from having the same VnIndex
38//! _b = _a // follows from the `assume`
39//! ```
40//!
41//! Which is simplifiable to:
42//! ```ignore (MIR)
43//! _a = some value // has VnIndex i
44//! // some MIR
45//! _b = _a
46//! ```
47//!
48//! # Handling of references
49//!
50//! We handle references by assigning a different "provenance" index to each Ref/RawPtr rvalue.
//! This ensures that we do not spuriously merge borrows that should not be merged. Meanwhile, we
52//! consider all the derefs of an immutable reference to a freeze type to give the same value:
53//! ```ignore (MIR)
54//! _a = *_b // _b is &Freeze
55//! _c = *_b // replaced by _c = _a
56//! ```
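//!
//! Conversely, two borrows of the same place are never merged with one another, since each
//! `Ref`/`RawPtr` rvalue receives a fresh provenance index. As a hypothetical sketch:
//! ```ignore (MIR)
//! _a = &_x
//! _b = &_x // NOT replaced by `_b = _a`
//! ```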
57//!
58//! # Determinism of constant propagation
59//!
60//! When registering a new `Value`, we attempt to opportunistically evaluate it as a constant.
61//! The evaluated form is inserted in `evaluated` as an `OpTy` or `None` if evaluation failed.
62//!
//! The difficulty is non-deterministic evaluation of MIR constants. Some `Const`s can have a
//! different runtime value each time they are evaluated. This is the case for `Const::Slice`,
//! which gets a new pointer each time it is evaluated, and for constants that contain a fn
//! pointer (an `AllocId` pointing to a `GlobalAlloc::Function`), which may refer to a different
//! symbol in each codegen unit.
68//!
69//! Meanwhile, we want to be able to read indirect constants. For instance:
70//! ```
71//! static A: &'static &'static u8 = &&63;
72//! fn foo() -> u8 {
73//!     **A // We want to replace by 63.
74//! }
75//! fn bar() -> u8 {
76//!     b"abc"[1] // We want to replace by 'b'.
77//! }
78//! ```
79//!
80//! The `Value::Constant` variant stores a possibly unevaluated constant. Evaluating that constant
81//! may be non-deterministic. When that happens, we assign a disambiguator to ensure that we do not
82//! merge the constants. See `duplicate_slice` test in `gvn.rs`.
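//!
//! As a hypothetical sketch of why the disambiguator matters:
//! ```ignore (MIR)
//! _a = const "foo" // `Const::Slice`, non-deterministic: disambiguator 1
//! _b = const "foo" // same `Const`, but disambiguator 2, hence a different `VnIndex`
//! // `_a` and `_b` may be backed by different allocations, so `_b` must not be replaced by `_a`.
//! ```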
83//!
//! Second, when writing constants back into MIR, we do not write `Const::Slice` or `Const`s
//! that contain `AllocId`s.
86
87use std::borrow::Cow;
88
89use either::Either;
90use itertools::Itertools as _;
91use rustc_abi::{self as abi, BackendRepr, FIRST_VARIANT, FieldIdx, Primitive, Size, VariantIdx};
92use rustc_const_eval::const_eval::DummyMachine;
93use rustc_const_eval::interpret::{
94    ImmTy, Immediate, InterpCx, MemPlaceMeta, MemoryKind, OpTy, Projectable, Scalar,
95    intern_const_alloc_for_constprop,
96};
97use rustc_data_structures::fx::{FxIndexSet, MutableValues};
98use rustc_data_structures::graph::dominators::Dominators;
99use rustc_hir::def::DefKind;
100use rustc_index::bit_set::DenseBitSet;
101use rustc_index::{IndexVec, newtype_index};
102use rustc_middle::bug;
103use rustc_middle::mir::interpret::GlobalAlloc;
104use rustc_middle::mir::visit::*;
105use rustc_middle::mir::*;
106use rustc_middle::ty::layout::HasTypingEnv;
107use rustc_middle::ty::{self, Ty, TyCtxt};
108use rustc_span::DUMMY_SP;
109use smallvec::SmallVec;
110use tracing::{debug, instrument, trace};
111
112use crate::ssa::SsaLocals;
113
114pub(super) struct GVN;
115
116impl<'tcx> crate::MirPass<'tcx> for GVN {
117    fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
118        sess.mir_opt_level() >= 2
119    }
120
121    #[instrument(level = "trace", skip(self, tcx, body))]
122    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
123        debug!(def_id = ?body.source.def_id());
124
125        let typing_env = body.typing_env(tcx);
126        let ssa = SsaLocals::new(tcx, body, typing_env);
127        // Clone dominators because we need them while mutating the body.
128        let dominators = body.basic_blocks.dominators().clone();
129        let maybe_loop_headers = loops::maybe_loop_headers(body);
130
131        let mut state = VnState::new(tcx, body, typing_env, &ssa, dominators, &body.local_decls);
132
133        for local in body.args_iter().filter(|&local| ssa.is_ssa(local)) {
134            let opaque = state.new_opaque(body.local_decls[local].ty);
135            state.assign(local, opaque);
136        }
137
138        let reverse_postorder = body.basic_blocks.reverse_postorder().to_vec();
139        for bb in reverse_postorder {
            // N.B. With loops, reverse postorder cannot produce a valid topological order.
            // A statement or terminator from inside the loop that has not been processed yet
            // may have performed an indirect write.
142            if maybe_loop_headers.contains(bb) {
143                state.invalidate_derefs();
144            }
145            let data = &mut body.basic_blocks.as_mut_preserves_cfg()[bb];
146            state.visit_basic_block_data(bb, data);
147        }
148
        // For each local that is reused (`y` above), we remove its storage statements to avoid any
        // difficulty. Those locals are SSA, so they should be easy for LLVM to optimize without
        // storage statements.
152        StorageRemover { tcx, reused_locals: state.reused_locals }.visit_body_preserves_cfg(body);
153    }
154
155    fn is_required(&self) -> bool {
156        false
157    }
158}
159
160newtype_index! {
161    struct VnIndex {}
162}
163
164#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
165enum AddressKind {
166    Ref(BorrowKind),
167    Address(RawPtrKind),
168}
169
170#[derive(Debug, PartialEq, Eq, Hash)]
171enum Value<'tcx> {
172    // Root values.
173    /// Used to represent values we know nothing about.
174    /// The `usize` is a counter incremented by `new_opaque`.
175    Opaque(usize),
176    /// Evaluated or unevaluated constant value.
177    Constant {
178        value: Const<'tcx>,
179        /// Some constants do not have a deterministic value. To avoid merging two instances of the
180        /// same `Const`, we assign them an additional integer index.
181        // `disambiguator` is 0 iff the constant is deterministic.
182        disambiguator: usize,
183    },
184    /// An aggregate value, either tuple/closure/struct/enum.
185    /// This does not contain unions, as we cannot reason with the value.
186    Aggregate(VariantIdx, Vec<VnIndex>),
187    /// A raw pointer aggregate built from a thin pointer and metadata.
188    RawPtr {
189        /// Thin pointer component. This is field 0 in MIR.
190        pointer: VnIndex,
191        /// Metadata component. This is field 1 in MIR.
192        metadata: VnIndex,
193    },
194    /// This corresponds to a `[value; count]` expression.
195    Repeat(VnIndex, ty::Const<'tcx>),
196    /// The address of a place.
197    Address {
198        place: Place<'tcx>,
199        kind: AddressKind,
200        /// Give each borrow and pointer a different provenance, so we don't merge them.
201        provenance: usize,
202    },
203
204    // Extractions.
205    /// This is the *value* obtained by projecting another value.
206    Projection(VnIndex, ProjectionElem<VnIndex, ()>),
207    /// Discriminant of the given value.
208    Discriminant(VnIndex),
209    /// Length of an array or slice.
210    Len(VnIndex),
211
212    // Operations.
213    NullaryOp(NullOp<'tcx>, Ty<'tcx>),
214    UnaryOp(UnOp, VnIndex),
215    BinaryOp(BinOp, VnIndex, VnIndex),
216    Cast {
217        kind: CastKind,
218        value: VnIndex,
219    },
220}
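
// For illustration, a hypothetical MIR statement `_3 = (*_1).0`, where `_1: &(u32, u32)` is an
// SSA local, would be represented by roughly the following interned values (indices invented):
//   v0 = Opaque(0)                   // the unknown value of `_1`
//   v1 = Projection(v0, Deref)       // the value of `*_1` (allowed: `_1` is a shared reference
//                                    // to a freeze type)
//   v2 = Projection(v1, Field(0))    // the value of `(*_1).0`
// and `_3` would be recorded as holding `v2`.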
221
222struct VnState<'body, 'tcx> {
223    tcx: TyCtxt<'tcx>,
224    ecx: InterpCx<'tcx, DummyMachine>,
225    local_decls: &'body LocalDecls<'tcx>,
226    is_coroutine: bool,
227    /// Value stored in each local.
228    locals: IndexVec<Local, Option<VnIndex>>,
229    /// Locals that are assigned that value.
230    // This vector does not hold all the values of `VnIndex` that we create.
231    rev_locals: IndexVec<VnIndex, SmallVec<[Local; 1]>>,
232    values: FxIndexSet<(Value<'tcx>, Ty<'tcx>)>,
233    /// Values evaluated as constants if possible.
234    evaluated: IndexVec<VnIndex, Option<OpTy<'tcx>>>,
235    /// Counter to generate different values.
236    next_opaque: usize,
237    /// Cache the deref values.
238    derefs: Vec<VnIndex>,
239    ssa: &'body SsaLocals,
240    dominators: Dominators<BasicBlock>,
241    reused_locals: DenseBitSet<Local>,
242}
243
244impl<'body, 'tcx> VnState<'body, 'tcx> {
245    fn new(
246        tcx: TyCtxt<'tcx>,
247        body: &Body<'tcx>,
248        typing_env: ty::TypingEnv<'tcx>,
249        ssa: &'body SsaLocals,
250        dominators: Dominators<BasicBlock>,
251        local_decls: &'body LocalDecls<'tcx>,
252    ) -> Self {
253        // Compute a rough estimate of the number of values in the body from the number of
254        // statements. This is meant to reduce the number of allocations, but it's all right if
255        // we miss the exact amount. We estimate based on 2 values per statement (one in LHS and
256        // one in RHS) and 4 values per terminator (for call operands).
257        let num_values =
258            2 * body.basic_blocks.iter().map(|bbdata| bbdata.statements.len()).sum::<usize>()
259                + 4 * body.basic_blocks.len();
260        VnState {
261            tcx,
262            ecx: InterpCx::new(tcx, DUMMY_SP, typing_env, DummyMachine),
263            local_decls,
264            is_coroutine: body.coroutine.is_some(),
265            locals: IndexVec::from_elem(None, local_decls),
266            rev_locals: IndexVec::with_capacity(num_values),
267            values: FxIndexSet::with_capacity_and_hasher(num_values, Default::default()),
268            evaluated: IndexVec::with_capacity(num_values),
269            next_opaque: 1,
270            derefs: Vec::new(),
271            ssa,
272            dominators,
273            reused_locals: DenseBitSet::new_empty(local_decls.len()),
274        }
275    }
276
277    fn typing_env(&self) -> ty::TypingEnv<'tcx> {
278        self.ecx.typing_env()
279    }
280
281    #[instrument(level = "trace", skip(self), ret)]
282    fn insert(&mut self, ty: Ty<'tcx>, value: Value<'tcx>) -> VnIndex {
283        let (index, new) = self.values.insert_full((value, ty));
284        let index = VnIndex::from_usize(index);
285        if new {
286            // Grow `evaluated` and `rev_locals` here to amortize the allocations.
287            let evaluated = self.eval_to_const(index);
288            let _index = self.evaluated.push(evaluated);
289            debug_assert_eq!(index, _index);
290            let _index = self.rev_locals.push(SmallVec::new());
291            debug_assert_eq!(index, _index);
292        }
293        index
294    }
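
    // A hypothetical sketch of the interning behaviour of `insert` above: calling it twice with
    // the same `(ty, value)` pair returns the same `VnIndex`, and `evaluated`/`rev_locals` only
    // grow on the first call, e.g.
    //   let a = state.insert(ty, Value::UnaryOp(UnOp::Not, x));
    //   let b = state.insert(ty, Value::UnaryOp(UnOp::Not, x));
    //   assert_eq!(a, b);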
295
296    fn next_opaque(&mut self) -> usize {
297        let next_opaque = self.next_opaque;
298        self.next_opaque += 1;
299        next_opaque
300    }
301
302    /// Create a new `Value` for which we have no information at all, except that it is distinct
303    /// from all the others.
304    #[instrument(level = "trace", skip(self), ret)]
305    fn new_opaque(&mut self, ty: Ty<'tcx>) -> VnIndex {
306        let value = Value::Opaque(self.next_opaque());
307        self.insert(ty, value)
308    }
309
310    /// Create a new `Value::Address` distinct from all the others.
311    #[instrument(level = "trace", skip(self), ret)]
312    fn new_pointer(&mut self, place: Place<'tcx>, kind: AddressKind) -> VnIndex {
313        let pty = place.ty(self.local_decls, self.tcx).ty;
314        let ty = match kind {
315            AddressKind::Ref(bk) => {
316                Ty::new_ref(self.tcx, self.tcx.lifetimes.re_erased, pty, bk.to_mutbl_lossy())
317            }
318            AddressKind::Address(mutbl) => Ty::new_ptr(self.tcx, pty, mutbl.to_mutbl_lossy()),
319        };
320        let value = Value::Address { place, kind, provenance: self.next_opaque() };
321        self.insert(ty, value)
322    }
323
324    #[inline]
325    fn get(&self, index: VnIndex) -> &Value<'tcx> {
326        &self.values.get_index(index.as_usize()).unwrap().0
327    }
328
329    #[inline]
330    fn ty(&self, index: VnIndex) -> Ty<'tcx> {
331        self.values.get_index(index.as_usize()).unwrap().1
332    }
333
334    /// Record that `local` is assigned `value`. `local` must be SSA.
335    #[instrument(level = "trace", skip(self))]
336    fn assign(&mut self, local: Local, value: VnIndex) {
337        debug_assert!(self.ssa.is_ssa(local));
338        self.locals[local] = Some(value);
339        self.rev_locals[value].push(local);
340    }
341
342    fn insert_constant(&mut self, value: Const<'tcx>) -> VnIndex {
343        let disambiguator = if value.is_deterministic() {
344            // The constant is deterministic, no need to disambiguate.
345            0
346        } else {
347            // Multiple mentions of this constant will yield different values,
348            // so assign a different `disambiguator` to ensure they do not get the same `VnIndex`.
349            let disambiguator = self.next_opaque();
350            // `disambiguator: 0` means deterministic.
351            debug_assert_ne!(disambiguator, 0);
352            disambiguator
353        };
354        self.insert(value.ty(), Value::Constant { value, disambiguator })
355    }
356
357    fn insert_bool(&mut self, flag: bool) -> VnIndex {
358        // Booleans are deterministic.
359        let value = Const::from_bool(self.tcx, flag);
360        debug_assert!(value.is_deterministic());
361        self.insert(self.tcx.types.bool, Value::Constant { value, disambiguator: 0 })
362    }
363
364    fn insert_scalar(&mut self, ty: Ty<'tcx>, scalar: Scalar) -> VnIndex {
365        // Scalars are deterministic.
366        let value = Const::from_scalar(self.tcx, scalar, ty);
367        debug_assert!(value.is_deterministic());
368        self.insert(ty, Value::Constant { value, disambiguator: 0 })
369    }
370
371    fn insert_tuple(&mut self, ty: Ty<'tcx>, values: Vec<VnIndex>) -> VnIndex {
372        self.insert(ty, Value::Aggregate(VariantIdx::ZERO, values))
373    }
374
375    fn insert_deref(&mut self, ty: Ty<'tcx>, value: VnIndex) -> VnIndex {
376        let value = self.insert(ty, Value::Projection(value, ProjectionElem::Deref));
377        self.derefs.push(value);
378        value
379    }
380
381    fn invalidate_derefs(&mut self) {
382        for deref in std::mem::take(&mut self.derefs) {
383            let opaque = self.next_opaque();
384            self.values.get_index_mut2(deref.index()).unwrap().0 = Value::Opaque(opaque);
385        }
386    }
387
388    #[instrument(level = "trace", skip(self), ret)]
389    fn eval_to_const(&mut self, value: VnIndex) -> Option<OpTy<'tcx>> {
390        use Value::*;
391        let ty = self.ty(value);
392        // Avoid computing layouts inside a coroutine, as that can cause cycles.
393        let ty = if !self.is_coroutine || ty.is_scalar() {
394            self.ecx.layout_of(ty).ok()?
395        } else {
396            return None;
397        };
398        let op = match *self.get(value) {
399            _ if ty.is_zst() => ImmTy::uninit(ty).into(),
400
401            Opaque(_) => return None,
402            // Do not bother evaluating repeat expressions. This would uselessly consume memory.
403            Repeat(..) => return None,
404
405            Constant { ref value, disambiguator: _ } => {
406                self.ecx.eval_mir_constant(value, DUMMY_SP, None).discard_err()?
407            }
408            Aggregate(variant, ref fields) => {
409                let fields = fields
410                    .iter()
411                    .map(|&f| self.evaluated[f].as_ref())
412                    .collect::<Option<Vec<_>>>()?;
413                let variant = if ty.ty.is_enum() { Some(variant) } else { None };
414                if matches!(ty.backend_repr, BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..))
415                {
416                    let dest = self.ecx.allocate(ty, MemoryKind::Stack).discard_err()?;
417                    let variant_dest = if let Some(variant) = variant {
418                        self.ecx.project_downcast(&dest, variant).discard_err()?
419                    } else {
420                        dest.clone()
421                    };
422                    for (field_index, op) in fields.into_iter().enumerate() {
423                        let field_dest = self
424                            .ecx
425                            .project_field(&variant_dest, FieldIdx::from_usize(field_index))
426                            .discard_err()?;
427                        self.ecx.copy_op(op, &field_dest).discard_err()?;
428                    }
429                    self.ecx
430                        .write_discriminant(variant.unwrap_or(FIRST_VARIANT), &dest)
431                        .discard_err()?;
432                    self.ecx
433                        .alloc_mark_immutable(dest.ptr().provenance.unwrap().alloc_id())
434                        .discard_err()?;
435                    dest.into()
436                } else {
437                    return None;
438                }
439            }
440            RawPtr { pointer, metadata } => {
441                let pointer = self.evaluated[pointer].as_ref()?;
442                let metadata = self.evaluated[metadata].as_ref()?;
443
444                // Pointers don't have fields, so don't `project_field` them.
445                let data = self.ecx.read_pointer(pointer).discard_err()?;
446                let meta = if metadata.layout.is_zst() {
447                    MemPlaceMeta::None
448                } else {
449                    MemPlaceMeta::Meta(self.ecx.read_scalar(metadata).discard_err()?)
450                };
451                let ptr_imm = Immediate::new_pointer_with_meta(data, meta, &self.ecx);
452                ImmTy::from_immediate(ptr_imm, ty).into()
453            }
454
455            Projection(base, elem) => {
456                let base = self.evaluated[base].as_ref()?;
457                // `Index` by constants should have been replaced by `ConstantIndex` by
458                // `simplify_place_projection`.
459                let elem = elem.try_map(|_| None, |()| ty.ty)?;
460                self.ecx.project(base, elem).discard_err()?
461            }
462            Address { place, kind: _, provenance: _ } => {
463                if !place.is_indirect_first_projection() {
464                    return None;
465                }
466                let local = self.locals[place.local]?;
467                let pointer = self.evaluated[local].as_ref()?;
468                let mut mplace = self.ecx.deref_pointer(pointer).discard_err()?;
469                for elem in place.projection.iter().skip(1) {
470                    // `Index` by constants should have been replaced by `ConstantIndex` by
471                    // `simplify_place_projection`.
472                    let elem = elem.try_map(|_| None, |ty| ty)?;
473                    mplace = self.ecx.project(&mplace, elem).discard_err()?;
474                }
475                let pointer = mplace.to_ref(&self.ecx);
476                ImmTy::from_immediate(pointer, ty).into()
477            }
478
479            Discriminant(base) => {
480                let base = self.evaluated[base].as_ref()?;
481                let variant = self.ecx.read_discriminant(base).discard_err()?;
482                let discr_value =
483                    self.ecx.discriminant_for_variant(base.layout.ty, variant).discard_err()?;
484                discr_value.into()
485            }
486            Len(slice) => {
487                let slice = self.evaluated[slice].as_ref()?;
488                let len = slice.len(&self.ecx).discard_err()?;
489                ImmTy::from_uint(len, ty).into()
490            }
491            NullaryOp(null_op, arg_ty) => {
492                let arg_layout = self.ecx.layout_of(arg_ty).ok()?;
493                if let NullOp::SizeOf | NullOp::AlignOf = null_op
494                    && arg_layout.is_unsized()
495                {
496                    return None;
497                }
498                let val = match null_op {
499                    NullOp::SizeOf => arg_layout.size.bytes(),
500                    NullOp::AlignOf => arg_layout.align.abi.bytes(),
501                    NullOp::OffsetOf(fields) => self
502                        .ecx
503                        .tcx
504                        .offset_of_subfield(self.typing_env(), arg_layout, fields.iter())
505                        .bytes(),
506                    NullOp::UbChecks => return None,
507                    NullOp::ContractChecks => return None,
508                };
509                ImmTy::from_uint(val, ty).into()
510            }
511            UnaryOp(un_op, operand) => {
512                let operand = self.evaluated[operand].as_ref()?;
513                let operand = self.ecx.read_immediate(operand).discard_err()?;
514                let val = self.ecx.unary_op(un_op, &operand).discard_err()?;
515                val.into()
516            }
517            BinaryOp(bin_op, lhs, rhs) => {
518                let lhs = self.evaluated[lhs].as_ref()?;
519                let lhs = self.ecx.read_immediate(lhs).discard_err()?;
520                let rhs = self.evaluated[rhs].as_ref()?;
521                let rhs = self.ecx.read_immediate(rhs).discard_err()?;
522                let val = self.ecx.binary_op(bin_op, &lhs, &rhs).discard_err()?;
523                val.into()
524            }
525            Cast { kind, value } => match kind {
526                CastKind::IntToInt | CastKind::IntToFloat => {
527                    let value = self.evaluated[value].as_ref()?;
528                    let value = self.ecx.read_immediate(value).discard_err()?;
529                    let res = self.ecx.int_to_int_or_float(&value, ty).discard_err()?;
530                    res.into()
531                }
532                CastKind::FloatToFloat | CastKind::FloatToInt => {
533                    let value = self.evaluated[value].as_ref()?;
534                    let value = self.ecx.read_immediate(value).discard_err()?;
535                    let res = self.ecx.float_to_float_or_int(&value, ty).discard_err()?;
536                    res.into()
537                }
538                CastKind::Transmute => {
539                    let value = self.evaluated[value].as_ref()?;
540                    // `offset` for immediates generally only supports projections that match the
541                    // type of the immediate. However, as a HACK, we exploit that it can also do
542                    // limited transmutes: it only works between types with the same layout, and
543                    // cannot transmute pointers to integers.
544                    if value.as_mplace_or_imm().is_right() {
545                        let can_transmute = match (value.layout.backend_repr, ty.backend_repr) {
546                            (BackendRepr::Scalar(s1), BackendRepr::Scalar(s2)) => {
547                                s1.size(&self.ecx) == s2.size(&self.ecx)
548                                    && !matches!(s1.primitive(), Primitive::Pointer(..))
549                            }
550                            (BackendRepr::ScalarPair(a1, b1), BackendRepr::ScalarPair(a2, b2)) => {
551                                a1.size(&self.ecx) == a2.size(&self.ecx) &&
552                                b1.size(&self.ecx) == b2.size(&self.ecx) &&
553                                // The alignment of the second component determines its offset, so that also needs to match.
554                                b1.align(&self.ecx) == b2.align(&self.ecx) &&
555                                // None of the inputs may be a pointer.
556                                !matches!(a1.primitive(), Primitive::Pointer(..))
557                                    && !matches!(b1.primitive(), Primitive::Pointer(..))
558                            }
559                            _ => false,
560                        };
561                        if !can_transmute {
562                            return None;
563                        }
564                    }
565                    value.offset(Size::ZERO, ty, &self.ecx).discard_err()?
566                }
567                CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _) => {
568                    let src = self.evaluated[value].as_ref()?;
569                    let dest = self.ecx.allocate(ty, MemoryKind::Stack).discard_err()?;
570                    self.ecx.unsize_into(src, ty, &dest).discard_err()?;
571                    self.ecx
572                        .alloc_mark_immutable(dest.ptr().provenance.unwrap().alloc_id())
573                        .discard_err()?;
574                    dest.into()
575                }
576                CastKind::FnPtrToPtr | CastKind::PtrToPtr => {
577                    let src = self.evaluated[value].as_ref()?;
578                    let src = self.ecx.read_immediate(src).discard_err()?;
579                    let ret = self.ecx.ptr_to_ptr(&src, ty).discard_err()?;
580                    ret.into()
581                }
582                CastKind::PointerCoercion(ty::adjustment::PointerCoercion::UnsafeFnPointer, _) => {
583                    let src = self.evaluated[value].as_ref()?;
584                    let src = self.ecx.read_immediate(src).discard_err()?;
585                    ImmTy::from_immediate(*src, ty).into()
586                }
587                _ => return None,
588            },
589        };
590        Some(op)
591    }
592
593    fn project(
594        &mut self,
595        place_ty: PlaceTy<'tcx>,
596        value: VnIndex,
597        proj: PlaceElem<'tcx>,
598        from_non_ssa_index: &mut bool,
599    ) -> Option<(PlaceTy<'tcx>, VnIndex)> {
600        let projection_ty = place_ty.projection_ty(self.tcx, proj);
601        let proj = match proj {
602            ProjectionElem::Deref => {
603                if let Some(Mutability::Not) = place_ty.ty.ref_mutability()
604                    && projection_ty.ty.is_freeze(self.tcx, self.typing_env())
605                {
606                    // An immutable borrow `_x` always points to the same value for the
607                    // lifetime of the borrow, so we can merge all instances of `*_x`.
608                    return Some((projection_ty, self.insert_deref(projection_ty.ty, value)));
609                } else {
610                    return None;
611                }
612            }
613            ProjectionElem::Downcast(name, index) => ProjectionElem::Downcast(name, index),
614            ProjectionElem::Field(f, _) => {
615                if let Value::Aggregate(_, fields) = self.get(value) {
616                    return Some((projection_ty, fields[f.as_usize()]));
617                } else if let Value::Projection(outer_value, ProjectionElem::Downcast(_, read_variant)) = self.get(value)
618                    && let Value::Aggregate(written_variant, fields) = self.get(*outer_value)
619                    // This pass is not aware of control-flow, so we do not know whether the
620                    // replacement we are doing is actually reachable. We could be in any arm of
621                    // ```
622                    // match Some(x) {
623                    //     Some(y) => /* stuff */,
624                    //     None => /* other */,
625                    // }
626                    // ```
627                    //
628                    // In surface rust, the current statement would be unreachable.
629                    //
630                    // However, from the reference chapter on enums and RFC 2195,
631                    // accessing the wrong variant is not UB if the enum has repr.
632                    // So it's not impossible for a series of MIR opts to generate
633                    // a downcast to an inactive variant.
634                    && written_variant == read_variant
635                {
636                    return Some((projection_ty, fields[f.as_usize()]));
637                }
638                ProjectionElem::Field(f, ())
639            }
640            ProjectionElem::Index(idx) => {
641                if let Value::Repeat(inner, _) = self.get(value) {
642                    *from_non_ssa_index |= self.locals[idx].is_none();
643                    return Some((projection_ty, *inner));
644                }
645                let idx = self.locals[idx]?;
646                ProjectionElem::Index(idx)
647            }
648            ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
649                match self.get(value) {
650                    Value::Repeat(inner, _) => {
651                        return Some((projection_ty, *inner));
652                    }
653                    Value::Aggregate(_, operands) => {
654                        let offset = if from_end {
655                            operands.len() - offset as usize
656                        } else {
657                            offset as usize
658                        };
659                        let value = operands.get(offset).copied()?;
660                        return Some((projection_ty, value));
661                    }
662                    _ => {}
663                };
664                ProjectionElem::ConstantIndex { offset, min_length, from_end }
665            }
666            ProjectionElem::Subslice { from, to, from_end } => {
667                ProjectionElem::Subslice { from, to, from_end }
668            }
669            ProjectionElem::OpaqueCast(_) => ProjectionElem::OpaqueCast(()),
670            ProjectionElem::Subtype(_) => ProjectionElem::Subtype(()),
671            ProjectionElem::UnwrapUnsafeBinder(_) => ProjectionElem::UnwrapUnsafeBinder(()),
672        };
673
674        let value = self.insert(projection_ty.ty, Value::Projection(value, proj));
675        Some((projection_ty, value))
676    }
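
    // For illustration of `project` above (hypothetical indices): if `value` is
    // `Aggregate(VariantIdx(0), [v_a, v_b])`, projecting with `Field(1)` returns `v_b` directly
    // instead of interning a new `Projection` value; likewise a `ConstantIndex` into a `Repeat`
    // value returns the repeated element.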
677
678    /// Simplify the projection chain if we know better.
679    #[instrument(level = "trace", skip(self))]
680    fn simplify_place_projection(&mut self, place: &mut Place<'tcx>, location: Location) {
        // If the projection is indirect, we treat the local as a value, so we can replace it
        // with another local.
683        if place.is_indirect_first_projection()
684            && let Some(base) = self.locals[place.local]
685            && let Some(new_local) = self.try_as_local(base, location)
686            && place.local != new_local
687        {
688            place.local = new_local;
689            self.reused_locals.insert(new_local);
690        }
691
692        let mut projection = Cow::Borrowed(&place.projection[..]);
693
694        for i in 0..projection.len() {
695            let elem = projection[i];
696            if let ProjectionElem::Index(idx_local) = elem
697                && let Some(idx) = self.locals[idx_local]
698            {
699                if let Some(offset) = self.evaluated[idx].as_ref()
700                    && let Some(offset) = self.ecx.read_target_usize(offset).discard_err()
701                    && let Some(min_length) = offset.checked_add(1)
702                {
703                    projection.to_mut()[i] =
704                        ProjectionElem::ConstantIndex { offset, min_length, from_end: false };
705                } else if let Some(new_idx_local) = self.try_as_local(idx, location)
706                    && idx_local != new_idx_local
707                {
708                    projection.to_mut()[i] = ProjectionElem::Index(new_idx_local);
709                    self.reused_locals.insert(new_idx_local);
710                }
711            }
712        }
713
714        if projection.is_owned() {
715            place.projection = self.tcx.mk_place_elems(&projection);
716        }
717
718        trace!(?place);
719    }
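
    // For example (hypothetical MIR): if `_2` is known to hold the constant 3, the place `_1[_2]`
    // is rewritten by `simplify_place_projection` to use
    // `ConstantIndex { offset: 3, min_length: 4, from_end: false }`, i.e. `_1[3 of 4]` in MIR
    // syntax.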
720
721    /// Represent the *value* which would be read from `place`, and point `place` to a preexisting
722    /// place with the same value (if that already exists).
723    #[instrument(level = "trace", skip(self), ret)]
724    fn simplify_place_value(
725        &mut self,
726        place: &mut Place<'tcx>,
727        location: Location,
728    ) -> Option<VnIndex> {
729        self.simplify_place_projection(place, location);
730
731        // Invariant: `place` and `place_ref` point to the same value, even if they point to
732        // different memory locations.
733        let mut place_ref = place.as_ref();
734
        // Invariant: `value` holds the value up to (excluding) the `index`th projection.
736        let mut value = self.locals[place.local]?;
737        // Invariant: `value` has type `place_ty`, with optional downcast variant if needed.
738        let mut place_ty = PlaceTy::from_ty(self.local_decls[place.local].ty);
739        let mut from_non_ssa_index = false;
740        for (index, proj) in place.projection.iter().enumerate() {
741            if let Value::Projection(pointer, ProjectionElem::Deref) = *self.get(value)
742                && let Value::Address { place: mut pointee, kind, .. } = *self.get(pointer)
743                && let AddressKind::Ref(BorrowKind::Shared) = kind
744                && let Some(v) = self.simplify_place_value(&mut pointee, location)
745            {
746                value = v;
747                // `pointee` holds a `Place`, so `ProjectionElem::Index` holds a `Local`.
748                // That local is SSA, but we otherwise have no guarantee on that local's value at
749                // the current location compared to its value where `pointee` was borrowed.
750                if pointee.projection.iter().all(|elem| !matches!(elem, ProjectionElem::Index(_))) {
751                    place_ref =
752                        pointee.project_deeper(&place.projection[index..], self.tcx).as_ref();
753                }
754            }
755            if let Some(local) = self.try_as_local(value, location) {
                // Both `local` and `Place { local: place.local, projection: projection[..index] }`
                // hold the same value. Therefore, the following place holds the value of the
                // original `place`.
759                place_ref = PlaceRef { local, projection: &place.projection[index..] };
760            }
761
762            (place_ty, value) = self.project(place_ty, value, proj, &mut from_non_ssa_index)?;
763        }
764
765        if let Value::Projection(pointer, ProjectionElem::Deref) = *self.get(value)
766            && let Value::Address { place: mut pointee, kind, .. } = *self.get(pointer)
767            && let AddressKind::Ref(BorrowKind::Shared) = kind
768            && let Some(v) = self.simplify_place_value(&mut pointee, location)
769        {
770            value = v;
771            // `pointee` holds a `Place`, so `ProjectionElem::Index` holds a `Local`.
772            // That local is SSA, but we otherwise have no guarantee on that local's value at
773            // the current location compared to its value where `pointee` was borrowed.
774            if pointee.projection.iter().all(|elem| !matches!(elem, ProjectionElem::Index(_))) {
775                place_ref = pointee.project_deeper(&[], self.tcx).as_ref();
776            }
777        }
778        if let Some(new_local) = self.try_as_local(value, location) {
779            place_ref = PlaceRef { local: new_local, projection: &[] };
780        } else if from_non_ssa_index {
781            // If access to non-SSA locals is unavoidable, bail out.
782            return None;
783        }
784
785        if place_ref.local != place.local || place_ref.projection.len() < place.projection.len() {
786            // By the invariant on `place_ref`.
787            *place = place_ref.project_deeper(&[], self.tcx);
788            self.reused_locals.insert(place_ref.local);
789        }
790
791        Some(value)
792    }
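
    // A hypothetical example of `simplify_place_value`: if `_5 = &_2` and we later read `(*_5).0`,
    // the deref is seen through the `Address` value of `_5`, so the place is rewritten to `_2.0`
    // (and, if some SSA local already holds that value, to that local directly).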
793
794    #[instrument(level = "trace", skip(self), ret)]
795    fn simplify_operand(
796        &mut self,
797        operand: &mut Operand<'tcx>,
798        location: Location,
799    ) -> Option<VnIndex> {
800        match *operand {
801            Operand::Constant(ref constant) => Some(self.insert_constant(constant.const_)),
802            Operand::Copy(ref mut place) | Operand::Move(ref mut place) => {
803                let value = self.simplify_place_value(place, location)?;
804                if let Some(const_) = self.try_as_constant(value) {
805                    *operand = Operand::Constant(Box::new(const_));
806                }
807                Some(value)
808            }
809        }
810    }
811
812    #[instrument(level = "trace", skip(self), ret)]
813    fn simplify_rvalue(
814        &mut self,
815        lhs: &Place<'tcx>,
816        rvalue: &mut Rvalue<'tcx>,
817        location: Location,
818    ) -> Option<VnIndex> {
819        let value = match *rvalue {
820            // Forward values.
821            Rvalue::Use(ref mut operand) => return self.simplify_operand(operand, location),
822            Rvalue::CopyForDeref(place) => {
823                let mut operand = Operand::Copy(place);
824                let val = self.simplify_operand(&mut operand, location);
825                *rvalue = Rvalue::Use(operand);
826                return val;
827            }
828
829            // Roots.
830            Rvalue::Repeat(ref mut op, amount) => {
831                let op = self.simplify_operand(op, location)?;
832                Value::Repeat(op, amount)
833            }
834            Rvalue::NullaryOp(op, ty) => Value::NullaryOp(op, ty),
835            Rvalue::Aggregate(..) => return self.simplify_aggregate(lhs, rvalue, location),
836            Rvalue::Ref(_, borrow_kind, ref mut place) => {
837                self.simplify_place_projection(place, location);
838                return Some(self.new_pointer(*place, AddressKind::Ref(borrow_kind)));
839            }
840            Rvalue::RawPtr(mutbl, ref mut place) => {
841                self.simplify_place_projection(place, location);
842                return Some(self.new_pointer(*place, AddressKind::Address(mutbl)));
843            }
844            Rvalue::WrapUnsafeBinder(ref mut op, _) => {
845                let value = self.simplify_operand(op, location)?;
846                Value::Cast { kind: CastKind::Transmute, value }
847            }
848
849            // Operations.
850            Rvalue::Len(ref mut place) => return self.simplify_len(place, location),
851            Rvalue::Cast(ref mut kind, ref mut value, to) => {
852                return self.simplify_cast(kind, value, to, location);
853            }
854            Rvalue::BinaryOp(op, box (ref mut lhs, ref mut rhs)) => {
855                return self.simplify_binary(op, lhs, rhs, location);
856            }
857            Rvalue::UnaryOp(op, ref mut arg_op) => {
858                return self.simplify_unary(op, arg_op, location);
859            }
860            Rvalue::Discriminant(ref mut place) => {
861                let place = self.simplify_place_value(place, location)?;
862                if let Some(discr) = self.simplify_discriminant(place) {
863                    return Some(discr);
864                }
865                Value::Discriminant(place)
866            }
867
868            // Unsupported values.
869            Rvalue::ThreadLocalRef(..) | Rvalue::ShallowInitBox(..) => return None,
870        };
871        let ty = rvalue.ty(self.local_decls, self.tcx);
872        Some(self.insert(ty, value))
873    }
874
875    fn simplify_discriminant(&mut self, place: VnIndex) -> Option<VnIndex> {
876        let enum_ty = self.ty(place);
877        if enum_ty.is_enum()
878            && let Value::Aggregate(variant, _) = *self.get(place)
879        {
880            let discr = self.ecx.discriminant_for_variant(enum_ty, variant).discard_err()?;
881            return Some(self.insert_scalar(discr.layout.ty, discr.to_scalar()));
882        }
883
884        None
885    }
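
    // For instance (hypothetical): if `place` was built as the aggregate `Option::<u32>::Some(_)`,
    // `simplify_discriminant` folds `discriminant(place)` to the constant discriminant of the
    // `Some` variant instead of keeping a symbolic `Value::Discriminant`.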
886
887    fn try_as_place_elem(
888        &mut self,
889        ty: Ty<'tcx>,
890        proj: ProjectionElem<VnIndex, ()>,
891        loc: Location,
892    ) -> Option<PlaceElem<'tcx>> {
893        proj.try_map(
894            |value| {
895                let local = self.try_as_local(value, loc)?;
896                self.reused_locals.insert(local);
897                Some(local)
898            },
899            |()| ty,
900        )
901    }
902
903    fn simplify_aggregate_to_copy(
904        &mut self,
905        ty: Ty<'tcx>,
906        variant_index: VariantIdx,
907        fields: &[VnIndex],
908    ) -> Option<VnIndex> {
909        let Some(&first_field) = fields.first() else { return None };
910        let Value::Projection(copy_from_value, _) = *self.get(first_field) else { return None };
911
912        // All fields must correspond one-to-one and come from the same aggregate value.
913        if fields.iter().enumerate().any(|(index, &v)| {
914            if let Value::Projection(pointer, ProjectionElem::Field(from_index, _)) = *self.get(v)
915                && copy_from_value == pointer
916                && from_index.index() == index
917            {
918                return false;
919            }
920            true
921        }) {
922            return None;
923        }
924
925        let mut copy_from_local_value = copy_from_value;
926        if let Value::Projection(pointer, proj) = *self.get(copy_from_value)
927            && let ProjectionElem::Downcast(_, read_variant) = proj
928        {
929            if variant_index == read_variant {
930                // When copying a variant, there is no need to downcast.
931                copy_from_local_value = pointer;
932            } else {
933                // The copied variant must be identical.
934                return None;
935            }
936        }
937
938        // Both must be variants of the same type.
939        if self.ty(copy_from_local_value) == ty { Some(copy_from_local_value) } else { None }
940    }
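
    // A hypothetical example of the simplification above: building `(x.0, x.1)` from a tuple
    // `x: (u32, u32)` whose value is opaque (e.g. a function argument) yields fields that are
    // `Field` projections of the same value, in order, so the whole aggregate is replaced by a
    // copy of `x`.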
941
942    fn simplify_aggregate(
943        &mut self,
944        lhs: &Place<'tcx>,
945        rvalue: &mut Rvalue<'tcx>,
946        location: Location,
947    ) -> Option<VnIndex> {
948        let tcx = self.tcx;
949        let ty = rvalue.ty(self.local_decls, tcx);
950
951        let Rvalue::Aggregate(box ref kind, ref mut field_ops) = *rvalue else { bug!() };
952
953        if field_ops.is_empty() {
954            let is_zst = match *kind {
955                AggregateKind::Array(..)
956                | AggregateKind::Tuple
957                | AggregateKind::Closure(..)
958                | AggregateKind::CoroutineClosure(..) => true,
959                // Only enums can be non-ZST.
960                AggregateKind::Adt(did, ..) => tcx.def_kind(did) != DefKind::Enum,
961                // Coroutines are never ZST, as they at least contain the implicit states.
962                AggregateKind::Coroutine(..) => false,
963                AggregateKind::RawPtr(..) => bug!("MIR for RawPtr aggregate must have 2 fields"),
964            };
965
966            if is_zst {
967                return Some(self.insert_constant(Const::zero_sized(ty)));
968            }
969        }
970
971        let fields: Vec<_> = field_ops
972            .iter_mut()
973            .map(|op| {
974                self.simplify_operand(op, location)
975                    .unwrap_or_else(|| self.new_opaque(op.ty(self.local_decls, self.tcx)))
976            })
977            .collect();
978
979        let variant_index = match *kind {
980            AggregateKind::Array(..) | AggregateKind::Tuple => {
981                assert!(!field_ops.is_empty());
982                FIRST_VARIANT
983            }
984            AggregateKind::Closure(..)
985            | AggregateKind::CoroutineClosure(..)
986            | AggregateKind::Coroutine(..) => FIRST_VARIANT,
987            AggregateKind::Adt(_, variant_index, _, _, None) => variant_index,
988            // Do not track unions.
989            AggregateKind::Adt(_, _, _, _, Some(_)) => return None,
990            AggregateKind::RawPtr(..) => {
991                assert_eq!(field_ops.len(), 2);
992                let [mut pointer, metadata] = fields.try_into().unwrap();
993
994                // Any thin pointer of matching mutability is fine as the data pointer.
995                let mut was_updated = false;
996                while let Value::Cast { kind: CastKind::PtrToPtr, value: cast_value } =
997                    self.get(pointer)
998                    && let ty::RawPtr(from_pointee_ty, from_mtbl) = self.ty(*cast_value).kind()
999                    && let ty::RawPtr(_, output_mtbl) = ty.kind()
1000                    && from_mtbl == output_mtbl
1001                    && from_pointee_ty.is_sized(self.tcx, self.typing_env())
1002                {
1003                    pointer = *cast_value;
1004                    was_updated = true;
1005                }
1006
1007                if was_updated && let Some(op) = self.try_as_operand(pointer, location) {
1008                    field_ops[FieldIdx::ZERO] = op;
1009                }
1010
1011                return Some(self.insert(ty, Value::RawPtr { pointer, metadata }));
1012            }
1013        };
1014
1015        if ty.is_array()
1016            && fields.len() > 4
1017            && let Ok(&first) = fields.iter().all_equal_value()
1018        {
1019            let len = ty::Const::from_target_usize(self.tcx, fields.len().try_into().unwrap());
1020            if let Some(op) = self.try_as_operand(first, location) {
1021                *rvalue = Rvalue::Repeat(op, len);
1022            }
1023            return Some(self.insert(ty, Value::Repeat(first, len)));
1024        }
1025
1026        if let Some(value) = self.simplify_aggregate_to_copy(ty, variant_index, &fields) {
1027            // Allow introducing places with non-constant offsets, as those are still better than
1028            // reconstructing an aggregate. But avoid creating `*a = copy (*b)`, as they might be
1029            // aliases resulting in overlapping assignments.
1030            let allow_complex_projection =
1031                lhs.projection[..].iter().all(PlaceElem::is_stable_offset);
1032            if let Some(place) = self.try_as_place(value, location, allow_complex_projection) {
1033                self.reused_locals.insert(place.local);
1034                *rvalue = Rvalue::Use(Operand::Copy(place));
1035            }
1036            return Some(value);
1037        }
1038
1039        Some(self.insert(ty, Value::Aggregate(variant_index, fields)))
1040    }
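
    // As a hypothetical example of the array special case above: `[x, x, x, x, x]` (more than 4
    // equal elements) is interned as `Repeat(x, 5)` and the rvalue is rewritten to `[x; 5]` when
    // `x` can be materialized as an operand.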
1041
1042    #[instrument(level = "trace", skip(self), ret)]
1043    fn simplify_unary(
1044        &mut self,
1045        op: UnOp,
1046        arg_op: &mut Operand<'tcx>,
1047        location: Location,
1048    ) -> Option<VnIndex> {
1049        let mut arg_index = self.simplify_operand(arg_op, location)?;
1050        let arg_ty = self.ty(arg_index);
1051        let ret_ty = op.ty(self.tcx, arg_ty);
1052
1053        // PtrMetadata doesn't care about *const vs *mut vs & vs &mut,
1054        // so start by removing those distinctions so we can update the `Operand`
1055        if op == UnOp::PtrMetadata {
1056            let mut was_updated = false;
1057            loop {
1058                match self.get(arg_index) {
1059                    // Pointer casts that preserve metadata, such as
1060                    // `*const [i32]` <-> `*mut [i32]` <-> `*mut [f32]`.
1061                    // It's critical that this not eliminate cases like
1062                    // `*const [T]` -> `*const T` which remove metadata.
1063                    // We run on potentially-generic MIR, though, so unlike codegen
1064                    // we can't always know exactly what the metadata are.
1065                    // To allow things like `*mut (?A, ?T)` <-> `*mut (?B, ?T)`,
1066                    // it's fine to get a projection as the type.
1067                    Value::Cast { kind: CastKind::PtrToPtr, value: inner }
1068                        if self.pointers_have_same_metadata(self.ty(*inner), arg_ty) =>
1069                    {
1070                        arg_index = *inner;
1071                        was_updated = true;
1072                        continue;
1073                    }
1074
1075                    // `&mut *p`, `&raw *p`, etc don't change metadata.
1076                    Value::Address { place, kind: _, provenance: _ }
1077                        if let PlaceRef { local, projection: [PlaceElem::Deref] } =
1078                            place.as_ref()
1079                            && let Some(local_index) = self.locals[local] =>
1080                    {
1081                        arg_index = local_index;
1082                        was_updated = true;
1083                        continue;
1084                    }
1085
1086                    _ => {
1087                        if was_updated && let Some(op) = self.try_as_operand(arg_index, location) {
1088                            *arg_op = op;
1089                        }
1090                        break;
1091                    }
1092                }
1093            }
1094        }
1095
1096        let value = match (op, self.get(arg_index)) {
1097            (UnOp::Not, Value::UnaryOp(UnOp::Not, inner)) => return Some(*inner),
1098            (UnOp::Neg, Value::UnaryOp(UnOp::Neg, inner)) => return Some(*inner),
1099            (UnOp::Not, Value::BinaryOp(BinOp::Eq, lhs, rhs)) => {
1100                Value::BinaryOp(BinOp::Ne, *lhs, *rhs)
1101            }
1102            (UnOp::Not, Value::BinaryOp(BinOp::Ne, lhs, rhs)) => {
1103                Value::BinaryOp(BinOp::Eq, *lhs, *rhs)
1104            }
1105            (UnOp::PtrMetadata, Value::RawPtr { metadata, .. }) => return Some(*metadata),
1106            // We have an unsizing cast, which assigns the length to wide pointer metadata.
1107            (
1108                UnOp::PtrMetadata,
1109                Value::Cast {
1110                    kind: CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _),
1111                    value: inner,
1112                },
1113            ) if let ty::Slice(..) = arg_ty.builtin_deref(true).unwrap().kind()
1114                && let ty::Array(_, len) = self.ty(*inner).builtin_deref(true).unwrap().kind() =>
1115            {
1116                return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len)));
1117            }
1118            _ => Value::UnaryOp(op, arg_index),
1119        };
1120        Some(self.insert(ret_ty, value))
1121    }
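
    // Hypothetical examples of the simplifications above: `Not(Eq(a, b))` becomes `Ne(a, b)`,
    // `Neg(Neg(x))` becomes `x`, and `PtrMetadata` of an unsizing cast from `&[T; N]` to `&[T]`
    // folds to the constant length `N`.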
1122
1123    #[instrument(level = "trace", skip(self), ret)]
1124    fn simplify_binary(
1125        &mut self,
1126        op: BinOp,
1127        lhs_operand: &mut Operand<'tcx>,
1128        rhs_operand: &mut Operand<'tcx>,
1129        location: Location,
1130    ) -> Option<VnIndex> {
1131        let lhs = self.simplify_operand(lhs_operand, location);
1132        let rhs = self.simplify_operand(rhs_operand, location);
1133
        // Only short-circuit the `Option`s after we have called `simplify_operand`
        // on both operands, for their side effects.
1136        let mut lhs = lhs?;
1137        let mut rhs = rhs?;
1138
1139        let lhs_ty = self.ty(lhs);
1140
1141        // If we're comparing pointers, remove `PtrToPtr` casts if the from
1142        // types of both casts and the metadata all match.
1143        if let BinOp::Eq | BinOp::Ne | BinOp::Lt | BinOp::Le | BinOp::Gt | BinOp::Ge = op
1144            && lhs_ty.is_any_ptr()
1145            && let Value::Cast { kind: CastKind::PtrToPtr, value: lhs_value } = self.get(lhs)
1146            && let Value::Cast { kind: CastKind::PtrToPtr, value: rhs_value } = self.get(rhs)
1147            && let lhs_from = self.ty(*lhs_value)
1148            && lhs_from == self.ty(*rhs_value)
1149            && self.pointers_have_same_metadata(lhs_from, lhs_ty)
1150        {
1151            lhs = *lhs_value;
1152            rhs = *rhs_value;
1153            if let Some(lhs_op) = self.try_as_operand(lhs, location)
1154                && let Some(rhs_op) = self.try_as_operand(rhs, location)
1155            {
1156                *lhs_operand = lhs_op;
1157                *rhs_operand = rhs_op;
1158            }
1159        }
1160
1161        if let Some(value) = self.simplify_binary_inner(op, lhs_ty, lhs, rhs) {
1162            return Some(value);
1163        }
1164        let ty = op.ty(self.tcx, lhs_ty, self.ty(rhs));
1165        let value = Value::BinaryOp(op, lhs, rhs);
1166        Some(self.insert(ty, value))
1167    }
1168
1169    fn simplify_binary_inner(
1170        &mut self,
1171        op: BinOp,
1172        lhs_ty: Ty<'tcx>,
1173        lhs: VnIndex,
1174        rhs: VnIndex,
1175    ) -> Option<VnIndex> {
1176        // Floats are weird enough that none of the logic below applies.
1177        let reasonable_ty =
1178            lhs_ty.is_integral() || lhs_ty.is_bool() || lhs_ty.is_char() || lhs_ty.is_any_ptr();
1179        if !reasonable_ty {
1180            return None;
1181        }
1182
1183        let layout = self.ecx.layout_of(lhs_ty).ok()?;
1184
1185        let as_bits = |value: VnIndex| {
1186            let constant = self.evaluated[value].as_ref()?;
1187            if layout.backend_repr.is_scalar() {
1188                let scalar = self.ecx.read_scalar(constant).discard_err()?;
1189                scalar.to_bits(constant.layout.size).discard_err()
1190            } else {
1191                // `constant` is a wide pointer. Do not evaluate to bits.
1192                None
1193            }
1194        };
1195
1196        // Represent the values as `Left(bits)` or `Right(VnIndex)`.
1197        use Either::{Left, Right};
1198        let a = as_bits(lhs).map_or(Right(lhs), Left);
1199        let b = as_bits(rhs).map_or(Right(rhs), Left);
1200
1201        let result = match (op, a, b) {
1202            // Neutral elements.
1203            (
1204                BinOp::Add
1205                | BinOp::AddWithOverflow
1206                | BinOp::AddUnchecked
1207                | BinOp::BitOr
1208                | BinOp::BitXor,
1209                Left(0),
1210                Right(p),
1211            )
1212            | (
1213                BinOp::Add
1214                | BinOp::AddWithOverflow
1215                | BinOp::AddUnchecked
1216                | BinOp::BitOr
1217                | BinOp::BitXor
1218                | BinOp::Sub
1219                | BinOp::SubWithOverflow
1220                | BinOp::SubUnchecked
1221                | BinOp::Offset
1222                | BinOp::Shl
1223                | BinOp::Shr,
1224                Right(p),
1225                Left(0),
1226            )
1227            | (BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked, Left(1), Right(p))
1228            | (
1229                BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked | BinOp::Div,
1230                Right(p),
1231                Left(1),
1232            ) => p,
1233            // Attempt to simplify `x & ALL_ONES` to `x`, with `ALL_ONES` depending on type size.
1234            (BinOp::BitAnd, Right(p), Left(ones)) | (BinOp::BitAnd, Left(ones), Right(p))
1235                if ones == layout.size.truncate(u128::MAX)
1236                    || (layout.ty.is_bool() && ones == 1) =>
1237            {
1238                p
1239            }
1240            // Absorbing elements.
1241            (
1242                BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked | BinOp::BitAnd,
1243                _,
1244                Left(0),
1245            )
1246            | (BinOp::Rem, _, Left(1))
1247            | (
1248                BinOp::Mul
1249                | BinOp::MulWithOverflow
1250                | BinOp::MulUnchecked
1251                | BinOp::Div
1252                | BinOp::Rem
1253                | BinOp::BitAnd
1254                | BinOp::Shl
1255                | BinOp::Shr,
1256                Left(0),
1257                _,
1258            ) => self.insert_scalar(lhs_ty, Scalar::from_uint(0u128, layout.size)),
1259            // Attempt to simplify `x | ALL_ONES` to `ALL_ONES`.
1260            (BinOp::BitOr, _, Left(ones)) | (BinOp::BitOr, Left(ones), _)
1261                if ones == layout.size.truncate(u128::MAX)
1262                    || (layout.ty.is_bool() && ones == 1) =>
1263            {
1264                self.insert_scalar(lhs_ty, Scalar::from_uint(ones, layout.size))
1265            }
1266            // Sub/Xor with itself.
1267            (BinOp::Sub | BinOp::SubWithOverflow | BinOp::SubUnchecked | BinOp::BitXor, a, b)
1268                if a == b =>
1269            {
1270                self.insert_scalar(lhs_ty, Scalar::from_uint(0u128, layout.size))
1271            }
1272            // Comparison:
1273            // - if both operands can be computed as bits, just compare the bits;
1274            // - if we proved that both operands have the same value, we can insert true/false;
1275            // - otherwise, do nothing, as we do not try to prove inequality.
1276            (BinOp::Eq, Left(a), Left(b)) => self.insert_bool(a == b),
1277            (BinOp::Eq, a, b) if a == b => self.insert_bool(true),
1278            (BinOp::Ne, Left(a), Left(b)) => self.insert_bool(a != b),
1279            (BinOp::Ne, a, b) if a == b => self.insert_bool(false),
1280            _ => return None,
1281        };
1282
1283        if op.is_overflowing() {
1284            let ty = Ty::new_tup(self.tcx, &[self.ty(result), self.tcx.types.bool]);
1285            let false_val = self.insert_bool(false);
1286            Some(self.insert_tuple(ty, vec![result, false_val]))
1287        } else {
1288            Some(result)
1289        }
1290    }
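    // Illustrative sketch of the algebraic rules above (not from the original source),
    // assuming `_x: u32`:
    //   _x + 0, _x | 0, _x ^ 0, _x * 1  =>  _x     (neutral elements)
    //   _x * 0, _x & 0                  =>  0      (absorbing elements)
    //   _x - _x, _x ^ _x                =>  0
    //   Eq(_x, _x)                      =>  true
    // For the overflowing variants (e.g. AddWithOverflow), the simplified result is wrapped
    // as the tuple `(value, false)`.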
1291
1292    fn simplify_cast(
1293        &mut self,
1294        initial_kind: &mut CastKind,
1295        initial_operand: &mut Operand<'tcx>,
1296        to: Ty<'tcx>,
1297        location: Location,
1298    ) -> Option<VnIndex> {
1299        use CastKind::*;
1300        use rustc_middle::ty::adjustment::PointerCoercion::*;
1301
1302        let mut kind = *initial_kind;
1303        let mut value = self.simplify_operand(initial_operand, location)?;
1304        let mut from = self.ty(value);
1305        if from == to {
1306            return Some(value);
1307        }
1308
1309        if let CastKind::PointerCoercion(ReifyFnPointer | ClosureFnPointer(_), _) = kind {
1310            // Each reification of a generic fn may get a different pointer.
1311            // Do not try to merge them.
1312            return Some(self.new_opaque(to));
1313        }
1314
1315        let mut was_ever_updated = false;
1316        loop {
1317            let mut was_updated_this_iteration = false;
1318
1319            // Transmuting between raw pointers is just a pointer cast so long as
1320            // they have the same metadata type (like `*const i32` <=> `*mut u64`
1321            // or `*mut [i32]` <=> `*const [u64]`), including the common special
1322            // case of `*const T` <=> `*mut T`.
1323            if let Transmute = kind
1324                && from.is_raw_ptr()
1325                && to.is_raw_ptr()
1326                && self.pointers_have_same_metadata(from, to)
1327            {
1328                kind = PtrToPtr;
1329                was_updated_this_iteration = true;
1330            }
1331
1332            // If a cast just casts away the metadata again, then we can get it by
1333            // casting the original thin pointer passed to `from_raw_parts`
1334            if let PtrToPtr = kind
1335                && let Value::RawPtr { pointer, .. } = self.get(value)
1336                && let ty::RawPtr(to_pointee, _) = to.kind()
1337                && to_pointee.is_sized(self.tcx, self.typing_env())
1338            {
1339                from = self.ty(*pointer);
1340                value = *pointer;
1341                was_updated_this_iteration = true;
1342                if from == to {
1343                    return Some(*pointer);
1344                }
1345            }
1346
1347            // Aggregate-then-Transmute can just transmute the original field value,
1348            // so long as all the bytes of the value come from a single field.
1349            if let Transmute = kind
1350                && let Value::Aggregate(variant_idx, field_values) = self.get(value)
1351                && let Some((field_idx, field_ty)) =
1352                    self.value_is_all_in_one_field(from, *variant_idx)
1353            {
1354                from = field_ty;
1355                value = field_values[field_idx.as_usize()];
1356                was_updated_this_iteration = true;
1357                if field_ty == to {
1358                    return Some(value);
1359                }
1360            }
1361
1362            // Various cast-then-cast cases can be simplified.
1363            if let Value::Cast { kind: inner_kind, value: inner_value } = *self.get(value) {
1364                let inner_from = self.ty(inner_value);
1365                let new_kind = match (inner_kind, kind) {
1366                    // Even if there's a narrowing cast in here, that's fine, because
1367                    // things like `*mut [i32] -> *mut i32 -> *const i32` and
1368                    // `*mut [i32] -> *const [i32] -> *const i32` can skip the middle in MIR.
1369                    (PtrToPtr, PtrToPtr) => Some(PtrToPtr),
1370                    // PtrToPtr-then-Transmute is fine so long as the pointer cast is identity:
1371                    // `*const T -> *mut T -> NonNull<T>` is fine, but we need to check for narrowing
1372                    // to skip things like `*const [i32] -> *const i32 -> NonNull<T>`.
1373                    (PtrToPtr, Transmute) if self.pointers_have_same_metadata(inner_from, from) => {
1374                        Some(Transmute)
1375                    }
1376                    // Similarly, for Transmute-then-PtrToPtr. Note that we need to check different
1377                    // variables for their metadata, and thus this can't merge with the previous arm.
1378                    (Transmute, PtrToPtr) if self.pointers_have_same_metadata(from, to) => {
1379                        Some(Transmute)
1380                    }
1381                    // It would be legal to always do this, but we don't want to hide information
1382                    // from the backend that it'd otherwise be able to use for optimizations.
1383                    (Transmute, Transmute)
1384                        if !self.type_may_have_niche_of_interest_to_backend(from) =>
1385                    {
1386                        Some(Transmute)
1387                    }
1388                    _ => None,
1389                };
1390                if let Some(new_kind) = new_kind {
1391                    kind = new_kind;
1392                    from = inner_from;
1393                    value = inner_value;
1394                    was_updated_this_iteration = true;
1395                    if inner_from == to {
1396                        return Some(inner_value);
1397                    }
1398                }
1399            }
1400
1401            if was_updated_this_iteration {
1402                was_ever_updated = true;
1403            } else {
1404                break;
1405            }
1406        }
1407
1408        if was_ever_updated && let Some(op) = self.try_as_operand(value, location) {
1409            *initial_operand = op;
1410            *initial_kind = kind;
1411        }
1412
1413        Some(self.insert(to, Value::Cast { kind, value }))
1414    }
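    // Illustrative sketch of the cast-chain collapsing above (not from the original source):
    // with `_p: *mut [i32]`,
    //   _a = _p as *const [i32] (PtrToPtr);
    //   _b = _a as *const i32 (PtrToPtr);
    // the inner cast is skipped via the `(PtrToPtr, PtrToPtr)` arm, so `_b` is value-numbered
    // as a single `Cast { kind: PtrToPtr, value: _p }`.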
1415
1416    fn simplify_len(&mut self, place: &mut Place<'tcx>, location: Location) -> Option<VnIndex> {
1417        // Trivial case: we are fetching a statically known length.
1418        let place_ty = place.ty(self.local_decls, self.tcx).ty;
1419        if let ty::Array(_, len) = place_ty.kind() {
1420            return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len)));
1421        }
1422
1423        let mut inner = self.simplify_place_value(place, location)?;
1424
1425        // The length information is stored in the wide pointer.
1426        // Reborrowing copies length information from one pointer to the other.
1427        while let Value::Address { place: borrowed, .. } = self.get(inner)
1428            && let [PlaceElem::Deref] = borrowed.projection[..]
1429            && let Some(borrowed) = self.locals[borrowed.local]
1430        {
1431            inner = borrowed;
1432        }
1433
1434        // We have an unsizing cast, which assigns the length to wide pointer metadata.
1435        if let Value::Cast { kind, value: from } = self.get(inner)
1436            && let CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _) = kind
1437            && let Some(from) = self.ty(*from).builtin_deref(true)
1438            && let ty::Array(_, len) = from.kind()
1439            && let Some(to) = self.ty(inner).builtin_deref(true)
1440            && let ty::Slice(..) = to.kind()
1441        {
1442            return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len)));
1443        }
1444
1445        // Fallback: a symbolic `Len`.
1446        Some(self.insert(self.tcx.types.usize, Value::Len(inner)))
1447    }
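    // Illustrative sketch (not from the original source), with `ARR: [u8; 4]`:
    //   _t = &ARR;
    //   _a = move _t as &[u8] (PointerCoercion(Unsize));
    //   _len = Len((*_a));
    // The unsizing cast is what gave the wide pointer its length metadata, so `_len` can be
    // replaced by the constant `4_usize`; reborrows such as `_b = &(*_a)` are looked through
    // by the loop above before checking for the cast.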
1448
1449    fn pointers_have_same_metadata(&self, left_ptr_ty: Ty<'tcx>, right_ptr_ty: Ty<'tcx>) -> bool {
1450        let left_meta_ty = left_ptr_ty.pointee_metadata_ty_or_projection(self.tcx);
1451        let right_meta_ty = right_ptr_ty.pointee_metadata_ty_or_projection(self.tcx);
1452        if left_meta_ty == right_meta_ty {
1453            true
1454        } else if let Ok(left) =
1455            self.tcx.try_normalize_erasing_regions(self.typing_env(), left_meta_ty)
1456            && let Ok(right) =
1457                self.tcx.try_normalize_erasing_regions(self.typing_env(), right_meta_ty)
1458        {
1459            left == right
1460        } else {
1461            false
1462        }
1463    }
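    // For illustration (not from the original source): `*const i32` and `*mut u64` are both
    // thin pointers (unit metadata), so this returns true; `*const [i32]` and `*const i32`
    // have different metadata (a `usize` length vs. none), so it returns false.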
1464
1465    /// Returns `false` if we know for sure that this type has no interesting niche,
1466    /// and thus we can skip transmuting through it without worrying.
1467    ///
1468    /// The backend will emit `assume`s when transmuting between types with niches,
1469    /// so we want to preserve `i32 -> char -> u32` so that that data is around,
1470    /// but it's fine to skip whole-range-is-value steps like `A -> u32 -> B`.
1471    fn type_may_have_niche_of_interest_to_backend(&self, ty: Ty<'tcx>) -> bool {
1472        let Ok(layout) = self.ecx.layout_of(ty) else {
1473            // If it's too generic or something, then assume it might be interesting later.
1474            return true;
1475        };
1476
1477        if layout.uninhabited {
1478            return true;
1479        }
1480
1481        match layout.backend_repr {
1482            BackendRepr::Scalar(a) => !a.is_always_valid(&self.ecx),
1483            BackendRepr::ScalarPair(a, b) => {
1484                !a.is_always_valid(&self.ecx) || !b.is_always_valid(&self.ecx)
1485            }
1486            BackendRepr::SimdVector { .. } | BackendRepr::Memory { .. } => false,
1487        }
1488    }
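    // For illustration (not from the original source): `u32` is a scalar whose whole range is
    // valid, so this returns false and a transmute through it may be skipped; `char` or
    // `NonZeroU32` have invalid bit patterns (niches), so this returns true and the
    // intermediate step is kept so the backend can still emit its `assume`s.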
1489
1490    fn value_is_all_in_one_field(
1491        &self,
1492        ty: Ty<'tcx>,
1493        variant: VariantIdx,
1494    ) -> Option<(FieldIdx, Ty<'tcx>)> {
1495        if let Ok(layout) = self.ecx.layout_of(ty)
1496            && let abi::Variants::Single { index } = layout.variants
1497            && index == variant
1498            && let Some((field_idx, field_layout)) = layout.non_1zst_field(&self.ecx)
1499            && layout.size == field_layout.size
1500        {
1501            // We needed to check the variant to avoid trying to read the tag
1502            // field from an enum where no variants have fields, since that tag
1503            // field isn't in the `Aggregate` from which we're getting values.
1504            Some((field_idx, field_layout.ty))
1505        } else if let ty::Adt(adt, args) = ty.kind()
1506            && adt.is_struct()
1507            && adt.repr().transparent()
1508            && let [single_field] = adt.non_enum_variant().fields.raw.as_slice()
1509        {
1510            Some((FieldIdx::ZERO, single_field.ty(self.tcx, args)))
1511        } else {
1512            None
1513        }
1514    }
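    // For illustration (not from the original source): a `#[repr(transparent)] struct
    // Wrapper(u32)` yields `Some((FieldIdx::ZERO, u32))`, which lets `simplify_cast` transmute
    // the wrapped `u32` directly instead of the whole aggregate.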
1515}
1516
1517fn op_to_prop_const<'tcx>(
1518    ecx: &mut InterpCx<'tcx, DummyMachine>,
1519    op: &OpTy<'tcx>,
1520) -> Option<ConstValue> {
1521    // Do not attempt to propagate unsized locals.
1522    if op.layout.is_unsized() {
1523        return None;
1524    }
1525
1526    // This constant is a ZST, just return an empty value.
1527    if op.layout.is_zst() {
1528        return Some(ConstValue::ZeroSized);
1529    }
1530
1531    // Do not synthesize overly large constants. Codegen will just memcpy them, which we'd like to
1532    // avoid.
1533    if !matches!(op.layout.backend_repr, BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..)) {
1534        return None;
1535    }
1536
1537    // If this constant has scalar ABI, return it as a `ConstValue::Scalar`.
1538    if let BackendRepr::Scalar(abi::Scalar::Initialized { .. }) = op.layout.backend_repr
1539        && let Some(scalar) = ecx.read_scalar(op).discard_err()
1540    {
1541        if scalar.try_to_scalar_int().is_err() {
1542            // Check that we do not leak a pointer.
1543            // Those pointers may lose part of their identity in codegen.
1544            // FIXME: remove this hack once https://github.com/rust-lang/rust/issues/79738 is fixed.
1545            return None;
1546        }
1547        return Some(ConstValue::Scalar(scalar));
1548    }
1549
1550    // If this constant is already represented as an `Allocation`,
1551    // try putting it into global memory to return it.
1552    if let Either::Left(mplace) = op.as_mplace_or_imm() {
1553        let (size, _align) = ecx.size_and_align_of_val(&mplace).discard_err()??;
1554
1555        // Do not try interning a value that contains provenance.
1556        // Due to https://github.com/rust-lang/rust/issues/79738, doing so could lead to bugs.
1557        // FIXME: remove this hack once that issue is fixed.
1558        let alloc_ref = ecx.get_ptr_alloc(mplace.ptr(), size).discard_err()??;
1559        if alloc_ref.has_provenance() {
1560            return None;
1561        }
1562
1563        let pointer = mplace.ptr().into_pointer_or_addr().ok()?;
1564        let (prov, offset) = pointer.prov_and_relative_offset();
1565        let alloc_id = prov.alloc_id();
1566        intern_const_alloc_for_constprop(ecx, alloc_id).discard_err()?;
1567
1568        // `alloc_id` may point to a static. Codegen will choke on an `Indirect` with anything
1569        // but `GlobalAlloc::Memory`, so fall through to copying if needed.
1570        // FIXME: find a way to treat this more uniformly (probably by fixing codegen)
1571        if let GlobalAlloc::Memory(alloc) = ecx.tcx.global_alloc(alloc_id)
1572            // Transmuting a constant is just an offset in the allocation. If the alignment of the
1573            // allocation is not sufficient, fall back to copying into a properly aligned value.
1574            && alloc.inner().align >= op.layout.align.abi
1575        {
1576            return Some(ConstValue::Indirect { alloc_id, offset });
1577        }
1578    }
1579
1580    // Everything failed: create a new allocation to hold the data.
1581    let alloc_id =
1582        ecx.intern_with_temp_alloc(op.layout, |ecx, dest| ecx.copy_op(op, dest)).discard_err()?;
1583    let value = ConstValue::Indirect { alloc_id, offset: Size::ZERO };
1584
1585    // Check that we do not leak a pointer.
1586    // Those pointers may lose part of their identity in codegen.
1587    // FIXME: remove this hack once https://github.com/rust-lang/rust/issues/79738 is fixed.
1588    if ecx.tcx.global_alloc(alloc_id).unwrap_memory().inner().provenance().ptrs().is_empty() {
1589        return Some(value);
1590    }
1591
1592    None
1593}
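// For illustration (not from the original source): a `u8` value becomes `ConstValue::Scalar`,
// a zero-sized value becomes `ConstValue::ZeroSized`, and a scalar pair such as `(u64, u64)`
// ends up as an interned `ConstValue::Indirect` allocation, while anything carrying pointer
// provenance is rejected because of the issue referenced above.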
1594
1595impl<'tcx> VnState<'_, 'tcx> {
1596    /// If either [`Self::try_as_constant`] or [`Self::try_as_place`] succeeds,
1597    /// returns that result as an [`Operand`].
1598    fn try_as_operand(&mut self, index: VnIndex, location: Location) -> Option<Operand<'tcx>> {
1599        if let Some(const_) = self.try_as_constant(index) {
1600            Some(Operand::Constant(Box::new(const_)))
1601        } else if let Some(place) = self.try_as_place(index, location, false) {
1602            self.reused_locals.insert(place.local);
1603            Some(Operand::Copy(place))
1604        } else {
1605            None
1606        }
1607    }
1608
1609    /// If `index` is a `Value::Constant`, return the `Constant` to be put in the MIR.
1610    fn try_as_constant(&mut self, index: VnIndex) -> Option<ConstOperand<'tcx>> {
1611        // This was already a constant in MIR; do not change it. If the constant is not
1612        // deterministic, adding an additional mention of it in MIR will not give the same value as
1613        // the former mention.
1614        if let Value::Constant { value, disambiguator: 0 } = *self.get(index) {
1615            debug_assert!(value.is_deterministic());
1616            return Some(ConstOperand { span: DUMMY_SP, user_ty: None, const_: value });
1617        }
1618
1619        let op = self.evaluated[index].as_ref()?;
1620        if op.layout.is_unsized() {
1621            // Do not attempt to propagate unsized locals.
1622            return None;
1623        }
1624
1625        let value = op_to_prop_const(&mut self.ecx, op)?;
1626
1627        // Check that we do not leak a pointer.
1628        // Those pointers may lose part of their identity in codegen.
1629        // FIXME: remove this hack once https://github.com/rust-lang/rust/issues/79738 is fixed.
1630        assert!(!value.may_have_provenance(self.tcx, op.layout.size));
1631
1632        let const_ = Const::Val(value, op.layout.ty);
1633        Some(ConstOperand { span: DUMMY_SP, user_ty: None, const_ })
1634    }
1635
1636    /// Construct a place which holds the same value as `index` and for which all locals strictly
1637    /// dominate `loc`. If you used this place, add its base local to `reused_locals` to remove
1638    /// storage statements.
1639    #[instrument(level = "trace", skip(self), ret)]
1640    fn try_as_place(
1641        &mut self,
1642        mut index: VnIndex,
1643        loc: Location,
1644        allow_complex_projection: bool,
1645    ) -> Option<Place<'tcx>> {
1646        let mut projection = SmallVec::<[PlaceElem<'tcx>; 1]>::new();
1647        loop {
1648            if let Some(local) = self.try_as_local(index, loc) {
1649                projection.reverse();
1650                let place =
1651                    Place { local, projection: self.tcx.mk_place_elems(projection.as_slice()) };
1652                return Some(place);
1653            } else if projection.last() == Some(&PlaceElem::Deref) {
1654                // `Deref` can only be the first projection in a place.
1655                // If we are here, we failed to find a local, and we already have a `Deref`.
1656                // Trying to add projections will only result in an ill-formed place.
1657                return None;
1658            } else if let Value::Projection(pointer, proj) = *self.get(index)
1659                && (allow_complex_projection || proj.is_stable_offset())
1660                && let Some(proj) = self.try_as_place_elem(self.ty(index), proj, loc)
1661            {
1662                projection.push(proj);
1663                index = pointer;
1664            } else {
1665                return None;
1666            }
1667        }
1668    }
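    // For illustration (not from the original source): in the simplest case, if some SSA local
    // `_5` was assigned the value `index` and that assignment dominates `loc`, this returns the
    // place `_5`; otherwise it tries to peel `Value::Projection`s and rebuild a projected place
    // on top of such a local, returning `None` if that fails.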
1669
1670    /// If there is a local which is assigned `index`, and its assignment strictly dominates `loc`,
1671    /// return it. If you used this local, add it to `reused_locals` to remove storage statements.
1672    fn try_as_local(&mut self, index: VnIndex, loc: Location) -> Option<Local> {
1673        let other = self.rev_locals.get(index)?;
1674        other
1675            .iter()
1676            .find(|&&other| self.ssa.assignment_dominates(&self.dominators, other, loc))
1677            .copied()
1678    }
1679}
1680
1681impl<'tcx> MutVisitor<'tcx> for VnState<'_, 'tcx> {
1682    fn tcx(&self) -> TyCtxt<'tcx> {
1683        self.tcx
1684    }
1685
1686    fn visit_place(&mut self, place: &mut Place<'tcx>, context: PlaceContext, location: Location) {
1687        self.simplify_place_projection(place, location);
1688        if context.is_mutating_use() && place.is_indirect() {
1689            // A non-local mutation may invalidate derefs.
1690            self.invalidate_derefs();
1691        }
1692        self.super_place(place, context, location);
1693    }
1694
1695    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) {
1696        self.simplify_operand(operand, location);
1697        self.super_operand(operand, location);
1698    }
1699
1700    fn visit_assign(
1701        &mut self,
1702        lhs: &mut Place<'tcx>,
1703        rvalue: &mut Rvalue<'tcx>,
1704        location: Location,
1705    ) {
1706        self.simplify_place_projection(lhs, location);
1707
1708        let value = self.simplify_rvalue(lhs, rvalue, location);
1709        if let Some(value) = value {
1710            if let Some(const_) = self.try_as_constant(value) {
1711                *rvalue = Rvalue::Use(Operand::Constant(Box::new(const_)));
1712            } else if let Some(place) = self.try_as_place(value, location, false)
1713                && *rvalue != Rvalue::Use(Operand::Move(place))
1714                && *rvalue != Rvalue::Use(Operand::Copy(place))
1715            {
1716                *rvalue = Rvalue::Use(Operand::Copy(place));
1717                self.reused_locals.insert(place.local);
1718            }
1719        }
1720
1721        if lhs.is_indirect() {
1722            // A non-local mutation may invalidate derefs.
1723            self.invalidate_derefs();
1724        }
1725
1726        if let Some(local) = lhs.as_local()
1727            && self.ssa.is_ssa(local)
1728            && let rvalue_ty = rvalue.ty(self.local_decls, self.tcx)
1729            // FIXME(#112651) `rvalue`'s type may be a subtype of `local`'s type. We can
1730            // only mark `local` as reusable if we have an exact type match.
1731            && self.local_decls[local].ty == rvalue_ty
1732        {
1733            let value = value.unwrap_or_else(|| self.new_opaque(rvalue_ty));
1734            self.assign(local, value);
1735        }
1736    }
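    // Illustrative end-to-end sketch (not from the original source):
    //   _2 = Add(copy _1, const 1_i32);
    //   ...
    //   _3 = Add(copy _1, const 1_i32);
    // Both rvalues receive the same `VnIndex`, so the second assignment is rewritten to
    // `_3 = copy _2`, provided the assignment to `_2` dominates the one to `_3`.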
1737
1738    fn visit_terminator(&mut self, terminator: &mut Terminator<'tcx>, location: Location) {
1739        if let Terminator { kind: TerminatorKind::Call { destination, .. }, .. } = terminator {
1740            if let Some(local) = destination.as_local()
1741                && self.ssa.is_ssa(local)
1742            {
1743                let ty = self.local_decls[local].ty;
1744                let opaque = self.new_opaque(ty);
1745                self.assign(local, opaque);
1746            }
1747        }
1748        // Function calls and ASM may invalidate (nested) derefs. We must handle them carefully.
1749        // Currently, we only preserve derefs across trivial terminators like SwitchInt and Goto.
1750        let safe_to_preserve_derefs = matches!(
1751            terminator.kind,
1752            TerminatorKind::SwitchInt { .. } | TerminatorKind::Goto { .. }
1753        );
1754        if !safe_to_preserve_derefs {
1755            self.invalidate_derefs();
1756        }
1757        self.super_terminator(terminator, location);
1758    }
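    // For illustration (not from the original source): a `Call` terminator may write through
    // pointers the pass cannot see, so any value obtained from a deref before the call is
    // invalidated here and will not be merged with derefs performed after it.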
1759}
1760
1761struct StorageRemover<'tcx> {
1762    tcx: TyCtxt<'tcx>,
1763    reused_locals: DenseBitSet<Local>,
1764}
1765
1766impl<'tcx> MutVisitor<'tcx> for StorageRemover<'tcx> {
1767    fn tcx(&self) -> TyCtxt<'tcx> {
1768        self.tcx
1769    }
1770
1771    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, _: Location) {
1772        if let Operand::Move(place) = *operand
1773            && !place.is_indirect_first_projection()
1774            && self.reused_locals.contains(place.local)
1775        {
1776            *operand = Operand::Copy(place);
1777        }
1778    }
1779
1780    fn visit_statement(&mut self, stmt: &mut Statement<'tcx>, loc: Location) {
1781        match stmt.kind {
1782            // When removing storage statements, we need to remove both (#107511).
1783            StatementKind::StorageLive(l) | StatementKind::StorageDead(l)
1784                if self.reused_locals.contains(l) =>
1785            {
1786                stmt.make_nop()
1787            }
1788            _ => self.super_statement(stmt, loc),
1789        }
1790    }
1791}
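// For illustration (not from the original source): if GVN reused `_4` somewhere, `StorageRemover`
// turns `StorageLive(_4)` and `StorageDead(_4)` into nops (both must be removed together, see
// #107511) and downgrades `Move(_4)` operands to `Copy(_4)`, since the rewritten MIR may read
// `_4` again after the point where it was originally moved.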