1use std::borrow::Cow;
90use std::hash::{Hash, Hasher};
91
92use either::Either;
93use itertools::Itertools as _;
94use rustc_abi::{self as abi, BackendRepr, FIRST_VARIANT, FieldIdx, Primitive, Size, VariantIdx};
95use rustc_arena::DroplessArena;
96use rustc_const_eval::const_eval::DummyMachine;
97use rustc_const_eval::interpret::{
98 ImmTy, Immediate, InterpCx, MemPlaceMeta, MemoryKind, OpTy, Projectable, Scalar,
99 intern_const_alloc_for_constprop,
100};
101use rustc_data_structures::fx::FxHasher;
102use rustc_data_structures::graph::dominators::Dominators;
103use rustc_data_structures::hash_table::{Entry, HashTable};
104use rustc_hir::def::DefKind;
105use rustc_index::bit_set::DenseBitSet;
106use rustc_index::{IndexVec, newtype_index};
107use rustc_middle::bug;
108use rustc_middle::mir::interpret::{AllocRange, GlobalAlloc};
109use rustc_middle::mir::visit::*;
110use rustc_middle::mir::*;
111use rustc_middle::ty::layout::HasTypingEnv;
112use rustc_middle::ty::{self, Ty, TyCtxt};
113use rustc_span::DUMMY_SP;
114use smallvec::SmallVec;
115use tracing::{debug, instrument, trace};
116
117use crate::ssa::SsaLocals;
118
/// The global value numbering (GVN) MIR optimization pass.
pub(super) struct GVN;
120
impl<'tcx> crate::MirPass<'tcx> for GVN {
    fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
        // Only run at mir-opt-level >= 2.
        sess.mir_opt_level() >= 2
    }

    #[instrument(level = "trace", skip(self, tcx, body))]
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        debug!(def_id = ?body.source.def_id());

        let typing_env = body.typing_env(tcx);
        let ssa = SsaLocals::new(tcx, body, typing_env);
        // Clone the dominator tree: we need it while mutating `body` below.
        let dominators = body.basic_blocks.dominators().clone();

        let arena = DroplessArena::default();
        let mut state =
            VnState::new(tcx, body, typing_env, &ssa, dominators, &body.local_decls, &arena);

        // Seed each SSA function argument with a fresh, never-unifying value.
        for local in body.args_iter().filter(|&local| ssa.is_ssa(local)) {
            let opaque = state.new_argument(body.local_decls[local].ty);
            state.assign(local, opaque);
        }

        // Visit blocks in reverse postorder so dominating definitions are
        // value-numbered before their uses.
        let reverse_postorder = body.basic_blocks.reverse_postorder().to_vec();
        for bb in reverse_postorder {
            let data = &mut body.basic_blocks.as_mut_preserves_cfg()[bb];
            state.visit_basic_block_data(bb, data);
        }

        // Locals that were reused outside their original liveness range must
        // have their storage markers removed to stay sound.
        StorageRemover { tcx, reused_locals: state.reused_locals }.visit_body_preserves_cfg(body);
    }

    fn is_required(&self) -> bool {
        false
    }
}
160
newtype_index! {
    /// Identifier for a value tracked in the `ValueSet`.
    #[debug_format = "_v{}"]
    struct VnIndex {}
}
166
/// Marker for values that must never be deduplicated: each instance gets its
/// identity from its unique `VnIndex` rather than structural equality.
#[derive(Copy, Clone, Debug, Eq)]
struct VnOpaque;
impl PartialEq for VnOpaque {
    fn eq(&self, _: &VnOpaque) -> bool {
        // Values containing `VnOpaque` are only created through
        // `insert_unique`, which bypasses the deduplicating hash table, so
        // equality must never be queried.
        unreachable!()
    }
}
impl Hash for VnOpaque {
    fn hash<T: Hasher>(&self, _: &mut T) {
        // Same reasoning as `eq`: these values are never hashed for lookup.
        unreachable!()
    }
}
184
/// How the address of a place was taken.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
enum AddressKind {
    /// A reference: `&place` or `&mut place`.
    Ref(BorrowKind),
    /// A raw pointer: `&raw const place` or `&raw mut place`.
    Address(RawPtrKind),
}
190
/// Base of a `Value::Address`: what the projected address starts from.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
enum AddressBase {
    /// The address of a local.
    Local(Local),
    /// The address obtained by dereferencing a pointer value (a reborrow).
    Deref(VnIndex),
}
198
/// Symbolic value tracked by the value-numbering state. Slice-carrying
/// variants borrow from the pass-local `DroplessArena` (`'a`).
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
enum Value<'a, 'tcx> {
    /// A value with no known representation; never unifies with anything.
    Opaque(VnOpaque),
    /// The (opaque) initial value of a function argument.
    Argument(VnOpaque),
    /// A MIR constant.
    Constant {
        value: Const<'tcx>,
        /// `Some` for non-deterministic constants, so two evaluations of the
        /// same constant are never considered equal; `None` otherwise.
        disambiguator: Option<VnOpaque>,
    },

    /// An aggregate: the variant index and the value of each field.
    Aggregate(VariantIdx, &'a [VnIndex]),
    /// A union value: the active field and the value written to it.
    Union(FieldIdx, VnIndex),
    /// A raw pointer assembled from a data pointer and its metadata.
    RawPtr {
        pointer: VnIndex,
        metadata: VnIndex,
    },
    /// An array built by repeating a value a constant number of times.
    Repeat(VnIndex, ty::Const<'tcx>),
    /// The address of a place.
    Address {
        base: AddressBase,
        projection: &'a [ProjectionElem<VnIndex, Ty<'tcx>>],
        kind: AddressKind,
        /// Keeps every taken address distinct: two borrows of the same place
        /// do not compare equal.
        provenance: VnOpaque,
    },

    /// A projection applied to another value.
    Projection(VnIndex, ProjectionElem<VnIndex, ()>),
    /// The discriminant read from an enum value.
    Discriminant(VnIndex),

    /// A runtime-checks flag operand; inserted with boolean type
    /// (see `simplify_operand`).
    RuntimeChecks(RuntimeChecks),
    UnaryOp(UnOp, VnIndex),
    BinaryOp(BinOp, VnIndex, VnIndex),
    Cast {
        kind: CastKind,
        value: VnIndex,
    },
}
256
/// Interning table mapping `(Value, Ty)` pairs to dense `VnIndex`es.
struct ValueSet<'a, 'tcx> {
    /// Hash table used for deduplication; keys are indices into the three
    /// parallel vectors below.
    indices: HashTable<VnIndex>,
    /// Cached hash of each value, used to rehash table entries.
    hashes: IndexVec<VnIndex, u64>,
    values: IndexVec<VnIndex, Value<'a, 'tcx>>,
    types: IndexVec<VnIndex, Ty<'tcx>>,
}
268
impl<'a, 'tcx> ValueSet<'a, 'tcx> {
    /// Create an empty set, pre-sized for about `num_values` entries.
    fn new(num_values: usize) -> ValueSet<'a, 'tcx> {
        ValueSet {
            indices: HashTable::with_capacity(num_values),
            hashes: IndexVec::with_capacity(num_values),
            values: IndexVec::with_capacity(num_values),
            types: IndexVec::with_capacity(num_values),
        }
    }

    /// Insert a value that must never compare equal to any other. It is not
    /// registered in `indices`, so lookups can never find it; its hash slot
    /// is filled with a dummy 0.
    #[inline]
    fn insert_unique(
        &mut self,
        ty: Ty<'tcx>,
        value: impl FnOnce(VnOpaque) -> Value<'a, 'tcx>,
    ) -> VnIndex {
        let value = value(VnOpaque);

        // Only inherently-unique value kinds may bypass deduplication.
        debug_assert!(match value {
            Value::Opaque(_) | Value::Argument(_) | Value::Address { .. } => true,
            Value::Constant { disambiguator, .. } => disambiguator.is_some(),
            _ => false,
        });

        // Keep the three parallel vectors in lockstep.
        let index = self.hashes.push(0);
        let _index = self.types.push(ty);
        debug_assert_eq!(index, _index);
        let _index = self.values.push(value);
        debug_assert_eq!(index, _index);
        index
    }

    /// Insert a value, deduplicating against structurally-equal values of the
    /// same type. Returns the index and whether the value is newly created.
    #[cfg_attr(not(bootstrap), allow(rustc::disallowed_pass_by_ref))] #[cfg_attr(bootstrap, allow(rustc::pass_by_value))]
    fn insert(&mut self, ty: Ty<'tcx>, value: Value<'a, 'tcx>) -> (VnIndex, bool) {
        // Unique value kinds must go through `insert_unique` instead.
        // NOTE(review): `Value::Argument` falls into the `_ => true` arm here
        // although `insert_unique` treats it as unique — confirm whether it
        // should also be rejected by this assertion.
        debug_assert!(match value {
            Value::Opaque(_) | Value::Address { .. } => false,
            Value::Constant { disambiguator, .. } => disambiguator.is_none(),
            _ => true,
        });

        // Hash both the value and its type: equal values of different types
        // must remain distinct.
        let hash: u64 = {
            let mut h = FxHasher::default();
            value.hash(&mut h);
            ty.hash(&mut h);
            h.finish()
        };

        let eq = |index: &VnIndex| self.values[*index] == value && self.types[*index] == ty;
        let hasher = |index: &VnIndex| self.hashes[*index];
        match self.indices.entry(hash, eq, hasher) {
            Entry::Occupied(entry) => {
                let index = *entry.get();
                (index, false)
            }
            Entry::Vacant(entry) => {
                // New value: extend all three parallel vectors.
                let index = self.hashes.push(hash);
                entry.insert(index);
                let _index = self.values.push(value);
                debug_assert_eq!(index, _index);
                let _index = self.types.push(ty);
                debug_assert_eq!(index, _index);
                (index, true)
            }
        }
    }

    /// The `Value` stored at `index`.
    #[inline]
    fn value(&self, index: VnIndex) -> Value<'a, 'tcx> {
        self.values[index]
    }

    /// The type of the value stored at `index`.
    #[inline]
    fn ty(&self, index: VnIndex) -> Ty<'tcx> {
        self.types[index]
    }
}
352
/// Mutable state of the value-numbering analysis over one body.
struct VnState<'body, 'a, 'tcx> {
    tcx: TyCtxt<'tcx>,
    /// Interpreter used to const-evaluate values.
    ecx: InterpCx<'tcx, DummyMachine>,
    local_decls: &'body LocalDecls<'tcx>,
    /// Whether the body is a coroutine (restricts layout computation in
    /// `eval_to_const_inner`).
    is_coroutine: bool,
    /// Value currently stored in each local, if known.
    locals: IndexVec<Local, Option<VnIndex>>,
    /// Inverse of `locals`: the locals known to hold each value.
    rev_locals: IndexVec<VnIndex, SmallVec<[Local; 1]>>,
    /// Deduplicating table of all values seen so far.
    values: ValueSet<'a, 'tcx>,
    /// Memoized constant evaluation: outer `None` = not yet attempted,
    /// `Some(None)` = attempted and failed.
    evaluated: IndexVec<VnIndex, Option<Option<&'a OpTy<'tcx>>>>,
    ssa: &'body SsaLocals,
    dominators: Dominators<BasicBlock>,
    /// Locals reused while rewriting places/operands; their storage
    /// statements are removed by `StorageRemover` afterwards.
    reused_locals: DenseBitSet<Local>,
    arena: &'a DroplessArena,
}
374
375impl<'body, 'a, 'tcx> VnState<'body, 'a, 'tcx> {
376 fn new(
377 tcx: TyCtxt<'tcx>,
378 body: &Body<'tcx>,
379 typing_env: ty::TypingEnv<'tcx>,
380 ssa: &'body SsaLocals,
381 dominators: Dominators<BasicBlock>,
382 local_decls: &'body LocalDecls<'tcx>,
383 arena: &'a DroplessArena,
384 ) -> Self {
385 let num_values =
390 2 * body.basic_blocks.iter().map(|bbdata| bbdata.statements.len()).sum::<usize>()
391 + 4 * body.basic_blocks.len();
392 VnState {
393 tcx,
394 ecx: InterpCx::new(tcx, DUMMY_SP, typing_env, DummyMachine),
395 local_decls,
396 is_coroutine: body.coroutine.is_some(),
397 locals: IndexVec::from_elem(None, local_decls),
398 rev_locals: IndexVec::with_capacity(num_values),
399 values: ValueSet::new(num_values),
400 evaluated: IndexVec::with_capacity(num_values),
401 ssa,
402 dominators,
403 reused_locals: DenseBitSet::new_empty(local_decls.len()),
404 arena,
405 }
406 }
407
    /// The typing environment used for all interpreter and trait queries.
    fn typing_env(&self) -> ty::TypingEnv<'tcx> {
        self.ecx.typing_env()
    }
411
412 fn insert_unique(
413 &mut self,
414 ty: Ty<'tcx>,
415 value: impl FnOnce(VnOpaque) -> Value<'a, 'tcx>,
416 ) -> VnIndex {
417 let index = self.values.insert_unique(ty, value);
418 let _index = self.evaluated.push(None);
419 debug_assert_eq!(index, _index);
420 let _index = self.rev_locals.push(SmallVec::new());
421 debug_assert_eq!(index, _index);
422 index
423 }
424
425 #[instrument(level = "trace", skip(self), ret)]
426 fn insert(&mut self, ty: Ty<'tcx>, value: Value<'a, 'tcx>) -> VnIndex {
427 let (index, new) = self.values.insert(ty, value);
428 if new {
429 let _index = self.evaluated.push(None);
431 debug_assert_eq!(index, _index);
432 let _index = self.rev_locals.push(SmallVec::new());
433 debug_assert_eq!(index, _index);
434 }
435 index
436 }
437
    /// Create a fresh opaque value of type `ty`; it never unifies with any
    /// other value and is never const-evaluable.
    #[instrument(level = "trace", skip(self), ret)]
    fn new_opaque(&mut self, ty: Ty<'tcx>) -> VnIndex {
        let index = self.insert_unique(ty, Value::Opaque);
        // Mark evaluation as already attempted-and-failed.
        self.evaluated[index] = Some(None);
        index
    }
446
    /// Create the fresh value of a function argument of type `ty`; like an
    /// opaque, it never unifies and is never const-evaluable.
    #[instrument(level = "trace", skip(self), ret)]
    fn new_argument(&mut self, ty: Ty<'tcx>) -> VnIndex {
        let index = self.insert_unique(ty, Value::Argument);
        // Mark evaluation as already attempted-and-failed.
        self.evaluated[index] = Some(None);
        index
    }
453
    /// Build the value for the address of `place`, taken with `kind`.
    /// Returns `None` if the base or a projection cannot be resolved.
    #[instrument(level = "trace", skip(self), ret)]
    fn new_pointer(&mut self, place: Place<'tcx>, kind: AddressKind) -> Option<VnIndex> {
        let pty = place.ty(self.local_decls, self.tcx).ty;
        // Type of the resulting reference or raw pointer.
        let ty = match kind {
            AddressKind::Ref(bk) => {
                Ty::new_ref(self.tcx, self.tcx.lifetimes.re_erased, pty, bk.to_mutbl_lossy())
            }
            AddressKind::Address(mutbl) => Ty::new_ptr(self.tcx, pty, mutbl.to_mutbl_lossy()),
        };

        let mut projection = place.projection.iter();
        let base = if place.is_indirect_first_projection() {
            // `&(*p).x`: the base is the value of the pointer `p`.
            let base = self.locals[place.local]?;
            // Skip the leading `Deref`, accounted for by `AddressBase::Deref`.
            projection.next();
            AddressBase::Deref(base)
        } else if self.ssa.is_ssa(place.local) {
            AddressBase::Local(place.local)
        } else {
            // Non-SSA locals may be reassigned, so their address is unstable.
            return None;
        };
        // Translate remaining projection elems, mapping index locals to their
        // values; bail if any index local has no known value.
        let projection =
            projection.map(|proj| proj.try_map(|index| self.locals[index], |ty| ty).ok_or(()));
        let projection = self.arena.try_alloc_from_iter(projection).ok()?;

        // Addresses are never merged: each borrow gets its own provenance.
        let index = self.insert_unique(ty, |provenance| Value::Address {
            base,
            projection,
            kind,
            provenance,
        });
        Some(index)
    }
490
491 #[instrument(level = "trace", skip(self), ret)]
492 fn insert_constant(&mut self, value: Const<'tcx>) -> VnIndex {
493 if is_deterministic(value) {
494 let constant = Value::Constant { value, disambiguator: None };
496 self.insert(value.ty(), constant)
497 } else {
498 self.insert_unique(value.ty(), |disambiguator| Value::Constant {
501 value,
502 disambiguator: Some(disambiguator),
503 })
504 }
505 }
506
    /// The `Value` interned at `index`.
    #[inline]
    fn get(&self, index: VnIndex) -> Value<'a, 'tcx> {
        self.values.value(index)
    }
511
    /// The type of the value interned at `index`.
    #[inline]
    fn ty(&self, index: VnIndex) -> Ty<'tcx> {
        self.values.ty(index)
    }
516
    /// Record that SSA local `local` holds `value`, in both directions.
    #[instrument(level = "trace", skip(self))]
    fn assign(&mut self, local: Local, value: VnIndex) {
        debug_assert!(self.ssa.is_ssa(local));
        self.locals[local] = Some(value);
        self.rev_locals[value].push(local);
    }
524
    /// Intern the boolean constant `flag`.
    fn insert_bool(&mut self, flag: bool) -> VnIndex {
        let value = Const::from_bool(self.tcx, flag);
        debug_assert!(is_deterministic(value));
        self.insert(self.tcx.types.bool, Value::Constant { value, disambiguator: None })
    }
531
    /// Intern a scalar constant of type `ty`.
    fn insert_scalar(&mut self, ty: Ty<'tcx>, scalar: Scalar) -> VnIndex {
        let value = Const::from_scalar(self.tcx, scalar, ty);
        debug_assert!(is_deterministic(value));
        self.insert(ty, Value::Constant { value, disambiguator: None })
    }
538
    /// Intern a tuple aggregate of type `ty` built from `values`.
    fn insert_tuple(&mut self, ty: Ty<'tcx>, values: &[VnIndex]) -> VnIndex {
        self.insert(ty, Value::Aggregate(VariantIdx::ZERO, self.arena.alloc_slice(values)))
    }
542
    /// Try to evaluate `value` to an interpreter constant, without
    /// memoization (see `eval_to_const` for the cached entry point).
    /// Returns `None` whenever any sub-value is unknown or evaluation fails.
    #[instrument(level = "trace", skip(self), ret)]
    fn eval_to_const_inner(&mut self, value: VnIndex) -> Option<OpTy<'tcx>> {
        use Value::*;
        let ty = self.ty(value);
        // NOTE(review): inside coroutine bodies, layouts are only computed
        // for scalar types — presumably to avoid querying problematic
        // coroutine-interior layouts; confirm the exact rationale.
        let ty = if !self.is_coroutine || ty.is_scalar() {
            self.ecx.layout_of(ty).ok()?
        } else {
            return None;
        };
        let op = match self.get(value) {
            // A ZST is fully determined by its type.
            _ if ty.is_zst() => ImmTy::uninit(ty).into(),

            Opaque(_) | Argument(_) => return None,
            RuntimeChecks(..) => return None,

            Repeat(value, _count) => {
                // Only a repeat of an uninit value can be summarized without
                // materializing the whole array.
                let value = self.eval_to_const(value)?;
                if value.is_immediate_uninit() {
                    ImmTy::uninit(ty).into()
                } else {
                    return None;
                }
            }
            Constant { ref value, disambiguator: _ } => {
                self.ecx.eval_mir_constant(value, DUMMY_SP, None).discard_err()?
            }
            Aggregate(variant, ref fields) => {
                let fields =
                    fields.iter().map(|&f| self.eval_to_const(f)).collect::<Option<Vec<_>>>()?;
                let variant = if ty.ty.is_enum() { Some(variant) } else { None };
                // Only materialize aggregates whose ABI fits in one or two
                // scalars; anything larger is not worth building here.
                let (BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..)) = ty.backend_repr
                else {
                    return None;
                };
                let dest = self.ecx.allocate(ty, MemoryKind::Stack).discard_err()?;
                let variant_dest = if let Some(variant) = variant {
                    self.ecx.project_downcast(&dest, variant).discard_err()?
                } else {
                    dest.clone()
                };
                // Write each field, then the discriminant.
                for (field_index, op) in fields.into_iter().enumerate() {
                    let field_dest = self
                        .ecx
                        .project_field(&variant_dest, FieldIdx::from_usize(field_index))
                        .discard_err()?;
                    self.ecx.copy_op(op, &field_dest).discard_err()?;
                }
                self.ecx
                    .write_discriminant(variant.unwrap_or(FIRST_VARIANT), &dest)
                    .discard_err()?;
                // Freeze the allocation so it behaves like a constant.
                self.ecx
                    .alloc_mark_immutable(dest.ptr().provenance.unwrap().alloc_id())
                    .discard_err()?;
                dest.into()
            }
            Union(active_field, field) => {
                let field = self.eval_to_const(field)?;
                if field.layout.layout.is_zst() {
                    // A ZST active field leaves the union uninitialized.
                    ImmTy::from_immediate(Immediate::Uninit, ty).into()
                } else if matches!(
                    ty.backend_repr,
                    BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..)
                ) {
                    // Same scalar-only restriction as for `Aggregate` above.
                    let dest = self.ecx.allocate(ty, MemoryKind::Stack).discard_err()?;
                    let field_dest = self.ecx.project_field(&dest, active_field).discard_err()?;
                    self.ecx.copy_op(field, &field_dest).discard_err()?;
                    self.ecx
                        .alloc_mark_immutable(dest.ptr().provenance.unwrap().alloc_id())
                        .discard_err()?;
                    dest.into()
                } else {
                    return None;
                }
            }
            RawPtr { pointer, metadata } => {
                let pointer = self.eval_to_const(pointer)?;
                let metadata = self.eval_to_const(metadata)?;

                // Reassemble a (possibly wide) pointer from data + metadata.
                let data = self.ecx.read_pointer(pointer).discard_err()?;
                let meta = if metadata.layout.is_zst() {
                    MemPlaceMeta::None
                } else {
                    MemPlaceMeta::Meta(self.ecx.read_scalar(metadata).discard_err()?)
                };
                let ptr_imm = Immediate::new_pointer_with_meta(data, meta, &self.ecx);
                ImmTy::from_immediate(ptr_imm, ty).into()
            }

            Projection(base, elem) => {
                let base = self.eval_to_const(base)?;
                // A dynamic `Index` cannot be const-evaluated (mapped to
                // `None`); the type parameter becomes the result's type.
                let elem = elem.try_map(|_| None, |()| ty.ty)?;
                self.ecx.project(base, elem).discard_err()?
            }
            Address { base, projection, .. } => {
                debug_assert!(!projection.contains(&ProjectionElem::Deref));
                let pointer = match base {
                    AddressBase::Deref(pointer) => self.eval_to_const(pointer)?,
                    // The runtime address of a local is not a constant.
                    AddressBase::Local(_) => return None,
                };
                let mut mplace = self.ecx.deref_pointer(pointer).discard_err()?;
                for elem in projection {
                    // Same restriction on dynamic indices as above.
                    let elem = elem.try_map(|_| None, |ty| ty)?;
                    mplace = self.ecx.project(&mplace, elem).discard_err()?;
                }
                let pointer = mplace.to_ref(&self.ecx);
                ImmTy::from_immediate(pointer, ty).into()
            }

            Discriminant(base) => {
                let base = self.eval_to_const(base)?;
                let variant = self.ecx.read_discriminant(base).discard_err()?;
                let discr_value =
                    self.ecx.discriminant_for_variant(base.layout.ty, variant).discard_err()?;
                discr_value.into()
            }
            UnaryOp(un_op, operand) => {
                let operand = self.eval_to_const(operand)?;
                let operand = self.ecx.read_immediate(operand).discard_err()?;
                let val = self.ecx.unary_op(un_op, &operand).discard_err()?;
                val.into()
            }
            BinaryOp(bin_op, lhs, rhs) => {
                let lhs = self.eval_to_const(lhs)?;
                let rhs = self.eval_to_const(rhs)?;
                let lhs = self.ecx.read_immediate(lhs).discard_err()?;
                let rhs = self.ecx.read_immediate(rhs).discard_err()?;
                let val = self.ecx.binary_op(bin_op, &lhs, &rhs).discard_err()?;
                val.into()
            }
            Cast { kind, value } => match kind {
                CastKind::IntToInt | CastKind::IntToFloat => {
                    let value = self.eval_to_const(value)?;
                    let value = self.ecx.read_immediate(value).discard_err()?;
                    let res = self.ecx.int_to_int_or_float(&value, ty).discard_err()?;
                    res.into()
                }
                CastKind::FloatToFloat | CastKind::FloatToInt => {
                    let value = self.eval_to_const(value)?;
                    let value = self.ecx.read_immediate(value).discard_err()?;
                    let res = self.ecx.float_to_float_or_int(&value, ty).discard_err()?;
                    res.into()
                }
                CastKind::Transmute | CastKind::Subtype => {
                    let value = self.eval_to_const(value)?;
                    // Immediates can only be reinterpreted when the scalar
                    // layout matches exactly and no pointer provenance is
                    // involved in the source.
                    if value.as_mplace_or_imm().is_right() {
                        let can_transmute = match (value.layout.backend_repr, ty.backend_repr) {
                            (BackendRepr::Scalar(s1), BackendRepr::Scalar(s2)) => {
                                s1.size(&self.ecx) == s2.size(&self.ecx)
                                    && !matches!(s1.primitive(), Primitive::Pointer(..))
                            }
                            (BackendRepr::ScalarPair(a1, b1), BackendRepr::ScalarPair(a2, b2)) => {
                                a1.size(&self.ecx) == a2.size(&self.ecx)
                                    && b1.size(&self.ecx) == b2.size(&self.ecx)
                                    && b1.align(&self.ecx) == b2.align(&self.ecx)
                                    && !matches!(a1.primitive(), Primitive::Pointer(..))
                                    && !matches!(b1.primitive(), Primitive::Pointer(..))
                            }
                            _ => false,
                        };
                        if !can_transmute {
                            return None;
                        }
                    }
                    // Reinterpret in place: zero offset, new layout.
                    value.offset(Size::ZERO, ty, &self.ecx).discard_err()?
                }
                CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _) => {
                    let src = self.eval_to_const(value)?;
                    let dest = self.ecx.allocate(ty, MemoryKind::Stack).discard_err()?;
                    self.ecx.unsize_into(src, ty, &dest).discard_err()?;
                    self.ecx
                        .alloc_mark_immutable(dest.ptr().provenance.unwrap().alloc_id())
                        .discard_err()?;
                    dest.into()
                }
                CastKind::FnPtrToPtr | CastKind::PtrToPtr => {
                    let src = self.eval_to_const(value)?;
                    let src = self.ecx.read_immediate(src).discard_err()?;
                    let ret = self.ecx.ptr_to_ptr(&src, ty).discard_err()?;
                    ret.into()
                }
                CastKind::PointerCoercion(ty::adjustment::PointerCoercion::UnsafeFnPointer, _) => {
                    // Only the type changes; the value is bitwise identical.
                    let src = self.eval_to_const(value)?;
                    let src = self.ecx.read_immediate(src).discard_err()?;
                    ImmTy::from_immediate(*src, ty).into()
                }
                _ => return None,
            },
        };
        Some(op)
    }
751
752 fn eval_to_const(&mut self, index: VnIndex) -> Option<&'a OpTy<'tcx>> {
753 if let Some(op) = self.evaluated[index] {
754 return op;
755 }
756 let op = self.eval_to_const_inner(index);
757 self.evaluated[index] = Some(self.arena.alloc(op).as_ref());
758 self.evaluated[index].unwrap()
759 }
760
    /// Compute the value stored in the place addressed by `base` +
    /// `projection`, i.e. symbolically resolve `*&base.projection`.
    #[instrument(level = "trace", skip(self), ret)]
    fn dereference_address(
        &mut self,
        base: AddressBase,
        projection: &[ProjectionElem<VnIndex, Ty<'tcx>>],
    ) -> Option<VnIndex> {
        let (mut place_ty, mut value) = match base {
            // Address of a local: start from that local's known value.
            AddressBase::Local(local) => {
                let local = self.locals[local]?;
                let place_ty = PlaceTy::from_ty(self.ty(local));
                (place_ty, local)
            }
            // A reborrow: dereference the original pointer first.
            AddressBase::Deref(reborrow) => {
                let place_ty = PlaceTy::from_ty(self.ty(reborrow));
                self.project(place_ty, reborrow, ProjectionElem::Deref)?
            }
        };
        // Apply each remaining projection in order.
        for &proj in projection {
            (place_ty, value) = self.project(place_ty, value, proj)?;
        }
        Some(value)
    }
786
    /// Compute the value of `value` projected by `proj`, along with the
    /// resulting place type. Simplifies through known `Value`s (aggregates,
    /// repeats, addresses) where possible; otherwise records a symbolic
    /// `Value::Projection`.
    #[instrument(level = "trace", skip(self), ret)]
    fn project(
        &mut self,
        place_ty: PlaceTy<'tcx>,
        value: VnIndex,
        proj: ProjectionElem<VnIndex, Ty<'tcx>>,
    ) -> Option<(PlaceTy<'tcx>, VnIndex)> {
        let projection_ty = place_ty.projection_ty(self.tcx, proj);
        let proj = match proj {
            ProjectionElem::Deref => {
                // Only dereference shared references to freeze types: the
                // pointee cannot be mutated while the borrow is live.
                if let Some(Mutability::Not) = place_ty.ty.ref_mutability()
                    && projection_ty.ty.is_freeze(self.tcx, self.typing_env())
                {
                    // If we know which place the pointer addresses, read the
                    // value stored in that place directly.
                    if let Value::Address { base, projection, .. } = self.get(value)
                        && let Some(value) = self.dereference_address(base, projection)
                    {
                        return Some((projection_ty, value));
                    }
                    // NOTE(review): dereferencing to a reference type is
                    // rejected here — presumably to avoid creating symbolic
                    // nested dereferences; confirm the rationale.
                    if projection_ty.ty.is_ref() {
                        return None;
                    }

                    let deref = self
                        .insert(projection_ty.ty, Value::Projection(value, ProjectionElem::Deref));
                    return Some((projection_ty, deref));
                } else {
                    return None;
                }
            }
            ProjectionElem::Downcast(name, index) => ProjectionElem::Downcast(name, index),
            ProjectionElem::Field(f, _) => match self.get(value) {
                // Reading a field of a known aggregate yields that field.
                Value::Aggregate(_, fields) => return Some((projection_ty, fields[f.as_usize()])),
                // Reading the active field of a union yields the stored value.
                Value::Union(active, field) if active == f => return Some((projection_ty, field)),
                // Reading through a downcast to the variant that was actually
                // written also yields the field value.
                Value::Projection(outer_value, ProjectionElem::Downcast(_, read_variant))
                    if let Value::Aggregate(written_variant, fields) = self.get(outer_value)
                        && written_variant == read_variant =>
                {
                    return Some((projection_ty, fields[f.as_usize()]));
                }
                _ => ProjectionElem::Field(f, ()),
            },
            ProjectionElem::Index(idx) => {
                // Any index into a repeat expression yields the element.
                if let Value::Repeat(inner, _) = self.get(value) {
                    return Some((projection_ty, inner));
                }
                ProjectionElem::Index(idx)
            }
            ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
                match self.get(value) {
                    Value::Repeat(inner, _) => {
                        return Some((projection_ty, inner));
                    }
                    Value::Aggregate(_, operands) => {
                        // Resolve `from_end` indexing against the known length.
                        let offset = if from_end {
                            operands.len() - offset as usize
                        } else {
                            offset as usize
                        };
                        let value = operands.get(offset).copied()?;
                        return Some((projection_ty, value));
                    }
                    _ => {}
                };
                ProjectionElem::ConstantIndex { offset, min_length, from_end }
            }
            ProjectionElem::Subslice { from, to, from_end } => {
                ProjectionElem::Subslice { from, to, from_end }
            }
            ProjectionElem::OpaqueCast(_) => ProjectionElem::OpaqueCast(()),
            ProjectionElem::UnwrapUnsafeBinder(_) => ProjectionElem::UnwrapUnsafeBinder(()),
        };

        // No simplification applied: intern a symbolic projection value.
        let value = self.insert(projection_ty.ty, Value::Projection(value, proj));
        Some((projection_ty, value))
    }
888
    /// Simplify `place` in place: replace a dereferenced base local and any
    /// `Index` locals by cheaper equivalents (a dominating local, or a
    /// constant index).
    #[instrument(level = "trace", skip(self))]
    fn simplify_place_projection(&mut self, place: &mut Place<'tcx>, location: Location) {
        // If the base of a `*ptr`-style place has a known value available in
        // another local that dominates this use, switch to that local.
        if place.is_indirect_first_projection()
            && let Some(base) = self.locals[place.local]
            && let Some(new_local) = self.try_as_local(base, location)
            && place.local != new_local
        {
            place.local = new_local;
            self.reused_locals.insert(new_local);
        }

        // Copy-on-write: only re-intern the projection list if it changes.
        let mut projection = Cow::Borrowed(&place.projection[..]);

        for i in 0..projection.len() {
            let elem = projection[i];
            if let ProjectionElem::Index(idx_local) = elem
                && let Some(idx) = self.locals[idx_local]
            {
                // A known-constant index becomes `ConstantIndex`; otherwise
                // try to reuse a dominating local holding the same index.
                if let Some(offset) = self.eval_to_const(idx)
                    && let Some(offset) = self.ecx.read_target_usize(offset).discard_err()
                    && let Some(min_length) = offset.checked_add(1)
                {
                    projection.to_mut()[i] =
                        ProjectionElem::ConstantIndex { offset, min_length, from_end: false };
                } else if let Some(new_idx_local) = self.try_as_local(idx, location)
                    && idx_local != new_idx_local
                {
                    projection.to_mut()[i] = ProjectionElem::Index(new_idx_local);
                    self.reused_locals.insert(new_idx_local);
                }
            }
        }

        if Cow::is_owned(&projection) {
            place.projection = self.tcx.mk_place_elems(&projection);
        }

        trace!(?place);
    }
931
    /// Compute the `VnIndex` of `place` by walking its projections.
    /// On failure, returns the shortest equivalent place prefix that could
    /// be resolved, rebased on a dominating local where possible.
    #[instrument(level = "trace", skip(self), ret)]
    fn compute_place_value(
        &mut self,
        place: Place<'tcx>,
        location: Location,
    ) -> Result<VnIndex, PlaceRef<'tcx>> {
        // Fallback place to return if a projection cannot be resolved.
        let mut place_ref = place.as_ref();

        // Invariant: `value` is the value of the place formed by the
        // projections visited so far, whose type is `place_ty`.
        let Some(mut value) = self.locals[place.local] else { return Err(place_ref) };
        let mut place_ty = PlaceTy::from_ty(self.local_decls[place.local].ty);
        for (index, proj) in place.projection.iter().enumerate() {
            // If the value so far is available in a dominating local, shorten
            // the fallback place to start from that local.
            if let Some(local) = self.try_as_local(value, location) {
                place_ref = PlaceRef { local, projection: &place.projection[index..] };
            }

            // Map index locals to their values; bail on unknown ones.
            let Some(proj) = proj.try_map(|value| self.locals[value], |ty| ty) else {
                return Err(place_ref);
            };
            let Some(ty_and_value) = self.project(place_ty, value, proj) else {
                return Err(place_ref);
            };
            (place_ty, value) = ty_and_value;
        }

        Ok(value)
    }
967
    /// Simplify `place` and compute its value. Rewrites `place` to a
    /// strictly simpler equivalent spelling when one is found.
    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_place_value(
        &mut self,
        place: &mut Place<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        self.simplify_place_projection(place, location);

        match self.compute_place_value(*place, location) {
            Ok(value) => {
                // Replace `place` only if the new spelling has a different
                // base or fewer projections.
                if let Some(new_place) = self.try_as_place(value, location, true)
                    && (new_place.local != place.local
                        || new_place.projection.len() < place.projection.len())
                {
                    *place = new_place;
                    self.reused_locals.insert(new_place.local);
                }
                Some(value)
            }
            Err(place_ref) => {
                // No value, but we may still have found a shorter spelling
                // of the same place.
                if place_ref.local != place.local
                    || place_ref.projection.len() < place.projection.len()
                {
                    *place = place_ref.project_deeper(&[], self.tcx);
                    self.reused_locals.insert(place_ref.local);
                }
                None
            }
        }
    }
1001
    /// Simplify `operand` and compute its value. Rewrites the operand into a
    /// constant or runtime-checks flag when its value is known to be one.
    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_operand(
        &mut self,
        operand: &mut Operand<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let value = match *operand {
            // Runtime-checks flags are booleans.
            Operand::RuntimeChecks(c) => self.insert(self.tcx.types.bool, Value::RuntimeChecks(c)),
            Operand::Constant(ref constant) => self.insert_constant(constant.const_),
            Operand::Copy(ref mut place) | Operand::Move(ref mut place) => {
                self.simplify_place_value(place, location)?
            }
        };
        // Prefer materializing the operand as a constant when possible.
        if let Some(const_) = self.try_as_constant(value) {
            *operand = Operand::Constant(Box::new(const_));
        } else if let Value::RuntimeChecks(c) = self.get(value) {
            *operand = Operand::RuntimeChecks(c);
        }
        Some(value)
    }
1022
    /// Simplify `rvalue` and compute its value, dispatching to the dedicated
    /// simplification routine for each rvalue kind. May rewrite the rvalue
    /// or its operands to simpler equivalents.
    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_rvalue(
        &mut self,
        lhs: &Place<'tcx>,
        rvalue: &mut Rvalue<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let value = match *rvalue {
            Rvalue::Use(ref mut operand) => return self.simplify_operand(operand, location),

            Rvalue::Repeat(ref mut op, amount) => {
                let op = self.simplify_operand(op, location)?;
                Value::Repeat(op, amount)
            }
            Rvalue::Aggregate(..) => return self.simplify_aggregate(rvalue, location),
            Rvalue::Ref(_, borrow_kind, ref mut place) => {
                self.simplify_place_projection(place, location);
                return self.new_pointer(*place, AddressKind::Ref(borrow_kind));
            }
            Rvalue::RawPtr(mutbl, ref mut place) => {
                self.simplify_place_projection(place, location);
                return self.new_pointer(*place, AddressKind::Address(mutbl));
            }
            // Wrapping in an unsafe binder is value-preserving: model it as
            // a transmute.
            Rvalue::WrapUnsafeBinder(ref mut op, _) => {
                let value = self.simplify_operand(op, location)?;
                Value::Cast { kind: CastKind::Transmute, value }
            }

            Rvalue::Cast(ref mut kind, ref mut value, to) => {
                return self.simplify_cast(kind, value, to, location);
            }
            Rvalue::BinaryOp(op, box (ref mut lhs, ref mut rhs)) => {
                return self.simplify_binary(op, lhs, rhs, location);
            }
            Rvalue::UnaryOp(op, ref mut arg_op) => {
                return self.simplify_unary(op, arg_op, location);
            }
            Rvalue::Discriminant(ref mut place) => {
                let place = self.simplify_place_value(place, location)?;
                // A discriminant of a known aggregate folds to a constant.
                if let Some(discr) = self.simplify_discriminant(place) {
                    return Some(discr);
                }
                Value::Discriminant(place)
            }

            // Thread-local addresses differ between threads: never a value.
            Rvalue::ThreadLocalRef(..) => return None,
            Rvalue::CopyForDeref(_) => {
                bug!("forbidden in runtime MIR: {rvalue:?}")
            }
        };
        let ty = rvalue.ty(self.local_decls, self.tcx);
        Some(self.insert(ty, value))
    }
1080
1081 fn simplify_discriminant(&mut self, place: VnIndex) -> Option<VnIndex> {
1082 let enum_ty = self.ty(place);
1083 if enum_ty.is_enum()
1084 && let Value::Aggregate(variant, _) = self.get(place)
1085 {
1086 let discr = self.ecx.discriminant_for_variant(enum_ty, variant).discard_err()?;
1087 return Some(self.insert_scalar(discr.layout.ty, discr.to_scalar()));
1088 }
1089
1090 None
1091 }
1092
1093 fn try_as_place_elem(
1094 &mut self,
1095 ty: Ty<'tcx>,
1096 proj: ProjectionElem<VnIndex, ()>,
1097 loc: Location,
1098 ) -> Option<PlaceElem<'tcx>> {
1099 proj.try_map(
1100 |value| {
1101 let local = self.try_as_local(value, loc)?;
1102 self.reused_locals.insert(local);
1103 Some(local)
1104 },
1105 |()| ty,
1106 )
1107 }
1108
    /// Detect an aggregate rebuilt field-by-field from a single source value
    /// (e.g. `Foo { x: s.x, y: s.y }`) and reuse the source value directly.
    fn simplify_aggregate_to_copy(
        &mut self,
        ty: Ty<'tcx>,
        variant_index: VariantIdx,
        fields: &[VnIndex],
    ) -> Option<VnIndex> {
        // Use the first field to guess the candidate source value.
        let Some(&first_field) = fields.first() else { return None };
        let Value::Projection(copy_from_value, _) = self.get(first_field) else { return None };

        // Bail unless every field `i` is exactly `copy_from_value.i`.
        if fields.iter().enumerate().any(|(index, &v)| {
            if let Value::Projection(pointer, ProjectionElem::Field(from_index, _)) = self.get(v)
                && copy_from_value == pointer
                && from_index.index() == index
            {
                return false;
            }
            true
        }) {
            return None;
        }

        // If the fields were read through a variant downcast, the copy is
        // only valid when we rebuild that same variant.
        let mut copy_from_local_value = copy_from_value;
        if let Value::Projection(pointer, proj) = self.get(copy_from_value)
            && let ProjectionElem::Downcast(_, read_variant) = proj
        {
            if variant_index == read_variant {
                copy_from_local_value = pointer;
            } else {
                return None;
            }
        }

        // The source must have exactly the aggregate's type for the copy to
        // be valid.
        if self.ty(copy_from_local_value) == ty { Some(copy_from_local_value) } else { None }
    }
1147
    /// Simplify an `Rvalue::Aggregate` and compute its value. May rewrite the
    /// rvalue into a cheaper form: a ZST constant, a `Repeat`, or a plain
    /// copy of an existing place.
    fn simplify_aggregate(
        &mut self,
        rvalue: &mut Rvalue<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let tcx = self.tcx;
        let ty = rvalue.ty(self.local_decls, tcx);

        let Rvalue::Aggregate(box ref kind, ref mut field_ops) = *rvalue else { bug!() };

        if field_ops.is_empty() {
            // A fieldless aggregate is a ZST constant, except for enums
            // (which still carry a discriminant) and coroutines.
            let is_zst = match *kind {
                AggregateKind::Array(..)
                | AggregateKind::Tuple
                | AggregateKind::Closure(..)
                | AggregateKind::CoroutineClosure(..) => true,
                AggregateKind::Adt(did, ..) => tcx.def_kind(did) != DefKind::Enum,
                AggregateKind::Coroutine(..) => false,
                AggregateKind::RawPtr(..) => bug!("MIR for RawPtr aggregate must have 2 fields"),
            };

            if is_zst {
                return Some(self.insert_constant(Const::zero_sized(ty)));
            }
        }

        // Value-number each field operand; unknown fields become opaques.
        let fields = self.arena.alloc_from_iter(field_ops.iter_mut().map(|op| {
            self.simplify_operand(op, location)
                .unwrap_or_else(|| self.new_opaque(op.ty(self.local_decls, self.tcx)))
        }));

        let variant_index = match *kind {
            AggregateKind::Array(..) | AggregateKind::Tuple => {
                assert!(!field_ops.is_empty());
                FIRST_VARIANT
            }
            AggregateKind::Closure(..)
            | AggregateKind::CoroutineClosure(..)
            | AggregateKind::Coroutine(..) => FIRST_VARIANT,
            AggregateKind::Adt(_, variant_index, _, _, None) => variant_index,
            // A union literal: record the single active field.
            AggregateKind::Adt(_, _, _, _, Some(active_field)) => {
                let field = *fields.first()?;
                return Some(self.insert(ty, Value::Union(active_field, field)));
            }
            AggregateKind::RawPtr(..) => {
                assert_eq!(field_ops.len(), 2);
                let [mut pointer, metadata] = fields.try_into().unwrap();

                // Peel ptr-to-ptr casts that keep mutability and whose source
                // pointee is sized (so pointer metadata is unaffected).
                let mut was_updated = false;
                while let Value::Cast { kind: CastKind::PtrToPtr, value: cast_value } =
                    self.get(pointer)
                    && let ty::RawPtr(from_pointee_ty, from_mtbl) = self.ty(cast_value).kind()
                    && let ty::RawPtr(_, output_mtbl) = ty.kind()
                    && from_mtbl == output_mtbl
                    && from_pointee_ty.is_sized(self.tcx, self.typing_env())
                {
                    pointer = cast_value;
                    was_updated = true;
                }

                // Reflect the simplified pointer back into the MIR operand.
                if was_updated && let Some(op) = self.try_as_operand(pointer, location) {
                    field_ops[FieldIdx::ZERO] = op;
                }

                return Some(self.insert(ty, Value::RawPtr { pointer, metadata }));
            }
        };

        // Large arrays of one repeated value are rewritten as `Repeat`.
        if ty.is_array()
            && fields.len() > 4
            && let Ok(&first) = fields.iter().all_equal_value()
        {
            let len = ty::Const::from_target_usize(self.tcx, fields.len().try_into().unwrap());
            if let Some(op) = self.try_as_operand(first, location) {
                *rvalue = Rvalue::Repeat(op, len);
            }
            return Some(self.insert(ty, Value::Repeat(first, len)));
        }

        // An aggregate rebuilt from all fields of another value of the same
        // type is just a copy of that value.
        if let Some(value) = self.simplify_aggregate_to_copy(ty, variant_index, &fields) {
            if let Some(place) = self.try_as_place(value, location, true) {
                self.reused_locals.insert(place.local);
                *rvalue = Rvalue::Use(Operand::Copy(place));
            }
            return Some(value);
        }

        Some(self.insert(ty, Value::Aggregate(variant_index, fields)))
    }
1241
    /// Simplifies a unary operation, rewriting `arg_op` in place when a
    /// simpler equivalent operand is found, and returning the value number of
    /// the (possibly folded) result.
    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_unary(
        &mut self,
        op: UnOp,
        arg_op: &mut Operand<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let mut arg_index = self.simplify_operand(arg_op, location)?;
        let arg_ty = self.ty(arg_index);
        let ret_ty = op.ty(self.tcx, arg_ty);

        // `PtrMetadata` only reads the metadata half of a pointer, so we can
        // look through operations that provably leave that metadata unchanged.
        if op == UnOp::PtrMetadata {
            let mut was_updated = false;
            loop {
                arg_index = match self.get(arg_index) {
                    // A ptr-to-ptr cast between types with the same metadata
                    // type: read the metadata from the uncast pointer instead.
                    Value::Cast { kind: CastKind::PtrToPtr, value: inner }
                        if self.pointers_have_same_metadata(self.ty(inner), arg_ty) =>
                    {
                        inner
                    }

                    // Unsizing `*[T; N]` to `*[T]` produces metadata equal to
                    // the array length `N`, a compile-time constant.
                    Value::Cast {
                        kind: CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _),
                        value: from,
                    } if let Some(from) = self.ty(from).builtin_deref(true)
                        && let ty::Array(_, len) = from.kind()
                        && let Some(to) = self.ty(arg_index).builtin_deref(true)
                        && let ty::Slice(..) = to.kind() =>
                    {
                        return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len)));
                    }

                    // A bare reborrow `&raw *p` carries the same metadata as `p`.
                    Value::Address { base: AddressBase::Deref(reborrowed), projection, .. }
                        if projection.is_empty() =>
                    {
                        reborrowed
                    }

                    _ => break,
                };
                was_updated = true;
            }

            // Reflect the simplified argument back into the MIR operand.
            if was_updated && let Some(op) = self.try_as_operand(arg_index, location) {
                *arg_op = op;
            }
        }

        let value = match (op, self.get(arg_index)) {
            // Double negation cancels out.
            (UnOp::Not, Value::UnaryOp(UnOp::Not, inner)) => return Some(inner),
            (UnOp::Neg, Value::UnaryOp(UnOp::Neg, inner)) => return Some(inner),
            // `!(a == b)` is `a != b`, and vice versa.
            (UnOp::Not, Value::BinaryOp(BinOp::Eq, lhs, rhs)) => {
                Value::BinaryOp(BinOp::Ne, lhs, rhs)
            }
            (UnOp::Not, Value::BinaryOp(BinOp::Ne, lhs, rhs)) => {
                Value::BinaryOp(BinOp::Eq, lhs, rhs)
            }
            // The metadata of an aggregate-built raw pointer is known directly.
            (UnOp::PtrMetadata, Value::RawPtr { metadata, .. }) => return Some(metadata),
            // The metadata of an array-to-slice unsizing is the array length.
            (
                UnOp::PtrMetadata,
                Value::Cast {
                    kind: CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _),
                    value: inner,
                },
            ) if let ty::Slice(..) = arg_ty.builtin_deref(true).unwrap().kind()
                && let ty::Array(_, len) = self.ty(inner).builtin_deref(true).unwrap().kind() =>
            {
                return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len)));
            }
            _ => Value::UnaryOp(op, arg_index),
        };
        Some(self.insert(ret_ty, value))
    }
1328
    /// Simplifies a binary operation, rewriting both operands in place and
    /// returning the value number of the (possibly folded) result.
    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_binary(
        &mut self,
        op: BinOp,
        lhs_operand: &mut Operand<'tcx>,
        rhs_operand: &mut Operand<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let lhs = self.simplify_operand(lhs_operand, location);
        let rhs = self.simplify_operand(rhs_operand, location);

        // Only bail out after simplifying both operands, so each one has been
        // rewritten in place even if the other could not be simplified.
        let mut lhs = lhs?;
        let mut rhs = rhs?;

        let lhs_ty = self.ty(lhs);

        // A pointer comparison is unaffected by ptr-to-ptr casts when both
        // sides are cast from the same source type and the metadata is
        // preserved, so compare the original (uncast) pointers instead.
        if let BinOp::Eq | BinOp::Ne | BinOp::Lt | BinOp::Le | BinOp::Gt | BinOp::Ge = op
            && lhs_ty.is_any_ptr()
            && let Value::Cast { kind: CastKind::PtrToPtr, value: lhs_value } = self.get(lhs)
            && let Value::Cast { kind: CastKind::PtrToPtr, value: rhs_value } = self.get(rhs)
            && let lhs_from = self.ty(lhs_value)
            && lhs_from == self.ty(rhs_value)
            && self.pointers_have_same_metadata(lhs_from, lhs_ty)
        {
            lhs = lhs_value;
            rhs = rhs_value;
            // Only rewrite the MIR if both sides can be materialized.
            if let Some(lhs_op) = self.try_as_operand(lhs, location)
                && let Some(rhs_op) = self.try_as_operand(rhs, location)
            {
                *lhs_operand = lhs_op;
                *rhs_operand = rhs_op;
            }
        }

        // Try algebraic identities and constant folding first.
        if let Some(value) = self.simplify_binary_inner(op, lhs_ty, lhs, rhs) {
            return Some(value);
        }
        let ty = op.ty(self.tcx, lhs_ty, self.ty(rhs));
        let value = Value::BinaryOp(op, lhs, rhs);
        Some(self.insert(ty, value))
    }
1374
    /// Applies algebraic identities (neutral/absorbing elements, `x - x`,
    /// self-comparisons) and bit-level constant folding to a binary op.
    /// Returns `None` when no identity applies.
    fn simplify_binary_inner(
        &mut self,
        op: BinOp,
        lhs_ty: Ty<'tcx>,
        lhs: VnIndex,
        rhs: VnIndex,
    ) -> Option<VnIndex> {
        // Restrict to types for which the identities below are exact;
        // notably, floating-point arithmetic would violate several of them.
        let reasonable_ty =
            lhs_ty.is_integral() || lhs_ty.is_bool() || lhs_ty.is_char() || lhs_ty.is_any_ptr();
        if !reasonable_ty {
            return None;
        }

        let layout = self.ecx.layout_of(lhs_ty).ok()?;

        // Evaluate an operand down to its raw bits, if it is a known scalar
        // constant; scalar pairs are not handled.
        let mut as_bits = |value: VnIndex| {
            let constant = self.eval_to_const(value)?;
            if layout.backend_repr.is_scalar() {
                let scalar = self.ecx.read_scalar(constant).discard_err()?;
                scalar.to_bits(constant.layout.size).discard_err()
            } else {
                None
            }
        };

        // Each operand is either known constant bits (`Left`) or a symbolic
        // value number (`Right`).
        use Either::{Left, Right};
        let a = as_bits(lhs).map_or(Right(lhs), Left);
        let b = as_bits(rhs).map_or(Right(rhs), Left);

        let result = match (op, a, b) {
            // Neutral elements: `identity op x == x` / `x op identity == x`.
            (
                BinOp::Add
                | BinOp::AddWithOverflow
                | BinOp::AddUnchecked
                | BinOp::BitOr
                | BinOp::BitXor,
                Left(0),
                Right(p),
            )
            | (
                BinOp::Add
                | BinOp::AddWithOverflow
                | BinOp::AddUnchecked
                | BinOp::BitOr
                | BinOp::BitXor
                | BinOp::Sub
                | BinOp::SubWithOverflow
                | BinOp::SubUnchecked
                | BinOp::Offset
                | BinOp::Shl
                | BinOp::Shr,
                Right(p),
                Left(0),
            )
            | (BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked, Left(1), Right(p))
            | (
                BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked | BinOp::Div,
                Right(p),
                Left(1),
            ) => p,
            // `x & !0 == x` (for bools, `x & true == x`).
            (BinOp::BitAnd, Right(p), Left(ones)) | (BinOp::BitAnd, Left(ones), Right(p))
                if ones == layout.size.truncate(u128::MAX)
                    || (layout.ty.is_bool() && ones == 1) =>
            {
                p
            }
            // Absorbing elements: the result is zero regardless of the other
            // operand.
            (
                BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked | BinOp::BitAnd,
                _,
                Left(0),
            )
            | (BinOp::Rem, _, Left(1))
            | (
                BinOp::Mul
                | BinOp::MulWithOverflow
                | BinOp::MulUnchecked
                | BinOp::Div
                | BinOp::Rem
                | BinOp::BitAnd
                | BinOp::Shl
                | BinOp::Shr,
                Left(0),
                _,
            ) => self.insert_scalar(lhs_ty, Scalar::from_uint(0u128, layout.size)),
            // `x | !0 == !0` (for bools, `x | true == true`).
            (BinOp::BitOr, _, Left(ones)) | (BinOp::BitOr, Left(ones), _)
                if ones == layout.size.truncate(u128::MAX)
                    || (layout.ty.is_bool() && ones == 1) =>
            {
                self.insert_scalar(lhs_ty, Scalar::from_uint(ones, layout.size))
            }
            // `x - x == 0` and `x ^ x == 0` (matches both equal constants and
            // identical value numbers).
            (BinOp::Sub | BinOp::SubWithOverflow | BinOp::SubUnchecked | BinOp::BitXor, a, b)
                if a == b =>
            {
                self.insert_scalar(lhs_ty, Scalar::from_uint(0u128, layout.size))
            }
            // Comparisons between known constants, or of a value with itself.
            (BinOp::Eq, Left(a), Left(b)) => self.insert_bool(a == b),
            (BinOp::Eq, a, b) if a == b => self.insert_bool(true),
            (BinOp::Ne, Left(a), Left(b)) => self.insert_bool(a != b),
            (BinOp::Ne, a, b) if a == b => self.insert_bool(false),
            _ => return None,
        };

        // Overflowing ops return `(value, overflowed)`; the identities above
        // never overflow, so pair the result with `false`.
        if op.is_overflowing() {
            let ty = Ty::new_tup(self.tcx, &[self.ty(result), self.tcx.types.bool]);
            let false_val = self.insert_bool(false);
            Some(self.insert_tuple(ty, &[result, false_val]))
        } else {
            Some(result)
        }
    }
1497
    /// Simplifies a cast by iteratively peeling/fusing layers of casts and
    /// transmutes, rewriting the MIR cast kind and operand in place when a
    /// simpler form is found.
    fn simplify_cast(
        &mut self,
        initial_kind: &mut CastKind,
        initial_operand: &mut Operand<'tcx>,
        to: Ty<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        use CastKind::*;
        use rustc_middle::ty::adjustment::PointerCoercion::*;

        let mut kind = *initial_kind;
        let mut value = self.simplify_operand(initial_operand, location)?;
        let mut from = self.ty(value);
        // A cast between identical types is a no-op.
        if from == to {
            return Some(value);
        }

        // NOTE(review): fn-item/closure reification gets a fresh opaque value,
        // presumably because distinct reifications are not guaranteed to yield
        // identical function pointers — confirm against rustc semantics.
        if let CastKind::PointerCoercion(ReifyFnPointer(_) | ClosureFnPointer(_), _) = kind {
            return Some(self.new_opaque(to));
        }

        // Iterate to a fixpoint, applying one simplification per round.
        let mut was_ever_updated = false;
        loop {
            let mut was_updated_this_iteration = false;

            // A transmute between raw pointers with identical metadata types
            // is just a pointer cast.
            if let Transmute = kind
                && from.is_raw_ptr()
                && to.is_raw_ptr()
                && self.pointers_have_same_metadata(from, to)
            {
                kind = PtrToPtr;
                was_updated_this_iteration = true;
            }

            // Casting a wide-pointer aggregate to a thin pointer keeps only
            // the data half, so cast from that data pointer directly.
            if let PtrToPtr = kind
                && let Value::RawPtr { pointer, .. } = self.get(value)
                && let ty::RawPtr(to_pointee, _) = to.kind()
                && to_pointee.is_sized(self.tcx, self.typing_env())
            {
                from = self.ty(pointer);
                value = pointer;
                was_updated_this_iteration = true;
                if from == to {
                    return Some(pointer);
                }
            }

            // Transmuting an aggregate whose value lives entirely in a single
            // field is the same as transmuting that field.
            if let Transmute = kind
                && let Value::Aggregate(variant_idx, field_values) = self.get(value)
                && let Some((field_idx, field_ty)) =
                    self.value_is_all_in_one_field(from, variant_idx)
            {
                from = field_ty;
                value = field_values[field_idx.as_usize()];
                was_updated_this_iteration = true;
                if field_ty == to {
                    return Some(value);
                }
            }

            // Fuse two consecutive casts into one, when that is sound.
            if let Value::Cast { kind: inner_kind, value: inner_value } = self.get(value) {
                let inner_from = self.ty(inner_value);
                let new_kind = match (inner_kind, kind) {
                    // Two pointer casts collapse into one.
                    (PtrToPtr, PtrToPtr) => Some(PtrToPtr),
                    // A metadata-preserving pointer cast before a transmute
                    // can be absorbed into the transmute.
                    (PtrToPtr, Transmute) if self.pointers_have_same_metadata(inner_from, from) => {
                        Some(Transmute)
                    }
                    // Likewise for a pointer cast after a transmute.
                    (Transmute, PtrToPtr) if self.pointers_have_same_metadata(from, to) => {
                        Some(Transmute)
                    }
                    // Two transmutes fuse unless the intermediate type has a
                    // niche/validity invariant the backend may rely on.
                    (Transmute, Transmute)
                        if !self.transmute_may_have_niche_of_interest_to_backend(
                            inner_from, from, to,
                        ) =>
                    {
                        Some(Transmute)
                    }
                    _ => None,
                };
                if let Some(new_kind) = new_kind {
                    kind = new_kind;
                    from = inner_from;
                    value = inner_value;
                    was_updated_this_iteration = true;
                    if inner_from == to {
                        return Some(inner_value);
                    }
                }
            }

            if was_updated_this_iteration {
                was_ever_updated = true;
            } else {
                break;
            }
        }

        // Reflect the simplified cast back into the MIR statement.
        if was_ever_updated && let Some(op) = self.try_as_operand(value, location) {
            *initial_operand = op;
            *initial_kind = kind;
        }

        Some(self.insert(to, Value::Cast { kind, value }))
    }
1623
1624 fn pointers_have_same_metadata(&self, left_ptr_ty: Ty<'tcx>, right_ptr_ty: Ty<'tcx>) -> bool {
1625 let left_meta_ty = left_ptr_ty.pointee_metadata_ty_or_projection(self.tcx);
1626 let right_meta_ty = right_ptr_ty.pointee_metadata_ty_or_projection(self.tcx);
1627 if left_meta_ty == right_meta_ty {
1628 true
1629 } else if let Ok(left) =
1630 self.tcx.try_normalize_erasing_regions(self.typing_env(), left_meta_ty)
1631 && let Ok(right) =
1632 self.tcx.try_normalize_erasing_regions(self.typing_env(), right_meta_ty)
1633 {
1634 left == right
1635 } else {
1636 false
1637 }
1638 }
1639
    /// Returns whether fusing two transmutes `from_ty -> middle_ty -> to_ty`
    /// into a single `from_ty -> to_ty` transmute could lose a validity
    /// invariant of `middle_ty` that the backend might exploit. When this
    /// returns `true`, the intermediate transmute must be kept.
    fn transmute_may_have_niche_of_interest_to_backend(
        &self,
        from_ty: Ty<'tcx>,
        middle_ty: Ty<'tcx>,
        to_ty: Ty<'tcx>,
    ) -> bool {
        // Without a known layout, conservatively assume there is a niche.
        let Ok(middle_layout) = self.ecx.layout_of(middle_ty) else {
            return true;
        };

        // An uninhabited intermediate type is itself a validity assertion.
        if middle_layout.uninhabited {
            return true;
        }

        match middle_layout.backend_repr {
            BackendRepr::Scalar(mid) => {
                if mid.is_always_valid(&self.ecx) {
                    // The middle scalar allows every bit pattern: no niche.
                    false
                } else if let Ok(from_layout) = self.ecx.layout_of(from_ty)
                    && !from_layout.uninhabited
                    && from_layout.size == middle_layout.size
                    && let BackendRepr::Scalar(from_a) = from_layout.backend_repr
                    && let mid_range = mid.valid_range(&self.ecx)
                    && let from_range = from_a.valid_range(&self.ecx)
                    && mid_range.contains_range(from_range, middle_layout.size)
                {
                    // Every value valid for `from_ty` is already valid for the
                    // middle type, so the middle step adds no information.
                    false
                } else if let Ok(to_layout) = self.ecx.layout_of(to_ty)
                    && !to_layout.uninhabited
                    && to_layout.size == middle_layout.size
                    && let BackendRepr::Scalar(to_a) = to_layout.backend_repr
                    && let mid_range = mid.valid_range(&self.ecx)
                    && let to_range = to_a.valid_range(&self.ecx)
                    && mid_range.contains_range(to_range, middle_layout.size)
                {
                    // Likewise, `to_ty` restricts at least as much as the
                    // middle type does.
                    false
                } else {
                    true
                }
            }
            // A scalar pair carries a niche if either half restricts values.
            BackendRepr::ScalarPair(a, b) => {
                !a.is_always_valid(&self.ecx) || !b.is_always_valid(&self.ecx)
            }
            // NOTE(review): vector and memory representations are assumed to
            // carry no scalar validity ranges of interest here — confirm.
            BackendRepr::SimdVector { .. }
            | BackendRepr::ScalableVector { .. }
            | BackendRepr::Memory { .. } => false,
        }
    }
1707
    /// If a value of type `ty` (in the given `variant`) is stored entirely in
    /// a single field, returns that field's index and type. Used to look
    /// through transmutes of single-field wrappers.
    fn value_is_all_in_one_field(
        &self,
        ty: Ty<'tcx>,
        variant: VariantIdx,
    ) -> Option<(FieldIdx, Ty<'tcx>)> {
        if let Ok(layout) = self.ecx.layout_of(ty)
            && let abi::Variants::Single { index } = layout.variants
            && index == variant
            && let Some((field_idx, field_layout)) = layout.non_1zst_field(&self.ecx)
            && layout.size == field_layout.size
        {
            // The unique non-1-ZST field covers the entire layout.
            Some((field_idx, field_layout.ty))
        } else if let ty::Adt(adt, args) = ty.kind()
            && adt.is_struct()
            && adt.repr().transparent()
            && let [single_field] = adt.non_enum_variant().fields.raw.as_slice()
        {
            // Even without a usable layout, a `repr(transparent)` struct with
            // exactly one field is represented as that field by definition.
            Some((FieldIdx::ZERO, single_field.ty(self.tcx, args)))
        } else {
            None
        }
    }
1733}
1734
1735fn is_deterministic(c: Const<'_>) -> bool {
1744 if c.ty().is_primitive() {
1746 return true;
1747 }
1748
1749 match c {
1750 Const::Ty(..) => false,
1754 Const::Unevaluated(..) => false,
1756 Const::Val(..) => true,
1760 }
1761}
1762
1763fn may_have_provenance(tcx: TyCtxt<'_>, value: ConstValue, size: Size) -> bool {
1766 match value {
1767 ConstValue::ZeroSized | ConstValue::Scalar(Scalar::Int(_)) => return false,
1768 ConstValue::Scalar(Scalar::Ptr(..)) | ConstValue::Slice { .. } => return true,
1769 ConstValue::Indirect { alloc_id, offset } => !tcx
1770 .global_alloc(alloc_id)
1771 .unwrap_memory()
1772 .inner()
1773 .provenance()
1774 .range_empty(AllocRange::from(offset..offset + size), &tcx),
1775 }
1776}
1777
1778fn op_to_prop_const<'tcx>(
1779 ecx: &mut InterpCx<'tcx, DummyMachine>,
1780 op: &OpTy<'tcx>,
1781) -> Option<ConstValue> {
1782 if op.layout.is_unsized() {
1784 return None;
1785 }
1786
1787 if op.layout.is_zst() {
1789 return Some(ConstValue::ZeroSized);
1790 }
1791
1792 if !op.is_immediate_uninit()
1797 && !matches!(op.layout.backend_repr, BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..))
1798 {
1799 return None;
1800 }
1801
1802 if let BackendRepr::Scalar(abi::Scalar::Initialized { .. }) = op.layout.backend_repr
1804 && let Some(scalar) = ecx.read_scalar(op).discard_err()
1805 {
1806 if !scalar.try_to_scalar_int().is_ok() {
1807 return None;
1811 }
1812 return Some(ConstValue::Scalar(scalar));
1813 }
1814
1815 if let Either::Left(mplace) = op.as_mplace_or_imm() {
1818 let (size, _align) = ecx.size_and_align_of_val(&mplace).discard_err()??;
1819
1820 let alloc_ref = ecx.get_ptr_alloc(mplace.ptr(), size).discard_err()??;
1824 if alloc_ref.has_provenance() {
1825 return None;
1826 }
1827
1828 let pointer = mplace.ptr().into_pointer_or_addr().ok()?;
1829 let (prov, offset) = pointer.prov_and_relative_offset();
1830 let alloc_id = prov.alloc_id();
1831 intern_const_alloc_for_constprop(ecx, alloc_id).discard_err()?;
1832
1833 if let GlobalAlloc::Memory(alloc) = ecx.tcx.global_alloc(alloc_id)
1837 && alloc.inner().align >= op.layout.align.abi
1840 {
1841 return Some(ConstValue::Indirect { alloc_id, offset });
1842 }
1843 }
1844
1845 let alloc_id =
1847 ecx.intern_with_temp_alloc(op.layout, |ecx, dest| ecx.copy_op(op, dest)).discard_err()?;
1848 Some(ConstValue::Indirect { alloc_id, offset: Size::ZERO })
1849}
1850
impl<'tcx> VnState<'_, '_, 'tcx> {
    /// Builds an operand equivalent to the value `index`: a constant if one is
    /// known, a runtime-checks flag, or a copy of a dominating place.
    fn try_as_operand(&mut self, index: VnIndex, location: Location) -> Option<Operand<'tcx>> {
        if let Some(const_) = self.try_as_constant(index) {
            Some(Operand::Constant(Box::new(const_)))
        } else if let Value::RuntimeChecks(c) = self.get(index) {
            Some(Operand::RuntimeChecks(c))
        } else if let Some(place) = self.try_as_place(index, location, false) {
            // Record the reuse so storage statements can be fixed up later.
            self.reused_locals.insert(place.local);
            Some(Operand::Copy(place))
        } else {
            None
        }
    }

    /// If the value `index` is a constant that may be duplicated, returns it
    /// as a `ConstOperand`.
    fn try_as_constant(&mut self, index: VnIndex) -> Option<ConstOperand<'tcx>> {
        let value = self.get(index);

        // Prefer an already-evaluated `Const::Val` with no disambiguator
        // (a disambiguator marks constants kept distinct on purpose).
        if let Value::Constant { value, disambiguator: None } = value
            && let Const::Val(..) = value
        {
            return Some(ConstOperand { span: DUMMY_SP, user_ty: None, const_: value });
        }

        // Otherwise, try evaluating the value to a propagatable constant.
        if let Some(value) = self.try_as_evaluated_constant(index) {
            return Some(ConstOperand { span: DUMMY_SP, user_ty: None, const_: value });
        }

        // Fall back to any other non-disambiguated constant form.
        if let Value::Constant { value, disambiguator: None } = value {
            return Some(ConstOperand { span: DUMMY_SP, user_ty: None, const_: value });
        }

        None
    }

    /// Evaluates `index` with the interpreter and converts the result into a
    /// `Const::Val` suitable for propagation, if possible.
    fn try_as_evaluated_constant(&mut self, index: VnIndex) -> Option<Const<'tcx>> {
        let op = self.eval_to_const(index)?;
        if op.layout.is_unsized() {
            // Do not attempt to propagate unsized values.
            return None;
        }

        let value = op_to_prop_const(&mut self.ecx, op)?;

        // NOTE(review): values carrying provenance are rejected — presumably
        // because duplicating them could duplicate pointer identity; confirm.
        if may_have_provenance(self.tcx, value, op.layout.size) {
            return None;
        }

        Some(Const::Val(value, op.layout.ty))
    }

    /// Reconstructs a place whose read yields the value `index`, walking back
    /// through `Projection` values until a dominating local is found.
    #[instrument(level = "trace", skip(self), ret)]
    fn try_as_place(
        &mut self,
        mut index: VnIndex,
        loc: Location,
        allow_complex_projection: bool,
    ) -> Option<Place<'tcx>> {
        let mut projection = SmallVec::<[PlaceElem<'tcx>; 1]>::new();
        loop {
            if let Some(local) = self.try_as_local(index, loc) {
                // Elements were pushed innermost-first; a place lists them
                // outermost-first.
                projection.reverse();
                let place =
                    Place { local, projection: self.tcx.mk_place_elems(projection.as_slice()) };
                return Some(place);
            } else if projection.last() == Some(&PlaceElem::Deref) {
                // The pointer under a `Deref` must itself resolve to a local;
                // give up if it does not.
                return None;
            } else if let Value::Projection(pointer, proj) = self.get(index)
                && (allow_complex_projection || proj.is_stable_offset())
                && let Some(proj) = self.try_as_place_elem(self.ty(index), proj, loc)
            {
                if proj == PlaceElem::Deref {
                    // Only look through dereferences of immutable-reference
                    // arguments; any other pointee might have been mutated.
                    match self.get(pointer) {
                        Value::Argument(_)
                            if let Some(Mutability::Not) = self.ty(pointer).ref_mutability() => {}
                        _ => {
                            return None;
                        }
                    }
                }
                projection.push(proj);
                index = pointer;
            } else {
                return None;
            }
        }
    }

    /// Returns a local that holds the value `index` and whose assignment
    /// dominates `loc`, if any.
    fn try_as_local(&mut self, index: VnIndex, loc: Location) -> Option<Local> {
        let other = self.rev_locals.get(index)?;
        other
            .iter()
            .find(|&&other| self.ssa.assignment_dominates(&self.dominators, other, loc))
            .copied()
    }
}
1964
impl<'tcx> MutVisitor<'tcx> for VnState<'_, '_, 'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    fn visit_place(&mut self, place: &mut Place<'tcx>, context: PlaceContext, location: Location) {
        // Simplify the projection in place before recursing into it.
        self.simplify_place_projection(place, location);
        self.super_place(place, context, location);
    }

    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) {
        // `simplify_operand` rewrites the operand in place when possible.
        self.simplify_operand(operand, location);
        self.super_operand(operand, location);
    }

    fn visit_assign(
        &mut self,
        lhs: &mut Place<'tcx>,
        rvalue: &mut Rvalue<'tcx>,
        location: Location,
    ) {
        self.simplify_place_projection(lhs, location);

        let value = self.simplify_rvalue(lhs, rvalue, location);
        if let Some(value) = value {
            // Replace the rvalue with a constant, or with a copy of another
            // place holding the same value — but avoid rewriting a statement
            // that is already a use of that exact place.
            if let Some(const_) = self.try_as_constant(value) {
                *rvalue = Rvalue::Use(Operand::Constant(Box::new(const_)));
            } else if let Some(place) = self.try_as_place(value, location, false)
                && *rvalue != Rvalue::Use(Operand::Move(place))
                && *rvalue != Rvalue::Use(Operand::Copy(place))
            {
                *rvalue = Rvalue::Use(Operand::Copy(place));
                self.reused_locals.insert(place.local);
            }
        }

        // Record the value held by SSA locals for later reuse. NOTE(review):
        // the type equality check presumably guards against the local's
        // declared type differing from the rvalue's type — confirm.
        if let Some(local) = lhs.as_local()
            && self.ssa.is_ssa(local)
            && let rvalue_ty = rvalue.ty(self.local_decls, self.tcx)
            && self.local_decls[local].ty == rvalue_ty
        {
            let value = value.unwrap_or_else(|| self.new_opaque(rvalue_ty));
            self.assign(local, value);
        }
    }

    fn visit_terminator(&mut self, terminator: &mut Terminator<'tcx>, location: Location) {
        // A call's return value is unknown here, so SSA destinations get a
        // fresh opaque value.
        if let Terminator { kind: TerminatorKind::Call { destination, .. }, .. } = terminator {
            if let Some(local) = destination.as_local()
                && self.ssa.is_ssa(local)
            {
                let ty = self.local_decls[local].ty;
                let opaque = self.new_opaque(ty);
                self.assign(local, opaque);
            }
        }
        self.super_terminator(terminator, location);
    }
}
2026
/// Post-pass cleanup: removes storage markers for locals that GVN reused in
/// new locations, and downgrades moves out of them to copies.
struct StorageRemover<'tcx> {
    tcx: TyCtxt<'tcx>,
    // Locals re-read through newly introduced copies; their original storage
    // ranges and move semantics may no longer be accurate.
    reused_locals: DenseBitSet<Local>,
}
2031
2032impl<'tcx> MutVisitor<'tcx> for StorageRemover<'tcx> {
2033 fn tcx(&self) -> TyCtxt<'tcx> {
2034 self.tcx
2035 }
2036
2037 fn visit_operand(&mut self, operand: &mut Operand<'tcx>, _: Location) {
2038 if let Operand::Move(place) = *operand
2039 && !place.is_indirect_first_projection()
2040 && self.reused_locals.contains(place.local)
2041 {
2042 *operand = Operand::Copy(place);
2043 }
2044 }
2045
2046 fn visit_statement(&mut self, stmt: &mut Statement<'tcx>, loc: Location) {
2047 match stmt.kind {
2048 StatementKind::StorageLive(l) | StatementKind::StorageDead(l)
2050 if self.reused_locals.contains(l) =>
2051 {
2052 stmt.make_nop(true)
2053 }
2054 _ => self.super_statement(stmt, loc),
2055 }
2056 }
2057}