use std::borrow::Cow;
use std::hash::{Hash, Hasher};

use either::Either;
use itertools::Itertools as _;
use rustc_abi::{self as abi, BackendRepr, FIRST_VARIANT, FieldIdx, Primitive, Size, VariantIdx};
use rustc_arena::DroplessArena;
use rustc_const_eval::const_eval::DummyMachine;
use rustc_const_eval::interpret::{
    ImmTy, Immediate, InterpCx, MemPlaceMeta, MemoryKind, OpTy, Projectable, Scalar,
    intern_const_alloc_for_constprop,
};
use rustc_data_structures::fx::FxHasher;
use rustc_data_structures::graph::dominators::Dominators;
use rustc_data_structures::hash_table::{Entry, HashTable};
use rustc_hir::def::DefKind;
use rustc_index::bit_set::DenseBitSet;
use rustc_index::{IndexVec, newtype_index};
use rustc_middle::bug;
use rustc_middle::mir::interpret::GlobalAlloc;
use rustc_middle::mir::visit::*;
use rustc_middle::mir::*;
use rustc_middle::ty::layout::HasTypingEnv;
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_span::DUMMY_SP;
use smallvec::SmallVec;
use tracing::{debug, instrument, trace};

use crate::ssa::SsaLocals;

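/// Global value numbering pass.
///
/// Each rvalue in the body is mapped to a `VnIndex`; whenever two rvalues map
/// to the same index, later occurrences can be replaced by a constant or by a
/// local that already holds that value.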
pub(super) struct GVN;

impl<'tcx> crate::MirPass<'tcx> for GVN {
    fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
        sess.mir_opt_level() >= 2
    }

    #[instrument(level = "trace", skip(self, tcx, body))]
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        debug!(def_id = ?body.source.def_id());

        let typing_env = body.typing_env(tcx);
        let ssa = SsaLocals::new(tcx, body, typing_env);
        let dominators = body.basic_blocks.dominators().clone();

        let arena = DroplessArena::default();
        let mut state =
            VnState::new(tcx, body, typing_env, &ssa, dominators, &body.local_decls, &arena);

        // Arguments hold opaque, but stable, values for the whole body.
        for local in body.args_iter().filter(|&local| ssa.is_ssa(local)) {
            let opaque = state.new_argument(body.local_decls[local].ty);
            state.assign(local, opaque);
        }

        let reverse_postorder = body.basic_blocks.reverse_postorder().to_vec();
        for bb in reverse_postorder {
            let data = &mut body.basic_blocks.as_mut_preserves_cfg()[bb];
            state.visit_basic_block_data(bb, data);
        }

        // Clean up the storage statements and operands of locals whose uses
        // were changed by the replacements above.
        StorageRemover { tcx, reused_locals: state.reused_locals }.visit_body_preserves_cfg(body);
    }

    fn is_required(&self) -> bool {
        false
    }
}

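/// Identifier for an equivalence class of values.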
newtype_index! {
    #[debug_format = "_v{}"]
    struct VnIndex {}
}

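/// Marker for values that must never compare equal to any other value,
/// including themselves. `insert_unique` guarantees they are never hashed or
/// compared, hence the unreachable impls below.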
#[derive(Copy, Clone, Debug, Eq)]
struct VnOpaque;
impl PartialEq for VnOpaque {
    fn eq(&self, _: &VnOpaque) -> bool {
        unreachable!()
    }
}
impl Hash for VnOpaque {
    fn hash<T: Hasher>(&self, _: &mut T) {
        unreachable!()
    }
}

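/// How the address of a place is taken: by reference or by raw pointer.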
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
enum AddressKind {
    Ref(BorrowKind),
    Address(RawPtrKind),
}

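/// Base of an address value: either a local, or the dereference of an
/// already-numbered pointer value.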
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
enum AddressBase {
    Local(Local),
    Deref(VnIndex),
}

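/// A value tracked by value numbering. Two occurrences with equal `Value` and
/// type are considered interchangeable.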
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
enum Value<'a, 'tcx> {
    // An opaque value, not equal to any other value.
    Opaque(VnOpaque),
    // A function argument, stable but unknown for the whole body.
    Argument(VnOpaque),
    // A MIR constant. Non-deterministic constants carry a disambiguator so
    // that distinct mentions are never merged.
    Constant {
        value: Const<'tcx>,
        disambiguator: Option<VnOpaque>,
    },

    // An aggregate value: the written variant and the field values.
    Aggregate(VariantIdx, &'a [VnIndex]),
    // A union value: the active field and its value.
    Union(FieldIdx, VnIndex),
    // A wide raw pointer, built from a data pointer and metadata.
    RawPtr {
        pointer: VnIndex,
        metadata: VnIndex,
    },
    // An array repeat expression.
    Repeat(VnIndex, ty::Const<'tcx>),
    // The address of a place. Each address receives a fresh provenance
    // marker, so two borrows of the same place are never merged.
    Address {
        base: AddressBase,
        projection: &'a [ProjectionElem<VnIndex, Ty<'tcx>>],
        kind: AddressKind,
        provenance: VnOpaque,
    },

    // A projection out of another value.
    Projection(VnIndex, ProjectionElem<VnIndex, ()>),
    // The discriminant of another value.
    Discriminant(VnIndex),

    // Operations.
    RuntimeChecks(RuntimeChecks),
    UnaryOp(UnOp, VnIndex),
    BinaryOp(BinOp, VnIndex, VnIndex),
    Cast {
        kind: CastKind,
        value: VnIndex,
    },
}

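/// Hash-consing table mapping each `(Value, Ty)` pair to a unique `VnIndex`.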
struct ValueSet<'a, 'tcx> {
    /// Hash table mapping each known `(value, type)` pair to its index.
    indices: HashTable<VnIndex>,
    /// Cached hash of each value, indexed by `VnIndex`.
    hashes: IndexVec<VnIndex, u64>,
    values: IndexVec<VnIndex, Value<'a, 'tcx>>,
    types: IndexVec<VnIndex, Ty<'tcx>>,
}

impl<'a, 'tcx> ValueSet<'a, 'tcx> {
    fn new(num_values: usize) -> ValueSet<'a, 'tcx> {
        ValueSet {
            indices: HashTable::with_capacity(num_values),
            hashes: IndexVec::with_capacity(num_values),
            values: IndexVec::with_capacity(num_values),
            types: IndexVec::with_capacity(num_values),
        }
    }

    /// Insert a value that must not be merged with any other, bypassing the
    /// hash table entirely.
    #[inline]
    fn insert_unique(
        &mut self,
        ty: Ty<'tcx>,
        value: impl FnOnce(VnOpaque) -> Value<'a, 'tcx>,
    ) -> VnIndex {
        let value = value(VnOpaque);

        debug_assert!(match value {
            Value::Opaque(_) | Value::Argument(_) | Value::Address { .. } => true,
            Value::Constant { disambiguator, .. } => disambiguator.is_some(),
            _ => false,
        });

        let index = self.hashes.push(0);
        let _index = self.types.push(ty);
        debug_assert_eq!(index, _index);
        let _index = self.values.push(value);
        debug_assert_eq!(index, _index);
        index
    }

    /// Insert a value, de-duplicating it against earlier insertions. Returns
    /// the index and whether the value is new.
    #[allow(rustc::pass_by_value)]
    fn insert(&mut self, ty: Ty<'tcx>, value: Value<'a, 'tcx>) -> (VnIndex, bool) {
        debug_assert!(match value {
            Value::Opaque(_) | Value::Address { .. } => false,
            Value::Constant { disambiguator, .. } => disambiguator.is_none(),
            _ => true,
        });

        let hash: u64 = {
            let mut h = FxHasher::default();
            value.hash(&mut h);
            ty.hash(&mut h);
            h.finish()
        };

        let eq = |index: &VnIndex| self.values[*index] == value && self.types[*index] == ty;
        let hasher = |index: &VnIndex| self.hashes[*index];
        match self.indices.entry(hash, eq, hasher) {
            Entry::Occupied(entry) => {
                let index = *entry.get();
                (index, false)
            }
            Entry::Vacant(entry) => {
                let index = self.hashes.push(hash);
                entry.insert(index);
                let _index = self.values.push(value);
                debug_assert_eq!(index, _index);
                let _index = self.types.push(ty);
                debug_assert_eq!(index, _index);
                (index, true)
            }
        }
    }

    /// Return the value associated with the given index.
    #[inline]
    fn value(&self, index: VnIndex) -> Value<'a, 'tcx> {
        self.values[index]
    }

    /// Return the type associated with the given index.
    #[inline]
    fn ty(&self, index: VnIndex) -> Ty<'tcx> {
        self.types[index]
    }
}

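/// Per-body state for value numbering.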
struct VnState<'body, 'a, 'tcx> {
    tcx: TyCtxt<'tcx>,
    ecx: InterpCx<'tcx, DummyMachine>,
    local_decls: &'body LocalDecls<'tcx>,
    is_coroutine: bool,
    /// Value stored in each SSA local.
    locals: IndexVec<Local, Option<VnIndex>>,
    /// Locals known to hold each value, used to replace values by locals.
    rev_locals: IndexVec<VnIndex, SmallVec<[Local; 1]>>,
    values: ValueSet<'a, 'tcx>,
    /// Cached result of evaluating each value to a constant.
    evaluated: IndexVec<VnIndex, Option<Option<&'a OpTy<'tcx>>>>,
    ssa: &'body SsaLocals,
    dominators: Dominators<BasicBlock>,
    reused_locals: DenseBitSet<Local>,
    arena: &'a DroplessArena,
}

impl<'body, 'a, 'tcx> VnState<'body, 'a, 'tcx> {
    fn new(
        tcx: TyCtxt<'tcx>,
        body: &Body<'tcx>,
        typing_env: ty::TypingEnv<'tcx>,
        ssa: &'body SsaLocals,
        dominators: Dominators<BasicBlock>,
        local_decls: &'body LocalDecls<'tcx>,
        arena: &'a DroplessArena,
    ) -> Self {
        // Pre-size the tables with a rough estimate of the number of values:
        // about two per statement plus a few per basic block.
        let num_values =
            2 * body.basic_blocks.iter().map(|bbdata| bbdata.statements.len()).sum::<usize>()
                + 4 * body.basic_blocks.len();
        VnState {
            tcx,
            ecx: InterpCx::new(tcx, DUMMY_SP, typing_env, DummyMachine),
            local_decls,
            is_coroutine: body.coroutine.is_some(),
            locals: IndexVec::from_elem(None, local_decls),
            rev_locals: IndexVec::with_capacity(num_values),
            values: ValueSet::new(num_values),
            evaluated: IndexVec::with_capacity(num_values),
            ssa,
            dominators,
            reused_locals: DenseBitSet::new_empty(local_decls.len()),
            arena,
        }
    }

    fn typing_env(&self) -> ty::TypingEnv<'tcx> {
        self.ecx.typing_env()
    }

    /// Insert a value that must not be merged with any other, keeping the
    /// side tables in sync.
    fn insert_unique(
        &mut self,
        ty: Ty<'tcx>,
        value: impl FnOnce(VnOpaque) -> Value<'a, 'tcx>,
    ) -> VnIndex {
        let index = self.values.insert_unique(ty, value);
        let _index = self.evaluated.push(None);
        debug_assert_eq!(index, _index);
        let _index = self.rev_locals.push(SmallVec::new());
        debug_assert_eq!(index, _index);
        index
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn insert(&mut self, ty: Ty<'tcx>, value: Value<'a, 'tcx>) -> VnIndex {
        let (index, new) = self.values.insert(ty, value);
        if new {
            let _index = self.evaluated.push(None);
            debug_assert_eq!(index, _index);
            let _index = self.rev_locals.push(SmallVec::new());
            debug_assert_eq!(index, _index);
        }
        index
    }

    /// Create a new `Value` for an opaque, unknown value.
    #[instrument(level = "trace", skip(self), ret)]
    fn new_opaque(&mut self, ty: Ty<'tcx>) -> VnIndex {
        let index = self.insert_unique(ty, Value::Opaque);
        self.evaluated[index] = Some(None);
        index
    }

    /// Create a new `Value` for a function argument.
    #[instrument(level = "trace", skip(self), ret)]
    fn new_argument(&mut self, ty: Ty<'tcx>) -> VnIndex {
        let index = self.insert_unique(ty, Value::Argument);
        self.evaluated[index] = Some(None);
        index
    }

    /// Create a new `Value::Address` for the given place.
    #[instrument(level = "trace", skip(self), ret)]
    fn new_pointer(&mut self, place: Place<'tcx>, kind: AddressKind) -> Option<VnIndex> {
        let pty = place.ty(self.local_decls, self.tcx).ty;
        let ty = match kind {
            AddressKind::Ref(bk) => {
                Ty::new_ref(self.tcx, self.tcx.lifetimes.re_erased, pty, bk.to_mutbl_lossy())
            }
            AddressKind::Address(mutbl) => Ty::new_ptr(self.tcx, pty, mutbl.to_mutbl_lossy()),
        };

        let mut projection = place.projection.iter();
        let base = if place.is_indirect_first_projection() {
            let base = self.locals[place.local]?;
            // Skip the initial `Deref` projection.
            projection.next();
            AddressBase::Deref(base)
        } else if self.ssa.is_ssa(place.local) {
            AddressBase::Local(place.local)
        } else {
            return None;
        };
        let projection =
            projection.map(|proj| proj.try_map(|index| self.locals[index], |ty| ty).ok_or(()));
        let projection = self.arena.try_alloc_from_iter(projection).ok()?;

        let index = self.insert_unique(ty, |provenance| Value::Address {
            base,
            projection,
            kind,
            provenance,
        });
        Some(index)
    }

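    /// Insert a constant. Deterministic constants are de-duplicated;
    /// non-deterministic ones receive a fresh disambiguator so that distinct
    /// mentions are never merged.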
    #[instrument(level = "trace", skip(self), ret)]
    fn insert_constant(&mut self, value: Const<'tcx>) -> VnIndex {
        if value.is_deterministic() {
            // The constant is deterministic: equal mentions can be merged.
            let constant = Value::Constant { value, disambiguator: None };
            self.insert(value.ty(), constant)
        } else {
            // Multiple mentions of this constant may yield different values,
            // so keep each mention distinct.
            self.insert_unique(value.ty(), |disambiguator| Value::Constant {
                value,
                disambiguator: Some(disambiguator),
            })
        }
    }

    #[inline]
    fn get(&self, index: VnIndex) -> Value<'a, 'tcx> {
        self.values.value(index)
    }

    #[inline]
    fn ty(&self, index: VnIndex) -> Ty<'tcx> {
        self.values.ty(index)
    }

    /// Record that `local` is assigned `value`.
    #[instrument(level = "trace", skip(self))]
    fn assign(&mut self, local: Local, value: VnIndex) {
        debug_assert!(self.ssa.is_ssa(local));
        self.locals[local] = Some(value);
        self.rev_locals[value].push(local);
    }

    fn insert_bool(&mut self, flag: bool) -> VnIndex {
        let value = Const::from_bool(self.tcx, flag);
        debug_assert!(value.is_deterministic());
        self.insert(self.tcx.types.bool, Value::Constant { value, disambiguator: None })
    }

    fn insert_scalar(&mut self, ty: Ty<'tcx>, scalar: Scalar) -> VnIndex {
        let value = Const::from_scalar(self.tcx, scalar, ty);
        debug_assert!(value.is_deterministic());
        self.insert(ty, Value::Constant { value, disambiguator: None })
    }

    fn insert_tuple(&mut self, ty: Ty<'tcx>, values: &[VnIndex]) -> VnIndex {
        self.insert(ty, Value::Aggregate(VariantIdx::ZERO, self.arena.alloc_slice(values)))
    }

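    /// Try to evaluate `value` to a constant, interpreting it with the
    /// `DummyMachine`.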
    #[instrument(level = "trace", skip(self), ret)]
    fn eval_to_const_inner(&mut self, value: VnIndex) -> Option<OpTy<'tcx>> {
        use Value::*;
        let ty = self.ty(value);
        // Avoid computing layouts inside a coroutine, as that can cause cycles.
        let ty = if !self.is_coroutine || ty.is_scalar() {
            self.ecx.layout_of(ty).ok()?
        } else {
            return None;
        };
        let op = match self.get(value) {
            _ if ty.is_zst() => ImmTy::uninit(ty).into(),

            Opaque(_) | Argument(_) => return None,
            RuntimeChecks(..) => return None,

            Repeat(value, _count) => {
                // Only a fully-uninit repeat can be summarized as an immediate.
                let value = self.eval_to_const(value)?;
                if value.is_immediate_uninit() {
                    ImmTy::uninit(ty).into()
                } else {
                    return None;
                }
            }
            Constant { ref value, disambiguator: _ } => {
                self.ecx.eval_mir_constant(value, DUMMY_SP, None).discard_err()?
            }
            Aggregate(variant, ref fields) => {
                let fields =
                    fields.iter().map(|&f| self.eval_to_const(f)).collect::<Option<Vec<_>>>()?;
                let variant = if ty.ty.is_enum() { Some(variant) } else { None };
                let (BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..)) = ty.backend_repr
                else {
                    return None;
                };
                let dest = self.ecx.allocate(ty, MemoryKind::Stack).discard_err()?;
                let variant_dest = if let Some(variant) = variant {
                    self.ecx.project_downcast(&dest, variant).discard_err()?
                } else {
                    dest.clone()
                };
                for (field_index, op) in fields.into_iter().enumerate() {
                    let field_dest = self
                        .ecx
                        .project_field(&variant_dest, FieldIdx::from_usize(field_index))
                        .discard_err()?;
                    self.ecx.copy_op(op, &field_dest).discard_err()?;
                }
                self.ecx
                    .write_discriminant(variant.unwrap_or(FIRST_VARIANT), &dest)
                    .discard_err()?;
                self.ecx
                    .alloc_mark_immutable(dest.ptr().provenance.unwrap().alloc_id())
                    .discard_err()?;
                dest.into()
            }
            Union(active_field, field) => {
                let field = self.eval_to_const(field)?;
                if field.layout.layout.is_zst() {
                    ImmTy::from_immediate(Immediate::Uninit, ty).into()
                } else if matches!(
                    ty.backend_repr,
                    BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..)
                ) {
                    let dest = self.ecx.allocate(ty, MemoryKind::Stack).discard_err()?;
                    let field_dest = self.ecx.project_field(&dest, active_field).discard_err()?;
                    self.ecx.copy_op(field, &field_dest).discard_err()?;
                    self.ecx
                        .alloc_mark_immutable(dest.ptr().provenance.unwrap().alloc_id())
                        .discard_err()?;
                    dest.into()
                } else {
                    return None;
                }
            }
            RawPtr { pointer, metadata } => {
                let pointer = self.eval_to_const(pointer)?;
                let metadata = self.eval_to_const(metadata)?;

                // Pointers don't have fields, so don't `project_field` them.
                let data = self.ecx.read_pointer(pointer).discard_err()?;
                let meta = if metadata.layout.is_zst() {
                    MemPlaceMeta::None
                } else {
                    MemPlaceMeta::Meta(self.ecx.read_scalar(metadata).discard_err()?)
                };
                let ptr_imm = Immediate::new_pointer_with_meta(data, meta, &self.ecx);
                ImmTy::from_immediate(ptr_imm, ty).into()
            }

            Projection(base, elem) => {
                let base = self.eval_to_const(base)?;
                // `Index` projections carry a value we cannot resolve here.
                let elem = elem.try_map(|_| None, |()| ty.ty)?;
                self.ecx.project(base, elem).discard_err()?
            }
            Address { base, projection, .. } => {
                debug_assert!(!projection.contains(&ProjectionElem::Deref));
                let pointer = match base {
                    AddressBase::Deref(pointer) => self.eval_to_const(pointer)?,
                    // We have no stable pointer to a local's allocation.
                    AddressBase::Local(_) => return None,
                };
                let mut mplace = self.ecx.deref_pointer(pointer).discard_err()?;
                for elem in projection {
                    // `Index` projections carry a value we cannot resolve here.
                    let elem = elem.try_map(|_| None, |ty| ty)?;
                    mplace = self.ecx.project(&mplace, elem).discard_err()?;
                }
                let pointer = mplace.to_ref(&self.ecx);
                ImmTy::from_immediate(pointer, ty).into()
            }

            Discriminant(base) => {
                let base = self.eval_to_const(base)?;
                let variant = self.ecx.read_discriminant(base).discard_err()?;
                let discr_value =
                    self.ecx.discriminant_for_variant(base.layout.ty, variant).discard_err()?;
                discr_value.into()
            }
            UnaryOp(un_op, operand) => {
                let operand = self.eval_to_const(operand)?;
                let operand = self.ecx.read_immediate(operand).discard_err()?;
                let val = self.ecx.unary_op(un_op, &operand).discard_err()?;
                val.into()
            }
            BinaryOp(bin_op, lhs, rhs) => {
                let lhs = self.eval_to_const(lhs)?;
                let rhs = self.eval_to_const(rhs)?;
                let lhs = self.ecx.read_immediate(lhs).discard_err()?;
                let rhs = self.ecx.read_immediate(rhs).discard_err()?;
                let val = self.ecx.binary_op(bin_op, &lhs, &rhs).discard_err()?;
                val.into()
            }
            Cast { kind, value } => match kind {
                CastKind::IntToInt | CastKind::IntToFloat => {
                    let value = self.eval_to_const(value)?;
                    let value = self.ecx.read_immediate(value).discard_err()?;
                    let res = self.ecx.int_to_int_or_float(&value, ty).discard_err()?;
                    res.into()
                }
                CastKind::FloatToFloat | CastKind::FloatToInt => {
                    let value = self.eval_to_const(value)?;
                    let value = self.ecx.read_immediate(value).discard_err()?;
                    let res = self.ecx.float_to_float_or_int(&value, ty).discard_err()?;
                    res.into()
                }
                CastKind::Transmute | CastKind::Subtype => {
                    let value = self.eval_to_const(value)?;
                    // `offset` only supports immediates with scalar or
                    // scalar-pair ABI, so check that the transmute is
                    // representable that way before attempting it.
                    if value.as_mplace_or_imm().is_right() {
                        let can_transmute = match (value.layout.backend_repr, ty.backend_repr) {
                            (BackendRepr::Scalar(s1), BackendRepr::Scalar(s2)) => {
                                s1.size(&self.ecx) == s2.size(&self.ecx)
                                    && !matches!(s1.primitive(), Primitive::Pointer(..))
                            }
                            (BackendRepr::ScalarPair(a1, b1), BackendRepr::ScalarPair(a2, b2)) => {
                                a1.size(&self.ecx) == a2.size(&self.ecx)
                                    && b1.size(&self.ecx) == b2.size(&self.ecx)
                                    && b1.align(&self.ecx) == b2.align(&self.ecx)
                                    && !matches!(a1.primitive(), Primitive::Pointer(..))
                                    && !matches!(b1.primitive(), Primitive::Pointer(..))
                            }
                            _ => false,
                        };
                        if !can_transmute {
                            return None;
                        }
                    }
                    value.offset(Size::ZERO, ty, &self.ecx).discard_err()?
                }
                CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _) => {
                    let src = self.eval_to_const(value)?;
                    let dest = self.ecx.allocate(ty, MemoryKind::Stack).discard_err()?;
                    self.ecx.unsize_into(src, ty, &dest).discard_err()?;
                    self.ecx
                        .alloc_mark_immutable(dest.ptr().provenance.unwrap().alloc_id())
                        .discard_err()?;
                    dest.into()
                }
                CastKind::FnPtrToPtr | CastKind::PtrToPtr => {
                    let src = self.eval_to_const(value)?;
                    let src = self.ecx.read_immediate(src).discard_err()?;
                    let ret = self.ecx.ptr_to_ptr(&src, ty).discard_err()?;
                    ret.into()
                }
                CastKind::PointerCoercion(ty::adjustment::PointerCoercion::UnsafeFnPointer, _) => {
                    let src = self.eval_to_const(value)?;
                    let src = self.ecx.read_immediate(src).discard_err()?;
                    ImmTy::from_immediate(*src, ty).into()
                }
                _ => return None,
            },
        };
        Some(op)
    }

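    /// Memoizing wrapper around `eval_to_const_inner`: each value is evaluated
    /// at most once, and the result is cached in `self.evaluated`.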
    fn eval_to_const(&mut self, index: VnIndex) -> Option<&'a OpTy<'tcx>> {
        if let Some(op) = self.evaluated[index] {
            return op;
        }
        let op = self.eval_to_const_inner(index);
        self.evaluated[index] = Some(self.arena.alloc(op).as_ref());
        self.evaluated[index].unwrap()
    }

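    /// Compute the value that would be read by dereferencing `base` and
    /// applying `projection`, if that value is already known.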
    #[instrument(level = "trace", skip(self), ret)]
    fn dereference_address(
        &mut self,
        base: AddressBase,
        projection: &[ProjectionElem<VnIndex, Ty<'tcx>>],
    ) -> Option<VnIndex> {
        let (mut place_ty, mut value) = match base {
            AddressBase::Local(local) => {
                let local = self.locals[local]?;
                let place_ty = PlaceTy::from_ty(self.ty(local));
                (place_ty, local)
            }
            AddressBase::Deref(reborrow) => {
                let place_ty = PlaceTy::from_ty(self.ty(reborrow));
                self.project(place_ty, reborrow, ProjectionElem::Deref)?
            }
        };
        for &proj in projection {
            (place_ty, value) = self.project(place_ty, value, proj)?;
        }
        Some(value)
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn project(
        &mut self,
        place_ty: PlaceTy<'tcx>,
        value: VnIndex,
        proj: ProjectionElem<VnIndex, Ty<'tcx>>,
    ) -> Option<(PlaceTy<'tcx>, VnIndex)> {
        let projection_ty = place_ty.projection_ty(self.tcx, proj);
        let proj = match proj {
            ProjectionElem::Deref => {
                if let Some(Mutability::Not) = place_ty.ty.ref_mutability()
                    && projection_ty.ty.is_freeze(self.tcx, self.typing_env())
                {
                    // An immutable borrow of a freeze type always points to the
                    // same value for the lifetime of the borrow, so we can
                    // merge all instances of the dereference.
                    if let Value::Address { base, projection, .. } = self.get(value)
                        && let Some(value) = self.dereference_address(base, projection)
                    {
                        return Some((projection_ty, value));
                    }
                    if projection_ty.ty.is_ref() {
                        return None;
                    }

                    let deref = self
                        .insert(projection_ty.ty, Value::Projection(value, ProjectionElem::Deref));
                    return Some((projection_ty, deref));
                } else {
                    return None;
                }
            }
            ProjectionElem::Downcast(name, index) => ProjectionElem::Downcast(name, index),
            ProjectionElem::Field(f, _) => match self.get(value) {
                Value::Aggregate(_, fields) => return Some((projection_ty, fields[f.as_usize()])),
                Value::Union(active, field) if active == f => return Some((projection_ty, field)),
                Value::Projection(outer_value, ProjectionElem::Downcast(_, read_variant))
                    if let Value::Aggregate(written_variant, fields) = self.get(outer_value)
                        && written_variant == read_variant =>
                {
                    return Some((projection_ty, fields[f.as_usize()]));
                }
                _ => ProjectionElem::Field(f, ()),
            },
            ProjectionElem::Index(idx) => {
                if let Value::Repeat(inner, _) = self.get(value) {
                    return Some((projection_ty, inner));
                }
                ProjectionElem::Index(idx)
            }
            ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
                match self.get(value) {
                    Value::Repeat(inner, _) => {
                        return Some((projection_ty, inner));
                    }
                    Value::Aggregate(_, operands) => {
                        let offset = if from_end {
                            operands.len() - offset as usize
                        } else {
                            offset as usize
                        };
                        let value = operands.get(offset).copied()?;
                        return Some((projection_ty, value));
                    }
                    _ => {}
                };
                ProjectionElem::ConstantIndex { offset, min_length, from_end }
            }
            ProjectionElem::Subslice { from, to, from_end } => {
                ProjectionElem::Subslice { from, to, from_end }
            }
            ProjectionElem::OpaqueCast(_) => ProjectionElem::OpaqueCast(()),
            ProjectionElem::UnwrapUnsafeBinder(_) => ProjectionElem::UnwrapUnsafeBinder(()),
        };

        let value = self.insert(projection_ty.ty, Value::Projection(value, proj));
        Some((projection_ty, value))
    }

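    /// Simplify the projections of `place` in-place: replace the base local
    /// and index locals by canonical ones, and turn known indices into
    /// `ConstantIndex` projections.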
    #[instrument(level = "trace", skip(self))]
    fn simplify_place_projection(&mut self, place: &mut Place<'tcx>, location: Location) {
        if place.is_indirect_first_projection()
            && let Some(base) = self.locals[place.local]
            && let Some(new_local) = self.try_as_local(base, location)
            && place.local != new_local
        {
            place.local = new_local;
            self.reused_locals.insert(new_local);
        }

        let mut projection = Cow::Borrowed(&place.projection[..]);

        for i in 0..projection.len() {
            let elem = projection[i];
            if let ProjectionElem::Index(idx_local) = elem
                && let Some(idx) = self.locals[idx_local]
            {
                if let Some(offset) = self.eval_to_const(idx)
                    && let Some(offset) = self.ecx.read_target_usize(offset).discard_err()
                    && let Some(min_length) = offset.checked_add(1)
                {
                    projection.to_mut()[i] =
                        ProjectionElem::ConstantIndex { offset, min_length, from_end: false };
                } else if let Some(new_idx_local) = self.try_as_local(idx, location)
                    && idx_local != new_idx_local
                {
                    projection.to_mut()[i] = ProjectionElem::Index(new_idx_local);
                    self.reused_locals.insert(new_idx_local);
                }
            }
        }

        if Cow::is_owned(&projection) {
            place.projection = self.tcx.mk_place_elems(&projection);
        }

        trace!(?place);
    }

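    /// Compute the value number of `place`. On failure, return the shortest
    /// equivalent place found along the way.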
    #[instrument(level = "trace", skip(self), ret)]
    fn compute_place_value(
        &mut self,
        place: Place<'tcx>,
        location: Location,
    ) -> Result<VnIndex, PlaceRef<'tcx>> {
        // Invariant: `place_ref` holds the same value as `place`.
        let mut place_ref = place.as_ref();

        let Some(mut value) = self.locals[place.local] else { return Err(place_ref) };
        let mut place_ty = PlaceTy::from_ty(self.local_decls[place.local].ty);
        for (index, proj) in place.projection.iter().enumerate() {
            if let Some(local) = self.try_as_local(value, location) {
                // `local` holds the value of the prefix `place.projection[..index]`,
                // so record the shorter place based on it.
                place_ref = PlaceRef { local, projection: &place.projection[index..] };
            }

            let Some(proj) = proj.try_map(|value| self.locals[value], |ty| ty) else {
                return Err(place_ref);
            };
            let Some(ty_and_value) = self.project(place_ty, value, proj) else {
                return Err(place_ref);
            };
            (place_ty, value) = ty_and_value;
        }

        Ok(value)
    }

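    /// Compute the value number of `place`, rewriting it to a simpler
    /// equivalent place if one exists.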
    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_place_value(
        &mut self,
        place: &mut Place<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        self.simplify_place_projection(place, location);

        match self.compute_place_value(*place, location) {
            Ok(value) => {
                if let Some(new_place) = self.try_as_place(value, location, true)
                    && (new_place.local != place.local
                        || new_place.projection.len() < place.projection.len())
                {
                    *place = new_place;
                    self.reused_locals.insert(new_place.local);
                }
                Some(value)
            }
            Err(place_ref) => {
                if place_ref.local != place.local
                    || place_ref.projection.len() < place.projection.len()
                {
                    // By the invariant on `place_ref`, this new place holds the
                    // same value as the original one.
                    *place = place_ref.project_deeper(&[], self.tcx);
                    self.reused_locals.insert(place_ref.local);
                }
                None
            }
        }
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_operand(
        &mut self,
        operand: &mut Operand<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        match *operand {
            Operand::RuntimeChecks(c) => {
                Some(self.insert(self.tcx.types.bool, Value::RuntimeChecks(c)))
            }
            Operand::Constant(ref constant) => Some(self.insert_constant(constant.const_)),
            Operand::Copy(ref mut place) | Operand::Move(ref mut place) => {
                let value = self.simplify_place_value(place, location)?;
                if let Some(const_) = self.try_as_constant(value) {
                    *operand = Operand::Constant(Box::new(const_));
                } else if let Value::RuntimeChecks(c) = self.get(value) {
                    *operand = Operand::RuntimeChecks(c);
                }
                Some(value)
            }
        }
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_rvalue(
        &mut self,
        lhs: &Place<'tcx>,
        rvalue: &mut Rvalue<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let value = match *rvalue {
            // Forward values.
            Rvalue::Use(ref mut operand) => return self.simplify_operand(operand, location),

            Rvalue::Repeat(ref mut op, amount) => {
                let op = self.simplify_operand(op, location)?;
                Value::Repeat(op, amount)
            }
            Rvalue::Aggregate(..) => return self.simplify_aggregate(rvalue, location),
            Rvalue::Ref(_, borrow_kind, ref mut place) => {
                self.simplify_place_projection(place, location);
                return self.new_pointer(*place, AddressKind::Ref(borrow_kind));
            }
            Rvalue::RawPtr(mutbl, ref mut place) => {
                self.simplify_place_projection(place, location);
                return self.new_pointer(*place, AddressKind::Address(mutbl));
            }
            Rvalue::WrapUnsafeBinder(ref mut op, _) => {
                let value = self.simplify_operand(op, location)?;
                Value::Cast { kind: CastKind::Transmute, value }
            }

            // Operations.
            Rvalue::Cast(ref mut kind, ref mut value, to) => {
                return self.simplify_cast(kind, value, to, location);
            }
            Rvalue::BinaryOp(op, box (ref mut lhs, ref mut rhs)) => {
                return self.simplify_binary(op, lhs, rhs, location);
            }
            Rvalue::UnaryOp(op, ref mut arg_op) => {
                return self.simplify_unary(op, arg_op, location);
            }
            Rvalue::Discriminant(ref mut place) => {
                let place = self.simplify_place_value(place, location)?;
                if let Some(discr) = self.simplify_discriminant(place) {
                    return Some(discr);
                }
                Value::Discriminant(place)
            }

            // Unsupported values.
            Rvalue::ThreadLocalRef(..) => return None,
            Rvalue::CopyForDeref(_) | Rvalue::ShallowInitBox(..) => {
                bug!("forbidden in runtime MIR: {rvalue:?}")
            }
        };
        let ty = rvalue.ty(self.local_decls, self.tcx);
        Some(self.insert(ty, value))
    }

    fn simplify_discriminant(&mut self, place: VnIndex) -> Option<VnIndex> {
        let enum_ty = self.ty(place);
        if enum_ty.is_enum()
            && let Value::Aggregate(variant, _) = self.get(place)
        {
            let discr = self.ecx.discriminant_for_variant(enum_ty, variant).discard_err()?;
            return Some(self.insert_scalar(discr.layout.ty, discr.to_scalar()));
        }

        None
    }

    fn try_as_place_elem(
        &mut self,
        ty: Ty<'tcx>,
        proj: ProjectionElem<VnIndex, ()>,
        loc: Location,
    ) -> Option<PlaceElem<'tcx>> {
        proj.try_map(
            |value| {
                let local = self.try_as_local(value, loc)?;
                self.reused_locals.insert(local);
                Some(local)
            },
            |()| ty,
        )
    }

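    /// If the aggregate just reassembles, in order, all the fields of a single
    /// value of the same type, return that value.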
    fn simplify_aggregate_to_copy(
        &mut self,
        ty: Ty<'tcx>,
        variant_index: VariantIdx,
        fields: &[VnIndex],
    ) -> Option<VnIndex> {
        let Some(&first_field) = fields.first() else { return None };
        let Value::Projection(copy_from_value, _) = self.get(first_field) else { return None };

        // Each field must be the corresponding field projection of the same value.
        if fields.iter().enumerate().any(|(index, &v)| {
            if let Value::Projection(pointer, ProjectionElem::Field(from_index, _)) = self.get(v)
                && copy_from_value == pointer
                && from_index.index() == index
            {
                return false;
            }
            true
        }) {
            return None;
        }

        let mut copy_from_local_value = copy_from_value;
        if let Value::Projection(pointer, proj) = self.get(copy_from_value)
            && let ProjectionElem::Downcast(_, read_variant) = proj
        {
            if variant_index == read_variant {
                // When copying a variant, there is no need to downcast.
                copy_from_local_value = pointer;
            } else {
                // The copied variant must be identical.
                return None;
            }
        }

        // The type of the copy must be identical for the replacement to be valid.
        if self.ty(copy_from_local_value) == ty { Some(copy_from_local_value) } else { None }
    }

    fn simplify_aggregate(
        &mut self,
        rvalue: &mut Rvalue<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let tcx = self.tcx;
        let ty = rvalue.ty(self.local_decls, tcx);

        let Rvalue::Aggregate(box ref kind, ref mut field_ops) = *rvalue else { bug!() };

        if field_ops.is_empty() {
            let is_zst = match *kind {
                AggregateKind::Array(..)
                | AggregateKind::Tuple
                | AggregateKind::Closure(..)
                | AggregateKind::CoroutineClosure(..) => true,
                // Enums with no fields still carry a discriminant.
                AggregateKind::Adt(did, ..) => tcx.def_kind(did) != DefKind::Enum,
                // Coroutines are never ZST, as they at least contain their state.
                AggregateKind::Coroutine(..) => false,
                AggregateKind::RawPtr(..) => bug!("MIR for RawPtr aggregate must have 2 fields"),
            };

            if is_zst {
                return Some(self.insert_constant(Const::zero_sized(ty)));
            }
        }

        let fields = self.arena.alloc_from_iter(field_ops.iter_mut().map(|op| {
            self.simplify_operand(op, location)
                .unwrap_or_else(|| self.new_opaque(op.ty(self.local_decls, self.tcx)))
        }));

        let variant_index = match *kind {
            AggregateKind::Array(..) | AggregateKind::Tuple => {
                assert!(!field_ops.is_empty());
                FIRST_VARIANT
            }
            AggregateKind::Closure(..)
            | AggregateKind::CoroutineClosure(..)
            | AggregateKind::Coroutine(..) => FIRST_VARIANT,
            AggregateKind::Adt(_, variant_index, _, _, None) => variant_index,
            // Unions are tracked by their single active field.
            AggregateKind::Adt(_, _, _, _, Some(active_field)) => {
                let field = *fields.first()?;
                return Some(self.insert(ty, Value::Union(active_field, field)));
            }
            AggregateKind::RawPtr(..) => {
                assert_eq!(field_ops.len(), 2);
                let [mut pointer, metadata] = fields.try_into().unwrap();

                // Any thin pointer of matching mutability is fine as the data pointer.
                let mut was_updated = false;
                while let Value::Cast { kind: CastKind::PtrToPtr, value: cast_value } =
                    self.get(pointer)
                    && let ty::RawPtr(from_pointee_ty, from_mtbl) = self.ty(cast_value).kind()
                    && let ty::RawPtr(_, output_mtbl) = ty.kind()
                    && from_mtbl == output_mtbl
                    && from_pointee_ty.is_sized(self.tcx, self.typing_env())
                {
                    pointer = cast_value;
                    was_updated = true;
                }

                if was_updated && let Some(op) = self.try_as_operand(pointer, location) {
                    field_ops[FieldIdx::ZERO] = op;
                }

                return Some(self.insert(ty, Value::RawPtr { pointer, metadata }));
            }
        };

        // Turn a large array of equal elements into a `Repeat` rvalue.
        if ty.is_array()
            && fields.len() > 4
            && let Ok(&first) = fields.iter().all_equal_value()
        {
            let len = ty::Const::from_target_usize(self.tcx, fields.len().try_into().unwrap());
            if let Some(op) = self.try_as_operand(first, location) {
                *rvalue = Rvalue::Repeat(op, len);
            }
            return Some(self.insert(ty, Value::Repeat(first, len)));
        }

        if let Some(value) = self.simplify_aggregate_to_copy(ty, variant_index, &fields) {
            if let Some(place) = self.try_as_place(value, location, true) {
                self.reused_locals.insert(place.local);
                *rvalue = Rvalue::Use(Operand::Copy(place));
            }
            return Some(value);
        }

        Some(self.insert(ty, Value::Aggregate(variant_index, fields)))
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_unary(
        &mut self,
        op: UnOp,
        arg_op: &mut Operand<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let mut arg_index = self.simplify_operand(arg_op, location)?;
        let arg_ty = self.ty(arg_index);
        let ret_ty = op.ty(self.tcx, arg_ty);

        // `PtrMetadata` doesn't care about the pointee type, so we can look
        // through pointer casts that preserve the metadata.
        if op == UnOp::PtrMetadata {
            let mut was_updated = false;
            loop {
                arg_index = match self.get(arg_index) {
                    // Pointer casts that preserve metadata, such as
                    // `*const [i32]` <-> `*mut [i32]` <-> `*mut [f32]`.
                    Value::Cast { kind: CastKind::PtrToPtr, value: inner }
                        if self.pointers_have_same_metadata(self.ty(inner), arg_ty) =>
                    {
                        inner
                    }

                    // An unsizing cast of an array pointer puts the array
                    // length into the wide pointer metadata.
                    Value::Cast {
                        kind: CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _),
                        value: from,
                    } if let Some(from) = self.ty(from).builtin_deref(true)
                        && let ty::Array(_, len) = from.kind()
                        && let Some(to) = self.ty(arg_index).builtin_deref(true)
                        && let ty::Slice(..) = to.kind() =>
                    {
                        return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len)));
                    }

                    // A reborrow without projections has the same metadata as
                    // the original pointer.
                    Value::Address { base: AddressBase::Deref(reborrowed), projection, .. }
                        if projection.is_empty() =>
                    {
                        reborrowed
                    }

                    _ => break,
                };
                was_updated = true;
            }

            if was_updated && let Some(op) = self.try_as_operand(arg_index, location) {
                *arg_op = op;
            }
        }

        let value = match (op, self.get(arg_index)) {
            (UnOp::Not, Value::UnaryOp(UnOp::Not, inner)) => return Some(inner),
            (UnOp::Neg, Value::UnaryOp(UnOp::Neg, inner)) => return Some(inner),
            (UnOp::Not, Value::BinaryOp(BinOp::Eq, lhs, rhs)) => {
                Value::BinaryOp(BinOp::Ne, lhs, rhs)
            }
            (UnOp::Not, Value::BinaryOp(BinOp::Ne, lhs, rhs)) => {
                Value::BinaryOp(BinOp::Eq, lhs, rhs)
            }
            (UnOp::PtrMetadata, Value::RawPtr { metadata, .. }) => return Some(metadata),
            // An unsizing cast of an array pointer puts the array length into
            // the wide pointer metadata.
            (
                UnOp::PtrMetadata,
                Value::Cast {
                    kind: CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _),
                    value: inner,
                },
            ) if let ty::Slice(..) = arg_ty.builtin_deref(true).unwrap().kind()
                && let ty::Array(_, len) = self.ty(inner).builtin_deref(true).unwrap().kind() =>
            {
                return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len)));
            }
            _ => Value::UnaryOp(op, arg_index),
        };
        Some(self.insert(ret_ty, value))
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_binary(
        &mut self,
        op: BinOp,
        lhs_operand: &mut Operand<'tcx>,
        rhs_operand: &mut Operand<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let lhs = self.simplify_operand(lhs_operand, location);
        let rhs = self.simplify_operand(rhs_operand, location);

        // Only bail out after simplifying both operands, as the simplification
        // is a useful side effect.
        let mut lhs = lhs?;
        let mut rhs = rhs?;

        let lhs_ty = self.ty(lhs);

        // If we're comparing pointers, remove `PtrToPtr` casts when the source
        // types of both casts and the metadata all match.
        if let BinOp::Eq | BinOp::Ne | BinOp::Lt | BinOp::Le | BinOp::Gt | BinOp::Ge = op
            && lhs_ty.is_any_ptr()
            && let Value::Cast { kind: CastKind::PtrToPtr, value: lhs_value } = self.get(lhs)
            && let Value::Cast { kind: CastKind::PtrToPtr, value: rhs_value } = self.get(rhs)
            && let lhs_from = self.ty(lhs_value)
            && lhs_from == self.ty(rhs_value)
            && self.pointers_have_same_metadata(lhs_from, lhs_ty)
        {
            lhs = lhs_value;
            rhs = rhs_value;
            if let Some(lhs_op) = self.try_as_operand(lhs, location)
                && let Some(rhs_op) = self.try_as_operand(rhs, location)
            {
                *lhs_operand = lhs_op;
                *rhs_operand = rhs_op;
            }
        }

        if let Some(value) = self.simplify_binary_inner(op, lhs_ty, lhs, rhs) {
            return Some(value);
        }
        let ty = op.ty(self.tcx, lhs_ty, self.ty(rhs));
        let value = Value::BinaryOp(op, lhs, rhs);
        Some(self.insert(ty, value))
    }

    fn simplify_binary_inner(
        &mut self,
        op: BinOp,
        lhs_ty: Ty<'tcx>,
        lhs: VnIndex,
        rhs: VnIndex,
    ) -> Option<VnIndex> {
        // Floats are weird enough that none of the logic below applies.
        let reasonable_ty =
            lhs_ty.is_integral() || lhs_ty.is_bool() || lhs_ty.is_char() || lhs_ty.is_any_ptr();
        if !reasonable_ty {
            return None;
        }

        let layout = self.ecx.layout_of(lhs_ty).ok()?;

        let mut as_bits = |value: VnIndex| {
            let constant = self.eval_to_const(value)?;
            if layout.backend_repr.is_scalar() {
                let scalar = self.ecx.read_scalar(constant).discard_err()?;
                scalar.to_bits(constant.layout.size).discard_err()
            } else {
                // The constant is a wide pointer; do not try to read its bits.
                None
            }
        };

        // Represent each operand either as known bits (`Left`) or as an
        // abstract value (`Right`).
        use Either::{Left, Right};
        let a = as_bits(lhs).map_or(Right(lhs), Left);
        let b = as_bits(rhs).map_or(Right(rhs), Left);

        let result = match (op, a, b) {
            // Neutral elements.
            (
                BinOp::Add
                | BinOp::AddWithOverflow
                | BinOp::AddUnchecked
                | BinOp::BitOr
                | BinOp::BitXor,
                Left(0),
                Right(p),
            )
            | (
                BinOp::Add
                | BinOp::AddWithOverflow
                | BinOp::AddUnchecked
                | BinOp::BitOr
                | BinOp::BitXor
                | BinOp::Sub
                | BinOp::SubWithOverflow
                | BinOp::SubUnchecked
                | BinOp::Offset
                | BinOp::Shl
                | BinOp::Shr,
                Right(p),
                Left(0),
            )
            | (BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked, Left(1), Right(p))
            | (
                BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked | BinOp::Div,
                Right(p),
                Left(1),
            ) => p,
            // `x & ones == x`, where `ones` is all ones for the type.
            (BinOp::BitAnd, Right(p), Left(ones)) | (BinOp::BitAnd, Left(ones), Right(p))
                if ones == layout.size.truncate(u128::MAX)
                    || (layout.ty.is_bool() && ones == 1) =>
            {
                p
            }
            // Absorbing elements.
            (
                BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked | BinOp::BitAnd,
                _,
                Left(0),
            )
            | (BinOp::Rem, _, Left(1))
            | (
                BinOp::Mul
                | BinOp::MulWithOverflow
                | BinOp::MulUnchecked
                | BinOp::Div
                | BinOp::Rem
                | BinOp::BitAnd
                | BinOp::Shl
                | BinOp::Shr,
                Left(0),
                _,
            ) => self.insert_scalar(lhs_ty, Scalar::from_uint(0u128, layout.size)),
            // `x | ones == ones`, where `ones` is all ones for the type.
            (BinOp::BitOr, _, Left(ones)) | (BinOp::BitOr, Left(ones), _)
                if ones == layout.size.truncate(u128::MAX)
                    || (layout.ty.is_bool() && ones == 1) =>
            {
                self.insert_scalar(lhs_ty, Scalar::from_uint(ones, layout.size))
            }
            // Subtraction or xor of a value with itself.
            (BinOp::Sub | BinOp::SubWithOverflow | BinOp::SubUnchecked | BinOp::BitXor, a, b)
                if a == b =>
            {
                self.insert_scalar(lhs_ty, Scalar::from_uint(0u128, layout.size))
            }
            // Comparisons.
            (BinOp::Eq, Left(a), Left(b)) => self.insert_bool(a == b),
            (BinOp::Eq, a, b) if a == b => self.insert_bool(true),
            (BinOp::Ne, Left(a), Left(b)) => self.insert_bool(a != b),
            (BinOp::Ne, a, b) if a == b => self.insert_bool(false),
            _ => return None,
        };

        if op.is_overflowing() {
            let ty = Ty::new_tup(self.tcx, &[self.ty(result), self.tcx.types.bool]);
            let false_val = self.insert_bool(false);
            Some(self.insert_tuple(ty, &[result, false_val]))
        } else {
            Some(result)
        }
    }

    fn simplify_cast(
        &mut self,
        initial_kind: &mut CastKind,
        initial_operand: &mut Operand<'tcx>,
        to: Ty<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        use CastKind::*;
        use rustc_middle::ty::adjustment::PointerCoercion::*;

        let mut kind = *initial_kind;
        let mut value = self.simplify_operand(initial_operand, location)?;
        let mut from = self.ty(value);
        if from == to {
            return Some(value);
        }

        if let CastKind::PointerCoercion(ReifyFnPointer(_) | ClosureFnPointer(_), _) = kind {
            // Each reification may produce a different pointer, so do not try
            // to merge them.
            return Some(self.new_opaque(to));
        }

        let mut was_ever_updated = false;
        loop {
            let mut was_updated_this_iteration = false;

            // Transmuting between raw pointers with the same metadata type is
            // just a pointer cast.
            if let Transmute = kind
                && from.is_raw_ptr()
                && to.is_raw_ptr()
                && self.pointers_have_same_metadata(from, to)
            {
                kind = PtrToPtr;
                was_updated_this_iteration = true;
            }

            // If the cast just casts away the metadata of a wide pointer we
            // built ourselves, cast the original thin pointer directly.
            if let PtrToPtr = kind
                && let Value::RawPtr { pointer, .. } = self.get(value)
                && let ty::RawPtr(to_pointee, _) = to.kind()
                && to_pointee.is_sized(self.tcx, self.typing_env())
            {
                from = self.ty(pointer);
                value = pointer;
                was_updated_this_iteration = true;
                if from == to {
                    return Some(pointer);
                }
            }

            // Aggregate-then-transmute can transmute the field directly when
            // all the bytes of the aggregate come from a single field.
            if let Transmute = kind
                && let Value::Aggregate(variant_idx, field_values) = self.get(value)
                && let Some((field_idx, field_ty)) =
                    self.value_is_all_in_one_field(from, variant_idx)
            {
                from = field_ty;
                value = field_values[field_idx.as_usize()];
                was_updated_this_iteration = true;
                if field_ty == to {
                    return Some(value);
                }
            }

            // Various cast-then-cast sequences can be fused.
            if let Value::Cast { kind: inner_kind, value: inner_value } = self.get(value) {
                let inner_from = self.ty(inner_value);
                let new_kind = match (inner_kind, kind) {
                    // Two pointer casts compose into one.
                    (PtrToPtr, PtrToPtr) => Some(PtrToPtr),
                    // PtrToPtr-then-Transmute is fine so long as the pointer
                    // cast did not change the metadata type.
                    (PtrToPtr, Transmute) if self.pointers_have_same_metadata(inner_from, from) => {
                        Some(Transmute)
                    }
                    // Similarly for Transmute-then-PtrToPtr; note that the
                    // metadata check is on different types, so this arm cannot
                    // be merged with the previous one.
                    (Transmute, PtrToPtr) if self.pointers_have_same_metadata(from, to) => {
                        Some(Transmute)
                    }
                    // Fusing two transmutes is always legal, but may hide a
                    // niche of the middle type that the backend could use, so
                    // only do it when no interesting niche is lost.
                    (Transmute, Transmute)
                        if !self.transmute_may_have_niche_of_interest_to_backend(
                            inner_from, from, to,
                        ) =>
                    {
                        Some(Transmute)
                    }
                    _ => None,
                };
                if let Some(new_kind) = new_kind {
                    kind = new_kind;
                    from = inner_from;
                    value = inner_value;
                    was_updated_this_iteration = true;
                    if inner_from == to {
                        return Some(inner_value);
                    }
                }
            }

            if was_updated_this_iteration {
                was_ever_updated = true;
            } else {
                break;
            }
        }

        if was_ever_updated && let Some(op) = self.try_as_operand(value, location) {
            *initial_operand = op;
            *initial_kind = kind;
        }

        Some(self.insert(to, Value::Cast { kind, value }))
    }

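    /// Whether `left_ptr_ty` and `right_ptr_ty` have the same pointee metadata
    /// type, after normalization.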
    fn pointers_have_same_metadata(&self, left_ptr_ty: Ty<'tcx>, right_ptr_ty: Ty<'tcx>) -> bool {
        let left_meta_ty = left_ptr_ty.pointee_metadata_ty_or_projection(self.tcx);
        let right_meta_ty = right_ptr_ty.pointee_metadata_ty_or_projection(self.tcx);
        if left_meta_ty == right_meta_ty {
            true
        } else if let Ok(left) =
            self.tcx.try_normalize_erasing_regions(self.typing_env(), left_meta_ty)
            && let Ok(right) =
                self.tcx.try_normalize_erasing_regions(self.typing_env(), right_meta_ty)
        {
            left == right
        } else {
            false
        }
    }

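    /// Whether fusing the two transmutes `from_ty -> middle_ty -> to_ty` into a
    /// single `from_ty -> to_ty` transmute would discard a niche of `middle_ty`
    /// that the backend might otherwise use for optimizations.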
    fn transmute_may_have_niche_of_interest_to_backend(
        &self,
        from_ty: Ty<'tcx>,
        middle_ty: Ty<'tcx>,
        to_ty: Ty<'tcx>,
    ) -> bool {
        let Ok(middle_layout) = self.ecx.layout_of(middle_ty) else {
            // If we can't compute the layout, conservatively assume a niche.
            return true;
        };

        if middle_layout.uninhabited {
            return true;
        }

        match middle_layout.backend_repr {
            BackendRepr::Scalar(mid) => {
                if mid.is_always_valid(&self.ecx) {
                    // The middle type has no niche at all.
                    false
                } else if let Ok(from_layout) = self.ecx.layout_of(from_ty)
                    && !from_layout.uninhabited
                    && from_layout.size == middle_layout.size
                    && let BackendRepr::Scalar(from_a) = from_layout.backend_repr
                    && let mid_range = mid.valid_range(&self.ecx)
                    && let from_range = from_a.valid_range(&self.ecx)
                    && mid_range.contains_range(from_range, middle_layout.size)
                {
                    // The middle type's valid range is no stricter than the
                    // source's, so the middle step adds no information.
                    false
                } else if let Ok(to_layout) = self.ecx.layout_of(to_ty)
                    && !to_layout.uninhabited
                    && to_layout.size == middle_layout.size
                    && let BackendRepr::Scalar(to_a) = to_layout.backend_repr
                    && let mid_range = mid.valid_range(&self.ecx)
                    && let to_range = to_a.valid_range(&self.ecx)
                    && mid_range.contains_range(to_range, middle_layout.size)
                {
                    // Same, but relative to the destination type.
                    false
                } else {
                    true
                }
            }
            BackendRepr::ScalarPair(a, b) => {
                !a.is_always_valid(&self.ecx) || !b.is_always_valid(&self.ecx)
            }
            BackendRepr::SimdVector { .. }
            | BackendRepr::ScalableVector { .. }
            | BackendRepr::Memory { .. } => false,
        }
    }

    fn value_is_all_in_one_field(
        &self,
        ty: Ty<'tcx>,
        variant: VariantIdx,
    ) -> Option<(FieldIdx, Ty<'tcx>)> {
        if let Ok(layout) = self.ecx.layout_of(ty)
            && let abi::Variants::Single { index } = layout.variants
            && index == variant
            && let Some((field_idx, field_layout)) = layout.non_1zst_field(&self.ecx)
            && layout.size == field_layout.size
        {
            // The single non-1-ZST field spans the whole layout, so every byte
            // of the value comes from that field.
            Some((field_idx, field_layout.ty))
        } else if let ty::Adt(adt, args) = ty.kind()
            && adt.is_struct()
            && adt.repr().transparent()
            && let [single_field] = adt.non_enum_variant().fields.raw.as_slice()
        {
            Some((FieldIdx::ZERO, single_field.ty(self.tcx, args)))
        } else {
            None
        }
    }
}

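/// Try to convert an interpreter value to a provenance-free `ConstValue` that
/// is cheap enough to store directly in MIR.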
fn op_to_prop_const<'tcx>(
    ecx: &mut InterpCx<'tcx, DummyMachine>,
    op: &OpTy<'tcx>,
) -> Option<ConstValue> {
    // Do not attempt to propagate unsized locals.
    if op.layout.is_unsized() {
        return None;
    }

    // This constant is a ZST: just return an empty value.
    if op.layout.is_zst() {
        return Some(ConstValue::ZeroSized);
    }

    // Do not synthesize large constants: only propagate scalars and scalar
    // pairs, unless the value is fully uninitialized.
    if !op.is_immediate_uninit()
        && !matches!(op.layout.backend_repr, BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..))
    {
        return None;
    }

    // If this constant has scalar ABI, return it as a scalar.
    if let BackendRepr::Scalar(abi::Scalar::Initialized { .. }) = op.layout.backend_repr
        && let Some(scalar) = ecx.read_scalar(op).discard_err()
    {
        if !scalar.try_to_scalar_int().is_ok() {
            // Check that we do not leak a pointer: such pointers may lose part
            // of their identity in codegen.
            return None;
        }
        return Some(ConstValue::Scalar(scalar));
    }

    // If this constant is already in memory, try to reuse the allocation.
    if let Either::Left(mplace) = op.as_mplace_or_imm() {
        let (size, _align) = ecx.size_and_align_of_val(&mplace).discard_err()??;

        // Do not try interning a value that contains provenance.
        let alloc_ref = ecx.get_ptr_alloc(mplace.ptr(), size).discard_err()??;
        if alloc_ref.has_provenance() {
            return None;
        }

        let pointer = mplace.ptr().into_pointer_or_addr().ok()?;
        let (prov, offset) = pointer.prov_and_relative_offset();
        let alloc_id = prov.alloc_id();
        intern_const_alloc_for_constprop(ecx, alloc_id).discard_err()?;

        // Only reuse the allocation if it is sufficiently aligned for the
        // value's type.
        if let GlobalAlloc::Memory(alloc) = ecx.tcx.global_alloc(alloc_id)
            && alloc.inner().align >= op.layout.align.abi
        {
            return Some(ConstValue::Indirect { alloc_id, offset });
        }
    }

    // Everything failed: create a new allocation to hold the data.
    let alloc_id =
        ecx.intern_with_temp_alloc(op.layout, |ecx, dest| ecx.copy_op(op, dest)).discard_err()?;
    let value = ConstValue::Indirect { alloc_id, offset: Size::ZERO };

    // Check that we do not leak a pointer: such pointers may lose part of
    // their identity in codegen.
    if ecx.tcx.global_alloc(alloc_id).unwrap_memory().inner().provenance().ptrs().is_empty() {
        return Some(value);
    }

    None
}

impl<'tcx> VnState<'_, '_, 'tcx> {
    /// If `index` is a constant or a dominating place, return an operand that
    /// can replace its uses.
    fn try_as_operand(&mut self, index: VnIndex, location: Location) -> Option<Operand<'tcx>> {
        if let Some(const_) = self.try_as_constant(index) {
            Some(Operand::Constant(Box::new(const_)))
        } else if let Value::RuntimeChecks(c) = self.get(index) {
            Some(Operand::RuntimeChecks(c))
        } else if let Some(place) = self.try_as_place(index, location, false) {
            self.reused_locals.insert(place.local);
            Some(Operand::Copy(place))
        } else {
            None
        }
    }

    /// If `index` is a constant, return it as a `ConstOperand`.
    fn try_as_constant(&mut self, index: VnIndex) -> Option<ConstOperand<'tcx>> {
        // This was already a constant in MIR, do not change it. If the
        // constant is not deterministic, adding another mention of it in MIR
        // would not give the same value as the former mention.
        if let Value::Constant { value, disambiguator: None } = self.get(index) {
            debug_assert!(value.is_deterministic());
            return Some(ConstOperand { span: DUMMY_SP, user_ty: None, const_: value });
        }

        let op = self.eval_to_const(index)?;
        if op.layout.is_unsized() {
            // Do not attempt to propagate unsized locals.
            return None;
        }

        let value = op_to_prop_const(&mut self.ecx, op)?;

        // Check that we do not leak a pointer: such pointers may lose part of
        // their identity in codegen.
        assert!(!value.may_have_provenance(self.tcx, op.layout.size));

        let const_ = Const::Val(value, op.layout.ty);
        Some(ConstOperand { span: DUMMY_SP, user_ty: None, const_ })
    }

    /// Construct a place that holds the same value as `index`, and for which
    /// all locals strictly dominate `loc`.
    #[instrument(level = "trace", skip(self), ret)]
    fn try_as_place(
        &mut self,
        mut index: VnIndex,
        loc: Location,
        allow_complex_projection: bool,
    ) -> Option<Place<'tcx>> {
        let mut projection = SmallVec::<[PlaceElem<'tcx>; 1]>::new();
        loop {
            if let Some(local) = self.try_as_local(index, loc) {
                projection.reverse();
                let place =
                    Place { local, projection: self.tcx.mk_place_elems(projection.as_slice()) };
                return Some(place);
            } else if projection.last() == Some(&PlaceElem::Deref) {
                // A `Deref` can only be the first projection of a place, so we
                // cannot push anything below it.
                return None;
            } else if let Value::Projection(pointer, proj) = self.get(index)
                && (allow_complex_projection || proj.is_stable_offset())
                && let Some(proj) = self.try_as_place_elem(self.ty(index), proj, loc)
            {
                if proj == PlaceElem::Deref {
                    // Only reload through pointers that are known not to
                    // change: immutable reference arguments.
                    match self.get(pointer) {
                        Value::Argument(_)
                            if let Some(Mutability::Not) = self.ty(pointer).ref_mutability() => {}
                        _ => {
                            return None;
                        }
                    }
                }
                projection.push(proj);
                index = pointer;
            } else {
                return None;
            }
        }
    }

    /// If there is a local which is assigned `index`, and whose assignment
    /// strictly dominates `loc`, return it.
    fn try_as_local(&mut self, index: VnIndex, loc: Location) -> Option<Local> {
        let other = self.rev_locals.get(index)?;
        other
            .iter()
            .find(|&&other| self.ssa.assignment_dominates(&self.dominators, other, loc))
            .copied()
    }
}

impl<'tcx> MutVisitor<'tcx> for VnState<'_, '_, 'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    fn visit_place(&mut self, place: &mut Place<'tcx>, context: PlaceContext, location: Location) {
        self.simplify_place_projection(place, location);
        self.super_place(place, context, location);
    }

    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) {
        self.simplify_operand(operand, location);
        self.super_operand(operand, location);
    }

    fn visit_assign(
        &mut self,
        lhs: &mut Place<'tcx>,
        rvalue: &mut Rvalue<'tcx>,
        location: Location,
    ) {
        self.simplify_place_projection(lhs, location);

        let value = self.simplify_rvalue(lhs, rvalue, location);
        if let Some(value) = value {
            // If the rvalue is already a constant or the shortest known copy
            // of the value, leave it alone.
            if let Some(const_) = self.try_as_constant(value) {
                *rvalue = Rvalue::Use(Operand::Constant(Box::new(const_)));
            } else if let Some(place) = self.try_as_place(value, location, false)
                && *rvalue != Rvalue::Use(Operand::Move(place))
                && *rvalue != Rvalue::Use(Operand::Copy(place))
            {
                *rvalue = Rvalue::Use(Operand::Copy(place));
                self.reused_locals.insert(place.local);
            }
        }

        if let Some(local) = lhs.as_local()
            && self.ssa.is_ssa(local)
            && let rvalue_ty = rvalue.ty(self.local_decls, self.tcx)
            // Only record the value if the rvalue's type is exactly the
            // local's declared type (they may differ by subtyping).
            && self.local_decls[local].ty == rvalue_ty
        {
            let value = value.unwrap_or_else(|| self.new_opaque(rvalue_ty));
            self.assign(local, value);
        }
    }

    fn visit_terminator(&mut self, terminator: &mut Terminator<'tcx>, location: Location) {
        if let Terminator { kind: TerminatorKind::Call { destination, .. }, .. } = terminator {
            if let Some(local) = destination.as_local()
                && self.ssa.is_ssa(local)
            {
                // The return value of a call is a fresh, opaque value.
                let ty = self.local_decls[local].ty;
                let opaque = self.new_opaque(ty);
                self.assign(local, opaque);
            }
        }
        self.super_terminator(terminator, location);
    }
}

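/// Cleanup pass: locals reused by value numbering may have had their live
/// ranges extended, so remove their storage statements and demote moves of
/// them to copies.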
struct StorageRemover<'tcx> {
    tcx: TyCtxt<'tcx>,
    reused_locals: DenseBitSet<Local>,
}

impl<'tcx> MutVisitor<'tcx> for StorageRemover<'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, _: Location) {
        if let Operand::Move(place) = *operand
            && !place.is_indirect_first_projection()
            && self.reused_locals.contains(place.local)
        {
            *operand = Operand::Copy(place);
        }
    }

    fn visit_statement(&mut self, stmt: &mut Statement<'tcx>, loc: Location) {
        match stmt.kind {
            StatementKind::StorageLive(l) | StatementKind::StorageDead(l)
                if self.reused_locals.contains(l) =>
            {
                stmt.make_nop(true)
            }
            _ => self.super_statement(stmt, loc),
        }
    }
}