use std::borrow::Cow;

use either::Either;
use itertools::Itertools as _;
use rustc_abi::{self as abi, BackendRepr, FIRST_VARIANT, FieldIdx, Primitive, Size, VariantIdx};
use rustc_const_eval::const_eval::DummyMachine;
use rustc_const_eval::interpret::{
    ImmTy, Immediate, InterpCx, MemPlaceMeta, MemoryKind, OpTy, Projectable, Scalar,
    intern_const_alloc_for_constprop,
};
use rustc_data_structures::fx::{FxIndexSet, MutableValues};
use rustc_data_structures::graph::dominators::Dominators;
use rustc_hir::def::DefKind;
use rustc_index::bit_set::DenseBitSet;
use rustc_index::{IndexVec, newtype_index};
use rustc_middle::bug;
use rustc_middle::mir::interpret::GlobalAlloc;
use rustc_middle::mir::visit::*;
use rustc_middle::mir::*;
use rustc_middle::ty::layout::HasTypingEnv;
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_span::DUMMY_SP;
use smallvec::SmallVec;
use tracing::{debug, instrument, trace};

use crate::ssa::SsaLocals;

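/// Global value numbering: detects when the same value is computed twice and
/// reuses the first computation, replacing redundant rvalues by a copy of a
/// dominating local or by a constant.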
pub(super) struct GVN;

impl<'tcx> crate::MirPass<'tcx> for GVN {
    fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
        sess.mir_opt_level() >= 2
    }

    #[instrument(level = "trace", skip(self, tcx, body))]
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        debug!(def_id = ?body.source.def_id());

        let typing_env = body.typing_env(tcx);
        let ssa = SsaLocals::new(tcx, body, typing_env);
        let dominators = body.basic_blocks.dominators().clone();
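        // A block inside a loop may be reached from a back edge that performs
        // writes through pointers, so record potential loop headers up front.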
        let maybe_loop_headers = loops::maybe_loop_headers(body);

        let mut state = VnState::new(tcx, body, typing_env, &ssa, dominators, &body.local_decls);

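        // Function arguments are not assigned inside the body: model each SSA
        // argument as a fresh opaque value.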
        for local in body.args_iter().filter(|&local| ssa.is_ssa(local)) {
            let opaque = state.new_opaque(body.local_decls[local].ty);
            state.assign(local, opaque);
        }

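        // Visit blocks in reverse postorder so that, outside of loops, a value's
        // definition is processed before its uses.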
        let reverse_postorder = body.basic_blocks.reverse_postorder().to_vec();
        for bb in reverse_postorder {
            if maybe_loop_headers.contains(bb) {
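                // A back edge into this block may have written through a pointer,
                // so cached dereference values cannot be trusted here.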
                state.invalidate_derefs();
            }
            let data = &mut body.basic_blocks.as_mut_preserves_cfg()[bb];
            state.visit_basic_block_data(bb, data);
        }

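        // Reusing a local can extend its live range past its storage markers,
        // so remove the storage statements of all reused locals.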
        StorageRemover { tcx, reused_locals: state.reused_locals }.visit_body_preserves_cfg(body);
    }

    fn is_required(&self) -> bool {
        false
    }
}

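// A `VnIndex` identifies an abstract value: two expressions interned to the
// same `VnIndex` are known to compute equal values.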
newtype_index! {
    struct VnIndex {}
}

#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
enum AddressKind {
    Ref(BorrowKind),
    Address(RawPtrKind),
}

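/// An abstract value, interned in `VnState::values`. Operands of a `Value` are
/// themselves `VnIndex`es, so structurally equal values hash and compare equal.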
#[derive(Debug, PartialEq, Eq, Hash)]
enum Value<'tcx> {
    Opaque(usize),
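    /// A constant. `disambiguator` is 0 for deterministic constants, and a
    /// unique non-zero index otherwise, so that two mentions of a
    /// non-deterministic constant are never merged.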
    Constant {
        value: Const<'tcx>,
        disambiguator: usize,
    },
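    /// An aggregate value, written at the given variant, with the given fields.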
    Aggregate(VariantIdx, Vec<VnIndex>),
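    /// A raw pointer aggregate built from a data pointer and metadata.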
    RawPtr {
        pointer: VnIndex,
        metadata: VnIndex,
    },
    Repeat(VnIndex, ty::Const<'tcx>),
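    /// The address of a place. Two addresses are only known to be equal if
    /// they carry the same `provenance` disambiguator.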
    Address {
        place: Place<'tcx>,
        kind: AddressKind,
        provenance: usize,
    },

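    // Extractions.
    /// The value obtained by projecting another value.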
    Projection(VnIndex, ProjectionElem<VnIndex, ()>),
    Discriminant(VnIndex),
    Len(VnIndex),

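    // Operations.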
    NullaryOp(NullOp<'tcx>, Ty<'tcx>),
    UnaryOp(UnOp, VnIndex),
    BinaryOp(BinOp, VnIndex, VnIndex),
    Cast {
        kind: CastKind,
        value: VnIndex,
    },
}

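/// State of the value-numbering analysis.
///
/// `values` interns each `Value` to a `VnIndex`; `locals` maps each SSA local
/// to the value it holds, and `rev_locals` maps a value back to the locals
/// that hold it. `derefs` tracks the `Projection(_, Deref)` values that must
/// be invalidated whenever memory may have been written to.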
struct VnState<'body, 'tcx> {
    tcx: TyCtxt<'tcx>,
    ecx: InterpCx<'tcx, DummyMachine>,
    local_decls: &'body LocalDecls<'tcx>,
    is_coroutine: bool,
    locals: IndexVec<Local, Option<VnIndex>>,
    rev_locals: IndexVec<VnIndex, SmallVec<[Local; 1]>>,
    values: FxIndexSet<(Value<'tcx>, Ty<'tcx>)>,
    evaluated: IndexVec<VnIndex, Option<OpTy<'tcx>>>,
    next_opaque: usize,
    derefs: Vec<VnIndex>,
    ssa: &'body SsaLocals,
    dominators: Dominators<BasicBlock>,
    reused_locals: DenseBitSet<Local>,
}

impl<'body, 'tcx> VnState<'body, 'tcx> {
    fn new(
        tcx: TyCtxt<'tcx>,
        body: &Body<'tcx>,
        typing_env: ty::TypingEnv<'tcx>,
        ssa: &'body SsaLocals,
        dominators: Dominators<BasicBlock>,
        local_decls: &'body LocalDecls<'tcx>,
    ) -> Self {
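        // Rough estimate of the number of values in the body, used to size the
        // interner up front; it is fine if the estimate is off.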
        let num_values =
            2 * body.basic_blocks.iter().map(|bbdata| bbdata.statements.len()).sum::<usize>()
                + 4 * body.basic_blocks.len();
        VnState {
            tcx,
            ecx: InterpCx::new(tcx, DUMMY_SP, typing_env, DummyMachine),
            local_decls,
            is_coroutine: body.coroutine.is_some(),
            locals: IndexVec::from_elem(None, local_decls),
            rev_locals: IndexVec::with_capacity(num_values),
            values: FxIndexSet::with_capacity_and_hasher(num_values, Default::default()),
            evaluated: IndexVec::with_capacity(num_values),
            next_opaque: 1,
            derefs: Vec::new(),
            ssa,
            dominators,
            reused_locals: DenseBitSet::new_empty(local_decls.len()),
        }
    }

    fn typing_env(&self) -> ty::TypingEnv<'tcx> {
        self.ecx.typing_env()
    }

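    /// Interns `value` with type `ty`, evaluating it to a constant the first
    /// time it is seen.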
    #[instrument(level = "trace", skip(self), ret)]
    fn insert(&mut self, ty: Ty<'tcx>, value: Value<'tcx>) -> VnIndex {
        let (index, new) = self.values.insert_full((value, ty));
        let index = VnIndex::from_usize(index);
        if new {
            let evaluated = self.eval_to_const(index);
            let _index = self.evaluated.push(evaluated);
            debug_assert_eq!(index, _index);
            let _index = self.rev_locals.push(SmallVec::new());
            debug_assert_eq!(index, _index);
        }
        index
    }

    fn next_opaque(&mut self) -> usize {
        let next_opaque = self.next_opaque;
        self.next_opaque += 1;
        next_opaque
    }

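    /// Creates a fresh value that compares equal to no other value: used for
    /// values we know nothing about.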
    #[instrument(level = "trace", skip(self), ret)]
    fn new_opaque(&mut self, ty: Ty<'tcx>) -> VnIndex {
        let value = Value::Opaque(self.next_opaque());
        self.insert(ty, value)
    }

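    /// Creates a new `Address` value for `place`, with a fresh provenance
    /// disambiguator so it is not merged with other addresses.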
    #[instrument(level = "trace", skip(self), ret)]
    fn new_pointer(&mut self, place: Place<'tcx>, kind: AddressKind) -> VnIndex {
        let pty = place.ty(self.local_decls, self.tcx).ty;
        let ty = match kind {
            AddressKind::Ref(bk) => {
                Ty::new_ref(self.tcx, self.tcx.lifetimes.re_erased, pty, bk.to_mutbl_lossy())
            }
            AddressKind::Address(mutbl) => Ty::new_ptr(self.tcx, pty, mutbl.to_mutbl_lossy()),
        };
        let value = Value::Address { place, kind, provenance: self.next_opaque() };
        self.insert(ty, value)
    }

    #[inline]
    fn get(&self, index: VnIndex) -> &Value<'tcx> {
        &self.values.get_index(index.as_usize()).unwrap().0
    }

    #[inline]
    fn ty(&self, index: VnIndex) -> Ty<'tcx> {
        self.values.get_index(index.as_usize()).unwrap().1
    }

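    /// Records that `local` holds `value`, in both directions.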
    #[instrument(level = "trace", skip(self))]
    fn assign(&mut self, local: Local, value: VnIndex) {
        debug_assert!(self.ssa.is_ssa(local));
        self.locals[local] = Some(value);
        self.rev_locals[value].push(local);
    }

    fn insert_constant(&mut self, value: Const<'tcx>) -> VnIndex {
        let disambiguator = if value.is_deterministic() {
            0
        } else {
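            // Multiple mentions of a non-deterministic constant must not be
            // merged: give each mention a distinct disambiguator.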
            let disambiguator = self.next_opaque();
            debug_assert_ne!(disambiguator, 0);
            disambiguator
        };
        self.insert(value.ty(), Value::Constant { value, disambiguator })
    }

    fn insert_bool(&mut self, flag: bool) -> VnIndex {
        let value = Const::from_bool(self.tcx, flag);
        debug_assert!(value.is_deterministic());
        self.insert(self.tcx.types.bool, Value::Constant { value, disambiguator: 0 })
    }

    fn insert_scalar(&mut self, ty: Ty<'tcx>, scalar: Scalar) -> VnIndex {
        let value = Const::from_scalar(self.tcx, scalar, ty);
        debug_assert!(value.is_deterministic());
        self.insert(ty, Value::Constant { value, disambiguator: 0 })
    }

    fn insert_tuple(&mut self, ty: Ty<'tcx>, values: Vec<VnIndex>) -> VnIndex {
        self.insert(ty, Value::Aggregate(VariantIdx::ZERO, values))
    }

    fn insert_deref(&mut self, ty: Ty<'tcx>, value: VnIndex) -> VnIndex {
        let value = self.insert(ty, Value::Projection(value, ProjectionElem::Deref));
        self.derefs.push(value);
        value
    }

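    /// Forgets all `Deref` values: after a write to memory, they may no longer
    /// describe the value currently in memory, so each is replaced by an opaque.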
    fn invalidate_derefs(&mut self) {
        for deref in std::mem::take(&mut self.derefs) {
            let opaque = self.next_opaque();
            self.values.get_index_mut2(deref.index()).unwrap().0 = Value::Opaque(opaque);
        }
    }

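    /// Attempts to evaluate `value` to a constant `OpTy`, using the interpreter
    /// with the `DummyMachine`.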
    #[instrument(level = "trace", skip(self), ret)]
    fn eval_to_const(&mut self, value: VnIndex) -> Option<OpTy<'tcx>> {
        use Value::*;
        let ty = self.ty(value);
        let ty = if !self.is_coroutine || ty.is_scalar() {
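            // Avoid computing layouts inside a coroutine, as that can cause cycles.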
            self.ecx.layout_of(ty).ok()?
        } else {
            return None;
        };
        let op = match *self.get(value) {
            _ if ty.is_zst() => ImmTy::uninit(ty).into(),

            Opaque(_) => return None,
            Repeat(..) => return None,

            Constant { ref value, disambiguator: _ } => {
                self.ecx.eval_mir_constant(value, DUMMY_SP, None).discard_err()?
            }
            Aggregate(variant, ref fields) => {
                let fields = fields
                    .iter()
                    .map(|&f| self.evaluated[f].as_ref())
                    .collect::<Option<Vec<_>>>()?;
                let variant = if ty.ty.is_enum() { Some(variant) } else { None };
                if matches!(ty.backend_repr, BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..))
                {
                    let dest = self.ecx.allocate(ty, MemoryKind::Stack).discard_err()?;
                    let variant_dest = if let Some(variant) = variant {
                        self.ecx.project_downcast(&dest, variant).discard_err()?
                    } else {
                        dest.clone()
                    };
                    for (field_index, op) in fields.into_iter().enumerate() {
                        let field_dest = self
                            .ecx
                            .project_field(&variant_dest, FieldIdx::from_usize(field_index))
                            .discard_err()?;
                        self.ecx.copy_op(op, &field_dest).discard_err()?;
                    }
                    self.ecx
                        .write_discriminant(variant.unwrap_or(FIRST_VARIANT), &dest)
                        .discard_err()?;
                    self.ecx
                        .alloc_mark_immutable(dest.ptr().provenance.unwrap().alloc_id())
                        .discard_err()?;
                    dest.into()
                } else {
                    return None;
                }
            }
            RawPtr { pointer, metadata } => {
                let pointer = self.evaluated[pointer].as_ref()?;
                let metadata = self.evaluated[metadata].as_ref()?;

                let data = self.ecx.read_pointer(pointer).discard_err()?;
                let meta = if metadata.layout.is_zst() {
                    MemPlaceMeta::None
                } else {
                    MemPlaceMeta::Meta(self.ecx.read_scalar(metadata).discard_err()?)
                };
                let ptr_imm = Immediate::new_pointer_with_meta(data, meta, &self.ecx);
                ImmTy::from_immediate(ptr_imm, ty).into()
            }

            Projection(base, elem) => {
                let base = self.evaluated[base].as_ref()?;
                let elem = elem.try_map(|_| None, |()| ty.ty)?;
                self.ecx.project(base, elem).discard_err()?
            }
            Address { place, kind: _, provenance: _ } => {
                if !place.is_indirect_first_projection() {
                    return None;
                }
                let local = self.locals[place.local]?;
                let pointer = self.evaluated[local].as_ref()?;
                let mut mplace = self.ecx.deref_pointer(pointer).discard_err()?;
                for elem in place.projection.iter().skip(1) {
                    let elem = elem.try_map(|_| None, |ty| ty)?;
                    mplace = self.ecx.project(&mplace, elem).discard_err()?;
                }
                let pointer = mplace.to_ref(&self.ecx);
                ImmTy::from_immediate(pointer, ty).into()
            }

            Discriminant(base) => {
                let base = self.evaluated[base].as_ref()?;
                let variant = self.ecx.read_discriminant(base).discard_err()?;
                let discr_value =
                    self.ecx.discriminant_for_variant(base.layout.ty, variant).discard_err()?;
                discr_value.into()
            }
            Len(slice) => {
                let slice = self.evaluated[slice].as_ref()?;
                let len = slice.len(&self.ecx).discard_err()?;
                ImmTy::from_uint(len, ty).into()
            }
            NullaryOp(null_op, arg_ty) => {
                let arg_layout = self.ecx.layout_of(arg_ty).ok()?;
                if let NullOp::SizeOf | NullOp::AlignOf = null_op
                    && arg_layout.is_unsized()
                {
                    return None;
                }
                let val = match null_op {
                    NullOp::SizeOf => arg_layout.size.bytes(),
                    NullOp::AlignOf => arg_layout.align.abi.bytes(),
                    NullOp::OffsetOf(fields) => self
                        .ecx
                        .tcx
                        .offset_of_subfield(self.typing_env(), arg_layout, fields.iter())
                        .bytes(),
                    NullOp::UbChecks => return None,
                    NullOp::ContractChecks => return None,
                };
                ImmTy::from_uint(val, ty).into()
            }
            UnaryOp(un_op, operand) => {
                let operand = self.evaluated[operand].as_ref()?;
                let operand = self.ecx.read_immediate(operand).discard_err()?;
                let val = self.ecx.unary_op(un_op, &operand).discard_err()?;
                val.into()
            }
            BinaryOp(bin_op, lhs, rhs) => {
                let lhs = self.evaluated[lhs].as_ref()?;
                let lhs = self.ecx.read_immediate(lhs).discard_err()?;
                let rhs = self.evaluated[rhs].as_ref()?;
                let rhs = self.ecx.read_immediate(rhs).discard_err()?;
                let val = self.ecx.binary_op(bin_op, &lhs, &rhs).discard_err()?;
                val.into()
            }
            Cast { kind, value } => match kind {
                CastKind::IntToInt | CastKind::IntToFloat => {
                    let value = self.evaluated[value].as_ref()?;
                    let value = self.ecx.read_immediate(value).discard_err()?;
                    let res = self.ecx.int_to_int_or_float(&value, ty).discard_err()?;
                    res.into()
                }
                CastKind::FloatToFloat | CastKind::FloatToInt => {
                    let value = self.evaluated[value].as_ref()?;
                    let value = self.ecx.read_immediate(value).discard_err()?;
                    let res = self.ecx.float_to_float_or_int(&value, ty).discard_err()?;
                    res.into()
                }
                CastKind::Transmute => {
                    let value = self.evaluated[value].as_ref()?;
                    if value.as_mplace_or_imm().is_right() {
                        let can_transmute = match (value.layout.backend_repr, ty.backend_repr) {
                            (BackendRepr::Scalar(s1), BackendRepr::Scalar(s2)) => {
                                s1.size(&self.ecx) == s2.size(&self.ecx)
                                    && !matches!(s1.primitive(), Primitive::Pointer(..))
                            }
                            (BackendRepr::ScalarPair(a1, b1), BackendRepr::ScalarPair(a2, b2)) => {
                                a1.size(&self.ecx) == a2.size(&self.ecx)
                                    && b1.size(&self.ecx) == b2.size(&self.ecx)
                                    && b1.align(&self.ecx) == b2.align(&self.ecx)
                                    && !matches!(a1.primitive(), Primitive::Pointer(..))
                                    && !matches!(b1.primitive(), Primitive::Pointer(..))
                            }
                            _ => false,
                        };
                        if !can_transmute {
                            return None;
                        }
                    }
                    value.offset(Size::ZERO, ty, &self.ecx).discard_err()?
                }
                CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _) => {
                    let src = self.evaluated[value].as_ref()?;
                    let dest = self.ecx.allocate(ty, MemoryKind::Stack).discard_err()?;
                    self.ecx.unsize_into(src, ty, &dest).discard_err()?;
                    self.ecx
                        .alloc_mark_immutable(dest.ptr().provenance.unwrap().alloc_id())
                        .discard_err()?;
                    dest.into()
                }
                CastKind::FnPtrToPtr | CastKind::PtrToPtr => {
                    let src = self.evaluated[value].as_ref()?;
                    let src = self.ecx.read_immediate(src).discard_err()?;
                    let ret = self.ecx.ptr_to_ptr(&src, ty).discard_err()?;
                    ret.into()
                }
                CastKind::PointerCoercion(ty::adjustment::PointerCoercion::UnsafeFnPointer, _) => {
                    let src = self.evaluated[value].as_ref()?;
                    let src = self.ecx.read_immediate(src).discard_err()?;
                    ImmTy::from_immediate(*src, ty).into()
                }
                _ => return None,
            },
        };
        Some(op)
    }

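    /// Projects `value` by `proj`, returning the projected type and value when
    /// the projection can be expressed on values.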
    fn project(
        &mut self,
        place_ty: PlaceTy<'tcx>,
        value: VnIndex,
        proj: PlaceElem<'tcx>,
        from_non_ssa_index: &mut bool,
    ) -> Option<(PlaceTy<'tcx>, VnIndex)> {
        let projection_ty = place_ty.projection_ty(self.tcx, proj);
        let proj = match proj {
            ProjectionElem::Deref => {
                if let Some(Mutability::Not) = place_ty.ty.ref_mutability()
                    && projection_ty.ty.is_freeze(self.tcx, self.typing_env())
                {
                    return Some((projection_ty, self.insert_deref(projection_ty.ty, value)));
                } else {
                    return None;
                }
            }
            ProjectionElem::Downcast(name, index) => ProjectionElem::Downcast(name, index),
            ProjectionElem::Field(f, _) => {
                if let Value::Aggregate(_, fields) = self.get(value) {
                    return Some((projection_ty, fields[f.as_usize()]));
                } else if let Value::Projection(
                    outer_value,
                    ProjectionElem::Downcast(_, read_variant),
                ) = self.get(value)
                    && let Value::Aggregate(written_variant, fields) = self.get(*outer_value)
                    && written_variant == read_variant
                {
                    return Some((projection_ty, fields[f.as_usize()]));
                }
                ProjectionElem::Field(f, ())
            }
            ProjectionElem::Index(idx) => {
                if let Value::Repeat(inner, _) = self.get(value) {
                    *from_non_ssa_index |= self.locals[idx].is_none();
                    return Some((projection_ty, *inner));
                }
                let idx = self.locals[idx]?;
                ProjectionElem::Index(idx)
            }
            ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
                match self.get(value) {
                    Value::Repeat(inner, _) => {
                        return Some((projection_ty, *inner));
                    }
                    Value::Aggregate(_, operands) => {
                        let offset = if from_end {
                            operands.len() - offset as usize
                        } else {
                            offset as usize
                        };
                        let value = operands.get(offset).copied()?;
                        return Some((projection_ty, value));
                    }
                    _ => {}
                };
                ProjectionElem::ConstantIndex { offset, min_length, from_end }
            }
            ProjectionElem::Subslice { from, to, from_end } => {
                ProjectionElem::Subslice { from, to, from_end }
            }
            ProjectionElem::OpaqueCast(_) => ProjectionElem::OpaqueCast(()),
            ProjectionElem::Subtype(_) => ProjectionElem::Subtype(()),
            ProjectionElem::UnwrapUnsafeBinder(_) => ProjectionElem::UnwrapUnsafeBinder(()),
        };

        let value = self.insert(projection_ty.ty, Value::Projection(value, proj));
        Some((projection_ty, value))
    }

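    /// Simplifies the projections of `place` in-place, replacing its base and
    /// index locals with canonical, already-computed locals where possible.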
    #[instrument(level = "trace", skip(self))]
    fn simplify_place_projection(&mut self, place: &mut Place<'tcx>, location: Location) {
        if place.is_indirect_first_projection()
            && let Some(base) = self.locals[place.local]
            && let Some(new_local) = self.try_as_local(base, location)
            && place.local != new_local
        {
            place.local = new_local;
            self.reused_locals.insert(new_local);
        }

        let mut projection = Cow::Borrowed(&place.projection[..]);

        for i in 0..projection.len() {
            let elem = projection[i];
            if let ProjectionElem::Index(idx_local) = elem
                && let Some(idx) = self.locals[idx_local]
            {
                if let Some(offset) = self.evaluated[idx].as_ref()
                    && let Some(offset) = self.ecx.read_target_usize(offset).discard_err()
                    && let Some(min_length) = offset.checked_add(1)
                {
                    projection.to_mut()[i] =
                        ProjectionElem::ConstantIndex { offset, min_length, from_end: false };
                } else if let Some(new_idx_local) = self.try_as_local(idx, location)
                    && idx_local != new_idx_local
                {
                    projection.to_mut()[i] = ProjectionElem::Index(new_idx_local);
                    self.reused_locals.insert(new_idx_local);
                }
            }
        }

        if projection.is_owned() {
            place.projection = self.tcx.mk_place_elems(&projection);
        }

        trace!(?place);
    }

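    /// Computes the value of `place`, and replaces `place` with a simpler,
    /// equivalent place if one is available.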
    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_place_value(
        &mut self,
        place: &mut Place<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        self.simplify_place_projection(place, location);

        let mut place_ref = place.as_ref();

        let mut value = self.locals[place.local]?;
        let mut place_ty = PlaceTy::from_ty(self.local_decls[place.local].ty);
        let mut from_non_ssa_index = false;
        for (index, proj) in place.projection.iter().enumerate() {
            if let Value::Projection(pointer, ProjectionElem::Deref) = *self.get(value)
                && let Value::Address { place: mut pointee, kind, .. } = *self.get(pointer)
                && let AddressKind::Ref(BorrowKind::Shared) = kind
                && let Some(v) = self.simplify_place_value(&mut pointee, location)
            {
                value = v;
                if pointee.projection.iter().all(|elem| !matches!(elem, ProjectionElem::Index(_)))
                {
                    place_ref =
                        pointee.project_deeper(&place.projection[index..], self.tcx).as_ref();
                }
            }
            if let Some(local) = self.try_as_local(value, location) {
                place_ref = PlaceRef { local, projection: &place.projection[index..] };
            }

            (place_ty, value) = self.project(place_ty, value, proj, &mut from_non_ssa_index)?;
        }

        if let Value::Projection(pointer, ProjectionElem::Deref) = *self.get(value)
            && let Value::Address { place: mut pointee, kind, .. } = *self.get(pointer)
            && let AddressKind::Ref(BorrowKind::Shared) = kind
            && let Some(v) = self.simplify_place_value(&mut pointee, location)
        {
            value = v;
            if pointee.projection.iter().all(|elem| !matches!(elem, ProjectionElem::Index(_))) {
                place_ref = pointee.project_deeper(&[], self.tcx).as_ref();
            }
        }
        if let Some(new_local) = self.try_as_local(value, location) {
            place_ref = PlaceRef { local: new_local, projection: &[] };
        } else if from_non_ssa_index {
            return None;
        }

        if place_ref.local != place.local || place_ref.projection.len() < place.projection.len() {
            *place = place_ref.project_deeper(&[], self.tcx);
            self.reused_locals.insert(place_ref.local);
        }

        Some(value)
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_operand(
        &mut self,
        operand: &mut Operand<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        match *operand {
            Operand::Constant(ref constant) => Some(self.insert_constant(constant.const_)),
            Operand::Copy(ref mut place) | Operand::Move(ref mut place) => {
                let value = self.simplify_place_value(place, location)?;
                if let Some(const_) = self.try_as_constant(value) {
                    *operand = Operand::Constant(Box::new(const_));
                }
                Some(value)
            }
        }
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_rvalue(
        &mut self,
        lhs: &Place<'tcx>,
        rvalue: &mut Rvalue<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let value = match *rvalue {
            Rvalue::Use(ref mut operand) => return self.simplify_operand(operand, location),
            Rvalue::CopyForDeref(place) => {
                let mut operand = Operand::Copy(place);
                let val = self.simplify_operand(&mut operand, location);
                *rvalue = Rvalue::Use(operand);
                return val;
            }

            Rvalue::Repeat(ref mut op, amount) => {
                let op = self.simplify_operand(op, location)?;
                Value::Repeat(op, amount)
            }
            Rvalue::NullaryOp(op, ty) => Value::NullaryOp(op, ty),
            Rvalue::Aggregate(..) => return self.simplify_aggregate(lhs, rvalue, location),
            Rvalue::Ref(_, borrow_kind, ref mut place) => {
                self.simplify_place_projection(place, location);
                return Some(self.new_pointer(*place, AddressKind::Ref(borrow_kind)));
            }
            Rvalue::RawPtr(mutbl, ref mut place) => {
                self.simplify_place_projection(place, location);
                return Some(self.new_pointer(*place, AddressKind::Address(mutbl)));
            }
            Rvalue::WrapUnsafeBinder(ref mut op, _) => {
                let value = self.simplify_operand(op, location)?;
                Value::Cast { kind: CastKind::Transmute, value }
            }

            Rvalue::Len(ref mut place) => return self.simplify_len(place, location),
            Rvalue::Cast(ref mut kind, ref mut value, to) => {
                return self.simplify_cast(kind, value, to, location);
            }
            Rvalue::BinaryOp(op, box (ref mut lhs, ref mut rhs)) => {
                return self.simplify_binary(op, lhs, rhs, location);
            }
            Rvalue::UnaryOp(op, ref mut arg_op) => {
                return self.simplify_unary(op, arg_op, location);
            }
            Rvalue::Discriminant(ref mut place) => {
                let place = self.simplify_place_value(place, location)?;
                if let Some(discr) = self.simplify_discriminant(place) {
                    return Some(discr);
                }
                Value::Discriminant(place)
            }

            Rvalue::ThreadLocalRef(..) | Rvalue::ShallowInitBox(..) => return None,
        };
        let ty = rvalue.ty(self.local_decls, self.tcx);
        Some(self.insert(ty, value))
    }

    fn simplify_discriminant(&mut self, place: VnIndex) -> Option<VnIndex> {
        let enum_ty = self.ty(place);
        if enum_ty.is_enum()
            && let Value::Aggregate(variant, _) = *self.get(place)
        {
            let discr = self.ecx.discriminant_for_variant(enum_ty, variant).discard_err()?;
            return Some(self.insert_scalar(discr.layout.ty, discr.to_scalar()));
        }

        None
    }

    fn try_as_place_elem(
        &mut self,
        ty: Ty<'tcx>,
        proj: ProjectionElem<VnIndex, ()>,
        loc: Location,
    ) -> Option<PlaceElem<'tcx>> {
        proj.try_map(
            |value| {
                let local = self.try_as_local(value, loc)?;
                self.reused_locals.insert(local);
                Some(local)
            },
            |()| ty,
        )
    }

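    /// Detects when an aggregate rebuilds an existing value field-by-field, as
    /// in `Foo { a: x.a, b: x.b }`, and returns the value of `x` in that case.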
    fn simplify_aggregate_to_copy(
        &mut self,
        ty: Ty<'tcx>,
        variant_index: VariantIdx,
        fields: &[VnIndex],
    ) -> Option<VnIndex> {
        let Some(&first_field) = fields.first() else { return None };
        let Value::Projection(copy_from_value, _) = *self.get(first_field) else { return None };

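        // Each field must be the corresponding field projection of one and the
        // same base value.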
        if fields.iter().enumerate().any(|(index, &v)| {
            if let Value::Projection(pointer, ProjectionElem::Field(from_index, _)) = *self.get(v)
                && copy_from_value == pointer
                && from_index.index() == index
            {
                return false;
            }
            true
        }) {
            return None;
        }

        let mut copy_from_local_value = copy_from_value;
        if let Value::Projection(pointer, proj) = *self.get(copy_from_value)
            && let ProjectionElem::Downcast(_, read_variant) = proj
        {
            if variant_index == read_variant {
                copy_from_local_value = pointer;
            } else {
                return None;
            }
        }

        if self.ty(copy_from_local_value) == ty { Some(copy_from_local_value) } else { None }
    }

    fn simplify_aggregate(
        &mut self,
        lhs: &Place<'tcx>,
        rvalue: &mut Rvalue<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let tcx = self.tcx;
        let ty = rvalue.ty(self.local_decls, tcx);

        let Rvalue::Aggregate(box ref kind, ref mut field_ops) = *rvalue else { bug!() };

        if field_ops.is_empty() {
            let is_zst = match *kind {
                AggregateKind::Array(..)
                | AggregateKind::Tuple
                | AggregateKind::Closure(..)
                | AggregateKind::CoroutineClosure(..) => true,
                AggregateKind::Adt(did, ..) => tcx.def_kind(did) != DefKind::Enum,
                AggregateKind::Coroutine(..) => false,
                AggregateKind::RawPtr(..) => bug!("MIR for RawPtr aggregate must have 2 fields"),
            };

            if is_zst {
                return Some(self.insert_constant(Const::zero_sized(ty)));
            }
        }

        let fields: Vec<_> = field_ops
            .iter_mut()
            .map(|op| {
                self.simplify_operand(op, location)
                    .unwrap_or_else(|| self.new_opaque(op.ty(self.local_decls, self.tcx)))
            })
            .collect();

        let variant_index = match *kind {
            AggregateKind::Array(..) | AggregateKind::Tuple => {
                assert!(!field_ops.is_empty());
                FIRST_VARIANT
            }
            AggregateKind::Closure(..)
            | AggregateKind::CoroutineClosure(..)
            | AggregateKind::Coroutine(..) => FIRST_VARIANT,
            AggregateKind::Adt(_, variant_index, _, _, None) => variant_index,
            AggregateKind::Adt(_, _, _, _, Some(_)) => return None,
            AggregateKind::RawPtr(..) => {
                assert_eq!(field_ops.len(), 2);
                let [mut pointer, metadata] = fields.try_into().unwrap();

                let mut was_updated = false;
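                // Any thin pointer of matching mutability is fine as the data pointer.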
                while let Value::Cast { kind: CastKind::PtrToPtr, value: cast_value } =
                        self.get(pointer)
                    && let ty::RawPtr(from_pointee_ty, from_mtbl) = self.ty(*cast_value).kind()
                    && let ty::RawPtr(_, output_mtbl) = ty.kind()
                    && from_mtbl == output_mtbl
                    && from_pointee_ty.is_sized(self.tcx, self.typing_env())
                {
                    pointer = *cast_value;
                    was_updated = true;
                }

                if was_updated && let Some(op) = self.try_as_operand(pointer, location) {
                    field_ops[FieldIdx::ZERO] = op;
                }

                return Some(self.insert(ty, Value::RawPtr { pointer, metadata }));
            }
        };

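        // A large array of equal elements is better represented as a `Repeat`
        // rvalue, both for codegen and for further simplification.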
        if ty.is_array()
            && fields.len() > 4
            && let Ok(&first) = fields.iter().all_equal_value()
        {
            let len = ty::Const::from_target_usize(self.tcx, fields.len().try_into().unwrap());
            if let Some(op) = self.try_as_operand(first, location) {
                *rvalue = Rvalue::Repeat(op, len);
            }
            return Some(self.insert(ty, Value::Repeat(first, len)));
        }

        if let Some(value) = self.simplify_aggregate_to_copy(ty, variant_index, &fields) {
            let allow_complex_projection =
                lhs.projection[..].iter().all(PlaceElem::is_stable_offset);
            if let Some(place) = self.try_as_place(value, location, allow_complex_projection) {
                self.reused_locals.insert(place.local);
                *rvalue = Rvalue::Use(Operand::Copy(place));
            }
            return Some(value);
        }

        Some(self.insert(ty, Value::Aggregate(variant_index, fields)))
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_unary(
        &mut self,
        op: UnOp,
        arg_op: &mut Operand<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let mut arg_index = self.simplify_operand(arg_op, location)?;
        let arg_ty = self.ty(arg_index);
        let ret_ty = op.ty(self.tcx, arg_ty);

        if op == UnOp::PtrMetadata {
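            // Peel casts and borrows that do not change the pointer's metadata.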
            let mut was_updated = false;
            loop {
                match self.get(arg_index) {
                    Value::Cast { kind: CastKind::PtrToPtr, value: inner }
                        if self.pointers_have_same_metadata(self.ty(*inner), arg_ty) =>
                    {
                        arg_index = *inner;
                        was_updated = true;
                        continue;
                    }

                    Value::Address { place, kind: _, provenance: _ }
                        if let PlaceRef { local, projection: [PlaceElem::Deref] } =
                            place.as_ref()
                            && let Some(local_index) = self.locals[local] =>
                    {
                        arg_index = local_index;
                        was_updated = true;
                        continue;
                    }

                    _ => {
                        if was_updated && let Some(op) = self.try_as_operand(arg_index, location) {
                            *arg_op = op;
                        }
                        break;
                    }
                }
            }
        }

        let value = match (op, self.get(arg_index)) {
            (UnOp::Not, Value::UnaryOp(UnOp::Not, inner)) => return Some(*inner),
            (UnOp::Neg, Value::UnaryOp(UnOp::Neg, inner)) => return Some(*inner),
            (UnOp::Not, Value::BinaryOp(BinOp::Eq, lhs, rhs)) => {
                Value::BinaryOp(BinOp::Ne, *lhs, *rhs)
            }
            (UnOp::Not, Value::BinaryOp(BinOp::Ne, lhs, rhs)) => {
                Value::BinaryOp(BinOp::Eq, *lhs, *rhs)
            }
            (UnOp::PtrMetadata, Value::RawPtr { metadata, .. }) => return Some(*metadata),
            (
                UnOp::PtrMetadata,
                Value::Cast {
                    kind: CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _),
                    value: inner,
                },
            ) if let ty::Slice(..) = arg_ty.builtin_deref(true).unwrap().kind()
                && let ty::Array(_, len) = self.ty(*inner).builtin_deref(true).unwrap().kind() =>
            {
                return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len)));
            }
            _ => Value::UnaryOp(op, arg_index),
        };
        Some(self.insert(ret_ty, value))
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_binary(
        &mut self,
        op: BinOp,
        lhs_operand: &mut Operand<'tcx>,
        rhs_operand: &mut Operand<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let lhs = self.simplify_operand(lhs_operand, location);
        let rhs = self.simplify_operand(rhs_operand, location);

        let mut lhs = lhs?;
        let mut rhs = rhs?;

        let lhs_ty = self.ty(lhs);

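        // If both operands are `PtrToPtr` casts from the same source type with
        // identical metadata, compare the uncast pointers directly.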
        if let BinOp::Eq | BinOp::Ne | BinOp::Lt | BinOp::Le | BinOp::Gt | BinOp::Ge = op
            && lhs_ty.is_any_ptr()
            && let Value::Cast { kind: CastKind::PtrToPtr, value: lhs_value } = self.get(lhs)
            && let Value::Cast { kind: CastKind::PtrToPtr, value: rhs_value } = self.get(rhs)
            && let lhs_from = self.ty(*lhs_value)
            && lhs_from == self.ty(*rhs_value)
            && self.pointers_have_same_metadata(lhs_from, lhs_ty)
        {
            lhs = *lhs_value;
            rhs = *rhs_value;
            if let Some(lhs_op) = self.try_as_operand(lhs, location)
                && let Some(rhs_op) = self.try_as_operand(rhs, location)
            {
                *lhs_operand = lhs_op;
                *rhs_operand = rhs_op;
            }
        }

        if let Some(value) = self.simplify_binary_inner(op, lhs_ty, lhs, rhs) {
            return Some(value);
        }
        let ty = op.ty(self.tcx, lhs_ty, self.ty(rhs));
        let value = Value::BinaryOp(op, lhs, rhs);
        Some(self.insert(ty, value))
    }

    fn simplify_binary_inner(
        &mut self,
        op: BinOp,
        lhs_ty: Ty<'tcx>,
        lhs: VnIndex,
        rhs: VnIndex,
    ) -> Option<VnIndex> {
        let reasonable_ty =
            lhs_ty.is_integral() || lhs_ty.is_bool() || lhs_ty.is_char() || lhs_ty.is_any_ptr();
        if !reasonable_ty {
            return None;
        }

        let layout = self.ecx.layout_of(lhs_ty).ok()?;

        let as_bits = |value: VnIndex| {
            let constant = self.evaluated[value].as_ref()?;
            if layout.backend_repr.is_scalar() {
                let scalar = self.ecx.read_scalar(constant).discard_err()?;
                scalar.to_bits(constant.layout.size).discard_err()
            } else {
                None
            }
        };

        use Either::{Left, Right};
        let a = as_bits(lhs).map_or(Right(lhs), Left);
        let b = as_bits(rhs).map_or(Right(rhs), Left);

        let result = match (op, a, b) {
            (
                BinOp::Add
                | BinOp::AddWithOverflow
                | BinOp::AddUnchecked
                | BinOp::BitOr
                | BinOp::BitXor,
                Left(0),
                Right(p),
            )
            | (
                BinOp::Add
                | BinOp::AddWithOverflow
                | BinOp::AddUnchecked
                | BinOp::BitOr
                | BinOp::BitXor
                | BinOp::Sub
                | BinOp::SubWithOverflow
                | BinOp::SubUnchecked
                | BinOp::Offset
                | BinOp::Shl
                | BinOp::Shr,
                Right(p),
                Left(0),
            )
            | (BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked, Left(1), Right(p))
            | (
                BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked | BinOp::Div,
                Right(p),
                Left(1),
            ) => p,
            (BinOp::BitAnd, Right(p), Left(ones)) | (BinOp::BitAnd, Left(ones), Right(p))
                if ones == layout.size.truncate(u128::MAX)
                    || (layout.ty.is_bool() && ones == 1) =>
            {
                p
            }
            (
                BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked | BinOp::BitAnd,
                _,
                Left(0),
            )
            | (BinOp::Rem, _, Left(1))
            | (
                BinOp::Mul
                | BinOp::MulWithOverflow
                | BinOp::MulUnchecked
                | BinOp::Div
                | BinOp::Rem
                | BinOp::BitAnd
                | BinOp::Shl
                | BinOp::Shr,
                Left(0),
                _,
            ) => self.insert_scalar(lhs_ty, Scalar::from_uint(0u128, layout.size)),
            (BinOp::BitOr, _, Left(ones)) | (BinOp::BitOr, Left(ones), _)
                if ones == layout.size.truncate(u128::MAX)
                    || (layout.ty.is_bool() && ones == 1) =>
            {
                self.insert_scalar(lhs_ty, Scalar::from_uint(ones, layout.size))
            }
            (BinOp::Sub | BinOp::SubWithOverflow | BinOp::SubUnchecked | BinOp::BitXor, a, b)
                if a == b =>
            {
                self.insert_scalar(lhs_ty, Scalar::from_uint(0u128, layout.size))
            }
            (BinOp::Eq, Left(a), Left(b)) => self.insert_bool(a == b),
            (BinOp::Eq, a, b) if a == b => self.insert_bool(true),
            (BinOp::Ne, Left(a), Left(b)) => self.insert_bool(a != b),
            (BinOp::Ne, a, b) if a == b => self.insert_bool(false),
            _ => return None,
        };

        if op.is_overflowing() {
            let ty = Ty::new_tup(self.tcx, &[self.ty(result), self.tcx.types.bool]);
            let false_val = self.insert_bool(false);
            Some(self.insert_tuple(ty, vec![result, false_val]))
        } else {
            Some(result)
        }
    }

    fn simplify_cast(
        &mut self,
        initial_kind: &mut CastKind,
        initial_operand: &mut Operand<'tcx>,
        to: Ty<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        use CastKind::*;
        use rustc_middle::ty::adjustment::PointerCoercion::*;

        let mut kind = *initial_kind;
        let mut value = self.simplify_operand(initial_operand, location)?;
        let mut from = self.ty(value);
        if from == to {
            return Some(value);
        }

        if let CastKind::PointerCoercion(ReifyFnPointer | ClosureFnPointer(_), _) = kind {
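            // Each reification of a generic fn may get a different pointer:
            // do not try to merge them.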
            return Some(self.new_opaque(to));
        }

        let mut was_ever_updated = false;
        loop {
            let mut was_updated_this_iteration = false;

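            // Transmuting between raw pointers is just a pointer cast as long
            // as they have the same metadata type.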
            if let Transmute = kind
                && from.is_raw_ptr()
                && to.is_raw_ptr()
                && self.pointers_have_same_metadata(from, to)
            {
                kind = PtrToPtr;
                was_updated_this_iteration = true;
            }

            if let PtrToPtr = kind
                && let Value::RawPtr { pointer, .. } = self.get(value)
                && let ty::RawPtr(to_pointee, _) = to.kind()
                && to_pointee.is_sized(self.tcx, self.typing_env())
            {
                from = self.ty(*pointer);
                value = *pointer;
                was_updated_this_iteration = true;
                if from == to {
                    return Some(*pointer);
                }
            }

            if let Transmute = kind
                && let Value::Aggregate(variant_idx, field_values) = self.get(value)
                && let Some((field_idx, field_ty)) =
                    self.value_is_all_in_one_field(from, *variant_idx)
            {
                from = field_ty;
                value = field_values[field_idx.as_usize()];
                was_updated_this_iteration = true;
                if field_ty == to {
                    return Some(value);
                }
            }

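            // Combine a cast of a cast into a single cast when that is sound.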
            if let Value::Cast { kind: inner_kind, value: inner_value } = *self.get(value) {
                let inner_from = self.ty(inner_value);
                let new_kind = match (inner_kind, kind) {
                    (PtrToPtr, PtrToPtr) => Some(PtrToPtr),
                    (PtrToPtr, Transmute) if self.pointers_have_same_metadata(inner_from, from) => {
                        Some(Transmute)
                    }
                    (Transmute, PtrToPtr) if self.pointers_have_same_metadata(from, to) => {
                        Some(Transmute)
                    }
                    (Transmute, Transmute)
                        if !self.type_may_have_niche_of_interest_to_backend(from) =>
                    {
                        Some(Transmute)
                    }
                    _ => None,
                };
                if let Some(new_kind) = new_kind {
                    kind = new_kind;
                    from = inner_from;
                    value = inner_value;
                    was_updated_this_iteration = true;
                    if inner_from == to {
                        return Some(inner_value);
                    }
                }
            }

            if was_updated_this_iteration {
                was_ever_updated = true;
            } else {
                break;
            }
        }

        if was_ever_updated && let Some(op) = self.try_as_operand(value, location) {
            *initial_operand = op;
            *initial_kind = kind;
        }

        Some(self.insert(to, Value::Cast { kind, value }))
    }

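    /// Simplifies `Len(place)`, looking through borrows and unsizing casts to
    /// find a statically known length.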
    fn simplify_len(&mut self, place: &mut Place<'tcx>, location: Location) -> Option<VnIndex> {
        let place_ty = place.ty(self.local_decls, self.tcx).ty;
        if let ty::Array(_, len) = place_ty.kind() {
            return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len)));
        }

        let mut inner = self.simplify_place_value(place, location)?;

        while let Value::Address { place: borrowed, .. } = self.get(inner)
            && let [PlaceElem::Deref] = borrowed.projection[..]
            && let Some(borrowed) = self.locals[borrowed.local]
        {
            inner = borrowed;
        }

        if let Value::Cast { kind, value: from } = self.get(inner)
            && let CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _) = kind
            && let Some(from) = self.ty(*from).builtin_deref(true)
            && let ty::Array(_, len) = from.kind()
            && let Some(to) = self.ty(inner).builtin_deref(true)
            && let ty::Slice(..) = to.kind()
        {
            return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len)));
        }

        Some(self.insert(self.tcx.types.usize, Value::Len(inner)))
    }

    fn pointers_have_same_metadata(&self, left_ptr_ty: Ty<'tcx>, right_ptr_ty: Ty<'tcx>) -> bool {
        let left_meta_ty = left_ptr_ty.pointee_metadata_ty_or_projection(self.tcx);
        let right_meta_ty = right_ptr_ty.pointee_metadata_ty_or_projection(self.tcx);
        if left_meta_ty == right_meta_ty {
            true
        } else if let Ok(left) =
            self.tcx.try_normalize_erasing_regions(self.typing_env(), left_meta_ty)
            && let Ok(right) =
                self.tcx.try_normalize_erasing_regions(self.typing_env(), right_meta_ty)
        {
            left == right
        } else {
            false
        }
    }

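    /// Returns `true` if `ty` has a validity invariant the backend might rely
    /// on. Transmute chains are only collapsed through types without such
    /// invariants, since transmuting through a type asserts its validity.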
    fn type_may_have_niche_of_interest_to_backend(&self, ty: Ty<'tcx>) -> bool {
        let Ok(layout) = self.ecx.layout_of(ty) else {
            return true;
        };

        if layout.uninhabited {
            return true;
        }

        match layout.backend_repr {
            BackendRepr::Scalar(a) => !a.is_always_valid(&self.ecx),
            BackendRepr::ScalarPair(a, b) => {
                !a.is_always_valid(&self.ecx) || !b.is_always_valid(&self.ecx)
            }
            BackendRepr::SimdVector { .. } | BackendRepr::Memory { .. } => false,
        }
    }

    fn value_is_all_in_one_field(
        &self,
        ty: Ty<'tcx>,
        variant: VariantIdx,
    ) -> Option<(FieldIdx, Ty<'tcx>)> {
        if let Ok(layout) = self.ecx.layout_of(ty)
            && let abi::Variants::Single { index } = layout.variants
            && index == variant
            && let Some((field_idx, field_layout)) = layout.non_1zst_field(&self.ecx)
            && layout.size == field_layout.size
        {
            Some((field_idx, field_layout.ty))
        } else if let ty::Adt(adt, args) = ty.kind()
            && adt.is_struct()
            && adt.repr().transparent()
            && let [single_field] = adt.non_enum_variant().fields.raw.as_slice()
        {
            Some((FieldIdx::ZERO, single_field.ty(self.tcx, args)))
        } else {
            None
        }
    }
}

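/// Tries to extract a `ConstValue` from an evaluated operand, so it can be
/// written back into the MIR as a constant.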
fn op_to_prop_const<'tcx>(
    ecx: &mut InterpCx<'tcx, DummyMachine>,
    op: &OpTy<'tcx>,
) -> Option<ConstValue> {
    if op.layout.is_unsized() {
        return None;
    }

    if op.layout.is_zst() {
        return Some(ConstValue::ZeroSized);
    }

    if !matches!(op.layout.backend_repr, BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..)) {
        return None;
    }

    if let BackendRepr::Scalar(abi::Scalar::Initialized { .. }) = op.layout.backend_repr
        && let Some(scalar) = ecx.read_scalar(op).discard_err()
    {
        if scalar.try_to_scalar_int().is_err() {
            return None;
        }
        return Some(ConstValue::Scalar(scalar));
    }

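    // If the value already lives in an allocation without provenance, intern
    // that allocation and reuse it instead of creating a new one.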
    if let Either::Left(mplace) = op.as_mplace_or_imm() {
        let (size, _align) = ecx.size_and_align_of_val(&mplace).discard_err()??;

        let alloc_ref = ecx.get_ptr_alloc(mplace.ptr(), size).discard_err()??;
        if alloc_ref.has_provenance() {
            return None;
        }

        let pointer = mplace.ptr().into_pointer_or_addr().ok()?;
        let (prov, offset) = pointer.prov_and_relative_offset();
        let alloc_id = prov.alloc_id();
        intern_const_alloc_for_constprop(ecx, alloc_id).discard_err()?;

        if let GlobalAlloc::Memory(alloc) = ecx.tcx.global_alloc(alloc_id)
            && alloc.inner().align >= op.layout.align.abi
        {
            return Some(ConstValue::Indirect { alloc_id, offset });
        }
    }

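    // Otherwise, intern a fresh allocation, but only keep it if the copied
    // value carries no provenance.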
    let alloc_id =
        ecx.intern_with_temp_alloc(op.layout, |ecx, dest| ecx.copy_op(op, dest)).discard_err()?;
    let value = ConstValue::Indirect { alloc_id, offset: Size::ZERO };

    if ecx.tcx.global_alloc(alloc_id).unwrap_memory().inner().provenance().ptrs().is_empty() {
        return Some(value);
    }

    None
}

impl<'tcx> VnState<'_, 'tcx> {
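    /// Returns `index` as an operand: a constant if possible, otherwise a copy
    /// of a dominating local known to hold it.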
    fn try_as_operand(&mut self, index: VnIndex, location: Location) -> Option<Operand<'tcx>> {
        if let Some(const_) = self.try_as_constant(index) {
            Some(Operand::Constant(Box::new(const_)))
        } else if let Some(place) = self.try_as_place(index, location, false) {
            self.reused_locals.insert(place.local);
            Some(Operand::Copy(place))
        } else {
            None
        }
    }

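    /// Returns `index` as a MIR constant, if it is both evaluatable and
    /// propagatable (in particular, provenance-free).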
    fn try_as_constant(&mut self, index: VnIndex) -> Option<ConstOperand<'tcx>> {
        if let Value::Constant { value, disambiguator: 0 } = *self.get(index) {
            debug_assert!(value.is_deterministic());
            return Some(ConstOperand { span: DUMMY_SP, user_ty: None, const_: value });
        }

        let op = self.evaluated[index].as_ref()?;
        if op.layout.is_unsized() {
            return None;
        }

        let value = op_to_prop_const(&mut self.ecx, op)?;

        assert!(!value.may_have_provenance(self.tcx, op.layout.size));

        let const_ = Const::Val(value, op.layout.ty);
        Some(ConstOperand { span: DUMMY_SP, user_ty: None, const_ })
    }

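    /// Builds a place whose value is `index` at `loc`, by walking backwards
    /// through `Projection` values until a dominating local is found.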
    #[instrument(level = "trace", skip(self), ret)]
    fn try_as_place(
        &mut self,
        mut index: VnIndex,
        loc: Location,
        allow_complex_projection: bool,
    ) -> Option<Place<'tcx>> {
        let mut projection = SmallVec::<[PlaceElem<'tcx>; 1]>::new();
        loop {
            if let Some(local) = self.try_as_local(index, loc) {
                projection.reverse();
                let place =
                    Place { local, projection: self.tcx.mk_place_elems(projection.as_slice()) };
                return Some(place);
            } else if projection.last() == Some(&PlaceElem::Deref) {
                return None;
            } else if let Value::Projection(pointer, proj) = *self.get(index)
                && (allow_complex_projection || proj.is_stable_offset())
                && let Some(proj) = self.try_as_place_elem(self.ty(index), proj, loc)
            {
                projection.push(proj);
                index = pointer;
            } else {
                return None;
            }
        }
    }

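    /// Returns a local holding `index` whose assignment dominates `loc`, if any.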
    fn try_as_local(&mut self, index: VnIndex, loc: Location) -> Option<Local> {
        let other = self.rev_locals.get(index)?;
        other
            .iter()
            .find(|&&other| self.ssa.assignment_dominates(&self.dominators, other, loc))
            .copied()
    }
}

impl<'tcx> MutVisitor<'tcx> for VnState<'_, 'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    fn visit_place(&mut self, place: &mut Place<'tcx>, context: PlaceContext, location: Location) {
        self.simplify_place_projection(place, location);
        if context.is_mutating_use() && place.is_indirect() {
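            // A mutation through a pointer may change any value behind a deref.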
            self.invalidate_derefs();
        }
        self.super_place(place, context, location);
    }

    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) {
        self.simplify_operand(operand, location);
        self.super_operand(operand, location);
    }

    fn visit_assign(
        &mut self,
        lhs: &mut Place<'tcx>,
        rvalue: &mut Rvalue<'tcx>,
        location: Location,
    ) {
        self.simplify_place_projection(lhs, location);

        let value = self.simplify_rvalue(lhs, rvalue, location);
        if let Some(value) = value {
            if let Some(const_) = self.try_as_constant(value) {
                *rvalue = Rvalue::Use(Operand::Constant(Box::new(const_)));
            } else if let Some(place) = self.try_as_place(value, location, false)
                && *rvalue != Rvalue::Use(Operand::Move(place))
                && *rvalue != Rvalue::Use(Operand::Copy(place))
            {
                *rvalue = Rvalue::Use(Operand::Copy(place));
                self.reused_locals.insert(place.local);
            }
        }

        if lhs.is_indirect() {
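            // A write through a pointer invalidates all cached dereference values.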
            self.invalidate_derefs();
        }

        if let Some(local) = lhs.as_local()
            && self.ssa.is_ssa(local)
            && let rvalue_ty = rvalue.ty(self.local_decls, self.tcx)
            && self.local_decls[local].ty == rvalue_ty
        {
            let value = value.unwrap_or_else(|| self.new_opaque(rvalue_ty));
            self.assign(local, value);
        }
    }

    fn visit_terminator(&mut self, terminator: &mut Terminator<'tcx>, location: Location) {
        if let Terminator { kind: TerminatorKind::Call { destination, .. }, .. } = terminator {
            if let Some(local) = destination.as_local()
                && self.ssa.is_ssa(local)
            {
                let ty = self.local_decls[local].ty;
                let opaque = self.new_opaque(ty);
                self.assign(local, opaque);
            }
        }
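        // Most terminators (calls, drops, inline asm) may write to memory, so
        // only terminators that cannot are safe to carry deref values across.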
        let safe_to_preserve_derefs = matches!(
            terminator.kind,
            TerminatorKind::SwitchInt { .. } | TerminatorKind::Goto { .. }
        );
        if !safe_to_preserve_derefs {
            self.invalidate_derefs();
        }
        self.super_terminator(terminator, location);
    }
}

struct StorageRemover<'tcx> {
    tcx: TyCtxt<'tcx>,
    reused_locals: DenseBitSet<Local>,
}

impl<'tcx> MutVisitor<'tcx> for StorageRemover<'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, _: Location) {
        if let Operand::Move(place) = *operand
            && !place.is_indirect_first_projection()
            && self.reused_locals.contains(place.local)
        {
            *operand = Operand::Copy(place);
        }
    }

    fn visit_statement(&mut self, stmt: &mut Statement<'tcx>, loc: Location) {
        match stmt.kind {
            StatementKind::StorageLive(l) | StatementKind::StorageDead(l)
                if self.reused_locals.contains(l) =>
            {
                stmt.make_nop()
            }
            _ => self.super_statement(stmt, loc),
        }
    }
}