use std::ops;

use tracing::{debug, instrument};

use super::interpret::GlobalAlloc;
use super::*;
use crate::ty::CoroutineArgsExt;

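/// A single statement within a basic block of a MIR body, carrying the source
/// information it was lowered from and any statement-level debug info attached
/// to it.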
#[derive(Clone, TyEncodable, TyDecodable, HashStable, TypeFoldable, TypeVisitable)]
#[non_exhaustive]
pub struct Statement<'tcx> {
    pub source_info: SourceInfo,
    pub kind: StatementKind<'tcx>,
    pub debuginfos: StmtDebugInfos<'tcx>,
}

impl<'tcx> Statement<'tcx> {
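    /// Replaces the kind of this statement with `StatementKind::Nop`. Unless
    /// `drop_debuginfo` is set, the replaced statement is converted into
    /// statement-level debug info and pushed onto `self.debuginfos`; this
    /// currently only works for kinds that `StatementKind::as_debuginfo`
    /// supports, and ICEs otherwise.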
    pub fn make_nop(&mut self, drop_debuginfo: bool) {
        if matches!(self.kind, StatementKind::Nop) {
            return;
        }
        let replaced_stmt = std::mem::replace(&mut self.kind, StatementKind::Nop);
        if !drop_debuginfo {
            let Some(debuginfo) = replaced_stmt.as_debuginfo() else {
                bug!("debuginfo is not yet supported.")
            };
            self.debuginfos.push(debuginfo);
        }
    }

    pub fn new(source_info: SourceInfo, kind: StatementKind<'tcx>) -> Self {
        Statement { source_info, kind, debuginfos: StmtDebugInfos::default() }
    }
}

impl<'tcx> StatementKind<'tcx> {
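    /// Returns a short, static name for this statement kind.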
    pub const fn name(&self) -> &'static str {
        match self {
            StatementKind::Assign(..) => "Assign",
            StatementKind::FakeRead(..) => "FakeRead",
            StatementKind::SetDiscriminant { .. } => "SetDiscriminant",
            StatementKind::StorageLive(..) => "StorageLive",
            StatementKind::StorageDead(..) => "StorageDead",
            StatementKind::Retag(..) => "Retag",
            StatementKind::PlaceMention(..) => "PlaceMention",
            StatementKind::AscribeUserType(..) => "AscribeUserType",
            StatementKind::Coverage(..) => "Coverage",
            StatementKind::Intrinsic(..) => "Intrinsic",
            StatementKind::ConstEvalCounter => "ConstEvalCounter",
            StatementKind::Nop => "Nop",
            StatementKind::BackwardIncompatibleDropHint { .. } => "BackwardIncompatibleDropHint",
        }
    }

    pub fn as_assign_mut(&mut self) -> Option<&mut (Place<'tcx>, Rvalue<'tcx>)> {
        match self {
            StatementKind::Assign(x) => Some(x),
            _ => None,
        }
    }

    pub fn as_assign(&self) -> Option<&(Place<'tcx>, Rvalue<'tcx>)> {
        match self {
            StatementKind::Assign(x) => Some(x),
            _ => None,
        }
    }

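    /// Attempts to convert this statement into statement-level debug info.
    /// Only assignments of the form `local = &place` are currently supported.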
    pub fn as_debuginfo(&self) -> Option<StmtDebugInfo<'tcx>> {
        match self {
            StatementKind::Assign(box (place, Rvalue::Ref(_, _, ref_place)))
                if let Some(local) = place.as_local() =>
            {
                Some(StmtDebugInfo::AssignRef(local, *ref_place))
            }
            _ => None,
        }
    }
}

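/// The type of a [`Place`], together with the variant index when the place is
/// the downcast of an enum (or coroutine) into one of its variants.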
#[derive(Copy, Clone, Debug, TypeFoldable, TypeVisitable)]
pub struct PlaceTy<'tcx> {
    pub ty: Ty<'tcx>,
    pub variant_index: Option<VariantIdx>,
}

#[cfg(target_pointer_width = "64")]
rustc_data_structures::static_assert_size!(PlaceTy<'_>, 16);

impl<'tcx> PlaceTy<'tcx> {
    #[inline]
    pub fn from_ty(ty: Ty<'tcx>) -> PlaceTy<'tcx> {
        PlaceTy { ty, variant_index: None }
    }

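    /// Computes the type of the field `f` of `self_ty`, looked up in the given
    /// variant for enums and coroutines, or in the sole variant otherwise.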
    #[instrument(level = "debug", skip(tcx), ret)]
    pub fn field_ty(
        tcx: TyCtxt<'tcx>,
        self_ty: Ty<'tcx>,
        variant_idx: Option<VariantIdx>,
        f: FieldIdx,
    ) -> Ty<'tcx> {
        if let Some(variant_index) = variant_idx {
            match *self_ty.kind() {
                ty::Adt(adt_def, args) if adt_def.is_enum() => {
                    adt_def.variant(variant_index).fields[f].ty(tcx, args)
                }
                ty::Coroutine(def_id, args) => {
                    let mut variants = args.as_coroutine().state_tys(def_id, tcx);
                    let Some(mut variant) = variants.nth(variant_index.into()) else {
                        bug!("variant {variant_index:?} of coroutine out of range: {self_ty:?}");
                    };

                    variant.nth(f.index()).unwrap_or_else(|| {
                        bug!("field {f:?} out of range of variant: {self_ty:?} {variant_idx:?}")
                    })
                }
                _ => bug!("can't downcast non-adt non-coroutine type: {self_ty:?}"),
            }
        } else {
            match self_ty.kind() {
                ty::Adt(adt_def, args) if !adt_def.is_enum() => {
                    adt_def.non_enum_variant().fields[f].ty(tcx, args)
                }
                ty::Closure(_, args) => args
                    .as_closure()
                    .upvar_tys()
                    .get(f.index())
                    .copied()
                    .unwrap_or_else(|| bug!("field {f:?} out of range: {self_ty:?}")),
                ty::CoroutineClosure(_, args) => args
                    .as_coroutine_closure()
                    .upvar_tys()
                    .get(f.index())
                    .copied()
                    .unwrap_or_else(|| bug!("field {f:?} out of range: {self_ty:?}")),
                ty::Coroutine(_, args) => {
                    args.as_coroutine().prefix_tys().get(f.index()).copied().unwrap_or_else(|| {
                        bug!("field {f:?} out of range of prefixes for {self_ty}")
                    })
                }
                ty::Tuple(tys) => tys
                    .get(f.index())
                    .copied()
                    .unwrap_or_else(|| bug!("field {f:?} out of range: {self_ty:?}")),
                _ => bug!("can't project out of {self_ty:?}"),
            }
        }
    }

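    /// Applies a sequence of projection elements to this place type, returning
    /// the resulting place type.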
    pub fn multi_projection_ty(
        self,
        tcx: TyCtxt<'tcx>,
        elems: &[PlaceElem<'tcx>],
    ) -> PlaceTy<'tcx> {
        elems.iter().fold(self, |place_ty, &elem| place_ty.projection_ty(tcx, elem))
    }

    pub fn projection_ty<V: ::std::fmt::Debug>(
        self,
        tcx: TyCtxt<'tcx>,
        elem: ProjectionElem<V, Ty<'tcx>>,
    ) -> PlaceTy<'tcx> {
        self.projection_ty_core(tcx, &elem, |ty| ty, |_, _, _, ty| ty, |ty| ty)
    }

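    /// `projection_ty`, but with custom hooks: `structurally_normalize` is
    /// applied to the base type before projecting, `handle_field` computes the
    /// type of a field projection, and `handle_opaque_cast_and_subtype`
    /// computes the type of opaque-type casts and unsafe-binder unwraps.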
    pub fn projection_ty_core<V, T>(
        self,
        tcx: TyCtxt<'tcx>,
        elem: &ProjectionElem<V, T>,
        mut structurally_normalize: impl FnMut(Ty<'tcx>) -> Ty<'tcx>,
        mut handle_field: impl FnMut(Ty<'tcx>, Option<VariantIdx>, FieldIdx, T) -> Ty<'tcx>,
        mut handle_opaque_cast_and_subtype: impl FnMut(T) -> Ty<'tcx>,
    ) -> PlaceTy<'tcx>
    where
        V: ::std::fmt::Debug,
        T: ::std::fmt::Debug + Copy,
    {
        if self.variant_index.is_some() && !matches!(elem, ProjectionElem::Field(..)) {
            bug!("cannot use non-field projection on downcasted place")
        }
        let answer = match *elem {
            ProjectionElem::Deref => {
                let ty = structurally_normalize(self.ty).builtin_deref(true).unwrap_or_else(|| {
                    bug!("deref projection of non-dereferenceable ty {:?}", self)
                });
                PlaceTy::from_ty(ty)
            }
            ProjectionElem::Index(_) | ProjectionElem::ConstantIndex { .. } => {
                PlaceTy::from_ty(structurally_normalize(self.ty).builtin_index().unwrap())
            }
            ProjectionElem::Subslice { from, to, from_end } => {
                PlaceTy::from_ty(match structurally_normalize(self.ty).kind() {
                    ty::Slice(..) => self.ty,
                    ty::Array(inner, _) if !from_end => Ty::new_array(tcx, *inner, to - from),
                    ty::Array(inner, size) if from_end => {
                        let size = size
                            .try_to_target_usize(tcx)
                            .expect("expected subslice projection on fixed-size array");
                        let len = size - from - to;
                        Ty::new_array(tcx, *inner, len)
                    }
                    _ => bug!("cannot subslice non-array type: `{:?}`", self),
                })
            }
            ProjectionElem::Downcast(_name, index) => {
                PlaceTy { ty: self.ty, variant_index: Some(index) }
            }
            ProjectionElem::Field(f, fty) => PlaceTy::from_ty(handle_field(
                structurally_normalize(self.ty),
                self.variant_index,
                f,
                fty,
            )),
            ProjectionElem::OpaqueCast(ty) => PlaceTy::from_ty(handle_opaque_cast_and_subtype(ty)),
            ProjectionElem::UnwrapUnsafeBinder(ty) => {
                PlaceTy::from_ty(handle_opaque_cast_and_subtype(ty))
            }
        };
        debug!("projection_ty self: {:?} elem: {:?} yields: {:?}", self, elem, answer);
        answer
    }
}

impl<V, T> ProjectionElem<V, T> {
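    /// Returns `true` if this projection dereferences a pointer, i.e. if the
    /// projected place may refer to memory outside of the base local.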
    pub fn is_indirect(&self) -> bool {
        match self {
            Self::Deref => true,

            Self::Field(_, _)
            | Self::Index(_)
            | Self::OpaqueCast(_)
            | Self::ConstantIndex { .. }
            | Self::Subslice { .. }
            | Self::Downcast(_, _)
            | Self::UnwrapUnsafeBinder(..) => false,
        }
    }

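    /// Returns `true` if applying this projection to a given base place always
    /// yields the same region of memory regardless of the program state, i.e.
    /// it involves neither a dereference nor a runtime index.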
    pub fn is_stable_offset(&self) -> bool {
        match self {
            Self::Deref | Self::Index(_) => false,
            Self::Field(_, _)
            | Self::OpaqueCast(_)
            | Self::ConstantIndex { .. }
            | Self::Subslice { .. }
            | Self::Downcast(_, _)
            | Self::UnwrapUnsafeBinder(..) => true,
        }
    }

    pub fn is_downcast_to(&self, v: VariantIdx) -> bool {
        matches!(*self, Self::Downcast(_, x) if x == v)
    }

    pub fn is_field_to(&self, f: FieldIdx) -> bool {
        matches!(*self, Self::Field(x, _) if x == f)
    }

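    /// Returns `true` if this projection kind is allowed to appear in the
    /// place of a variable's debug info.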
    pub fn can_use_in_debuginfo(&self) -> bool {
        match self {
            Self::ConstantIndex { from_end: false, .. }
            | Self::Deref
            | Self::Downcast(_, _)
            | Self::Field(_, _) => true,
            Self::ConstantIndex { from_end: true, .. }
            | Self::Index(_)
            | Self::OpaqueCast(_)
            | Self::Subslice { .. } => false,
            Self::UnwrapUnsafeBinder(..) => false,
        }
    }

    pub fn kind(self) -> ProjectionKind {
        self.try_map(|_| Some(()), |_| ()).unwrap()
    }

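    /// Converts this projection element to one with different index (`V2`) and
    /// type (`T2`) payloads, returning `None` if the index conversion fails
    /// (which can only happen for `Index` projections).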
    pub fn try_map<V2, T2>(
        self,
        v: impl FnOnce(V) -> Option<V2>,
        t: impl FnOnce(T) -> T2,
    ) -> Option<ProjectionElem<V2, T2>> {
        Some(match self {
            ProjectionElem::Deref => ProjectionElem::Deref,
            ProjectionElem::Downcast(name, read_variant) => {
                ProjectionElem::Downcast(name, read_variant)
            }
            ProjectionElem::Field(f, ty) => ProjectionElem::Field(f, t(ty)),
            ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
                ProjectionElem::ConstantIndex { offset, min_length, from_end }
            }
            ProjectionElem::Subslice { from, to, from_end } => {
                ProjectionElem::Subslice { from, to, from_end }
            }
            ProjectionElem::OpaqueCast(ty) => ProjectionElem::OpaqueCast(t(ty)),
            ProjectionElem::UnwrapUnsafeBinder(ty) => ProjectionElem::UnwrapUnsafeBinder(t(ty)),
            ProjectionElem::Index(val) => ProjectionElem::Index(v(val)?),
        })
    }
}

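/// A [`ProjectionElem`] with both the index and type payloads erased, leaving
/// only the kind of projection.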
pub type ProjectionKind = ProjectionElem<(), ()>;

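/// A borrowed form of [`Place`]: a base local together with a (possibly
/// empty) slice of projections applied to it.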
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct PlaceRef<'tcx> {
    pub local: Local,
    pub projection: &'tcx [PlaceElem<'tcx>],
}

impl<'tcx> !PartialOrd for PlaceRef<'tcx> {}

impl<'tcx> Place<'tcx> {
    pub fn return_place() -> Place<'tcx> {
        Place { local: RETURN_PLACE, projection: List::empty() }
    }

    pub fn is_indirect(&self) -> bool {
        self.projection.iter().any(|elem| elem.is_indirect())
    }

    pub fn is_indirect_first_projection(&self) -> bool {
        self.as_ref().is_indirect_first_projection()
    }

    #[inline(always)]
    pub fn local_or_deref_local(&self) -> Option<Local> {
        self.as_ref().local_or_deref_local()
    }

    #[inline(always)]
    pub fn as_local(&self) -> Option<Local> {
        self.as_ref().as_local()
    }

    #[inline]
    pub fn as_ref(&self) -> PlaceRef<'tcx> {
        PlaceRef { local: self.local, projection: self.projection }
    }

    #[inline]
    pub fn iter_projections(
        self,
    ) -> impl Iterator<Item = (PlaceRef<'tcx>, PlaceElem<'tcx>)> + DoubleEndedIterator {
        self.as_ref().iter_projections()
    }

    pub fn project_deeper(self, more_projections: &[PlaceElem<'tcx>], tcx: TyCtxt<'tcx>) -> Self {
        if more_projections.is_empty() {
            return self;
        }

        self.as_ref().project_deeper(more_projections, tcx)
    }

    pub fn ty_from<D>(
        local: Local,
        projection: &[PlaceElem<'tcx>],
        local_decls: &D,
        tcx: TyCtxt<'tcx>,
    ) -> PlaceTy<'tcx>
    where
        D: ?Sized + HasLocalDecls<'tcx>,
    {
        PlaceTy::from_ty(local_decls.local_decls()[local].ty).multi_projection_ty(tcx, projection)
    }

    pub fn ty<D: ?Sized>(&self, local_decls: &D, tcx: TyCtxt<'tcx>) -> PlaceTy<'tcx>
    where
        D: HasLocalDecls<'tcx>,
    {
        Place::ty_from(self.local, self.projection, local_decls, tcx)
    }
}

impl From<Local> for Place<'_> {
    #[inline]
    fn from(local: Local) -> Self {
        Place { local, projection: List::empty() }
    }
}

impl<'tcx> PlaceRef<'tcx> {
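    /// If this place (or the place behind a single `Deref`) is just a local,
    /// returns that local; otherwise returns `None`.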
    pub fn local_or_deref_local(&self) -> Option<Local> {
        match *self {
            PlaceRef { local, projection: [] }
            | PlaceRef { local, projection: [ProjectionElem::Deref] } => Some(local),
            _ => None,
        }
    }

    pub fn is_indirect(&self) -> bool {
        self.projection.iter().any(|elem| elem.is_indirect())
    }

    pub fn is_indirect_first_projection(&self) -> bool {
        debug_assert!(
            self.projection.is_empty() || !self.projection[1..].contains(&PlaceElem::Deref)
        );
        self.projection.first() == Some(&PlaceElem::Deref)
    }

    #[inline]
    pub fn as_local(&self) -> Option<Local> {
        match *self {
            PlaceRef { local, projection: [] } => Some(local),
            _ => None,
        }
    }

    #[inline]
    pub fn to_place(&self, tcx: TyCtxt<'tcx>) -> Place<'tcx> {
        Place { local: self.local, projection: tcx.mk_place_elems(self.projection) }
    }

    #[inline]
    pub fn last_projection(&self) -> Option<(PlaceRef<'tcx>, PlaceElem<'tcx>)> {
        if let &[ref proj_base @ .., elem] = self.projection {
            Some((PlaceRef { local: self.local, projection: proj_base }, elem))
        } else {
            None
        }
    }

    #[inline]
    pub fn iter_projections(
        self,
    ) -> impl Iterator<Item = (PlaceRef<'tcx>, PlaceElem<'tcx>)> + DoubleEndedIterator {
        self.projection.iter().enumerate().map(move |(i, proj)| {
            let base = PlaceRef { local: self.local, projection: &self.projection[..i] };
            (base, *proj)
        })
    }

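    /// Returns the locals used to evaluate this place: its base local plus any
    /// locals appearing as runtime indices in its projections.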
    pub fn accessed_locals(self) -> impl Iterator<Item = Local> {
        std::iter::once(self.local).chain(self.projection.iter().filter_map(|proj| match proj {
            ProjectionElem::Index(local) => Some(*local),
            ProjectionElem::Deref
            | ProjectionElem::Field(_, _)
            | ProjectionElem::ConstantIndex { .. }
            | ProjectionElem::Subslice { .. }
            | ProjectionElem::Downcast(_, _)
            | ProjectionElem::OpaqueCast(_)
            | ProjectionElem::UnwrapUnsafeBinder(_) => None,
        }))
    }

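    /// Returns a new `Place` consisting of this place's projections followed
    /// by `more_projections`, interned in `tcx`.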
    pub fn project_deeper(
        self,
        more_projections: &[PlaceElem<'tcx>],
        tcx: TyCtxt<'tcx>,
    ) -> Place<'tcx> {
        let mut v: Vec<PlaceElem<'tcx>>;

        let new_projections = if self.projection.is_empty() {
            more_projections
        } else {
            v = Vec::with_capacity(self.projection.len() + more_projections.len());
            v.extend(self.projection);
            v.extend(more_projections);
            &v
        };

        Place { local: self.local, projection: tcx.mk_place_elems(new_projections) }
    }

    pub fn ty<D>(&self, local_decls: &D, tcx: TyCtxt<'tcx>) -> PlaceTy<'tcx>
    where
        D: ?Sized + HasLocalDecls<'tcx>,
    {
        Place::ty_from(self.local, self.projection, local_decls, tcx)
    }
}

impl From<Local> for PlaceRef<'_> {
    #[inline]
    fn from(local: Local) -> Self {
        PlaceRef { local, projection: &[] }
    }
}

impl<'tcx> Operand<'tcx> {
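    /// Creates a constant operand naming the function `def_id` instantiated
    /// with `args`, i.e. a zero-sized value of the corresponding `FnDef` type.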
    pub fn function_handle(
        tcx: TyCtxt<'tcx>,
        def_id: DefId,
        args: impl IntoIterator<Item = GenericArg<'tcx>>,
        span: Span,
    ) -> Self {
        let ty = Ty::new_fn_def(tcx, def_id, args);
        Operand::Constant(Box::new(ConstOperand {
            span,
            user_ty: None,
            const_: Const::Val(ConstValue::ZeroSized, ty),
        }))
    }

    pub fn unevaluated_constant(
        tcx: TyCtxt<'tcx>,
        def_id: DefId,
        args: &[GenericArg<'tcx>],
        span: Span,
    ) -> Self {
        let const_ = Const::from_unevaluated(tcx, def_id).instantiate(tcx, args);
        Operand::Constant(Box::new(ConstOperand { span, user_ty: None, const_ }))
    }

    pub fn is_move(&self) -> bool {
        matches!(self, Operand::Move(..))
    }

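    /// Creates a constant operand holding the scalar `val` at type `ty`.
    /// In debug builds, asserts that the scalar's size matches the size of the
    /// (fully monomorphized) type's layout.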
    pub fn const_from_scalar(
        tcx: TyCtxt<'tcx>,
        ty: Ty<'tcx>,
        val: Scalar,
        span: Span,
    ) -> Operand<'tcx> {
        debug_assert!({
            let typing_env = ty::TypingEnv::fully_monomorphized();
            let type_size = tcx
                .layout_of(typing_env.as_query_input(ty))
                .unwrap_or_else(|e| panic!("could not compute layout for {ty:?}: {e:?}"))
                .size;
            let scalar_size = match val {
                Scalar::Int(int) => int.size(),
                _ => panic!("Invalid scalar type {val:?}"),
            };
            scalar_size == type_size
        });
        Operand::Constant(Box::new(ConstOperand {
            span,
            user_ty: None,
            const_: Const::Val(ConstValue::Scalar(val), ty),
        }))
    }

    pub fn to_copy(&self) -> Self {
        match *self {
            Operand::Copy(_) | Operand::Constant(_) => self.clone(),
            Operand::Move(place) => Operand::Copy(place),
        }
    }

    pub fn place(&self) -> Option<Place<'tcx>> {
        match self {
            Operand::Copy(place) | Operand::Move(place) => Some(*place),
            Operand::Constant(_) => None,
        }
    }

    pub fn constant(&self) -> Option<&ConstOperand<'tcx>> {
        match self {
            Operand::Constant(x) => Some(&**x),
            Operand::Copy(_) | Operand::Move(_) => None,
        }
    }

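    /// If this operand is a constant whose type is a `FnDef`, returns the
    /// function's `DefId` and generic arguments.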
    pub fn const_fn_def(&self) -> Option<(DefId, GenericArgsRef<'tcx>)> {
        let const_ty = self.constant()?.const_.ty();
        if let ty::FnDef(def_id, args) = *const_ty.kind() { Some((def_id, args)) } else { None }
    }

    pub fn ty<D>(&self, local_decls: &D, tcx: TyCtxt<'tcx>) -> Ty<'tcx>
    where
        D: ?Sized + HasLocalDecls<'tcx>,
    {
        match self {
            &Operand::Copy(ref l) | &Operand::Move(ref l) => l.ty(local_decls, tcx).ty,
            Operand::Constant(c) => c.const_.ty(),
        }
    }

    pub fn span<D>(&self, local_decls: &D) -> Span
    where
        D: ?Sized + HasLocalDecls<'tcx>,
    {
        match self {
            &Operand::Copy(ref l) | &Operand::Move(ref l) => {
                local_decls.local_decls()[l.local].source_info.span
            }
            Operand::Constant(c) => c.span,
        }
    }
}

impl<'tcx> ConstOperand<'tcx> {
    pub fn check_static_ptr(&self, tcx: TyCtxt<'_>) -> Option<DefId> {
        match self.const_.try_to_scalar() {
            Some(Scalar::Ptr(ptr, _size)) => match tcx.global_alloc(ptr.provenance.alloc_id()) {
                GlobalAlloc::Static(def_id) => {
                    assert!(!tcx.is_thread_local_static(def_id));
                    Some(def_id)
                }
                _ => None,
            },
            _ => None,
        }
    }

    #[inline]
    pub fn ty(&self) -> Ty<'tcx> {
        self.const_.ty()
    }
}

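/// Describes how much of the assigned-to place an rvalue initializes:
/// `Shallow` for `Rvalue::ShallowInitBox`, which only initializes the box
/// pointer itself, and `Deep` for every other rvalue.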
pub enum RvalueInitializationState {
    Shallow,
    Deep,
}

impl<'tcx> Rvalue<'tcx> {
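    /// Returns `true` if an unused assignment of this rvalue can be removed
    /// without changing program behaviour. `PointerExposeProvenance` casts are
    /// the exception: exposing a pointer's provenance is a side effect that
    /// must be preserved even if the result is never read.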
    #[inline]
    pub fn is_safe_to_remove(&self) -> bool {
        match self {
            Rvalue::Cast(CastKind::PointerExposeProvenance, _, _) => false,

            Rvalue::Use(_)
            | Rvalue::CopyForDeref(_)
            | Rvalue::Repeat(_, _)
            | Rvalue::Ref(_, _, _)
            | Rvalue::ThreadLocalRef(_)
            | Rvalue::RawPtr(_, _)
            | Rvalue::Cast(
                CastKind::IntToInt
                | CastKind::FloatToInt
                | CastKind::FloatToFloat
                | CastKind::IntToFloat
                | CastKind::FnPtrToPtr
                | CastKind::PtrToPtr
                | CastKind::PointerCoercion(_, _)
                | CastKind::PointerWithExposedProvenance
                | CastKind::Transmute
                | CastKind::Subtype,
                _,
                _,
            )
            | Rvalue::BinaryOp(_, _)
            | Rvalue::NullaryOp(_, _)
            | Rvalue::UnaryOp(_, _)
            | Rvalue::Discriminant(_)
            | Rvalue::Aggregate(_, _)
            | Rvalue::ShallowInitBox(_, _)
            | Rvalue::WrapUnsafeBinder(_, _) => true,
        }
    }

    pub fn ty<D>(&self, local_decls: &D, tcx: TyCtxt<'tcx>) -> Ty<'tcx>
    where
        D: ?Sized + HasLocalDecls<'tcx>,
    {
        match *self {
            Rvalue::Use(ref operand) => operand.ty(local_decls, tcx),
            Rvalue::Repeat(ref operand, count) => {
                Ty::new_array_with_const_len(tcx, operand.ty(local_decls, tcx), count)
            }
            Rvalue::ThreadLocalRef(did) => tcx.thread_local_ptr_ty(did),
            Rvalue::Ref(reg, bk, ref place) => {
                let place_ty = place.ty(local_decls, tcx).ty;
                Ty::new_ref(tcx, reg, place_ty, bk.to_mutbl_lossy())
            }
            Rvalue::RawPtr(kind, ref place) => {
                let place_ty = place.ty(local_decls, tcx).ty;
                Ty::new_ptr(tcx, place_ty, kind.to_mutbl_lossy())
            }
            Rvalue::Cast(.., ty) => ty,
            Rvalue::BinaryOp(op, box (ref lhs, ref rhs)) => {
                let lhs_ty = lhs.ty(local_decls, tcx);
                let rhs_ty = rhs.ty(local_decls, tcx);
                op.ty(tcx, lhs_ty, rhs_ty)
            }
            Rvalue::UnaryOp(op, ref operand) => {
                let arg_ty = operand.ty(local_decls, tcx);
                op.ty(tcx, arg_ty)
            }
            Rvalue::Discriminant(ref place) => place.ty(local_decls, tcx).ty.discriminant_ty(tcx),
            Rvalue::NullaryOp(NullOp::OffsetOf(..), _) => tcx.types.usize,
            Rvalue::NullaryOp(NullOp::ContractChecks, _)
            | Rvalue::NullaryOp(NullOp::UbChecks, _) => tcx.types.bool,
            Rvalue::Aggregate(ref ak, ref ops) => match **ak {
                AggregateKind::Array(ty) => Ty::new_array(tcx, ty, ops.len() as u64),
                AggregateKind::Tuple => {
                    Ty::new_tup_from_iter(tcx, ops.iter().map(|op| op.ty(local_decls, tcx)))
                }
                AggregateKind::Adt(did, _, args, _, _) => tcx.type_of(did).instantiate(tcx, args),
                AggregateKind::Closure(did, args) => Ty::new_closure(tcx, did, args),
                AggregateKind::Coroutine(did, args) => Ty::new_coroutine(tcx, did, args),
                AggregateKind::CoroutineClosure(did, args) => {
                    Ty::new_coroutine_closure(tcx, did, args)
                }
                AggregateKind::RawPtr(ty, mutability) => Ty::new_ptr(tcx, ty, mutability),
            },
            Rvalue::ShallowInitBox(_, ty) => Ty::new_box(tcx, ty),
            Rvalue::CopyForDeref(ref place) => place.ty(local_decls, tcx).ty,
            Rvalue::WrapUnsafeBinder(_, ty) => ty,
        }
    }

    #[inline]
    pub fn initialization_state(&self) -> RvalueInitializationState {
        match *self {
            Rvalue::ShallowInitBox(_, _) => RvalueInitializationState::Shallow,
            _ => RvalueInitializationState::Deep,
        }
    }
}

impl BorrowKind {
    pub fn mutability(&self) -> Mutability {
        match *self {
            BorrowKind::Shared | BorrowKind::Fake(_) => Mutability::Not,
            BorrowKind::Mut { .. } => Mutability::Mut,
        }
    }

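    /// Returns `true` if this borrow may be lowered as a two-phase borrow,
    /// i.e. only for `BorrowKind::Mut { kind: MutBorrowKind::TwoPhaseBorrow }`.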
    pub fn allows_two_phase_borrow(&self) -> bool {
        match *self {
            BorrowKind::Shared
            | BorrowKind::Fake(_)
            | BorrowKind::Mut { kind: MutBorrowKind::Default | MutBorrowKind::ClosureCapture } => {
                false
            }
            BorrowKind::Mut { kind: MutBorrowKind::TwoPhaseBorrow } => true,
        }
    }

    pub fn to_mutbl_lossy(self) -> hir::Mutability {
        match self {
            BorrowKind::Mut { .. } => hir::Mutability::Mut,
            BorrowKind::Shared => hir::Mutability::Not,

            BorrowKind::Fake(_) => hir::Mutability::Not,
        }
    }
}

impl<'tcx> NullOp<'tcx> {
    pub fn ty(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
        match self {
            NullOp::OffsetOf(_) => tcx.types.usize,
            NullOp::UbChecks | NullOp::ContractChecks => tcx.types.bool,
        }
    }
}

impl<'tcx> UnOp {
    pub fn ty(&self, tcx: TyCtxt<'tcx>, arg_ty: Ty<'tcx>) -> Ty<'tcx> {
        match self {
            UnOp::Not | UnOp::Neg => arg_ty,
            UnOp::PtrMetadata => arg_ty.pointee_metadata_ty_or_projection(tcx),
        }
    }
}

impl<'tcx> BinOp {
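    /// Returns the type of the value produced by applying this binary op to
    /// operands of type `lhs_ty` and `rhs_ty`.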
    pub fn ty(&self, tcx: TyCtxt<'tcx>, lhs_ty: Ty<'tcx>, rhs_ty: Ty<'tcx>) -> Ty<'tcx> {
        match self {
            &BinOp::Add
            | &BinOp::AddUnchecked
            | &BinOp::Sub
            | &BinOp::SubUnchecked
            | &BinOp::Mul
            | &BinOp::MulUnchecked
            | &BinOp::Div
            | &BinOp::Rem
            | &BinOp::BitXor
            | &BinOp::BitAnd
            | &BinOp::BitOr => {
                assert_eq!(lhs_ty, rhs_ty);
                lhs_ty
            }
            &BinOp::AddWithOverflow | &BinOp::SubWithOverflow | &BinOp::MulWithOverflow => {
                assert_eq!(lhs_ty, rhs_ty);
                Ty::new_tup(tcx, &[lhs_ty, tcx.types.bool])
            }
            &BinOp::Shl
            | &BinOp::ShlUnchecked
            | &BinOp::Shr
            | &BinOp::ShrUnchecked
            | &BinOp::Offset => {
                lhs_ty // the RHS may have a different type than the LHS
            }
            &BinOp::Eq | &BinOp::Lt | &BinOp::Le | &BinOp::Ne | &BinOp::Ge | &BinOp::Gt => {
                tcx.types.bool
            }
            &BinOp::Cmp => {
                assert_eq!(lhs_ty, rhs_ty);
                tcx.ty_ordering_enum(DUMMY_SP)
            }
        }
    }

    pub(crate) fn to_hir_binop(self) -> hir::BinOpKind {
        match self {
            BinOp::Add | BinOp::AddWithOverflow => hir::BinOpKind::Add,
            BinOp::Sub | BinOp::SubWithOverflow => hir::BinOpKind::Sub,
            BinOp::Mul | BinOp::MulWithOverflow => hir::BinOpKind::Mul,
            BinOp::Div => hir::BinOpKind::Div,
            BinOp::Rem => hir::BinOpKind::Rem,
            BinOp::BitXor => hir::BinOpKind::BitXor,
            BinOp::BitAnd => hir::BinOpKind::BitAnd,
            BinOp::BitOr => hir::BinOpKind::BitOr,
            BinOp::Shl => hir::BinOpKind::Shl,
            BinOp::Shr => hir::BinOpKind::Shr,
            BinOp::Eq => hir::BinOpKind::Eq,
            BinOp::Ne => hir::BinOpKind::Ne,
            BinOp::Lt => hir::BinOpKind::Lt,
            BinOp::Gt => hir::BinOpKind::Gt,
            BinOp::Le => hir::BinOpKind::Le,
            BinOp::Ge => hir::BinOpKind::Ge,
            // These operators have no direct HIR counterpart.
            BinOp::Cmp
            | BinOp::AddUnchecked
            | BinOp::SubUnchecked
            | BinOp::MulUnchecked
            | BinOp::ShlUnchecked
            | BinOp::ShrUnchecked
            | BinOp::Offset => {
                unreachable!()
            }
        }
    }

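    /// Maps `AddWithOverflow`/`SubWithOverflow`/`MulWithOverflow` to the
    /// corresponding plain `Add`/`Sub`/`Mul`, returning `None` for all other ops.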
    pub fn overflowing_to_wrapping(self) -> Option<BinOp> {
        Some(match self {
            BinOp::AddWithOverflow => BinOp::Add,
            BinOp::SubWithOverflow => BinOp::Sub,
            BinOp::MulWithOverflow => BinOp::Mul,
            _ => return None,
        })
    }

    pub fn is_overflowing(self) -> bool {
        self.overflowing_to_wrapping().is_some()
    }

    pub fn wrapping_to_overflowing(self) -> Option<BinOp> {
        Some(match self {
            BinOp::Add => BinOp::AddWithOverflow,
            BinOp::Sub => BinOp::SubWithOverflow,
            BinOp::Mul => BinOp::MulWithOverflow,
            _ => return None,
        })
    }
}

impl From<Mutability> for RawPtrKind {
    fn from(other: Mutability) -> Self {
        match other {
            Mutability::Mut => RawPtrKind::Mut,
            Mutability::Not => RawPtrKind::Const,
        }
    }
}

impl RawPtrKind {
    pub fn is_fake(self) -> bool {
        match self {
            RawPtrKind::Mut | RawPtrKind::Const => false,
            RawPtrKind::FakeForPtrMetadata => true,
        }
    }

    pub fn to_mutbl_lossy(self) -> Mutability {
        match self {
            RawPtrKind::Mut => Mutability::Mut,
            RawPtrKind::Const => Mutability::Not,

            RawPtrKind::FakeForPtrMetadata => Mutability::Not,
        }
    }

    pub fn ptr_str(self) -> &'static str {
        match self {
            RawPtrKind::Mut => "mut",
            RawPtrKind::Const => "const",
            RawPtrKind::FakeForPtrMetadata => "const (fake)",
        }
    }
}

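/// The list of statement-level debug info entries attached to a [`Statement`].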
#[derive(Default, Debug, Clone, TyEncodable, TyDecodable, HashStable, TypeFoldable, TypeVisitable)]
pub struct StmtDebugInfos<'tcx>(Vec<StmtDebugInfo<'tcx>>);

impl<'tcx> StmtDebugInfos<'tcx> {
    pub fn push(&mut self, debuginfo: StmtDebugInfo<'tcx>) {
        self.0.push(debuginfo);
    }

    pub fn drop_debuginfo(&mut self) {
        self.0.clear();
    }

    pub fn is_empty(&self) -> bool {
        self.0.is_empty()
    }

    pub fn prepend(&mut self, debuginfos: &mut Self) {
        if debuginfos.is_empty() {
            return;
        }
        debuginfos.0.append(self);
        std::mem::swap(debuginfos, self);
    }

    pub fn append(&mut self, debuginfos: &mut Self) {
        if debuginfos.is_empty() {
            return;
        }
        self.0.append(debuginfos);
    }

    pub fn extend(&mut self, debuginfos: &Self) {
        if debuginfos.is_empty() {
            return;
        }
        self.0.extend_from_slice(debuginfos);
    }

    pub fn retain<F>(&mut self, f: F)
    where
        F: FnMut(&StmtDebugInfo<'tcx>) -> bool,
    {
        self.0.retain(f);
    }
}

impl<'tcx> ops::Deref for StmtDebugInfos<'tcx> {
    type Target = Vec<StmtDebugInfo<'tcx>>;

    #[inline]
    fn deref(&self) -> &Vec<StmtDebugInfo<'tcx>> {
        &self.0
    }
}

impl<'tcx> ops::DerefMut for StmtDebugInfos<'tcx> {
    #[inline]
    fn deref_mut(&mut self) -> &mut Vec<StmtDebugInfo<'tcx>> {
        &mut self.0
    }
}

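/// A single statement-level debug info entry, e.g. a record that `local` was
/// assigned `&place` before the assignment was optimized away.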
#[derive(Clone, TyEncodable, TyDecodable, HashStable, TypeFoldable, TypeVisitable)]
pub enum StmtDebugInfo<'tcx> {
    AssignRef(Local, Place<'tcx>),
    InvalidAssign(Local),
}