1use std::ops;
4
5use tracing::{debug, instrument};
6
7use super::interpret::GlobalAlloc;
8use super::*;
9use crate::ty::CoroutineArgsExt;
10
/// A single MIR statement: a source location, the operation performed, and any
/// debuginfo preserved from statements that were previously turned into nops.
#[derive(Clone, TyEncodable, TyDecodable, HashStable, TypeFoldable, TypeVisitable)]
#[non_exhaustive]
pub struct Statement<'tcx> {
    /// The source location this statement originates from.
    pub source_info: SourceInfo,
    /// The operation this statement performs.
    pub kind: StatementKind<'tcx>,
    /// Debuginfo recovered from eliminated statements (see `Statement::make_nop`).
    pub debuginfos: StmtDebugInfos<'tcx>,
}
23
24impl<'tcx> Statement<'tcx> {
25 pub fn make_nop(&mut self, drop_debuginfo: bool) {
28 if matches!(self.kind, StatementKind::Nop) {
29 return;
30 }
31 let replaced_stmt = std::mem::replace(&mut self.kind, StatementKind::Nop);
32 if !drop_debuginfo {
33 let Some(debuginfo) = replaced_stmt.as_debuginfo() else {
34 bug!("debuginfo is not yet supported.")
35 };
36 self.debuginfos.push(debuginfo);
37 }
38 }
39
40 pub fn new(source_info: SourceInfo, kind: StatementKind<'tcx>) -> Self {
41 Statement { source_info, kind, debuginfos: StmtDebugInfos::default() }
42 }
43}
44
45impl<'tcx> StatementKind<'tcx> {
46 pub const fn name(&self) -> &'static str {
49 match self {
50 StatementKind::Assign(..) => "Assign",
51 StatementKind::FakeRead(..) => "FakeRead",
52 StatementKind::SetDiscriminant { .. } => "SetDiscriminant",
53 StatementKind::Deinit(..) => "Deinit",
54 StatementKind::StorageLive(..) => "StorageLive",
55 StatementKind::StorageDead(..) => "StorageDead",
56 StatementKind::Retag(..) => "Retag",
57 StatementKind::PlaceMention(..) => "PlaceMention",
58 StatementKind::AscribeUserType(..) => "AscribeUserType",
59 StatementKind::Coverage(..) => "Coverage",
60 StatementKind::Intrinsic(..) => "Intrinsic",
61 StatementKind::ConstEvalCounter => "ConstEvalCounter",
62 StatementKind::Nop => "Nop",
63 StatementKind::BackwardIncompatibleDropHint { .. } => "BackwardIncompatibleDropHint",
64 }
65 }
66 pub fn as_assign_mut(&mut self) -> Option<&mut (Place<'tcx>, Rvalue<'tcx>)> {
67 match self {
68 StatementKind::Assign(x) => Some(x),
69 _ => None,
70 }
71 }
72
73 pub fn as_assign(&self) -> Option<&(Place<'tcx>, Rvalue<'tcx>)> {
74 match self {
75 StatementKind::Assign(x) => Some(x),
76 _ => None,
77 }
78 }
79
80 pub fn as_debuginfo(&self) -> Option<StmtDebugInfo<'tcx>> {
81 match self {
82 StatementKind::Assign(box (place, Rvalue::Ref(_, _, ref_place)))
83 if let Some(local) = place.as_local() =>
84 {
85 Some(StmtDebugInfo::AssignRef(local, *ref_place))
86 }
87 _ => None,
88 }
89 }
90}
91
/// The type of a place, together with the enum/coroutine variant it was
/// downcast to, if any.
#[derive(Copy, Clone, Debug, TypeFoldable, TypeVisitable)]
pub struct PlaceTy<'tcx> {
    /// The type of the place.
    pub ty: Ty<'tcx>,
    /// The variant selected by a `Downcast` projection, if one was applied.
    pub variant_index: Option<VariantIdx>,
}

// Keep this type small; it is copied around freely.
#[cfg(target_pointer_width = "64")]
rustc_data_structures::static_assert_size!(PlaceTy<'_>, 16);
105
impl<'tcx> PlaceTy<'tcx> {
    /// Creates a `PlaceTy` with no downcast variant.
    #[inline]
    pub fn from_ty(ty: Ty<'tcx>) -> PlaceTy<'tcx> {
        PlaceTy { ty, variant_index: None }
    }

    /// Returns the type of field `f` of `self_ty`, optionally within variant
    /// `variant_idx` (for enums and coroutines).
    ///
    /// Calls `bug!` when the variant or field is out of range, or when
    /// `self_ty` cannot be projected into at all.
    #[instrument(level = "debug", skip(tcx), ret)]
    pub fn field_ty(
        tcx: TyCtxt<'tcx>,
        self_ty: Ty<'tcx>,
        variant_idx: Option<VariantIdx>,
        f: FieldIdx,
    ) -> Ty<'tcx> {
        if let Some(variant_index) = variant_idx {
            match *self_ty.kind() {
                ty::Adt(adt_def, args) if adt_def.is_enum() => {
                    adt_def.variant(variant_index).fields[f].ty(tcx, args)
                }
                ty::Coroutine(def_id, args) => {
                    // Coroutine "variants" come from the state-type iterator;
                    // look up the requested one lazily.
                    let mut variants = args.as_coroutine().state_tys(def_id, tcx);
                    let Some(mut variant) = variants.nth(variant_index.into()) else {
                        bug!("variant {variant_index:?} of coroutine out of range: {self_ty:?}");
                    };

                    variant.nth(f.index()).unwrap_or_else(|| {
                        bug!("field {f:?} out of range of variant: {self_ty:?} {variant_idx:?}")
                    })
                }
                _ => bug!("can't downcast non-adt non-coroutine type: {self_ty:?}"),
            }
        } else {
            match self_ty.kind() {
                // Structs and unions: the single, non-enum variant.
                ty::Adt(adt_def, args) if !adt_def.is_enum() => {
                    adt_def.non_enum_variant().fields[f].ty(tcx, args)
                }
                // Closures and coroutine-closures: fields are the upvars.
                ty::Closure(_, args) => args
                    .as_closure()
                    .upvar_tys()
                    .get(f.index())
                    .copied()
                    .unwrap_or_else(|| bug!("field {f:?} out of range: {self_ty:?}")),
                ty::CoroutineClosure(_, args) => args
                    .as_coroutine_closure()
                    .upvar_tys()
                    .get(f.index())
                    .copied()
                    .unwrap_or_else(|| bug!("field {f:?} out of range: {self_ty:?}")),
                // Without a variant index, only a coroutine's prefix types
                // are addressable.
                ty::Coroutine(_, args) => {
                    args.as_coroutine().prefix_tys().get(f.index()).copied().unwrap_or_else(|| {
                        bug!("field {f:?} out of range of prefixes for {self_ty}")
                    })
                }
                ty::Tuple(tys) => tys
                    .get(f.index())
                    .copied()
                    .unwrap_or_else(|| bug!("field {f:?} out of range: {self_ty:?}")),
                _ => bug!("can't project out of {self_ty:?}"),
            }
        }
    }

    /// Applies each projection in `elems` in order, returning the resulting
    /// place type.
    pub fn multi_projection_ty(
        self,
        tcx: TyCtxt<'tcx>,
        elems: &[PlaceElem<'tcx>],
    ) -> PlaceTy<'tcx> {
        elems.iter().fold(self, |place_ty, &elem| place_ty.projection_ty(tcx, elem))
    }

    /// Convenience form of [`PlaceTy::projection_ty_core`] that uses the
    /// projection's own types unchanged.
    pub fn projection_ty<V: ::std::fmt::Debug>(
        self,
        tcx: TyCtxt<'tcx>,
        elem: ProjectionElem<V, Ty<'tcx>>,
    ) -> PlaceTy<'tcx> {
        self.projection_ty_core(tcx, &elem, |ty| ty, |_, _, _, ty| ty, |ty| ty)
    }

    /// Core of projection-type computation, parameterized over hooks:
    /// `structurally_normalize` is applied to the base type before
    /// projecting, `handle_field` computes a field's type, and
    /// `handle_opaque_cast_and_subtype` resolves the payload type of
    /// `OpaqueCast` / `UnwrapUnsafeBinder` projections.
    pub fn projection_ty_core<V, T>(
        self,
        tcx: TyCtxt<'tcx>,
        elem: &ProjectionElem<V, T>,
        mut structurally_normalize: impl FnMut(Ty<'tcx>) -> Ty<'tcx>,
        mut handle_field: impl FnMut(Ty<'tcx>, Option<VariantIdx>, FieldIdx, T) -> Ty<'tcx>,
        mut handle_opaque_cast_and_subtype: impl FnMut(T) -> Ty<'tcx>,
    ) -> PlaceTy<'tcx>
    where
        V: ::std::fmt::Debug,
        T: ::std::fmt::Debug + Copy,
    {
        // After a downcast, only field projections are meaningful.
        if self.variant_index.is_some() && !matches!(elem, ProjectionElem::Field(..)) {
            bug!("cannot use non field projection on downcasted place")
        }
        let answer = match *elem {
            ProjectionElem::Deref => {
                let ty = structurally_normalize(self.ty).builtin_deref(true).unwrap_or_else(|| {
                    bug!("deref projection of non-dereferenceable ty {:?}", self)
                });
                PlaceTy::from_ty(ty)
            }
            ProjectionElem::Index(_) | ProjectionElem::ConstantIndex { .. } => {
                PlaceTy::from_ty(structurally_normalize(self.ty).builtin_index().unwrap())
            }
            ProjectionElem::Subslice { from, to, from_end } => {
                PlaceTy::from_ty(match structurally_normalize(self.ty).kind() {
                    ty::Slice(..) => self.ty,
                    // `from_end: false`: `to` is an absolute index, so the
                    // subslice has length `to - from`.
                    ty::Array(inner, _) if !from_end => Ty::new_array(tcx, *inner, to - from),
                    // `from_end: true`: `to` counts from the end, so `from`
                    // elements are dropped at the front and `to` at the back.
                    ty::Array(inner, size) if from_end => {
                        let size = size
                            .try_to_target_usize(tcx)
                            .expect("expected subslice projection on fixed-size array");
                        let len = size - from - to;
                        Ty::new_array(tcx, *inner, len)
                    }
                    _ => bug!("cannot subslice non-array type: `{:?}`", self),
                })
            }
            // Downcasting only records the variant; the type is unchanged.
            ProjectionElem::Downcast(_name, index) => {
                PlaceTy { ty: self.ty, variant_index: Some(index) }
            }
            ProjectionElem::Field(f, fty) => PlaceTy::from_ty(handle_field(
                structurally_normalize(self.ty),
                self.variant_index,
                f,
                fty,
            )),
            ProjectionElem::OpaqueCast(ty) => PlaceTy::from_ty(handle_opaque_cast_and_subtype(ty)),

            ProjectionElem::UnwrapUnsafeBinder(ty) => {
                PlaceTy::from_ty(handle_opaque_cast_and_subtype(ty))
            }
        };
        debug!("projection_ty self: {:?} elem: {:?} yields: {:?}", self, elem, answer);
        answer
    }
}
259
260impl<V, T> ProjectionElem<V, T> {
261 fn is_indirect(&self) -> bool {
264 match self {
265 Self::Deref => true,
266
267 Self::Field(_, _)
268 | Self::Index(_)
269 | Self::OpaqueCast(_)
270 | Self::ConstantIndex { .. }
271 | Self::Subslice { .. }
272 | Self::Downcast(_, _)
273 | Self::UnwrapUnsafeBinder(..) => false,
274 }
275 }
276
277 pub fn is_stable_offset(&self) -> bool {
280 match self {
281 Self::Deref | Self::Index(_) => false,
282 Self::Field(_, _)
283 | Self::OpaqueCast(_)
284 | Self::ConstantIndex { .. }
285 | Self::Subslice { .. }
286 | Self::Downcast(_, _)
287 | Self::UnwrapUnsafeBinder(..) => true,
288 }
289 }
290
291 pub fn is_downcast_to(&self, v: VariantIdx) -> bool {
293 matches!(*self, Self::Downcast(_, x) if x == v)
294 }
295
296 pub fn is_field_to(&self, f: FieldIdx) -> bool {
298 matches!(*self, Self::Field(x, _) if x == f)
299 }
300
301 pub fn can_use_in_debuginfo(&self) -> bool {
303 match self {
304 Self::ConstantIndex { from_end: false, .. }
305 | Self::Deref
306 | Self::Downcast(_, _)
307 | Self::Field(_, _) => true,
308 Self::ConstantIndex { from_end: true, .. }
309 | Self::Index(_)
310 | Self::OpaqueCast(_)
311 | Self::Subslice { .. } => false,
312
313 Self::UnwrapUnsafeBinder(..) => false,
315 }
316 }
317
318 pub fn kind(self) -> ProjectionKind {
320 self.try_map(|_| Some(()), |_| ()).unwrap()
321 }
322
323 pub fn try_map<V2, T2>(
325 self,
326 v: impl FnOnce(V) -> Option<V2>,
327 t: impl FnOnce(T) -> T2,
328 ) -> Option<ProjectionElem<V2, T2>> {
329 Some(match self {
330 ProjectionElem::Deref => ProjectionElem::Deref,
331 ProjectionElem::Downcast(name, read_variant) => {
332 ProjectionElem::Downcast(name, read_variant)
333 }
334 ProjectionElem::Field(f, ty) => ProjectionElem::Field(f, t(ty)),
335 ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
336 ProjectionElem::ConstantIndex { offset, min_length, from_end }
337 }
338 ProjectionElem::Subslice { from, to, from_end } => {
339 ProjectionElem::Subslice { from, to, from_end }
340 }
341 ProjectionElem::OpaqueCast(ty) => ProjectionElem::OpaqueCast(t(ty)),
342 ProjectionElem::UnwrapUnsafeBinder(ty) => ProjectionElem::UnwrapUnsafeBinder(t(ty)),
343 ProjectionElem::Index(val) => ProjectionElem::Index(v(val)?),
344 })
345 }
346}
347
348pub type ProjectionKind = ProjectionElem<(), ()>;
351
/// A borrowed view of a `Place`: the base local plus a slice of projections.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct PlaceRef<'tcx> {
    pub local: Local,
    pub projection: &'tcx [PlaceElem<'tcx>],
}

// Explicitly forbid `PartialOrd` so place refs can never be ordered with
// `<` / `>` comparisons.
impl<'tcx> !PartialOrd for PlaceRef<'tcx> {}
363
364impl<'tcx> Place<'tcx> {
365 pub fn return_place() -> Place<'tcx> {
367 Place { local: RETURN_PLACE, projection: List::empty() }
368 }
369
370 pub fn is_indirect(&self) -> bool {
375 self.projection.iter().any(|elem| elem.is_indirect())
376 }
377
378 pub fn is_indirect_first_projection(&self) -> bool {
384 self.as_ref().is_indirect_first_projection()
385 }
386
387 #[inline(always)]
390 pub fn local_or_deref_local(&self) -> Option<Local> {
391 self.as_ref().local_or_deref_local()
392 }
393
394 #[inline(always)]
397 pub fn as_local(&self) -> Option<Local> {
398 self.as_ref().as_local()
399 }
400
401 #[inline]
402 pub fn as_ref(&self) -> PlaceRef<'tcx> {
403 PlaceRef { local: self.local, projection: self.projection }
404 }
405
406 #[inline]
414 pub fn iter_projections(
415 self,
416 ) -> impl Iterator<Item = (PlaceRef<'tcx>, PlaceElem<'tcx>)> + DoubleEndedIterator {
417 self.as_ref().iter_projections()
418 }
419
420 pub fn project_deeper(self, more_projections: &[PlaceElem<'tcx>], tcx: TyCtxt<'tcx>) -> Self {
423 if more_projections.is_empty() {
424 return self;
425 }
426
427 self.as_ref().project_deeper(more_projections, tcx)
428 }
429
430 pub fn ty_from<D>(
431 local: Local,
432 projection: &[PlaceElem<'tcx>],
433 local_decls: &D,
434 tcx: TyCtxt<'tcx>,
435 ) -> PlaceTy<'tcx>
436 where
437 D: ?Sized + HasLocalDecls<'tcx>,
438 {
439 PlaceTy::from_ty(local_decls.local_decls()[local].ty).multi_projection_ty(tcx, projection)
440 }
441
442 pub fn ty<D: ?Sized>(&self, local_decls: &D, tcx: TyCtxt<'tcx>) -> PlaceTy<'tcx>
443 where
444 D: HasLocalDecls<'tcx>,
445 {
446 Place::ty_from(self.local, self.projection, local_decls, tcx)
447 }
448}
449
450impl From<Local> for Place<'_> {
451 #[inline]
452 fn from(local: Local) -> Self {
453 Place { local, projection: List::empty() }
454 }
455}
456
457impl<'tcx> PlaceRef<'tcx> {
458 pub fn local_or_deref_local(&self) -> Option<Local> {
461 match *self {
462 PlaceRef { local, projection: [] }
463 | PlaceRef { local, projection: [ProjectionElem::Deref] } => Some(local),
464 _ => None,
465 }
466 }
467
468 pub fn is_indirect(&self) -> bool {
473 self.projection.iter().any(|elem| elem.is_indirect())
474 }
475
476 pub fn is_indirect_first_projection(&self) -> bool {
482 debug_assert!(
484 self.projection.is_empty() || !self.projection[1..].contains(&PlaceElem::Deref)
485 );
486 self.projection.first() == Some(&PlaceElem::Deref)
487 }
488
489 #[inline]
492 pub fn as_local(&self) -> Option<Local> {
493 match *self {
494 PlaceRef { local, projection: [] } => Some(local),
495 _ => None,
496 }
497 }
498
499 #[inline]
500 pub fn to_place(&self, tcx: TyCtxt<'tcx>) -> Place<'tcx> {
501 Place { local: self.local, projection: tcx.mk_place_elems(self.projection) }
502 }
503
504 #[inline]
505 pub fn last_projection(&self) -> Option<(PlaceRef<'tcx>, PlaceElem<'tcx>)> {
506 if let &[ref proj_base @ .., elem] = self.projection {
507 Some((PlaceRef { local: self.local, projection: proj_base }, elem))
508 } else {
509 None
510 }
511 }
512
513 #[inline]
521 pub fn iter_projections(
522 self,
523 ) -> impl Iterator<Item = (PlaceRef<'tcx>, PlaceElem<'tcx>)> + DoubleEndedIterator {
524 self.projection.iter().enumerate().map(move |(i, proj)| {
525 let base = PlaceRef { local: self.local, projection: &self.projection[..i] };
526 (base, *proj)
527 })
528 }
529
530 pub fn accessed_locals(self) -> impl Iterator<Item = Local> {
532 std::iter::once(self.local).chain(self.projection.iter().filter_map(|proj| match proj {
533 ProjectionElem::Index(local) => Some(*local),
534 ProjectionElem::Deref
535 | ProjectionElem::Field(_, _)
536 | ProjectionElem::ConstantIndex { .. }
537 | ProjectionElem::Subslice { .. }
538 | ProjectionElem::Downcast(_, _)
539 | ProjectionElem::OpaqueCast(_)
540 | ProjectionElem::UnwrapUnsafeBinder(_) => None,
541 }))
542 }
543
544 pub fn project_deeper(
547 self,
548 more_projections: &[PlaceElem<'tcx>],
549 tcx: TyCtxt<'tcx>,
550 ) -> Place<'tcx> {
551 let mut v: Vec<PlaceElem<'tcx>>;
552
553 let new_projections = if self.projection.is_empty() {
554 more_projections
555 } else {
556 v = Vec::with_capacity(self.projection.len() + more_projections.len());
557 v.extend(self.projection);
558 v.extend(more_projections);
559 &v
560 };
561
562 Place { local: self.local, projection: tcx.mk_place_elems(new_projections) }
563 }
564
565 pub fn ty<D>(&self, local_decls: &D, tcx: TyCtxt<'tcx>) -> PlaceTy<'tcx>
566 where
567 D: ?Sized + HasLocalDecls<'tcx>,
568 {
569 Place::ty_from(self.local, self.projection, local_decls, tcx)
570 }
571}
572
573impl From<Local> for PlaceRef<'_> {
574 #[inline]
575 fn from(local: Local) -> Self {
576 PlaceRef { local, projection: &[] }
577 }
578}
579
580impl<'tcx> Operand<'tcx> {
584 pub fn function_handle(
588 tcx: TyCtxt<'tcx>,
589 def_id: DefId,
590 args: impl IntoIterator<Item = GenericArg<'tcx>>,
591 span: Span,
592 ) -> Self {
593 let ty = Ty::new_fn_def(tcx, def_id, args);
594 Operand::Constant(Box::new(ConstOperand {
595 span,
596 user_ty: None,
597 const_: Const::Val(ConstValue::ZeroSized, ty),
598 }))
599 }
600
601 pub fn is_move(&self) -> bool {
602 matches!(self, Operand::Move(..))
603 }
604
605 pub fn const_from_scalar(
608 tcx: TyCtxt<'tcx>,
609 ty: Ty<'tcx>,
610 val: Scalar,
611 span: Span,
612 ) -> Operand<'tcx> {
613 debug_assert!({
614 let typing_env = ty::TypingEnv::fully_monomorphized();
615 let type_size = tcx
616 .layout_of(typing_env.as_query_input(ty))
617 .unwrap_or_else(|e| panic!("could not compute layout for {ty:?}: {e:?}"))
618 .size;
619 let scalar_size = match val {
620 Scalar::Int(int) => int.size(),
621 _ => panic!("Invalid scalar type {val:?}"),
622 };
623 scalar_size == type_size
624 });
625 Operand::Constant(Box::new(ConstOperand {
626 span,
627 user_ty: None,
628 const_: Const::Val(ConstValue::Scalar(val), ty),
629 }))
630 }
631
632 pub fn to_copy(&self) -> Self {
633 match *self {
634 Operand::Copy(_) | Operand::Constant(_) => self.clone(),
635 Operand::Move(place) => Operand::Copy(place),
636 }
637 }
638
639 pub fn place(&self) -> Option<Place<'tcx>> {
642 match self {
643 Operand::Copy(place) | Operand::Move(place) => Some(*place),
644 Operand::Constant(_) => None,
645 }
646 }
647
648 pub fn constant(&self) -> Option<&ConstOperand<'tcx>> {
651 match self {
652 Operand::Constant(x) => Some(&**x),
653 Operand::Copy(_) | Operand::Move(_) => None,
654 }
655 }
656
657 pub fn const_fn_def(&self) -> Option<(DefId, GenericArgsRef<'tcx>)> {
662 let const_ty = self.constant()?.const_.ty();
663 if let ty::FnDef(def_id, args) = *const_ty.kind() { Some((def_id, args)) } else { None }
664 }
665
666 pub fn ty<D>(&self, local_decls: &D, tcx: TyCtxt<'tcx>) -> Ty<'tcx>
667 where
668 D: ?Sized + HasLocalDecls<'tcx>,
669 {
670 match self {
671 &Operand::Copy(ref l) | &Operand::Move(ref l) => l.ty(local_decls, tcx).ty,
672 Operand::Constant(c) => c.const_.ty(),
673 }
674 }
675
676 pub fn span<D>(&self, local_decls: &D) -> Span
677 where
678 D: ?Sized + HasLocalDecls<'tcx>,
679 {
680 match self {
681 &Operand::Copy(ref l) | &Operand::Move(ref l) => {
682 local_decls.local_decls()[l.local].source_info.span
683 }
684 Operand::Constant(c) => c.span,
685 }
686 }
687}
688
689impl<'tcx> ConstOperand<'tcx> {
690 pub fn check_static_ptr(&self, tcx: TyCtxt<'_>) -> Option<DefId> {
691 match self.const_.try_to_scalar() {
692 Some(Scalar::Ptr(ptr, _size)) => match tcx.global_alloc(ptr.provenance.alloc_id()) {
693 GlobalAlloc::Static(def_id) => {
694 assert!(!tcx.is_thread_local_static(def_id));
695 Some(def_id)
696 }
697 _ => None,
698 },
699 _ => None,
700 }
701 }
702
703 #[inline]
704 pub fn ty(&self) -> Ty<'tcx> {
705 self.const_.ty()
706 }
707}
708
/// How an rvalue initializes the place it is assigned to.
pub enum RvalueInitializationState {
    /// Only the outermost layer is initialized
    /// (only produced for `ShallowInitBox`; see `Rvalue::initialization_state`).
    Shallow,
    /// The assigned value is fully initialized.
    Deep,
}
716
impl<'tcx> Rvalue<'tcx> {
    /// Returns `true` if this rvalue can be deleted when its result is unused.
    /// The only exception is `PointerExposeProvenance`, whose provenance
    /// exposure is an effect in itself.
    #[inline]
    pub fn is_safe_to_remove(&self) -> bool {
        match self {
            Rvalue::Cast(CastKind::PointerExposeProvenance, _, _) => false,

            Rvalue::Use(_)
            | Rvalue::CopyForDeref(_)
            | Rvalue::Repeat(_, _)
            | Rvalue::Ref(_, _, _)
            | Rvalue::ThreadLocalRef(_)
            | Rvalue::RawPtr(_, _)
            | Rvalue::Cast(
                CastKind::IntToInt
                | CastKind::FloatToInt
                | CastKind::FloatToFloat
                | CastKind::IntToFloat
                | CastKind::FnPtrToPtr
                | CastKind::PtrToPtr
                | CastKind::PointerCoercion(_, _)
                | CastKind::PointerWithExposedProvenance
                | CastKind::Transmute
                | CastKind::Subtype,
                _,
                _,
            )
            | Rvalue::BinaryOp(_, _)
            | Rvalue::NullaryOp(_, _)
            | Rvalue::UnaryOp(_, _)
            | Rvalue::Discriminant(_)
            | Rvalue::Aggregate(_, _)
            | Rvalue::ShallowInitBox(_, _)
            | Rvalue::WrapUnsafeBinder(_, _) => true,
        }
    }

    /// Computes the type this rvalue produces.
    pub fn ty<D>(&self, local_decls: &D, tcx: TyCtxt<'tcx>) -> Ty<'tcx>
    where
        D: ?Sized + HasLocalDecls<'tcx>,
    {
        match *self {
            Rvalue::Use(ref operand) => operand.ty(local_decls, tcx),
            Rvalue::Repeat(ref operand, count) => {
                Ty::new_array_with_const_len(tcx, operand.ty(local_decls, tcx), count)
            }
            Rvalue::ThreadLocalRef(did) => tcx.thread_local_ptr_ty(did),
            // Borrows produce a reference to the place's type, with the
            // mutability implied by the borrow kind.
            Rvalue::Ref(reg, bk, ref place) => {
                let place_ty = place.ty(local_decls, tcx).ty;
                Ty::new_ref(tcx, reg, place_ty, bk.to_mutbl_lossy())
            }
            Rvalue::RawPtr(kind, ref place) => {
                let place_ty = place.ty(local_decls, tcx).ty;
                Ty::new_ptr(tcx, place_ty, kind.to_mutbl_lossy())
            }
            // Casts carry their target type explicitly.
            Rvalue::Cast(.., ty) => ty,
            Rvalue::BinaryOp(op, box (ref lhs, ref rhs)) => {
                let lhs_ty = lhs.ty(local_decls, tcx);
                let rhs_ty = rhs.ty(local_decls, tcx);
                op.ty(tcx, lhs_ty, rhs_ty)
            }
            Rvalue::UnaryOp(op, ref operand) => {
                let arg_ty = operand.ty(local_decls, tcx);
                op.ty(tcx, arg_ty)
            }
            Rvalue::Discriminant(ref place) => place.ty(local_decls, tcx).ty.discriminant_ty(tcx),
            Rvalue::NullaryOp(NullOp::SizeOf | NullOp::AlignOf | NullOp::OffsetOf(..), _) => {
                tcx.types.usize
            }
            Rvalue::NullaryOp(NullOp::ContractChecks, _)
            | Rvalue::NullaryOp(NullOp::UbChecks, _) => tcx.types.bool,
            Rvalue::Aggregate(ref ak, ref ops) => match **ak {
                AggregateKind::Array(ty) => Ty::new_array(tcx, ty, ops.len() as u64),
                AggregateKind::Tuple => {
                    Ty::new_tup_from_iter(tcx, ops.iter().map(|op| op.ty(local_decls, tcx)))
                }
                AggregateKind::Adt(did, _, args, _, _) => tcx.type_of(did).instantiate(tcx, args),
                AggregateKind::Closure(did, args) => Ty::new_closure(tcx, did, args),
                AggregateKind::Coroutine(did, args) => Ty::new_coroutine(tcx, did, args),
                AggregateKind::CoroutineClosure(did, args) => {
                    Ty::new_coroutine_closure(tcx, did, args)
                }
                AggregateKind::RawPtr(ty, mutability) => Ty::new_ptr(tcx, ty, mutability),
            },
            Rvalue::ShallowInitBox(_, ty) => Ty::new_box(tcx, ty),
            Rvalue::CopyForDeref(ref place) => place.ty(local_decls, tcx).ty,
            Rvalue::WrapUnsafeBinder(_, ty) => ty,
        }
    }

    /// Returns whether assigning this rvalue initializes its destination
    /// shallowly (`ShallowInitBox` only) or deeply (everything else).
    #[inline]
    pub fn initialization_state(&self) -> RvalueInitializationState {
        match *self {
            Rvalue::ShallowInitBox(_, _) => RvalueInitializationState::Shallow,
            _ => RvalueInitializationState::Deep,
        }
    }
}
820
821impl BorrowKind {
822 pub fn mutability(&self) -> Mutability {
823 match *self {
824 BorrowKind::Shared | BorrowKind::Fake(_) => Mutability::Not,
825 BorrowKind::Mut { .. } => Mutability::Mut,
826 }
827 }
828
829 pub fn allows_two_phase_borrow(&self) -> bool {
832 match *self {
833 BorrowKind::Shared
834 | BorrowKind::Fake(_)
835 | BorrowKind::Mut { kind: MutBorrowKind::Default | MutBorrowKind::ClosureCapture } => {
836 false
837 }
838 BorrowKind::Mut { kind: MutBorrowKind::TwoPhaseBorrow } => true,
839 }
840 }
841
842 pub fn to_mutbl_lossy(self) -> hir::Mutability {
843 match self {
844 BorrowKind::Mut { .. } => hir::Mutability::Mut,
845 BorrowKind::Shared => hir::Mutability::Not,
846
847 BorrowKind::Fake(_) => hir::Mutability::Not,
850 }
851 }
852}
853
854impl<'tcx> NullOp<'tcx> {
855 pub fn ty(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
856 match self {
857 NullOp::SizeOf | NullOp::AlignOf | NullOp::OffsetOf(_) => tcx.types.usize,
858 NullOp::UbChecks | NullOp::ContractChecks => tcx.types.bool,
859 }
860 }
861}
862
863impl<'tcx> UnOp {
864 pub fn ty(&self, tcx: TyCtxt<'tcx>, arg_ty: Ty<'tcx>) -> Ty<'tcx> {
865 match self {
866 UnOp::Not | UnOp::Neg => arg_ty,
867 UnOp::PtrMetadata => arg_ty.pointee_metadata_ty_or_projection(tcx),
868 }
869 }
870}
871
872impl<'tcx> BinOp {
873 pub fn ty(&self, tcx: TyCtxt<'tcx>, lhs_ty: Ty<'tcx>, rhs_ty: Ty<'tcx>) -> Ty<'tcx> {
874 match self {
876 &BinOp::Add
877 | &BinOp::AddUnchecked
878 | &BinOp::Sub
879 | &BinOp::SubUnchecked
880 | &BinOp::Mul
881 | &BinOp::MulUnchecked
882 | &BinOp::Div
883 | &BinOp::Rem
884 | &BinOp::BitXor
885 | &BinOp::BitAnd
886 | &BinOp::BitOr => {
887 assert_eq!(lhs_ty, rhs_ty);
889 lhs_ty
890 }
891 &BinOp::AddWithOverflow | &BinOp::SubWithOverflow | &BinOp::MulWithOverflow => {
892 assert_eq!(lhs_ty, rhs_ty);
894 Ty::new_tup(tcx, &[lhs_ty, tcx.types.bool])
895 }
896 &BinOp::Shl
897 | &BinOp::ShlUnchecked
898 | &BinOp::Shr
899 | &BinOp::ShrUnchecked
900 | &BinOp::Offset => {
901 lhs_ty }
903 &BinOp::Eq | &BinOp::Lt | &BinOp::Le | &BinOp::Ne | &BinOp::Ge | &BinOp::Gt => {
904 tcx.types.bool
905 }
906 &BinOp::Cmp => {
907 assert_eq!(lhs_ty, rhs_ty);
909 tcx.ty_ordering_enum(DUMMY_SP)
910 }
911 }
912 }
913 pub(crate) fn to_hir_binop(self) -> hir::BinOpKind {
914 match self {
915 BinOp::Add | BinOp::AddWithOverflow => hir::BinOpKind::Add,
918 BinOp::Sub | BinOp::SubWithOverflow => hir::BinOpKind::Sub,
919 BinOp::Mul | BinOp::MulWithOverflow => hir::BinOpKind::Mul,
920 BinOp::Div => hir::BinOpKind::Div,
921 BinOp::Rem => hir::BinOpKind::Rem,
922 BinOp::BitXor => hir::BinOpKind::BitXor,
923 BinOp::BitAnd => hir::BinOpKind::BitAnd,
924 BinOp::BitOr => hir::BinOpKind::BitOr,
925 BinOp::Shl => hir::BinOpKind::Shl,
926 BinOp::Shr => hir::BinOpKind::Shr,
927 BinOp::Eq => hir::BinOpKind::Eq,
928 BinOp::Ne => hir::BinOpKind::Ne,
929 BinOp::Lt => hir::BinOpKind::Lt,
930 BinOp::Gt => hir::BinOpKind::Gt,
931 BinOp::Le => hir::BinOpKind::Le,
932 BinOp::Ge => hir::BinOpKind::Ge,
933 BinOp::Cmp
935 | BinOp::AddUnchecked
936 | BinOp::SubUnchecked
937 | BinOp::MulUnchecked
938 | BinOp::ShlUnchecked
939 | BinOp::ShrUnchecked
940 | BinOp::Offset => {
941 unreachable!()
942 }
943 }
944 }
945
946 pub fn overflowing_to_wrapping(self) -> Option<BinOp> {
948 Some(match self {
949 BinOp::AddWithOverflow => BinOp::Add,
950 BinOp::SubWithOverflow => BinOp::Sub,
951 BinOp::MulWithOverflow => BinOp::Mul,
952 _ => return None,
953 })
954 }
955
956 pub fn is_overflowing(self) -> bool {
958 self.overflowing_to_wrapping().is_some()
959 }
960
961 pub fn wrapping_to_overflowing(self) -> Option<BinOp> {
963 Some(match self {
964 BinOp::Add => BinOp::AddWithOverflow,
965 BinOp::Sub => BinOp::SubWithOverflow,
966 BinOp::Mul => BinOp::MulWithOverflow,
967 _ => return None,
968 })
969 }
970}
971
972impl From<Mutability> for RawPtrKind {
973 fn from(other: Mutability) -> Self {
974 match other {
975 Mutability::Mut => RawPtrKind::Mut,
976 Mutability::Not => RawPtrKind::Const,
977 }
978 }
979}
980
981impl RawPtrKind {
982 pub fn is_fake(self) -> bool {
983 match self {
984 RawPtrKind::Mut | RawPtrKind::Const => false,
985 RawPtrKind::FakeForPtrMetadata => true,
986 }
987 }
988
989 pub fn to_mutbl_lossy(self) -> Mutability {
990 match self {
991 RawPtrKind::Mut => Mutability::Mut,
992 RawPtrKind::Const => Mutability::Not,
993
994 RawPtrKind::FakeForPtrMetadata => Mutability::Not,
997 }
998 }
999
1000 pub fn ptr_str(self) -> &'static str {
1001 match self {
1002 RawPtrKind::Mut => "mut",
1003 RawPtrKind::Const => "const",
1004 RawPtrKind::FakeForPtrMetadata => "const (fake)",
1005 }
1006 }
1007}
1008
/// An ordered collection of per-statement debuginfo entries.
#[derive(Default, Debug, Clone, TyEncodable, TyDecodable, HashStable, TypeFoldable, TypeVisitable)]
pub struct StmtDebugInfos<'tcx>(Vec<StmtDebugInfo<'tcx>>);
1011
1012impl<'tcx> StmtDebugInfos<'tcx> {
1013 pub fn push(&mut self, debuginfo: StmtDebugInfo<'tcx>) {
1014 self.0.push(debuginfo);
1015 }
1016
1017 pub fn drop_debuginfo(&mut self) {
1018 self.0.clear();
1019 }
1020
1021 pub fn is_empty(&self) -> bool {
1022 self.0.is_empty()
1023 }
1024
1025 pub fn prepend(&mut self, debuginfos: &mut Self) {
1026 if debuginfos.is_empty() {
1027 return;
1028 };
1029 debuginfos.0.append(self);
1030 std::mem::swap(debuginfos, self);
1031 }
1032
1033 pub fn append(&mut self, debuginfos: &mut Self) {
1034 if debuginfos.is_empty() {
1035 return;
1036 };
1037 self.0.append(debuginfos);
1038 }
1039
1040 pub fn extend(&mut self, debuginfos: &Self) {
1041 if debuginfos.is_empty() {
1042 return;
1043 };
1044 self.0.extend_from_slice(debuginfos);
1045 }
1046
1047 pub fn retain<F>(&mut self, f: F)
1048 where
1049 F: FnMut(&StmtDebugInfo<'tcx>) -> bool,
1050 {
1051 self.0.retain(f);
1052 }
1053}
1054
1055impl<'tcx> ops::Deref for StmtDebugInfos<'tcx> {
1056 type Target = Vec<StmtDebugInfo<'tcx>>;
1057
1058 #[inline]
1059 fn deref(&self) -> &Vec<StmtDebugInfo<'tcx>> {
1060 &self.0
1061 }
1062}
1063
1064impl<'tcx> ops::DerefMut for StmtDebugInfos<'tcx> {
1065 #[inline]
1066 fn deref_mut(&mut self) -> &mut Vec<StmtDebugInfo<'tcx>> {
1067 &mut self.0
1068 }
1069}
1070
/// Debuginfo recovered from an eliminated statement.
#[derive(Clone, TyEncodable, TyDecodable, HashStable, TypeFoldable, TypeVisitable)]
pub enum StmtDebugInfo<'tcx> {
    /// `local = &place`, preserved from a removed reference assignment
    /// (see `StatementKind::as_debuginfo`).
    AssignRef(Local, Place<'tcx>),
    // NOTE(review): presumably marks `local`'s previous debuginfo value as no
    // longer valid — confirm against the passes producing this variant.
    InvalidAssign(Local),
}
1075}