1use rustc_abi::{ExternAbi, FIRST_VARIANT, Size};
4use rustc_attr_parsing::InlineAttr;
5use rustc_data_structures::fx::{FxHashMap, FxHashSet};
6use rustc_hir::LangItem;
7use rustc_index::IndexVec;
8use rustc_index::bit_set::DenseBitSet;
9use rustc_infer::infer::TyCtxtInferExt;
10use rustc_infer::traits::{Obligation, ObligationCause};
11use rustc_middle::mir::coverage::CoverageKind;
12use rustc_middle::mir::visit::{NonUseContext, PlaceContext, Visitor};
13use rustc_middle::mir::*;
14use rustc_middle::ty::adjustment::PointerCoercion;
15use rustc_middle::ty::{
16 self, CoroutineArgsExt, InstanceKind, ScalarInt, Ty, TyCtxt, TypeVisitableExt, Variance,
17};
18use rustc_middle::{bug, span_bug};
19use rustc_trait_selection::traits::ObligationCtxt;
20use rustc_type_ir::Upcast;
21
22use crate::util::{self, is_within_packed};
23
/// Classifies a CFG successor edge for the unwind invariants enforced by
/// `CfgChecker::check_edge`: whether the edge is taken while unwinding or
/// during normal control flow.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum EdgeKind {
    /// An edge taken when the terminator unwinds (e.g. an `UnwindAction::Cleanup` target).
    Unwind,
    /// An ordinary (non-unwinding) successor edge.
    Normal,
}
29
/// A MIR pass that checks invariants of the body instead of transforming it.
pub(super) struct Validator {
    /// Describes at which point in the MIR pipeline this validation is happening,
    /// so that `fail`/`span_bug` messages can report *when* the MIR broke.
    pub when: String,
}
34
impl<'tcx> crate::MirPass<'tcx> for Validator {
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        // Intrinsic and virtual-call shims are synthetic bodies; skip validating them.
        if matches!(body.source.instance, InstanceKind::Intrinsic(..) | InstanceKind::Virtual(..)) {
            return;
        }
        let def_id = body.source.def_id();
        let typing_env = body.typing_env(tcx);
        // Determine whether this body is allowed to unwind. Before runtime MIR
        // (and for non-fn-like items) we conservatively assume it can; otherwise
        // derive the answer from the function's ABI.
        let can_unwind = if body.phase <= MirPhase::Runtime(RuntimePhase::Initial) {
            true
        } else if !tcx.def_kind(def_id).is_fn_like() {
            true
        } else {
            let body_ty = tcx.type_of(def_id).skip_binder();
            let body_abi = match body_ty.kind() {
                ty::FnDef(..) => body_ty.fn_sig(tcx).abi(),
                ty::Closure(..) => ExternAbi::RustCall,
                ty::CoroutineClosure(..) => ExternAbi::RustCall,
                ty::Coroutine(..) => ExternAbi::Rust,
                // A compile error was already reported; nothing meaningful to validate.
                ty::Error(_) => return,
                _ => span_bug!(body.span, "unexpected body ty: {body_ty:?}"),
            };

            ty::layout::fn_can_unwind(tcx, Some(def_id), body_abi)
        };

        // First check the control-flow-graph invariants (edges, cleanup structure).
        let mut cfg_checker = CfgChecker {
            when: &self.when,
            body,
            tcx,
            unwind_edge_count: 0,
            reachable_blocks: traversal::reachable_as_bitset(body),
            value_cache: FxHashSet::default(),
            can_unwind,
        };
        cfg_checker.visit_body(body);
        cfg_checker.check_cleanup_control_flow();

        // Then run the type-level checks, reporting failures through the CFG checker.
        for (location, msg) in validate_types(tcx, typing_env, body, body) {
            cfg_checker.fail(location, msg);
        }

        // Runtime MIR of ordinary items must have had all free regions erased.
        if let MirPhase::Runtime(_) = body.phase {
            if let ty::InstanceKind::Item(_) = body.source.instance {
                if body.has_free_regions() {
                    cfg_checker.fail(
                        Location::START,
                        format!("Free regions in optimized {} MIR", body.phase.name()),
                    );
                }
            }
        }
    }

    // Validation must never be skipped by pass-management heuristics.
    fn is_required(&self) -> bool {
        true
    }
}
99
/// Visitor that checks control-flow-graph invariants of a MIR body:
/// edge validity, cleanup-block structure, and statement/terminator phase rules.
struct CfgChecker<'a, 'tcx> {
    // Pipeline-position description, used in failure messages.
    when: &'a str,
    body: &'a Body<'tcx>,
    tcx: TyCtxt<'tcx>,
    // Number of (non-cleanup -> cleanup) unwind edges seen; used to decide whether
    // the cleanup control-flow check needs to run at all.
    unwind_edge_count: usize,
    // Blocks reachable from the start block; unreachable cleanup blocks are ignored.
    reachable_blocks: DenseBitSet<BasicBlock>,
    // Scratch set reused to detect duplicate `SwitchInt` values.
    value_cache: FxHashSet<u128>,
    // Whether this function is permitted to unwind (see `Validator::run_pass`).
    can_unwind: bool,
}
117
impl<'a, 'tcx> CfgChecker<'a, 'tcx> {
    /// Reports broken MIR. Panics via the failed `assert!` unless the compiler has
    /// already emitted an error — broken MIR is expected after reported errors.
    #[track_caller]
    fn fail(&self, location: Location, msg: impl AsRef<str>) {
        assert!(
            self.tcx.dcx().has_errors().is_some(),
            "broken MIR in {:?} ({}) at {:?}:\n{}",
            self.body.source.instance,
            self.when,
            location,
            msg.as_ref(),
        );
    }

    /// Checks that the edge from `location` to `bb` is structurally valid:
    /// the target block exists, the start block has no predecessors, and the
    /// cleanup-ness of source/target matches the edge kind.
    fn check_edge(&mut self, location: Location, bb: BasicBlock, edge_kind: EdgeKind) {
        if bb == START_BLOCK {
            self.fail(location, "start block must not have predecessors")
        }
        if let Some(bb) = self.body.basic_blocks.get(bb) {
            let src = self.body.basic_blocks.get(location.block).unwrap();
            match (src.is_cleanup, bb.is_cleanup, edge_kind) {
                // Normal edges may connect non-cleanup to non-cleanup, or cleanup to cleanup.
                (false, false, EdgeKind::Normal)
                | (true, true, EdgeKind::Normal) => {}
                // Unwind edges must enter cleanup code from non-cleanup code.
                (false, true, EdgeKind::Unwind) => {
                    self.unwind_edge_count += 1;
                }
                // Every other combination violates the unwind invariants.
                _ => {
                    self.fail(
                        location,
                        format!(
                            "{:?} edge to {:?} violates unwind invariants (cleanup {:?} -> {:?})",
                            edge_kind,
                            bb,
                            src.is_cleanup,
                            bb.is_cleanup,
                        )
                    )
                }
            }
        } else {
            self.fail(location, format!("encountered jump to invalid basic block {bb:?}"))
        }
    }

    /// Checks that cleanup control flow forms a forest: after contracting each
    /// dominator-connected cleanup region to a single node, every node has at
    /// most one successor and there are no cycles.
    fn check_cleanup_control_flow(&self) {
        // With at most one unwind edge no violation is possible; skip the work.
        if self.unwind_edge_count <= 1 {
            return;
        }
        let doms = self.body.basic_blocks.dominators();
        let mut post_contract_node = FxHashMap::default();
        // Scratch buffer recording the dominator path walked, so all of its
        // members can be memoized to the same contracted root.
        let mut dom_path = vec![];
        // Maps a cleanup block to the root of its contracted cleanup region:
        // walk up immediate dominators while they are cleanup blocks.
        let mut get_post_contract_node = |mut bb| {
            let root = loop {
                if let Some(root) = post_contract_node.get(&bb) {
                    break *root;
                }
                let parent = doms.immediate_dominator(bb).unwrap();
                dom_path.push(bb);
                if !self.body.basic_blocks[parent].is_cleanup {
                    break bb;
                }
                bb = parent;
            };
            // Memoize the whole walked path.
            for bb in dom_path.drain(..) {
                post_contract_node.insert(bb, root);
            }
            root
        };

        // `parent[n]` records the unique successor allowed for contracted node `n`.
        let mut parent = IndexVec::from_elem(None, &self.body.basic_blocks);
        for (bb, bb_data) in self.body.basic_blocks.iter_enumerated() {
            if !bb_data.is_cleanup || !self.reachable_blocks.contains(bb) {
                continue;
            }
            let bb = get_post_contract_node(bb);
            for s in bb_data.terminator().successors() {
                let s = get_post_contract_node(s);
                // Self-edges within a contracted region are fine.
                if s == bb {
                    continue;
                }
                let parent = &mut parent[bb];
                match parent {
                    None => {
                        *parent = Some(s);
                    }
                    Some(e) if *e == s => (),
                    // A second, different successor: the cleanup graph is not a forest.
                    Some(e) => self.fail(
                        Location { block: bb, statement_index: 0 },
                        format!(
                            "Cleanup control flow violation: The blocks dominated by {:?} have edges to both {:?} and {:?}",
                            bb,
                            s,
                            *e
                        )
                    ),
                }
            }
        }

        // Check for cycles among the contracted nodes. `take()` marks each edge
        // as visited so every edge is traversed at most once overall.
        let mut stack = FxHashSet::default();
        for i in 0..parent.len() {
            let mut bb = BasicBlock::from_usize(i);
            stack.clear();
            stack.insert(bb);
            loop {
                let Some(parent) = parent[bb].take() else { break };
                let no_cycle = stack.insert(parent);
                if !no_cycle {
                    self.fail(
                        Location { block: bb, statement_index: 0 },
                        format!(
                            "Cleanup control flow violation: Cycle involving edge {bb:?} -> {parent:?}",
                        ),
                    );
                    break;
                }
                bb = parent;
            }
        }
    }

    /// Validates an `UnwindAction` attached to the terminator at `location`,
    /// enforcing which actions are legal inside/outside cleanup blocks.
    fn check_unwind_edge(&mut self, location: Location, unwind: UnwindAction) {
        let is_cleanup = self.body.basic_blocks[location.block].is_cleanup;
        match unwind {
            UnwindAction::Cleanup(unwind) => {
                if is_cleanup {
                    self.fail(location, "`UnwindAction::Cleanup` in cleanup block");
                }
                self.check_edge(location, unwind, EdgeKind::Unwind);
            }
            UnwindAction::Continue => {
                if is_cleanup {
                    self.fail(location, "`UnwindAction::Continue` in cleanup block");
                }

                if !self.can_unwind {
                    self.fail(location, "`UnwindAction::Continue` in no-unwind function");
                }
            }
            UnwindAction::Terminate(UnwindTerminateReason::InCleanup) => {
                if !is_cleanup {
                    self.fail(
                        location,
                        "`UnwindAction::Terminate(InCleanup)` in a non-cleanup block",
                    );
                }
            }
            UnwindAction::Unreachable | UnwindAction::Terminate(UnwindTerminateReason::Abi) => (),
        }
    }

    /// A call edge is "critical" if it both has an unwind successor and its
    /// normal target has multiple predecessors; optimized MIR must not contain such edges.
    fn is_critical_call_edge(&self, target: Option<BasicBlock>, unwind: UnwindAction) -> bool {
        let Some(target) = target else { return false };
        matches!(unwind, UnwindAction::Cleanup(_) | UnwindAction::Terminate(_))
            && self.body.basic_blocks.predecessors()[target].len() > 1
    }
}
282
283impl<'a, 'tcx> Visitor<'tcx> for CfgChecker<'a, 'tcx> {
284 fn visit_local(&mut self, local: Local, _context: PlaceContext, location: Location) {
285 if self.body.local_decls.get(local).is_none() {
286 self.fail(
287 location,
288 format!("local {local:?} has no corresponding declaration in `body.local_decls`"),
289 );
290 }
291 }
292
293 fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
294 match &statement.kind {
295 StatementKind::AscribeUserType(..) => {
296 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
297 self.fail(
298 location,
299 "`AscribeUserType` should have been removed after drop lowering phase",
300 );
301 }
302 }
303 StatementKind::FakeRead(..) => {
304 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
305 self.fail(
306 location,
307 "`FakeRead` should have been removed after drop lowering phase",
308 );
309 }
310 }
311 StatementKind::SetDiscriminant { .. } => {
312 if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial) {
313 self.fail(location, "`SetDiscriminant`is not allowed until deaggregation");
314 }
315 }
316 StatementKind::Deinit(..) => {
317 if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial) {
318 self.fail(location, "`Deinit`is not allowed until deaggregation");
319 }
320 }
321 StatementKind::Retag(kind, _) => {
322 if matches!(kind, RetagKind::TwoPhase) {
326 self.fail(location, format!("explicit `{kind:?}` is forbidden"));
327 }
328 }
329 StatementKind::Coverage(kind) => {
330 if self.body.phase >= MirPhase::Analysis(AnalysisPhase::PostCleanup)
331 && let CoverageKind::BlockMarker { .. } | CoverageKind::SpanMarker { .. } = kind
332 {
333 self.fail(
334 location,
335 format!("{kind:?} should have been removed after analysis"),
336 );
337 }
338 }
339 StatementKind::Assign(..)
340 | StatementKind::StorageLive(_)
341 | StatementKind::StorageDead(_)
342 | StatementKind::Intrinsic(_)
343 | StatementKind::ConstEvalCounter
344 | StatementKind::PlaceMention(..)
345 | StatementKind::BackwardIncompatibleDropHint { .. }
346 | StatementKind::Nop => {}
347 }
348
349 self.super_statement(statement, location);
350 }
351
352 fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
353 match &terminator.kind {
354 TerminatorKind::Goto { target } => {
355 self.check_edge(location, *target, EdgeKind::Normal);
356 }
357 TerminatorKind::SwitchInt { targets, discr: _ } => {
358 for (_, target) in targets.iter() {
359 self.check_edge(location, target, EdgeKind::Normal);
360 }
361 self.check_edge(location, targets.otherwise(), EdgeKind::Normal);
362
363 self.value_cache.clear();
364 self.value_cache.extend(targets.iter().map(|(value, _)| value));
365 let has_duplicates = targets.iter().len() != self.value_cache.len();
366 if has_duplicates {
367 self.fail(
368 location,
369 format!(
370 "duplicated values in `SwitchInt` terminator: {:?}",
371 terminator.kind,
372 ),
373 );
374 }
375 }
376 TerminatorKind::Drop { target, unwind, .. } => {
377 self.check_edge(location, *target, EdgeKind::Normal);
378 self.check_unwind_edge(location, *unwind);
379 }
380 TerminatorKind::Call { func, args, .. }
381 | TerminatorKind::TailCall { func, args, .. } => {
382 if let TerminatorKind::Call { target, unwind, destination, .. } = terminator.kind {
384 if let Some(target) = target {
385 self.check_edge(location, target, EdgeKind::Normal);
386 }
387 self.check_unwind_edge(location, unwind);
388
389 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Optimized)
395 && self.is_critical_call_edge(target, unwind)
396 {
397 self.fail(
398 location,
399 format!(
400 "encountered critical edge in `Call` terminator {:?}",
401 terminator.kind,
402 ),
403 );
404 }
405
406 if is_within_packed(self.tcx, &self.body.local_decls, destination).is_some() {
409 self.fail(
411 location,
412 format!(
413 "encountered packed place in `Call` terminator destination: {:?}",
414 terminator.kind,
415 ),
416 );
417 }
418 }
419
420 for arg in args {
421 if let Operand::Move(place) = &arg.node {
422 if is_within_packed(self.tcx, &self.body.local_decls, *place).is_some() {
423 self.fail(
425 location,
426 format!(
427 "encountered `Move` of a packed place in `Call` terminator: {:?}",
428 terminator.kind,
429 ),
430 );
431 }
432 }
433 }
434
435 if let ty::FnDef(did, ..) = func.ty(&self.body.local_decls, self.tcx).kind()
436 && self.body.phase >= MirPhase::Runtime(RuntimePhase::Optimized)
437 && matches!(self.tcx.codegen_fn_attrs(did).inline, InlineAttr::Force { .. })
438 {
439 self.fail(location, "`#[rustc_force_inline]`-annotated function not inlined");
440 }
441 }
442 TerminatorKind::Assert { target, unwind, .. } => {
443 self.check_edge(location, *target, EdgeKind::Normal);
444 self.check_unwind_edge(location, *unwind);
445 }
446 TerminatorKind::Yield { resume, drop, .. } => {
447 if self.body.coroutine.is_none() {
448 self.fail(location, "`Yield` cannot appear outside coroutine bodies");
449 }
450 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
451 self.fail(location, "`Yield` should have been replaced by coroutine lowering");
452 }
453 self.check_edge(location, *resume, EdgeKind::Normal);
454 if let Some(drop) = drop {
455 self.check_edge(location, *drop, EdgeKind::Normal);
456 }
457 }
458 TerminatorKind::FalseEdge { real_target, imaginary_target } => {
459 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
460 self.fail(
461 location,
462 "`FalseEdge` should have been removed after drop elaboration",
463 );
464 }
465 self.check_edge(location, *real_target, EdgeKind::Normal);
466 self.check_edge(location, *imaginary_target, EdgeKind::Normal);
467 }
468 TerminatorKind::FalseUnwind { real_target, unwind } => {
469 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
470 self.fail(
471 location,
472 "`FalseUnwind` should have been removed after drop elaboration",
473 );
474 }
475 self.check_edge(location, *real_target, EdgeKind::Normal);
476 self.check_unwind_edge(location, *unwind);
477 }
478 TerminatorKind::InlineAsm { targets, unwind, .. } => {
479 for &target in targets {
480 self.check_edge(location, target, EdgeKind::Normal);
481 }
482 self.check_unwind_edge(location, *unwind);
483 }
484 TerminatorKind::CoroutineDrop => {
485 if self.body.coroutine.is_none() {
486 self.fail(location, "`CoroutineDrop` cannot appear outside coroutine bodies");
487 }
488 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
489 self.fail(
490 location,
491 "`CoroutineDrop` should have been replaced by coroutine lowering",
492 );
493 }
494 }
495 TerminatorKind::UnwindResume => {
496 let bb = location.block;
497 if !self.body.basic_blocks[bb].is_cleanup {
498 self.fail(location, "Cannot `UnwindResume` from non-cleanup basic block")
499 }
500 if !self.can_unwind {
501 self.fail(location, "Cannot `UnwindResume` in a function that cannot unwind")
502 }
503 }
504 TerminatorKind::UnwindTerminate(_) => {
505 let bb = location.block;
506 if !self.body.basic_blocks[bb].is_cleanup {
507 self.fail(location, "Cannot `UnwindTerminate` from non-cleanup basic block")
508 }
509 }
510 TerminatorKind::Return => {
511 let bb = location.block;
512 if self.body.basic_blocks[bb].is_cleanup {
513 self.fail(location, "Cannot `Return` from cleanup basic block")
514 }
515 }
516 TerminatorKind::Unreachable => {}
517 }
518
519 self.super_terminator(terminator, location);
520 }
521
522 fn visit_source_scope(&mut self, scope: SourceScope) {
523 if self.body.source_scopes.get(scope).is_none() {
524 self.tcx.dcx().span_bug(
525 self.body.span,
526 format!(
527 "broken MIR in {:?} ({}):\ninvalid source scope {:?}",
528 self.body.source.instance, self.when, scope,
529 ),
530 );
531 }
532 }
533}
534
535pub(super) fn validate_types<'tcx>(
541 tcx: TyCtxt<'tcx>,
542 typing_env: ty::TypingEnv<'tcx>,
543 body: &Body<'tcx>,
544 caller_body: &Body<'tcx>,
545) -> Vec<(Location, String)> {
546 let mut type_checker = TypeChecker { body, caller_body, tcx, typing_env, failures: Vec::new() };
547 type_checker.visit_body(body);
548 type_checker.failures
549}
550
/// Visitor that checks type-level invariants of a MIR body (projection types,
/// rvalue operand types, debuginfo shapes, …), accumulating failures rather
/// than reporting them immediately.
struct TypeChecker<'a, 'tcx> {
    // The body currently being visited.
    body: &'a Body<'tcx>,
    // Body used to resolve coroutine layouts (may be the same as `body`).
    caller_body: &'a Body<'tcx>,
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    // Collected `(location, message)` validation failures.
    failures: Vec<(Location, String)>,
}
558
impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
    /// Records a validation failure at `location`.
    fn fail(&mut self, location: Location, msg: impl Into<String>) {
        self.failures.push((location, msg.into()));
    }

    /// Returns whether assigning a value of type `src` to a place of type `dest`
    /// is MIR-valid.
    fn mir_assign_valid_types(&self, src: Ty<'tcx>, dest: Ty<'tcx>) -> bool {
        // Fast path: identical types are always compatible.
        if src == dest {
            return true;
        }

        // Opaque types may not be revealed yet at this point, so we cannot
        // meaningfully relate them; accept conservatively.
        if (src, dest).has_opaque_types() {
            return true;
        }

        // Before runtime MIR, subtyping (covariance) is still allowed in
        // assignments; afterwards types must match exactly (invariance).
        let variance = if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
            Variance::Invariant
        } else {
            Variance::Covariant
        };

        crate::util::relate_types(self.tcx, self.typing_env, variance, src, dest)
    }

    /// Returns whether `pred` holds, ignoring region constraints.
    fn predicate_must_hold_modulo_regions(
        &self,
        pred: impl Upcast<TyCtxt<'tcx>, ty::Predicate<'tcx>>,
    ) -> bool {
        let pred: ty::Predicate<'tcx> = pred.upcast(self.tcx);

        // Predicates mentioning opaque types cannot be checked reliably here;
        // accept conservatively.
        if pred.has_opaque_types() {
            return true;
        }

        // Set up a fresh inference context and ask the trait solver.
        let (infcx, param_env) = self.tcx.infer_ctxt().build_with_typing_env(self.typing_env);
        let ocx = ObligationCtxt::new(&infcx);
        ocx.register_obligation(Obligation::new(
            self.tcx,
            ObligationCause::dummy(),
            param_env,
            pred,
        ));
        ocx.select_all_or_error().is_empty()
    }
}
620
621impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
622 fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
623 if self.tcx.sess.opts.unstable_opts.validate_mir
625 && self.body.phase < MirPhase::Runtime(RuntimePhase::Initial)
626 {
627 if let Operand::Copy(place) = operand {
629 let ty = place.ty(&self.body.local_decls, self.tcx).ty;
630
631 if !self.tcx.type_is_copy_modulo_regions(self.typing_env, ty) {
632 self.fail(location, format!("`Operand::Copy` with non-`Copy` type {ty}"));
633 }
634 }
635 }
636
637 self.super_operand(operand, location);
638 }
639
    /// Validates a single projection element applied to `place_ref`, checking
    /// phase legality and that the projected-to types match the parent type.
    fn visit_projection_elem(
        &mut self,
        place_ref: PlaceRef<'tcx>,
        elem: PlaceElem<'tcx>,
        context: PlaceContext,
        location: Location,
    ) {
        match elem {
            // Opaque-type casts must have been normalized away by runtime MIR.
            ProjectionElem::OpaqueCast(ty)
                if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) =>
            {
                self.fail(
                    location,
                    format!("explicit opaque type cast to `{ty}` after `PostAnalysisNormalize`"),
                )
            }
            // Index operands must be `usize` locals.
            ProjectionElem::Index(index) => {
                let index_ty = self.body.local_decls[index].ty;
                if index_ty != self.tcx.types.usize {
                    self.fail(location, format!("bad index ({index_ty:?} != usize)"))
                }
            }
            // After `ElaborateBoxDerefs`, `Box` is no longer directly dereferenceable.
            ProjectionElem::Deref
                if self.body.phase >= MirPhase::Runtime(RuntimePhase::PostCleanup) =>
            {
                let base_ty = place_ref.ty(&self.body.local_decls, self.tcx).ty;

                if base_ty.is_box() {
                    self.fail(
                        location,
                        format!("{base_ty:?} dereferenced after ElaborateBoxDerefs"),
                    )
                }
            }
            // Field projections: the field must exist and the recorded type must
            // agree with the actual field type.
            ProjectionElem::Field(f, ty) => {
                let parent_ty = place_ref.ty(&self.body.local_decls, self.tcx);
                let fail_out_of_bounds = |this: &mut Self, location| {
                    this.fail(location, format!("Out of bounds field {f:?} for {parent_ty:?}"));
                };
                let check_equal = |this: &mut Self, location, f_ty| {
                    if !this.mir_assign_valid_types(ty, f_ty) {
                        this.fail(
                            location,
                            format!(
                                "Field projection `{place_ref:?}.{f:?}` specified type `{ty:?}`, but actual type is `{f_ty:?}`"
                            )
                        )
                    }
                };

                // Peek through opaque types to the underlying (hidden) type.
                let kind = match parent_ty.ty.kind() {
                    &ty::Alias(ty::Opaque, ty::AliasTy { def_id, args, .. }) => {
                        self.tcx.type_of(def_id).instantiate(self.tcx, args).kind()
                    }
                    kind => kind,
                };

                match kind {
                    ty::Tuple(fields) => {
                        let Some(f_ty) = fields.get(f.as_usize()) else {
                            fail_out_of_bounds(self, location);
                            return;
                        };
                        check_equal(self, location, *f_ty);
                    }
                    ty::Adt(adt_def, args) => {
                        // `DynMetadata` has no layout-visible fields; projecting is always wrong.
                        if self.tcx.is_lang_item(adt_def.did(), LangItem::DynMetadata) {
                            self.fail(
                                location,
                                format!(
                                    "You can't project to field {f:?} of `DynMetadata` because \
                                     layout is weird and thinks it doesn't have fields."
                                ),
                            );
                        }

                        // Without an explicit downcast, the first variant is projected.
                        let var = parent_ty.variant_index.unwrap_or(FIRST_VARIANT);
                        let Some(field) = adt_def.variant(var).fields.get(f) else {
                            fail_out_of_bounds(self, location);
                            return;
                        };
                        check_equal(self, location, field.ty(self.tcx, args));
                    }
                    ty::Closure(_, args) => {
                        let args = args.as_closure();
                        let Some(&f_ty) = args.upvar_tys().get(f.as_usize()) else {
                            fail_out_of_bounds(self, location);
                            return;
                        };
                        check_equal(self, location, f_ty);
                    }
                    ty::CoroutineClosure(_, args) => {
                        let args = args.as_coroutine_closure();
                        let Some(&f_ty) = args.upvar_tys().get(f.as_usize()) else {
                            fail_out_of_bounds(self, location);
                            return;
                        };
                        check_equal(self, location, f_ty);
                    }
                    &ty::Coroutine(def_id, args) => {
                        let f_ty = if let Some(var) = parent_ty.variant_index {
                            // With a variant index we are projecting into a coroutine
                            // state; resolve the layout from the right body.
                            let layout = if def_id == self.caller_body.source.def_id() {
                                // Projecting into the caller coroutine's own state.
                                self.caller_body.coroutine_layout_raw()
                            } else if self.tcx.needs_coroutine_by_move_body_def_id(def_id)
                                && let ty::ClosureKind::FnOnce =
                                    args.as_coroutine().kind_ty().to_opt_closure_kind().unwrap()
                                && self.caller_body.source.def_id()
                                    == self.tcx.coroutine_by_move_body_def_id(def_id)
                            {
                                // A by-move coroutine body shares its layout with the
                                // original coroutine.
                                self.caller_body.coroutine_layout_raw()
                            } else {
                                self.tcx.coroutine_layout(def_id, args.as_coroutine().kind_ty())
                            };

                            let Some(layout) = layout else {
                                self.fail(
                                    location,
                                    format!("No coroutine layout for {parent_ty:?}"),
                                );
                                return;
                            };

                            let Some(&local) = layout.variant_fields[var].get(f) else {
                                fail_out_of_bounds(self, location);
                                return;
                            };

                            let Some(f_ty) = layout.field_tys.get(local) else {
                                self.fail(
                                    location,
                                    format!("Out of bounds local {local:?} for {parent_ty:?}"),
                                );
                                return;
                            };

                            ty::EarlyBinder::bind(f_ty.ty).instantiate(self.tcx, args)
                        } else {
                            // No variant index: only the upvar prefix is addressable.
                            let Some(&f_ty) = args.as_coroutine().prefix_tys().get(f.index())
                            else {
                                fail_out_of_bounds(self, location);
                                return;
                            };

                            f_ty
                        };

                        check_equal(self, location, f_ty);
                    }
                    _ => {
                        self.fail(location, format!("{:?} does not have fields", parent_ty.ty));
                    }
                }
            }
            // `Subtype` must record a supertype of the place's actual type.
            ProjectionElem::Subtype(ty) => {
                if !util::sub_types(
                    self.tcx,
                    self.typing_env,
                    ty,
                    place_ref.ty(&self.body.local_decls, self.tcx).ty,
                ) {
                    self.fail(
                        location,
                        format!(
                            "Failed subtyping {ty:#?} and {:#?}",
                            place_ref.ty(&self.body.local_decls, self.tcx).ty
                        ),
                    )
                }
            }
            // Unwrapping an unsafe binder: the base must be a `ty::UnsafeBinder`
            // and the recorded inner type must match (with bound regions erased).
            ProjectionElem::UnwrapUnsafeBinder(unwrapped_ty) => {
                let binder_ty = place_ref.ty(&self.body.local_decls, self.tcx);
                let ty::UnsafeBinder(binder_ty) = *binder_ty.ty.kind() else {
                    self.fail(
                        location,
                        format!("WrapUnsafeBinder does not produce a ty::UnsafeBinder"),
                    );
                    return;
                };
                let binder_inner_ty = self.tcx.instantiate_bound_regions_with_erased(*binder_ty);
                if !self.mir_assign_valid_types(unwrapped_ty, binder_inner_ty) {
                    self.fail(
                        location,
                        format!(
                            "Cannot unwrap unsafe binder {binder_ty:?} into type {unwrapped_ty:?}"
                        ),
                    );
                }
            }
            _ => {}
        }
        self.super_projection_elem(place_ref, elem, context, location);
    }
839
    /// Validates debuginfo entries: composite fragments must be field projections
    /// of struct-like types, and place-based debuginfo may only use projections
    /// representable in debuginfo.
    fn visit_var_debug_info(&mut self, debuginfo: &VarDebugInfo<'tcx>) {
        if let Some(box VarDebugInfoFragment { ty, ref projection }) = debuginfo.composite {
            // Unions and enums have no stable field layout for fragments.
            if ty.is_union() || ty.is_enum() {
                self.fail(
                    START_BLOCK.start_location(),
                    format!("invalid type {ty:?} in debuginfo for {:?}", debuginfo.name),
                );
            }
            // An empty fragment projection would make the fragment meaningless.
            if projection.is_empty() {
                self.fail(
                    START_BLOCK.start_location(),
                    format!("invalid empty projection in debuginfo for {:?}", debuginfo.name),
                );
            }
            // Fragments may only project through fields.
            if projection.iter().any(|p| !matches!(p, PlaceElem::Field(..))) {
                self.fail(
                    START_BLOCK.start_location(),
                    format!(
                        "illegal projection {:?} in debuginfo for {:?}",
                        projection, debuginfo.name
                    ),
                );
            }
        }
        match debuginfo.value {
            VarDebugInfoContents::Const(_) => {}
            VarDebugInfoContents::Place(place) => {
                // Every projection element must be expressible in debuginfo.
                if place.projection.iter().any(|p| !p.can_use_in_debuginfo()) {
                    self.fail(
                        START_BLOCK.start_location(),
                        format!("illegal place {:?} in debuginfo for {:?}", place, debuginfo.name),
                    );
                }
            }
        }
        self.super_var_debug_info(debuginfo);
    }
877
    /// Validates whole-place invariants: `Deref` may only be the first projection
    /// in runtime MIR (except in debuginfo), and every `Downcast` must be
    /// immediately followed by a `Field`.
    fn visit_place(&mut self, place: &Place<'tcx>, cntxt: PlaceContext, location: Location) {
        // Computing the type walks all projections and catches structural errors.
        let _ = place.ty(&self.body.local_decls, self.tcx);

        if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial)
            && place.projection.len() > 1
            && cntxt != PlaceContext::NonUse(NonUseContext::VarDebugInfo)
            && place.projection[1..].contains(&ProjectionElem::Deref)
        {
            self.fail(
                location,
                format!("place {place:?} has deref as a later projection (it is only permitted as the first projection)"),
            );
        }

        // Ensure all downcast projections are followed by field projections.
        // Note: the inner `next()` deliberately consumes the element after a
        // `Downcast`, so that element is not itself re-examined by the loop.
        let mut projections_iter = place.projection.iter();
        while let Some(proj) = projections_iter.next() {
            if matches!(proj, ProjectionElem::Downcast(..)) {
                if !matches!(projections_iter.next(), Some(ProjectionElem::Field(..))) {
                    self.fail(
                        location,
                        format!(
                            "place {place:?} has `Downcast` projection not followed by `Field`"
                        ),
                    );
                }
            }
        }

        self.super_place(place, cntxt, location);
    }
910
911 fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
912 macro_rules! check_kinds {
913 ($t:expr, $text:literal, $typat:pat) => {
914 if !matches!(($t).kind(), $typat) {
915 self.fail(location, format!($text, $t));
916 }
917 };
918 }
919 match rvalue {
920 Rvalue::Use(_) | Rvalue::CopyForDeref(_) => {}
921 Rvalue::Aggregate(kind, fields) => match **kind {
922 AggregateKind::Tuple => {}
923 AggregateKind::Array(dest) => {
924 for src in fields {
925 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
926 self.fail(location, "array field has the wrong type");
927 }
928 }
929 }
930 AggregateKind::Adt(def_id, idx, args, _, Some(field)) => {
931 let adt_def = self.tcx.adt_def(def_id);
932 assert!(adt_def.is_union());
933 assert_eq!(idx, FIRST_VARIANT);
934 let dest_ty = self.tcx.normalize_erasing_regions(
935 self.typing_env,
936 adt_def.non_enum_variant().fields[field].ty(self.tcx, args),
937 );
938 if let [field] = fields.raw.as_slice() {
939 let src_ty = field.ty(self.body, self.tcx);
940 if !self.mir_assign_valid_types(src_ty, dest_ty) {
941 self.fail(location, "union field has the wrong type");
942 }
943 } else {
944 self.fail(location, "unions should have one initialized field");
945 }
946 }
947 AggregateKind::Adt(def_id, idx, args, _, None) => {
948 let adt_def = self.tcx.adt_def(def_id);
949 assert!(!adt_def.is_union());
950 let variant = &adt_def.variants()[idx];
951 if variant.fields.len() != fields.len() {
952 self.fail(location, "adt has the wrong number of initialized fields");
953 }
954 for (src, dest) in std::iter::zip(fields, &variant.fields) {
955 let dest_ty = self
956 .tcx
957 .normalize_erasing_regions(self.typing_env, dest.ty(self.tcx, args));
958 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest_ty) {
959 self.fail(location, "adt field has the wrong type");
960 }
961 }
962 }
963 AggregateKind::Closure(_, args) => {
964 let upvars = args.as_closure().upvar_tys();
965 if upvars.len() != fields.len() {
966 self.fail(location, "closure has the wrong number of initialized fields");
967 }
968 for (src, dest) in std::iter::zip(fields, upvars) {
969 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
970 self.fail(location, "closure field has the wrong type");
971 }
972 }
973 }
974 AggregateKind::Coroutine(_, args) => {
975 let upvars = args.as_coroutine().upvar_tys();
976 if upvars.len() != fields.len() {
977 self.fail(location, "coroutine has the wrong number of initialized fields");
978 }
979 for (src, dest) in std::iter::zip(fields, upvars) {
980 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
981 self.fail(location, "coroutine field has the wrong type");
982 }
983 }
984 }
985 AggregateKind::CoroutineClosure(_, args) => {
986 let upvars = args.as_coroutine_closure().upvar_tys();
987 if upvars.len() != fields.len() {
988 self.fail(
989 location,
990 "coroutine-closure has the wrong number of initialized fields",
991 );
992 }
993 for (src, dest) in std::iter::zip(fields, upvars) {
994 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
995 self.fail(location, "coroutine-closure field has the wrong type");
996 }
997 }
998 }
999 AggregateKind::RawPtr(pointee_ty, mutability) => {
1000 if !matches!(self.body.phase, MirPhase::Runtime(_)) {
1001 self.fail(location, "RawPtr should be in runtime MIR only");
1005 }
1006
1007 if let [data_ptr, metadata] = fields.raw.as_slice() {
1008 let data_ptr_ty = data_ptr.ty(self.body, self.tcx);
1009 let metadata_ty = metadata.ty(self.body, self.tcx);
1010 if let ty::RawPtr(in_pointee, in_mut) = data_ptr_ty.kind() {
1011 if *in_mut != mutability {
1012 self.fail(location, "input and output mutability must match");
1013 }
1014
1015 if !in_pointee.is_sized(self.tcx, self.typing_env) {
1017 self.fail(location, "input pointer must be thin");
1018 }
1019 } else {
1020 self.fail(
1021 location,
1022 "first operand to raw pointer aggregate must be a raw pointer",
1023 );
1024 }
1025
1026 if pointee_ty.is_slice() {
1028 if !self.mir_assign_valid_types(metadata_ty, self.tcx.types.usize) {
1029 self.fail(location, "slice metadata must be usize");
1030 }
1031 } else if pointee_ty.is_sized(self.tcx, self.typing_env) {
1032 if metadata_ty != self.tcx.types.unit {
1033 self.fail(location, "metadata for pointer-to-thin must be unit");
1034 }
1035 }
1036 } else {
1037 self.fail(location, "raw pointer aggregate must have 2 fields");
1038 }
1039 }
1040 },
1041 Rvalue::Ref(_, BorrowKind::Fake(_), _) => {
1042 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
1043 self.fail(
1044 location,
1045 "`Assign` statement with a `Fake` borrow should have been removed in runtime MIR",
1046 );
1047 }
1048 }
1049 Rvalue::Ref(..) => {}
1050 Rvalue::Len(p) => {
1051 let pty = p.ty(&self.body.local_decls, self.tcx).ty;
1052 check_kinds!(
1053 pty,
1054 "Cannot compute length of non-array type {:?}",
1055 ty::Array(..) | ty::Slice(..)
1056 );
1057 }
1058 Rvalue::BinaryOp(op, vals) => {
1059 use BinOp::*;
1060 let a = vals.0.ty(&self.body.local_decls, self.tcx);
1061 let b = vals.1.ty(&self.body.local_decls, self.tcx);
1062 if crate::util::binop_right_homogeneous(*op) {
1063 if let Eq | Lt | Le | Ne | Ge | Gt = op {
1064 if !self.mir_assign_valid_types(a, b) {
1066 self.fail(
1067 location,
1068 format!("Cannot {op:?} compare incompatible types {a:?} and {b:?}"),
1069 );
1070 }
1071 } else if a != b {
1072 self.fail(
1073 location,
1074 format!(
1075 "Cannot perform binary op {op:?} on unequal types {a:?} and {b:?}"
1076 ),
1077 );
1078 }
1079 }
1080
1081 match op {
1082 Offset => {
1083 check_kinds!(a, "Cannot offset non-pointer type {:?}", ty::RawPtr(..));
1084 if b != self.tcx.types.isize && b != self.tcx.types.usize {
1085 self.fail(location, format!("Cannot offset by non-isize type {b:?}"));
1086 }
1087 }
1088 Eq | Lt | Le | Ne | Ge | Gt => {
1089 for x in [a, b] {
1090 check_kinds!(
1091 x,
1092 "Cannot {op:?} compare type {:?}",
1093 ty::Bool
1094 | ty::Char
1095 | ty::Int(..)
1096 | ty::Uint(..)
1097 | ty::Float(..)
1098 | ty::RawPtr(..)
1099 | ty::FnPtr(..)
1100 )
1101 }
1102 }
1103 Cmp => {
1104 for x in [a, b] {
1105 check_kinds!(
1106 x,
1107 "Cannot three-way compare non-integer type {:?}",
1108 ty::Char | ty::Uint(..) | ty::Int(..)
1109 )
1110 }
1111 }
1112 AddUnchecked | AddWithOverflow | SubUnchecked | SubWithOverflow
1113 | MulUnchecked | MulWithOverflow | Shl | ShlUnchecked | Shr | ShrUnchecked => {
1114 for x in [a, b] {
1115 check_kinds!(
1116 x,
1117 "Cannot {op:?} non-integer type {:?}",
1118 ty::Uint(..) | ty::Int(..)
1119 )
1120 }
1121 }
1122 BitAnd | BitOr | BitXor => {
1123 for x in [a, b] {
1124 check_kinds!(
1125 x,
1126 "Cannot perform bitwise op {op:?} on type {:?}",
1127 ty::Uint(..) | ty::Int(..) | ty::Bool
1128 )
1129 }
1130 }
1131 Add | Sub | Mul | Div | Rem => {
1132 for x in [a, b] {
1133 check_kinds!(
1134 x,
1135 "Cannot perform arithmetic {op:?} on type {:?}",
1136 ty::Uint(..) | ty::Int(..) | ty::Float(..)
1137 )
1138 }
1139 }
1140 }
1141 }
1142 Rvalue::UnaryOp(op, operand) => {
1143 let a = operand.ty(&self.body.local_decls, self.tcx);
1144 match op {
1145 UnOp::Neg => {
1146 check_kinds!(a, "Cannot negate type {:?}", ty::Int(..) | ty::Float(..))
1147 }
1148 UnOp::Not => {
1149 check_kinds!(
1150 a,
1151 "Cannot binary not type {:?}",
1152 ty::Int(..) | ty::Uint(..) | ty::Bool
1153 );
1154 }
1155 UnOp::PtrMetadata => {
1156 check_kinds!(
1157 a,
1158 "Cannot PtrMetadata non-pointer non-reference type {:?}",
1159 ty::RawPtr(..) | ty::Ref(..)
1160 );
1161 }
1162 }
1163 }
1164 Rvalue::ShallowInitBox(operand, _) => {
1165 let a = operand.ty(&self.body.local_decls, self.tcx);
1166 check_kinds!(a, "Cannot shallow init type {:?}", ty::RawPtr(..));
1167 }
1168 Rvalue::Cast(kind, operand, target_type) => {
1169 let op_ty = operand.ty(self.body, self.tcx);
1170 match kind {
1171 CastKind::PointerWithExposedProvenance | CastKind::PointerExposeProvenance => {}
1173 CastKind::PointerCoercion(PointerCoercion::ReifyFnPointer, _) => {
1174 check_kinds!(
1176 op_ty,
1177 "CastKind::{kind:?} input must be a fn item, not {:?}",
1178 ty::FnDef(..)
1179 );
1180 check_kinds!(
1181 target_type,
1182 "CastKind::{kind:?} output must be a fn pointer, not {:?}",
1183 ty::FnPtr(..)
1184 );
1185 }
1186 CastKind::PointerCoercion(PointerCoercion::UnsafeFnPointer, _) => {
1187 check_kinds!(
1189 op_ty,
1190 "CastKind::{kind:?} input must be a fn pointer, not {:?}",
1191 ty::FnPtr(..)
1192 );
1193 check_kinds!(
1194 target_type,
1195 "CastKind::{kind:?} output must be a fn pointer, not {:?}",
1196 ty::FnPtr(..)
1197 );
1198 }
1199 CastKind::PointerCoercion(PointerCoercion::ClosureFnPointer(..), _) => {
1200 check_kinds!(
1202 op_ty,
1203 "CastKind::{kind:?} input must be a closure, not {:?}",
1204 ty::Closure(..)
1205 );
1206 check_kinds!(
1207 target_type,
1208 "CastKind::{kind:?} output must be a fn pointer, not {:?}",
1209 ty::FnPtr(..)
1210 );
1211 }
1212 CastKind::PointerCoercion(PointerCoercion::MutToConstPointer, _) => {
1213 check_kinds!(
1215 op_ty,
1216 "CastKind::{kind:?} input must be a raw mut pointer, not {:?}",
1217 ty::RawPtr(_, Mutability::Mut)
1218 );
1219 check_kinds!(
1220 target_type,
1221 "CastKind::{kind:?} output must be a raw const pointer, not {:?}",
1222 ty::RawPtr(_, Mutability::Not)
1223 );
1224 if self.body.phase >= MirPhase::Analysis(AnalysisPhase::PostCleanup) {
1225 self.fail(location, format!("After borrowck, MIR disallows {kind:?}"));
1226 }
1227 }
1228 CastKind::PointerCoercion(PointerCoercion::ArrayToPointer, _) => {
1229 check_kinds!(
1231 op_ty,
1232 "CastKind::{kind:?} input must be a raw pointer, not {:?}",
1233 ty::RawPtr(..)
1234 );
1235 check_kinds!(
1236 target_type,
1237 "CastKind::{kind:?} output must be a raw pointer, not {:?}",
1238 ty::RawPtr(..)
1239 );
1240 if self.body.phase >= MirPhase::Analysis(AnalysisPhase::PostCleanup) {
1241 self.fail(location, format!("After borrowck, MIR disallows {kind:?}"));
1242 }
1243 }
1244 CastKind::PointerCoercion(PointerCoercion::Unsize, _) => {
1245 if !self.predicate_must_hold_modulo_regions(ty::TraitRef::new(
1248 self.tcx,
1249 self.tcx.require_lang_item(
1250 LangItem::CoerceUnsized,
1251 Some(self.body.source_info(location).span),
1252 ),
1253 [op_ty, *target_type],
1254 )) {
1255 self.fail(location, format!("Unsize coercion, but `{op_ty}` isn't coercible to `{target_type}`"));
1256 }
1257 }
1258 CastKind::PointerCoercion(PointerCoercion::DynStar, _) => {
1259 }
1261 CastKind::IntToInt | CastKind::IntToFloat => {
1262 let input_valid = op_ty.is_integral() || op_ty.is_char() || op_ty.is_bool();
1263 let target_valid = target_type.is_numeric() || target_type.is_char();
1264 if !input_valid || !target_valid {
1265 self.fail(
1266 location,
1267 format!("Wrong cast kind {kind:?} for the type {op_ty}"),
1268 );
1269 }
1270 }
1271 CastKind::FnPtrToPtr => {
1272 check_kinds!(
1273 op_ty,
1274 "CastKind::{kind:?} input must be a fn pointer, not {:?}",
1275 ty::FnPtr(..)
1276 );
1277 check_kinds!(
1278 target_type,
1279 "CastKind::{kind:?} output must be a raw pointer, not {:?}",
1280 ty::RawPtr(..)
1281 );
1282 }
1283 CastKind::PtrToPtr => {
1284 check_kinds!(
1285 op_ty,
1286 "CastKind::{kind:?} input must be a raw pointer, not {:?}",
1287 ty::RawPtr(..)
1288 );
1289 check_kinds!(
1290 target_type,
1291 "CastKind::{kind:?} output must be a raw pointer, not {:?}",
1292 ty::RawPtr(..)
1293 );
1294 }
1295 CastKind::FloatToFloat | CastKind::FloatToInt => {
1296 if !op_ty.is_floating_point() || !target_type.is_numeric() {
1297 self.fail(
1298 location,
1299 format!(
1300 "Trying to cast non 'Float' as {kind:?} into {target_type:?}"
1301 ),
1302 );
1303 }
1304 }
1305 CastKind::Transmute => {
1306 if let MirPhase::Runtime(..) = self.body.phase {
1307 if !self
1311 .tcx
1312 .normalize_erasing_regions(self.typing_env, op_ty)
1313 .is_sized(self.tcx, self.typing_env)
1314 {
1315 self.fail(
1316 location,
1317 format!("Cannot transmute from non-`Sized` type {op_ty:?}"),
1318 );
1319 }
1320 if !self
1321 .tcx
1322 .normalize_erasing_regions(self.typing_env, *target_type)
1323 .is_sized(self.tcx, self.typing_env)
1324 {
1325 self.fail(
1326 location,
1327 format!("Cannot transmute to non-`Sized` type {target_type:?}"),
1328 );
1329 }
1330 } else {
1331 self.fail(
1332 location,
1333 format!(
1334 "Transmute is not supported in non-runtime phase {:?}.",
1335 self.body.phase
1336 ),
1337 );
1338 }
1339 }
1340 }
1341 }
1342 Rvalue::NullaryOp(NullOp::OffsetOf(indices), container) => {
1343 let fail_out_of_bounds = |this: &mut Self, location, field, ty| {
1344 this.fail(location, format!("Out of bounds field {field:?} for {ty:?}"));
1345 };
1346
1347 let mut current_ty = *container;
1348
1349 for (variant, field) in indices.iter() {
1350 match current_ty.kind() {
1351 ty::Tuple(fields) => {
1352 if variant != FIRST_VARIANT {
1353 self.fail(
1354 location,
1355 format!("tried to get variant {variant:?} of tuple"),
1356 );
1357 return;
1358 }
1359 let Some(&f_ty) = fields.get(field.as_usize()) else {
1360 fail_out_of_bounds(self, location, field, current_ty);
1361 return;
1362 };
1363
1364 current_ty = self.tcx.normalize_erasing_regions(self.typing_env, f_ty);
1365 }
1366 ty::Adt(adt_def, args) => {
1367 let Some(field) = adt_def.variant(variant).fields.get(field) else {
1368 fail_out_of_bounds(self, location, field, current_ty);
1369 return;
1370 };
1371
1372 let f_ty = field.ty(self.tcx, args);
1373 current_ty = self.tcx.normalize_erasing_regions(self.typing_env, f_ty);
1374 }
1375 _ => {
1376 self.fail(
1377 location,
1378 format!("Cannot get offset ({variant:?}, {field:?}) from type {current_ty:?}"),
1379 );
1380 return;
1381 }
1382 }
1383 }
1384 }
1385 Rvalue::Repeat(_, _)
1386 | Rvalue::ThreadLocalRef(_)
1387 | Rvalue::RawPtr(_, _)
1388 | Rvalue::NullaryOp(
1389 NullOp::SizeOf | NullOp::AlignOf | NullOp::UbChecks | NullOp::ContractChecks,
1390 _,
1391 )
1392 | Rvalue::Discriminant(_) => {}
1393
1394 Rvalue::WrapUnsafeBinder(op, ty) => {
1395 let unwrapped_ty = op.ty(self.body, self.tcx);
1396 let ty::UnsafeBinder(binder_ty) = *ty.kind() else {
1397 self.fail(
1398 location,
1399 format!("WrapUnsafeBinder does not produce a ty::UnsafeBinder"),
1400 );
1401 return;
1402 };
1403 let binder_inner_ty = self.tcx.instantiate_bound_regions_with_erased(*binder_ty);
1404 if !self.mir_assign_valid_types(unwrapped_ty, binder_inner_ty) {
1405 self.fail(
1406 location,
1407 format!("Cannot wrap {unwrapped_ty:?} into unsafe binder {binder_ty:?}"),
1408 );
1409 }
1410 }
1411 }
1412 self.super_rvalue(rvalue, location);
1413 }
1414
1415 fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
1416 match &statement.kind {
1417 StatementKind::Assign(box (dest, rvalue)) => {
1418 let left_ty = dest.ty(&self.body.local_decls, self.tcx).ty;
1420 let right_ty = rvalue.ty(&self.body.local_decls, self.tcx);
1421
1422 if !self.mir_assign_valid_types(right_ty, left_ty) {
1423 self.fail(
1424 location,
1425 format!(
1426 "encountered `{:?}` with incompatible types:\n\
1427 left-hand side has type: {}\n\
1428 right-hand side has type: {}",
1429 statement.kind, left_ty, right_ty,
1430 ),
1431 );
1432 }
1433 if let Rvalue::CopyForDeref(place) = rvalue {
1434 if place.ty(&self.body.local_decls, self.tcx).ty.builtin_deref(true).is_none() {
1435 self.fail(
1436 location,
1437 "`CopyForDeref` should only be used for dereferenceable types",
1438 )
1439 }
1440 }
1441 }
1442 StatementKind::AscribeUserType(..) => {
1443 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
1444 self.fail(
1445 location,
1446 "`AscribeUserType` should have been removed after drop lowering phase",
1447 );
1448 }
1449 }
1450 StatementKind::FakeRead(..) => {
1451 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
1452 self.fail(
1453 location,
1454 "`FakeRead` should have been removed after drop lowering phase",
1455 );
1456 }
1457 }
1458 StatementKind::Intrinsic(box NonDivergingIntrinsic::Assume(op)) => {
1459 let ty = op.ty(&self.body.local_decls, self.tcx);
1460 if !ty.is_bool() {
1461 self.fail(
1462 location,
1463 format!("`assume` argument must be `bool`, but got: `{ty}`"),
1464 );
1465 }
1466 }
1467 StatementKind::Intrinsic(box NonDivergingIntrinsic::CopyNonOverlapping(
1468 CopyNonOverlapping { src, dst, count },
1469 )) => {
1470 let src_ty = src.ty(&self.body.local_decls, self.tcx);
1471 let op_src_ty = if let Some(src_deref) = src_ty.builtin_deref(true) {
1472 src_deref
1473 } else {
1474 self.fail(
1475 location,
1476 format!("Expected src to be ptr in copy_nonoverlapping, got: {src_ty}"),
1477 );
1478 return;
1479 };
1480 let dst_ty = dst.ty(&self.body.local_decls, self.tcx);
1481 let op_dst_ty = if let Some(dst_deref) = dst_ty.builtin_deref(true) {
1482 dst_deref
1483 } else {
1484 self.fail(
1485 location,
1486 format!("Expected dst to be ptr in copy_nonoverlapping, got: {dst_ty}"),
1487 );
1488 return;
1489 };
1490 if !self.mir_assign_valid_types(op_src_ty, op_dst_ty) {
1493 self.fail(location, format!("bad arg ({op_src_ty:?} != {op_dst_ty:?})"));
1494 }
1495
1496 let op_cnt_ty = count.ty(&self.body.local_decls, self.tcx);
1497 if op_cnt_ty != self.tcx.types.usize {
1498 self.fail(location, format!("bad arg ({op_cnt_ty:?} != usize)"))
1499 }
1500 }
1501 StatementKind::SetDiscriminant { place, .. } => {
1502 if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial) {
1503 self.fail(location, "`SetDiscriminant`is not allowed until deaggregation");
1504 }
1505 let pty = place.ty(&self.body.local_decls, self.tcx).ty.kind();
1506 if !matches!(pty, ty::Adt(..) | ty::Coroutine(..) | ty::Alias(ty::Opaque, ..)) {
1507 self.fail(
1508 location,
1509 format!(
1510 "`SetDiscriminant` is only allowed on ADTs and coroutines, not {pty:?}"
1511 ),
1512 );
1513 }
1514 }
1515 StatementKind::Deinit(..) => {
1516 if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial) {
1517 self.fail(location, "`Deinit`is not allowed until deaggregation");
1518 }
1519 }
1520 StatementKind::Retag(kind, _) => {
1521 if matches!(kind, RetagKind::TwoPhase) {
1525 self.fail(location, format!("explicit `{kind:?}` is forbidden"));
1526 }
1527 }
1528 StatementKind::StorageLive(_)
1529 | StatementKind::StorageDead(_)
1530 | StatementKind::Coverage(_)
1531 | StatementKind::ConstEvalCounter
1532 | StatementKind::PlaceMention(..)
1533 | StatementKind::BackwardIncompatibleDropHint { .. }
1534 | StatementKind::Nop => {}
1535 }
1536
1537 self.super_statement(statement, location);
1538 }
1539
1540 fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
1541 match &terminator.kind {
1542 TerminatorKind::SwitchInt { targets, discr } => {
1543 let switch_ty = discr.ty(&self.body.local_decls, self.tcx);
1544
1545 let target_width = self.tcx.sess.target.pointer_width;
1546
1547 let size = Size::from_bits(match switch_ty.kind() {
1548 ty::Uint(uint) => uint.normalize(target_width).bit_width().unwrap(),
1549 ty::Int(int) => int.normalize(target_width).bit_width().unwrap(),
1550 ty::Char => 32,
1551 ty::Bool => 1,
1552 other => bug!("unhandled type: {:?}", other),
1553 });
1554
1555 for (value, _) in targets.iter() {
1556 if ScalarInt::try_from_uint(value, size).is_none() {
1557 self.fail(
1558 location,
1559 format!("the value {value:#x} is not a proper {switch_ty:?}"),
1560 )
1561 }
1562 }
1563 }
1564 TerminatorKind::Call { func, .. } | TerminatorKind::TailCall { func, .. } => {
1565 let func_ty = func.ty(&self.body.local_decls, self.tcx);
1566 match func_ty.kind() {
1567 ty::FnPtr(..) | ty::FnDef(..) => {}
1568 _ => self.fail(
1569 location,
1570 format!(
1571 "encountered non-callable type {func_ty} in `{}` terminator",
1572 terminator.kind.name()
1573 ),
1574 ),
1575 }
1576
1577 if let TerminatorKind::TailCall { .. } = terminator.kind {
1578 }
1581 }
1582 TerminatorKind::Assert { cond, .. } => {
1583 let cond_ty = cond.ty(&self.body.local_decls, self.tcx);
1584 if cond_ty != self.tcx.types.bool {
1585 self.fail(
1586 location,
1587 format!(
1588 "encountered non-boolean condition of type {cond_ty} in `Assert` terminator"
1589 ),
1590 );
1591 }
1592 }
1593 TerminatorKind::Goto { .. }
1594 | TerminatorKind::Drop { .. }
1595 | TerminatorKind::Yield { .. }
1596 | TerminatorKind::FalseEdge { .. }
1597 | TerminatorKind::FalseUnwind { .. }
1598 | TerminatorKind::InlineAsm { .. }
1599 | TerminatorKind::CoroutineDrop
1600 | TerminatorKind::UnwindResume
1601 | TerminatorKind::UnwindTerminate(_)
1602 | TerminatorKind::Return
1603 | TerminatorKind::Unreachable => {}
1604 }
1605
1606 self.super_terminator(terminator, location);
1607 }
1608}