1use rustc_abi::{ExternAbi, FIRST_VARIANT, Size};
4use rustc_attr_parsing::InlineAttr;
5use rustc_data_structures::fx::{FxHashMap, FxHashSet};
6use rustc_hir::LangItem;
7use rustc_index::IndexVec;
8use rustc_index::bit_set::DenseBitSet;
9use rustc_infer::infer::TyCtxtInferExt;
10use rustc_infer::traits::{Obligation, ObligationCause};
11use rustc_middle::mir::coverage::CoverageKind;
12use rustc_middle::mir::visit::{NonUseContext, PlaceContext, Visitor};
13use rustc_middle::mir::*;
14use rustc_middle::ty::adjustment::PointerCoercion;
15use rustc_middle::ty::print::with_no_trimmed_paths;
16use rustc_middle::ty::{
17 self, CoroutineArgsExt, InstanceKind, ScalarInt, Ty, TyCtxt, TypeVisitableExt, Upcast, Variance,
18};
19use rustc_middle::{bug, span_bug};
20use rustc_trait_selection::traits::ObligationCtxt;
21
22use crate::util::{self, is_within_packed};
23
/// The kind of a control-flow edge, used when checking the cleanup/unwind
/// invariants: normal edges must stay within cleanup or within non-cleanup
/// code, and unwind edges must lead from non-cleanup into cleanup blocks
/// (see `CfgChecker::check_edge`).
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum EdgeKind {
    /// An edge taken when unwinding (e.g. an `UnwindAction::Cleanup` target).
    Unwind,
    /// An ordinary control-flow edge.
    Normal,
}
29
/// A MIR pass that checks a body for well-formedness at its current
/// `MirPhase`. It performs CFG-level checks (`CfgChecker`) and type-level
/// checks (`validate_types`).
pub(super) struct Validator {
    /// Describes where in the compilation this validation is happening;
    /// interpolated into failure messages.
    pub when: String,
}
34
impl<'tcx> crate::MirPass<'tcx> for Validator {
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        // Intrinsic and virtual-call instances are skipped entirely.
        if matches!(body.source.instance, InstanceKind::Intrinsic(..) | InstanceKind::Virtual(..)) {
            return;
        }
        let def_id = body.source.def_id();
        let typing_env = body.typing_env(tcx);
        // Determine whether this body is allowed to unwind. Up to and
        // including the initial runtime phase (and for items that are not
        // fn-like) we conservatively assume it can; afterwards we consult
        // the function's ABI via `fn_can_unwind`.
        let can_unwind = if body.phase <= MirPhase::Runtime(RuntimePhase::Initial) {
            true
        } else if !tcx.def_kind(def_id).is_fn_like() {
            true
        } else {
            let body_ty = tcx.type_of(def_id).skip_binder();
            let body_abi = match body_ty.kind() {
                ty::FnDef(..) => body_ty.fn_sig(tcx).abi(),
                ty::Closure(..) => ExternAbi::RustCall,
                ty::CoroutineClosure(..) => ExternAbi::RustCall,
                ty::Coroutine(..) => ExternAbi::Rust,
                // Bodies that already contain type errors are not validated.
                ty::Error(_) => return,
                _ => span_bug!(body.span, "unexpected body ty: {body_ty}"),
            };

            ty::layout::fn_can_unwind(tcx, Some(def_id), body_abi)
        };

        let mut cfg_checker = CfgChecker {
            when: &self.when,
            body,
            tcx,
            unwind_edge_count: 0,
            reachable_blocks: traversal::reachable_as_bitset(body),
            value_cache: FxHashSet::default(),
            can_unwind,
        };
        cfg_checker.visit_body(body);
        cfg_checker.check_cleanup_control_flow();

        // Type-level failures are funneled through the same reporting path
        // as the CFG-level ones.
        for (location, msg) in validate_types(tcx, typing_env, body, body) {
            cfg_checker.fail(location, msg);
        }

        // Runtime MIR of ordinary items must not mention free regions.
        if let MirPhase::Runtime(_) = body.phase {
            if let ty::InstanceKind::Item(_) = body.source.instance {
                if body.has_free_regions() {
                    cfg_checker.fail(
                        Location::START,
                        format!("Free regions in optimized {} MIR", body.phase.name()),
                    );
                }
            }
        }
    }

    fn is_required(&self) -> bool {
        true
    }
}
99
/// Walks a body checking CFG-level invariants: jump-target validity,
/// cleanup control flow, unwind actions, and phase-dependent legality of
/// statements and terminators.
struct CfgChecker<'a, 'tcx> {
    /// Pipeline-stage description, used in failure messages.
    when: &'a str,
    body: &'a Body<'tcx>,
    tcx: TyCtxt<'tcx>,
    /// Number of unwind edges into cleanup blocks seen so far; the cleanup
    /// control-flow check is skipped when this is at most 1.
    unwind_edge_count: usize,
    /// Blocks reachable from the start block; unreachable cleanup blocks
    /// are exempt from the cleanup control-flow check.
    reachable_blocks: DenseBitSet<BasicBlock>,
    /// Scratch set used to detect duplicate `SwitchInt` values.
    value_cache: FxHashSet<u128>,
    /// Whether this function may unwind (computed in `run_pass`).
    can_unwind: bool,
}
117
impl<'a, 'tcx> CfgChecker<'a, 'tcx> {
    /// Reports a validation failure. This asserts unless the diagnostic
    /// context has already emitted errors — broken MIR is tolerated when
    /// compilation has already failed for other reasons.
    #[track_caller]
    fn fail(&self, location: Location, msg: impl AsRef<str>) {
        // We might see broken MIR when other errors have already occurred.
        assert!(
            self.tcx.dcx().has_errors().is_some(),
            "broken MIR in {:?} ({}) at {:?}:\n{}",
            self.body.source.instance,
            self.when,
            location,
            msg.as_ref(),
        );
    }

    /// Checks that `bb` is a valid jump target from `location` and that the
    /// edge respects the cleanup invariants (see `EdgeKind`). Valid unwind
    /// edges into cleanup blocks are counted in `unwind_edge_count`.
    fn check_edge(&mut self, location: Location, bb: BasicBlock, edge_kind: EdgeKind) {
        if bb == START_BLOCK {
            self.fail(location, "start block must not have predecessors")
        }
        if let Some(bb) = self.body.basic_blocks.get(bb) {
            let src = self.body.basic_blocks.get(location.block).unwrap();
            match (src.is_cleanup, bb.is_cleanup, edge_kind) {
                // Normal edges may connect non-cleanup to non-cleanup,
                // or cleanup to cleanup.
                (false, false, EdgeKind::Normal)
                | (true, true, EdgeKind::Normal) => {}
                // Unwind edges may only lead from non-cleanup into cleanup.
                (false, true, EdgeKind::Unwind) => {
                    self.unwind_edge_count += 1;
                }
                // Everything else violates the cleanup invariants.
                _ => {
                    self.fail(
                        location,
                        format!(
                            "{:?} edge to {:?} violates unwind invariants (cleanup {:?} -> {:?})",
                            edge_kind,
                            bb,
                            src.is_cleanup,
                            bb.is_cleanup,
                        )
                    )
                }
            }
        } else {
            self.fail(location, format!("encountered jump to invalid basic block {bb:?}"))
        }
    }

    /// Checks that cleanup control flow is tree-shaped: after contracting
    /// each chain of cleanup blocks that dominate one another into a single
    /// representative node, every node may have edges to at most one other
    /// node, and the resulting parent relation must be acyclic.
    fn check_cleanup_control_flow(&self) {
        // With at most one unwind edge there is nothing to merge incorrectly.
        if self.unwind_edge_count <= 1 {
            return;
        }
        let doms = self.body.basic_blocks.dominators();
        let mut post_contract_node = FxHashMap::default();
        // Reused across invocations of the closure to avoid reallocating.
        let mut dom_path = vec![];
        // Maps `bb` to the highest cleanup block that dominates it through a
        // chain of cleanup blocks, memoizing results in `post_contract_node`.
        let mut get_post_contract_node = |mut bb| {
            let root = loop {
                if let Some(root) = post_contract_node.get(&bb) {
                    break *root;
                }
                let parent = doms.immediate_dominator(bb).unwrap();
                dom_path.push(bb);
                if !self.body.basic_blocks[parent].is_cleanup {
                    break bb;
                }
                bb = parent;
            };
            // Record the answer for every block visited along the way.
            for bb in dom_path.drain(..) {
                post_contract_node.insert(bb, root);
            }
            root
        };

        // For each contracted node, the unique contracted node its edges
        // lead to (ignoring self-edges). Two distinct targets is an error.
        let mut parent = IndexVec::from_elem(None, &self.body.basic_blocks);
        for (bb, bb_data) in self.body.basic_blocks.iter_enumerated() {
            if !bb_data.is_cleanup || !self.reachable_blocks.contains(bb) {
                continue;
            }
            let bb = get_post_contract_node(bb);
            for s in bb_data.terminator().successors() {
                let s = get_post_contract_node(s);
                if s == bb {
                    continue;
                }
                let parent = &mut parent[bb];
                match parent {
                    None => {
                        *parent = Some(s);
                    }
                    Some(e) if *e == s => (),
                    Some(e) => self.fail(
                        Location { block: bb, statement_index: 0 },
                        format!(
                            "Cleanup control flow violation: The blocks dominated by {:?} have edges to both {:?} and {:?}",
                            bb,
                            s,
                            *e
                        )
                    ),
                }
            }
        }

        // Check that the parent relation contains no cycles.
        let mut stack = FxHashSet::default();
        for (mut bb, parent) in parent.iter_enumerated_mut() {
            stack.clear();
            stack.insert(bb);
            loop {
                let Some(parent) = parent.take() else { break };
                let no_cycle = stack.insert(parent);
                if !no_cycle {
                    self.fail(
                        Location { block: bb, statement_index: 0 },
                        format!(
                            "Cleanup control flow violation: Cycle involving edge {bb:?} -> {parent:?}",
                        ),
                    );
                    break;
                }
                bb = parent;
            }
        }
    }

    /// Checks the `UnwindAction` of a terminator at `location`:
    /// cleanup blocks may not themselves unwind into cleanup or continue
    /// unwinding, no-unwind functions may not continue unwinding, and
    /// `Terminate(InCleanup)` is only legal inside cleanup blocks.
    fn check_unwind_edge(&mut self, location: Location, unwind: UnwindAction) {
        let is_cleanup = self.body.basic_blocks[location.block].is_cleanup;
        match unwind {
            UnwindAction::Cleanup(unwind) => {
                if is_cleanup {
                    self.fail(location, "`UnwindAction::Cleanup` in cleanup block");
                }
                self.check_edge(location, unwind, EdgeKind::Unwind);
            }
            UnwindAction::Continue => {
                if is_cleanup {
                    self.fail(location, "`UnwindAction::Continue` in cleanup block");
                }

                if !self.can_unwind {
                    self.fail(location, "`UnwindAction::Continue` in no-unwind function");
                }
            }
            UnwindAction::Terminate(UnwindTerminateReason::InCleanup) => {
                if !is_cleanup {
                    self.fail(
                        location,
                        "`UnwindAction::Terminate(InCleanup)` in a non-cleanup block",
                    );
                }
            }
            UnwindAction::Unreachable | UnwindAction::Terminate(UnwindTerminateReason::Abi) => (),
        }
    }

    /// Returns whether a `Call` edge to `target` is critical: the call has
    /// another successor (a cleanup/terminate path) and `target` has more
    /// than one predecessor.
    fn is_critical_call_edge(&self, target: Option<BasicBlock>, unwind: UnwindAction) -> bool {
        let Some(target) = target else { return false };
        matches!(unwind, UnwindAction::Cleanup(_) | UnwindAction::Terminate(_))
            && self.body.basic_blocks.predecessors()[target].len() > 1
    }
}
281
282impl<'a, 'tcx> Visitor<'tcx> for CfgChecker<'a, 'tcx> {
283 fn visit_local(&mut self, local: Local, _context: PlaceContext, location: Location) {
284 if self.body.local_decls.get(local).is_none() {
285 self.fail(
286 location,
287 format!("local {local:?} has no corresponding declaration in `body.local_decls`"),
288 );
289 }
290 }
291
292 fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
293 match &statement.kind {
294 StatementKind::AscribeUserType(..) => {
295 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
296 self.fail(
297 location,
298 "`AscribeUserType` should have been removed after drop lowering phase",
299 );
300 }
301 }
302 StatementKind::FakeRead(..) => {
303 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
304 self.fail(
305 location,
306 "`FakeRead` should have been removed after drop lowering phase",
307 );
308 }
309 }
310 StatementKind::SetDiscriminant { .. } => {
311 if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial) {
312 self.fail(location, "`SetDiscriminant`is not allowed until deaggregation");
313 }
314 }
315 StatementKind::Deinit(..) => {
316 if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial) {
317 self.fail(location, "`Deinit`is not allowed until deaggregation");
318 }
319 }
320 StatementKind::Retag(kind, _) => {
321 if matches!(kind, RetagKind::TwoPhase) {
325 self.fail(location, format!("explicit `{kind:?}` is forbidden"));
326 }
327 }
328 StatementKind::Coverage(kind) => {
329 if self.body.phase >= MirPhase::Analysis(AnalysisPhase::PostCleanup)
330 && let CoverageKind::BlockMarker { .. } | CoverageKind::SpanMarker { .. } = kind
331 {
332 self.fail(
333 location,
334 format!("{kind:?} should have been removed after analysis"),
335 );
336 }
337 }
338 StatementKind::Assign(..)
339 | StatementKind::StorageLive(_)
340 | StatementKind::StorageDead(_)
341 | StatementKind::Intrinsic(_)
342 | StatementKind::ConstEvalCounter
343 | StatementKind::PlaceMention(..)
344 | StatementKind::BackwardIncompatibleDropHint { .. }
345 | StatementKind::Nop => {}
346 }
347
348 self.super_statement(statement, location);
349 }
350
351 fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
352 match &terminator.kind {
353 TerminatorKind::Goto { target } => {
354 self.check_edge(location, *target, EdgeKind::Normal);
355 }
356 TerminatorKind::SwitchInt { targets, discr: _ } => {
357 for (_, target) in targets.iter() {
358 self.check_edge(location, target, EdgeKind::Normal);
359 }
360 self.check_edge(location, targets.otherwise(), EdgeKind::Normal);
361
362 self.value_cache.clear();
363 self.value_cache.extend(targets.iter().map(|(value, _)| value));
364 let has_duplicates = targets.iter().len() != self.value_cache.len();
365 if has_duplicates {
366 self.fail(
367 location,
368 format!(
369 "duplicated values in `SwitchInt` terminator: {:?}",
370 terminator.kind,
371 ),
372 );
373 }
374 }
375 TerminatorKind::Drop { target, unwind, .. } => {
376 self.check_edge(location, *target, EdgeKind::Normal);
377 self.check_unwind_edge(location, *unwind);
378 }
379 TerminatorKind::Call { func, args, .. }
380 | TerminatorKind::TailCall { func, args, .. } => {
381 if let TerminatorKind::Call { target, unwind, destination, .. } = terminator.kind {
383 if let Some(target) = target {
384 self.check_edge(location, target, EdgeKind::Normal);
385 }
386 self.check_unwind_edge(location, unwind);
387
388 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Optimized)
394 && self.is_critical_call_edge(target, unwind)
395 {
396 self.fail(
397 location,
398 format!(
399 "encountered critical edge in `Call` terminator {:?}",
400 terminator.kind,
401 ),
402 );
403 }
404
405 if is_within_packed(self.tcx, &self.body.local_decls, destination).is_some() {
408 self.fail(
410 location,
411 format!(
412 "encountered packed place in `Call` terminator destination: {:?}",
413 terminator.kind,
414 ),
415 );
416 }
417 }
418
419 for arg in args {
420 if let Operand::Move(place) = &arg.node {
421 if is_within_packed(self.tcx, &self.body.local_decls, *place).is_some() {
422 self.fail(
424 location,
425 format!(
426 "encountered `Move` of a packed place in `Call` terminator: {:?}",
427 terminator.kind,
428 ),
429 );
430 }
431 }
432 }
433
434 if let ty::FnDef(did, ..) = func.ty(&self.body.local_decls, self.tcx).kind()
435 && self.body.phase >= MirPhase::Runtime(RuntimePhase::Optimized)
436 && matches!(self.tcx.codegen_fn_attrs(did).inline, InlineAttr::Force { .. })
437 {
438 self.fail(location, "`#[rustc_force_inline]`-annotated function not inlined");
439 }
440 }
441 TerminatorKind::Assert { target, unwind, .. } => {
442 self.check_edge(location, *target, EdgeKind::Normal);
443 self.check_unwind_edge(location, *unwind);
444 }
445 TerminatorKind::Yield { resume, drop, .. } => {
446 if self.body.coroutine.is_none() {
447 self.fail(location, "`Yield` cannot appear outside coroutine bodies");
448 }
449 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
450 self.fail(location, "`Yield` should have been replaced by coroutine lowering");
451 }
452 self.check_edge(location, *resume, EdgeKind::Normal);
453 if let Some(drop) = drop {
454 self.check_edge(location, *drop, EdgeKind::Normal);
455 }
456 }
457 TerminatorKind::FalseEdge { real_target, imaginary_target } => {
458 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
459 self.fail(
460 location,
461 "`FalseEdge` should have been removed after drop elaboration",
462 );
463 }
464 self.check_edge(location, *real_target, EdgeKind::Normal);
465 self.check_edge(location, *imaginary_target, EdgeKind::Normal);
466 }
467 TerminatorKind::FalseUnwind { real_target, unwind } => {
468 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
469 self.fail(
470 location,
471 "`FalseUnwind` should have been removed after drop elaboration",
472 );
473 }
474 self.check_edge(location, *real_target, EdgeKind::Normal);
475 self.check_unwind_edge(location, *unwind);
476 }
477 TerminatorKind::InlineAsm { targets, unwind, .. } => {
478 for &target in targets {
479 self.check_edge(location, target, EdgeKind::Normal);
480 }
481 self.check_unwind_edge(location, *unwind);
482 }
483 TerminatorKind::CoroutineDrop => {
484 if self.body.coroutine.is_none() {
485 self.fail(location, "`CoroutineDrop` cannot appear outside coroutine bodies");
486 }
487 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
488 self.fail(
489 location,
490 "`CoroutineDrop` should have been replaced by coroutine lowering",
491 );
492 }
493 }
494 TerminatorKind::UnwindResume => {
495 let bb = location.block;
496 if !self.body.basic_blocks[bb].is_cleanup {
497 self.fail(location, "Cannot `UnwindResume` from non-cleanup basic block")
498 }
499 if !self.can_unwind {
500 self.fail(location, "Cannot `UnwindResume` in a function that cannot unwind")
501 }
502 }
503 TerminatorKind::UnwindTerminate(_) => {
504 let bb = location.block;
505 if !self.body.basic_blocks[bb].is_cleanup {
506 self.fail(location, "Cannot `UnwindTerminate` from non-cleanup basic block")
507 }
508 }
509 TerminatorKind::Return => {
510 let bb = location.block;
511 if self.body.basic_blocks[bb].is_cleanup {
512 self.fail(location, "Cannot `Return` from cleanup basic block")
513 }
514 }
515 TerminatorKind::Unreachable => {}
516 }
517
518 self.super_terminator(terminator, location);
519 }
520
521 fn visit_source_scope(&mut self, scope: SourceScope) {
522 if self.body.source_scopes.get(scope).is_none() {
523 self.tcx.dcx().span_bug(
524 self.body.span,
525 format!(
526 "broken MIR in {:?} ({}):\ninvalid source scope {:?}",
527 self.body.source.instance, self.when, scope,
528 ),
529 );
530 }
531 }
532}
533
534pub(super) fn validate_types<'tcx>(
540 tcx: TyCtxt<'tcx>,
541 typing_env: ty::TypingEnv<'tcx>,
542 body: &Body<'tcx>,
543 caller_body: &Body<'tcx>,
544) -> Vec<(Location, String)> {
545 let mut type_checker = TypeChecker { body, caller_body, tcx, typing_env, failures: Vec::new() };
546 with_no_trimmed_paths!({
551 type_checker.visit_body(body);
552 });
553 type_checker.failures
554}
555
/// Collects type-level validation failures for a body: projection types,
/// operand types, rvalue/aggregate field types, and debuginfo places.
struct TypeChecker<'a, 'tcx> {
    /// The body being validated.
    body: &'a Body<'tcx>,
    /// Used when resolving coroutine field layouts; may be the same body as
    /// `body` (see `validate_types`, which is called with `(body, body)`).
    caller_body: &'a Body<'tcx>,
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    /// Accumulated `(location, message)` failures, returned by
    /// `validate_types`.
    failures: Vec<(Location, String)>,
}
563
impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
    /// Records a validation failure at `location`.
    fn fail(&mut self, location: Location, msg: impl Into<String>) {
        self.failures.push((location, msg.into()));
    }

    /// Returns whether a value of type `src` can be assigned to a place of
    /// type `dest`. Before the runtime phases this allows subtyping
    /// (covariance); afterwards the types must relate invariantly.
    fn mir_assign_valid_types(&self, src: Ty<'tcx>, dest: Ty<'tcx>) -> bool {
        // Fast path: identical types are always assignable.
        if src == dest {
            return true;
        }

        // Types involving not-yet-revealed opaque types cannot be compared
        // meaningfully here, so accept the assignment.
        if (src, dest).has_opaque_types() {
            return true;
        }

        let variance = if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
            Variance::Invariant
        } else {
            Variance::Covariant
        };

        crate::util::relate_types(self.tcx, self.typing_env, variance, src, dest)
    }

    /// Returns whether `pred` holds in this body's typing environment
    /// (modulo regions). Predicates mentioning opaque types are
    /// optimistically assumed to hold.
    fn predicate_must_hold_modulo_regions(
        &self,
        pred: impl Upcast<TyCtxt<'tcx>, ty::Predicate<'tcx>>,
    ) -> bool {
        let pred: ty::Predicate<'tcx> = pred.upcast(self.tcx);

        if pred.has_opaque_types() {
            return true;
        }

        // Prove the single obligation with a fresh inference context.
        let (infcx, param_env) = self.tcx.infer_ctxt().build_with_typing_env(self.typing_env);
        let ocx = ObligationCtxt::new(&infcx);
        ocx.register_obligation(Obligation::new(
            self.tcx,
            ObligationCause::dummy(),
            param_env,
            pred,
        ));
        ocx.select_all_or_error().is_empty()
    }
}
625
626impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
627 fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
628 if self.tcx.sess.opts.unstable_opts.validate_mir
630 && self.body.phase < MirPhase::Runtime(RuntimePhase::Initial)
631 {
632 if let Operand::Copy(place) = operand {
634 let ty = place.ty(&self.body.local_decls, self.tcx).ty;
635
636 if !self.tcx.type_is_copy_modulo_regions(self.typing_env, ty) {
637 self.fail(location, format!("`Operand::Copy` with non-`Copy` type {ty}"));
638 }
639 }
640 }
641
642 self.super_operand(operand, location);
643 }
644
    /// Checks each projection element against the type of the place it
    /// projects from: phase legality of `OpaqueCast`/`Deref`, index types,
    /// field existence and field types (including coroutine saved locals),
    /// subtype projections, and unsafe-binder unwrapping.
    fn visit_projection_elem(
        &mut self,
        place_ref: PlaceRef<'tcx>,
        elem: PlaceElem<'tcx>,
        context: PlaceContext,
        location: Location,
    ) {
        match elem {
            // Opaque-type casts must be gone once opaques are revealed.
            ProjectionElem::OpaqueCast(ty)
                if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) =>
            {
                self.fail(
                    location,
                    format!("explicit opaque type cast to `{ty}` after `PostAnalysisNormalize`"),
                )
            }
            // Index operands must be `usize` locals.
            ProjectionElem::Index(index) => {
                let index_ty = self.body.local_decls[index].ty;
                if index_ty != self.tcx.types.usize {
                    self.fail(location, format!("bad index ({index_ty} != usize)"))
                }
            }
            // Box derefs must have been lowered by `ElaborateBoxDerefs`.
            ProjectionElem::Deref
                if self.body.phase >= MirPhase::Runtime(RuntimePhase::PostCleanup) =>
            {
                let base_ty = place_ref.ty(&self.body.local_decls, self.tcx).ty;

                if base_ty.is_box() {
                    self.fail(location, format!("{base_ty} dereferenced after ElaborateBoxDerefs"))
                }
            }
            // A field projection must exist on the parent type and carry the
            // actual field type (up to `mir_assign_valid_types`).
            ProjectionElem::Field(f, ty) => {
                let parent_ty = place_ref.ty(&self.body.local_decls, self.tcx);
                let fail_out_of_bounds = |this: &mut Self, location| {
                    this.fail(location, format!("Out of bounds field {f:?} for {parent_ty:?}"));
                };
                let check_equal = |this: &mut Self, location, f_ty| {
                    if !this.mir_assign_valid_types(ty, f_ty) {
                        this.fail(
                            location,
                            format!(
                                "Field projection `{place_ref:?}.{f:?}` specified type `{ty}`, but actual type is `{f_ty}`"
                            )
                        )
                    }
                };

                // Look through an opaque parent type to its hidden type.
                let kind = match parent_ty.ty.kind() {
                    &ty::Alias(ty::Opaque, ty::AliasTy { def_id, args, .. }) => {
                        self.tcx.type_of(def_id).instantiate(self.tcx, args).kind()
                    }
                    kind => kind,
                };

                match kind {
                    ty::Tuple(fields) => {
                        let Some(f_ty) = fields.get(f.as_usize()) else {
                            fail_out_of_bounds(self, location);
                            return;
                        };
                        check_equal(self, location, *f_ty);
                    }
                    ty::Adt(adt_def, args) => {
                        // `DynMetadata` deliberately exposes no fields.
                        if self.tcx.is_lang_item(adt_def.did(), LangItem::DynMetadata) {
                            self.fail(
                                location,
                                format!(
                                    "You can't project to field {f:?} of `DynMetadata` because \
                                     layout is weird and thinks it doesn't have fields."
                                ),
                            );
                        }

                        let var = parent_ty.variant_index.unwrap_or(FIRST_VARIANT);
                        let Some(field) = adt_def.variant(var).fields.get(f) else {
                            fail_out_of_bounds(self, location);
                            return;
                        };
                        check_equal(self, location, field.ty(self.tcx, args));
                    }
                    ty::Closure(_, args) => {
                        let args = args.as_closure();
                        let Some(&f_ty) = args.upvar_tys().get(f.as_usize()) else {
                            fail_out_of_bounds(self, location);
                            return;
                        };
                        check_equal(self, location, f_ty);
                    }
                    ty::CoroutineClosure(_, args) => {
                        let args = args.as_coroutine_closure();
                        let Some(&f_ty) = args.upvar_tys().get(f.as_usize()) else {
                            fail_out_of_bounds(self, location);
                            return;
                        };
                        check_equal(self, location, f_ty);
                    }
                    &ty::Coroutine(def_id, args) => {
                        let f_ty = if let Some(var) = parent_ty.variant_index {
                            // A variant-indexed field names a coroutine saved
                            // local, so the coroutine layout is needed. Which
                            // layout applies depends on whose body is being
                            // validated:
                            let layout = if def_id == self.caller_body.source.def_id() {
                                // Validating the coroutine's own body.
                                self.caller_body.coroutine_layout_raw()
                            } else if self.tcx.needs_coroutine_by_move_body_def_id(def_id)
                                && let ty::ClosureKind::FnOnce =
                                    args.as_coroutine().kind_ty().to_opt_closure_kind().unwrap()
                                && self.caller_body.source.def_id()
                                    == self.tcx.coroutine_by_move_body_def_id(def_id)
                            {
                                // Validating the coroutine's by-move body.
                                self.caller_body.coroutine_layout_raw()
                            } else {
                                // Otherwise query the layout from the tcx.
                                self.tcx.coroutine_layout(def_id, args.as_coroutine().kind_ty())
                            };

                            let Some(layout) = layout else {
                                self.fail(
                                    location,
                                    format!("No coroutine layout for {parent_ty:?}"),
                                );
                                return;
                            };

                            let Some(&local) = layout.variant_fields[var].get(f) else {
                                fail_out_of_bounds(self, location);
                                return;
                            };

                            let Some(f_ty) = layout.field_tys.get(local) else {
                                self.fail(
                                    location,
                                    format!("Out of bounds local {local:?} for {parent_ty:?}"),
                                );
                                return;
                            };

                            ty::EarlyBinder::bind(f_ty.ty).instantiate(self.tcx, args)
                        } else {
                            // Without a variant index the field refers to the
                            // coroutine's prefix (upvars etc.).
                            let Some(&f_ty) = args.as_coroutine().prefix_tys().get(f.index())
                            else {
                                fail_out_of_bounds(self, location);
                                return;
                            };

                            f_ty
                        };

                        check_equal(self, location, f_ty);
                    }
                    _ => {
                        self.fail(location, format!("{:?} does not have fields", parent_ty.ty));
                    }
                }
            }
            // A `Subtype` projection must actually be a subtyping step.
            ProjectionElem::Subtype(ty) => {
                if !util::sub_types(
                    self.tcx,
                    self.typing_env,
                    ty,
                    place_ref.ty(&self.body.local_decls, self.tcx).ty,
                ) {
                    self.fail(
                        location,
                        format!(
                            "Failed subtyping {ty} and {}",
                            place_ref.ty(&self.body.local_decls, self.tcx).ty
                        ),
                    )
                }
            }
            // Unwrapping requires the base to be an unsafe binder whose inner
            // type (with bound regions erased) matches the unwrapped type.
            ProjectionElem::UnwrapUnsafeBinder(unwrapped_ty) => {
                let binder_ty = place_ref.ty(&self.body.local_decls, self.tcx);
                let ty::UnsafeBinder(binder_ty) = *binder_ty.ty.kind() else {
                    self.fail(
                        location,
                        format!("WrapUnsafeBinder does not produce a ty::UnsafeBinder"),
                    );
                    return;
                };
                let binder_inner_ty = self.tcx.instantiate_bound_regions_with_erased(*binder_ty);
                if !self.mir_assign_valid_types(unwrapped_ty, binder_inner_ty) {
                    self.fail(
                        location,
                        format!(
                            "Cannot unwrap unsafe binder {binder_ty:?} into type {unwrapped_ty}"
                        ),
                    );
                }
            }
            _ => {}
        }
        self.super_projection_elem(place_ref, elem, context, location);
    }
841
842 fn visit_var_debug_info(&mut self, debuginfo: &VarDebugInfo<'tcx>) {
843 if let Some(box VarDebugInfoFragment { ty, ref projection }) = debuginfo.composite {
844 if ty.is_union() || ty.is_enum() {
845 self.fail(
846 START_BLOCK.start_location(),
847 format!("invalid type {ty} in debuginfo for {:?}", debuginfo.name),
848 );
849 }
850 if projection.is_empty() {
851 self.fail(
852 START_BLOCK.start_location(),
853 format!("invalid empty projection in debuginfo for {:?}", debuginfo.name),
854 );
855 }
856 if projection.iter().any(|p| !matches!(p, PlaceElem::Field(..))) {
857 self.fail(
858 START_BLOCK.start_location(),
859 format!(
860 "illegal projection {:?} in debuginfo for {:?}",
861 projection, debuginfo.name
862 ),
863 );
864 }
865 }
866 match debuginfo.value {
867 VarDebugInfoContents::Const(_) => {}
868 VarDebugInfoContents::Place(place) => {
869 if place.projection.iter().any(|p| !p.can_use_in_debuginfo()) {
870 self.fail(
871 START_BLOCK.start_location(),
872 format!("illegal place {:?} in debuginfo for {:?}", place, debuginfo.name),
873 );
874 }
875 }
876 }
877 self.super_var_debug_info(debuginfo);
878 }
879
    /// Checks place well-formedness: in runtime MIR a `Deref` may only be
    /// the first projection (debuginfo places excepted), and every
    /// `Downcast` projection must be immediately followed by a `Field`.
    fn visit_place(&mut self, place: &Place<'tcx>, cntxt: PlaceContext, location: Location) {
        // Compute the place's type purely for its side effects: the value is
        // discarded, but the computation itself exercises consistency checks.
        let _ = place.ty(&self.body.local_decls, self.tcx);

        if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial)
            && place.projection.len() > 1
            && cntxt != PlaceContext::NonUse(NonUseContext::VarDebugInfo)
            && place.projection[1..].contains(&ProjectionElem::Deref)
        {
            self.fail(
                location,
                format!("place {place:?} has deref as a later projection (it is only permitted as the first projection)"),
            );
        }

        // Check that each `Downcast` is followed by a `Field`. Note that the
        // inner `next()` also consumes the element after the `Downcast`, so
        // a `Field` checked here is not revisited as a potential `Downcast`
        // itself on the next loop iteration.
        let mut projections_iter = place.projection.iter();
        while let Some(proj) = projections_iter.next() {
            if matches!(proj, ProjectionElem::Downcast(..)) {
                if !matches!(projections_iter.next(), Some(ProjectionElem::Field(..))) {
                    self.fail(
                        location,
                        format!(
                            "place {place:?} has `Downcast` projection not followed by `Field`"
                        ),
                    );
                }
            }
        }

        self.super_place(place, cntxt, location);
    }
912
913 fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
914 macro_rules! check_kinds {
915 ($t:expr, $text:literal, $typat:pat) => {
916 if !matches!(($t).kind(), $typat) {
917 self.fail(location, format!($text, $t));
918 }
919 };
920 }
921 match rvalue {
922 Rvalue::Use(_) | Rvalue::CopyForDeref(_) => {}
923 Rvalue::Aggregate(kind, fields) => match **kind {
924 AggregateKind::Tuple => {}
925 AggregateKind::Array(dest) => {
926 for src in fields {
927 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
928 self.fail(location, "array field has the wrong type");
929 }
930 }
931 }
932 AggregateKind::Adt(def_id, idx, args, _, Some(field)) => {
933 let adt_def = self.tcx.adt_def(def_id);
934 assert!(adt_def.is_union());
935 assert_eq!(idx, FIRST_VARIANT);
936 let dest_ty = self.tcx.normalize_erasing_regions(
937 self.typing_env,
938 adt_def.non_enum_variant().fields[field].ty(self.tcx, args),
939 );
940 if let [field] = fields.raw.as_slice() {
941 let src_ty = field.ty(self.body, self.tcx);
942 if !self.mir_assign_valid_types(src_ty, dest_ty) {
943 self.fail(location, "union field has the wrong type");
944 }
945 } else {
946 self.fail(location, "unions should have one initialized field");
947 }
948 }
949 AggregateKind::Adt(def_id, idx, args, _, None) => {
950 let adt_def = self.tcx.adt_def(def_id);
951 assert!(!adt_def.is_union());
952 let variant = &adt_def.variants()[idx];
953 if variant.fields.len() != fields.len() {
954 self.fail(location, "adt has the wrong number of initialized fields");
955 }
956 for (src, dest) in std::iter::zip(fields, &variant.fields) {
957 let dest_ty = self
958 .tcx
959 .normalize_erasing_regions(self.typing_env, dest.ty(self.tcx, args));
960 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest_ty) {
961 self.fail(location, "adt field has the wrong type");
962 }
963 }
964 }
965 AggregateKind::Closure(_, args) => {
966 let upvars = args.as_closure().upvar_tys();
967 if upvars.len() != fields.len() {
968 self.fail(location, "closure has the wrong number of initialized fields");
969 }
970 for (src, dest) in std::iter::zip(fields, upvars) {
971 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
972 self.fail(location, "closure field has the wrong type");
973 }
974 }
975 }
976 AggregateKind::Coroutine(_, args) => {
977 let upvars = args.as_coroutine().upvar_tys();
978 if upvars.len() != fields.len() {
979 self.fail(location, "coroutine has the wrong number of initialized fields");
980 }
981 for (src, dest) in std::iter::zip(fields, upvars) {
982 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
983 self.fail(location, "coroutine field has the wrong type");
984 }
985 }
986 }
987 AggregateKind::CoroutineClosure(_, args) => {
988 let upvars = args.as_coroutine_closure().upvar_tys();
989 if upvars.len() != fields.len() {
990 self.fail(
991 location,
992 "coroutine-closure has the wrong number of initialized fields",
993 );
994 }
995 for (src, dest) in std::iter::zip(fields, upvars) {
996 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
997 self.fail(location, "coroutine-closure field has the wrong type");
998 }
999 }
1000 }
1001 AggregateKind::RawPtr(pointee_ty, mutability) => {
1002 if !matches!(self.body.phase, MirPhase::Runtime(_)) {
1003 self.fail(location, "RawPtr should be in runtime MIR only");
1007 }
1008
1009 if let [data_ptr, metadata] = fields.raw.as_slice() {
1010 let data_ptr_ty = data_ptr.ty(self.body, self.tcx);
1011 let metadata_ty = metadata.ty(self.body, self.tcx);
1012 if let ty::RawPtr(in_pointee, in_mut) = data_ptr_ty.kind() {
1013 if *in_mut != mutability {
1014 self.fail(location, "input and output mutability must match");
1015 }
1016
1017 if !in_pointee.is_sized(self.tcx, self.typing_env) {
1019 self.fail(location, "input pointer must be thin");
1020 }
1021 } else {
1022 self.fail(
1023 location,
1024 "first operand to raw pointer aggregate must be a raw pointer",
1025 );
1026 }
1027
1028 if pointee_ty.is_slice() {
1030 if !self.mir_assign_valid_types(metadata_ty, self.tcx.types.usize) {
1031 self.fail(location, "slice metadata must be usize");
1032 }
1033 } else if pointee_ty.is_sized(self.tcx, self.typing_env) {
1034 if metadata_ty != self.tcx.types.unit {
1035 self.fail(location, "metadata for pointer-to-thin must be unit");
1036 }
1037 }
1038 } else {
1039 self.fail(location, "raw pointer aggregate must have 2 fields");
1040 }
1041 }
1042 },
1043 Rvalue::Ref(_, BorrowKind::Fake(_), _) => {
1044 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
1045 self.fail(
1046 location,
1047 "`Assign` statement with a `Fake` borrow should have been removed in runtime MIR",
1048 );
1049 }
1050 }
1051 Rvalue::Ref(..) => {}
1052 Rvalue::Len(p) => {
1053 let pty = p.ty(&self.body.local_decls, self.tcx).ty;
1054 check_kinds!(
1055 pty,
1056 "Cannot compute length of non-array type {:?}",
1057 ty::Array(..) | ty::Slice(..)
1058 );
1059 }
1060 Rvalue::BinaryOp(op, vals) => {
1061 use BinOp::*;
1062 let a = vals.0.ty(&self.body.local_decls, self.tcx);
1063 let b = vals.1.ty(&self.body.local_decls, self.tcx);
1064 if crate::util::binop_right_homogeneous(*op) {
1065 if let Eq | Lt | Le | Ne | Ge | Gt = op {
1066 if !self.mir_assign_valid_types(a, b) {
1068 self.fail(
1069 location,
1070 format!("Cannot {op:?} compare incompatible types {a} and {b}"),
1071 );
1072 }
1073 } else if a != b {
1074 self.fail(
1075 location,
1076 format!("Cannot perform binary op {op:?} on unequal types {a} and {b}"),
1077 );
1078 }
1079 }
1080
1081 match op {
1082 Offset => {
1083 check_kinds!(a, "Cannot offset non-pointer type {:?}", ty::RawPtr(..));
1084 if b != self.tcx.types.isize && b != self.tcx.types.usize {
1085 self.fail(location, format!("Cannot offset by non-isize type {b}"));
1086 }
1087 }
1088 Eq | Lt | Le | Ne | Ge | Gt => {
1089 for x in [a, b] {
1090 check_kinds!(
1091 x,
1092 "Cannot {op:?} compare type {:?}",
1093 ty::Bool
1094 | ty::Char
1095 | ty::Int(..)
1096 | ty::Uint(..)
1097 | ty::Float(..)
1098 | ty::RawPtr(..)
1099 | ty::FnPtr(..)
1100 )
1101 }
1102 }
1103 Cmp => {
1104 for x in [a, b] {
1105 check_kinds!(
1106 x,
1107 "Cannot three-way compare non-integer type {:?}",
1108 ty::Char | ty::Uint(..) | ty::Int(..)
1109 )
1110 }
1111 }
1112 AddUnchecked | AddWithOverflow | SubUnchecked | SubWithOverflow
1113 | MulUnchecked | MulWithOverflow | Shl | ShlUnchecked | Shr | ShrUnchecked => {
1114 for x in [a, b] {
1115 check_kinds!(
1116 x,
1117 "Cannot {op:?} non-integer type {:?}",
1118 ty::Uint(..) | ty::Int(..)
1119 )
1120 }
1121 }
1122 BitAnd | BitOr | BitXor => {
1123 for x in [a, b] {
1124 check_kinds!(
1125 x,
1126 "Cannot perform bitwise op {op:?} on type {:?}",
1127 ty::Uint(..) | ty::Int(..) | ty::Bool
1128 )
1129 }
1130 }
1131 Add | Sub | Mul | Div | Rem => {
1132 for x in [a, b] {
1133 check_kinds!(
1134 x,
1135 "Cannot perform arithmetic {op:?} on type {:?}",
1136 ty::Uint(..) | ty::Int(..) | ty::Float(..)
1137 )
1138 }
1139 }
1140 }
1141 }
1142 Rvalue::UnaryOp(op, operand) => {
1143 let a = operand.ty(&self.body.local_decls, self.tcx);
1144 match op {
1145 UnOp::Neg => {
1146 check_kinds!(a, "Cannot negate type {:?}", ty::Int(..) | ty::Float(..))
1147 }
1148 UnOp::Not => {
1149 check_kinds!(
1150 a,
1151 "Cannot binary not type {:?}",
1152 ty::Int(..) | ty::Uint(..) | ty::Bool
1153 );
1154 }
1155 UnOp::PtrMetadata => {
1156 check_kinds!(
1157 a,
1158 "Cannot PtrMetadata non-pointer non-reference type {:?}",
1159 ty::RawPtr(..) | ty::Ref(..)
1160 );
1161 }
1162 }
1163 }
1164 Rvalue::ShallowInitBox(operand, _) => {
1165 let a = operand.ty(&self.body.local_decls, self.tcx);
1166 check_kinds!(a, "Cannot shallow init type {:?}", ty::RawPtr(..));
1167 }
1168 Rvalue::Cast(kind, operand, target_type) => {
1169 let op_ty = operand.ty(self.body, self.tcx);
1170 match kind {
1171 CastKind::PointerWithExposedProvenance | CastKind::PointerExposeProvenance => {}
1173 CastKind::PointerCoercion(PointerCoercion::ReifyFnPointer, _) => {
1174 check_kinds!(
1176 op_ty,
1177 "CastKind::{kind:?} input must be a fn item, not {:?}",
1178 ty::FnDef(..)
1179 );
1180 check_kinds!(
1181 target_type,
1182 "CastKind::{kind:?} output must be a fn pointer, not {:?}",
1183 ty::FnPtr(..)
1184 );
1185 }
1186 CastKind::PointerCoercion(PointerCoercion::UnsafeFnPointer, _) => {
1187 check_kinds!(
1189 op_ty,
1190 "CastKind::{kind:?} input must be a fn pointer, not {:?}",
1191 ty::FnPtr(..)
1192 );
1193 check_kinds!(
1194 target_type,
1195 "CastKind::{kind:?} output must be a fn pointer, not {:?}",
1196 ty::FnPtr(..)
1197 );
1198 }
1199 CastKind::PointerCoercion(PointerCoercion::ClosureFnPointer(..), _) => {
1200 check_kinds!(
1202 op_ty,
1203 "CastKind::{kind:?} input must be a closure, not {:?}",
1204 ty::Closure(..)
1205 );
1206 check_kinds!(
1207 target_type,
1208 "CastKind::{kind:?} output must be a fn pointer, not {:?}",
1209 ty::FnPtr(..)
1210 );
1211 }
1212 CastKind::PointerCoercion(PointerCoercion::MutToConstPointer, _) => {
1213 check_kinds!(
1215 op_ty,
1216 "CastKind::{kind:?} input must be a raw mut pointer, not {:?}",
1217 ty::RawPtr(_, Mutability::Mut)
1218 );
1219 check_kinds!(
1220 target_type,
1221 "CastKind::{kind:?} output must be a raw const pointer, not {:?}",
1222 ty::RawPtr(_, Mutability::Not)
1223 );
1224 if self.body.phase >= MirPhase::Analysis(AnalysisPhase::PostCleanup) {
1225 self.fail(location, format!("After borrowck, MIR disallows {kind:?}"));
1226 }
1227 }
1228 CastKind::PointerCoercion(PointerCoercion::ArrayToPointer, _) => {
1229 check_kinds!(
1231 op_ty,
1232 "CastKind::{kind:?} input must be a raw pointer, not {:?}",
1233 ty::RawPtr(..)
1234 );
1235 check_kinds!(
1236 target_type,
1237 "CastKind::{kind:?} output must be a raw pointer, not {:?}",
1238 ty::RawPtr(..)
1239 );
1240 if self.body.phase >= MirPhase::Analysis(AnalysisPhase::PostCleanup) {
1241 self.fail(location, format!("After borrowck, MIR disallows {kind:?}"));
1242 }
1243 }
1244 CastKind::PointerCoercion(PointerCoercion::Unsize, _) => {
1245 if !self.predicate_must_hold_modulo_regions(ty::TraitRef::new(
1248 self.tcx,
1249 self.tcx.require_lang_item(
1250 LangItem::CoerceUnsized,
1251 Some(self.body.source_info(location).span),
1252 ),
1253 [op_ty, *target_type],
1254 )) {
1255 self.fail(location, format!("Unsize coercion, but `{op_ty}` isn't coercible to `{target_type}`"));
1256 }
1257 }
1258 CastKind::PointerCoercion(PointerCoercion::DynStar, _) => {
1259 }
1261 CastKind::IntToInt | CastKind::IntToFloat => {
1262 let input_valid = op_ty.is_integral() || op_ty.is_char() || op_ty.is_bool();
1263 let target_valid = target_type.is_numeric() || target_type.is_char();
1264 if !input_valid || !target_valid {
1265 self.fail(
1266 location,
1267 format!("Wrong cast kind {kind:?} for the type {op_ty}"),
1268 );
1269 }
1270 }
1271 CastKind::FnPtrToPtr => {
1272 check_kinds!(
1273 op_ty,
1274 "CastKind::{kind:?} input must be a fn pointer, not {:?}",
1275 ty::FnPtr(..)
1276 );
1277 check_kinds!(
1278 target_type,
1279 "CastKind::{kind:?} output must be a raw pointer, not {:?}",
1280 ty::RawPtr(..)
1281 );
1282 }
1283 CastKind::PtrToPtr => {
1284 check_kinds!(
1285 op_ty,
1286 "CastKind::{kind:?} input must be a raw pointer, not {:?}",
1287 ty::RawPtr(..)
1288 );
1289 check_kinds!(
1290 target_type,
1291 "CastKind::{kind:?} output must be a raw pointer, not {:?}",
1292 ty::RawPtr(..)
1293 );
1294 }
1295 CastKind::FloatToFloat | CastKind::FloatToInt => {
1296 if !op_ty.is_floating_point() || !target_type.is_numeric() {
1297 self.fail(
1298 location,
1299 format!(
1300 "Trying to cast non 'Float' as {kind:?} into {target_type:?}"
1301 ),
1302 );
1303 }
1304 }
1305 CastKind::Transmute => {
1306 if let MirPhase::Runtime(..) = self.body.phase {
1307 if !self
1311 .tcx
1312 .normalize_erasing_regions(self.typing_env, op_ty)
1313 .is_sized(self.tcx, self.typing_env)
1314 {
1315 self.fail(
1316 location,
1317 format!("Cannot transmute from non-`Sized` type {op_ty}"),
1318 );
1319 }
1320 if !self
1321 .tcx
1322 .normalize_erasing_regions(self.typing_env, *target_type)
1323 .is_sized(self.tcx, self.typing_env)
1324 {
1325 self.fail(
1326 location,
1327 format!("Cannot transmute to non-`Sized` type {target_type:?}"),
1328 );
1329 }
1330 } else {
1331 self.fail(
1332 location,
1333 format!(
1334 "Transmute is not supported in non-runtime phase {:?}.",
1335 self.body.phase
1336 ),
1337 );
1338 }
1339 }
1340 }
1341 }
1342 Rvalue::NullaryOp(NullOp::OffsetOf(indices), container) => {
1343 let fail_out_of_bounds = |this: &mut Self, location, field, ty| {
1344 this.fail(location, format!("Out of bounds field {field:?} for {ty}"));
1345 };
1346
1347 let mut current_ty = *container;
1348
1349 for (variant, field) in indices.iter() {
1350 match current_ty.kind() {
1351 ty::Tuple(fields) => {
1352 if variant != FIRST_VARIANT {
1353 self.fail(
1354 location,
1355 format!("tried to get variant {variant:?} of tuple"),
1356 );
1357 return;
1358 }
1359 let Some(&f_ty) = fields.get(field.as_usize()) else {
1360 fail_out_of_bounds(self, location, field, current_ty);
1361 return;
1362 };
1363
1364 current_ty = self.tcx.normalize_erasing_regions(self.typing_env, f_ty);
1365 }
1366 ty::Adt(adt_def, args) => {
1367 let Some(field) = adt_def.variant(variant).fields.get(field) else {
1368 fail_out_of_bounds(self, location, field, current_ty);
1369 return;
1370 };
1371
1372 let f_ty = field.ty(self.tcx, args);
1373 current_ty = self.tcx.normalize_erasing_regions(self.typing_env, f_ty);
1374 }
1375 _ => {
1376 self.fail(
1377 location,
1378 format!("Cannot get offset ({variant:?}, {field:?}) from type {current_ty}"),
1379 );
1380 return;
1381 }
1382 }
1383 }
1384 }
1385 Rvalue::Repeat(_, _)
1386 | Rvalue::ThreadLocalRef(_)
1387 | Rvalue::RawPtr(_, _)
1388 | Rvalue::NullaryOp(
1389 NullOp::SizeOf | NullOp::AlignOf | NullOp::UbChecks | NullOp::ContractChecks,
1390 _,
1391 )
1392 | Rvalue::Discriminant(_) => {}
1393
1394 Rvalue::WrapUnsafeBinder(op, ty) => {
1395 let unwrapped_ty = op.ty(self.body, self.tcx);
1396 let ty::UnsafeBinder(binder_ty) = *ty.kind() else {
1397 self.fail(
1398 location,
1399 format!("WrapUnsafeBinder does not produce a ty::UnsafeBinder"),
1400 );
1401 return;
1402 };
1403 let binder_inner_ty = self.tcx.instantiate_bound_regions_with_erased(*binder_ty);
1404 if !self.mir_assign_valid_types(unwrapped_ty, binder_inner_ty) {
1405 self.fail(
1406 location,
1407 format!("Cannot wrap {unwrapped_ty} into unsafe binder {binder_ty:?}"),
1408 );
1409 }
1410 }
1411 }
1412 self.super_rvalue(rvalue, location);
1413 }
1414
1415 fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
1416 match &statement.kind {
1417 StatementKind::Assign(box (dest, rvalue)) => {
1418 let left_ty = dest.ty(&self.body.local_decls, self.tcx).ty;
1420 let right_ty = rvalue.ty(&self.body.local_decls, self.tcx);
1421
1422 if !self.mir_assign_valid_types(right_ty, left_ty) {
1423 self.fail(
1424 location,
1425 format!(
1426 "encountered `{:?}` with incompatible types:\n\
1427 left-hand side has type: {}\n\
1428 right-hand side has type: {}",
1429 statement.kind, left_ty, right_ty,
1430 ),
1431 );
1432 }
1433 if let Rvalue::CopyForDeref(place) = rvalue {
1434 if place.ty(&self.body.local_decls, self.tcx).ty.builtin_deref(true).is_none() {
1435 self.fail(
1436 location,
1437 "`CopyForDeref` should only be used for dereferenceable types",
1438 )
1439 }
1440 }
1441 }
1442 StatementKind::AscribeUserType(..) => {
1443 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
1444 self.fail(
1445 location,
1446 "`AscribeUserType` should have been removed after drop lowering phase",
1447 );
1448 }
1449 }
1450 StatementKind::FakeRead(..) => {
1451 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
1452 self.fail(
1453 location,
1454 "`FakeRead` should have been removed after drop lowering phase",
1455 );
1456 }
1457 }
1458 StatementKind::Intrinsic(box NonDivergingIntrinsic::Assume(op)) => {
1459 let ty = op.ty(&self.body.local_decls, self.tcx);
1460 if !ty.is_bool() {
1461 self.fail(
1462 location,
1463 format!("`assume` argument must be `bool`, but got: `{ty}`"),
1464 );
1465 }
1466 }
1467 StatementKind::Intrinsic(box NonDivergingIntrinsic::CopyNonOverlapping(
1468 CopyNonOverlapping { src, dst, count },
1469 )) => {
1470 let src_ty = src.ty(&self.body.local_decls, self.tcx);
1471 let op_src_ty = if let Some(src_deref) = src_ty.builtin_deref(true) {
1472 src_deref
1473 } else {
1474 self.fail(
1475 location,
1476 format!("Expected src to be ptr in copy_nonoverlapping, got: {src_ty}"),
1477 );
1478 return;
1479 };
1480 let dst_ty = dst.ty(&self.body.local_decls, self.tcx);
1481 let op_dst_ty = if let Some(dst_deref) = dst_ty.builtin_deref(true) {
1482 dst_deref
1483 } else {
1484 self.fail(
1485 location,
1486 format!("Expected dst to be ptr in copy_nonoverlapping, got: {dst_ty}"),
1487 );
1488 return;
1489 };
1490 if !self.mir_assign_valid_types(op_src_ty, op_dst_ty) {
1493 self.fail(location, format!("bad arg ({op_src_ty} != {op_dst_ty})"));
1494 }
1495
1496 let op_cnt_ty = count.ty(&self.body.local_decls, self.tcx);
1497 if op_cnt_ty != self.tcx.types.usize {
1498 self.fail(location, format!("bad arg ({op_cnt_ty} != usize)"))
1499 }
1500 }
1501 StatementKind::SetDiscriminant { place, .. } => {
1502 if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial) {
1503 self.fail(location, "`SetDiscriminant`is not allowed until deaggregation");
1504 }
1505 let pty = place.ty(&self.body.local_decls, self.tcx).ty;
1506 if !matches!(
1507 pty.kind(),
1508 ty::Adt(..) | ty::Coroutine(..) | ty::Alias(ty::Opaque, ..)
1509 ) {
1510 self.fail(
1511 location,
1512 format!(
1513 "`SetDiscriminant` is only allowed on ADTs and coroutines, not {pty}"
1514 ),
1515 );
1516 }
1517 }
1518 StatementKind::Deinit(..) => {
1519 if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial) {
1520 self.fail(location, "`Deinit`is not allowed until deaggregation");
1521 }
1522 }
1523 StatementKind::Retag(kind, _) => {
1524 if matches!(kind, RetagKind::TwoPhase) {
1528 self.fail(location, format!("explicit `{kind:?}` is forbidden"));
1529 }
1530 }
1531 StatementKind::StorageLive(_)
1532 | StatementKind::StorageDead(_)
1533 | StatementKind::Coverage(_)
1534 | StatementKind::ConstEvalCounter
1535 | StatementKind::PlaceMention(..)
1536 | StatementKind::BackwardIncompatibleDropHint { .. }
1537 | StatementKind::Nop => {}
1538 }
1539
1540 self.super_statement(statement, location);
1541 }
1542
1543 fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
1544 match &terminator.kind {
1545 TerminatorKind::SwitchInt { targets, discr } => {
1546 let switch_ty = discr.ty(&self.body.local_decls, self.tcx);
1547
1548 let target_width = self.tcx.sess.target.pointer_width;
1549
1550 let size = Size::from_bits(match switch_ty.kind() {
1551 ty::Uint(uint) => uint.normalize(target_width).bit_width().unwrap(),
1552 ty::Int(int) => int.normalize(target_width).bit_width().unwrap(),
1553 ty::Char => 32,
1554 ty::Bool => 1,
1555 other => bug!("unhandled type: {:?}", other),
1556 });
1557
1558 for (value, _) in targets.iter() {
1559 if ScalarInt::try_from_uint(value, size).is_none() {
1560 self.fail(
1561 location,
1562 format!("the value {value:#x} is not a proper {switch_ty}"),
1563 )
1564 }
1565 }
1566 }
1567 TerminatorKind::Call { func, .. } | TerminatorKind::TailCall { func, .. } => {
1568 let func_ty = func.ty(&self.body.local_decls, self.tcx);
1569 match func_ty.kind() {
1570 ty::FnPtr(..) | ty::FnDef(..) => {}
1571 _ => self.fail(
1572 location,
1573 format!(
1574 "encountered non-callable type {func_ty} in `{}` terminator",
1575 terminator.kind.name()
1576 ),
1577 ),
1578 }
1579
1580 if let TerminatorKind::TailCall { .. } = terminator.kind {
1581 }
1584 }
1585 TerminatorKind::Assert { cond, .. } => {
1586 let cond_ty = cond.ty(&self.body.local_decls, self.tcx);
1587 if cond_ty != self.tcx.types.bool {
1588 self.fail(
1589 location,
1590 format!(
1591 "encountered non-boolean condition of type {cond_ty} in `Assert` terminator"
1592 ),
1593 );
1594 }
1595 }
1596 TerminatorKind::Goto { .. }
1597 | TerminatorKind::Drop { .. }
1598 | TerminatorKind::Yield { .. }
1599 | TerminatorKind::FalseEdge { .. }
1600 | TerminatorKind::FalseUnwind { .. }
1601 | TerminatorKind::InlineAsm { .. }
1602 | TerminatorKind::CoroutineDrop
1603 | TerminatorKind::UnwindResume
1604 | TerminatorKind::UnwindTerminate(_)
1605 | TerminatorKind::Return
1606 | TerminatorKind::Unreachable => {}
1607 }
1608
1609 self.super_terminator(terminator, location);
1610 }
1611}