1use rustc_abi::{ExternAbi, FIRST_VARIANT, Size};
4use rustc_data_structures::fx::{FxHashMap, FxHashSet};
5use rustc_hir::LangItem;
6use rustc_hir::attrs::InlineAttr;
7use rustc_index::IndexVec;
8use rustc_index::bit_set::DenseBitSet;
9use rustc_infer::infer::TyCtxtInferExt;
10use rustc_infer::traits::{Obligation, ObligationCause};
11use rustc_middle::mir::coverage::CoverageKind;
12use rustc_middle::mir::visit::{MutatingUseContext, NonUseContext, PlaceContext, Visitor};
13use rustc_middle::mir::*;
14use rustc_middle::ty::adjustment::PointerCoercion;
15use rustc_middle::ty::print::with_no_trimmed_paths;
16use rustc_middle::ty::{
17 self, CoroutineArgsExt, InstanceKind, ScalarInt, Ty, TyCtxt, TypeVisitableExt, Unnormalized,
18 Upcast, Variance,
19};
20use rustc_middle::{bug, span_bug};
21use rustc_mir_dataflow::debuginfo::debuginfo_locals;
22use rustc_trait_selection::traits::ObligationCtxt;
23
24use crate::util::{self, most_packed_projection};
25
/// Kind of a control-flow edge, used by `check_edge` to enforce unwind
/// invariants: `Unwind` edges may only go from non-cleanup blocks into
/// cleanup blocks, while `Normal` edges must not cross that boundary.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum EdgeKind {
    /// An edge taken when unwinding (must land in a cleanup block).
    Unwind,
    /// An ordinary control-flow edge.
    Normal,
}
31
/// A MIR pass that sanity-checks the body for broken invariants.
pub(super) struct Validator {
    /// Describes at which point in the pipeline this validation is happening;
    /// included verbatim in every "broken MIR" diagnostic.
    pub when: String,
}
36
impl<'tcx> crate::MirPass<'tcx> for Validator {
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        // Intrinsic and virtual-call shims are synthetic bodies that are not
        // expected to uphold the usual invariants; skip them entirely.
        if matches!(body.source.instance, InstanceKind::Intrinsic(..) | InstanceKind::Virtual(..)) {
            return;
        }
        let def_id = body.source.def_id();
        let typing_env = body.typing_env(tcx);
        // Determine whether this body is allowed to unwind. Early-phase MIR
        // and non-fn-like bodies are conservatively treated as unwinding;
        // otherwise we derive the ABI from the body's type and ask
        // `fn_can_unwind`.
        let can_unwind = if body.phase <= MirPhase::Runtime(RuntimePhase::Initial) {
            true
        } else if !tcx.def_kind(def_id).is_fn_like() {
            true
        } else {
            let body_ty = tcx.type_of(def_id).skip_binder();
            let body_abi = match body_ty.kind() {
                ty::FnDef(..) => body_ty.fn_sig(tcx).abi(),
                ty::Closure(..) => ExternAbi::RustCall,
                ty::CoroutineClosure(..) => ExternAbi::RustCall,
                ty::Coroutine(..) => ExternAbi::Rust,
                // No point in validating a body that already failed to type-check.
                ty::Error(_) => return,
                _ => span_bug!(body.span, "unexpected body ty: {body_ty}"),
            };

            ty::layout::fn_can_unwind(tcx, Some(def_id), body_abi)
        };

        // First pass: CFG-level invariants (edge validity, cleanup structure,
        // phase-appropriate statements/terminators).
        let mut cfg_checker = CfgChecker {
            when: &self.when,
            body,
            tcx,
            unwind_edge_count: 0,
            reachable_blocks: traversal::reachable_as_bitset(body),
            value_cache: FxHashSet::default(),
            can_unwind,
        };
        cfg_checker.visit_body(body);
        cfg_checker.check_cleanup_control_flow();

        // Second pass: type-level invariants; failures are funneled through the
        // same reporting path (`cfg_checker.fail`).
        for (location, msg) in validate_types(tcx, typing_env, body, body) {
            cfg_checker.fail(location, msg);
        }

        // Third pass: debuginfo invariants.
        for (location, msg) in validate_debuginfos(body) {
            cfg_checker.fail(location, msg);
        }

        // Runtime MIR of ordinary items must have had its free regions erased.
        if let MirPhase::Runtime(_) = body.phase
            && let ty::InstanceKind::Item(_) = body.source.instance
            && body.has_free_regions()
        {
            cfg_checker.fail(
                Location::START,
                format!("Free regions in optimized {} MIR", body.phase.name()),
            );
        }
    }

    // Validation must never be elided when it is scheduled in the pipeline.
    fn is_required(&self) -> bool {
        true
    }
}
105
/// Visitor that checks CFG-level invariants of a MIR body: edge validity,
/// cleanup-block structure, and which statement/terminator kinds are legal
/// in the body's current `MirPhase`.
struct CfgChecker<'a, 'tcx> {
    /// Pipeline position, included in diagnostics.
    when: &'a str,
    body: &'a Body<'tcx>,
    tcx: TyCtxt<'tcx>,
    /// Number of unwind (non-cleanup -> cleanup) edges seen; used to decide
    /// whether `check_cleanup_control_flow` has anything to do.
    unwind_edge_count: usize,
    /// Blocks reachable from the start block; unreachable blocks are exempt
    /// from the cleanup control-flow checks.
    reachable_blocks: DenseBitSet<BasicBlock>,
    /// Scratch set reused to detect duplicate `SwitchInt` branch values.
    value_cache: FxHashSet<u128>,
    /// Whether this function may unwind (computed in `run_pass`).
    can_unwind: bool,
}
123
impl<'a, 'tcx> CfgChecker<'a, 'tcx> {
    /// Report broken MIR at `location` via `span_bug!` — unless an error has
    /// already been emitted, in which case malformed MIR is expected and the
    /// report is suppressed.
    #[track_caller]
    fn fail(&self, location: Location, msg: impl AsRef<str>) {
        // We might see broken MIR when other errors have already occurred.
        if self.tcx.dcx().has_errors().is_none() {
            span_bug!(
                self.body.source_info(location).span,
                "broken MIR in {:?} ({}) at {:?}:\n{}",
                self.body.source.instance,
                self.when,
                location,
                msg.as_ref(),
            );
        }
    }

    /// Check a jump from `location` to `bb`: the target must exist, must not
    /// be the start block, and the cleanup-ness of source and target must be
    /// compatible with `edge_kind`. Valid unwind edges are counted for
    /// `check_cleanup_control_flow`.
    fn check_edge(&mut self, location: Location, bb: BasicBlock, edge_kind: EdgeKind) {
        if bb == START_BLOCK {
            self.fail(location, "start block must not have predecessors")
        }
        if let Some(bb) = self.body.basic_blocks.get(bb) {
            let src = self.body.basic_blocks.get(location.block).unwrap();
            match (src.is_cleanup, bb.is_cleanup, edge_kind) {
                // Normal edges must stay on one side of the cleanup boundary:
                // non-cleanup -> non-cleanup, or cleanup -> cleanup.
                (false, false, EdgeKind::Normal)
                | (true, true, EdgeKind::Normal) => {}
                // Only unwind edges may enter the cleanup subgraph.
                (false, true, EdgeKind::Unwind) => {
                    self.unwind_edge_count += 1;
                }
                // Everything else violates the unwind invariants.
                _ => {
                    self.fail(
                        location,
                        format!(
                            "{:?} edge to {:?} violates unwind invariants (cleanup {:?} -> {:?})",
                            edge_kind,
                            bb,
                            src.is_cleanup,
                            bb.is_cleanup,
                        )
                    )
                }
            }
        } else {
            self.fail(location, format!("encountered jump to invalid basic block {bb:?}"))
        }
    }

    /// Verify the shape of the cleanup control-flow graph: after contracting
    /// each chain of dominated cleanup blocks to a single node, every node may
    /// have at most one distinct successor node, and the resulting parent
    /// relation must be acyclic.
    fn check_cleanup_control_flow(&self) {
        // With at most one unwind edge there is nothing that can go wrong.
        if self.unwind_edge_count <= 1 {
            return;
        }
        let doms = self.body.basic_blocks.dominators();
        let mut post_contract_node = FxHashMap::default();
        // Scratch buffer reused across calls to the closure below.
        let mut dom_path = vec![];
        // Contract `bb` to the topmost cleanup block in its immediate-dominator
        // chain, memoizing every block visited along the way.
        let mut get_post_contract_node = |mut bb| {
            let root = loop {
                if let Some(root) = post_contract_node.get(&bb) {
                    break *root;
                }
                let parent = doms.immediate_dominator(bb).unwrap();
                dom_path.push(bb);
                if !self.body.basic_blocks[parent].is_cleanup {
                    break bb;
                }
                bb = parent;
            };
            for bb in dom_path.drain(..) {
                post_contract_node.insert(bb, root);
            }
            root
        };

        // Record, for each contracted cleanup node, its unique successor node;
        // two distinct successors is a violation.
        let mut parent = IndexVec::from_elem(None, &self.body.basic_blocks);
        for (bb, bb_data) in self.body.basic_blocks.iter_enumerated() {
            if !bb_data.is_cleanup || !self.reachable_blocks.contains(bb) {
                continue;
            }
            let bb = get_post_contract_node(bb);
            for s in bb_data.terminator().successors() {
                let s = get_post_contract_node(s);
                if s == bb {
                    continue;
                }
                let parent = &mut parent[bb];
                match parent {
                    None => {
                        *parent = Some(s);
                    }
                    Some(e) if *e == s => (),
                    Some(e) => self.fail(
                        Location { block: bb, statement_index: 0 },
                        format!(
                            "Cleanup control flow violation: The blocks dominated by {:?} have edges to both {:?} and {:?}",
                            bb,
                            s,
                            *e
                        )
                    ),
                }
            }
        }

        // Check for cycles in the parent relation by walking each chain and
        // remembering the nodes seen so far.
        let mut stack = FxHashSet::default();
        for (mut bb, parent) in parent.iter_enumerated_mut() {
            stack.clear();
            stack.insert(bb);
            loop {
                let Some(parent) = parent.take() else { break };
                let no_cycle = stack.insert(parent);
                if !no_cycle {
                    self.fail(
                        Location { block: bb, statement_index: 0 },
                        format!(
                            "Cleanup control flow violation: Cycle involving edge {bb:?} -> {parent:?}",
                        ),
                    );
                    break;
                }
                bb = parent;
            }
        }
    }

    /// Check an `UnwindAction` attached to the terminator at `location`:
    /// cleanup blocks must not unwind again, and `Continue` is illegal in
    /// functions that cannot unwind.
    fn check_unwind_edge(&mut self, location: Location, unwind: UnwindAction) {
        let is_cleanup = self.body.basic_blocks[location.block].is_cleanup;
        match unwind {
            UnwindAction::Cleanup(unwind) => {
                if is_cleanup {
                    self.fail(location, "`UnwindAction::Cleanup` in cleanup block");
                }
                self.check_edge(location, unwind, EdgeKind::Unwind);
            }
            UnwindAction::Continue => {
                if is_cleanup {
                    self.fail(location, "`UnwindAction::Continue` in cleanup block");
                }

                if !self.can_unwind {
                    self.fail(location, "`UnwindAction::Continue` in no-unwind function");
                }
            }
            UnwindAction::Terminate(UnwindTerminateReason::InCleanup) => {
                if !is_cleanup {
                    self.fail(
                        location,
                        "`UnwindAction::Terminate(InCleanup)` in a non-cleanup block",
                    );
                }
            }
            // `Unreachable` and ABI-mandated termination are valid everywhere.
            UnwindAction::Unreachable | UnwindAction::Terminate(UnwindTerminateReason::Abi) => (),
        }
    }

    /// A `Call` edge to `target` is critical when the call can also unwind
    /// (more than one successor) and `target` has more than one predecessor.
    fn is_critical_call_edge(&self, target: Option<BasicBlock>, unwind: UnwindAction) -> bool {
        let Some(target) = target else { return false };
        matches!(unwind, UnwindAction::Cleanup(_) | UnwindAction::Terminate(_))
            && self.body.basic_blocks.predecessors()[target].len() > 1
    }
}
289
290impl<'a, 'tcx> Visitor<'tcx> for CfgChecker<'a, 'tcx> {
291 fn visit_local(&mut self, local: Local, _context: PlaceContext, location: Location) {
292 if self.body.local_decls.get(local).is_none() {
293 self.fail(
294 location,
295 format!("local {local:?} has no corresponding declaration in `body.local_decls`"),
296 );
297 }
298 }
299
300 fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
301 match &statement.kind {
302 StatementKind::AscribeUserType(..) => {
303 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
304 self.fail(
305 location,
306 "`AscribeUserType` should have been removed after drop lowering phase",
307 );
308 }
309 }
310 StatementKind::FakeRead(..) => {
311 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
312 self.fail(
313 location,
314 "`FakeRead` should have been removed after drop lowering phase",
315 );
316 }
317 }
318 StatementKind::SetDiscriminant { .. } => {
319 if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial) {
320 self.fail(location, "`SetDiscriminant`is not allowed until deaggregation");
321 }
322 }
323 StatementKind::Coverage(kind) => {
324 if self.body.phase >= MirPhase::Analysis(AnalysisPhase::PostCleanup)
325 && let CoverageKind::BlockMarker { .. } | CoverageKind::SpanMarker { .. } = kind
326 {
327 self.fail(
328 location,
329 format!("{kind:?} should have been removed after analysis"),
330 );
331 }
332 }
333 StatementKind::Assign(..)
334 | StatementKind::StorageLive(_)
335 | StatementKind::StorageDead(_)
336 | StatementKind::Intrinsic(_)
337 | StatementKind::ConstEvalCounter
338 | StatementKind::PlaceMention(..)
339 | StatementKind::BackwardIncompatibleDropHint { .. }
340 | StatementKind::Nop => {}
341 }
342
343 self.super_statement(statement, location);
344 }
345
346 fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
347 match &terminator.kind {
348 TerminatorKind::Goto { target } => {
349 self.check_edge(location, *target, EdgeKind::Normal);
350 }
351 TerminatorKind::SwitchInt { targets, discr: _ } => {
352 for (_, target) in targets.iter() {
353 self.check_edge(location, target, EdgeKind::Normal);
354 }
355 self.check_edge(location, targets.otherwise(), EdgeKind::Normal);
356
357 self.value_cache.clear();
358 self.value_cache.extend(targets.iter().map(|(value, _)| value));
359 let has_duplicates = targets.iter().len() != self.value_cache.len();
360 if has_duplicates {
361 self.fail(
362 location,
363 format!(
364 "duplicated values in `SwitchInt` terminator: {:?}",
365 terminator.kind,
366 ),
367 );
368 }
369 }
370 TerminatorKind::Drop { target, unwind, drop, .. } => {
371 self.check_edge(location, *target, EdgeKind::Normal);
372 self.check_unwind_edge(location, *unwind);
373 if let Some(drop) = drop {
374 self.check_edge(location, *drop, EdgeKind::Normal);
375 }
376 }
377 TerminatorKind::Call { func, args, .. }
378 | TerminatorKind::TailCall { func, args, .. } => {
379 if let TerminatorKind::Call { target, unwind, destination, .. } = terminator.kind {
381 if let Some(target) = target {
382 self.check_edge(location, target, EdgeKind::Normal);
383 }
384 self.check_unwind_edge(location, unwind);
385
386 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Optimized)
392 && self.is_critical_call_edge(target, unwind)
393 {
394 self.fail(
395 location,
396 format!(
397 "encountered critical edge in `Call` terminator {:?}",
398 terminator.kind,
399 ),
400 );
401 }
402
403 if most_packed_projection(self.tcx, &self.body.local_decls, destination)
406 .is_some()
407 {
408 self.fail(
410 location,
411 format!(
412 "encountered packed place in `Call` terminator destination: {:?}",
413 terminator.kind,
414 ),
415 );
416 }
417 }
418
419 for arg in args {
420 if let Operand::Move(place) = &arg.node {
421 if most_packed_projection(self.tcx, &self.body.local_decls, *place)
422 .is_some()
423 {
424 self.fail(
426 location,
427 format!(
428 "encountered `Move` of a packed place in `Call` terminator: {:?}",
429 terminator.kind,
430 ),
431 );
432 }
433 }
434 }
435
436 if let ty::FnDef(did, ..) = *func.ty(&self.body.local_decls, self.tcx).kind()
437 && self.body.phase >= MirPhase::Runtime(RuntimePhase::Optimized)
438 && matches!(self.tcx.codegen_fn_attrs(did).inline, InlineAttr::Force { .. })
439 {
440 self.fail(location, "`#[rustc_force_inline]`-annotated function not inlined");
441 }
442 }
443 TerminatorKind::Assert { target, unwind, .. } => {
444 self.check_edge(location, *target, EdgeKind::Normal);
445 self.check_unwind_edge(location, *unwind);
446 }
447 TerminatorKind::Yield { resume, drop, .. } => {
448 if self.body.coroutine.is_none() {
449 self.fail(location, "`Yield` cannot appear outside coroutine bodies");
450 }
451 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
452 self.fail(location, "`Yield` should have been replaced by coroutine lowering");
453 }
454 self.check_edge(location, *resume, EdgeKind::Normal);
455 if let Some(drop) = drop {
456 self.check_edge(location, *drop, EdgeKind::Normal);
457 }
458 }
459 TerminatorKind::FalseEdge { real_target, imaginary_target } => {
460 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
461 self.fail(
462 location,
463 "`FalseEdge` should have been removed after drop elaboration",
464 );
465 }
466 self.check_edge(location, *real_target, EdgeKind::Normal);
467 self.check_edge(location, *imaginary_target, EdgeKind::Normal);
468 }
469 TerminatorKind::FalseUnwind { real_target, unwind } => {
470 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
471 self.fail(
472 location,
473 "`FalseUnwind` should have been removed after drop elaboration",
474 );
475 }
476 self.check_edge(location, *real_target, EdgeKind::Normal);
477 self.check_unwind_edge(location, *unwind);
478 }
479 TerminatorKind::InlineAsm { targets, unwind, .. } => {
480 for &target in targets {
481 self.check_edge(location, target, EdgeKind::Normal);
482 }
483 self.check_unwind_edge(location, *unwind);
484 }
485 TerminatorKind::CoroutineDrop => {
486 if self.body.coroutine.is_none() {
487 self.fail(location, "`CoroutineDrop` cannot appear outside coroutine bodies");
488 }
489 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
490 self.fail(
491 location,
492 "`CoroutineDrop` should have been replaced by coroutine lowering",
493 );
494 }
495 }
496 TerminatorKind::UnwindResume => {
497 let bb = location.block;
498 if !self.body.basic_blocks[bb].is_cleanup {
499 self.fail(location, "Cannot `UnwindResume` from non-cleanup basic block")
500 }
501 if !self.can_unwind {
502 self.fail(location, "Cannot `UnwindResume` in a function that cannot unwind")
503 }
504 }
505 TerminatorKind::UnwindTerminate(_) => {
506 let bb = location.block;
507 if !self.body.basic_blocks[bb].is_cleanup {
508 self.fail(location, "Cannot `UnwindTerminate` from non-cleanup basic block")
509 }
510 }
511 TerminatorKind::Return => {
512 let bb = location.block;
513 if self.body.basic_blocks[bb].is_cleanup {
514 self.fail(location, "Cannot `Return` from cleanup basic block")
515 }
516 }
517 TerminatorKind::Unreachable => {}
518 }
519
520 self.super_terminator(terminator, location);
521 }
522
523 fn visit_source_scope(&mut self, scope: SourceScope) {
524 if self.body.source_scopes.get(scope).is_none() {
525 self.tcx.dcx().span_bug(
526 self.body.span,
527 format!(
528 "broken MIR in {:?} ({}):\ninvalid source scope {:?}",
529 self.body.source.instance, self.when, scope,
530 ),
531 );
532 }
533 }
534}
535
536pub(super) fn validate_types<'tcx>(
542 tcx: TyCtxt<'tcx>,
543 typing_env: ty::TypingEnv<'tcx>,
544 body: &Body<'tcx>,
545 caller_body: &Body<'tcx>,
546) -> Vec<(Location, String)> {
547 let mut type_checker = TypeChecker { body, caller_body, tcx, typing_env, failures: Vec::new() };
548 with_no_trimmed_paths!({
553 type_checker.visit_body(body);
554 });
555 type_checker.failures
556}
557
/// Visitor that checks type-level invariants of a MIR body; failures are
/// accumulated in `failures` rather than reported eagerly.
struct TypeChecker<'a, 'tcx> {
    /// The body whose statements are being checked.
    body: &'a Body<'tcx>,
    /// The body of the enclosing/calling item; used when resolving coroutine
    /// layouts (may be the same as `body`).
    caller_body: &'a Body<'tcx>,
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    /// Collected `(location, message)` validation failures.
    failures: Vec<(Location, String)>,
}
565
impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
    /// Record a validation failure; reporting is left to the caller of
    /// `validate_types`.
    fn fail(&mut self, location: Location, msg: impl Into<String>) {
        self.failures.push((location, msg.into()));
    }

    /// Returns whether a value of type `src` may be assigned to a place of
    /// type `dest`. This is deliberately approximate: identical types and
    /// anything involving opaque types are accepted without further checks.
    fn mir_assign_valid_types(&self, src: Ty<'tcx>, dest: Ty<'tcx>) -> bool {
        // Fast path: equal types are always assignable.
        if src == dest {
            return true;
        }

        // Relating types through opaques is unreliable here, so accept
        // conservatively whenever either side mentions one.
        if (src, dest).has_opaque_types() {
            return true;
        }

        // In runtime MIR subtyping must already be explicit, so require
        // invariance; before that, covariance is still allowed.
        let variance = if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
            Variance::Invariant
        } else {
            Variance::Covariant
        };

        crate::util::relate_types(self.tcx, self.typing_env, variance, src, dest)
    }

    /// Returns whether `pred` holds in the checker's typing environment,
    /// ignoring region constraints. Predicates mentioning opaque types are
    /// conservatively accepted.
    fn predicate_must_hold_modulo_regions(
        &self,
        pred: impl Upcast<TyCtxt<'tcx>, ty::Predicate<'tcx>>,
    ) -> bool {
        let pred: ty::Predicate<'tcx> = pred.upcast(self.tcx);

        // Same opaque-type caveat as in `mir_assign_valid_types`.
        if pred.has_opaque_types() {
            return true;
        }

        // Fully evaluate the obligation in a fresh inference context.
        let (infcx, param_env) = self.tcx.infer_ctxt().build_with_typing_env(self.typing_env);
        let ocx = ObligationCtxt::new(&infcx);
        ocx.register_obligation(Obligation::new(
            self.tcx,
            ObligationCause::dummy(),
            param_env,
            pred,
        ));
        ocx.evaluate_obligations_error_on_ambiguity().is_empty()
    }
}
627
628impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
629 fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
630 if self.tcx.sess.opts.unstable_opts.validate_mir
632 && self.body.phase < MirPhase::Runtime(RuntimePhase::Initial)
633 {
634 if let Operand::Copy(place) = operand {
636 let ty = place.ty(&self.body.local_decls, self.tcx).ty;
637
638 if !self.tcx.type_is_copy_modulo_regions(self.typing_env, ty) {
639 self.fail(location, format!("`Operand::Copy` with non-`Copy` type {ty}"));
640 }
641 }
642 }
643
644 self.super_operand(operand, location);
645 }
646
647 fn visit_projection_elem(
648 &mut self,
649 place_ref: PlaceRef<'tcx>,
650 elem: PlaceElem<'tcx>,
651 context: PlaceContext,
652 location: Location,
653 ) {
654 match elem {
655 ProjectionElem::Deref
656 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) =>
657 {
658 let base_ty = place_ref.ty(&self.body.local_decls, self.tcx).ty;
659
660 if base_ty.is_box() {
661 self.fail(location, format!("{base_ty} dereferenced after ElaborateBoxDerefs"))
662 }
663 }
664 ProjectionElem::Field(f, ty) => {
665 let parent_ty = place_ref.ty(&self.body.local_decls, self.tcx);
666 let fail_out_of_bounds = |this: &mut Self, location| {
667 this.fail(location, format!("Out of bounds field {f:?} for {parent_ty:?}"));
668 };
669 let check_equal = |this: &mut Self, location, f_ty| {
670 if !this.mir_assign_valid_types(ty, f_ty) {
671 this.fail(
672 location,
673 format!(
674 "Field projection `{place_ref:?}.{f:?}` specified type `{ty}`, but actual type is `{f_ty}`"
675 )
676 )
677 }
678 };
679
680 let kind = match parent_ty.ty.kind() {
681 &ty::Alias(ty::AliasTy { kind: ty::Opaque { def_id }, args, .. }) => {
682 self.tcx.type_of(def_id).instantiate(self.tcx, args).skip_norm_wip().kind()
683 }
684 kind => kind,
685 };
686
687 match kind {
688 ty::Tuple(fields) => {
689 let Some(f_ty) = fields.get(f.as_usize()) else {
690 fail_out_of_bounds(self, location);
691 return;
692 };
693 check_equal(self, location, *f_ty);
694 }
695 ty::Pat(base, _) => check_equal(self, location, *base),
697 ty::Adt(adt_def, args) => {
698 if self.tcx.is_lang_item(adt_def.did(), LangItem::DynMetadata) {
700 self.fail(
701 location,
702 format!(
703 "You can't project to field {f:?} of `DynMetadata` because \
704 layout is weird and thinks it doesn't have fields."
705 ),
706 );
707 }
708
709 if adt_def.repr().simd() {
710 self.fail(
711 location,
712 format!(
713 "Projecting into SIMD type {adt_def:?} is banned by MCP#838"
714 ),
715 );
716 }
717
718 let var = parent_ty.variant_index.unwrap_or(FIRST_VARIANT);
719 let Some(field) = adt_def.variant(var).fields.get(f) else {
720 fail_out_of_bounds(self, location);
721 return;
722 };
723 check_equal(self, location, field.ty(self.tcx, args).skip_norm_wip());
724 }
725 ty::Closure(_, args) => {
726 let args = args.as_closure();
727 let Some(&f_ty) = args.upvar_tys().get(f.as_usize()) else {
728 fail_out_of_bounds(self, location);
729 return;
730 };
731 check_equal(self, location, f_ty);
732 }
733 ty::CoroutineClosure(_, args) => {
734 let args = args.as_coroutine_closure();
735 let Some(&f_ty) = args.upvar_tys().get(f.as_usize()) else {
736 fail_out_of_bounds(self, location);
737 return;
738 };
739 check_equal(self, location, f_ty);
740 }
741 &ty::Coroutine(def_id, args) => {
742 let f_ty = if let Some(var) = parent_ty.variant_index {
743 let layout = if def_id == self.caller_body.source.def_id() {
749 self.caller_body
750 .coroutine_layout_raw()
751 .or_else(|| self.tcx.coroutine_layout(def_id, args).ok())
752 } else if self.tcx.needs_coroutine_by_move_body_def_id(def_id)
753 && let ty::ClosureKind::FnOnce =
754 args.as_coroutine().kind_ty().to_opt_closure_kind().unwrap()
755 && self.caller_body.source.def_id()
756 == self.tcx.coroutine_by_move_body_def_id(def_id)
757 {
758 self.caller_body.coroutine_layout_raw()
760 } else {
761 self.tcx.coroutine_layout(def_id, args).ok()
762 };
763
764 let Some(layout) = layout else {
765 self.fail(
766 location,
767 format!("No coroutine layout for {parent_ty:?}"),
768 );
769 return;
770 };
771
772 let Some(&local) = layout.variant_fields[var].get(f) else {
773 fail_out_of_bounds(self, location);
774 return;
775 };
776
777 let Some(f_ty) = layout.field_tys.get(local) else {
778 self.fail(
779 location,
780 format!("Out of bounds local {local:?} for {parent_ty:?}"),
781 );
782 return;
783 };
784
785 ty::EarlyBinder::bind(f_ty.ty)
786 .instantiate(self.tcx, args)
787 .skip_norm_wip()
788 } else {
789 let Some(&f_ty) = args.as_coroutine().prefix_tys().get(f.index())
790 else {
791 fail_out_of_bounds(self, location);
792 return;
793 };
794
795 f_ty
796 };
797
798 check_equal(self, location, f_ty);
799 }
800 _ => {
801 self.fail(location, format!("{:?} does not have fields", parent_ty.ty));
802 }
803 }
804 }
805 ProjectionElem::Index(index) => {
806 let indexed_ty = place_ref.ty(&self.body.local_decls, self.tcx).ty;
807 match indexed_ty.kind() {
808 ty::Array(_, _) | ty::Slice(_) => {}
809 _ => self.fail(location, format!("{indexed_ty:?} cannot be indexed")),
810 }
811
812 let index_ty = self.body.local_decls[index].ty;
813 if index_ty != self.tcx.types.usize {
814 self.fail(location, format!("bad index ({index_ty} != usize)"))
815 }
816 }
817 ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
818 let indexed_ty = place_ref.ty(&self.body.local_decls, self.tcx).ty;
819 match indexed_ty.kind() {
820 ty::Array(_, _) => {
821 if from_end {
822 self.fail(location, "arrays should not be indexed from end");
823 }
824 }
825 ty::Slice(_) => {}
826 _ => self.fail(location, format!("{indexed_ty:?} cannot be indexed")),
827 }
828
829 if from_end {
830 if offset > min_length {
831 self.fail(
832 location,
833 format!(
834 "constant index with offset -{offset} out of bounds of min length {min_length}"
835 ),
836 );
837 }
838 } else {
839 if offset >= min_length {
840 self.fail(
841 location,
842 format!(
843 "constant index with offset {offset} out of bounds of min length {min_length}"
844 ),
845 );
846 }
847 }
848 }
849 ProjectionElem::Subslice { from, to, from_end } => {
850 let indexed_ty = place_ref.ty(&self.body.local_decls, self.tcx).ty;
851 match indexed_ty.kind() {
852 ty::Array(_, _) => {
853 if from_end {
854 self.fail(location, "arrays should not be subsliced from end");
855 }
856 }
857 ty::Slice(_) => {
858 if !from_end {
859 self.fail(location, "slices should be subsliced from end");
860 }
861 }
862 _ => self.fail(location, format!("{indexed_ty:?} cannot be indexed")),
863 }
864
865 if !from_end && from > to {
866 self.fail(location, "backwards subslice {from}..{to}");
867 }
868 }
869 ProjectionElem::OpaqueCast(ty)
870 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) =>
871 {
872 self.fail(
873 location,
874 format!("explicit opaque type cast to `{ty}` after `PostAnalysisNormalize`"),
875 )
876 }
877 ProjectionElem::UnwrapUnsafeBinder(unwrapped_ty) => {
878 let binder_ty = place_ref.ty(&self.body.local_decls, self.tcx);
879 let ty::UnsafeBinder(binder_ty) = *binder_ty.ty.kind() else {
880 self.fail(
881 location,
882 format!("WrapUnsafeBinder does not produce a ty::UnsafeBinder"),
883 );
884 return;
885 };
886 let binder_inner_ty = self.tcx.instantiate_bound_regions_with_erased(*binder_ty);
887 if !self.mir_assign_valid_types(unwrapped_ty, binder_inner_ty) {
888 self.fail(
889 location,
890 format!(
891 "Cannot unwrap unsafe binder {binder_ty:?} into type {unwrapped_ty}"
892 ),
893 );
894 }
895 }
896 _ => {}
897 }
898 self.super_projection_elem(place_ref, elem, context, location);
899 }
900
901 fn visit_var_debug_info(&mut self, debuginfo: &VarDebugInfo<'tcx>) {
902 if let Some(VarDebugInfoFragment { ty, ref projection }) = debuginfo.composite {
903 if ty.is_union() || ty.is_enum() {
904 self.fail(
905 START_BLOCK.start_location(),
906 format!("invalid type {ty} in debuginfo for {:?}", debuginfo.name),
907 );
908 }
909 if projection.is_empty() {
910 self.fail(
911 START_BLOCK.start_location(),
912 format!("invalid empty projection in debuginfo for {:?}", debuginfo.name),
913 );
914 }
915 if projection.iter().any(|p| !matches!(p, PlaceElem::Field(..))) {
916 self.fail(
917 START_BLOCK.start_location(),
918 format!(
919 "illegal projection {:?} in debuginfo for {:?}",
920 projection, debuginfo.name
921 ),
922 );
923 }
924 }
925 match debuginfo.value {
926 VarDebugInfoContents::Const(_) => {}
927 VarDebugInfoContents::Place(place) => {
928 if place.projection.iter().any(|p| !p.can_use_in_debuginfo()) {
929 self.fail(
930 START_BLOCK.start_location(),
931 format!("illegal place {:?} in debuginfo for {:?}", place, debuginfo.name),
932 );
933 }
934 }
935 }
936 self.super_var_debug_info(debuginfo);
937 }
938
    fn visit_place(&mut self, place: &Place<'tcx>, cntxt: PlaceContext, location: Location) {
        // Set off any `bug!`s in the type computation code for this place.
        let _ = place.ty(&self.body.local_decls, self.tcx);

        // In runtime MIR, `Deref` is only permitted as the very first
        // projection (debuginfo places are exempt from this rule).
        if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial)
            && place.projection.len() > 1
            && cntxt != PlaceContext::NonUse(NonUseContext::VarDebugInfo)
            && place.projection[1..].contains(&ProjectionElem::Deref)
        {
            self.fail(
                location,
                format!("place {place:?} has deref as a later projection (it is only permitted as the first projection)"),
            );
        }

        // Every `Downcast` must be immediately followed by a `Field`.
        // Note: the iterator is advanced past the element following each
        // `Downcast`, so a Downcast/Field pair is consumed as a unit.
        let mut projections_iter = place.projection.iter();
        while let Some(proj) = projections_iter.next() {
            if matches!(proj, ProjectionElem::Downcast(..)) {
                if !matches!(projections_iter.next(), Some(ProjectionElem::Field(..))) {
                    self.fail(
                        location,
                        format!(
                            "place {place:?} has `Downcast` projection not followed by `Field`"
                        ),
                    );
                }
            }
        }

        // `DerefTemp` locals may only be dereferenced (first projection is a
        // deref) or stored to directly as a bare local.
        if let ClearCrossCrate::Set(LocalInfo::DerefTemp) =
            self.body.local_decls[place.local].local_info
            && !place.is_indirect_first_projection()
        {
            if cntxt != PlaceContext::MutatingUse(MutatingUseContext::Store)
                || place.as_local().is_none()
            {
                self.fail(
                    location,
                    format!("`DerefTemp` locals must only be dereferenced or directly assigned to"),
                );
            }
        }

        // Before runtime MIR, a `Subslice` projection must not be followed by
        // another `ConstantIndex` or `Subslice`.
        if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial)
            && let Some(i) = place
                .projection
                .iter()
                .position(|elem| matches!(elem, ProjectionElem::Subslice { .. }))
            && let Some(tail) = place.projection.get(i + 1..)
            && tail.iter().any(|elem| {
                matches!(
                    elem,
                    ProjectionElem::ConstantIndex { .. } | ProjectionElem::Subslice { .. }
                )
            })
        {
            self.fail(
                location,
                format!("place {place:?} has `ConstantIndex` or `Subslice` after `Subslice`"),
            );
        }

        self.super_place(place, cntxt, location);
    }
1004
1005 fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
1006 macro_rules! check_kinds {
1007 ($t:expr, $text:literal, $typat:pat) => {
1008 if !matches!(($t).kind(), $typat) {
1009 self.fail(location, format!($text, $t));
1010 }
1011 };
1012 }
1013 match rvalue {
1014 Rvalue::Use(_, _) => {}
1015 Rvalue::CopyForDeref(_) => {
1016 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
1017 self.fail(location, "`CopyForDeref` should have been removed in runtime MIR");
1018 }
1019 }
1020 Rvalue::Aggregate(kind, fields) => match **kind {
1021 AggregateKind::Tuple => {}
1022 AggregateKind::Array(dest) => {
1023 for src in fields {
1024 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
1025 self.fail(location, "array field has the wrong type");
1026 }
1027 }
1028 }
1029 AggregateKind::Adt(def_id, idx, args, _, Some(field)) => {
1030 let adt_def = self.tcx.adt_def(def_id);
1031 assert!(adt_def.is_union());
1032 assert_eq!(idx, FIRST_VARIANT);
1033 let dest_ty = self.tcx.normalize_erasing_regions(
1034 self.typing_env,
1035 adt_def.non_enum_variant().fields[field].ty(self.tcx, args),
1036 );
1037 if let [field] = fields.raw.as_slice() {
1038 let src_ty = field.ty(self.body, self.tcx);
1039 if !self.mir_assign_valid_types(src_ty, dest_ty) {
1040 self.fail(location, "union field has the wrong type");
1041 }
1042 } else {
1043 self.fail(location, "unions should have one initialized field");
1044 }
1045 }
1046 AggregateKind::Adt(def_id, idx, args, _, None) => {
1047 let adt_def = self.tcx.adt_def(def_id);
1048 assert!(!adt_def.is_union());
1049 let variant = &adt_def.variants()[idx];
1050 if variant.fields.len() != fields.len() {
1051 self.fail(location, format!(
1052 "adt {def_id:?} has the wrong number of initialized fields, expected {}, found {}",
1053 fields.len(),
1054 variant.fields.len(),
1055 ));
1056 }
1057 for (src, dest) in std::iter::zip(fields, &variant.fields) {
1058 let dest_ty = self
1059 .tcx
1060 .normalize_erasing_regions(self.typing_env, dest.ty(self.tcx, args));
1061 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest_ty) {
1062 self.fail(location, "adt field has the wrong type");
1063 }
1064 }
1065 }
1066 AggregateKind::Closure(_, args) => {
1067 let upvars = args.as_closure().upvar_tys();
1068 if upvars.len() != fields.len() {
1069 self.fail(location, "closure has the wrong number of initialized fields");
1070 }
1071 for (src, dest) in std::iter::zip(fields, upvars) {
1072 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
1073 self.fail(location, "closure field has the wrong type");
1074 }
1075 }
1076 }
1077 AggregateKind::Coroutine(_, args) => {
1078 let upvars = args.as_coroutine().upvar_tys();
1079 if upvars.len() != fields.len() {
1080 self.fail(location, "coroutine has the wrong number of initialized fields");
1081 }
1082 for (src, dest) in std::iter::zip(fields, upvars) {
1083 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
1084 self.fail(location, "coroutine field has the wrong type");
1085 }
1086 }
1087 }
1088 AggregateKind::CoroutineClosure(_, args) => {
1089 let upvars = args.as_coroutine_closure().upvar_tys();
1090 if upvars.len() != fields.len() {
1091 self.fail(
1092 location,
1093 "coroutine-closure has the wrong number of initialized fields",
1094 );
1095 }
1096 for (src, dest) in std::iter::zip(fields, upvars) {
1097 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
1098 self.fail(location, "coroutine-closure field has the wrong type");
1099 }
1100 }
1101 }
1102 AggregateKind::RawPtr(pointee_ty, mutability) => {
1103 if !matches!(self.body.phase, MirPhase::Runtime(_)) {
1104 self.fail(location, "RawPtr should be in runtime MIR only");
1108 }
1109
1110 if let [data_ptr, metadata] = fields.raw.as_slice() {
1111 let data_ptr_ty = data_ptr.ty(self.body, self.tcx);
1112 let metadata_ty = metadata.ty(self.body, self.tcx);
1113 if let ty::RawPtr(in_pointee, in_mut) = data_ptr_ty.kind() {
1114 if *in_mut != mutability {
1115 self.fail(location, "input and output mutability must match");
1116 }
1117
1118 if !in_pointee.is_sized(self.tcx, self.typing_env) {
1120 self.fail(location, "input pointer must be thin");
1121 }
1122 } else {
1123 self.fail(
1124 location,
1125 "first operand to raw pointer aggregate must be a raw pointer",
1126 );
1127 }
1128
1129 if pointee_ty.is_slice() {
1131 if !self.mir_assign_valid_types(metadata_ty, self.tcx.types.usize) {
1132 self.fail(location, "slice metadata must be usize");
1133 }
1134 } else if pointee_ty.is_sized(self.tcx, self.typing_env) {
1135 if metadata_ty != self.tcx.types.unit {
1136 self.fail(location, "metadata for pointer-to-thin must be unit");
1137 }
1138 }
1139 } else {
1140 self.fail(location, "raw pointer aggregate must have 2 fields");
1141 }
1142 }
1143 },
1144 Rvalue::Ref(_, BorrowKind::Fake(_), _) => {
1145 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
1146 self.fail(
1147 location,
1148 "`Assign` statement with a `Fake` borrow should have been removed in runtime MIR",
1149 );
1150 }
1151 }
1152 Rvalue::Ref(..) | Rvalue::Reborrow(..) => {}
1153 Rvalue::BinaryOp(op, vals) => {
1154 use BinOp::*;
1155 let a = vals.0.ty(&self.body.local_decls, self.tcx);
1156 let b = vals.1.ty(&self.body.local_decls, self.tcx);
1157 if crate::util::binop_right_homogeneous(*op) {
1158 if let Eq | Lt | Le | Ne | Ge | Gt = op {
1159 if !self.mir_assign_valid_types(a, b) {
1161 self.fail(
1162 location,
1163 format!("Cannot {op:?} compare incompatible types {a} and {b}"),
1164 );
1165 }
1166 } else if a != b {
1167 self.fail(
1168 location,
1169 format!("Cannot perform binary op {op:?} on unequal types {a} and {b}"),
1170 );
1171 }
1172 }
1173
1174 match op {
1175 Offset => {
1176 check_kinds!(a, "Cannot offset non-pointer type {:?}", ty::RawPtr(..));
1177 if b != self.tcx.types.isize && b != self.tcx.types.usize {
1178 self.fail(location, format!("Cannot offset by non-isize type {b}"));
1179 }
1180 }
1181 Eq | Lt | Le | Ne | Ge | Gt => {
1182 for x in [a, b] {
1183 check_kinds!(
1184 x,
1185 "Cannot {op:?} compare type {:?}",
1186 ty::Bool
1187 | ty::Char
1188 | ty::Int(..)
1189 | ty::Uint(..)
1190 | ty::Float(..)
1191 | ty::RawPtr(..)
1192 | ty::FnPtr(..)
1193 )
1194 }
1195 }
1196 Cmp => {
1197 for x in [a, b] {
1198 check_kinds!(
1199 x,
1200 "Cannot three-way compare non-integer type {:?}",
1201 ty::Char | ty::Uint(..) | ty::Int(..)
1202 )
1203 }
1204 }
1205 AddUnchecked | AddWithOverflow | SubUnchecked | SubWithOverflow
1206 | MulUnchecked | MulWithOverflow | Shl | ShlUnchecked | Shr | ShrUnchecked => {
1207 for x in [a, b] {
1208 check_kinds!(
1209 x,
1210 "Cannot {op:?} non-integer type {:?}",
1211 ty::Uint(..) | ty::Int(..)
1212 )
1213 }
1214 }
1215 BitAnd | BitOr | BitXor => {
1216 for x in [a, b] {
1217 check_kinds!(
1218 x,
1219 "Cannot perform bitwise op {op:?} on type {:?}",
1220 ty::Uint(..) | ty::Int(..) | ty::Bool
1221 )
1222 }
1223 }
1224 Add | Sub | Mul | Div | Rem => {
1225 for x in [a, b] {
1226 check_kinds!(
1227 x,
1228 "Cannot perform arithmetic {op:?} on type {:?}",
1229 ty::Uint(..) | ty::Int(..) | ty::Float(..)
1230 )
1231 }
1232 }
1233 }
1234 }
1235 Rvalue::UnaryOp(op, operand) => {
1236 let a = operand.ty(&self.body.local_decls, self.tcx);
1237 match op {
1238 UnOp::Neg => {
1239 check_kinds!(a, "Cannot negate type {:?}", ty::Int(..) | ty::Float(..))
1240 }
1241 UnOp::Not => {
1242 check_kinds!(
1243 a,
1244 "Cannot binary not type {:?}",
1245 ty::Int(..) | ty::Uint(..) | ty::Bool
1246 );
1247 }
1248 UnOp::PtrMetadata => {
1249 check_kinds!(
1250 a,
1251 "Cannot PtrMetadata non-pointer non-reference type {:?}",
1252 ty::RawPtr(..) | ty::Ref(..)
1253 );
1254 }
1255 }
1256 }
1257 Rvalue::Cast(kind, operand, target_type) => {
1258 let op_ty = operand.ty(self.body, self.tcx);
1259 match kind {
1260 CastKind::PointerWithExposedProvenance | CastKind::PointerExposeProvenance => {}
1262 CastKind::PointerCoercion(PointerCoercion::ReifyFnPointer(_), _) => {
1263 check_kinds!(
1265 op_ty,
1266 "CastKind::{kind:?} input must be a fn item, not {:?}",
1267 ty::FnDef(..)
1268 );
1269 check_kinds!(
1270 target_type,
1271 "CastKind::{kind:?} output must be a fn pointer, not {:?}",
1272 ty::FnPtr(..)
1273 );
1274 }
1275 CastKind::PointerCoercion(PointerCoercion::UnsafeFnPointer, _) => {
1276 check_kinds!(
1278 op_ty,
1279 "CastKind::{kind:?} input must be a fn pointer, not {:?}",
1280 ty::FnPtr(..)
1281 );
1282 check_kinds!(
1283 target_type,
1284 "CastKind::{kind:?} output must be a fn pointer, not {:?}",
1285 ty::FnPtr(..)
1286 );
1287 }
1288 CastKind::PointerCoercion(PointerCoercion::ClosureFnPointer(..), _) => {
1289 check_kinds!(
1291 op_ty,
1292 "CastKind::{kind:?} input must be a closure, not {:?}",
1293 ty::Closure(..)
1294 );
1295 check_kinds!(
1296 target_type,
1297 "CastKind::{kind:?} output must be a fn pointer, not {:?}",
1298 ty::FnPtr(..)
1299 );
1300 }
1301 CastKind::PointerCoercion(PointerCoercion::MutToConstPointer, _) => {
1302 check_kinds!(
1304 op_ty,
1305 "CastKind::{kind:?} input must be a raw mut pointer, not {:?}",
1306 ty::RawPtr(_, Mutability::Mut)
1307 );
1308 check_kinds!(
1309 target_type,
1310 "CastKind::{kind:?} output must be a raw const pointer, not {:?}",
1311 ty::RawPtr(_, Mutability::Not)
1312 );
1313 if self.body.phase >= MirPhase::Analysis(AnalysisPhase::PostCleanup) {
1314 self.fail(location, format!("After borrowck, MIR disallows {kind:?}"));
1315 }
1316 }
1317 CastKind::PointerCoercion(PointerCoercion::ArrayToPointer, _) => {
1318 check_kinds!(
1320 op_ty,
1321 "CastKind::{kind:?} input must be a raw pointer, not {:?}",
1322 ty::RawPtr(..)
1323 );
1324 check_kinds!(
1325 target_type,
1326 "CastKind::{kind:?} output must be a raw pointer, not {:?}",
1327 ty::RawPtr(..)
1328 );
1329 if self.body.phase >= MirPhase::Analysis(AnalysisPhase::PostCleanup) {
1330 self.fail(location, format!("After borrowck, MIR disallows {kind:?}"));
1331 }
1332 }
1333 CastKind::PointerCoercion(PointerCoercion::Unsize, _) => {
1334 if !self.predicate_must_hold_modulo_regions(ty::TraitRef::new(
1337 self.tcx,
1338 self.tcx.require_lang_item(
1339 LangItem::CoerceUnsized,
1340 self.body.source_info(location).span,
1341 ),
1342 [op_ty, *target_type],
1343 )) {
1344 self.fail(location, format!("Unsize coercion, but `{op_ty}` isn't coercible to `{target_type}`"));
1345 }
1346 }
1347 CastKind::IntToInt | CastKind::IntToFloat => {
1348 let input_valid = op_ty.is_integral() || op_ty.is_char() || op_ty.is_bool();
1349 let target_valid = target_type.is_numeric() || target_type.is_char();
1350 if !input_valid || !target_valid {
1351 self.fail(
1352 location,
1353 format!("Wrong cast kind {kind:?} for the type {op_ty}"),
1354 );
1355 }
1356 }
1357 CastKind::FnPtrToPtr => {
1358 check_kinds!(
1359 op_ty,
1360 "CastKind::{kind:?} input must be a fn pointer, not {:?}",
1361 ty::FnPtr(..)
1362 );
1363 check_kinds!(
1364 target_type,
1365 "CastKind::{kind:?} output must be a raw pointer, not {:?}",
1366 ty::RawPtr(..)
1367 );
1368 }
1369 CastKind::PtrToPtr => {
1370 check_kinds!(
1371 op_ty,
1372 "CastKind::{kind:?} input must be a raw pointer, not {:?}",
1373 ty::RawPtr(..)
1374 );
1375 check_kinds!(
1376 target_type,
1377 "CastKind::{kind:?} output must be a raw pointer, not {:?}",
1378 ty::RawPtr(..)
1379 );
1380 }
1381 CastKind::FloatToFloat | CastKind::FloatToInt => {
1382 if !op_ty.is_floating_point() || !target_type.is_numeric() {
1383 self.fail(
1384 location,
1385 format!(
1386 "Trying to cast non 'Float' as {kind:?} into {target_type:?}"
1387 ),
1388 );
1389 }
1390 }
1391 CastKind::Transmute => {
1392 if !self
1396 .tcx
1397 .normalize_erasing_regions(
1398 self.typing_env,
1399 Unnormalized::new_wip(op_ty),
1400 )
1401 .is_sized(self.tcx, self.typing_env)
1402 {
1403 self.fail(
1404 location,
1405 format!("Cannot transmute from non-`Sized` type {op_ty}"),
1406 );
1407 }
1408 if !self
1409 .tcx
1410 .normalize_erasing_regions(
1411 self.typing_env,
1412 Unnormalized::new_wip(*target_type),
1413 )
1414 .is_sized(self.tcx, self.typing_env)
1415 {
1416 self.fail(
1417 location,
1418 format!("Cannot transmute to non-`Sized` type {target_type:?}"),
1419 );
1420 }
1421 }
1422 CastKind::Subtype => {
1423 if !util::sub_types(self.tcx, self.typing_env, op_ty, *target_type) {
1424 self.fail(
1425 location,
1426 format!("Failed subtyping {op_ty} and {target_type}"),
1427 )
1428 }
1429 }
1430 }
1431 }
1432 Rvalue::Repeat(_, _)
1433 | Rvalue::ThreadLocalRef(_)
1434 | Rvalue::RawPtr(_, _)
1435 | Rvalue::Discriminant(_) => {}
1436
1437 Rvalue::WrapUnsafeBinder(op, ty) => {
1438 let unwrapped_ty = op.ty(self.body, self.tcx);
1439 let ty::UnsafeBinder(binder_ty) = *ty.kind() else {
1440 self.fail(
1441 location,
1442 format!("WrapUnsafeBinder does not produce a ty::UnsafeBinder"),
1443 );
1444 return;
1445 };
1446 let binder_inner_ty = self.tcx.instantiate_bound_regions_with_erased(*binder_ty);
1447 if !self.mir_assign_valid_types(unwrapped_ty, binder_inner_ty) {
1448 self.fail(
1449 location,
1450 format!("Cannot wrap {unwrapped_ty} into unsafe binder {binder_ty:?}"),
1451 );
1452 }
1453 }
1454 }
1455 self.super_rvalue(rvalue, location);
1456 }
1457
1458 fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
1459 match &statement.kind {
1460 StatementKind::Assign((dest, rvalue)) => {
1461 let left_ty = dest.ty(&self.body.local_decls, self.tcx).ty;
1463 let right_ty = rvalue.ty(&self.body.local_decls, self.tcx);
1464
1465 if !self.mir_assign_valid_types(right_ty, left_ty) {
1466 self.fail(
1467 location,
1468 format!(
1469 "encountered `{:?}` with incompatible types:\n\
1470 left-hand side has type: {}\n\
1471 right-hand side has type: {}",
1472 statement.kind, left_ty, right_ty,
1473 ),
1474 );
1475 }
1476
1477 if let Some(local) = dest.as_local()
1478 && let ClearCrossCrate::Set(LocalInfo::DerefTemp) =
1479 self.body.local_decls[local].local_info
1480 && !matches!(rvalue, Rvalue::CopyForDeref(_))
1481 {
1482 self.fail(location, "assignment to a `DerefTemp` must use `CopyForDeref`")
1483 }
1484 }
1485 StatementKind::AscribeUserType(..) => {
1486 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
1487 self.fail(
1488 location,
1489 "`AscribeUserType` should have been removed after drop lowering phase",
1490 );
1491 }
1492 }
1493 StatementKind::FakeRead(..) => {
1494 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
1495 self.fail(
1496 location,
1497 "`FakeRead` should have been removed after drop lowering phase",
1498 );
1499 }
1500 }
1501 StatementKind::Intrinsic(NonDivergingIntrinsic::Assume(op)) => {
1502 let ty = op.ty(&self.body.local_decls, self.tcx);
1503 if !ty.is_bool() {
1504 self.fail(
1505 location,
1506 format!("`assume` argument must be `bool`, but got: `{ty}`"),
1507 );
1508 }
1509 }
1510 StatementKind::Intrinsic(NonDivergingIntrinsic::CopyNonOverlapping(
1511 CopyNonOverlapping { src, dst, count },
1512 )) => {
1513 let src_ty = src.ty(&self.body.local_decls, self.tcx);
1514 let op_src_ty = if let Some(src_deref) = src_ty.builtin_deref(true) {
1515 src_deref
1516 } else {
1517 self.fail(
1518 location,
1519 format!("Expected src to be ptr in copy_nonoverlapping, got: {src_ty}"),
1520 );
1521 return;
1522 };
1523 let dst_ty = dst.ty(&self.body.local_decls, self.tcx);
1524 let op_dst_ty = if let Some(dst_deref) = dst_ty.builtin_deref(true) {
1525 dst_deref
1526 } else {
1527 self.fail(
1528 location,
1529 format!("Expected dst to be ptr in copy_nonoverlapping, got: {dst_ty}"),
1530 );
1531 return;
1532 };
1533 if !self.mir_assign_valid_types(op_src_ty, op_dst_ty) {
1536 self.fail(location, format!("bad arg ({op_src_ty} != {op_dst_ty})"));
1537 }
1538
1539 let op_cnt_ty = count.ty(&self.body.local_decls, self.tcx);
1540 if op_cnt_ty != self.tcx.types.usize {
1541 self.fail(location, format!("bad arg ({op_cnt_ty} != usize)"))
1542 }
1543 }
1544 StatementKind::SetDiscriminant { place, .. } => {
1545 if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial) {
1546 self.fail(location, "`SetDiscriminant`is not allowed until deaggregation");
1547 }
1548 let pty = place.ty(&self.body.local_decls, self.tcx).ty;
1549 if !matches!(
1550 pty.kind(),
1551 ty::Adt(..)
1552 | ty::Coroutine(..)
1553 | ty::Alias(ty::AliasTy { kind: ty::Opaque { .. }, .. })
1554 ) {
1555 self.fail(
1556 location,
1557 format!(
1558 "`SetDiscriminant` is only allowed on ADTs and coroutines, not {pty}"
1559 ),
1560 );
1561 }
1562 }
1563 StatementKind::StorageLive(_)
1564 | StatementKind::StorageDead(_)
1565 | StatementKind::Coverage(_)
1566 | StatementKind::ConstEvalCounter
1567 | StatementKind::PlaceMention(..)
1568 | StatementKind::BackwardIncompatibleDropHint { .. }
1569 | StatementKind::Nop => {}
1570 }
1571
1572 self.super_statement(statement, location);
1573 }
1574
    /// Type-checks terminators: `SwitchInt` discriminant/branch-value sizing,
    /// `Call`/`TailCall` callee types, and `Assert` condition types.
    fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
        match &terminator.kind {
            TerminatorKind::SwitchInt { targets, discr } => {
                let switch_ty = discr.ty(&self.body.local_decls, self.tcx);

                let target_width = self.tcx.sess.target.pointer_width;

                // Bit-size of the discriminant type, used below to verify that
                // every branch value fits in it.
                let size = Size::from_bits(match switch_ty.kind() {
                    ty::Uint(uint) => uint.normalize(target_width).bit_width().unwrap(),
                    ty::Int(int) => int.normalize(target_width).bit_width().unwrap(),
                    ty::Char => 32,
                    ty::Bool => 1,
                    other => bug!("unhandled type: {:?}", other),
                });

                for (value, _) in targets.iter() {
                    // A branch value wider than the discriminant type can never
                    // be produced by `discr`, so reject it.
                    if ScalarInt::try_from_uint(value, size).is_none() {
                        self.fail(
                            location,
                            format!("the value {value:#x} is not a proper {switch_ty}"),
                        )
                    }
                }
            }
            TerminatorKind::Call { func, .. } | TerminatorKind::TailCall { func, .. } => {
                // The callee operand must be of a callable type.
                let func_ty = func.ty(&self.body.local_decls, self.tcx);
                match func_ty.kind() {
                    ty::FnPtr(..) | ty::FnDef(..) => {}
                    _ => self.fail(
                        location,
                        format!(
                            "encountered non-callable type {func_ty} in `{}` terminator",
                            terminator.kind.name()
                        ),
                    ),
                }

                if let TerminatorKind::TailCall { .. } = terminator.kind {
                    // NOTE(review): this arm is empty — presumably tail-call
                    // specific checks belonged here; confirm against history.
                }
            }
            TerminatorKind::Assert { cond, .. } => {
                // The assertion condition must be `bool`.
                let cond_ty = cond.ty(&self.body.local_decls, self.tcx);
                if cond_ty != self.tcx.types.bool {
                    self.fail(
                        location,
                        format!(
                            "encountered non-boolean condition of type {cond_ty} in `Assert` terminator"
                        ),
                    );
                }
            }
            // No extra type-level invariants are checked for these here.
            TerminatorKind::Goto { .. }
            | TerminatorKind::Drop { .. }
            | TerminatorKind::Yield { .. }
            | TerminatorKind::FalseEdge { .. }
            | TerminatorKind::FalseUnwind { .. }
            | TerminatorKind::InlineAsm { .. }
            | TerminatorKind::CoroutineDrop
            | TerminatorKind::UnwindResume
            | TerminatorKind::UnwindTerminate(_)
            | TerminatorKind::Return
            | TerminatorKind::Unreachable => {}
        }

        self.super_terminator(terminator, location);
    }
1643
1644 fn visit_local_decl(&mut self, local: Local, local_decl: &LocalDecl<'tcx>) {
1645 if let ClearCrossCrate::Set(LocalInfo::DerefTemp) = local_decl.local_info {
1646 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
1647 self.fail(
1648 START_BLOCK.start_location(),
1649 "`DerefTemp` should have been removed in runtime MIR",
1650 );
1651 } else if local_decl.ty.builtin_deref(true).is_none() {
1652 self.fail(
1653 START_BLOCK.start_location(),
1654 "`DerefTemp` should only be used for dereferenceable types",
1655 )
1656 }
1657 }
1658
1659 self.super_local_decl(local, local_decl);
1660 }
1661}
1662
1663pub(super) fn validate_debuginfos<'tcx>(body: &Body<'tcx>) -> Vec<(Location, String)> {
1664 let mut debuginfo_checker =
1665 DebuginfoChecker { debuginfo_locals: debuginfo_locals(body), failures: Vec::new() };
1666 debuginfo_checker.visit_body(body);
1667 debuginfo_checker.failures
1668}
1669
/// Visitor used by `validate_debuginfos` to check that statement-level
/// debuginfo only mentions locals that are tracked by debuginfo.
struct DebuginfoChecker {
    // Set of locals that have debuginfo, computed by `debuginfo_locals`.
    debuginfo_locals: DenseBitSet<Local>,
    // Accumulated (location, message) pairs for each violation found.
    failures: Vec<(Location, String)>,
}
1674
1675impl<'tcx> Visitor<'tcx> for DebuginfoChecker {
1676 fn visit_statement_debuginfo(
1677 &mut self,
1678 stmt_debuginfo: &StmtDebugInfo<'tcx>,
1679 location: Location,
1680 ) {
1681 let local = match stmt_debuginfo {
1682 StmtDebugInfo::AssignRef(local, _) | StmtDebugInfo::InvalidAssign(local) => *local,
1683 };
1684 if !self.debuginfo_locals.contains(local) {
1685 self.failures.push((location, format!("{local:?} is not in debuginfo")));
1686 }
1687 }
1688}