1use rustc_abi::{ExternAbi, FIRST_VARIANT, Size};
4use rustc_data_structures::fx::{FxHashMap, FxHashSet};
5use rustc_hir::LangItem;
6use rustc_hir::attrs::InlineAttr;
7use rustc_index::IndexVec;
8use rustc_index::bit_set::DenseBitSet;
9use rustc_infer::infer::TyCtxtInferExt;
10use rustc_infer::traits::{Obligation, ObligationCause};
11use rustc_middle::mir::coverage::CoverageKind;
12use rustc_middle::mir::visit::{MutatingUseContext, NonUseContext, PlaceContext, Visitor};
13use rustc_middle::mir::*;
14use rustc_middle::ty::adjustment::PointerCoercion;
15use rustc_middle::ty::print::with_no_trimmed_paths;
16use rustc_middle::ty::{
17 self, CoroutineArgsExt, InstanceKind, ScalarInt, Ty, TyCtxt, TypeVisitableExt, Unnormalized,
18 Upcast, Variance,
19};
20use rustc_middle::{bug, span_bug};
21use rustc_mir_dataflow::debuginfo::debuginfo_locals;
22use rustc_trait_selection::traits::ObligationCtxt;
23
24use crate::util::{self, most_packed_projection};
25
/// Kind of a control-flow edge, used to enforce the cleanup-block discipline:
/// cleanup blocks may only be entered from non-cleanup blocks via unwind edges.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum EdgeKind {
    /// Edge taken when unwinding (e.g. the target of `UnwindAction::Cleanup`).
    Unwind,
    /// Ordinary (non-unwinding) control-flow edge.
    Normal,
}
31
/// Pass that checks a MIR body against the invariants of its current phase.
pub(super) struct Validator {
    /// Description of the point at which validation runs (e.g. which pass just
    /// finished); included in every "broken MIR" diagnostic.
    pub when: String,
}
36
impl<'tcx> crate::MirPass<'tcx> for Validator {
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        // Intrinsic and virtual-call shims have no ordinary MIR body to check.
        if matches!(body.source.instance, InstanceKind::Intrinsic(..) | InstanceKind::Virtual(..)) {
            return;
        }
        let def_id = body.source.def_id();
        let typing_env = body.typing_env(tcx);
        // Determine whether this body is allowed to unwind. Before late runtime
        // MIR, and for non-fn-like items, conservatively assume it can.
        let can_unwind = if body.phase <= MirPhase::Runtime(RuntimePhase::Initial) {
            true
        } else if !tcx.def_kind(def_id).is_fn_like() {
            true
        } else {
            // Derive the ABI from the body's type to ask `fn_can_unwind`.
            let body_ty = tcx.type_of(def_id).skip_binder();
            let body_abi = match body_ty.kind() {
                ty::FnDef(..) => body_ty.fn_sig(tcx).abi(),
                ty::Closure(..) => ExternAbi::RustCall,
                ty::CoroutineClosure(..) => ExternAbi::RustCall,
                ty::Coroutine(..) => ExternAbi::Rust,
                // Skip validation entirely for bodies with type errors.
                ty::Error(_) => return,
                _ => span_bug!(body.span, "unexpected body ty: {body_ty}"),
            };

            ty::layout::fn_can_unwind(tcx, Some(def_id), body_abi)
        };

        let mut cfg_checker = CfgChecker {
            when: &self.when,
            body,
            tcx,
            unwind_edge_count: 0,
            reachable_blocks: traversal::reachable_as_bitset(body),
            value_cache: FxHashSet::default(),
            can_unwind,
        };
        cfg_checker.visit_body(body);
        cfg_checker.check_cleanup_control_flow();

        // Type-level checks; failures are reported through the CFG checker so
        // they get the same "broken MIR" formatting.
        for (location, msg) in validate_types(tcx, typing_env, body, body) {
            cfg_checker.fail(location, msg);
        }

        for (location, msg) in validate_debuginfos(body) {
            cfg_checker.fail(location, msg);
        }

        // Runtime MIR of ordinary items must have had its regions erased.
        if let MirPhase::Runtime(_) = body.phase
            && let ty::InstanceKind::Item(_) = body.source.instance
            && body.has_free_regions()
        {
            cfg_checker.fail(
                Location::START,
                format!("Free regions in optimized {} MIR", body.phase.name()),
            );
        }
    }

    fn is_required(&self) -> bool {
        true
    }
}
105
/// Checks control-flow-graph invariants of a MIR body: edge validity,
/// cleanup-block discipline, and the structure of unwind control flow.
struct CfgChecker<'a, 'tcx> {
    /// Description of the validation point, used in error messages.
    when: &'a str,
    body: &'a Body<'tcx>,
    tcx: TyCtxt<'tcx>,
    /// Number of unwind edges seen while visiting; `check_cleanup_control_flow`
    /// only has work to do when there is more than one.
    unwind_edge_count: usize,
    /// Blocks reachable from the start block; unreachable cleanup blocks are
    /// skipped by the cleanup control-flow check.
    reachable_blocks: DenseBitSet<BasicBlock>,
    /// Scratch set used to detect duplicate `SwitchInt` values.
    value_cache: FxHashSet<u128>,
    /// Whether this body may unwind (computed in `run_pass`).
    can_unwind: bool,
}
123
impl<'a, 'tcx> CfgChecker<'a, 'tcx> {
    /// Reports broken MIR by ICEing at `location`, unless compilation has
    /// already errored (in which case malformed MIR is expected and tolerated).
    #[track_caller]
    fn fail(&self, location: Location, msg: impl AsRef<str>) {
        // We might see broken MIR when other errors have already occurred.
        if self.tcx.dcx().has_errors().is_none() {
            span_bug!(
                self.body.source_info(location).span,
                "broken MIR in {:?} ({}) at {:?}:\n{}",
                self.body.source.instance,
                self.when,
                location,
                msg.as_ref(),
            );
        }
    }

    /// Checks that the edge from `location`'s block to `bb` is a valid edge of
    /// kind `edge_kind`, and counts unwind edges for the cleanup-CFG check.
    fn check_edge(&mut self, location: Location, bb: BasicBlock, edge_kind: EdgeKind) {
        if bb == START_BLOCK {
            self.fail(location, "start block must not have predecessors")
        }
        if let Some(bb) = self.body.basic_blocks.get(bb) {
            let src = self.body.basic_blocks.get(location.block).unwrap();
            match (src.is_cleanup, bb.is_cleanup, edge_kind) {
                // Normal edges may connect non-cleanup to non-cleanup, or
                // cleanup to cleanup.
                (false, false, EdgeKind::Normal)
                | (true, true, EdgeKind::Normal) => {}
                // Unwind edges may only enter cleanup from non-cleanup blocks.
                (false, true, EdgeKind::Unwind) => {
                    self.unwind_edge_count += 1;
                }
                // Anything else violates the cleanup discipline.
                _ => {
                    self.fail(
                        location,
                        format!(
                            "{:?} edge to {:?} violates unwind invariants (cleanup {:?} -> {:?})",
                            edge_kind,
                            bb,
                            src.is_cleanup,
                            bb.is_cleanup,
                        )
                    )
                }
            }
        } else {
            self.fail(location, format!("encountered jump to invalid basic block {bb:?}"))
        }
    }

    /// Checks that cleanup control flow, after contracting each cleanup block
    /// to its highest cleanup dominator, has at most one successor per node
    /// and contains no cycles.
    fn check_cleanup_control_flow(&self) {
        // With at most one unwind edge there is nothing to violate.
        if self.unwind_edge_count <= 1 {
            return;
        }
        let doms = self.body.basic_blocks.dominators();
        let mut post_contract_node = FxHashMap::default();
        // Reusable buffer recording the dominator path walked below.
        let mut dom_path = vec![];
        // Maps a cleanup block to its "root": the highest transitive dominator
        // that is still a cleanup block. Results are memoized for every block
        // visited along the way.
        let mut get_post_contract_node = |mut bb| {
            let root = loop {
                if let Some(root) = post_contract_node.get(&bb) {
                    break *root;
                }
                let parent = doms.immediate_dominator(bb).unwrap();
                dom_path.push(bb);
                if !self.body.basic_blocks[parent].is_cleanup {
                    break bb;
                }
                bb = parent;
            };
            for bb in dom_path.drain(..) {
                post_contract_node.insert(bb, root);
            }
            root
        };

        // For each contracted cleanup node, record its unique successor (if
        // any); two distinct successors is a violation.
        let mut parent = IndexVec::from_elem(None, &self.body.basic_blocks);
        for (bb, bb_data) in self.body.basic_blocks.iter_enumerated() {
            if !bb_data.is_cleanup || !self.reachable_blocks.contains(bb) {
                continue;
            }
            let bb = get_post_contract_node(bb);
            for s in bb_data.terminator().successors() {
                let s = get_post_contract_node(s);
                if s == bb {
                    continue;
                }
                let parent = &mut parent[bb];
                match parent {
                    None => {
                        *parent = Some(s);
                    }
                    Some(e) if *e == s => (),
                    Some(e) => self.fail(
                        Location { block: bb, statement_index: 0 },
                        format!(
                            "Cleanup control flow violation: The blocks dominated by {:?} have edges to both {:?} and {:?}",
                            bb,
                            s,
                            *e
                        )
                    ),
                }
            }
        }

        // Check for cycles in the contracted successor graph.
        let mut stack = FxHashSet::default();
        for (mut bb, parent) in parent.iter_enumerated_mut() {
            stack.clear();
            stack.insert(bb);
            loop {
                let Some(parent) = parent.take() else { break };
                let no_cycle = stack.insert(parent);
                if !no_cycle {
                    self.fail(
                        Location { block: bb, statement_index: 0 },
                        format!(
                            "Cleanup control flow violation: Cycle involving edge {bb:?} -> {parent:?}",
                        ),
                    );
                    break;
                }
                bb = parent;
            }
        }
    }

    /// Checks that an `UnwindAction` is legal for the block at `location`, and
    /// validates the unwind edge it implies (if any).
    fn check_unwind_edge(&mut self, location: Location, unwind: UnwindAction) {
        let is_cleanup = self.body.basic_blocks[location.block].is_cleanup;
        match unwind {
            UnwindAction::Cleanup(unwind) => {
                // Cleanup blocks may not themselves unwind into cleanup.
                if is_cleanup {
                    self.fail(location, "`UnwindAction::Cleanup` in cleanup block");
                }
                self.check_edge(location, unwind, EdgeKind::Unwind);
            }
            UnwindAction::Continue => {
                if is_cleanup {
                    self.fail(location, "`UnwindAction::Continue` in cleanup block");
                }

                // Propagating an unwind requires the body itself to unwind.
                if !self.can_unwind {
                    self.fail(location, "`UnwindAction::Continue` in no-unwind function");
                }
            }
            UnwindAction::Terminate(UnwindTerminateReason::InCleanup) => {
                if !is_cleanup {
                    self.fail(
                        location,
                        "`UnwindAction::Terminate(InCleanup)` in a non-cleanup block",
                    );
                }
            }
            UnwindAction::Unreachable | UnwindAction::Terminate(UnwindTerminateReason::Abi) => (),
        }
    }

    /// Returns whether a `Call` terminator's normal-return edge is a critical
    /// edge: it has an unwind successor too, and `target` has multiple
    /// predecessors.
    fn is_critical_call_edge(&self, target: Option<BasicBlock>, unwind: UnwindAction) -> bool {
        let Some(target) = target else { return false };
        matches!(unwind, UnwindAction::Cleanup(_) | UnwindAction::Terminate(_))
            && self.body.basic_blocks.predecessors()[target].len() > 1
    }
}
289
290impl<'a, 'tcx> Visitor<'tcx> for CfgChecker<'a, 'tcx> {
291 fn visit_local(&mut self, local: Local, _context: PlaceContext, location: Location) {
292 if self.body.local_decls.get(local).is_none() {
293 self.fail(
294 location,
295 format!("local {local:?} has no corresponding declaration in `body.local_decls`"),
296 );
297 }
298 }
299
300 fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
301 match &statement.kind {
302 StatementKind::AscribeUserType(..) => {
303 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
304 self.fail(
305 location,
306 "`AscribeUserType` should have been removed after drop lowering phase",
307 );
308 }
309 }
310 StatementKind::FakeRead(..) => {
311 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
312 self.fail(
313 location,
314 "`FakeRead` should have been removed after drop lowering phase",
315 );
316 }
317 }
318 StatementKind::SetDiscriminant { .. } => {
319 if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial) {
320 self.fail(location, "`SetDiscriminant`is not allowed until deaggregation");
321 }
322 }
323 StatementKind::Retag(kind, _) => {
324 if matches!(kind, RetagKind::TwoPhase) {
328 self.fail(location, format!("explicit `{kind:?}` is forbidden"));
329 }
330 }
331 StatementKind::Coverage(kind) => {
332 if self.body.phase >= MirPhase::Analysis(AnalysisPhase::PostCleanup)
333 && let CoverageKind::BlockMarker { .. } | CoverageKind::SpanMarker { .. } = kind
334 {
335 self.fail(
336 location,
337 format!("{kind:?} should have been removed after analysis"),
338 );
339 }
340 }
341 StatementKind::Assign(..)
342 | StatementKind::StorageLive(_)
343 | StatementKind::StorageDead(_)
344 | StatementKind::Intrinsic(_)
345 | StatementKind::ConstEvalCounter
346 | StatementKind::PlaceMention(..)
347 | StatementKind::BackwardIncompatibleDropHint { .. }
348 | StatementKind::Nop => {}
349 }
350
351 self.super_statement(statement, location);
352 }
353
354 fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
355 match &terminator.kind {
356 TerminatorKind::Goto { target } => {
357 self.check_edge(location, *target, EdgeKind::Normal);
358 }
359 TerminatorKind::SwitchInt { targets, discr: _ } => {
360 for (_, target) in targets.iter() {
361 self.check_edge(location, target, EdgeKind::Normal);
362 }
363 self.check_edge(location, targets.otherwise(), EdgeKind::Normal);
364
365 self.value_cache.clear();
366 self.value_cache.extend(targets.iter().map(|(value, _)| value));
367 let has_duplicates = targets.iter().len() != self.value_cache.len();
368 if has_duplicates {
369 self.fail(
370 location,
371 format!(
372 "duplicated values in `SwitchInt` terminator: {:?}",
373 terminator.kind,
374 ),
375 );
376 }
377 }
378 TerminatorKind::Drop { target, unwind, drop, .. } => {
379 self.check_edge(location, *target, EdgeKind::Normal);
380 self.check_unwind_edge(location, *unwind);
381 if let Some(drop) = drop {
382 self.check_edge(location, *drop, EdgeKind::Normal);
383 }
384 }
385 TerminatorKind::Call { func, args, .. }
386 | TerminatorKind::TailCall { func, args, .. } => {
387 if let TerminatorKind::Call { target, unwind, destination, .. } = terminator.kind {
389 if let Some(target) = target {
390 self.check_edge(location, target, EdgeKind::Normal);
391 }
392 self.check_unwind_edge(location, unwind);
393
394 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Optimized)
400 && self.is_critical_call_edge(target, unwind)
401 {
402 self.fail(
403 location,
404 format!(
405 "encountered critical edge in `Call` terminator {:?}",
406 terminator.kind,
407 ),
408 );
409 }
410
411 if most_packed_projection(self.tcx, &self.body.local_decls, destination)
414 .is_some()
415 {
416 self.fail(
418 location,
419 format!(
420 "encountered packed place in `Call` terminator destination: {:?}",
421 terminator.kind,
422 ),
423 );
424 }
425 }
426
427 for arg in args {
428 if let Operand::Move(place) = &arg.node {
429 if most_packed_projection(self.tcx, &self.body.local_decls, *place)
430 .is_some()
431 {
432 self.fail(
434 location,
435 format!(
436 "encountered `Move` of a packed place in `Call` terminator: {:?}",
437 terminator.kind,
438 ),
439 );
440 }
441 }
442 }
443
444 if let ty::FnDef(did, ..) = *func.ty(&self.body.local_decls, self.tcx).kind()
445 && self.body.phase >= MirPhase::Runtime(RuntimePhase::Optimized)
446 && matches!(self.tcx.codegen_fn_attrs(did).inline, InlineAttr::Force { .. })
447 {
448 self.fail(location, "`#[rustc_force_inline]`-annotated function not inlined");
449 }
450 }
451 TerminatorKind::Assert { target, unwind, .. } => {
452 self.check_edge(location, *target, EdgeKind::Normal);
453 self.check_unwind_edge(location, *unwind);
454 }
455 TerminatorKind::Yield { resume, drop, .. } => {
456 if self.body.coroutine.is_none() {
457 self.fail(location, "`Yield` cannot appear outside coroutine bodies");
458 }
459 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
460 self.fail(location, "`Yield` should have been replaced by coroutine lowering");
461 }
462 self.check_edge(location, *resume, EdgeKind::Normal);
463 if let Some(drop) = drop {
464 self.check_edge(location, *drop, EdgeKind::Normal);
465 }
466 }
467 TerminatorKind::FalseEdge { real_target, imaginary_target } => {
468 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
469 self.fail(
470 location,
471 "`FalseEdge` should have been removed after drop elaboration",
472 );
473 }
474 self.check_edge(location, *real_target, EdgeKind::Normal);
475 self.check_edge(location, *imaginary_target, EdgeKind::Normal);
476 }
477 TerminatorKind::FalseUnwind { real_target, unwind } => {
478 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
479 self.fail(
480 location,
481 "`FalseUnwind` should have been removed after drop elaboration",
482 );
483 }
484 self.check_edge(location, *real_target, EdgeKind::Normal);
485 self.check_unwind_edge(location, *unwind);
486 }
487 TerminatorKind::InlineAsm { targets, unwind, .. } => {
488 for &target in targets {
489 self.check_edge(location, target, EdgeKind::Normal);
490 }
491 self.check_unwind_edge(location, *unwind);
492 }
493 TerminatorKind::CoroutineDrop => {
494 if self.body.coroutine.is_none() {
495 self.fail(location, "`CoroutineDrop` cannot appear outside coroutine bodies");
496 }
497 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
498 self.fail(
499 location,
500 "`CoroutineDrop` should have been replaced by coroutine lowering",
501 );
502 }
503 }
504 TerminatorKind::UnwindResume => {
505 let bb = location.block;
506 if !self.body.basic_blocks[bb].is_cleanup {
507 self.fail(location, "Cannot `UnwindResume` from non-cleanup basic block")
508 }
509 if !self.can_unwind {
510 self.fail(location, "Cannot `UnwindResume` in a function that cannot unwind")
511 }
512 }
513 TerminatorKind::UnwindTerminate(_) => {
514 let bb = location.block;
515 if !self.body.basic_blocks[bb].is_cleanup {
516 self.fail(location, "Cannot `UnwindTerminate` from non-cleanup basic block")
517 }
518 }
519 TerminatorKind::Return => {
520 let bb = location.block;
521 if self.body.basic_blocks[bb].is_cleanup {
522 self.fail(location, "Cannot `Return` from cleanup basic block")
523 }
524 }
525 TerminatorKind::Unreachable => {}
526 }
527
528 self.super_terminator(terminator, location);
529 }
530
531 fn visit_source_scope(&mut self, scope: SourceScope) {
532 if self.body.source_scopes.get(scope).is_none() {
533 self.tcx.dcx().span_bug(
534 self.body.span,
535 format!(
536 "broken MIR in {:?} ({}):\ninvalid source scope {:?}",
537 self.body.source.instance, self.when, scope,
538 ),
539 );
540 }
541 }
542}
543
544pub(super) fn validate_types<'tcx>(
550 tcx: TyCtxt<'tcx>,
551 typing_env: ty::TypingEnv<'tcx>,
552 body: &Body<'tcx>,
553 caller_body: &Body<'tcx>,
554) -> Vec<(Location, String)> {
555 let mut type_checker = TypeChecker { body, caller_body, tcx, typing_env, failures: Vec::new() };
556 with_no_trimmed_paths!({
561 type_checker.visit_body(body);
562 });
563 type_checker.failures
564}
565
/// Checks type-level invariants of MIR: operand, place, and rvalue types,
/// field projections, and aggregate field types.
struct TypeChecker<'a, 'tcx> {
    /// The body being type-checked.
    body: &'a Body<'tcx>,
    // NOTE(review): equals `body` when called from `run_pass`; presumably
    // differs when validating a callee against the body it is inlined into
    // (used to resolve coroutine layouts) — confirm against other callers.
    caller_body: &'a Body<'tcx>,
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    /// Accumulated `(location, message)` validation failures.
    failures: Vec<(Location, String)>,
}
573
impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
    /// Records a validation failure at `location`.
    fn fail(&mut self, location: Location, msg: impl Into<String>) {
        self.failures.push((location, msg.into()));
    }

    /// Returns whether a value of type `src` may be assigned to a place of
    /// type `dest` in the current MIR phase.
    fn mir_assign_valid_types(&self, src: Ty<'tcx>, dest: Ty<'tcx>) -> bool {
        // Fast path: identical types are always assignable.
        if src == dest {
            return true;
        }

        // Types containing unrevealed opaques are accepted without relating.
        if (src, dest).has_opaque_types() {
            return true;
        }

        // Runtime MIR requires the types to match invariantly; earlier phases
        // still permit covariance (subtyping not yet made explicit).
        let variance = if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
            Variance::Invariant
        } else {
            Variance::Covariant
        };

        crate::util::relate_types(self.tcx, self.typing_env, variance, src, dest)
    }

    /// Evaluates whether `pred` holds, ignoring region constraints.
    fn predicate_must_hold_modulo_regions(
        &self,
        pred: impl Upcast<TyCtxt<'tcx>, ty::Predicate<'tcx>>,
    ) -> bool {
        let pred: ty::Predicate<'tcx> = pred.upcast(self.tcx);

        // Predicates mentioning opaque types are accepted without evaluation.
        if pred.has_opaque_types() {
            return true;
        }

        let (infcx, param_env) = self.tcx.infer_ctxt().build_with_typing_env(self.typing_env);
        let ocx = ObligationCtxt::new(&infcx);
        ocx.register_obligation(Obligation::new(
            self.tcx,
            ObligationCause::dummy(),
            param_env,
            pred,
        ));
        ocx.evaluate_obligations_error_on_ambiguity().is_empty()
    }
}
635
636impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
637 fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
638 if self.tcx.sess.opts.unstable_opts.validate_mir
640 && self.body.phase < MirPhase::Runtime(RuntimePhase::Initial)
641 {
642 if let Operand::Copy(place) = operand {
644 let ty = place.ty(&self.body.local_decls, self.tcx).ty;
645
646 if !self.tcx.type_is_copy_modulo_regions(self.typing_env, ty) {
647 self.fail(location, format!("`Operand::Copy` with non-`Copy` type {ty}"));
648 }
649 }
650 }
651
652 self.super_operand(operand, location);
653 }
654
655 fn visit_projection_elem(
656 &mut self,
657 place_ref: PlaceRef<'tcx>,
658 elem: PlaceElem<'tcx>,
659 context: PlaceContext,
660 location: Location,
661 ) {
662 match elem {
663 ProjectionElem::Deref
664 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) =>
665 {
666 let base_ty = place_ref.ty(&self.body.local_decls, self.tcx).ty;
667
668 if base_ty.is_box() {
669 self.fail(location, format!("{base_ty} dereferenced after ElaborateBoxDerefs"))
670 }
671 }
672 ProjectionElem::Field(f, ty) => {
673 let parent_ty = place_ref.ty(&self.body.local_decls, self.tcx);
674 let fail_out_of_bounds = |this: &mut Self, location| {
675 this.fail(location, format!("Out of bounds field {f:?} for {parent_ty:?}"));
676 };
677 let check_equal = |this: &mut Self, location, f_ty| {
678 if !this.mir_assign_valid_types(ty, f_ty) {
679 this.fail(
680 location,
681 format!(
682 "Field projection `{place_ref:?}.{f:?}` specified type `{ty}`, but actual type is `{f_ty}`"
683 )
684 )
685 }
686 };
687
688 let kind = match parent_ty.ty.kind() {
689 &ty::Alias(ty::AliasTy { kind: ty::Opaque { def_id }, args, .. }) => {
690 self.tcx.type_of(def_id).instantiate(self.tcx, args).skip_norm_wip().kind()
691 }
692 kind => kind,
693 };
694
695 match kind {
696 ty::Tuple(fields) => {
697 let Some(f_ty) = fields.get(f.as_usize()) else {
698 fail_out_of_bounds(self, location);
699 return;
700 };
701 check_equal(self, location, *f_ty);
702 }
703 ty::Pat(base, _) => check_equal(self, location, *base),
705 ty::Adt(adt_def, args) => {
706 if self.tcx.is_lang_item(adt_def.did(), LangItem::DynMetadata) {
708 self.fail(
709 location,
710 format!(
711 "You can't project to field {f:?} of `DynMetadata` because \
712 layout is weird and thinks it doesn't have fields."
713 ),
714 );
715 }
716
717 if adt_def.repr().simd() {
718 self.fail(
719 location,
720 format!(
721 "Projecting into SIMD type {adt_def:?} is banned by MCP#838"
722 ),
723 );
724 }
725
726 let var = parent_ty.variant_index.unwrap_or(FIRST_VARIANT);
727 let Some(field) = adt_def.variant(var).fields.get(f) else {
728 fail_out_of_bounds(self, location);
729 return;
730 };
731 check_equal(self, location, field.ty(self.tcx, args));
732 }
733 ty::Closure(_, args) => {
734 let args = args.as_closure();
735 let Some(&f_ty) = args.upvar_tys().get(f.as_usize()) else {
736 fail_out_of_bounds(self, location);
737 return;
738 };
739 check_equal(self, location, f_ty);
740 }
741 ty::CoroutineClosure(_, args) => {
742 let args = args.as_coroutine_closure();
743 let Some(&f_ty) = args.upvar_tys().get(f.as_usize()) else {
744 fail_out_of_bounds(self, location);
745 return;
746 };
747 check_equal(self, location, f_ty);
748 }
749 &ty::Coroutine(def_id, args) => {
750 let f_ty = if let Some(var) = parent_ty.variant_index {
751 let layout = if def_id == self.caller_body.source.def_id() {
757 self.caller_body
758 .coroutine_layout_raw()
759 .or_else(|| self.tcx.coroutine_layout(def_id, args).ok())
760 } else if self.tcx.needs_coroutine_by_move_body_def_id(def_id)
761 && let ty::ClosureKind::FnOnce =
762 args.as_coroutine().kind_ty().to_opt_closure_kind().unwrap()
763 && self.caller_body.source.def_id()
764 == self.tcx.coroutine_by_move_body_def_id(def_id)
765 {
766 self.caller_body.coroutine_layout_raw()
768 } else {
769 self.tcx.coroutine_layout(def_id, args).ok()
770 };
771
772 let Some(layout) = layout else {
773 self.fail(
774 location,
775 format!("No coroutine layout for {parent_ty:?}"),
776 );
777 return;
778 };
779
780 let Some(&local) = layout.variant_fields[var].get(f) else {
781 fail_out_of_bounds(self, location);
782 return;
783 };
784
785 let Some(f_ty) = layout.field_tys.get(local) else {
786 self.fail(
787 location,
788 format!("Out of bounds local {local:?} for {parent_ty:?}"),
789 );
790 return;
791 };
792
793 ty::EarlyBinder::bind(f_ty.ty)
794 .instantiate(self.tcx, args)
795 .skip_norm_wip()
796 } else {
797 let Some(&f_ty) = args.as_coroutine().prefix_tys().get(f.index())
798 else {
799 fail_out_of_bounds(self, location);
800 return;
801 };
802
803 f_ty
804 };
805
806 check_equal(self, location, f_ty);
807 }
808 _ => {
809 self.fail(location, format!("{:?} does not have fields", parent_ty.ty));
810 }
811 }
812 }
813 ProjectionElem::Index(index) => {
814 let indexed_ty = place_ref.ty(&self.body.local_decls, self.tcx).ty;
815 match indexed_ty.kind() {
816 ty::Array(_, _) | ty::Slice(_) => {}
817 _ => self.fail(location, format!("{indexed_ty:?} cannot be indexed")),
818 }
819
820 let index_ty = self.body.local_decls[index].ty;
821 if index_ty != self.tcx.types.usize {
822 self.fail(location, format!("bad index ({index_ty} != usize)"))
823 }
824 }
825 ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
826 let indexed_ty = place_ref.ty(&self.body.local_decls, self.tcx).ty;
827 match indexed_ty.kind() {
828 ty::Array(_, _) => {
829 if from_end {
830 self.fail(location, "arrays should not be indexed from end");
831 }
832 }
833 ty::Slice(_) => {}
834 _ => self.fail(location, format!("{indexed_ty:?} cannot be indexed")),
835 }
836
837 if from_end {
838 if offset > min_length {
839 self.fail(
840 location,
841 format!(
842 "constant index with offset -{offset} out of bounds of min length {min_length}"
843 ),
844 );
845 }
846 } else {
847 if offset >= min_length {
848 self.fail(
849 location,
850 format!(
851 "constant index with offset {offset} out of bounds of min length {min_length}"
852 ),
853 );
854 }
855 }
856 }
857 ProjectionElem::Subslice { from, to, from_end } => {
858 let indexed_ty = place_ref.ty(&self.body.local_decls, self.tcx).ty;
859 match indexed_ty.kind() {
860 ty::Array(_, _) => {
861 if from_end {
862 self.fail(location, "arrays should not be subsliced from end");
863 }
864 }
865 ty::Slice(_) => {
866 if !from_end {
867 self.fail(location, "slices should be subsliced from end");
868 }
869 }
870 _ => self.fail(location, format!("{indexed_ty:?} cannot be indexed")),
871 }
872
873 if !from_end && from > to {
874 self.fail(location, "backwards subslice {from}..{to}");
875 }
876 }
877 ProjectionElem::OpaqueCast(ty)
878 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) =>
879 {
880 self.fail(
881 location,
882 format!("explicit opaque type cast to `{ty}` after `PostAnalysisNormalize`"),
883 )
884 }
885 ProjectionElem::UnwrapUnsafeBinder(unwrapped_ty) => {
886 let binder_ty = place_ref.ty(&self.body.local_decls, self.tcx);
887 let ty::UnsafeBinder(binder_ty) = *binder_ty.ty.kind() else {
888 self.fail(
889 location,
890 format!("WrapUnsafeBinder does not produce a ty::UnsafeBinder"),
891 );
892 return;
893 };
894 let binder_inner_ty = self.tcx.instantiate_bound_regions_with_erased(*binder_ty);
895 if !self.mir_assign_valid_types(unwrapped_ty, binder_inner_ty) {
896 self.fail(
897 location,
898 format!(
899 "Cannot unwrap unsafe binder {binder_ty:?} into type {unwrapped_ty}"
900 ),
901 );
902 }
903 }
904 _ => {}
905 }
906 self.super_projection_elem(place_ref, elem, context, location);
907 }
908
    /// Validates debuginfo: composite fragments may only use `Field`
    /// projections on non-union, non-enum types, and debuginfo places may only
    /// contain projections meaningful in debuginfo.
    fn visit_var_debug_info(&mut self, debuginfo: &VarDebugInfo<'tcx>) {
        if let Some(box VarDebugInfoFragment { ty, ref projection }) = debuginfo.composite {
            if ty.is_union() || ty.is_enum() {
                self.fail(
                    START_BLOCK.start_location(),
                    format!("invalid type {ty} in debuginfo for {:?}", debuginfo.name),
                );
            }
            // A composite with no projection would be pointless.
            if projection.is_empty() {
                self.fail(
                    START_BLOCK.start_location(),
                    format!("invalid empty projection in debuginfo for {:?}", debuginfo.name),
                );
            }
            if projection.iter().any(|p| !matches!(p, PlaceElem::Field(..))) {
                self.fail(
                    START_BLOCK.start_location(),
                    format!(
                        "illegal projection {:?} in debuginfo for {:?}",
                        projection, debuginfo.name
                    ),
                );
            }
        }
        match debuginfo.value {
            VarDebugInfoContents::Const(_) => {}
            VarDebugInfoContents::Place(place) => {
                if place.projection.iter().any(|p| !p.can_use_in_debuginfo()) {
                    self.fail(
                        START_BLOCK.start_location(),
                        format!("illegal place {:?} in debuginfo for {:?}", place, debuginfo.name),
                    );
                }
            }
        }
        self.super_var_debug_info(debuginfo);
    }
946
    /// Checks place-level invariants: deref position, downcast shape,
    /// `DerefTemp` usage, and subslice ordering.
    fn visit_place(&mut self, place: &Place<'tcx>, cntxt: PlaceContext, location: Location) {
        // Computing the type triggers any `bug!`s in the type computation code.
        let _ = place.ty(&self.body.local_decls, self.tcx);

        // In runtime MIR, `Deref` is only permitted as the first projection
        // (debuginfo places are exempt).
        if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial)
            && place.projection.len() > 1
            && cntxt != PlaceContext::NonUse(NonUseContext::VarDebugInfo)
            && place.projection[1..].contains(&ProjectionElem::Deref)
        {
            self.fail(
                location,
                format!("place {place:?} has deref as a later projection (it is only permitted as the first projection)"),
            );
        }

        // Every `Downcast` must be immediately followed by a `Field`.
        let mut projections_iter = place.projection.iter();
        while let Some(proj) = projections_iter.next() {
            if matches!(proj, ProjectionElem::Downcast(..)) {
                if !matches!(projections_iter.next(), Some(ProjectionElem::Field(..))) {
                    self.fail(
                        location,
                        format!(
                            "place {place:?} has `Downcast` projection not followed by `Field`"
                        ),
                    );
                }
            }
        }

        // `DerefTemp` locals may only be dereferenced (first projection is a
        // deref) or be the direct, whole-local target of a store.
        if let ClearCrossCrate::Set(box LocalInfo::DerefTemp) =
            self.body.local_decls[place.local].local_info
            && !place.is_indirect_first_projection()
        {
            if cntxt != PlaceContext::MutatingUse(MutatingUseContext::Store)
                || place.as_local().is_none()
            {
                self.fail(
                    location,
                    format!("`DerefTemp` locals must only be dereferenced or directly assigned to"),
                );
            }
        }

        // Before runtime MIR, a `Subslice` may not be followed by another
        // `Subslice` or a `ConstantIndex`.
        if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial)
            && let Some(i) = place
                .projection
                .iter()
                .position(|elem| matches!(elem, ProjectionElem::Subslice { .. }))
            && let Some(tail) = place.projection.get(i + 1..)
            && tail.iter().any(|elem| {
                matches!(
                    elem,
                    ProjectionElem::ConstantIndex { .. } | ProjectionElem::Subslice { .. }
                )
            })
        {
            self.fail(
                location,
                format!("place {place:?} has `ConstantIndex` or `Subslice` after `Subslice`"),
            );
        }

        self.super_place(place, cntxt, location);
    }
1012
1013 fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
1014 macro_rules! check_kinds {
1015 ($t:expr, $text:literal, $typat:pat) => {
1016 if !matches!(($t).kind(), $typat) {
1017 self.fail(location, format!($text, $t));
1018 }
1019 };
1020 }
1021 match rvalue {
1022 Rvalue::Use(_) => {}
1023 Rvalue::CopyForDeref(_) => {
1024 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
1025 self.fail(location, "`CopyForDeref` should have been removed in runtime MIR");
1026 }
1027 }
1028 Rvalue::Aggregate(kind, fields) => match **kind {
1029 AggregateKind::Tuple => {}
1030 AggregateKind::Array(dest) => {
1031 for src in fields {
1032 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
1033 self.fail(location, "array field has the wrong type");
1034 }
1035 }
1036 }
1037 AggregateKind::Adt(def_id, idx, args, _, Some(field)) => {
1038 let adt_def = self.tcx.adt_def(def_id);
1039 assert!(adt_def.is_union());
1040 assert_eq!(idx, FIRST_VARIANT);
1041 let dest_ty = self.tcx.normalize_erasing_regions(
1042 self.typing_env,
1043 Unnormalized::new_wip(
1044 adt_def.non_enum_variant().fields[field].ty(self.tcx, args),
1045 ),
1046 );
1047 if let [field] = fields.raw.as_slice() {
1048 let src_ty = field.ty(self.body, self.tcx);
1049 if !self.mir_assign_valid_types(src_ty, dest_ty) {
1050 self.fail(location, "union field has the wrong type");
1051 }
1052 } else {
1053 self.fail(location, "unions should have one initialized field");
1054 }
1055 }
1056 AggregateKind::Adt(def_id, idx, args, _, None) => {
1057 let adt_def = self.tcx.adt_def(def_id);
1058 assert!(!adt_def.is_union());
1059 let variant = &adt_def.variants()[idx];
1060 if variant.fields.len() != fields.len() {
1061 self.fail(location, format!(
1062 "adt {def_id:?} has the wrong number of initialized fields, expected {}, found {}",
1063 fields.len(),
1064 variant.fields.len(),
1065 ));
1066 }
1067 for (src, dest) in std::iter::zip(fields, &variant.fields) {
1068 let dest_ty = self.tcx.normalize_erasing_regions(
1069 self.typing_env,
1070 Unnormalized::new_wip(dest.ty(self.tcx, args)),
1071 );
1072 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest_ty) {
1073 self.fail(location, "adt field has the wrong type");
1074 }
1075 }
1076 }
1077 AggregateKind::Closure(_, args) => {
1078 let upvars = args.as_closure().upvar_tys();
1079 if upvars.len() != fields.len() {
1080 self.fail(location, "closure has the wrong number of initialized fields");
1081 }
1082 for (src, dest) in std::iter::zip(fields, upvars) {
1083 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
1084 self.fail(location, "closure field has the wrong type");
1085 }
1086 }
1087 }
1088 AggregateKind::Coroutine(_, args) => {
1089 let upvars = args.as_coroutine().upvar_tys();
1090 if upvars.len() != fields.len() {
1091 self.fail(location, "coroutine has the wrong number of initialized fields");
1092 }
1093 for (src, dest) in std::iter::zip(fields, upvars) {
1094 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
1095 self.fail(location, "coroutine field has the wrong type");
1096 }
1097 }
1098 }
1099 AggregateKind::CoroutineClosure(_, args) => {
1100 let upvars = args.as_coroutine_closure().upvar_tys();
1101 if upvars.len() != fields.len() {
1102 self.fail(
1103 location,
1104 "coroutine-closure has the wrong number of initialized fields",
1105 );
1106 }
1107 for (src, dest) in std::iter::zip(fields, upvars) {
1108 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
1109 self.fail(location, "coroutine-closure field has the wrong type");
1110 }
1111 }
1112 }
1113 AggregateKind::RawPtr(pointee_ty, mutability) => {
1114 if !matches!(self.body.phase, MirPhase::Runtime(_)) {
1115 self.fail(location, "RawPtr should be in runtime MIR only");
1119 }
1120
1121 if let [data_ptr, metadata] = fields.raw.as_slice() {
1122 let data_ptr_ty = data_ptr.ty(self.body, self.tcx);
1123 let metadata_ty = metadata.ty(self.body, self.tcx);
1124 if let ty::RawPtr(in_pointee, in_mut) = data_ptr_ty.kind() {
1125 if *in_mut != mutability {
1126 self.fail(location, "input and output mutability must match");
1127 }
1128
1129 if !in_pointee.is_sized(self.tcx, self.typing_env) {
1131 self.fail(location, "input pointer must be thin");
1132 }
1133 } else {
1134 self.fail(
1135 location,
1136 "first operand to raw pointer aggregate must be a raw pointer",
1137 );
1138 }
1139
1140 if pointee_ty.is_slice() {
1142 if !self.mir_assign_valid_types(metadata_ty, self.tcx.types.usize) {
1143 self.fail(location, "slice metadata must be usize");
1144 }
1145 } else if pointee_ty.is_sized(self.tcx, self.typing_env) {
1146 if metadata_ty != self.tcx.types.unit {
1147 self.fail(location, "metadata for pointer-to-thin must be unit");
1148 }
1149 }
1150 } else {
1151 self.fail(location, "raw pointer aggregate must have 2 fields");
1152 }
1153 }
1154 },
1155 Rvalue::Ref(_, BorrowKind::Fake(_), _) => {
1156 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
1157 self.fail(
1158 location,
1159 "`Assign` statement with a `Fake` borrow should have been removed in runtime MIR",
1160 );
1161 }
1162 }
1163 Rvalue::Ref(..) => {}
1164 Rvalue::BinaryOp(op, vals) => {
1165 use BinOp::*;
1166 let a = vals.0.ty(&self.body.local_decls, self.tcx);
1167 let b = vals.1.ty(&self.body.local_decls, self.tcx);
1168 if crate::util::binop_right_homogeneous(*op) {
1169 if let Eq | Lt | Le | Ne | Ge | Gt = op {
1170 if !self.mir_assign_valid_types(a, b) {
1172 self.fail(
1173 location,
1174 format!("Cannot {op:?} compare incompatible types {a} and {b}"),
1175 );
1176 }
1177 } else if a != b {
1178 self.fail(
1179 location,
1180 format!("Cannot perform binary op {op:?} on unequal types {a} and {b}"),
1181 );
1182 }
1183 }
1184
1185 match op {
1186 Offset => {
1187 check_kinds!(a, "Cannot offset non-pointer type {:?}", ty::RawPtr(..));
1188 if b != self.tcx.types.isize && b != self.tcx.types.usize {
1189 self.fail(location, format!("Cannot offset by non-isize type {b}"));
1190 }
1191 }
1192 Eq | Lt | Le | Ne | Ge | Gt => {
1193 for x in [a, b] {
1194 check_kinds!(
1195 x,
1196 "Cannot {op:?} compare type {:?}",
1197 ty::Bool
1198 | ty::Char
1199 | ty::Int(..)
1200 | ty::Uint(..)
1201 | ty::Float(..)
1202 | ty::RawPtr(..)
1203 | ty::FnPtr(..)
1204 )
1205 }
1206 }
1207 Cmp => {
1208 for x in [a, b] {
1209 check_kinds!(
1210 x,
1211 "Cannot three-way compare non-integer type {:?}",
1212 ty::Char | ty::Uint(..) | ty::Int(..)
1213 )
1214 }
1215 }
1216 AddUnchecked | AddWithOverflow | SubUnchecked | SubWithOverflow
1217 | MulUnchecked | MulWithOverflow | Shl | ShlUnchecked | Shr | ShrUnchecked => {
1218 for x in [a, b] {
1219 check_kinds!(
1220 x,
1221 "Cannot {op:?} non-integer type {:?}",
1222 ty::Uint(..) | ty::Int(..)
1223 )
1224 }
1225 }
1226 BitAnd | BitOr | BitXor => {
1227 for x in [a, b] {
1228 check_kinds!(
1229 x,
1230 "Cannot perform bitwise op {op:?} on type {:?}",
1231 ty::Uint(..) | ty::Int(..) | ty::Bool
1232 )
1233 }
1234 }
1235 Add | Sub | Mul | Div | Rem => {
1236 for x in [a, b] {
1237 check_kinds!(
1238 x,
1239 "Cannot perform arithmetic {op:?} on type {:?}",
1240 ty::Uint(..) | ty::Int(..) | ty::Float(..)
1241 )
1242 }
1243 }
1244 }
1245 }
1246 Rvalue::UnaryOp(op, operand) => {
1247 let a = operand.ty(&self.body.local_decls, self.tcx);
1248 match op {
1249 UnOp::Neg => {
1250 check_kinds!(a, "Cannot negate type {:?}", ty::Int(..) | ty::Float(..))
1251 }
1252 UnOp::Not => {
1253 check_kinds!(
1254 a,
1255 "Cannot binary not type {:?}",
1256 ty::Int(..) | ty::Uint(..) | ty::Bool
1257 );
1258 }
1259 UnOp::PtrMetadata => {
1260 check_kinds!(
1261 a,
1262 "Cannot PtrMetadata non-pointer non-reference type {:?}",
1263 ty::RawPtr(..) | ty::Ref(..)
1264 );
1265 }
1266 }
1267 }
1268 Rvalue::Cast(kind, operand, target_type) => {
1269 let op_ty = operand.ty(self.body, self.tcx);
1270 match kind {
1271 CastKind::PointerWithExposedProvenance | CastKind::PointerExposeProvenance => {}
1273 CastKind::PointerCoercion(PointerCoercion::ReifyFnPointer(_), _) => {
1274 check_kinds!(
1276 op_ty,
1277 "CastKind::{kind:?} input must be a fn item, not {:?}",
1278 ty::FnDef(..)
1279 );
1280 check_kinds!(
1281 target_type,
1282 "CastKind::{kind:?} output must be a fn pointer, not {:?}",
1283 ty::FnPtr(..)
1284 );
1285 }
1286 CastKind::PointerCoercion(PointerCoercion::UnsafeFnPointer, _) => {
1287 check_kinds!(
1289 op_ty,
1290 "CastKind::{kind:?} input must be a fn pointer, not {:?}",
1291 ty::FnPtr(..)
1292 );
1293 check_kinds!(
1294 target_type,
1295 "CastKind::{kind:?} output must be a fn pointer, not {:?}",
1296 ty::FnPtr(..)
1297 );
1298 }
1299 CastKind::PointerCoercion(PointerCoercion::ClosureFnPointer(..), _) => {
1300 check_kinds!(
1302 op_ty,
1303 "CastKind::{kind:?} input must be a closure, not {:?}",
1304 ty::Closure(..)
1305 );
1306 check_kinds!(
1307 target_type,
1308 "CastKind::{kind:?} output must be a fn pointer, not {:?}",
1309 ty::FnPtr(..)
1310 );
1311 }
1312 CastKind::PointerCoercion(PointerCoercion::MutToConstPointer, _) => {
1313 check_kinds!(
1315 op_ty,
1316 "CastKind::{kind:?} input must be a raw mut pointer, not {:?}",
1317 ty::RawPtr(_, Mutability::Mut)
1318 );
1319 check_kinds!(
1320 target_type,
1321 "CastKind::{kind:?} output must be a raw const pointer, not {:?}",
1322 ty::RawPtr(_, Mutability::Not)
1323 );
1324 if self.body.phase >= MirPhase::Analysis(AnalysisPhase::PostCleanup) {
1325 self.fail(location, format!("After borrowck, MIR disallows {kind:?}"));
1326 }
1327 }
1328 CastKind::PointerCoercion(PointerCoercion::ArrayToPointer, _) => {
1329 check_kinds!(
1331 op_ty,
1332 "CastKind::{kind:?} input must be a raw pointer, not {:?}",
1333 ty::RawPtr(..)
1334 );
1335 check_kinds!(
1336 target_type,
1337 "CastKind::{kind:?} output must be a raw pointer, not {:?}",
1338 ty::RawPtr(..)
1339 );
1340 if self.body.phase >= MirPhase::Analysis(AnalysisPhase::PostCleanup) {
1341 self.fail(location, format!("After borrowck, MIR disallows {kind:?}"));
1342 }
1343 }
1344 CastKind::PointerCoercion(PointerCoercion::Unsize, _) => {
1345 if !self.predicate_must_hold_modulo_regions(ty::TraitRef::new(
1348 self.tcx,
1349 self.tcx.require_lang_item(
1350 LangItem::CoerceUnsized,
1351 self.body.source_info(location).span,
1352 ),
1353 [op_ty, *target_type],
1354 )) {
1355 self.fail(location, format!("Unsize coercion, but `{op_ty}` isn't coercible to `{target_type}`"));
1356 }
1357 }
1358 CastKind::IntToInt | CastKind::IntToFloat => {
1359 let input_valid = op_ty.is_integral() || op_ty.is_char() || op_ty.is_bool();
1360 let target_valid = target_type.is_numeric() || target_type.is_char();
1361 if !input_valid || !target_valid {
1362 self.fail(
1363 location,
1364 format!("Wrong cast kind {kind:?} for the type {op_ty}"),
1365 );
1366 }
1367 }
1368 CastKind::FnPtrToPtr => {
1369 check_kinds!(
1370 op_ty,
1371 "CastKind::{kind:?} input must be a fn pointer, not {:?}",
1372 ty::FnPtr(..)
1373 );
1374 check_kinds!(
1375 target_type,
1376 "CastKind::{kind:?} output must be a raw pointer, not {:?}",
1377 ty::RawPtr(..)
1378 );
1379 }
1380 CastKind::PtrToPtr => {
1381 check_kinds!(
1382 op_ty,
1383 "CastKind::{kind:?} input must be a raw pointer, not {:?}",
1384 ty::RawPtr(..)
1385 );
1386 check_kinds!(
1387 target_type,
1388 "CastKind::{kind:?} output must be a raw pointer, not {:?}",
1389 ty::RawPtr(..)
1390 );
1391 }
1392 CastKind::FloatToFloat | CastKind::FloatToInt => {
1393 if !op_ty.is_floating_point() || !target_type.is_numeric() {
1394 self.fail(
1395 location,
1396 format!(
1397 "Trying to cast non 'Float' as {kind:?} into {target_type:?}"
1398 ),
1399 );
1400 }
1401 }
1402 CastKind::Transmute => {
1403 if !self
1407 .tcx
1408 .normalize_erasing_regions(
1409 self.typing_env,
1410 Unnormalized::new_wip(op_ty),
1411 )
1412 .is_sized(self.tcx, self.typing_env)
1413 {
1414 self.fail(
1415 location,
1416 format!("Cannot transmute from non-`Sized` type {op_ty}"),
1417 );
1418 }
1419 if !self
1420 .tcx
1421 .normalize_erasing_regions(
1422 self.typing_env,
1423 Unnormalized::new_wip(*target_type),
1424 )
1425 .is_sized(self.tcx, self.typing_env)
1426 {
1427 self.fail(
1428 location,
1429 format!("Cannot transmute to non-`Sized` type {target_type:?}"),
1430 );
1431 }
1432 }
1433 CastKind::Subtype => {
1434 if !util::sub_types(self.tcx, self.typing_env, op_ty, *target_type) {
1435 self.fail(
1436 location,
1437 format!("Failed subtyping {op_ty} and {target_type}"),
1438 )
1439 }
1440 }
1441 }
1442 }
1443 Rvalue::Repeat(_, _)
1444 | Rvalue::ThreadLocalRef(_)
1445 | Rvalue::RawPtr(_, _)
1446 | Rvalue::Discriminant(_) => {}
1447
1448 Rvalue::WrapUnsafeBinder(op, ty) => {
1449 let unwrapped_ty = op.ty(self.body, self.tcx);
1450 let ty::UnsafeBinder(binder_ty) = *ty.kind() else {
1451 self.fail(
1452 location,
1453 format!("WrapUnsafeBinder does not produce a ty::UnsafeBinder"),
1454 );
1455 return;
1456 };
1457 let binder_inner_ty = self.tcx.instantiate_bound_regions_with_erased(*binder_ty);
1458 if !self.mir_assign_valid_types(unwrapped_ty, binder_inner_ty) {
1459 self.fail(
1460 location,
1461 format!("Cannot wrap {unwrapped_ty} into unsafe binder {binder_ty:?}"),
1462 );
1463 }
1464 }
1465 }
1466 self.super_rvalue(rvalue, location);
1467 }
1468
1469 fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
1470 match &statement.kind {
1471 StatementKind::Assign(box (dest, rvalue)) => {
1472 let left_ty = dest.ty(&self.body.local_decls, self.tcx).ty;
1474 let right_ty = rvalue.ty(&self.body.local_decls, self.tcx);
1475
1476 if !self.mir_assign_valid_types(right_ty, left_ty) {
1477 self.fail(
1478 location,
1479 format!(
1480 "encountered `{:?}` with incompatible types:\n\
1481 left-hand side has type: {}\n\
1482 right-hand side has type: {}",
1483 statement.kind, left_ty, right_ty,
1484 ),
1485 );
1486 }
1487
1488 if let Some(local) = dest.as_local()
1489 && let ClearCrossCrate::Set(box LocalInfo::DerefTemp) =
1490 self.body.local_decls[local].local_info
1491 && !matches!(rvalue, Rvalue::CopyForDeref(_))
1492 {
1493 self.fail(location, "assignment to a `DerefTemp` must use `CopyForDeref`")
1494 }
1495 }
1496 StatementKind::AscribeUserType(..) => {
1497 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
1498 self.fail(
1499 location,
1500 "`AscribeUserType` should have been removed after drop lowering phase",
1501 );
1502 }
1503 }
1504 StatementKind::FakeRead(..) => {
1505 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
1506 self.fail(
1507 location,
1508 "`FakeRead` should have been removed after drop lowering phase",
1509 );
1510 }
1511 }
1512 StatementKind::Intrinsic(box NonDivergingIntrinsic::Assume(op)) => {
1513 let ty = op.ty(&self.body.local_decls, self.tcx);
1514 if !ty.is_bool() {
1515 self.fail(
1516 location,
1517 format!("`assume` argument must be `bool`, but got: `{ty}`"),
1518 );
1519 }
1520 }
1521 StatementKind::Intrinsic(box NonDivergingIntrinsic::CopyNonOverlapping(
1522 CopyNonOverlapping { src, dst, count },
1523 )) => {
1524 let src_ty = src.ty(&self.body.local_decls, self.tcx);
1525 let op_src_ty = if let Some(src_deref) = src_ty.builtin_deref(true) {
1526 src_deref
1527 } else {
1528 self.fail(
1529 location,
1530 format!("Expected src to be ptr in copy_nonoverlapping, got: {src_ty}"),
1531 );
1532 return;
1533 };
1534 let dst_ty = dst.ty(&self.body.local_decls, self.tcx);
1535 let op_dst_ty = if let Some(dst_deref) = dst_ty.builtin_deref(true) {
1536 dst_deref
1537 } else {
1538 self.fail(
1539 location,
1540 format!("Expected dst to be ptr in copy_nonoverlapping, got: {dst_ty}"),
1541 );
1542 return;
1543 };
1544 if !self.mir_assign_valid_types(op_src_ty, op_dst_ty) {
1547 self.fail(location, format!("bad arg ({op_src_ty} != {op_dst_ty})"));
1548 }
1549
1550 let op_cnt_ty = count.ty(&self.body.local_decls, self.tcx);
1551 if op_cnt_ty != self.tcx.types.usize {
1552 self.fail(location, format!("bad arg ({op_cnt_ty} != usize)"))
1553 }
1554 }
1555 StatementKind::SetDiscriminant { place, .. } => {
1556 if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial) {
1557 self.fail(location, "`SetDiscriminant`is not allowed until deaggregation");
1558 }
1559 let pty = place.ty(&self.body.local_decls, self.tcx).ty;
1560 if !matches!(
1561 pty.kind(),
1562 ty::Adt(..)
1563 | ty::Coroutine(..)
1564 | ty::Alias(ty::AliasTy { kind: ty::Opaque { .. }, .. })
1565 ) {
1566 self.fail(
1567 location,
1568 format!(
1569 "`SetDiscriminant` is only allowed on ADTs and coroutines, not {pty}"
1570 ),
1571 );
1572 }
1573 }
1574 StatementKind::Retag(kind, _) => {
1575 if matches!(kind, RetagKind::TwoPhase) {
1579 self.fail(location, format!("explicit `{kind:?}` is forbidden"));
1580 }
1581 }
1582 StatementKind::StorageLive(_)
1583 | StatementKind::StorageDead(_)
1584 | StatementKind::Coverage(_)
1585 | StatementKind::ConstEvalCounter
1586 | StatementKind::PlaceMention(..)
1587 | StatementKind::BackwardIncompatibleDropHint { .. }
1588 | StatementKind::Nop => {}
1589 }
1590
1591 self.super_statement(statement, location);
1592 }
1593
1594 fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
1595 match &terminator.kind {
1596 TerminatorKind::SwitchInt { targets, discr } => {
1597 let switch_ty = discr.ty(&self.body.local_decls, self.tcx);
1598
1599 let target_width = self.tcx.sess.target.pointer_width;
1600
1601 let size = Size::from_bits(match switch_ty.kind() {
1602 ty::Uint(uint) => uint.normalize(target_width).bit_width().unwrap(),
1603 ty::Int(int) => int.normalize(target_width).bit_width().unwrap(),
1604 ty::Char => 32,
1605 ty::Bool => 1,
1606 other => bug!("unhandled type: {:?}", other),
1607 });
1608
1609 for (value, _) in targets.iter() {
1610 if ScalarInt::try_from_uint(value, size).is_none() {
1611 self.fail(
1612 location,
1613 format!("the value {value:#x} is not a proper {switch_ty}"),
1614 )
1615 }
1616 }
1617 }
1618 TerminatorKind::Call { func, .. } | TerminatorKind::TailCall { func, .. } => {
1619 let func_ty = func.ty(&self.body.local_decls, self.tcx);
1620 match func_ty.kind() {
1621 ty::FnPtr(..) | ty::FnDef(..) => {}
1622 _ => self.fail(
1623 location,
1624 format!(
1625 "encountered non-callable type {func_ty} in `{}` terminator",
1626 terminator.kind.name()
1627 ),
1628 ),
1629 }
1630
1631 if let TerminatorKind::TailCall { .. } = terminator.kind {
1632 }
1635 }
1636 TerminatorKind::Assert { cond, .. } => {
1637 let cond_ty = cond.ty(&self.body.local_decls, self.tcx);
1638 if cond_ty != self.tcx.types.bool {
1639 self.fail(
1640 location,
1641 format!(
1642 "encountered non-boolean condition of type {cond_ty} in `Assert` terminator"
1643 ),
1644 );
1645 }
1646 }
1647 TerminatorKind::Goto { .. }
1648 | TerminatorKind::Drop { .. }
1649 | TerminatorKind::Yield { .. }
1650 | TerminatorKind::FalseEdge { .. }
1651 | TerminatorKind::FalseUnwind { .. }
1652 | TerminatorKind::InlineAsm { .. }
1653 | TerminatorKind::CoroutineDrop
1654 | TerminatorKind::UnwindResume
1655 | TerminatorKind::UnwindTerminate(_)
1656 | TerminatorKind::Return
1657 | TerminatorKind::Unreachable => {}
1658 }
1659
1660 self.super_terminator(terminator, location);
1661 }
1662
1663 fn visit_local_decl(&mut self, local: Local, local_decl: &LocalDecl<'tcx>) {
1664 if let ClearCrossCrate::Set(box LocalInfo::DerefTemp) = local_decl.local_info {
1665 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
1666 self.fail(
1667 START_BLOCK.start_location(),
1668 "`DerefTemp` should have been removed in runtime MIR",
1669 );
1670 } else if local_decl.ty.builtin_deref(true).is_none() {
1671 self.fail(
1672 START_BLOCK.start_location(),
1673 "`DerefTemp` should only be used for dereferenceable types",
1674 )
1675 }
1676 }
1677
1678 self.super_local_decl(local, local_decl);
1679 }
1680}
1681
1682pub(super) fn validate_debuginfos<'tcx>(body: &Body<'tcx>) -> Vec<(Location, String)> {
1683 let mut debuginfo_checker =
1684 DebuginfoChecker { debuginfo_locals: debuginfo_locals(body), failures: Vec::new() };
1685 debuginfo_checker.visit_body(body);
1686 debuginfo_checker.failures
1687}
1688
/// Visitor that cross-checks statement-level debuginfo against the set of
/// locals that actually carry debuginfo (see [`validate_debuginfos`]).
struct DebuginfoChecker {
    // Locals that appear in the body's debuginfo, computed up front via
    // `debuginfo_locals`.
    debuginfo_locals: DenseBitSet<Local>,
    // Accumulated validation failures, paired with the offending location.
    failures: Vec<(Location, String)>,
}
1693
1694impl<'tcx> Visitor<'tcx> for DebuginfoChecker {
1695 fn visit_statement_debuginfo(
1696 &mut self,
1697 stmt_debuginfo: &StmtDebugInfo<'tcx>,
1698 location: Location,
1699 ) {
1700 let local = match stmt_debuginfo {
1701 StmtDebugInfo::AssignRef(local, _) | StmtDebugInfo::InvalidAssign(local) => *local,
1702 };
1703 if !self.debuginfo_locals.contains(local) {
1704 self.failures.push((location, format!("{local:?} is not in debuginfo")));
1705 }
1706 }
1707}