use rustc_abi::{ExternAbi, FIRST_VARIANT, Size};
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_hir::LangItem;
use rustc_hir::attrs::InlineAttr;
use rustc_index::IndexVec;
use rustc_index::bit_set::DenseBitSet;
use rustc_infer::infer::TyCtxtInferExt;
use rustc_infer::traits::{Obligation, ObligationCause};
use rustc_middle::mir::coverage::CoverageKind;
use rustc_middle::mir::visit::{MutatingUseContext, NonUseContext, PlaceContext, Visitor};
use rustc_middle::mir::*;
use rustc_middle::ty::adjustment::PointerCoercion;
use rustc_middle::ty::print::with_no_trimmed_paths;
use rustc_middle::ty::{
    self, CoroutineArgsExt, InstanceKind, ScalarInt, Ty, TyCtxt, TypeVisitableExt, Upcast, Variance,
};
use rustc_middle::{bug, span_bug};
use rustc_mir_dataflow::debuginfo::debuginfo_locals;
use rustc_trait_selection::traits::ObligationCtxt;

use crate::util::{self, most_packed_projection};

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum EdgeKind {
    Unwind,
    Normal,
}

pub(super) struct Validator {
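    /// Describes the point in the compilation pipeline at which this validation runs;
    /// the string is included verbatim in any "broken MIR" diagnostics.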
    pub when: String,
}

impl<'tcx> crate::MirPass<'tcx> for Validator {
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        if matches!(body.source.instance, InstanceKind::Intrinsic(..) | InstanceKind::Virtual(..)) {
            return;
        }
        let def_id = body.source.def_id();
        let typing_env = body.typing_env(tcx);
        let can_unwind = if body.phase <= MirPhase::Runtime(RuntimePhase::Initial) {
            true
        } else if !tcx.def_kind(def_id).is_fn_like() {
            true
        } else {
            let body_ty = tcx.type_of(def_id).skip_binder();
            let body_abi = match body_ty.kind() {
                ty::FnDef(..) => body_ty.fn_sig(tcx).abi(),
                ty::Closure(..) => ExternAbi::RustCall,
                ty::CoroutineClosure(..) => ExternAbi::RustCall,
                ty::Coroutine(..) => ExternAbi::Rust,
                ty::Error(_) => return,
                _ => span_bug!(body.span, "unexpected body ty: {body_ty}"),
            };

            ty::layout::fn_can_unwind(tcx, Some(def_id), body_abi)
        };

        let mut cfg_checker = CfgChecker {
            when: &self.when,
            body,
            tcx,
            unwind_edge_count: 0,
            reachable_blocks: traversal::reachable_as_bitset(body),
            value_cache: FxHashSet::default(),
            can_unwind,
        };
        cfg_checker.visit_body(body);
        cfg_checker.check_cleanup_control_flow();

        for (location, msg) in validate_types(tcx, typing_env, body, body) {
            cfg_checker.fail(location, msg);
        }

        for (location, msg) in validate_debuginfos(body) {
            cfg_checker.fail(location, msg);
        }

        if let MirPhase::Runtime(_) = body.phase
            && let ty::InstanceKind::Item(_) = body.source.instance
            && body.has_free_regions()
        {
            cfg_checker.fail(
                Location::START,
                format!("Free regions in optimized {} MIR", body.phase.name()),
            );
        }
    }

    fn is_required(&self) -> bool {
        true
    }
}

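/// Checks the control-flow-level invariants of a body: jump targets and unwind edges,
/// cleanup block structure, and which statements and terminators are allowed in the
/// current `MirPhase`. Type-level checks are done by [`TypeChecker`].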
struct CfgChecker<'a, 'tcx> {
    when: &'a str,
    body: &'a Body<'tcx>,
    tcx: TyCtxt<'tcx>,
    unwind_edge_count: usize,
    reachable_blocks: DenseBitSet<BasicBlock>,
    value_cache: FxHashSet<u128>,
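    /// Whether this body is allowed to unwind; `UnwindAction::Continue` and
    /// `TerminatorKind::UnwindResume` are rejected when it is not.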
    can_unwind: bool,
}

impl<'a, 'tcx> CfgChecker<'a, 'tcx> {
    #[track_caller]
    fn fail(&self, location: Location, msg: impl AsRef<str>) {
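        // Only ICE when no other compiler errors have been emitted; broken MIR is
        // expected in the presence of earlier errors.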
        if self.tcx.dcx().has_errors().is_none() {
            span_bug!(
                self.body.source_info(location).span,
                "broken MIR in {:?} ({}) at {:?}:\n{}",
                self.body.source.instance,
                self.when,
                location,
                msg.as_ref(),
            );
        }
    }

    fn check_edge(&mut self, location: Location, bb: BasicBlock, edge_kind: EdgeKind) {
        if bb == START_BLOCK {
            self.fail(location, "start block must not have predecessors")
        }
        if let Some(bb) = self.body.basic_blocks.get(bb) {
            let src = self.body.basic_blocks.get(location.block).unwrap();
            match (src.is_cleanup, bb.is_cleanup, edge_kind) {
                (false, false, EdgeKind::Normal)
                | (true, true, EdgeKind::Normal) => {}
                (false, true, EdgeKind::Unwind) => {
                    self.unwind_edge_count += 1;
                }
                _ => {
                    self.fail(
                        location,
                        format!(
                            "{:?} edge to {:?} violates unwind invariants (cleanup {:?} -> {:?})",
                            edge_kind,
                            bb,
                            src.is_cleanup,
                            bb.is_cleanup,
                        )
                    )
                }
            }
        } else {
            self.fail(location, format!("encountered jump to invalid basic block {bb:?}"))
        }
    }

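    /// Checks that cleanup control flow is well-formed: after contracting each cleanup
    /// block to its outermost cleanup dominator, every contracted region may branch to at
    /// most one other region, and those region-to-region edges must not form a cycle.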
    fn check_cleanup_control_flow(&self) {
        if self.unwind_edge_count <= 1 {
            return;
        }
        let doms = self.body.basic_blocks.dominators();
        let mut post_contract_node = FxHashMap::default();
        let mut dom_path = vec![];
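        // Contracts a cleanup block to the root of its chain of cleanup dominators,
        // memoizing the result for every block walked along the way.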
        let mut get_post_contract_node = |mut bb| {
            let root = loop {
                if let Some(root) = post_contract_node.get(&bb) {
                    break *root;
                }
                let parent = doms.immediate_dominator(bb).unwrap();
                dom_path.push(bb);
                if !self.body.basic_blocks[parent].is_cleanup {
                    break bb;
                }
                bb = parent;
            };
            for bb in dom_path.drain(..) {
                post_contract_node.insert(bb, root);
            }
            root
        };

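        // For each contracted cleanup region, record the single region it branches to;
        // finding edges to two different regions is a violation.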
        let mut parent = IndexVec::from_elem(None, &self.body.basic_blocks);
        for (bb, bb_data) in self.body.basic_blocks.iter_enumerated() {
            if !bb_data.is_cleanup || !self.reachable_blocks.contains(bb) {
                continue;
            }
            let bb = get_post_contract_node(bb);
            for s in bb_data.terminator().successors() {
                let s = get_post_contract_node(s);
                if s == bb {
                    continue;
                }
                let parent = &mut parent[bb];
                match parent {
                    None => {
                        *parent = Some(s);
                    }
                    Some(e) if *e == s => (),
                    Some(e) => self.fail(
                        Location { block: bb, statement_index: 0 },
                        format!(
                            "Cleanup control flow violation: The blocks dominated by {:?} have edges to both {:?} and {:?}",
                            bb,
                            s,
                            *e
                        )
                    ),
                }
            }
        }

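        // Walk the recorded parent chains to detect cycles among the contracted regions.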
        let mut stack = FxHashSet::default();
        for (mut bb, parent) in parent.iter_enumerated_mut() {
            stack.clear();
            stack.insert(bb);
            loop {
                let Some(parent) = parent.take() else { break };
                let no_cycle = stack.insert(parent);
                if !no_cycle {
                    self.fail(
                        Location { block: bb, statement_index: 0 },
                        format!(
                            "Cleanup control flow violation: Cycle involving edge {bb:?} -> {parent:?}",
                        ),
                    );
                    break;
                }
                bb = parent;
            }
        }
    }

    fn check_unwind_edge(&mut self, location: Location, unwind: UnwindAction) {
        let is_cleanup = self.body.basic_blocks[location.block].is_cleanup;
        match unwind {
            UnwindAction::Cleanup(unwind) => {
                if is_cleanup {
                    self.fail(location, "`UnwindAction::Cleanup` in cleanup block");
                }
                self.check_edge(location, unwind, EdgeKind::Unwind);
            }
            UnwindAction::Continue => {
                if is_cleanup {
                    self.fail(location, "`UnwindAction::Continue` in cleanup block");
                }

                if !self.can_unwind {
                    self.fail(location, "`UnwindAction::Continue` in no-unwind function");
                }
            }
            UnwindAction::Terminate(UnwindTerminateReason::InCleanup) => {
                if !is_cleanup {
                    self.fail(
                        location,
                        "`UnwindAction::Terminate(InCleanup)` in a non-cleanup block",
                    );
                }
            }
            UnwindAction::Unreachable | UnwindAction::Terminate(UnwindTerminateReason::Abi) => (),
        }
    }

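    /// Whether the edge from a `Call` to `target` is a critical edge: the call can also
    /// unwind or terminate, and `target` has more than one predecessor.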
    fn is_critical_call_edge(&self, target: Option<BasicBlock>, unwind: UnwindAction) -> bool {
        let Some(target) = target else { return false };
        matches!(unwind, UnwindAction::Cleanup(_) | UnwindAction::Terminate(_))
            && self.body.basic_blocks.predecessors()[target].len() > 1
    }
}

impl<'a, 'tcx> Visitor<'tcx> for CfgChecker<'a, 'tcx> {
    fn visit_local(&mut self, local: Local, _context: PlaceContext, location: Location) {
        if self.body.local_decls.get(local).is_none() {
            self.fail(
                location,
                format!("local {local:?} has no corresponding declaration in `body.local_decls`"),
            );
        }
    }

    fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
        match &statement.kind {
            StatementKind::AscribeUserType(..) => {
                if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(
                        location,
                        "`AscribeUserType` should have been removed after drop lowering phase",
                    );
                }
            }
            StatementKind::FakeRead(..) => {
                if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(
                        location,
                        "`FakeRead` should have been removed after drop lowering phase",
                    );
                }
            }
            StatementKind::SetDiscriminant { .. } => {
                if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(location, "`SetDiscriminant` is not allowed until deaggregation");
                }
            }
            StatementKind::Retag(kind, _) => {
                if matches!(kind, RetagKind::TwoPhase) {
                    self.fail(location, format!("explicit `{kind:?}` is forbidden"));
                }
            }
            StatementKind::Coverage(kind) => {
                if self.body.phase >= MirPhase::Analysis(AnalysisPhase::PostCleanup)
                    && let CoverageKind::BlockMarker { .. } | CoverageKind::SpanMarker { .. } = kind
                {
                    self.fail(
                        location,
                        format!("{kind:?} should have been removed after analysis"),
                    );
                }
            }
            StatementKind::Assign(..)
            | StatementKind::StorageLive(_)
            | StatementKind::StorageDead(_)
            | StatementKind::Intrinsic(_)
            | StatementKind::ConstEvalCounter
            | StatementKind::PlaceMention(..)
            | StatementKind::BackwardIncompatibleDropHint { .. }
            | StatementKind::Nop => {}
        }

        self.super_statement(statement, location);
    }

    fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
        match &terminator.kind {
            TerminatorKind::Goto { target } => {
                self.check_edge(location, *target, EdgeKind::Normal);
            }
            TerminatorKind::SwitchInt { targets, discr: _ } => {
                for (_, target) in targets.iter() {
                    self.check_edge(location, target, EdgeKind::Normal);
                }
                self.check_edge(location, targets.otherwise(), EdgeKind::Normal);

                self.value_cache.clear();
                self.value_cache.extend(targets.iter().map(|(value, _)| value));
                let has_duplicates = targets.iter().len() != self.value_cache.len();
                if has_duplicates {
                    self.fail(
                        location,
                        format!(
                            "duplicated values in `SwitchInt` terminator: {:?}",
                            terminator.kind,
                        ),
                    );
                }
            }
            TerminatorKind::Drop { target, unwind, drop, .. } => {
                self.check_edge(location, *target, EdgeKind::Normal);
                self.check_unwind_edge(location, *unwind);
                if let Some(drop) = drop {
                    self.check_edge(location, *drop, EdgeKind::Normal);
                }
            }
            TerminatorKind::Call { func, args, .. }
            | TerminatorKind::TailCall { func, args, .. } => {
                if let TerminatorKind::Call { target, unwind, destination, .. } = terminator.kind {
                    if let Some(target) = target {
                        self.check_edge(location, target, EdgeKind::Normal);
                    }
                    self.check_unwind_edge(location, unwind);

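                    // In optimized runtime MIR, a `Call` that both returns and can unwind
                    // or terminate must not target a block with multiple predecessors,
                    // i.e. no critical call edges are allowed.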
                    if self.body.phase >= MirPhase::Runtime(RuntimePhase::Optimized)
                        && self.is_critical_call_edge(target, unwind)
                    {
                        self.fail(
                            location,
                            format!(
                                "encountered critical edge in `Call` terminator {:?}",
                                terminator.kind,
                            ),
                        );
                    }

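                    // The destination of a call must not lie inside a packed
                    // (potentially under-aligned) place.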
                    if most_packed_projection(self.tcx, &self.body.local_decls, destination)
                        .is_some()
                    {
                        self.fail(
                            location,
                            format!(
                                "encountered packed place in `Call` terminator destination: {:?}",
                                terminator.kind,
                            ),
                        );
                    }
                }

                for arg in args {
                    if let Operand::Move(place) = &arg.node {
                        if most_packed_projection(self.tcx, &self.body.local_decls, *place)
                            .is_some()
                        {
                            self.fail(
                                location,
                                format!(
                                    "encountered `Move` of a packed place in `Call` terminator: {:?}",
                                    terminator.kind,
                                ),
                            );
                        }
                    }
                }

                if let ty::FnDef(did, ..) = func.ty(&self.body.local_decls, self.tcx).kind()
                    && self.body.phase >= MirPhase::Runtime(RuntimePhase::Optimized)
                    && matches!(self.tcx.codegen_fn_attrs(did).inline, InlineAttr::Force { .. })
                {
                    self.fail(location, "`#[rustc_force_inline]`-annotated function not inlined");
                }
            }
            TerminatorKind::Assert { target, unwind, .. } => {
                self.check_edge(location, *target, EdgeKind::Normal);
                self.check_unwind_edge(location, *unwind);
            }
            TerminatorKind::Yield { resume, drop, .. } => {
                if self.body.coroutine.is_none() {
                    self.fail(location, "`Yield` cannot appear outside coroutine bodies");
                }
                if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(location, "`Yield` should have been replaced by coroutine lowering");
                }
                self.check_edge(location, *resume, EdgeKind::Normal);
                if let Some(drop) = drop {
                    self.check_edge(location, *drop, EdgeKind::Normal);
                }
            }
            TerminatorKind::FalseEdge { real_target, imaginary_target } => {
                if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(
                        location,
                        "`FalseEdge` should have been removed after drop elaboration",
                    );
                }
                self.check_edge(location, *real_target, EdgeKind::Normal);
                self.check_edge(location, *imaginary_target, EdgeKind::Normal);
            }
            TerminatorKind::FalseUnwind { real_target, unwind } => {
                if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(
                        location,
                        "`FalseUnwind` should have been removed after drop elaboration",
                    );
                }
                self.check_edge(location, *real_target, EdgeKind::Normal);
                self.check_unwind_edge(location, *unwind);
            }
            TerminatorKind::InlineAsm { targets, unwind, .. } => {
                for &target in targets {
                    self.check_edge(location, target, EdgeKind::Normal);
                }
                self.check_unwind_edge(location, *unwind);
            }
            TerminatorKind::CoroutineDrop => {
                if self.body.coroutine.is_none() {
                    self.fail(location, "`CoroutineDrop` cannot appear outside coroutine bodies");
                }
                if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(
                        location,
                        "`CoroutineDrop` should have been replaced by coroutine lowering",
                    );
                }
            }
            TerminatorKind::UnwindResume => {
                let bb = location.block;
                if !self.body.basic_blocks[bb].is_cleanup {
                    self.fail(location, "Cannot `UnwindResume` from non-cleanup basic block")
                }
                if !self.can_unwind {
                    self.fail(location, "Cannot `UnwindResume` in a function that cannot unwind")
                }
            }
            TerminatorKind::UnwindTerminate(_) => {
                let bb = location.block;
                if !self.body.basic_blocks[bb].is_cleanup {
                    self.fail(location, "Cannot `UnwindTerminate` from non-cleanup basic block")
                }
            }
            TerminatorKind::Return => {
                let bb = location.block;
                if self.body.basic_blocks[bb].is_cleanup {
                    self.fail(location, "Cannot `Return` from cleanup basic block")
                }
            }
            TerminatorKind::Unreachable => {}
        }

        self.super_terminator(terminator, location);
    }

    fn visit_source_scope(&mut self, scope: SourceScope) {
        if self.body.source_scopes.get(scope).is_none() {
            self.tcx.dcx().span_bug(
                self.body.span,
                format!(
                    "broken MIR in {:?} ({}):\ninvalid source scope {:?}",
                    self.body.source.instance, self.when, scope,
                ),
            );
        }
    }
}

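/// Type-checks the MIR in `body`, returning the problems found as `(location, message)`
/// pairs instead of aborting, so that callers decide how to report them. `caller_body`
/// (the same as `body` when called from the validator pass) is used to resolve coroutine
/// field layouts.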
pub(super) fn validate_types<'tcx>(
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    body: &Body<'tcx>,
    caller_body: &Body<'tcx>,
) -> Vec<(Location, String)> {
    let mut type_checker = TypeChecker { body, caller_body, tcx, typing_env, failures: Vec::new() };
    with_no_trimmed_paths!({
        type_checker.visit_body(body);
    });
    type_checker.failures
}

struct TypeChecker<'a, 'tcx> {
    body: &'a Body<'tcx>,
    caller_body: &'a Body<'tcx>,
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    failures: Vec<(Location, String)>,
}

impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
    fn fail(&mut self, location: Location, msg: impl Into<String>) {
        self.failures.push((location, msg.into()));
    }

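    /// Returns whether a value of type `src` can be assigned to a place of type `dest`.
    /// Types containing opaques are accepted unconditionally; otherwise the types are
    /// related covariantly before runtime MIR and invariantly from runtime MIR onwards.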
    fn mir_assign_valid_types(&self, src: Ty<'tcx>, dest: Ty<'tcx>) -> bool {
        if src == dest {
            return true;
        }

        if (src, dest).has_opaque_types() {
            return true;
        }

        let variance = if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
            Variance::Invariant
        } else {
            Variance::Covariant
        };

        crate::util::relate_types(self.tcx, self.typing_env, variance, src, dest)
    }

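    /// Evaluates `pred` in this body's typing environment, ignoring region constraints.
    /// Predicates mentioning opaque types are optimistically assumed to hold.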
    fn predicate_must_hold_modulo_regions(
        &self,
        pred: impl Upcast<TyCtxt<'tcx>, ty::Predicate<'tcx>>,
    ) -> bool {
        let pred: ty::Predicate<'tcx> = pred.upcast(self.tcx);

        if pred.has_opaque_types() {
            return true;
        }

        let (infcx, param_env) = self.tcx.infer_ctxt().build_with_typing_env(self.typing_env);
        let ocx = ObligationCtxt::new(&infcx);
        ocx.register_obligation(Obligation::new(
            self.tcx,
            ObligationCause::dummy(),
            param_env,
            pred,
        ));
        ocx.evaluate_obligations_error_on_ambiguity().is_empty()
    }
}

impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
    fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
        if self.tcx.sess.opts.unstable_opts.validate_mir
            && self.body.phase < MirPhase::Runtime(RuntimePhase::Initial)
        {
            if let Operand::Copy(place) = operand {
                let ty = place.ty(&self.body.local_decls, self.tcx).ty;

                if !self.tcx.type_is_copy_modulo_regions(self.typing_env, ty) {
                    self.fail(location, format!("`Operand::Copy` with non-`Copy` type {ty}"));
                }
            }
        }

        self.super_operand(operand, location);
    }

    fn visit_projection_elem(
        &mut self,
        place_ref: PlaceRef<'tcx>,
        elem: PlaceElem<'tcx>,
        context: PlaceContext,
        location: Location,
    ) {
        match elem {
            ProjectionElem::Deref
                if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) =>
            {
                let base_ty = place_ref.ty(&self.body.local_decls, self.tcx).ty;

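                // `ElaborateBoxDerefs` replaces direct dereferences of `Box`, so in
                // runtime MIR the base of a `Deref` projection must no longer be a box.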
                if base_ty.is_box() {
                    self.fail(location, format!("{base_ty} dereferenced after ElaborateBoxDerefs"))
                }
            }
            ProjectionElem::Field(f, ty) => {
                let parent_ty = place_ref.ty(&self.body.local_decls, self.tcx);
                let fail_out_of_bounds = |this: &mut Self, location| {
                    this.fail(location, format!("Out of bounds field {f:?} for {parent_ty:?}"));
                };
                let check_equal = |this: &mut Self, location, f_ty| {
                    if !this.mir_assign_valid_types(ty, f_ty) {
                        this.fail(
                            location,
                            format!(
                                "Field projection `{place_ref:?}.{f:?}` specified type `{ty}`, but actual type is `{f_ty}`"
                            )
                        )
                    }
                };

                let kind = match parent_ty.ty.kind() {
                    &ty::Alias(ty::Opaque, ty::AliasTy { def_id, args, .. }) => {
                        self.tcx.type_of(def_id).instantiate(self.tcx, args).kind()
                    }
                    kind => kind,
                };

                match kind {
                    ty::Tuple(fields) => {
                        let Some(f_ty) = fields.get(f.as_usize()) else {
                            fail_out_of_bounds(self, location);
                            return;
                        };
                        check_equal(self, location, *f_ty);
                    }
                    ty::Pat(base, _) => check_equal(self, location, *base),
                    ty::Adt(adt_def, args) => {
                        if self.tcx.is_lang_item(adt_def.did(), LangItem::DynMetadata) {
                            self.fail(
                                location,
                                format!(
                                    "You can't project to field {f:?} of `DynMetadata` because \
                                    layout is weird and thinks it doesn't have fields."
                                ),
                            );
                        }

                        if adt_def.repr().simd() {
                            self.fail(
                                location,
                                format!(
                                    "Projecting into SIMD type {adt_def:?} is banned by MCP#838"
                                ),
                            );
                        }

                        let var = parent_ty.variant_index.unwrap_or(FIRST_VARIANT);
                        let Some(field) = adt_def.variant(var).fields.get(f) else {
                            fail_out_of_bounds(self, location);
                            return;
                        };
                        check_equal(self, location, field.ty(self.tcx, args));
                    }
                    ty::Closure(_, args) => {
                        let args = args.as_closure();
                        let Some(&f_ty) = args.upvar_tys().get(f.as_usize()) else {
                            fail_out_of_bounds(self, location);
                            return;
                        };
                        check_equal(self, location, f_ty);
                    }
                    ty::CoroutineClosure(_, args) => {
                        let args = args.as_coroutine_closure();
                        let Some(&f_ty) = args.upvar_tys().get(f.as_usize()) else {
                            fail_out_of_bounds(self, location);
                            return;
                        };
                        check_equal(self, location, f_ty);
                    }
                    &ty::Coroutine(def_id, args) => {
                        let f_ty = if let Some(var) = parent_ty.variant_index {
                            let layout = if def_id == self.caller_body.source.def_id() {
                                self.caller_body
                                    .coroutine_layout_raw()
                                    .or_else(|| self.tcx.coroutine_layout(def_id, args).ok())
                            } else if self.tcx.needs_coroutine_by_move_body_def_id(def_id)
                                && let ty::ClosureKind::FnOnce =
                                    args.as_coroutine().kind_ty().to_opt_closure_kind().unwrap()
                                && self.caller_body.source.def_id()
                                    == self.tcx.coroutine_by_move_body_def_id(def_id)
                            {
                                self.caller_body.coroutine_layout_raw()
                            } else {
                                self.tcx.coroutine_layout(def_id, args).ok()
                            };

                            let Some(layout) = layout else {
                                self.fail(
                                    location,
                                    format!("No coroutine layout for {parent_ty:?}"),
                                );
                                return;
                            };

                            let Some(&local) = layout.variant_fields[var].get(f) else {
                                fail_out_of_bounds(self, location);
                                return;
                            };

                            let Some(f_ty) = layout.field_tys.get(local) else {
                                self.fail(
                                    location,
                                    format!("Out of bounds local {local:?} for {parent_ty:?}"),
                                );
                                return;
                            };

                            ty::EarlyBinder::bind(f_ty.ty).instantiate(self.tcx, args)
                        } else {
                            let Some(&f_ty) = args.as_coroutine().prefix_tys().get(f.index())
                            else {
                                fail_out_of_bounds(self, location);
                                return;
                            };

                            f_ty
                        };

                        check_equal(self, location, f_ty);
                    }
                    _ => {
                        self.fail(location, format!("{:?} does not have fields", parent_ty.ty));
                    }
                }
            }
            ProjectionElem::Index(index) => {
                let indexed_ty = place_ref.ty(&self.body.local_decls, self.tcx).ty;
                match indexed_ty.kind() {
                    ty::Array(_, _) | ty::Slice(_) => {}
                    _ => self.fail(location, format!("{indexed_ty:?} cannot be indexed")),
                }

                let index_ty = self.body.local_decls[index].ty;
                if index_ty != self.tcx.types.usize {
                    self.fail(location, format!("bad index ({index_ty} != usize)"))
                }
            }
            ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
                let indexed_ty = place_ref.ty(&self.body.local_decls, self.tcx).ty;
                match indexed_ty.kind() {
                    ty::Array(_, _) => {
                        if from_end {
                            self.fail(location, "arrays should not be indexed from end");
                        }
                    }
                    ty::Slice(_) => {}
                    _ => self.fail(location, format!("{indexed_ty:?} cannot be indexed")),
                }

                if from_end {
                    if offset > min_length {
                        self.fail(
                            location,
                            format!(
                                "constant index with offset -{offset} out of bounds of min length {min_length}"
                            ),
                        );
                    }
                } else {
                    if offset >= min_length {
                        self.fail(
                            location,
                            format!(
                                "constant index with offset {offset} out of bounds of min length {min_length}"
                            ),
                        );
                    }
                }
            }
            ProjectionElem::Subslice { from, to, from_end } => {
                let indexed_ty = place_ref.ty(&self.body.local_decls, self.tcx).ty;
                match indexed_ty.kind() {
                    ty::Array(_, _) => {
                        if from_end {
                            self.fail(location, "arrays should not be subsliced from end");
                        }
                    }
                    ty::Slice(_) => {
                        if !from_end {
                            self.fail(location, "slices should be subsliced from end");
                        }
                    }
                    _ => self.fail(location, format!("{indexed_ty:?} cannot be indexed")),
                }

                if !from_end && from > to {
                    self.fail(location, format!("backwards subslice {from}..{to}"));
                }
            }
            ProjectionElem::OpaqueCast(ty)
                if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) =>
            {
                self.fail(
                    location,
                    format!("explicit opaque type cast to `{ty}` after `PostAnalysisNormalize`"),
                )
            }
            ProjectionElem::UnwrapUnsafeBinder(unwrapped_ty) => {
                let binder_ty = place_ref.ty(&self.body.local_decls, self.tcx);
                let ty::UnsafeBinder(binder_ty) = *binder_ty.ty.kind() else {
                    self.fail(
                        location,
                        "`UnwrapUnsafeBinder` projection applied to a non-`UnsafeBinder` type",
                    );
                    return;
                };
                let binder_inner_ty = self.tcx.instantiate_bound_regions_with_erased(*binder_ty);
                if !self.mir_assign_valid_types(unwrapped_ty, binder_inner_ty) {
                    self.fail(
                        location,
                        format!(
                            "Cannot unwrap unsafe binder {binder_ty:?} into type {unwrapped_ty}"
                        ),
                    );
                }
            }
            _ => {}
        }
        self.super_projection_elem(place_ref, elem, context, location);
    }

    fn visit_var_debug_info(&mut self, debuginfo: &VarDebugInfo<'tcx>) {
        if let Some(box VarDebugInfoFragment { ty, ref projection }) = debuginfo.composite {
            if ty.is_union() || ty.is_enum() {
                self.fail(
                    START_BLOCK.start_location(),
                    format!("invalid type {ty} in debuginfo for {:?}", debuginfo.name),
                );
            }
            if projection.is_empty() {
                self.fail(
                    START_BLOCK.start_location(),
                    format!("invalid empty projection in debuginfo for {:?}", debuginfo.name),
                );
            }
            if projection.iter().any(|p| !matches!(p, PlaceElem::Field(..))) {
                self.fail(
                    START_BLOCK.start_location(),
                    format!(
                        "illegal projection {:?} in debuginfo for {:?}",
                        projection, debuginfo.name
                    ),
                );
            }
        }
        match debuginfo.value {
            VarDebugInfoContents::Const(_) => {}
            VarDebugInfoContents::Place(place) => {
                if place.projection.iter().any(|p| !p.can_use_in_debuginfo()) {
                    self.fail(
                        START_BLOCK.start_location(),
                        format!("illegal place {:?} in debuginfo for {:?}", place, debuginfo.name),
                    );
                }
            }
        }
        self.super_var_debug_info(debuginfo);
    }

    fn visit_place(&mut self, place: &Place<'tcx>, cntxt: PlaceContext, location: Location) {
        let _ = place.ty(&self.body.local_decls, self.tcx);

        if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial)
            && place.projection.len() > 1
            && cntxt != PlaceContext::NonUse(NonUseContext::VarDebugInfo)
            && place.projection[1..].contains(&ProjectionElem::Deref)
        {
            self.fail(
                location,
                format!("place {place:?} has deref as a later projection (it is only permitted as the first projection)"),
            );
        }

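        // Every `Downcast` projection must be immediately followed by a `Field` projection.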
        let mut projections_iter = place.projection.iter();
        while let Some(proj) = projections_iter.next() {
            if matches!(proj, ProjectionElem::Downcast(..)) {
                if !matches!(projections_iter.next(), Some(ProjectionElem::Field(..))) {
                    self.fail(
                        location,
                        format!(
                            "place {place:?} has `Downcast` projection not followed by `Field`"
                        ),
                    );
                }
            }
        }

        if let ClearCrossCrate::Set(box LocalInfo::DerefTemp) =
            self.body.local_decls[place.local].local_info
            && !place.is_indirect_first_projection()
        {
            if cntxt != PlaceContext::MutatingUse(MutatingUseContext::Store)
                || place.as_local().is_none()
            {
                self.fail(
                    location,
                    format!("`DerefTemp` locals must only be dereferenced or directly assigned to"),
                );
            }
        }

        if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial)
            && let Some(i) = place
                .projection
                .iter()
                .position(|elem| matches!(elem, ProjectionElem::Subslice { .. }))
            && let Some(tail) = place.projection.get(i + 1..)
            && tail.iter().any(|elem| {
                matches!(
                    elem,
                    ProjectionElem::ConstantIndex { .. } | ProjectionElem::Subslice { .. }
                )
            })
        {
            self.fail(
                location,
                format!("place {place:?} has `ConstantIndex` or `Subslice` after `Subslice`"),
            );
        }

        self.super_place(place, cntxt, location);
    }

    fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
        macro_rules! check_kinds {
            ($t:expr, $text:literal, $typat:pat) => {
                if !matches!(($t).kind(), $typat) {
                    self.fail(location, format!($text, $t));
                }
            };
        }
        match rvalue {
            Rvalue::Use(_) => {}
            Rvalue::CopyForDeref(_) => {
                if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(location, "`CopyForDeref` should have been removed in runtime MIR");
                }
            }
            Rvalue::Aggregate(kind, fields) => match **kind {
                AggregateKind::Tuple => {}
                AggregateKind::Array(dest) => {
                    for src in fields {
                        if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
                            self.fail(location, "array field has the wrong type");
                        }
                    }
                }
                AggregateKind::Adt(def_id, idx, args, _, Some(field)) => {
                    let adt_def = self.tcx.adt_def(def_id);
                    assert!(adt_def.is_union());
                    assert_eq!(idx, FIRST_VARIANT);
                    let dest_ty = self.tcx.normalize_erasing_regions(
                        self.typing_env,
                        adt_def.non_enum_variant().fields[field].ty(self.tcx, args),
                    );
                    if let [field] = fields.raw.as_slice() {
                        let src_ty = field.ty(self.body, self.tcx);
                        if !self.mir_assign_valid_types(src_ty, dest_ty) {
                            self.fail(location, "union field has the wrong type");
                        }
                    } else {
                        self.fail(location, "unions should have one initialized field");
                    }
                }
                AggregateKind::Adt(def_id, idx, args, _, None) => {
                    let adt_def = self.tcx.adt_def(def_id);
                    assert!(!adt_def.is_union());
                    let variant = &adt_def.variants()[idx];
                    if variant.fields.len() != fields.len() {
                        self.fail(location, format!(
                            "adt {def_id:?} has the wrong number of initialized fields, expected {}, found {}",
                            variant.fields.len(),
                            fields.len(),
                        ));
                    }
                    for (src, dest) in std::iter::zip(fields, &variant.fields) {
                        let dest_ty = self
                            .tcx
                            .normalize_erasing_regions(self.typing_env, dest.ty(self.tcx, args));
                        if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest_ty) {
                            self.fail(location, "adt field has the wrong type");
                        }
                    }
                }
                AggregateKind::Closure(_, args) => {
                    let upvars = args.as_closure().upvar_tys();
                    if upvars.len() != fields.len() {
                        self.fail(location, "closure has the wrong number of initialized fields");
                    }
                    for (src, dest) in std::iter::zip(fields, upvars) {
                        if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
                            self.fail(location, "closure field has the wrong type");
                        }
                    }
                }
                AggregateKind::Coroutine(_, args) => {
                    let upvars = args.as_coroutine().upvar_tys();
                    if upvars.len() != fields.len() {
                        self.fail(location, "coroutine has the wrong number of initialized fields");
                    }
                    for (src, dest) in std::iter::zip(fields, upvars) {
                        if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
                            self.fail(location, "coroutine field has the wrong type");
                        }
                    }
                }
                AggregateKind::CoroutineClosure(_, args) => {
                    let upvars = args.as_coroutine_closure().upvar_tys();
                    if upvars.len() != fields.len() {
                        self.fail(
                            location,
                            "coroutine-closure has the wrong number of initialized fields",
                        );
                    }
                    for (src, dest) in std::iter::zip(fields, upvars) {
                        if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
                            self.fail(location, "coroutine-closure field has the wrong type");
                        }
                    }
                }
                AggregateKind::RawPtr(pointee_ty, mutability) => {
                    if !matches!(self.body.phase, MirPhase::Runtime(_)) {
                        self.fail(location, "RawPtr should be in runtime MIR only");
                    }

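                    // A raw-pointer aggregate takes exactly two operands: a thin data
                    // pointer and the metadata (checked to be `usize` for slice pointees
                    // and `()` for sized pointees).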
                    if let [data_ptr, metadata] = fields.raw.as_slice() {
                        let data_ptr_ty = data_ptr.ty(self.body, self.tcx);
                        let metadata_ty = metadata.ty(self.body, self.tcx);
                        if let ty::RawPtr(in_pointee, in_mut) = data_ptr_ty.kind() {
                            if *in_mut != mutability {
                                self.fail(location, "input and output mutability must match");
                            }

                            if !in_pointee.is_sized(self.tcx, self.typing_env) {
                                self.fail(location, "input pointer must be thin");
                            }
                        } else {
                            self.fail(
                                location,
                                "first operand to raw pointer aggregate must be a raw pointer",
                            );
                        }

                        if pointee_ty.is_slice() {
                            if !self.mir_assign_valid_types(metadata_ty, self.tcx.types.usize) {
                                self.fail(location, "slice metadata must be usize");
                            }
                        } else if pointee_ty.is_sized(self.tcx, self.typing_env) {
                            if metadata_ty != self.tcx.types.unit {
                                self.fail(location, "metadata for pointer-to-thin must be unit");
                            }
                        }
                    } else {
                        self.fail(location, "raw pointer aggregate must have 2 fields");
                    }
                }
            },
            Rvalue::Ref(_, BorrowKind::Fake(_), _) => {
                if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(
                        location,
                        "`Assign` statement with a `Fake` borrow should have been removed in runtime MIR",
                    );
                }
            }
            Rvalue::Ref(..) => {}
            Rvalue::BinaryOp(op, vals) => {
                use BinOp::*;
                let a = vals.0.ty(&self.body.local_decls, self.tcx);
                let b = vals.1.ty(&self.body.local_decls, self.tcx);
                if crate::util::binop_right_homogeneous(*op) {
                    if let Eq | Lt | Le | Ne | Ge | Gt = op {
                        if !self.mir_assign_valid_types(a, b) {
                            self.fail(
                                location,
                                format!("Cannot {op:?} compare incompatible types {a} and {b}"),
                            );
                        }
                    } else if a != b {
                        self.fail(
                            location,
                            format!("Cannot perform binary op {op:?} on unequal types {a} and {b}"),
                        );
                    }
                }

                match op {
                    Offset => {
                        check_kinds!(a, "Cannot offset non-pointer type {:?}", ty::RawPtr(..));
                        if b != self.tcx.types.isize && b != self.tcx.types.usize {
                            self.fail(location, format!("Cannot offset by non-isize/usize type {b}"));
                        }
                    }
                    Eq | Lt | Le | Ne | Ge | Gt => {
                        for x in [a, b] {
                            check_kinds!(
                                x,
                                "Cannot {op:?} compare type {:?}",
                                ty::Bool
                                    | ty::Char
                                    | ty::Int(..)
                                    | ty::Uint(..)
                                    | ty::Float(..)
                                    | ty::RawPtr(..)
                                    | ty::FnPtr(..)
                            )
                        }
                    }
                    Cmp => {
                        for x in [a, b] {
                            check_kinds!(
                                x,
                                "Cannot three-way compare non-integer type {:?}",
                                ty::Char | ty::Uint(..) | ty::Int(..)
                            )
                        }
                    }
                    AddUnchecked | AddWithOverflow | SubUnchecked | SubWithOverflow
                    | MulUnchecked | MulWithOverflow | Shl | ShlUnchecked | Shr | ShrUnchecked => {
                        for x in [a, b] {
                            check_kinds!(
                                x,
                                "Cannot {op:?} non-integer type {:?}",
                                ty::Uint(..) | ty::Int(..)
                            )
                        }
                    }
                    BitAnd | BitOr | BitXor => {
                        for x in [a, b] {
                            check_kinds!(
                                x,
                                "Cannot perform bitwise op {op:?} on type {:?}",
                                ty::Uint(..) | ty::Int(..) | ty::Bool
                            )
                        }
                    }
                    Add | Sub | Mul | Div | Rem => {
                        for x in [a, b] {
                            check_kinds!(
                                x,
                                "Cannot perform arithmetic {op:?} on type {:?}",
                                ty::Uint(..) | ty::Int(..) | ty::Float(..)
                            )
                        }
                    }
                }
            }
            Rvalue::UnaryOp(op, operand) => {
                let a = operand.ty(&self.body.local_decls, self.tcx);
                match op {
                    UnOp::Neg => {
                        check_kinds!(a, "Cannot negate type {:?}", ty::Int(..) | ty::Float(..))
                    }
                    UnOp::Not => {
                        check_kinds!(
                            a,
                            "Cannot binary not type {:?}",
                            ty::Int(..) | ty::Uint(..) | ty::Bool
                        );
                    }
                    UnOp::PtrMetadata => {
                        check_kinds!(
                            a,
                            "Cannot PtrMetadata non-pointer non-reference type {:?}",
                            ty::RawPtr(..) | ty::Ref(..)
                        );
                    }
                }
            }
            Rvalue::ShallowInitBox(operand, _) => {
                if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(location, format!("ShallowInitBox after ElaborateBoxDerefs"))
                }

                let a = operand.ty(&self.body.local_decls, self.tcx);
                check_kinds!(a, "Cannot shallow init type {:?}", ty::RawPtr(..));
            }
            Rvalue::Cast(kind, operand, target_type) => {
                let op_ty = operand.ty(self.body, self.tcx);
                match kind {
                    CastKind::PointerWithExposedProvenance | CastKind::PointerExposeProvenance => {}
                    CastKind::PointerCoercion(PointerCoercion::ReifyFnPointer, _) => {
                        check_kinds!(
                            op_ty,
                            "CastKind::{kind:?} input must be a fn item, not {:?}",
                            ty::FnDef(..)
                        );
                        check_kinds!(
                            target_type,
                            "CastKind::{kind:?} output must be a fn pointer, not {:?}",
                            ty::FnPtr(..)
                        );
                    }
                    CastKind::PointerCoercion(PointerCoercion::UnsafeFnPointer, _) => {
                        check_kinds!(
                            op_ty,
                            "CastKind::{kind:?} input must be a fn pointer, not {:?}",
                            ty::FnPtr(..)
                        );
                        check_kinds!(
                            target_type,
                            "CastKind::{kind:?} output must be a fn pointer, not {:?}",
                            ty::FnPtr(..)
                        );
                    }
                    CastKind::PointerCoercion(PointerCoercion::ClosureFnPointer(..), _) => {
                        check_kinds!(
                            op_ty,
                            "CastKind::{kind:?} input must be a closure, not {:?}",
                            ty::Closure(..)
                        );
                        check_kinds!(
                            target_type,
                            "CastKind::{kind:?} output must be a fn pointer, not {:?}",
                            ty::FnPtr(..)
                        );
                    }
                    CastKind::PointerCoercion(PointerCoercion::MutToConstPointer, _) => {
                        check_kinds!(
                            op_ty,
                            "CastKind::{kind:?} input must be a raw mut pointer, not {:?}",
                            ty::RawPtr(_, Mutability::Mut)
                        );
                        check_kinds!(
                            target_type,
                            "CastKind::{kind:?} output must be a raw const pointer, not {:?}",
                            ty::RawPtr(_, Mutability::Not)
                        );
                        if self.body.phase >= MirPhase::Analysis(AnalysisPhase::PostCleanup) {
                            self.fail(location, format!("After borrowck, MIR disallows {kind:?}"));
                        }
                    }
                    CastKind::PointerCoercion(PointerCoercion::ArrayToPointer, _) => {
                        check_kinds!(
                            op_ty,
                            "CastKind::{kind:?} input must be a raw pointer, not {:?}",
                            ty::RawPtr(..)
                        );
                        check_kinds!(
                            target_type,
                            "CastKind::{kind:?} output must be a raw pointer, not {:?}",
                            ty::RawPtr(..)
                        );
                        if self.body.phase >= MirPhase::Analysis(AnalysisPhase::PostCleanup) {
                            self.fail(location, format!("After borrowck, MIR disallows {kind:?}"));
                        }
                    }
                    CastKind::PointerCoercion(PointerCoercion::Unsize, _) => {
                        if !self.predicate_must_hold_modulo_regions(ty::TraitRef::new(
                            self.tcx,
                            self.tcx.require_lang_item(
                                LangItem::CoerceUnsized,
                                self.body.source_info(location).span,
                            ),
                            [op_ty, *target_type],
                        )) {
                            self.fail(location, format!("Unsize coercion, but `{op_ty}` isn't coercible to `{target_type}`"));
                        }
                    }
                    CastKind::IntToInt | CastKind::IntToFloat => {
                        let input_valid = op_ty.is_integral() || op_ty.is_char() || op_ty.is_bool();
                        let target_valid = target_type.is_numeric() || target_type.is_char();
                        if !input_valid || !target_valid {
                            self.fail(
                                location,
                                format!("Wrong cast kind {kind:?} for the type {op_ty}"),
                            );
                        }
                    }
                    CastKind::FnPtrToPtr => {
                        check_kinds!(
                            op_ty,
                            "CastKind::{kind:?} input must be a fn pointer, not {:?}",
                            ty::FnPtr(..)
                        );
                        check_kinds!(
                            target_type,
                            "CastKind::{kind:?} output must be a raw pointer, not {:?}",
                            ty::RawPtr(..)
                        );
                    }
                    CastKind::PtrToPtr => {
                        check_kinds!(
                            op_ty,
                            "CastKind::{kind:?} input must be a raw pointer, not {:?}",
                            ty::RawPtr(..)
                        );
                        check_kinds!(
                            target_type,
                            "CastKind::{kind:?} output must be a raw pointer, not {:?}",
                            ty::RawPtr(..)
                        );
                    }
                    CastKind::FloatToFloat | CastKind::FloatToInt => {
                        if !op_ty.is_floating_point() || !target_type.is_numeric() {
                            self.fail(
                                location,
                                format!(
                                    "Trying to cast non 'Float' as {kind:?} into {target_type:?}"
                                ),
                            );
                        }
                    }
                    CastKind::Transmute => {
                        if !self
                            .tcx
                            .normalize_erasing_regions(self.typing_env, op_ty)
                            .is_sized(self.tcx, self.typing_env)
                        {
                            self.fail(
                                location,
                                format!("Cannot transmute from non-`Sized` type {op_ty}"),
                            );
                        }
                        if !self
                            .tcx
                            .normalize_erasing_regions(self.typing_env, *target_type)
                            .is_sized(self.tcx, self.typing_env)
                        {
                            self.fail(
                                location,
                                format!("Cannot transmute to non-`Sized` type {target_type:?}"),
                            );
                        }
                    }
                    CastKind::Subtype => {
                        if !util::sub_types(self.tcx, self.typing_env, op_ty, *target_type) {
                            self.fail(
                                location,
                                format!("Failed subtyping {op_ty} and {target_type}"),
                            )
                        }
                    }
                }
            }
            Rvalue::NullaryOp(NullOp::OffsetOf(indices), container) => {
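                // Walk the `(variant, field)` path of `OffsetOf`, checking each step
                // against the current (normalized) container type.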
                let fail_out_of_bounds = |this: &mut Self, location, field, ty| {
                    this.fail(location, format!("Out of bounds field {field:?} for {ty}"));
                };

                let mut current_ty = *container;

                for (variant, field) in indices.iter() {
                    match current_ty.kind() {
                        ty::Tuple(fields) => {
                            if variant != FIRST_VARIANT {
                                self.fail(
                                    location,
                                    format!("tried to get variant {variant:?} of tuple"),
                                );
                                return;
                            }
                            let Some(&f_ty) = fields.get(field.as_usize()) else {
                                fail_out_of_bounds(self, location, field, current_ty);
                                return;
                            };

                            current_ty = self.tcx.normalize_erasing_regions(self.typing_env, f_ty);
                        }
                        ty::Adt(adt_def, args) => {
                            let Some(field) = adt_def.variant(variant).fields.get(field) else {
                                fail_out_of_bounds(self, location, field, current_ty);
                                return;
                            };

                            let f_ty = field.ty(self.tcx, args);
                            current_ty = self.tcx.normalize_erasing_regions(self.typing_env, f_ty);
                        }
                        _ => {
                            self.fail(
                                location,
                                format!("Cannot get offset ({variant:?}, {field:?}) from type {current_ty}"),
                            );
                            return;
                        }
                    }
                }
            }
            Rvalue::Repeat(_, _)
            | Rvalue::ThreadLocalRef(_)
            | Rvalue::RawPtr(_, _)
            | Rvalue::NullaryOp(NullOp::RuntimeChecks(_), _)
            | Rvalue::Discriminant(_) => {}

            Rvalue::WrapUnsafeBinder(op, ty) => {
                let unwrapped_ty = op.ty(self.body, self.tcx);
                let ty::UnsafeBinder(binder_ty) = *ty.kind() else {
                    self.fail(
                        location,
                        format!("WrapUnsafeBinder does not produce a ty::UnsafeBinder"),
                    );
                    return;
                };
                let binder_inner_ty = self.tcx.instantiate_bound_regions_with_erased(*binder_ty);
                if !self.mir_assign_valid_types(unwrapped_ty, binder_inner_ty) {
                    self.fail(
                        location,
                        format!("Cannot wrap {unwrapped_ty} into unsafe binder {binder_ty:?}"),
                    );
                }
            }
        }
        self.super_rvalue(rvalue, location);
    }

    fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
        match &statement.kind {
            StatementKind::Assign(box (dest, rvalue)) => {
                let left_ty = dest.ty(&self.body.local_decls, self.tcx).ty;
                let right_ty = rvalue.ty(&self.body.local_decls, self.tcx);

                if !self.mir_assign_valid_types(right_ty, left_ty) {
                    self.fail(
                        location,
                        format!(
                            "encountered `{:?}` with incompatible types:\n\
                            left-hand side has type: {}\n\
                            right-hand side has type: {}",
                            statement.kind, left_ty, right_ty,
                        ),
                    );
                }

                if let Some(local) = dest.as_local()
                    && let ClearCrossCrate::Set(box LocalInfo::DerefTemp) =
                        self.body.local_decls[local].local_info
                    && !matches!(rvalue, Rvalue::CopyForDeref(_))
                {
                    self.fail(location, "assignment to a `DerefTemp` must use `CopyForDeref`")
                }
            }
            StatementKind::AscribeUserType(..) => {
                if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(
                        location,
                        "`AscribeUserType` should have been removed after drop lowering phase",
                    );
                }
            }
            StatementKind::FakeRead(..) => {
                if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(
                        location,
                        "`FakeRead` should have been removed after drop lowering phase",
                    );
                }
            }
            StatementKind::Intrinsic(box NonDivergingIntrinsic::Assume(op)) => {
                let ty = op.ty(&self.body.local_decls, self.tcx);
                if !ty.is_bool() {
                    self.fail(
                        location,
                        format!("`assume` argument must be `bool`, but got: `{ty}`"),
                    );
                }
            }
            StatementKind::Intrinsic(box NonDivergingIntrinsic::CopyNonOverlapping(
                CopyNonOverlapping { src, dst, count },
            )) => {
                let src_ty = src.ty(&self.body.local_decls, self.tcx);
                let op_src_ty = if let Some(src_deref) = src_ty.builtin_deref(true) {
                    src_deref
                } else {
                    self.fail(
                        location,
                        format!("Expected src to be ptr in copy_nonoverlapping, got: {src_ty}"),
                    );
                    return;
                };
                let dst_ty = dst.ty(&self.body.local_decls, self.tcx);
                let op_dst_ty = if let Some(dst_deref) = dst_ty.builtin_deref(true) {
                    dst_deref
                } else {
                    self.fail(
                        location,
                        format!("Expected dst to be ptr in copy_nonoverlapping, got: {dst_ty}"),
                    );
                    return;
                };
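                // The source and destination pointees must be assignment-compatible.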
                if !self.mir_assign_valid_types(op_src_ty, op_dst_ty) {
                    self.fail(location, format!("bad arg ({op_src_ty} != {op_dst_ty})"));
                }

                let op_cnt_ty = count.ty(&self.body.local_decls, self.tcx);
                if op_cnt_ty != self.tcx.types.usize {
                    self.fail(location, format!("bad arg ({op_cnt_ty} != usize)"))
                }
            }
            StatementKind::SetDiscriminant { place, .. } => {
                if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(location, "`SetDiscriminant` is not allowed until deaggregation");
                }
                let pty = place.ty(&self.body.local_decls, self.tcx).ty;
                if !matches!(
                    pty.kind(),
                    ty::Adt(..) | ty::Coroutine(..) | ty::Alias(ty::Opaque, ..)
                ) {
                    self.fail(
                        location,
                        format!(
                            "`SetDiscriminant` is only allowed on ADTs and coroutines, not {pty}"
                        ),
                    );
                }
            }
            StatementKind::Retag(kind, _) => {
                if matches!(kind, RetagKind::TwoPhase) {
                    self.fail(location, format!("explicit `{kind:?}` is forbidden"));
                }
            }
            StatementKind::StorageLive(_)
            | StatementKind::StorageDead(_)
            | StatementKind::Coverage(_)
            | StatementKind::ConstEvalCounter
            | StatementKind::PlaceMention(..)
            | StatementKind::BackwardIncompatibleDropHint { .. }
            | StatementKind::Nop => {}
        }

        self.super_statement(statement, location);
    }

    fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
        match &terminator.kind {
            TerminatorKind::SwitchInt { targets, discr } => {
                let switch_ty = discr.ty(&self.body.local_decls, self.tcx);

                let target_width = self.tcx.sess.target.pointer_width;

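                // Determine the scalar size of the switch type so we can check that
                // every listed value actually fits in it.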
                let size = Size::from_bits(match switch_ty.kind() {
                    ty::Uint(uint) => uint.normalize(target_width).bit_width().unwrap(),
                    ty::Int(int) => int.normalize(target_width).bit_width().unwrap(),
                    ty::Char => 32,
                    ty::Bool => 1,
                    other => bug!("unhandled type: {:?}", other),
                });

                for (value, _) in targets.iter() {
                    if ScalarInt::try_from_uint(value, size).is_none() {
                        self.fail(
                            location,
                            format!("the value {value:#x} is not a proper {switch_ty}"),
                        )
                    }
                }
            }
            TerminatorKind::Call { func, .. } | TerminatorKind::TailCall { func, .. } => {
                let func_ty = func.ty(&self.body.local_decls, self.tcx);
                match func_ty.kind() {
                    ty::FnPtr(..) | ty::FnDef(..) => {}
                    _ => self.fail(
                        location,
                        format!(
                            "encountered non-callable type {func_ty} in `{}` terminator",
                            terminator.kind.name()
                        ),
                    ),
                }

                if let TerminatorKind::TailCall { .. } = terminator.kind {
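                    // No tail-call-specific checks are performed here yet; the empty
                    // block is kept as a placeholder for them.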
                }
            }
            TerminatorKind::Assert { cond, .. } => {
                let cond_ty = cond.ty(&self.body.local_decls, self.tcx);
                if cond_ty != self.tcx.types.bool {
                    self.fail(
                        location,
                        format!(
                            "encountered non-boolean condition of type {cond_ty} in `Assert` terminator"
                        ),
                    );
                }
            }
            TerminatorKind::Goto { .. }
            | TerminatorKind::Drop { .. }
            | TerminatorKind::Yield { .. }
            | TerminatorKind::FalseEdge { .. }
            | TerminatorKind::FalseUnwind { .. }
            | TerminatorKind::InlineAsm { .. }
            | TerminatorKind::CoroutineDrop
            | TerminatorKind::UnwindResume
            | TerminatorKind::UnwindTerminate(_)
            | TerminatorKind::Return
            | TerminatorKind::Unreachable => {}
        }

        self.super_terminator(terminator, location);
    }

    fn visit_local_decl(&mut self, local: Local, local_decl: &LocalDecl<'tcx>) {
        if let ClearCrossCrate::Set(box LocalInfo::DerefTemp) = local_decl.local_info {
            if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
                self.fail(
                    START_BLOCK.start_location(),
                    "`DerefTemp` should have been removed in runtime MIR",
                );
            } else if local_decl.ty.builtin_deref(true).is_none() {
                self.fail(
                    START_BLOCK.start_location(),
                    "`DerefTemp` should only be used for dereferenceable types",
                )
            }
        }

        self.super_local_decl(local, local_decl);
    }
}

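/// Checks that every local mentioned in statement-level debuginfo is itself tracked by
/// debuginfo, returning failures as `(location, message)` pairs.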
pub(super) fn validate_debuginfos<'tcx>(body: &Body<'tcx>) -> Vec<(Location, String)> {
    let mut debuginfo_checker =
        DebuginfoChecker { debuginfo_locals: debuginfo_locals(body), failures: Vec::new() };
    debuginfo_checker.visit_body(body);
    debuginfo_checker.failures
}

struct DebuginfoChecker {
    debuginfo_locals: DenseBitSet<Local>,
    failures: Vec<(Location, String)>,
}

impl<'tcx> Visitor<'tcx> for DebuginfoChecker {
    fn visit_statement_debuginfo(
        &mut self,
        stmt_debuginfo: &StmtDebugInfo<'tcx>,
        location: Location,
    ) {
        let local = match stmt_debuginfo {
            StmtDebugInfo::AssignRef(local, _) | StmtDebugInfo::InvalidAssign(local) => *local,
        };
        if !self.debuginfo_locals.contains(local) {
            self.failures.push((location, format!("{local:?} is not in debuginfo")));
        }
    }
}