1use rustc_abi::{ExternAbi, FIRST_VARIANT, Size};
4use rustc_data_structures::fx::{FxHashMap, FxHashSet};
5use rustc_hir::LangItem;
6use rustc_hir::attrs::InlineAttr;
7use rustc_index::IndexVec;
8use rustc_index::bit_set::DenseBitSet;
9use rustc_infer::infer::TyCtxtInferExt;
10use rustc_infer::traits::{Obligation, ObligationCause};
11use rustc_middle::mir::coverage::CoverageKind;
12use rustc_middle::mir::visit::{MutatingUseContext, NonUseContext, PlaceContext, Visitor};
13use rustc_middle::mir::*;
14use rustc_middle::ty::adjustment::PointerCoercion;
15use rustc_middle::ty::print::with_no_trimmed_paths;
16use rustc_middle::ty::{
17 self, CoroutineArgsExt, InstanceKind, ScalarInt, Ty, TyCtxt, TypeVisitableExt, Upcast, Variance,
18};
19use rustc_middle::{bug, span_bug};
20use rustc_mir_dataflow::debuginfo::debuginfo_locals;
21use rustc_trait_selection::traits::ObligationCtxt;
22
23use crate::util::{self, is_within_packed};
24
/// Classification of a CFG edge, used by `CfgChecker::check_edge` to enforce
/// the cleanup-block invariants (normal edges stay within the cleanup or
/// non-cleanup "world"; unwind edges may only enter the cleanup world).
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum EdgeKind {
    /// Edge taken when unwinding out of the source block.
    Unwind,
    /// Ordinary control-flow edge (goto, switch target, call return, ...).
    Normal,
}
30
/// MIR pass that checks structural and type-level invariants of a body.
pub(super) struct Validator {
    /// Human-readable description of the point in the pass pipeline at which
    /// validation runs; included in every "broken MIR" diagnostic.
    pub when: String,
}
35
impl<'tcx> crate::MirPass<'tcx> for Validator {
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        // Intrinsic and virtual-call instances are skipped entirely.
        if matches!(body.source.instance, InstanceKind::Intrinsic(..) | InstanceKind::Virtual(..)) {
            return;
        }
        let def_id = body.source.def_id();
        let typing_env = body.typing_env(tcx);
        // Decide whether this body is allowed to unwind. Up to and including
        // the initial runtime phase, and for items that are not fn-like, no
        // unwind restriction is enforced (treated as "can unwind").
        let can_unwind = if body.phase <= MirPhase::Runtime(RuntimePhase::Initial) {
            true
        } else if !tcx.def_kind(def_id).is_fn_like() {
            true
        } else {
            // Otherwise, derive the ABI from the body's type and ask the
            // layout code whether that ABI permits unwinding.
            let body_ty = tcx.type_of(def_id).skip_binder();
            let body_abi = match body_ty.kind() {
                ty::FnDef(..) => body_ty.fn_sig(tcx).abi(),
                ty::Closure(..) => ExternAbi::RustCall,
                ty::CoroutineClosure(..) => ExternAbi::RustCall,
                ty::Coroutine(..) => ExternAbi::Rust,
                // A type error means there is nothing meaningful to validate.
                ty::Error(_) => return,
                _ => span_bug!(body.span, "unexpected body ty: {body_ty}"),
            };

            ty::layout::fn_can_unwind(tcx, Some(def_id), body_abi)
        };

        let mut cfg_checker = CfgChecker {
            when: &self.when,
            body,
            tcx,
            unwind_edge_count: 0,
            reachable_blocks: traversal::reachable_as_bitset(body),
            value_cache: FxHashSet::default(),
            can_unwind,
        };
        cfg_checker.visit_body(body);
        cfg_checker.check_cleanup_control_flow();

        // Type-level failures are collected by `validate_types` and funneled
        // through the CFG checker so they share one diagnostic format.
        for (location, msg) in validate_types(tcx, typing_env, body, body) {
            cfg_checker.fail(location, msg);
        }

        for (location, msg) in validate_debuginfos(body) {
            cfg_checker.fail(location, msg);
        }

        // In runtime MIR, ordinary items must not contain free regions.
        if let MirPhase::Runtime(_) = body.phase
            && let ty::InstanceKind::Item(_) = body.source.instance
            && body.has_free_regions()
        {
            cfg_checker.fail(
                Location::START,
                format!("Free regions in optimized {} MIR", body.phase.name()),
            );
        }
    }

    fn is_required(&self) -> bool {
        true
    }
}
104
/// Checks CFG-level invariants: edge validity, cleanup-block discipline, and
/// phase-dependent statement/terminator legality.
struct CfgChecker<'a, 'tcx> {
    /// Pipeline-point description, included in failure messages.
    when: &'a str,
    body: &'a Body<'tcx>,
    tcx: TyCtxt<'tcx>,
    /// Number of unwind edges seen so far; `check_cleanup_control_flow` skips
    /// its work when this is at most one.
    unwind_edge_count: usize,
    /// Blocks reachable from the start block.
    reachable_blocks: DenseBitSet<BasicBlock>,
    /// Scratch set used to detect duplicate `SwitchInt` values.
    value_cache: FxHashSet<u128>,
    /// Whether this function may unwind (computed in `run_pass`).
    can_unwind: bool,
}
122
impl<'a, 'tcx> CfgChecker<'a, 'tcx> {
    /// Reports broken MIR at `location` via `span_bug`, unless compilation has
    /// already produced errors (broken MIR may then be a downstream artifact).
    #[track_caller]
    fn fail(&self, location: Location, msg: impl AsRef<str>) {
        if self.tcx.dcx().has_errors().is_none() {
            span_bug!(
                self.body.source_info(location).span,
                "broken MIR in {:?} ({}) at {:?}:\n{}",
                self.body.source.instance,
                self.when,
                location,
                msg.as_ref(),
            );
        }
    }

    /// Checks one edge from `location`'s block to `bb`: the target must exist,
    /// the start block must have no predecessors, and the cleanup-ness of
    /// source and target must be consistent with `edge_kind`.
    fn check_edge(&mut self, location: Location, bb: BasicBlock, edge_kind: EdgeKind) {
        if bb == START_BLOCK {
            self.fail(location, "start block must not have predecessors")
        }
        if let Some(bb) = self.body.basic_blocks.get(bb) {
            let src = self.body.basic_blocks.get(location.block).unwrap();
            match (src.is_cleanup, bb.is_cleanup, edge_kind) {
                // Normal edges must stay within one world:
                // non-cleanup -> non-cleanup, or cleanup -> cleanup.
                (false, false, EdgeKind::Normal)
                | (true, true, EdgeKind::Normal) => {}
                // Unwind edges may only go from non-cleanup into cleanup.
                (false, true, EdgeKind::Unwind) => {
                    self.unwind_edge_count += 1;
                }
                // Everything else violates the unwind invariants.
                _ => {
                    self.fail(
                        location,
                        format!(
                            "{:?} edge to {:?} violates unwind invariants (cleanup {:?} -> {:?})",
                            edge_kind,
                            bb,
                            src.is_cleanup,
                            bb.is_cleanup,
                        )
                    )
                }
            }
        } else {
            self.fail(location, format!("encountered jump to invalid basic block {bb:?}"))
        }
    }

    /// Checks that cleanup control flow is well-formed: the blocks dominated
    /// by each cleanup "root" funnel into at most one other root, and the
    /// resulting parent relation between roots is acyclic.
    fn check_cleanup_control_flow(&self) {
        // With at most one unwind edge there is nothing to contract or check.
        if self.unwind_edge_count <= 1 {
            return;
        }
        let doms = self.body.basic_blocks.dominators();
        let mut post_contract_node = FxHashMap::default();
        // Reusable path buffer for the memoized root search below.
        let mut dom_path = vec![];
        // Walks up the dominator tree until leaving the cleanup region,
        // memoizing the discovered root for every block on the path.
        let mut get_post_contract_node = |mut bb| {
            let root = loop {
                if let Some(root) = post_contract_node.get(&bb) {
                    break *root;
                }
                let parent = doms.immediate_dominator(bb).unwrap();
                dom_path.push(bb);
                if !self.body.basic_blocks[parent].is_cleanup {
                    break bb;
                }
                bb = parent;
            };
            for bb in dom_path.drain(..) {
                post_contract_node.insert(bb, root);
            }
            root
        };

        // For each contracted cleanup node, record the single other node it
        // may branch to; two distinct targets are a violation.
        let mut parent = IndexVec::from_elem(None, &self.body.basic_blocks);
        for (bb, bb_data) in self.body.basic_blocks.iter_enumerated() {
            if !bb_data.is_cleanup || !self.reachable_blocks.contains(bb) {
                continue;
            }
            let bb = get_post_contract_node(bb);
            for s in bb_data.terminator().successors() {
                let s = get_post_contract_node(s);
                if s == bb {
                    continue;
                }
                let parent = &mut parent[bb];
                match parent {
                    None => {
                        *parent = Some(s);
                    }
                    Some(e) if *e == s => (),
                    Some(e) => self.fail(
                        Location { block: bb, statement_index: 0 },
                        format!(
                            "Cleanup control flow violation: The blocks dominated by {:?} have edges to both {:?} and {:?}",
                            bb,
                            s,
                            *e
                        )
                    ),
                }
            }
        }

        // Check for cycles in the parent relation built above.
        let mut stack = FxHashSet::default();
        for (mut bb, parent) in parent.iter_enumerated_mut() {
            stack.clear();
            stack.insert(bb);
            loop {
                let Some(parent) = parent.take() else { break };
                let no_cycle = stack.insert(parent);
                if !no_cycle {
                    self.fail(
                        Location { block: bb, statement_index: 0 },
                        format!(
                            "Cleanup control flow violation: Cycle involving edge {bb:?} -> {parent:?}",
                        ),
                    );
                    break;
                }
                bb = parent;
            }
        }
    }

    /// Checks a terminator's `UnwindAction` against the cleanup-ness of its
    /// block and the function's ability to unwind.
    fn check_unwind_edge(&mut self, location: Location, unwind: UnwindAction) {
        let is_cleanup = self.body.basic_blocks[location.block].is_cleanup;
        match unwind {
            UnwindAction::Cleanup(unwind) => {
                if is_cleanup {
                    self.fail(location, "`UnwindAction::Cleanup` in cleanup block");
                }
                self.check_edge(location, unwind, EdgeKind::Unwind);
            }
            UnwindAction::Continue => {
                if is_cleanup {
                    self.fail(location, "`UnwindAction::Continue` in cleanup block");
                }

                if !self.can_unwind {
                    self.fail(location, "`UnwindAction::Continue` in no-unwind function");
                }
            }
            UnwindAction::Terminate(UnwindTerminateReason::InCleanup) => {
                if !is_cleanup {
                    self.fail(
                        location,
                        "`UnwindAction::Terminate(InCleanup)` in a non-cleanup block",
                    );
                }
            }
            UnwindAction::Unreachable | UnwindAction::Terminate(UnwindTerminateReason::Abi) => (),
        }
    }

    /// A call edge to `target` is "critical" when the call also has an unwind
    /// successor and `target` has more than one predecessor.
    fn is_critical_call_edge(&self, target: Option<BasicBlock>, unwind: UnwindAction) -> bool {
        let Some(target) = target else { return false };
        matches!(unwind, UnwindAction::Cleanup(_) | UnwindAction::Terminate(_))
            && self.body.basic_blocks.predecessors()[target].len() > 1
    }
}
288
289impl<'a, 'tcx> Visitor<'tcx> for CfgChecker<'a, 'tcx> {
290 fn visit_local(&mut self, local: Local, _context: PlaceContext, location: Location) {
291 if self.body.local_decls.get(local).is_none() {
292 self.fail(
293 location,
294 format!("local {local:?} has no corresponding declaration in `body.local_decls`"),
295 );
296 }
297 }
298
299 fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
300 match &statement.kind {
301 StatementKind::AscribeUserType(..) => {
302 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
303 self.fail(
304 location,
305 "`AscribeUserType` should have been removed after drop lowering phase",
306 );
307 }
308 }
309 StatementKind::FakeRead(..) => {
310 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
311 self.fail(
312 location,
313 "`FakeRead` should have been removed after drop lowering phase",
314 );
315 }
316 }
317 StatementKind::SetDiscriminant { .. } => {
318 if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial) {
319 self.fail(location, "`SetDiscriminant`is not allowed until deaggregation");
320 }
321 }
322 StatementKind::Retag(kind, _) => {
323 if matches!(kind, RetagKind::TwoPhase) {
327 self.fail(location, format!("explicit `{kind:?}` is forbidden"));
328 }
329 }
330 StatementKind::Coverage(kind) => {
331 if self.body.phase >= MirPhase::Analysis(AnalysisPhase::PostCleanup)
332 && let CoverageKind::BlockMarker { .. } | CoverageKind::SpanMarker { .. } = kind
333 {
334 self.fail(
335 location,
336 format!("{kind:?} should have been removed after analysis"),
337 );
338 }
339 }
340 StatementKind::Assign(..)
341 | StatementKind::StorageLive(_)
342 | StatementKind::StorageDead(_)
343 | StatementKind::Intrinsic(_)
344 | StatementKind::ConstEvalCounter
345 | StatementKind::PlaceMention(..)
346 | StatementKind::BackwardIncompatibleDropHint { .. }
347 | StatementKind::Nop => {}
348 }
349
350 self.super_statement(statement, location);
351 }
352
353 fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
354 match &terminator.kind {
355 TerminatorKind::Goto { target } => {
356 self.check_edge(location, *target, EdgeKind::Normal);
357 }
358 TerminatorKind::SwitchInt { targets, discr: _ } => {
359 for (_, target) in targets.iter() {
360 self.check_edge(location, target, EdgeKind::Normal);
361 }
362 self.check_edge(location, targets.otherwise(), EdgeKind::Normal);
363
364 self.value_cache.clear();
365 self.value_cache.extend(targets.iter().map(|(value, _)| value));
366 let has_duplicates = targets.iter().len() != self.value_cache.len();
367 if has_duplicates {
368 self.fail(
369 location,
370 format!(
371 "duplicated values in `SwitchInt` terminator: {:?}",
372 terminator.kind,
373 ),
374 );
375 }
376 }
377 TerminatorKind::Drop { target, unwind, drop, .. } => {
378 self.check_edge(location, *target, EdgeKind::Normal);
379 self.check_unwind_edge(location, *unwind);
380 if let Some(drop) = drop {
381 self.check_edge(location, *drop, EdgeKind::Normal);
382 }
383 }
384 TerminatorKind::Call { func, args, .. }
385 | TerminatorKind::TailCall { func, args, .. } => {
386 if let TerminatorKind::Call { target, unwind, destination, .. } = terminator.kind {
388 if let Some(target) = target {
389 self.check_edge(location, target, EdgeKind::Normal);
390 }
391 self.check_unwind_edge(location, unwind);
392
393 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Optimized)
399 && self.is_critical_call_edge(target, unwind)
400 {
401 self.fail(
402 location,
403 format!(
404 "encountered critical edge in `Call` terminator {:?}",
405 terminator.kind,
406 ),
407 );
408 }
409
410 if is_within_packed(self.tcx, &self.body.local_decls, destination).is_some() {
413 self.fail(
415 location,
416 format!(
417 "encountered packed place in `Call` terminator destination: {:?}",
418 terminator.kind,
419 ),
420 );
421 }
422 }
423
424 for arg in args {
425 if let Operand::Move(place) = &arg.node {
426 if is_within_packed(self.tcx, &self.body.local_decls, *place).is_some() {
427 self.fail(
429 location,
430 format!(
431 "encountered `Move` of a packed place in `Call` terminator: {:?}",
432 terminator.kind,
433 ),
434 );
435 }
436 }
437 }
438
439 if let ty::FnDef(did, ..) = func.ty(&self.body.local_decls, self.tcx).kind()
440 && self.body.phase >= MirPhase::Runtime(RuntimePhase::Optimized)
441 && matches!(self.tcx.codegen_fn_attrs(did).inline, InlineAttr::Force { .. })
442 {
443 self.fail(location, "`#[rustc_force_inline]`-annotated function not inlined");
444 }
445 }
446 TerminatorKind::Assert { target, unwind, .. } => {
447 self.check_edge(location, *target, EdgeKind::Normal);
448 self.check_unwind_edge(location, *unwind);
449 }
450 TerminatorKind::Yield { resume, drop, .. } => {
451 if self.body.coroutine.is_none() {
452 self.fail(location, "`Yield` cannot appear outside coroutine bodies");
453 }
454 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
455 self.fail(location, "`Yield` should have been replaced by coroutine lowering");
456 }
457 self.check_edge(location, *resume, EdgeKind::Normal);
458 if let Some(drop) = drop {
459 self.check_edge(location, *drop, EdgeKind::Normal);
460 }
461 }
462 TerminatorKind::FalseEdge { real_target, imaginary_target } => {
463 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
464 self.fail(
465 location,
466 "`FalseEdge` should have been removed after drop elaboration",
467 );
468 }
469 self.check_edge(location, *real_target, EdgeKind::Normal);
470 self.check_edge(location, *imaginary_target, EdgeKind::Normal);
471 }
472 TerminatorKind::FalseUnwind { real_target, unwind } => {
473 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
474 self.fail(
475 location,
476 "`FalseUnwind` should have been removed after drop elaboration",
477 );
478 }
479 self.check_edge(location, *real_target, EdgeKind::Normal);
480 self.check_unwind_edge(location, *unwind);
481 }
482 TerminatorKind::InlineAsm { targets, unwind, .. } => {
483 for &target in targets {
484 self.check_edge(location, target, EdgeKind::Normal);
485 }
486 self.check_unwind_edge(location, *unwind);
487 }
488 TerminatorKind::CoroutineDrop => {
489 if self.body.coroutine.is_none() {
490 self.fail(location, "`CoroutineDrop` cannot appear outside coroutine bodies");
491 }
492 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
493 self.fail(
494 location,
495 "`CoroutineDrop` should have been replaced by coroutine lowering",
496 );
497 }
498 }
499 TerminatorKind::UnwindResume => {
500 let bb = location.block;
501 if !self.body.basic_blocks[bb].is_cleanup {
502 self.fail(location, "Cannot `UnwindResume` from non-cleanup basic block")
503 }
504 if !self.can_unwind {
505 self.fail(location, "Cannot `UnwindResume` in a function that cannot unwind")
506 }
507 }
508 TerminatorKind::UnwindTerminate(_) => {
509 let bb = location.block;
510 if !self.body.basic_blocks[bb].is_cleanup {
511 self.fail(location, "Cannot `UnwindTerminate` from non-cleanup basic block")
512 }
513 }
514 TerminatorKind::Return => {
515 let bb = location.block;
516 if self.body.basic_blocks[bb].is_cleanup {
517 self.fail(location, "Cannot `Return` from cleanup basic block")
518 }
519 }
520 TerminatorKind::Unreachable => {}
521 }
522
523 self.super_terminator(terminator, location);
524 }
525
526 fn visit_source_scope(&mut self, scope: SourceScope) {
527 if self.body.source_scopes.get(scope).is_none() {
528 self.tcx.dcx().span_bug(
529 self.body.span,
530 format!(
531 "broken MIR in {:?} ({}):\ninvalid source scope {:?}",
532 self.body.source.instance, self.when, scope,
533 ),
534 );
535 }
536 }
537}
538
539pub(super) fn validate_types<'tcx>(
545 tcx: TyCtxt<'tcx>,
546 typing_env: ty::TypingEnv<'tcx>,
547 body: &Body<'tcx>,
548 caller_body: &Body<'tcx>,
549) -> Vec<(Location, String)> {
550 let mut type_checker = TypeChecker { body, caller_body, tcx, typing_env, failures: Vec::new() };
551 with_no_trimmed_paths!({
556 type_checker.visit_body(body);
557 });
558 type_checker.failures
559}
560
/// Collects type-level MIR validation failures (driven by `validate_types`).
struct TypeChecker<'a, 'tcx> {
    body: &'a Body<'tcx>,
    /// Body whose `coroutine_layout_raw` may be consulted when checking
    /// coroutine field projections; equals `body` when `run_pass` invokes
    /// `validate_types(tcx, typing_env, body, body)`.
    caller_body: &'a Body<'tcx>,
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    /// Accumulated `(location, message)` failures.
    failures: Vec<(Location, String)>,
}
568
impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
    /// Records a validation failure at `location`.
    fn fail(&mut self, location: Location, msg: impl Into<String>) {
        self.failures.push((location, msg.into()));
    }

    /// Returns whether a value of type `src` may be assigned to a place of
    /// type `dest`. Identical types always pass, anything involving opaque
    /// types is accepted, and otherwise the two types are related with a
    /// phase-dependent variance: invariant from runtime MIR onward, covariant
    /// before that.
    fn mir_assign_valid_types(&self, src: Ty<'tcx>, dest: Ty<'tcx>) -> bool {
        if src == dest {
            return true;
        }

        if (src, dest).has_opaque_types() {
            return true;
        }

        let variance = if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
            Variance::Invariant
        } else {
            Variance::Covariant
        };

        crate::util::relate_types(self.tcx, self.typing_env, variance, src, dest)
    }

    /// Returns whether `pred` holds (modulo regions) in this body's typing
    /// environment. Predicates mentioning opaque types are assumed to hold.
    fn predicate_must_hold_modulo_regions(
        &self,
        pred: impl Upcast<TyCtxt<'tcx>, ty::Predicate<'tcx>>,
    ) -> bool {
        let pred: ty::Predicate<'tcx> = pred.upcast(self.tcx);

        if pred.has_opaque_types() {
            return true;
        }

        // Build a fresh inference context and evaluate the obligation fully.
        let (infcx, param_env) = self.tcx.infer_ctxt().build_with_typing_env(self.typing_env);
        let ocx = ObligationCtxt::new(&infcx);
        ocx.register_obligation(Obligation::new(
            self.tcx,
            ObligationCause::dummy(),
            param_env,
            pred,
        ));
        ocx.evaluate_obligations_error_on_ambiguity().is_empty()
    }
}
630
631impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
632 fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
633 if self.tcx.sess.opts.unstable_opts.validate_mir
635 && self.body.phase < MirPhase::Runtime(RuntimePhase::Initial)
636 {
637 if let Operand::Copy(place) = operand {
639 let ty = place.ty(&self.body.local_decls, self.tcx).ty;
640
641 if !self.tcx.type_is_copy_modulo_regions(self.typing_env, ty) {
642 self.fail(location, format!("`Operand::Copy` with non-`Copy` type {ty}"));
643 }
644 }
645 }
646
647 self.super_operand(operand, location);
648 }
649
650 fn visit_projection_elem(
651 &mut self,
652 place_ref: PlaceRef<'tcx>,
653 elem: PlaceElem<'tcx>,
654 context: PlaceContext,
655 location: Location,
656 ) {
657 match elem {
658 ProjectionElem::Deref
659 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) =>
660 {
661 let base_ty = place_ref.ty(&self.body.local_decls, self.tcx).ty;
662
663 if base_ty.is_box() {
664 self.fail(location, format!("{base_ty} dereferenced after ElaborateBoxDerefs"))
665 }
666 }
667 ProjectionElem::Field(f, ty) => {
668 let parent_ty = place_ref.ty(&self.body.local_decls, self.tcx);
669 let fail_out_of_bounds = |this: &mut Self, location| {
670 this.fail(location, format!("Out of bounds field {f:?} for {parent_ty:?}"));
671 };
672 let check_equal = |this: &mut Self, location, f_ty| {
673 if !this.mir_assign_valid_types(ty, f_ty) {
674 this.fail(
675 location,
676 format!(
677 "Field projection `{place_ref:?}.{f:?}` specified type `{ty}`, but actual type is `{f_ty}`"
678 )
679 )
680 }
681 };
682
683 let kind = match parent_ty.ty.kind() {
684 &ty::Alias(ty::Opaque, ty::AliasTy { def_id, args, .. }) => {
685 self.tcx.type_of(def_id).instantiate(self.tcx, args).kind()
686 }
687 kind => kind,
688 };
689
690 match kind {
691 ty::Tuple(fields) => {
692 let Some(f_ty) = fields.get(f.as_usize()) else {
693 fail_out_of_bounds(self, location);
694 return;
695 };
696 check_equal(self, location, *f_ty);
697 }
698 ty::Pat(base, _) => check_equal(self, location, *base),
700 ty::Adt(adt_def, args) => {
701 if self.tcx.is_lang_item(adt_def.did(), LangItem::DynMetadata) {
703 self.fail(
704 location,
705 format!(
706 "You can't project to field {f:?} of `DynMetadata` because \
707 layout is weird and thinks it doesn't have fields."
708 ),
709 );
710 }
711
712 if adt_def.repr().simd() {
713 self.fail(
714 location,
715 format!(
716 "Projecting into SIMD type {adt_def:?} is banned by MCP#838"
717 ),
718 );
719 }
720
721 let var = parent_ty.variant_index.unwrap_or(FIRST_VARIANT);
722 let Some(field) = adt_def.variant(var).fields.get(f) else {
723 fail_out_of_bounds(self, location);
724 return;
725 };
726 check_equal(self, location, field.ty(self.tcx, args));
727 }
728 ty::Closure(_, args) => {
729 let args = args.as_closure();
730 let Some(&f_ty) = args.upvar_tys().get(f.as_usize()) else {
731 fail_out_of_bounds(self, location);
732 return;
733 };
734 check_equal(self, location, f_ty);
735 }
736 ty::CoroutineClosure(_, args) => {
737 let args = args.as_coroutine_closure();
738 let Some(&f_ty) = args.upvar_tys().get(f.as_usize()) else {
739 fail_out_of_bounds(self, location);
740 return;
741 };
742 check_equal(self, location, f_ty);
743 }
744 &ty::Coroutine(def_id, args) => {
745 let f_ty = if let Some(var) = parent_ty.variant_index {
746 let layout = if def_id == self.caller_body.source.def_id() {
752 self.caller_body
753 .coroutine_layout_raw()
754 .or_else(|| self.tcx.coroutine_layout(def_id, args).ok())
755 } else if self.tcx.needs_coroutine_by_move_body_def_id(def_id)
756 && let ty::ClosureKind::FnOnce =
757 args.as_coroutine().kind_ty().to_opt_closure_kind().unwrap()
758 && self.caller_body.source.def_id()
759 == self.tcx.coroutine_by_move_body_def_id(def_id)
760 {
761 self.caller_body.coroutine_layout_raw()
763 } else {
764 self.tcx.coroutine_layout(def_id, args).ok()
765 };
766
767 let Some(layout) = layout else {
768 self.fail(
769 location,
770 format!("No coroutine layout for {parent_ty:?}"),
771 );
772 return;
773 };
774
775 let Some(&local) = layout.variant_fields[var].get(f) else {
776 fail_out_of_bounds(self, location);
777 return;
778 };
779
780 let Some(f_ty) = layout.field_tys.get(local) else {
781 self.fail(
782 location,
783 format!("Out of bounds local {local:?} for {parent_ty:?}"),
784 );
785 return;
786 };
787
788 ty::EarlyBinder::bind(f_ty.ty).instantiate(self.tcx, args)
789 } else {
790 let Some(&f_ty) = args.as_coroutine().prefix_tys().get(f.index())
791 else {
792 fail_out_of_bounds(self, location);
793 return;
794 };
795
796 f_ty
797 };
798
799 check_equal(self, location, f_ty);
800 }
801 _ => {
802 self.fail(location, format!("{:?} does not have fields", parent_ty.ty));
803 }
804 }
805 }
806 ProjectionElem::Index(index) => {
807 let indexed_ty = place_ref.ty(&self.body.local_decls, self.tcx).ty;
808 match indexed_ty.kind() {
809 ty::Array(_, _) | ty::Slice(_) => {}
810 _ => self.fail(location, format!("{indexed_ty:?} cannot be indexed")),
811 }
812
813 let index_ty = self.body.local_decls[index].ty;
814 if index_ty != self.tcx.types.usize {
815 self.fail(location, format!("bad index ({index_ty} != usize)"))
816 }
817 }
818 ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
819 let indexed_ty = place_ref.ty(&self.body.local_decls, self.tcx).ty;
820 match indexed_ty.kind() {
821 ty::Array(_, _) => {
822 if from_end {
823 self.fail(location, "arrays should not be indexed from end");
824 }
825 }
826 ty::Slice(_) => {}
827 _ => self.fail(location, format!("{indexed_ty:?} cannot be indexed")),
828 }
829
830 if from_end {
831 if offset > min_length {
832 self.fail(
833 location,
834 format!(
835 "constant index with offset -{offset} out of bounds of min length {min_length}"
836 ),
837 );
838 }
839 } else {
840 if offset >= min_length {
841 self.fail(
842 location,
843 format!(
844 "constant index with offset {offset} out of bounds of min length {min_length}"
845 ),
846 );
847 }
848 }
849 }
850 ProjectionElem::Subslice { from, to, from_end } => {
851 let indexed_ty = place_ref.ty(&self.body.local_decls, self.tcx).ty;
852 match indexed_ty.kind() {
853 ty::Array(_, _) => {
854 if from_end {
855 self.fail(location, "arrays should not be subsliced from end");
856 }
857 }
858 ty::Slice(_) => {
859 if !from_end {
860 self.fail(location, "slices should be subsliced from end");
861 }
862 }
863 _ => self.fail(location, format!("{indexed_ty:?} cannot be indexed")),
864 }
865
866 if !from_end && from > to {
867 self.fail(location, "backwards subslice {from}..{to}");
868 }
869 }
870 ProjectionElem::OpaqueCast(ty)
871 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) =>
872 {
873 self.fail(
874 location,
875 format!("explicit opaque type cast to `{ty}` after `PostAnalysisNormalize`"),
876 )
877 }
878 ProjectionElem::UnwrapUnsafeBinder(unwrapped_ty) => {
879 let binder_ty = place_ref.ty(&self.body.local_decls, self.tcx);
880 let ty::UnsafeBinder(binder_ty) = *binder_ty.ty.kind() else {
881 self.fail(
882 location,
883 format!("WrapUnsafeBinder does not produce a ty::UnsafeBinder"),
884 );
885 return;
886 };
887 let binder_inner_ty = self.tcx.instantiate_bound_regions_with_erased(*binder_ty);
888 if !self.mir_assign_valid_types(unwrapped_ty, binder_inner_ty) {
889 self.fail(
890 location,
891 format!(
892 "Cannot unwrap unsafe binder {binder_ty:?} into type {unwrapped_ty}"
893 ),
894 );
895 }
896 }
897 _ => {}
898 }
899 self.super_projection_elem(place_ref, elem, context, location);
900 }
901
    /// Validates debuginfo entries: composite fragments must be non-empty runs
    /// of `Field` projections into a non-union, non-enum type, and place-based
    /// debuginfo may only use projections usable in debuginfo.
    fn visit_var_debug_info(&mut self, debuginfo: &VarDebugInfo<'tcx>) {
        if let Some(box VarDebugInfoFragment { ty, ref projection }) = debuginfo.composite {
            if ty.is_union() || ty.is_enum() {
                self.fail(
                    START_BLOCK.start_location(),
                    format!("invalid type {ty} in debuginfo for {:?}", debuginfo.name),
                );
            }
            if projection.is_empty() {
                self.fail(
                    START_BLOCK.start_location(),
                    format!("invalid empty projection in debuginfo for {:?}", debuginfo.name),
                );
            }
            if projection.iter().any(|p| !matches!(p, PlaceElem::Field(..))) {
                self.fail(
                    START_BLOCK.start_location(),
                    format!(
                        "illegal projection {:?} in debuginfo for {:?}",
                        projection, debuginfo.name
                    ),
                );
            }
        }
        match debuginfo.value {
            VarDebugInfoContents::Const(_) => {}
            VarDebugInfoContents::Place(place) => {
                if place.projection.iter().any(|p| !p.can_use_in_debuginfo()) {
                    self.fail(
                        START_BLOCK.start_location(),
                        format!("illegal place {:?} in debuginfo for {:?}", place, debuginfo.name),
                    );
                }
            }
        }
        self.super_var_debug_info(debuginfo);
    }
939
    /// Validates whole-place invariants: deref position, `Downcast` followed
    /// by `Field`, `DerefTemp` usage, and subslice ordering.
    fn visit_place(&mut self, place: &Place<'tcx>, cntxt: PlaceContext, location: Location) {
        // Result unused — presumably computing the type exercises internal
        // consistency checks on the place (TODO confirm against `Place::ty`).
        let _ = place.ty(&self.body.local_decls, self.tcx);

        // In runtime MIR, `Deref` is only permitted as the first projection;
        // debuginfo places are exempt.
        if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial)
            && place.projection.len() > 1
            && cntxt != PlaceContext::NonUse(NonUseContext::VarDebugInfo)
            && place.projection[1..].contains(&ProjectionElem::Deref)
        {
            self.fail(
                location,
                format!("place {place:?} has deref as a later projection (it is only permitted as the first projection)"),
            );
        }

        // Every `Downcast` must be immediately followed by a `Field`. Note the
        // inner `next()` also consumes the element after the downcast.
        let mut projections_iter = place.projection.iter();
        while let Some(proj) = projections_iter.next() {
            if matches!(proj, ProjectionElem::Downcast(..)) {
                if !matches!(projections_iter.next(), Some(ProjectionElem::Field(..))) {
                    self.fail(
                        location,
                        format!(
                            "place {place:?} has `Downcast` projection not followed by `Field`"
                        ),
                    );
                }
            }
        }

        // A `DerefTemp` local may only be used dereferenced, or assigned to
        // directly as a whole local.
        if let ClearCrossCrate::Set(box LocalInfo::DerefTemp) =
            self.body.local_decls[place.local].local_info
            && !place.is_indirect_first_projection()
        {
            if cntxt != PlaceContext::MutatingUse(MutatingUseContext::Store)
                || place.as_local().is_none()
            {
                self.fail(
                    location,
                    format!("`DerefTemp` locals must only be dereferenced or directly assigned to"),
                );
            }
        }

        // Before runtime MIR, a `Subslice` may not be followed by another
        // `Subslice` or a `ConstantIndex`.
        if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial)
            && let Some(i) = place
                .projection
                .iter()
                .position(|elem| matches!(elem, ProjectionElem::Subslice { .. }))
            && let Some(tail) = place.projection.get(i + 1..)
            && tail.iter().any(|elem| {
                matches!(
                    elem,
                    ProjectionElem::ConstantIndex { .. } | ProjectionElem::Subslice { .. }
                )
            })
        {
            self.fail(
                location,
                format!("place {place:?} has `ConstantIndex` or `Subslice` after `Subslice`"),
            );
        }

        self.super_place(place, cntxt, location);
    }
1005
1006 fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
1007 macro_rules! check_kinds {
1008 ($t:expr, $text:literal, $typat:pat) => {
1009 if !matches!(($t).kind(), $typat) {
1010 self.fail(location, format!($text, $t));
1011 }
1012 };
1013 }
1014 match rvalue {
1015 Rvalue::Use(_) => {}
1016 Rvalue::CopyForDeref(_) => {
1017 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
1018 self.fail(location, "`CopyForDeref` should have been removed in runtime MIR");
1019 }
1020 }
1021 Rvalue::Aggregate(kind, fields) => match **kind {
1022 AggregateKind::Tuple => {}
1023 AggregateKind::Array(dest) => {
1024 for src in fields {
1025 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
1026 self.fail(location, "array field has the wrong type");
1027 }
1028 }
1029 }
1030 AggregateKind::Adt(def_id, idx, args, _, Some(field)) => {
1031 let adt_def = self.tcx.adt_def(def_id);
1032 assert!(adt_def.is_union());
1033 assert_eq!(idx, FIRST_VARIANT);
1034 let dest_ty = self.tcx.normalize_erasing_regions(
1035 self.typing_env,
1036 adt_def.non_enum_variant().fields[field].ty(self.tcx, args),
1037 );
1038 if let [field] = fields.raw.as_slice() {
1039 let src_ty = field.ty(self.body, self.tcx);
1040 if !self.mir_assign_valid_types(src_ty, dest_ty) {
1041 self.fail(location, "union field has the wrong type");
1042 }
1043 } else {
1044 self.fail(location, "unions should have one initialized field");
1045 }
1046 }
1047 AggregateKind::Adt(def_id, idx, args, _, None) => {
1048 let adt_def = self.tcx.adt_def(def_id);
1049 assert!(!adt_def.is_union());
1050 let variant = &adt_def.variants()[idx];
1051 if variant.fields.len() != fields.len() {
1052 self.fail(location, format!(
1053 "adt {def_id:?} has the wrong number of initialized fields, expected {}, found {}",
1054 fields.len(),
1055 variant.fields.len(),
1056 ));
1057 }
1058 for (src, dest) in std::iter::zip(fields, &variant.fields) {
1059 let dest_ty = self
1060 .tcx
1061 .normalize_erasing_regions(self.typing_env, dest.ty(self.tcx, args));
1062 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest_ty) {
1063 self.fail(location, "adt field has the wrong type");
1064 }
1065 }
1066 }
1067 AggregateKind::Closure(_, args) => {
1068 let upvars = args.as_closure().upvar_tys();
1069 if upvars.len() != fields.len() {
1070 self.fail(location, "closure has the wrong number of initialized fields");
1071 }
1072 for (src, dest) in std::iter::zip(fields, upvars) {
1073 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
1074 self.fail(location, "closure field has the wrong type");
1075 }
1076 }
1077 }
1078 AggregateKind::Coroutine(_, args) => {
1079 let upvars = args.as_coroutine().upvar_tys();
1080 if upvars.len() != fields.len() {
1081 self.fail(location, "coroutine has the wrong number of initialized fields");
1082 }
1083 for (src, dest) in std::iter::zip(fields, upvars) {
1084 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
1085 self.fail(location, "coroutine field has the wrong type");
1086 }
1087 }
1088 }
1089 AggregateKind::CoroutineClosure(_, args) => {
1090 let upvars = args.as_coroutine_closure().upvar_tys();
1091 if upvars.len() != fields.len() {
1092 self.fail(
1093 location,
1094 "coroutine-closure has the wrong number of initialized fields",
1095 );
1096 }
1097 for (src, dest) in std::iter::zip(fields, upvars) {
1098 if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
1099 self.fail(location, "coroutine-closure field has the wrong type");
1100 }
1101 }
1102 }
1103 AggregateKind::RawPtr(pointee_ty, mutability) => {
1104 if !matches!(self.body.phase, MirPhase::Runtime(_)) {
1105 self.fail(location, "RawPtr should be in runtime MIR only");
1109 }
1110
1111 if let [data_ptr, metadata] = fields.raw.as_slice() {
1112 let data_ptr_ty = data_ptr.ty(self.body, self.tcx);
1113 let metadata_ty = metadata.ty(self.body, self.tcx);
1114 if let ty::RawPtr(in_pointee, in_mut) = data_ptr_ty.kind() {
1115 if *in_mut != mutability {
1116 self.fail(location, "input and output mutability must match");
1117 }
1118
1119 if !in_pointee.is_sized(self.tcx, self.typing_env) {
1121 self.fail(location, "input pointer must be thin");
1122 }
1123 } else {
1124 self.fail(
1125 location,
1126 "first operand to raw pointer aggregate must be a raw pointer",
1127 );
1128 }
1129
1130 if pointee_ty.is_slice() {
1132 if !self.mir_assign_valid_types(metadata_ty, self.tcx.types.usize) {
1133 self.fail(location, "slice metadata must be usize");
1134 }
1135 } else if pointee_ty.is_sized(self.tcx, self.typing_env) {
1136 if metadata_ty != self.tcx.types.unit {
1137 self.fail(location, "metadata for pointer-to-thin must be unit");
1138 }
1139 }
1140 } else {
1141 self.fail(location, "raw pointer aggregate must have 2 fields");
1142 }
1143 }
1144 },
1145 Rvalue::Ref(_, BorrowKind::Fake(_), _) => {
1146 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
1147 self.fail(
1148 location,
1149 "`Assign` statement with a `Fake` borrow should have been removed in runtime MIR",
1150 );
1151 }
1152 }
1153 Rvalue::Ref(..) => {}
1154 Rvalue::BinaryOp(op, vals) => {
1155 use BinOp::*;
1156 let a = vals.0.ty(&self.body.local_decls, self.tcx);
1157 let b = vals.1.ty(&self.body.local_decls, self.tcx);
1158 if crate::util::binop_right_homogeneous(*op) {
1159 if let Eq | Lt | Le | Ne | Ge | Gt = op {
1160 if !self.mir_assign_valid_types(a, b) {
1162 self.fail(
1163 location,
1164 format!("Cannot {op:?} compare incompatible types {a} and {b}"),
1165 );
1166 }
1167 } else if a != b {
1168 self.fail(
1169 location,
1170 format!("Cannot perform binary op {op:?} on unequal types {a} and {b}"),
1171 );
1172 }
1173 }
1174
1175 match op {
1176 Offset => {
1177 check_kinds!(a, "Cannot offset non-pointer type {:?}", ty::RawPtr(..));
1178 if b != self.tcx.types.isize && b != self.tcx.types.usize {
1179 self.fail(location, format!("Cannot offset by non-isize type {b}"));
1180 }
1181 }
1182 Eq | Lt | Le | Ne | Ge | Gt => {
1183 for x in [a, b] {
1184 check_kinds!(
1185 x,
1186 "Cannot {op:?} compare type {:?}",
1187 ty::Bool
1188 | ty::Char
1189 | ty::Int(..)
1190 | ty::Uint(..)
1191 | ty::Float(..)
1192 | ty::RawPtr(..)
1193 | ty::FnPtr(..)
1194 )
1195 }
1196 }
1197 Cmp => {
1198 for x in [a, b] {
1199 check_kinds!(
1200 x,
1201 "Cannot three-way compare non-integer type {:?}",
1202 ty::Char | ty::Uint(..) | ty::Int(..)
1203 )
1204 }
1205 }
1206 AddUnchecked | AddWithOverflow | SubUnchecked | SubWithOverflow
1207 | MulUnchecked | MulWithOverflow | Shl | ShlUnchecked | Shr | ShrUnchecked => {
1208 for x in [a, b] {
1209 check_kinds!(
1210 x,
1211 "Cannot {op:?} non-integer type {:?}",
1212 ty::Uint(..) | ty::Int(..)
1213 )
1214 }
1215 }
1216 BitAnd | BitOr | BitXor => {
1217 for x in [a, b] {
1218 check_kinds!(
1219 x,
1220 "Cannot perform bitwise op {op:?} on type {:?}",
1221 ty::Uint(..) | ty::Int(..) | ty::Bool
1222 )
1223 }
1224 }
1225 Add | Sub | Mul | Div | Rem => {
1226 for x in [a, b] {
1227 check_kinds!(
1228 x,
1229 "Cannot perform arithmetic {op:?} on type {:?}",
1230 ty::Uint(..) | ty::Int(..) | ty::Float(..)
1231 )
1232 }
1233 }
1234 }
1235 }
1236 Rvalue::UnaryOp(op, operand) => {
1237 let a = operand.ty(&self.body.local_decls, self.tcx);
1238 match op {
1239 UnOp::Neg => {
1240 check_kinds!(a, "Cannot negate type {:?}", ty::Int(..) | ty::Float(..))
1241 }
1242 UnOp::Not => {
1243 check_kinds!(
1244 a,
1245 "Cannot binary not type {:?}",
1246 ty::Int(..) | ty::Uint(..) | ty::Bool
1247 );
1248 }
1249 UnOp::PtrMetadata => {
1250 check_kinds!(
1251 a,
1252 "Cannot PtrMetadata non-pointer non-reference type {:?}",
1253 ty::RawPtr(..) | ty::Ref(..)
1254 );
1255 }
1256 }
1257 }
1258 Rvalue::ShallowInitBox(operand, _) => {
1259 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
1260 self.fail(location, format!("ShallowInitBox after ElaborateBoxDerefs"))
1261 }
1262
1263 let a = operand.ty(&self.body.local_decls, self.tcx);
1264 check_kinds!(a, "Cannot shallow init type {:?}", ty::RawPtr(..));
1265 }
1266 Rvalue::Cast(kind, operand, target_type) => {
1267 let op_ty = operand.ty(self.body, self.tcx);
1268 match kind {
1269 CastKind::PointerWithExposedProvenance | CastKind::PointerExposeProvenance => {}
1271 CastKind::PointerCoercion(PointerCoercion::ReifyFnPointer, _) => {
1272 check_kinds!(
1274 op_ty,
1275 "CastKind::{kind:?} input must be a fn item, not {:?}",
1276 ty::FnDef(..)
1277 );
1278 check_kinds!(
1279 target_type,
1280 "CastKind::{kind:?} output must be a fn pointer, not {:?}",
1281 ty::FnPtr(..)
1282 );
1283 }
1284 CastKind::PointerCoercion(PointerCoercion::UnsafeFnPointer, _) => {
1285 check_kinds!(
1287 op_ty,
1288 "CastKind::{kind:?} input must be a fn pointer, not {:?}",
1289 ty::FnPtr(..)
1290 );
1291 check_kinds!(
1292 target_type,
1293 "CastKind::{kind:?} output must be a fn pointer, not {:?}",
1294 ty::FnPtr(..)
1295 );
1296 }
1297 CastKind::PointerCoercion(PointerCoercion::ClosureFnPointer(..), _) => {
1298 check_kinds!(
1300 op_ty,
1301 "CastKind::{kind:?} input must be a closure, not {:?}",
1302 ty::Closure(..)
1303 );
1304 check_kinds!(
1305 target_type,
1306 "CastKind::{kind:?} output must be a fn pointer, not {:?}",
1307 ty::FnPtr(..)
1308 );
1309 }
1310 CastKind::PointerCoercion(PointerCoercion::MutToConstPointer, _) => {
1311 check_kinds!(
1313 op_ty,
1314 "CastKind::{kind:?} input must be a raw mut pointer, not {:?}",
1315 ty::RawPtr(_, Mutability::Mut)
1316 );
1317 check_kinds!(
1318 target_type,
1319 "CastKind::{kind:?} output must be a raw const pointer, not {:?}",
1320 ty::RawPtr(_, Mutability::Not)
1321 );
1322 if self.body.phase >= MirPhase::Analysis(AnalysisPhase::PostCleanup) {
1323 self.fail(location, format!("After borrowck, MIR disallows {kind:?}"));
1324 }
1325 }
1326 CastKind::PointerCoercion(PointerCoercion::ArrayToPointer, _) => {
1327 check_kinds!(
1329 op_ty,
1330 "CastKind::{kind:?} input must be a raw pointer, not {:?}",
1331 ty::RawPtr(..)
1332 );
1333 check_kinds!(
1334 target_type,
1335 "CastKind::{kind:?} output must be a raw pointer, not {:?}",
1336 ty::RawPtr(..)
1337 );
1338 if self.body.phase >= MirPhase::Analysis(AnalysisPhase::PostCleanup) {
1339 self.fail(location, format!("After borrowck, MIR disallows {kind:?}"));
1340 }
1341 }
1342 CastKind::PointerCoercion(PointerCoercion::Unsize, _) => {
1343 if !self.predicate_must_hold_modulo_regions(ty::TraitRef::new(
1346 self.tcx,
1347 self.tcx.require_lang_item(
1348 LangItem::CoerceUnsized,
1349 self.body.source_info(location).span,
1350 ),
1351 [op_ty, *target_type],
1352 )) {
1353 self.fail(location, format!("Unsize coercion, but `{op_ty}` isn't coercible to `{target_type}`"));
1354 }
1355 }
1356 CastKind::IntToInt | CastKind::IntToFloat => {
1357 let input_valid = op_ty.is_integral() || op_ty.is_char() || op_ty.is_bool();
1358 let target_valid = target_type.is_numeric() || target_type.is_char();
1359 if !input_valid || !target_valid {
1360 self.fail(
1361 location,
1362 format!("Wrong cast kind {kind:?} for the type {op_ty}"),
1363 );
1364 }
1365 }
1366 CastKind::FnPtrToPtr => {
1367 check_kinds!(
1368 op_ty,
1369 "CastKind::{kind:?} input must be a fn pointer, not {:?}",
1370 ty::FnPtr(..)
1371 );
1372 check_kinds!(
1373 target_type,
1374 "CastKind::{kind:?} output must be a raw pointer, not {:?}",
1375 ty::RawPtr(..)
1376 );
1377 }
1378 CastKind::PtrToPtr => {
1379 check_kinds!(
1380 op_ty,
1381 "CastKind::{kind:?} input must be a raw pointer, not {:?}",
1382 ty::RawPtr(..)
1383 );
1384 check_kinds!(
1385 target_type,
1386 "CastKind::{kind:?} output must be a raw pointer, not {:?}",
1387 ty::RawPtr(..)
1388 );
1389 }
1390 CastKind::FloatToFloat | CastKind::FloatToInt => {
1391 if !op_ty.is_floating_point() || !target_type.is_numeric() {
1392 self.fail(
1393 location,
1394 format!(
1395 "Trying to cast non 'Float' as {kind:?} into {target_type:?}"
1396 ),
1397 );
1398 }
1399 }
1400 CastKind::Transmute => {
1401 if !self
1405 .tcx
1406 .normalize_erasing_regions(self.typing_env, op_ty)
1407 .is_sized(self.tcx, self.typing_env)
1408 {
1409 self.fail(
1410 location,
1411 format!("Cannot transmute from non-`Sized` type {op_ty}"),
1412 );
1413 }
1414 if !self
1415 .tcx
1416 .normalize_erasing_regions(self.typing_env, *target_type)
1417 .is_sized(self.tcx, self.typing_env)
1418 {
1419 self.fail(
1420 location,
1421 format!("Cannot transmute to non-`Sized` type {target_type:?}"),
1422 );
1423 }
1424 }
1425 CastKind::Subtype => {
1426 if !util::sub_types(self.tcx, self.typing_env, op_ty, *target_type) {
1427 self.fail(
1428 location,
1429 format!("Failed subtyping {op_ty} and {target_type}"),
1430 )
1431 }
1432 }
1433 }
1434 }
1435 Rvalue::NullaryOp(NullOp::OffsetOf(indices), container) => {
1436 let fail_out_of_bounds = |this: &mut Self, location, field, ty| {
1437 this.fail(location, format!("Out of bounds field {field:?} for {ty}"));
1438 };
1439
1440 let mut current_ty = *container;
1441
1442 for (variant, field) in indices.iter() {
1443 match current_ty.kind() {
1444 ty::Tuple(fields) => {
1445 if variant != FIRST_VARIANT {
1446 self.fail(
1447 location,
1448 format!("tried to get variant {variant:?} of tuple"),
1449 );
1450 return;
1451 }
1452 let Some(&f_ty) = fields.get(field.as_usize()) else {
1453 fail_out_of_bounds(self, location, field, current_ty);
1454 return;
1455 };
1456
1457 current_ty = self.tcx.normalize_erasing_regions(self.typing_env, f_ty);
1458 }
1459 ty::Adt(adt_def, args) => {
1460 let Some(field) = adt_def.variant(variant).fields.get(field) else {
1461 fail_out_of_bounds(self, location, field, current_ty);
1462 return;
1463 };
1464
1465 let f_ty = field.ty(self.tcx, args);
1466 current_ty = self.tcx.normalize_erasing_regions(self.typing_env, f_ty);
1467 }
1468 _ => {
1469 self.fail(
1470 location,
1471 format!("Cannot get offset ({variant:?}, {field:?}) from type {current_ty}"),
1472 );
1473 return;
1474 }
1475 }
1476 }
1477 }
1478 Rvalue::Repeat(_, _)
1479 | Rvalue::ThreadLocalRef(_)
1480 | Rvalue::RawPtr(_, _)
1481 | Rvalue::NullaryOp(
1482 NullOp::SizeOf | NullOp::AlignOf | NullOp::UbChecks | NullOp::ContractChecks,
1483 _,
1484 )
1485 | Rvalue::Discriminant(_) => {}
1486
1487 Rvalue::WrapUnsafeBinder(op, ty) => {
1488 let unwrapped_ty = op.ty(self.body, self.tcx);
1489 let ty::UnsafeBinder(binder_ty) = *ty.kind() else {
1490 self.fail(
1491 location,
1492 format!("WrapUnsafeBinder does not produce a ty::UnsafeBinder"),
1493 );
1494 return;
1495 };
1496 let binder_inner_ty = self.tcx.instantiate_bound_regions_with_erased(*binder_ty);
1497 if !self.mir_assign_valid_types(unwrapped_ty, binder_inner_ty) {
1498 self.fail(
1499 location,
1500 format!("Cannot wrap {unwrapped_ty} into unsafe binder {binder_ty:?}"),
1501 );
1502 }
1503 }
1504 }
1505 self.super_rvalue(rvalue, location);
1506 }
1507
1508 fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
1509 match &statement.kind {
1510 StatementKind::Assign(box (dest, rvalue)) => {
1511 let left_ty = dest.ty(&self.body.local_decls, self.tcx).ty;
1513 let right_ty = rvalue.ty(&self.body.local_decls, self.tcx);
1514
1515 if !self.mir_assign_valid_types(right_ty, left_ty) {
1516 self.fail(
1517 location,
1518 format!(
1519 "encountered `{:?}` with incompatible types:\n\
1520 left-hand side has type: {}\n\
1521 right-hand side has type: {}",
1522 statement.kind, left_ty, right_ty,
1523 ),
1524 );
1525 }
1526
1527 if let Some(local) = dest.as_local()
1528 && let ClearCrossCrate::Set(box LocalInfo::DerefTemp) =
1529 self.body.local_decls[local].local_info
1530 && !matches!(rvalue, Rvalue::CopyForDeref(_))
1531 {
1532 self.fail(location, "assignment to a `DerefTemp` must use `CopyForDeref`")
1533 }
1534 }
1535 StatementKind::AscribeUserType(..) => {
1536 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
1537 self.fail(
1538 location,
1539 "`AscribeUserType` should have been removed after drop lowering phase",
1540 );
1541 }
1542 }
1543 StatementKind::FakeRead(..) => {
1544 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
1545 self.fail(
1546 location,
1547 "`FakeRead` should have been removed after drop lowering phase",
1548 );
1549 }
1550 }
1551 StatementKind::Intrinsic(box NonDivergingIntrinsic::Assume(op)) => {
1552 let ty = op.ty(&self.body.local_decls, self.tcx);
1553 if !ty.is_bool() {
1554 self.fail(
1555 location,
1556 format!("`assume` argument must be `bool`, but got: `{ty}`"),
1557 );
1558 }
1559 }
1560 StatementKind::Intrinsic(box NonDivergingIntrinsic::CopyNonOverlapping(
1561 CopyNonOverlapping { src, dst, count },
1562 )) => {
1563 let src_ty = src.ty(&self.body.local_decls, self.tcx);
1564 let op_src_ty = if let Some(src_deref) = src_ty.builtin_deref(true) {
1565 src_deref
1566 } else {
1567 self.fail(
1568 location,
1569 format!("Expected src to be ptr in copy_nonoverlapping, got: {src_ty}"),
1570 );
1571 return;
1572 };
1573 let dst_ty = dst.ty(&self.body.local_decls, self.tcx);
1574 let op_dst_ty = if let Some(dst_deref) = dst_ty.builtin_deref(true) {
1575 dst_deref
1576 } else {
1577 self.fail(
1578 location,
1579 format!("Expected dst to be ptr in copy_nonoverlapping, got: {dst_ty}"),
1580 );
1581 return;
1582 };
1583 if !self.mir_assign_valid_types(op_src_ty, op_dst_ty) {
1586 self.fail(location, format!("bad arg ({op_src_ty} != {op_dst_ty})"));
1587 }
1588
1589 let op_cnt_ty = count.ty(&self.body.local_decls, self.tcx);
1590 if op_cnt_ty != self.tcx.types.usize {
1591 self.fail(location, format!("bad arg ({op_cnt_ty} != usize)"))
1592 }
1593 }
1594 StatementKind::SetDiscriminant { place, .. } => {
1595 if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial) {
1596 self.fail(location, "`SetDiscriminant`is not allowed until deaggregation");
1597 }
1598 let pty = place.ty(&self.body.local_decls, self.tcx).ty;
1599 if !matches!(
1600 pty.kind(),
1601 ty::Adt(..) | ty::Coroutine(..) | ty::Alias(ty::Opaque, ..)
1602 ) {
1603 self.fail(
1604 location,
1605 format!(
1606 "`SetDiscriminant` is only allowed on ADTs and coroutines, not {pty}"
1607 ),
1608 );
1609 }
1610 }
1611 StatementKind::Retag(kind, _) => {
1612 if matches!(kind, RetagKind::TwoPhase) {
1616 self.fail(location, format!("explicit `{kind:?}` is forbidden"));
1617 }
1618 }
1619 StatementKind::StorageLive(_)
1620 | StatementKind::StorageDead(_)
1621 | StatementKind::Coverage(_)
1622 | StatementKind::ConstEvalCounter
1623 | StatementKind::PlaceMention(..)
1624 | StatementKind::BackwardIncompatibleDropHint { .. }
1625 | StatementKind::Nop => {}
1626 }
1627
1628 self.super_statement(statement, location);
1629 }
1630
1631 fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
1632 match &terminator.kind {
1633 TerminatorKind::SwitchInt { targets, discr } => {
1634 let switch_ty = discr.ty(&self.body.local_decls, self.tcx);
1635
1636 let target_width = self.tcx.sess.target.pointer_width;
1637
1638 let size = Size::from_bits(match switch_ty.kind() {
1639 ty::Uint(uint) => uint.normalize(target_width).bit_width().unwrap(),
1640 ty::Int(int) => int.normalize(target_width).bit_width().unwrap(),
1641 ty::Char => 32,
1642 ty::Bool => 1,
1643 other => bug!("unhandled type: {:?}", other),
1644 });
1645
1646 for (value, _) in targets.iter() {
1647 if ScalarInt::try_from_uint(value, size).is_none() {
1648 self.fail(
1649 location,
1650 format!("the value {value:#x} is not a proper {switch_ty}"),
1651 )
1652 }
1653 }
1654 }
1655 TerminatorKind::Call { func, .. } | TerminatorKind::TailCall { func, .. } => {
1656 let func_ty = func.ty(&self.body.local_decls, self.tcx);
1657 match func_ty.kind() {
1658 ty::FnPtr(..) | ty::FnDef(..) => {}
1659 _ => self.fail(
1660 location,
1661 format!(
1662 "encountered non-callable type {func_ty} in `{}` terminator",
1663 terminator.kind.name()
1664 ),
1665 ),
1666 }
1667
1668 if let TerminatorKind::TailCall { .. } = terminator.kind {
1669 }
1672 }
1673 TerminatorKind::Assert { cond, .. } => {
1674 let cond_ty = cond.ty(&self.body.local_decls, self.tcx);
1675 if cond_ty != self.tcx.types.bool {
1676 self.fail(
1677 location,
1678 format!(
1679 "encountered non-boolean condition of type {cond_ty} in `Assert` terminator"
1680 ),
1681 );
1682 }
1683 }
1684 TerminatorKind::Goto { .. }
1685 | TerminatorKind::Drop { .. }
1686 | TerminatorKind::Yield { .. }
1687 | TerminatorKind::FalseEdge { .. }
1688 | TerminatorKind::FalseUnwind { .. }
1689 | TerminatorKind::InlineAsm { .. }
1690 | TerminatorKind::CoroutineDrop
1691 | TerminatorKind::UnwindResume
1692 | TerminatorKind::UnwindTerminate(_)
1693 | TerminatorKind::Return
1694 | TerminatorKind::Unreachable => {}
1695 }
1696
1697 self.super_terminator(terminator, location);
1698 }
1699
1700 fn visit_local_decl(&mut self, local: Local, local_decl: &LocalDecl<'tcx>) {
1701 if let ClearCrossCrate::Set(box LocalInfo::DerefTemp) = local_decl.local_info {
1702 if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
1703 self.fail(
1704 START_BLOCK.start_location(),
1705 "`DerefTemp` should have been removed in runtime MIR",
1706 );
1707 } else if local_decl.ty.builtin_deref(true).is_none() {
1708 self.fail(
1709 START_BLOCK.start_location(),
1710 "`DerefTemp` should only be used for dereferenceable types",
1711 )
1712 }
1713 }
1714
1715 self.super_local_decl(local, local_decl);
1716 }
1717}
1718
1719pub(super) fn validate_debuginfos<'tcx>(body: &Body<'tcx>) -> Vec<(Location, String)> {
1720 let mut debuginfo_checker =
1721 DebuginfoChecker { debuginfo_locals: debuginfo_locals(body), failures: Vec::new() };
1722 debuginfo_checker.visit_body(body);
1723 debuginfo_checker.failures
1724}
1725
/// Checks that statement-level debuginfo only mentions locals that are
/// themselves tracked as debuginfo locals.
struct DebuginfoChecker {
    /// Set of locals that appear in the body's debuginfo.
    debuginfo_locals: DenseBitSet<Local>,
    /// Accumulated validation failures, each with the location it was found at.
    failures: Vec<(Location, String)>,
}
1730
1731impl<'tcx> Visitor<'tcx> for DebuginfoChecker {
1732 fn visit_statement_debuginfo(
1733 &mut self,
1734 stmt_debuginfo: &StmtDebugInfo<'tcx>,
1735 location: Location,
1736 ) {
1737 let local = match stmt_debuginfo {
1738 StmtDebugInfo::AssignRef(local, _) | StmtDebugInfo::InvalidAssign(local) => *local,
1739 };
1740 if !self.debuginfo_locals.contains(local) {
1741 self.failures.push((location, format!("{local:?} is not in debuginfo")));
1742 }
1743 }
1744}