1#![cfg_attr(bootstrap, feature(assert_matches))]
3#![cfg_attr(bootstrap, feature(if_let_guard))]
4#![feature(box_patterns)]
5#![feature(const_type_name)]
6#![feature(cow_is_borrowed)]
7#![feature(file_buffered)]
8#![feature(impl_trait_in_assoc_type)]
9#![feature(iterator_try_collect)]
10#![feature(try_blocks)]
11#![feature(yeet_expr)]
12use hir::ConstContext;
15use required_consts::RequiredConstsVisitor;
16use rustc_const_eval::check_consts::{self, ConstCx};
17use rustc_const_eval::util;
18use rustc_data_structures::fx::FxIndexSet;
19use rustc_data_structures::steal::Steal;
20use rustc_hir as hir;
21use rustc_hir::def::{CtorKind, DefKind};
22use rustc_hir::def_id::LocalDefId;
23use rustc_index::IndexVec;
24use rustc_middle::mir::{
25 AnalysisPhase, Body, CallSource, ClearCrossCrate, ConstOperand, ConstQualifs, LocalDecl,
26 MirPhase, Operand, Place, ProjectionElem, Promoted, RuntimePhase, Rvalue, START_BLOCK,
27 SourceInfo, Statement, StatementKind, TerminatorKind,
28};
29use rustc_middle::ty::{self, TyCtxt, TypeVisitableExt};
30use rustc_middle::util::Providers;
31use rustc_middle::{bug, query, span_bug};
32use rustc_span::source_map::Spanned;
33use rustc_span::{DUMMY_SP, sym};
34use tracing::debug;
35
36#[macro_use]
37mod pass_manager;
38
39use std::sync::LazyLock;
40
41use pass_manager::{self as pm, Lint, MirLint, MirPass, WithMinOptLevel};
42
43mod check_pointers;
44mod cost_checker;
45mod cross_crate_inline;
46mod deduce_param_attrs;
47mod elaborate_drop;
48mod errors;
49mod ffi_unwind_calls;
50mod lint;
51mod lint_tail_expr_drop_order;
52mod liveness;
53mod patch;
54mod shim;
55mod ssa;
56mod trivial_const;
57
/// Declares the listed pass modules and collects every declared pass name
/// into `PASS_NAMES`, the set of names the pass manager recognizes
/// (e.g. for validating `-Zmir-enable-passes` style options).
macro_rules! declare_passes {
    (
        $(
            $vis:vis mod $mod_name:ident : $($pass_name:ident $( { $($ident:ident),* } )?),+ $(,)?;
        )*
    ) => {
        $(
            $vis mod $mod_name;
            $(
                // Checks that the declared pass name is actually exported by
                // its module; `as _` avoids polluting the namespace.
                #[allow(unused_imports)]
                use $mod_name::$pass_name as _;
            )+
        )*

        static PASS_NAMES: LazyLock<FxIndexSet<&str>> = LazyLock::new(|| {
            let mut set = FxIndexSet::default();
            // "PreCodegen" is inserted explicitly rather than declared as a
            // pass module entry.
            set.insert("PreCodegen");
            $(
                $(
                    set.extend(pass_names!($mod_name : $pass_name $( { $($ident),* } )? ));
                )+
            )*
            set
        });
    };
}
108
/// Expands to an array of pass-name strings for one `declare_passes!` entry:
/// either the `name()` of each pass in a `{ ... }` group, or the stringified
/// identifier of a single pass.
macro_rules! pass_names {
    // Group form: `module : Group { PassA, PassB }` — query each pass's name.
    ($mod_name:ident : $pass_group:ident { $($pass_name:ident),* $(,)? }) => {
        [
            $(
                $mod_name::$pass_group::$pass_name.name(),
            )*
        ]
    };
    // Single-pass form: `module : Pass` — the identifier itself is the name.
    ($mod_name:ident : $pass_name:ident) => {
        [stringify!($pass_name)]
    };
}
123
124mod validate {
use rustc_abi::{ExternAbi, FIRST_VARIANT, Size};
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_hir::LangItem;
use rustc_hir::attrs::InlineAttr;
use rustc_index::IndexVec;
use rustc_index::bit_set::DenseBitSet;
use rustc_infer::infer::TyCtxtInferExt;
use rustc_infer::traits::{Obligation, ObligationCause};
use rustc_middle::mir::coverage::CoverageKind;
use rustc_middle::mir::visit::{
MutatingUseContext, NonUseContext, PlaceContext, Visitor,
};
use rustc_middle::mir::*;
use rustc_middle::ty::adjustment::PointerCoercion;
use rustc_middle::ty::print::with_no_trimmed_paths;
use rustc_middle::ty::{
self, CoroutineArgsExt, InstanceKind, ScalarInt, Ty, TyCtxt,
TypeVisitableExt, Upcast, Variance,
};
use rustc_middle::{bug, span_bug};
use rustc_mir_dataflow::debuginfo::debuginfo_locals;
use rustc_trait_selection::traits::ObligationCtxt;
use crate::util::{self, most_packed_projection};
/// Classifies a CFG edge for unwind-invariant checking: `Unwind` edges may
/// only go from a non-cleanup block into a cleanup block, while `Normal`
/// edges must stay on one side of the cleanup boundary.
enum EdgeKind { Unwind, Normal, }
// The impls below are expanded derive output for
// `Copy, Clone, Debug, PartialEq, Eq` on `EdgeKind` — kept verbatim.
#[automatically_derived]
impl ::core::marker::Copy for EdgeKind { }
#[automatically_derived]
#[doc(hidden)]
unsafe impl ::core::clone::TrivialClone for EdgeKind { }
#[automatically_derived]
impl ::core::clone::Clone for EdgeKind {
    #[inline]
    fn clone(&self) -> EdgeKind { *self }
}
#[automatically_derived]
impl ::core::fmt::Debug for EdgeKind {
    #[inline]
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        ::core::fmt::Formatter::write_str(f,
            match self {
                EdgeKind::Unwind => "Unwind",
                EdgeKind::Normal => "Normal",
            })
    }
}
#[automatically_derived]
impl ::core::marker::StructuralPartialEq for EdgeKind { }
#[automatically_derived]
impl ::core::cmp::PartialEq for EdgeKind {
    #[inline]
    fn eq(&self, other: &EdgeKind) -> bool {
        let __self_discr = ::core::intrinsics::discriminant_value(self);
        let __arg1_discr = ::core::intrinsics::discriminant_value(other);
        __self_discr == __arg1_discr
    }
}
#[automatically_derived]
impl ::core::cmp::Eq for EdgeKind {
    #[inline]
    #[doc(hidden)]
    #[coverage(off)]
    fn assert_fields_are_eq(&self) {}
}
/// MIR validation pass: checks control-flow/unwind invariants (via
/// `CfgChecker`) plus type-level and debuginfo invariants of a body.
pub(super) struct Validator {
    /// Describes at which point in the pipeline this validation is happening,
    /// so failure messages say where the MIR broke.
    pub when: String,
}
impl<'tcx> crate::MirPass<'tcx> for Validator {
fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
if #[allow(non_exhaustive_omitted_patterns)] match body.source.instance
{
InstanceKind::Intrinsic(..) | InstanceKind::Virtual(..) =>
true,
_ => false,
} {
return;
}
let def_id = body.source.def_id();
let typing_env = body.typing_env(tcx);
let can_unwind =
if body.phase <= MirPhase::Runtime(RuntimePhase::Initial) {
true
} else if !tcx.def_kind(def_id).is_fn_like() {
true
} else {
let body_ty = tcx.type_of(def_id).skip_binder();
let body_abi =
match body_ty.kind() {
ty::FnDef(..) => body_ty.fn_sig(tcx).abi(),
ty::Closure(..) => ExternAbi::RustCall,
ty::CoroutineClosure(..) => ExternAbi::RustCall,
ty::Coroutine(..) => ExternAbi::Rust,
ty::Error(_) => return,
_ =>
::rustc_middle::util::bug::span_bug_fmt(body.span,
format_args!("unexpected body ty: {0}", body_ty)),
};
ty::layout::fn_can_unwind(tcx, Some(def_id), body_abi)
};
let mut cfg_checker =
CfgChecker {
when: &self.when,
body,
tcx,
unwind_edge_count: 0,
reachable_blocks: traversal::reachable_as_bitset(body),
value_cache: FxHashSet::default(),
can_unwind,
};
cfg_checker.visit_body(body);
cfg_checker.check_cleanup_control_flow();
for (location, msg) in validate_types(tcx, typing_env, body, body)
{
cfg_checker.fail(location, msg);
}
for (location, msg) in validate_debuginfos(body) {
cfg_checker.fail(location, msg);
}
if let MirPhase::Runtime(_) = body.phase &&
let ty::InstanceKind::Item(_) = body.source.instance &&
body.has_free_regions() {
cfg_checker.fail(Location::START,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("Free regions in optimized {0} MIR",
body.phase.name()))
}));
}
}
fn is_required(&self) -> bool { true }
}
/// Visitor that checks control-flow-graph invariants of one MIR body:
/// edge validity, cleanup/unwind discipline, and local/scope sanity.
struct CfgChecker<'a, 'tcx> {
    /// Pipeline-point description, included in failure messages.
    when: &'a str,
    body: &'a Body<'tcx>,
    tcx: TyCtxt<'tcx>,
    /// Count of unwind edges seen; cleanup control flow is only checked when
    /// more than one exists.
    unwind_edge_count: usize,
    /// Blocks reachable from `START_BLOCK`; unreachable cleanup blocks are
    /// exempt from the cleanup-tree check.
    reachable_blocks: DenseBitSet<BasicBlock>,
    /// Scratch set reused to detect duplicate `SwitchInt` values.
    value_cache: FxHashSet<u128>,
    /// Whether this body's ABI permits unwinding at all.
    can_unwind: bool,
}
impl<'a, 'tcx> CfgChecker<'a, 'tcx> {
#[track_caller]
fn fail(&self, location: Location, msg: impl AsRef<str>) {
if self.tcx.dcx().has_errors().is_none() {
::rustc_middle::util::bug::span_bug_fmt(self.body.source_info(location).span,
format_args!("broken MIR in {0:?} ({1}) at {2:?}:\n{3}",
self.body.source.instance, self.when, location,
msg.as_ref()));
}
}
fn check_edge(&mut self, location: Location, bb: BasicBlock,
edge_kind: EdgeKind) {
if bb == START_BLOCK {
self.fail(location, "start block must not have predecessors")
}
if let Some(bb) = self.body.basic_blocks.get(bb) {
let src = self.body.basic_blocks.get(location.block).unwrap();
match (src.is_cleanup, bb.is_cleanup, edge_kind) {
(false, false, EdgeKind::Normal) |
(true, true, EdgeKind::Normal) => {}
(false, true, EdgeKind::Unwind) => {
self.unwind_edge_count += 1;
}
_ => {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("{0:?} edge to {1:?} violates unwind invariants (cleanup {2:?} -> {3:?})",
edge_kind, bb, src.is_cleanup, bb.is_cleanup))
}))
}
}
} else {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("encountered jump to invalid basic block {0:?}",
bb))
}))
}
}
fn check_cleanup_control_flow(&self) {
if self.unwind_edge_count <= 1 { return; }
let doms = self.body.basic_blocks.dominators();
let mut post_contract_node = FxHashMap::default();
let mut dom_path = ::alloc::vec::Vec::new();
let mut get_post_contract_node =
|mut bb|
{
let root =
loop {
if let Some(root) = post_contract_node.get(&bb) {
break *root;
}
let parent = doms.immediate_dominator(bb).unwrap();
dom_path.push(bb);
if !self.body.basic_blocks[parent].is_cleanup { break bb; }
bb = parent;
};
for bb in dom_path.drain(..) {
post_contract_node.insert(bb, root);
}
root
};
let mut parent =
IndexVec::from_elem(None, &self.body.basic_blocks);
for (bb, bb_data) in self.body.basic_blocks.iter_enumerated() {
if !bb_data.is_cleanup || !self.reachable_blocks.contains(bb)
{
continue;
}
let bb = get_post_contract_node(bb);
for s in bb_data.terminator().successors() {
let s = get_post_contract_node(s);
if s == bb { continue; }
let parent = &mut parent[bb];
match parent {
None => { *parent = Some(s); }
Some(e) if *e == s => (),
Some(e) =>
self.fail(Location { block: bb, statement_index: 0 },
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("Cleanup control flow violation: The blocks dominated by {0:?} have edges to both {1:?} and {2:?}",
bb, s, *e))
})),
}
}
}
let mut stack = FxHashSet::default();
for (mut bb, parent) in parent.iter_enumerated_mut() {
stack.clear();
stack.insert(bb);
loop {
let Some(parent) = parent.take() else { break };
let no_cycle = stack.insert(parent);
if !no_cycle {
self.fail(Location { block: bb, statement_index: 0 },
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("Cleanup control flow violation: Cycle involving edge {0:?} -> {1:?}",
bb, parent))
}));
break;
}
bb = parent;
}
}
}
fn check_unwind_edge(&mut self, location: Location,
unwind: UnwindAction) {
let is_cleanup =
self.body.basic_blocks[location.block].is_cleanup;
match unwind {
UnwindAction::Cleanup(unwind) => {
if is_cleanup {
self.fail(location,
"`UnwindAction::Cleanup` in cleanup block");
}
self.check_edge(location, unwind, EdgeKind::Unwind);
}
UnwindAction::Continue => {
if is_cleanup {
self.fail(location,
"`UnwindAction::Continue` in cleanup block");
}
if !self.can_unwind {
self.fail(location,
"`UnwindAction::Continue` in no-unwind function");
}
}
UnwindAction::Terminate(UnwindTerminateReason::InCleanup) => {
if !is_cleanup {
self.fail(location,
"`UnwindAction::Terminate(InCleanup)` in a non-cleanup block");
}
}
UnwindAction::Unreachable |
UnwindAction::Terminate(UnwindTerminateReason::Abi) => (),
}
}
fn is_critical_call_edge(&self, target: Option<BasicBlock>,
unwind: UnwindAction) -> bool {
let Some(target) = target else { return false };
#[allow(non_exhaustive_omitted_patterns)] (match unwind {
UnwindAction::Cleanup(_) | UnwindAction::Terminate(_) =>
true,
_ => false,
}) && self.body.basic_blocks.predecessors()[target].len() > 1
}
}
impl<'a, 'tcx> Visitor<'tcx> for CfgChecker<'a, 'tcx> {
    /// Every mentioned local must have a declaration in `body.local_decls`.
    fn visit_local(&mut self, local: Local, _context: PlaceContext, location: Location) {
        if self.body.local_decls.get(local).is_none() {
            self.fail(location,
                ::alloc::__export::must_use({
                    ::alloc::fmt::format(format_args!("local {0:?} has no corresponding declaration in `body.local_decls`",
                        local))
                }));
        }
    }

    /// Enforces phase restrictions on statements: some kinds must be gone
    /// after a given phase, others may not appear before one.
    fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
        match &statement.kind {
            StatementKind::AscribeUserType(..) => {
                if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(location,
                        "`AscribeUserType` should have been removed after drop lowering phase");
                }
            }
            StatementKind::FakeRead(..) => {
                if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(location,
                        "`FakeRead` should have been removed after drop lowering phase");
                }
            }
            StatementKind::SetDiscriminant { .. } => {
                // Only allowed once aggregates have been lowered.
                if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(location,
                        "`SetDiscriminant`is not allowed until deaggregation");
                }
            }
            StatementKind::Retag(kind, _) => {
                // Explicit two-phase retags must never appear in MIR.
                if #[allow(non_exhaustive_omitted_patterns)] match kind {
                    RetagKind::TwoPhase => true,
                    _ => false,
                } {
                    self.fail(location,
                        ::alloc::__export::must_use({
                            ::alloc::fmt::format(format_args!("explicit `{0:?}` is forbidden",
                                kind))
                        }));
                }
            }
            StatementKind::Coverage(kind) => {
                // Block/span markers are analysis-only scaffolding.
                if self.body.phase >= MirPhase::Analysis(AnalysisPhase::PostCleanup) &&
                    let CoverageKind::BlockMarker { .. } |
                        CoverageKind::SpanMarker { .. } = kind {
                    self.fail(location,
                        ::alloc::__export::must_use({
                            ::alloc::fmt::format(format_args!("{0:?} should have been removed after analysis",
                                kind))
                        }));
                }
            }
            // These statement kinds are valid in every phase.
            StatementKind::Assign(..) | StatementKind::StorageLive(_) |
            StatementKind::StorageDead(_) | StatementKind::Intrinsic(_) |
            StatementKind::ConstEvalCounter |
            StatementKind::PlaceMention(..) |
            StatementKind::BackwardIncompatibleDropHint { .. } |
            StatementKind::Nop => {}
        }
        self.super_statement(statement, location);
    }

    /// Checks all successor edges and phase restrictions of each terminator.
    fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
        match &terminator.kind {
            TerminatorKind::Goto { target } => {
                self.check_edge(location, *target, EdgeKind::Normal);
            }
            TerminatorKind::SwitchInt { targets, discr: _ } => {
                for (_, target) in targets.iter() {
                    self.check_edge(location, target, EdgeKind::Normal);
                }
                self.check_edge(location, targets.otherwise(), EdgeKind::Normal);
                // Reuse the cached set to detect duplicate switch values.
                self.value_cache.clear();
                self.value_cache.extend(targets.iter().map(|(value, _)| value));
                let has_duplicates = targets.iter().len() != self.value_cache.len();
                if has_duplicates {
                    self.fail(location,
                        ::alloc::__export::must_use({
                            ::alloc::fmt::format(format_args!("duplicated values in `SwitchInt` terminator: {0:?}",
                                terminator.kind))
                        }));
                }
            }
            TerminatorKind::Drop { target, unwind, drop, .. } => {
                self.check_edge(location, *target, EdgeKind::Normal);
                self.check_unwind_edge(location, *unwind);
                if let Some(drop) = drop {
                    self.check_edge(location, *drop, EdgeKind::Normal);
                }
            }
            TerminatorKind::Call { func, args, .. } |
            TerminatorKind::TailCall { func, args, .. } => {
                // Edge and destination checks apply to `Call` only;
                // `TailCall` has neither successors nor a destination.
                if let TerminatorKind::Call { target, unwind, destination, .. } = terminator.kind {
                    if let Some(target) = target {
                        self.check_edge(location, target, EdgeKind::Normal);
                    }
                    self.check_unwind_edge(location, unwind);
                    // Optimized MIR must have critical call edges split.
                    if self.body.phase >= MirPhase::Runtime(RuntimePhase::Optimized) &&
                        self.is_critical_call_edge(target, unwind) {
                        self.fail(location,
                            ::alloc::__export::must_use({
                                ::alloc::fmt::format(format_args!("encountered critical edge in `Call` terminator {0:?}",
                                    terminator.kind))
                            }));
                    }
                    // The call destination must not be a packed place.
                    if most_packed_projection(self.tcx, &self.body.local_decls,
                        destination).is_some() {
                        self.fail(location,
                            ::alloc::__export::must_use({
                                ::alloc::fmt::format(format_args!("encountered packed place in `Call` terminator destination: {0:?}",
                                    terminator.kind))
                            }));
                    }
                }
                // Moved-from argument places must not be packed either.
                for arg in args {
                    if let Operand::Move(place) = &arg.node {
                        if most_packed_projection(self.tcx, &self.body.local_decls,
                            *place).is_some() {
                            self.fail(location,
                                ::alloc::__export::must_use({
                                    ::alloc::fmt::format(format_args!("encountered `Move` of a packed place in `Call` terminator: {0:?}",
                                        terminator.kind))
                                }));
                        }
                    }
                }
                // Force-inline functions may not survive to optimized MIR.
                if let ty::FnDef(did, ..) =
                    *func.ty(&self.body.local_decls, self.tcx).kind() &&
                    self.body.phase >= MirPhase::Runtime(RuntimePhase::Optimized) &&
                    #[allow(non_exhaustive_omitted_patterns)] match self.tcx.codegen_fn_attrs(did).inline
                    {
                        InlineAttr::Force { .. } => true,
                        _ => false,
                    } {
                    self.fail(location,
                        "`#[rustc_force_inline]`-annotated function not inlined");
                }
            }
            TerminatorKind::Assert { target, unwind, .. } => {
                self.check_edge(location, *target, EdgeKind::Normal);
                self.check_unwind_edge(location, *unwind);
            }
            TerminatorKind::Yield { resume, drop, .. } => {
                if self.body.coroutine.is_none() {
                    self.fail(location,
                        "`Yield` cannot appear outside coroutine bodies");
                }
                if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(location,
                        "`Yield` should have been replaced by coroutine lowering");
                }
                self.check_edge(location, *resume, EdgeKind::Normal);
                if let Some(drop) = drop {
                    self.check_edge(location, *drop, EdgeKind::Normal);
                }
            }
            TerminatorKind::FalseEdge { real_target, imaginary_target } => {
                if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(location,
                        "`FalseEdge` should have been removed after drop elaboration");
                }
                self.check_edge(location, *real_target, EdgeKind::Normal);
                self.check_edge(location, *imaginary_target, EdgeKind::Normal);
            }
            TerminatorKind::FalseUnwind { real_target, unwind } => {
                if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(location,
                        "`FalseUnwind` should have been removed after drop elaboration");
                }
                self.check_edge(location, *real_target, EdgeKind::Normal);
                self.check_unwind_edge(location, *unwind);
            }
            TerminatorKind::InlineAsm { targets, unwind, .. } => {
                for &target in targets {
                    self.check_edge(location, target, EdgeKind::Normal);
                }
                self.check_unwind_edge(location, *unwind);
            }
            TerminatorKind::CoroutineDrop => {
                if self.body.coroutine.is_none() {
                    self.fail(location,
                        "`CoroutineDrop` cannot appear outside coroutine bodies");
                }
                if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(location,
                        "`CoroutineDrop` should have been replaced by coroutine lowering");
                }
            }
            TerminatorKind::UnwindResume => {
                let bb = location.block;
                if !self.body.basic_blocks[bb].is_cleanup {
                    self.fail(location,
                        "Cannot `UnwindResume` from non-cleanup basic block")
                }
                if !self.can_unwind {
                    self.fail(location,
                        "Cannot `UnwindResume` in a function that cannot unwind")
                }
            }
            TerminatorKind::UnwindTerminate(_) => {
                let bb = location.block;
                if !self.body.basic_blocks[bb].is_cleanup {
                    self.fail(location,
                        "Cannot `UnwindTerminate` from non-cleanup basic block")
                }
            }
            TerminatorKind::Return => {
                let bb = location.block;
                if self.body.basic_blocks[bb].is_cleanup {
                    self.fail(location,
                        "Cannot `Return` from cleanup basic block")
                }
            }
            TerminatorKind::Unreachable => {}
        }
        self.super_terminator(terminator, location);
    }

    /// Every referenced source scope must exist in `body.source_scopes`.
    fn visit_source_scope(&mut self, scope: SourceScope) {
        if self.body.source_scopes.get(scope).is_none() {
            self.tcx.dcx().span_bug(self.body.span,
                ::alloc::__export::must_use({
                    ::alloc::fmt::format(format_args!("broken MIR in {0:?} ({1}):\ninvalid source scope {2:?}",
                        self.body.source.instance, self.when, scope))
                }));
        }
    }
}
/// Runs the `TypeChecker` over `body` and returns every type-level
/// validation failure found, paired with its location. `caller_body` is the
/// body from whose perspective coroutine layouts are resolved.
pub(super) fn validate_types<'tcx>(
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    body: &Body<'tcx>,
    caller_body: &Body<'tcx>,
) -> Vec<(Location, String)> {
    let mut checker = TypeChecker { body, caller_body, tcx, typing_env, failures: Vec::new() };
    {
        // Keep full (untrimmed) paths in any diagnostics produced below.
        let _guard = NoTrimmedGuard::new();
        checker.visit_body(body);
    }
    checker.failures
}
/// Visitor that collects type-level MIR validation failures (rather than
/// ICE-ing immediately, failures are accumulated and reported by the caller).
struct TypeChecker<'a, 'tcx> {
    body: &'a Body<'tcx>,
    /// Body from whose perspective coroutine layouts are resolved; equals
    /// `body` when invoked from the `Validator` pass.
    caller_body: &'a Body<'tcx>,
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    /// Accumulated `(location, message)` validation failures.
    failures: Vec<(Location, String)>,
}
impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
    /// Records a validation failure at `location`.
    fn fail(&mut self, location: Location, msg: impl Into<String>) {
        self.failures.push((location, msg.into()));
    }

    /// Returns whether a value of type `src` may be assigned to a place of
    /// type `dest`. Before runtime MIR subtyping is still allowed
    /// (covariance); from runtime MIR on, types must relate invariantly.
    /// Pairs involving opaque types are accepted unconditionally.
    fn mir_assign_valid_types(&self, src: Ty<'tcx>, dest: Ty<'tcx>) -> bool {
        // Fast path: identical types are always fine.
        if src == dest {
            return true;
        }
        if (src, dest).has_opaque_types() {
            return true;
        }
        let variance = if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
            Variance::Invariant
        } else {
            Variance::Covariant
        };
        crate::util::relate_types(self.tcx, self.typing_env, variance, src, dest)
    }

    /// Returns whether `pred` holds, ignoring region constraints. Predicates
    /// that mention opaque types are treated as vacuously true.
    fn predicate_must_hold_modulo_regions(
        &self,
        pred: impl Upcast<TyCtxt<'tcx>, ty::Predicate<'tcx>>,
    ) -> bool {
        let predicate: ty::Predicate<'tcx> = pred.upcast(self.tcx);
        if predicate.has_opaque_types() {
            return true;
        }
        let (infcx, param_env) =
            self.tcx.infer_ctxt().build_with_typing_env(self.typing_env);
        let ocx = ObligationCtxt::new(&infcx);
        let obligation =
            Obligation::new(self.tcx, ObligationCause::dummy(), param_env, predicate);
        ocx.register_obligation(obligation);
        ocx.evaluate_obligations_error_on_ambiguity().is_empty()
    }
}
impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
fn visit_operand(&mut self, operand: &Operand<'tcx>,
location: Location) {
if self.tcx.sess.opts.unstable_opts.validate_mir &&
self.body.phase < MirPhase::Runtime(RuntimePhase::Initial) {
if let Operand::Copy(place) = operand {
let ty = place.ty(&self.body.local_decls, self.tcx).ty;
if !self.tcx.type_is_copy_modulo_regions(self.typing_env,
ty) {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("`Operand::Copy` with non-`Copy` type {0}",
ty))
}));
}
}
}
self.super_operand(operand, location);
}
fn visit_projection_elem(&mut self, place_ref: PlaceRef<'tcx>,
elem: PlaceElem<'tcx>, context: PlaceContext,
location: Location) {
match elem {
ProjectionElem::Deref if
self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial)
=> {
let base_ty =
place_ref.ty(&self.body.local_decls, self.tcx).ty;
if base_ty.is_box() {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("{0} dereferenced after ElaborateBoxDerefs",
base_ty))
}))
}
}
ProjectionElem::Field(f, ty) => {
let parent_ty =
place_ref.ty(&self.body.local_decls, self.tcx);
let fail_out_of_bounds =
|this: &mut Self, location|
{
this.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("Out of bounds field {0:?} for {1:?}",
f, parent_ty))
}));
};
let check_equal =
|this: &mut Self, location, f_ty|
{
if !this.mir_assign_valid_types(ty, f_ty) {
this.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("Field projection `{0:?}.{1:?}` specified type `{2}`, but actual type is `{3}`",
place_ref, f, ty, f_ty))
}))
}
};
let kind =
match parent_ty.ty.kind() {
&ty::Alias(ty::Opaque, ty::AliasTy { def_id, args, .. }) =>
{
self.tcx.type_of(def_id).instantiate(self.tcx, args).kind()
}
kind => kind,
};
match kind {
ty::Tuple(fields) => {
let Some(f_ty) =
fields.get(f.as_usize()) else {
fail_out_of_bounds(self, location);
return;
};
check_equal(self, location, *f_ty);
}
ty::Pat(base, _) => check_equal(self, location, *base),
ty::Adt(adt_def, args) => {
if self.tcx.is_lang_item(adt_def.did(),
LangItem::DynMetadata) {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("You can\'t project to field {0:?} of `DynMetadata` because layout is weird and thinks it doesn\'t have fields.",
f))
}));
}
if adt_def.repr().simd() {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("Projecting into SIMD type {0:?} is banned by MCP#838",
adt_def))
}));
}
let var = parent_ty.variant_index.unwrap_or(FIRST_VARIANT);
let Some(field) =
adt_def.variant(var).fields.get(f) else {
fail_out_of_bounds(self, location);
return;
};
check_equal(self, location, field.ty(self.tcx, args));
}
ty::Closure(_, args) => {
let args = args.as_closure();
let Some(&f_ty) =
args.upvar_tys().get(f.as_usize()) else {
fail_out_of_bounds(self, location);
return;
};
check_equal(self, location, f_ty);
}
ty::CoroutineClosure(_, args) => {
let args = args.as_coroutine_closure();
let Some(&f_ty) =
args.upvar_tys().get(f.as_usize()) else {
fail_out_of_bounds(self, location);
return;
};
check_equal(self, location, f_ty);
}
&ty::Coroutine(def_id, args) => {
let f_ty =
if let Some(var) = parent_ty.variant_index {
let layout =
if def_id == self.caller_body.source.def_id() {
self.caller_body.coroutine_layout_raw().or_else(||
self.tcx.coroutine_layout(def_id, args).ok())
} else if self.tcx.needs_coroutine_by_move_body_def_id(def_id)
&&
let ty::ClosureKind::FnOnce =
args.as_coroutine().kind_ty().to_opt_closure_kind().unwrap()
&&
self.caller_body.source.def_id() ==
self.tcx.coroutine_by_move_body_def_id(def_id) {
self.caller_body.coroutine_layout_raw()
} else { self.tcx.coroutine_layout(def_id, args).ok() };
let Some(layout) =
layout else {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("No coroutine layout for {0:?}",
parent_ty))
}));
return;
};
let Some(&local) =
layout.variant_fields[var].get(f) else {
fail_out_of_bounds(self, location);
return;
};
let Some(f_ty) =
layout.field_tys.get(local) else {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("Out of bounds local {0:?} for {1:?}",
local, parent_ty))
}));
return;
};
ty::EarlyBinder::bind(f_ty.ty).instantiate(self.tcx, args)
} else {
let Some(&f_ty) =
args.as_coroutine().prefix_tys().get(f.index()) else {
fail_out_of_bounds(self, location);
return;
};
f_ty
};
check_equal(self, location, f_ty);
}
_ => {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("{0:?} does not have fields",
parent_ty.ty))
}));
}
}
}
ProjectionElem::Index(index) => {
let indexed_ty =
place_ref.ty(&self.body.local_decls, self.tcx).ty;
match indexed_ty.kind() {
ty::Array(_, _) | ty::Slice(_) => {}
_ =>
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("{0:?} cannot be indexed",
indexed_ty))
})),
}
let index_ty = self.body.local_decls[index].ty;
if index_ty != self.tcx.types.usize {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("bad index ({0} != usize)",
index_ty))
}))
}
}
ProjectionElem::ConstantIndex { offset, min_length, from_end }
=> {
let indexed_ty =
place_ref.ty(&self.body.local_decls, self.tcx).ty;
match indexed_ty.kind() {
ty::Array(_, _) => {
if from_end {
self.fail(location,
"arrays should not be indexed from end");
}
}
ty::Slice(_) => {}
_ =>
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("{0:?} cannot be indexed",
indexed_ty))
})),
}
if from_end {
if offset > min_length {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("constant index with offset -{0} out of bounds of min length {1}",
offset, min_length))
}));
}
} else {
if offset >= min_length {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("constant index with offset {0} out of bounds of min length {1}",
offset, min_length))
}));
}
}
}
ProjectionElem::Subslice { from, to, from_end } => {
let indexed_ty =
place_ref.ty(&self.body.local_decls, self.tcx).ty;
match indexed_ty.kind() {
ty::Array(_, _) => {
if from_end {
self.fail(location,
"arrays should not be subsliced from end");
}
}
ty::Slice(_) => {
if !from_end {
self.fail(location, "slices should be subsliced from end");
}
}
_ =>
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("{0:?} cannot be indexed",
indexed_ty))
})),
}
if !from_end && from > to {
self.fail(location, "backwards subslice {from}..{to}");
}
}
ProjectionElem::OpaqueCast(ty) if
self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial)
=> {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("explicit opaque type cast to `{0}` after `PostAnalysisNormalize`",
ty))
}))
}
ProjectionElem::UnwrapUnsafeBinder(unwrapped_ty) => {
let binder_ty =
place_ref.ty(&self.body.local_decls, self.tcx);
let ty::UnsafeBinder(binder_ty) =
*binder_ty.ty.kind() else {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("WrapUnsafeBinder does not produce a ty::UnsafeBinder"))
}));
return;
};
let binder_inner_ty =
self.tcx.instantiate_bound_regions_with_erased(*binder_ty);
if !self.mir_assign_valid_types(unwrapped_ty,
binder_inner_ty) {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("Cannot unwrap unsafe binder {0:?} into type {1}",
binder_ty, unwrapped_ty))
}));
}
}
_ => {}
}
self.super_projection_elem(place_ref, elem, context, location);
}
    /// Checks debuginfo: composite fragments must be non-empty, field-only
    /// projections of non-union, non-enum types; debuginfo places may only
    /// use projections valid in debuginfo.
    fn visit_var_debug_info(&mut self, debuginfo: &VarDebugInfo<'tcx>) {
        if let Some(box VarDebugInfoFragment { ty, ref projection }) =
            debuginfo.composite {
            if ty.is_union() || ty.is_enum() {
                self.fail(START_BLOCK.start_location(),
                    ::alloc::__export::must_use({
                        ::alloc::fmt::format(format_args!("invalid type {1} in debuginfo for {0:?}",
                            debuginfo.name, ty))
                    }));
            }
            if projection.is_empty() {
                self.fail(START_BLOCK.start_location(),
                    ::alloc::__export::must_use({
                        ::alloc::fmt::format(format_args!("invalid empty projection in debuginfo for {0:?}",
                            debuginfo.name))
                    }));
            }
            // Composite fragments may only project through fields.
            if projection.iter().any(|p|
                !#[allow(non_exhaustive_omitted_patterns)] match p {
                    PlaceElem::Field(..) => true,
                    _ => false,
                }) {
                self.fail(START_BLOCK.start_location(),
                    ::alloc::__export::must_use({
                        ::alloc::fmt::format(format_args!("illegal projection {0:?} in debuginfo for {1:?}",
                            projection, debuginfo.name))
                    }));
            }
        }
        match debuginfo.value {
            VarDebugInfoContents::Const(_) => {}
            VarDebugInfoContents::Place(place) => {
                if place.projection.iter().any(|p|
                    !p.can_use_in_debuginfo()) {
                    self.fail(START_BLOCK.start_location(),
                        ::alloc::__export::must_use({
                            ::alloc::fmt::format(format_args!("illegal place {0:?} in debuginfo for {1:?}",
                                place, debuginfo.name))
                        }));
                }
            }
        }
        self.super_var_debug_info(debuginfo);
    }
    /// Checks structural place invariants: deref position in runtime MIR,
    /// `Downcast` must be followed by `Field`, `DerefTemp` usage rules, and
    /// `Subslice` ordering before runtime MIR.
    fn visit_place(&mut self, place: &Place<'tcx>, cntxt: PlaceContext,
        location: Location) {
        // Result discarded on purpose: computing the place type exercises the
        // internal assertions on malformed places.
        let _ = place.ty(&self.body.local_decls, self.tcx);
        // In runtime MIR, `Deref` is only permitted as the first projection
        // (places in debuginfo are exempt).
        if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) &&
            place.projection.len() > 1 &&
            cntxt != PlaceContext::NonUse(NonUseContext::VarDebugInfo)
            && place.projection[1..].contains(&ProjectionElem::Deref) {
            self.fail(location,
                ::alloc::__export::must_use({
                    ::alloc::fmt::format(format_args!("place {0:?} has deref as a later projection (it is only permitted as the first projection)",
                        place))
                }));
        }
        // Every `Downcast` must be immediately followed by a `Field`.
        let mut projections_iter = place.projection.iter();
        while let Some(proj) = projections_iter.next() {
            if #[allow(non_exhaustive_omitted_patterns)] match proj {
                ProjectionElem::Downcast(..) => true,
                _ => false,
            } {
                if !#[allow(non_exhaustive_omitted_patterns)] match projections_iter.next()
                {
                    Some(ProjectionElem::Field(..)) => true,
                    _ => false,
                } {
                    self.fail(location,
                        ::alloc::__export::must_use({
                            ::alloc::fmt::format(format_args!("place {0:?} has `Downcast` projection not followed by `Field`",
                                place))
                        }));
                }
            }
        }
        // `DerefTemp` locals may only be dereferenced, or assigned to as a
        // whole local.
        if let ClearCrossCrate::Set(box LocalInfo::DerefTemp) =
            self.body.local_decls[place.local].local_info &&
            !place.is_indirect_first_projection() {
            if cntxt !=
                PlaceContext::MutatingUse(MutatingUseContext::Store) ||
                place.as_local().is_none() {
                self.fail(location,
                    ::alloc::__export::must_use({
                        ::alloc::fmt::format(format_args!("`DerefTemp` locals must only be dereferenced or directly assigned to"))
                    }));
            }
        }
        // Before runtime MIR, a `Subslice` may not be followed by another
        // `Subslice` or a `ConstantIndex`.
        if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial) &&
            let Some(i) =
                place.projection.iter().position(|elem|
                    #[allow(non_exhaustive_omitted_patterns)] match elem {
                        ProjectionElem::Subslice { .. } => true,
                        _ => false,
                    }) && let Some(tail) = place.projection.get(i + 1..) &&
            tail.iter().any(|elem|
                {
                    #[allow(non_exhaustive_omitted_patterns)]
                    match elem {
                        ProjectionElem::ConstantIndex { .. } |
                        ProjectionElem::Subslice { .. } => true,
                        _ => false,
                    }
                }) {
            self.fail(location,
                ::alloc::__export::must_use({
                    ::alloc::fmt::format(format_args!("place {0:?} has `ConstantIndex` or `Subslice` after `Subslice`",
                        place))
                }));
        }
        self.super_place(place, cntxt, location);
    }
fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>,
location: Location) {
macro_rules! check_kinds {
($t:expr, $text:literal, $typat:pat) =>
{
if !matches!(($t).kind(), $typat)
{ self.fail(location, format!($text, $t)); }
};
}
match rvalue {
Rvalue::Use(_) => {}
Rvalue::CopyForDeref(_) => {
if self.body.phase >=
MirPhase::Runtime(RuntimePhase::Initial) {
self.fail(location,
"`CopyForDeref` should have been removed in runtime MIR");
}
}
Rvalue::Aggregate(kind, fields) =>
match **kind {
AggregateKind::Tuple => {}
AggregateKind::Array(dest) => {
for src in fields {
if !self.mir_assign_valid_types(src.ty(self.body, self.tcx),
dest) {
self.fail(location, "array field has the wrong type");
}
}
}
AggregateKind::Adt(def_id, idx, args, _, Some(field)) => {
let adt_def = self.tcx.adt_def(def_id);
if !adt_def.is_union() {
::core::panicking::panic("assertion failed: adt_def.is_union()")
};
match (&idx, &FIRST_VARIANT) {
(left_val, right_val) => {
if !(*left_val == *right_val) {
let kind = ::core::panicking::AssertKind::Eq;
::core::panicking::assert_failed(kind, &*left_val,
&*right_val, ::core::option::Option::None);
}
}
};
let dest_ty =
self.tcx.normalize_erasing_regions(self.typing_env,
adt_def.non_enum_variant().fields[field].ty(self.tcx,
args));
if let [field] = fields.raw.as_slice() {
let src_ty = field.ty(self.body, self.tcx);
if !self.mir_assign_valid_types(src_ty, dest_ty) {
self.fail(location, "union field has the wrong type");
}
} else {
self.fail(location,
"unions should have one initialized field");
}
}
AggregateKind::Adt(def_id, idx, args, _, None) => {
let adt_def = self.tcx.adt_def(def_id);
if !!adt_def.is_union() {
::core::panicking::panic("assertion failed: !adt_def.is_union()")
};
let variant = &adt_def.variants()[idx];
if variant.fields.len() != fields.len() {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("adt {2:?} has the wrong number of initialized fields, expected {0}, found {1}",
fields.len(), variant.fields.len(), def_id))
}));
}
for (src, dest) in std::iter::zip(fields, &variant.fields) {
let dest_ty =
self.tcx.normalize_erasing_regions(self.typing_env,
dest.ty(self.tcx, args));
if !self.mir_assign_valid_types(src.ty(self.body, self.tcx),
dest_ty) {
self.fail(location, "adt field has the wrong type");
}
}
}
AggregateKind::Closure(_, args) => {
let upvars = args.as_closure().upvar_tys();
if upvars.len() != fields.len() {
self.fail(location,
"closure has the wrong number of initialized fields");
}
for (src, dest) in std::iter::zip(fields, upvars) {
if !self.mir_assign_valid_types(src.ty(self.body, self.tcx),
dest) {
self.fail(location, "closure field has the wrong type");
}
}
}
AggregateKind::Coroutine(_, args) => {
let upvars = args.as_coroutine().upvar_tys();
if upvars.len() != fields.len() {
self.fail(location,
"coroutine has the wrong number of initialized fields");
}
for (src, dest) in std::iter::zip(fields, upvars) {
if !self.mir_assign_valid_types(src.ty(self.body, self.tcx),
dest) {
self.fail(location, "coroutine field has the wrong type");
}
}
}
AggregateKind::CoroutineClosure(_, args) => {
let upvars = args.as_coroutine_closure().upvar_tys();
if upvars.len() != fields.len() {
self.fail(location,
"coroutine-closure has the wrong number of initialized fields");
}
for (src, dest) in std::iter::zip(fields, upvars) {
if !self.mir_assign_valid_types(src.ty(self.body, self.tcx),
dest) {
self.fail(location,
"coroutine-closure field has the wrong type");
}
}
}
AggregateKind::RawPtr(pointee_ty, mutability) => {
if !#[allow(non_exhaustive_omitted_patterns)] match self.body.phase
{
MirPhase::Runtime(_) => true,
_ => false,
} {
self.fail(location, "RawPtr should be in runtime MIR only");
}
if let [data_ptr, metadata] = fields.raw.as_slice() {
let data_ptr_ty = data_ptr.ty(self.body, self.tcx);
let metadata_ty = metadata.ty(self.body, self.tcx);
if let ty::RawPtr(in_pointee, in_mut) = data_ptr_ty.kind() {
if *in_mut != mutability {
self.fail(location,
"input and output mutability must match");
}
if !in_pointee.is_sized(self.tcx, self.typing_env) {
self.fail(location, "input pointer must be thin");
}
} else {
self.fail(location,
"first operand to raw pointer aggregate must be a raw pointer");
}
if pointee_ty.is_slice() {
if !self.mir_assign_valid_types(metadata_ty,
self.tcx.types.usize) {
self.fail(location, "slice metadata must be usize");
}
} else if pointee_ty.is_sized(self.tcx, self.typing_env) {
if metadata_ty != self.tcx.types.unit {
self.fail(location,
"metadata for pointer-to-thin must be unit");
}
}
} else {
self.fail(location,
"raw pointer aggregate must have 2 fields");
}
}
},
Rvalue::Ref(_, BorrowKind::Fake(_), _) => {
if self.body.phase >=
MirPhase::Runtime(RuntimePhase::Initial) {
self.fail(location,
"`Assign` statement with a `Fake` borrow should have been removed in runtime MIR");
}
}
Rvalue::Ref(..) => {}
Rvalue::BinaryOp(op, vals) => {
use BinOp::*;
let a = vals.0.ty(&self.body.local_decls, self.tcx);
let b = vals.1.ty(&self.body.local_decls, self.tcx);
if crate::util::binop_right_homogeneous(*op) {
if let Eq | Lt | Le | Ne | Ge | Gt = op {
if !self.mir_assign_valid_types(a, b) {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("Cannot {0:?} compare incompatible types {1} and {2}",
op, a, b))
}));
}
} else if a != b {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("Cannot perform binary op {0:?} on unequal types {1} and {2}",
op, a, b))
}));
}
}
match op {
Offset => {
if !#[allow(non_exhaustive_omitted_patterns)] match (a).kind()
{
ty::RawPtr(..) => true,
_ => false,
} {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("Cannot offset non-pointer type {0:?}",
a))
}));
};
if b != self.tcx.types.isize && b != self.tcx.types.usize {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("Cannot offset by non-isize type {0}",
b))
}));
}
}
Eq | Lt | Le | Ne | Ge | Gt => {
for x in [a, b] {
if !#[allow(non_exhaustive_omitted_patterns)] match (x).kind()
{
ty::Bool | ty::Char | ty::Int(..) | ty::Uint(..) |
ty::Float(..) | ty::RawPtr(..) | ty::FnPtr(..) => true,
_ => false,
} {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("Cannot {1:?} compare type {0:?}",
x, op))
}));
}
}
}
Cmp => {
for x in [a, b] {
if !#[allow(non_exhaustive_omitted_patterns)] match (x).kind()
{
ty::Char | ty::Uint(..) | ty::Int(..) => true,
_ => false,
} {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("Cannot three-way compare non-integer type {0:?}",
x))
}));
}
}
}
AddUnchecked | AddWithOverflow | SubUnchecked |
SubWithOverflow | MulUnchecked | MulWithOverflow | Shl |
ShlUnchecked | Shr | ShrUnchecked => {
for x in [a, b] {
if !#[allow(non_exhaustive_omitted_patterns)] match (x).kind()
{
ty::Uint(..) | ty::Int(..) => true,
_ => false,
} {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("Cannot {1:?} non-integer type {0:?}",
x, op))
}));
}
}
}
BitAnd | BitOr | BitXor => {
for x in [a, b] {
if !#[allow(non_exhaustive_omitted_patterns)] match (x).kind()
{
ty::Uint(..) | ty::Int(..) | ty::Bool => true,
_ => false,
} {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("Cannot perform bitwise op {1:?} on type {0:?}",
x, op))
}));
}
}
}
Add | Sub | Mul | Div | Rem => {
for x in [a, b] {
if !#[allow(non_exhaustive_omitted_patterns)] match (x).kind()
{
ty::Uint(..) | ty::Int(..) | ty::Float(..) => true,
_ => false,
} {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("Cannot perform arithmetic {1:?} on type {0:?}",
x, op))
}));
}
}
}
}
}
Rvalue::UnaryOp(op, operand) => {
let a = operand.ty(&self.body.local_decls, self.tcx);
match op {
UnOp::Neg => {
if !#[allow(non_exhaustive_omitted_patterns)] match (a).kind()
{
ty::Int(..) | ty::Float(..) => true,
_ => false,
} {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("Cannot negate type {0:?}",
a))
}));
}
}
UnOp::Not => {
if !#[allow(non_exhaustive_omitted_patterns)] match (a).kind()
{
ty::Int(..) | ty::Uint(..) | ty::Bool => true,
_ => false,
} {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("Cannot binary not type {0:?}",
a))
}));
};
}
UnOp::PtrMetadata => {
if !#[allow(non_exhaustive_omitted_patterns)] match (a).kind()
{
ty::RawPtr(..) | ty::Ref(..) => true,
_ => false,
} {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("Cannot PtrMetadata non-pointer non-reference type {0:?}",
a))
}));
};
}
}
}
Rvalue::Cast(kind, operand, target_type) => {
let op_ty = operand.ty(self.body, self.tcx);
match kind {
CastKind::PointerWithExposedProvenance |
CastKind::PointerExposeProvenance => {}
CastKind::PointerCoercion(PointerCoercion::ReifyFnPointer(_),
_) => {
if !#[allow(non_exhaustive_omitted_patterns)] match (op_ty).kind()
{
ty::FnDef(..) => true,
_ => false,
} {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("CastKind::{1:?} input must be a fn item, not {0:?}",
op_ty, kind))
}));
};
if !#[allow(non_exhaustive_omitted_patterns)] match (target_type).kind()
{
ty::FnPtr(..) => true,
_ => false,
} {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("CastKind::{1:?} output must be a fn pointer, not {0:?}",
target_type, kind))
}));
};
}
CastKind::PointerCoercion(PointerCoercion::UnsafeFnPointer,
_) => {
if !#[allow(non_exhaustive_omitted_patterns)] match (op_ty).kind()
{
ty::FnPtr(..) => true,
_ => false,
} {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("CastKind::{1:?} input must be a fn pointer, not {0:?}",
op_ty, kind))
}));
};
if !#[allow(non_exhaustive_omitted_patterns)] match (target_type).kind()
{
ty::FnPtr(..) => true,
_ => false,
} {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("CastKind::{1:?} output must be a fn pointer, not {0:?}",
target_type, kind))
}));
};
}
CastKind::PointerCoercion(PointerCoercion::ClosureFnPointer(..),
_) => {
if !#[allow(non_exhaustive_omitted_patterns)] match (op_ty).kind()
{
ty::Closure(..) => true,
_ => false,
} {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("CastKind::{1:?} input must be a closure, not {0:?}",
op_ty, kind))
}));
};
if !#[allow(non_exhaustive_omitted_patterns)] match (target_type).kind()
{
ty::FnPtr(..) => true,
_ => false,
} {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("CastKind::{1:?} output must be a fn pointer, not {0:?}",
target_type, kind))
}));
};
}
CastKind::PointerCoercion(PointerCoercion::MutToConstPointer,
_) => {
if !#[allow(non_exhaustive_omitted_patterns)] match (op_ty).kind()
{
ty::RawPtr(_, Mutability::Mut) => true,
_ => false,
} {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("CastKind::{1:?} input must be a raw mut pointer, not {0:?}",
op_ty, kind))
}));
};
if !#[allow(non_exhaustive_omitted_patterns)] match (target_type).kind()
{
ty::RawPtr(_, Mutability::Not) => true,
_ => false,
} {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("CastKind::{1:?} output must be a raw const pointer, not {0:?}",
target_type, kind))
}));
};
if self.body.phase >=
MirPhase::Analysis(AnalysisPhase::PostCleanup) {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("After borrowck, MIR disallows {0:?}",
kind))
}));
}
}
CastKind::PointerCoercion(PointerCoercion::ArrayToPointer,
_) => {
if !#[allow(non_exhaustive_omitted_patterns)] match (op_ty).kind()
{
ty::RawPtr(..) => true,
_ => false,
} {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("CastKind::{1:?} input must be a raw pointer, not {0:?}",
op_ty, kind))
}));
};
if !#[allow(non_exhaustive_omitted_patterns)] match (target_type).kind()
{
ty::RawPtr(..) => true,
_ => false,
} {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("CastKind::{1:?} output must be a raw pointer, not {0:?}",
target_type, kind))
}));
};
if self.body.phase >=
MirPhase::Analysis(AnalysisPhase::PostCleanup) {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("After borrowck, MIR disallows {0:?}",
kind))
}));
}
}
CastKind::PointerCoercion(PointerCoercion::Unsize, _) => {
if !self.predicate_must_hold_modulo_regions(ty::TraitRef::new(self.tcx,
self.tcx.require_lang_item(LangItem::CoerceUnsized,
self.body.source_info(location).span),
[op_ty, *target_type])) {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("Unsize coercion, but `{0}` isn\'t coercible to `{1}`",
op_ty, target_type))
}));
}
}
CastKind::IntToInt | CastKind::IntToFloat => {
let input_valid =
op_ty.is_integral() || op_ty.is_char() || op_ty.is_bool();
let target_valid =
target_type.is_numeric() || target_type.is_char();
if !input_valid || !target_valid {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("Wrong cast kind {0:?} for the type {1}",
kind, op_ty))
}));
}
}
CastKind::FnPtrToPtr => {
if !#[allow(non_exhaustive_omitted_patterns)] match (op_ty).kind()
{
ty::FnPtr(..) => true,
_ => false,
} {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("CastKind::{1:?} input must be a fn pointer, not {0:?}",
op_ty, kind))
}));
};
if !#[allow(non_exhaustive_omitted_patterns)] match (target_type).kind()
{
ty::RawPtr(..) => true,
_ => false,
} {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("CastKind::{1:?} output must be a raw pointer, not {0:?}",
target_type, kind))
}));
};
}
CastKind::PtrToPtr => {
if !#[allow(non_exhaustive_omitted_patterns)] match (op_ty).kind()
{
ty::RawPtr(..) => true,
_ => false,
} {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("CastKind::{1:?} input must be a raw pointer, not {0:?}",
op_ty, kind))
}));
};
if !#[allow(non_exhaustive_omitted_patterns)] match (target_type).kind()
{
ty::RawPtr(..) => true,
_ => false,
} {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("CastKind::{1:?} output must be a raw pointer, not {0:?}",
target_type, kind))
}));
};
}
CastKind::FloatToFloat | CastKind::FloatToInt => {
if !op_ty.is_floating_point() || !target_type.is_numeric() {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("Trying to cast non \'Float\' as {0:?} into {1:?}",
kind, target_type))
}));
}
}
CastKind::Transmute => {
if !self.tcx.normalize_erasing_regions(self.typing_env,
op_ty).is_sized(self.tcx, self.typing_env) {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("Cannot transmute from non-`Sized` type {0}",
op_ty))
}));
}
if !self.tcx.normalize_erasing_regions(self.typing_env,
*target_type).is_sized(self.tcx, self.typing_env) {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("Cannot transmute to non-`Sized` type {0:?}",
target_type))
}));
}
}
CastKind::Subtype => {
if !util::sub_types(self.tcx, self.typing_env, op_ty,
*target_type) {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("Failed subtyping {0} and {1}",
op_ty, target_type))
}))
}
}
}
}
Rvalue::Repeat(_, _) | Rvalue::ThreadLocalRef(_) |
Rvalue::RawPtr(_, _) | Rvalue::Discriminant(_) => {}
Rvalue::WrapUnsafeBinder(op, ty) => {
let unwrapped_ty = op.ty(self.body, self.tcx);
let ty::UnsafeBinder(binder_ty) =
*ty.kind() else {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("WrapUnsafeBinder does not produce a ty::UnsafeBinder"))
}));
return;
};
let binder_inner_ty =
self.tcx.instantiate_bound_regions_with_erased(*binder_ty);
if !self.mir_assign_valid_types(unwrapped_ty,
binder_inner_ty) {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("Cannot wrap {0} into unsafe binder {1:?}",
unwrapped_ty, binder_ty))
}));
}
}
}
self.super_rvalue(rvalue, location);
}
fn visit_statement(&mut self, statement: &Statement<'tcx>,
location: Location) {
match &statement.kind {
StatementKind::Assign(box (dest, rvalue)) => {
let left_ty = dest.ty(&self.body.local_decls, self.tcx).ty;
let right_ty = rvalue.ty(&self.body.local_decls, self.tcx);
if !self.mir_assign_valid_types(right_ty, left_ty) {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("encountered `{0:?}` with incompatible types:\nleft-hand side has type: {1}\nright-hand side has type: {2}",
statement.kind, left_ty, right_ty))
}));
}
if let Some(local) = dest.as_local() &&
let ClearCrossCrate::Set(box LocalInfo::DerefTemp) =
self.body.local_decls[local].local_info &&
!#[allow(non_exhaustive_omitted_patterns)] match rvalue {
Rvalue::CopyForDeref(_) => true,
_ => false,
} {
self.fail(location,
"assignment to a `DerefTemp` must use `CopyForDeref`")
}
}
StatementKind::AscribeUserType(..) => {
if self.body.phase >=
MirPhase::Runtime(RuntimePhase::Initial) {
self.fail(location,
"`AscribeUserType` should have been removed after drop lowering phase");
}
}
StatementKind::FakeRead(..) => {
if self.body.phase >=
MirPhase::Runtime(RuntimePhase::Initial) {
self.fail(location,
"`FakeRead` should have been removed after drop lowering phase");
}
}
StatementKind::Intrinsic(box NonDivergingIntrinsic::Assume(op))
=> {
let ty = op.ty(&self.body.local_decls, self.tcx);
if !ty.is_bool() {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("`assume` argument must be `bool`, but got: `{0}`",
ty))
}));
}
}
StatementKind::Intrinsic(box NonDivergingIntrinsic::CopyNonOverlapping(CopyNonOverlapping {
src, dst, count })) => {
let src_ty = src.ty(&self.body.local_decls, self.tcx);
let op_src_ty =
if let Some(src_deref) = src_ty.builtin_deref(true) {
src_deref
} else {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("Expected src to be ptr in copy_nonoverlapping, got: {0}",
src_ty))
}));
return;
};
let dst_ty = dst.ty(&self.body.local_decls, self.tcx);
let op_dst_ty =
if let Some(dst_deref) = dst_ty.builtin_deref(true) {
dst_deref
} else {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("Expected dst to be ptr in copy_nonoverlapping, got: {0}",
dst_ty))
}));
return;
};
if !self.mir_assign_valid_types(op_src_ty, op_dst_ty) {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("bad arg ({0} != {1})",
op_src_ty, op_dst_ty))
}));
}
let op_cnt_ty = count.ty(&self.body.local_decls, self.tcx);
if op_cnt_ty != self.tcx.types.usize {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("bad arg ({0} != usize)",
op_cnt_ty))
}))
}
}
StatementKind::SetDiscriminant { place, .. } => {
if self.body.phase <
MirPhase::Runtime(RuntimePhase::Initial) {
self.fail(location,
"`SetDiscriminant`is not allowed until deaggregation");
}
let pty = place.ty(&self.body.local_decls, self.tcx).ty;
if !#[allow(non_exhaustive_omitted_patterns)] match pty.kind()
{
ty::Adt(..) | ty::Coroutine(..) | ty::Alias(ty::Opaque, ..)
=> true,
_ => false,
} {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("`SetDiscriminant` is only allowed on ADTs and coroutines, not {0}",
pty))
}));
}
}
StatementKind::Retag(kind, _) => {
if #[allow(non_exhaustive_omitted_patterns)] match kind {
RetagKind::TwoPhase => true,
_ => false,
} {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("explicit `{0:?}` is forbidden",
kind))
}));
}
}
StatementKind::StorageLive(_) | StatementKind::StorageDead(_)
| StatementKind::Coverage(_) |
StatementKind::ConstEvalCounter |
StatementKind::PlaceMention(..) |
StatementKind::BackwardIncompatibleDropHint { .. } |
StatementKind::Nop => {}
}
self.super_statement(statement, location);
}
fn visit_terminator(&mut self, terminator: &Terminator<'tcx>,
location: Location) {
match &terminator.kind {
TerminatorKind::SwitchInt { targets, discr } => {
let switch_ty = discr.ty(&self.body.local_decls, self.tcx);
let target_width = self.tcx.sess.target.pointer_width;
let size =
Size::from_bits(match switch_ty.kind() {
ty::Uint(uint) =>
uint.normalize(target_width).bit_width().unwrap(),
ty::Int(int) =>
int.normalize(target_width).bit_width().unwrap(),
ty::Char => 32,
ty::Bool => 1,
other =>
::rustc_middle::util::bug::bug_fmt(format_args!("unhandled type: {0:?}",
other)),
});
for (value, _) in targets.iter() {
if ScalarInt::try_from_uint(value, size).is_none() {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("the value {0:#x} is not a proper {1}",
value, switch_ty))
}))
}
}
}
TerminatorKind::Call { func, .. } | TerminatorKind::TailCall {
func, .. } => {
let func_ty = func.ty(&self.body.local_decls, self.tcx);
match func_ty.kind() {
ty::FnPtr(..) | ty::FnDef(..) => {}
_ =>
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("encountered non-callable type {1} in `{0}` terminator",
terminator.kind.name(), func_ty))
})),
}
if let TerminatorKind::TailCall { .. } = terminator.kind {}
}
TerminatorKind::Assert { cond, .. } => {
let cond_ty = cond.ty(&self.body.local_decls, self.tcx);
if cond_ty != self.tcx.types.bool {
self.fail(location,
::alloc::__export::must_use({
::alloc::fmt::format(format_args!("encountered non-boolean condition of type {0} in `Assert` terminator",
cond_ty))
}));
}
}
TerminatorKind::Goto { .. } | TerminatorKind::Drop { .. } |
TerminatorKind::Yield { .. } | TerminatorKind::FalseEdge {
.. } | TerminatorKind::FalseUnwind { .. } |
TerminatorKind::InlineAsm { .. } |
TerminatorKind::CoroutineDrop | TerminatorKind::UnwindResume
| TerminatorKind::UnwindTerminate(_) |
TerminatorKind::Return | TerminatorKind::Unreachable => {}
}
self.super_terminator(terminator, location);
}
fn visit_local_decl(&mut self, local: Local,
local_decl: &LocalDecl<'tcx>) {
if let ClearCrossCrate::Set(box LocalInfo::DerefTemp) =
local_decl.local_info {
if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial)
{
self.fail(START_BLOCK.start_location(),
"`DerefTemp` should have been removed in runtime MIR");
} else if local_decl.ty.builtin_deref(true).is_none() {
self.fail(START_BLOCK.start_location(),
"`DerefTemp` should only be used for dereferenceable types")
}
}
self.super_local_decl(local, local_decl);
}
}
/// Runs the debuginfo checker over `body` and returns every failure it
/// recorded, paired with the location where it was found.
pub(super) fn validate_debuginfos<'tcx>(body: &Body<'tcx>) -> Vec<(Location, String)> {
    let mut checker = DebuginfoChecker {
        debuginfo_locals: debuginfo_locals(body),
        failures: Vec::new(),
    };
    checker.visit_body(body);
    checker.failures
}
/// Visitor that checks statement-level debuginfo only refers to locals that
/// actually appear in the body's debuginfo.
struct DebuginfoChecker {
    /// Locals mentioned by the body's debuginfo (computed by `debuginfo_locals`).
    debuginfo_locals: DenseBitSet<Local>,
    /// Accumulated validation failures with the location of each offense.
    failures: Vec<(Location, String)>,
}
impl<'tcx> Visitor<'tcx> for DebuginfoChecker {
    /// Records a failure for any statement debuginfo that names a local which
    /// is not part of the body's debuginfo.
    fn visit_statement_debuginfo(
        &mut self,
        stmt_debuginfo: &StmtDebugInfo<'tcx>,
        location: Location,
    ) {
        let local = match stmt_debuginfo {
            StmtDebugInfo::AssignRef(local, _) | StmtDebugInfo::InvalidAssign(local) => *local,
        };
        if !self.debuginfo_locals.contains(local) {
            self.failures.push((location, format!("{local:?} is not in debuginfo")));
        }
    }
}
}
#[allow(unused_imports)]
use validate::Validator as _;

/// The set of all known MIR pass names. Used to validate `-Zmir-enable-passes`
/// and friends: a pass name not in this set is a typo or a removed pass.
///
/// NOTE(review): this file previously contained both this (already-expanded)
/// static and a second `declare_passes!` invocation that would have defined
/// `PASS_NAMES` (and the pass modules) a second time; the redundant invocation
/// has been removed, keeping the expanded form that matches the inlined pass
/// modules above.
static PASS_NAMES: LazyLock<FxIndexSet<&str>> = LazyLock::new(|| {
    let mut set = FxIndexSet::default();
    set.insert("PreCodegen");
    set.extend(["AbortUnwindingCalls"]);
    set.extend([
        add_call_guards::AddCallGuards::AllCallEdges.name(),
        add_call_guards::AddCallGuards::CriticalCallEdges.name(),
    ]);
    set.extend(["AddMovesForPackedDrops"]);
    set.extend(["AddRetag"]);
    set.extend(["Subtyper"]);
    set.extend(["CheckForceInline"]);
    set.extend(["CheckCallRecursion"]);
    set.extend(["CheckDropRecursion"]);
    set.extend(["CheckInlineAlwaysTargetFeature"]);
    set.extend(["CheckAlignment"]);
    set.extend(["CheckEnums"]);
    set.extend(["CheckConstItemMutation"]);
    set.extend(["CheckNull"]);
    set.extend(["CheckPackedRef"]);
    set.extend(["CleanupPostBorrowck"]);
    set.extend(["CopyProp"]);
    set.extend(["StateTransform"]);
    set.extend(["InstrumentCoverage"]);
    set.extend(["CtfeLimit"]);
    set.extend(["DataflowConstProp"]);
    set.extend([
        dead_store_elimination::DeadStoreElimination::Initial.name(),
        dead_store_elimination::DeadStoreElimination::Final.name(),
    ]);
    set.extend(["Derefer"]);
    set.extend(["DestinationPropagation"]);
    set.extend(["Marker"]);
    set.extend(["EarlyOtherwiseBranch"]);
    set.extend(["EraseDerefTemps"]);
    set.extend(["ElaborateBoxDerefs"]);
    set.extend(["ElaborateDrops"]);
    set.extend(["FunctionItemReferences"]);
    set.extend(["GVN"]);
    set.extend(["Inline"]);
    set.extend(["ForceInline"]);
    set.extend(["ImpossiblePredicates"]);
    set.extend([
        instsimplify::InstSimplify::BeforeInline.name(),
        instsimplify::InstSimplify::AfterSimplifyCfg.name(),
    ]);
    set.extend(["JumpThreading"]);
    set.extend(["KnownPanicsLint"]);
    set.extend(["EnumSizeOpt"]);
    set.extend(["LowerIntrinsics"]);
    set.extend(["LowerSliceLenCalls"]);
    set.extend(["MatchBranchSimplification"]);
    set.extend(["MentionedItems"]);
    set.extend(["MultipleReturnTerminators"]);
    set.extend(["CheckLiveDrops"]);
    set.extend(["ReorderBasicBlocks"]);
    set.extend(["ReorderLocals"]);
    set.extend(["PromoteTemps"]);
    set.extend(["ReferencePropagation"]);
    set.extend(["RemoveNoopLandingPads"]);
    set.extend(["RemovePlaceMention"]);
    set.extend(["RemoveStorageMarkers"]);
    set.extend(["RemoveUninitDrops"]);
    set.extend(["RemoveUnneededDrops"]);
    set.extend(["RemoveZsts"]);
    set.extend(["RequiredConstsVisitor"]);
    set.extend(["PostAnalysisNormalize"]);
    set.extend(["SanityCheck"]);
    set.extend([
        simplify::SimplifyCfg::Initial.name(),
        simplify::SimplifyCfg::PromoteConsts.name(),
        simplify::SimplifyCfg::RemoveFalseEdges.name(),
        simplify::SimplifyCfg::PostAnalysis.name(),
        simplify::SimplifyCfg::PreOptimizations.name(),
        simplify::SimplifyCfg::Final.name(),
        simplify::SimplifyCfg::MakeShim.name(),
        simplify::SimplifyCfg::AfterUnreachableEnumBranching.name(),
    ]);
    set.extend([
        simplify::SimplifyLocals::BeforeConstProp.name(),
        simplify::SimplifyLocals::AfterGVN.name(),
        simplify::SimplifyLocals::Final.name(),
    ]);
    set.extend([
        simplify_branches::SimplifyConstCondition::AfterInstSimplify.name(),
        simplify_branches::SimplifyConstCondition::AfterConstProp.name(),
        simplify_branches::SimplifyConstCondition::Final.name(),
    ]);
    set.extend(["SimplifyComparisonIntegral"]);
    set.extend(["SingleUseConsts"]);
    set.extend(["ScalarReplacementOfAggregates"]);
    set.extend(["StripDebugInfo"]);
    set.extend(["SsaRangePropagation"]);
    set.extend(["UnreachableEnumBranching"]);
    set.extend(["UnreachablePropagation"]);
    set.extend(["Validator"]);
    set
});
216
217pub fn provide(providers: &mut Providers) {
218 coverage::query::provide(providers);
219 ffi_unwind_calls::provide(&mut providers.queries);
220 shim::provide(&mut providers.queries);
221 cross_crate_inline::provide(&mut providers.queries);
222 providers.queries = query::Providers {
223 mir_keys,
224 mir_built,
225 mir_const_qualif,
226 mir_promoted,
227 mir_drops_elaborated_and_const_checked,
228 mir_for_ctfe,
229 mir_coroutine_witnesses: coroutine::mir_coroutine_witnesses,
230 optimized_mir,
231 check_liveness: liveness::check_liveness,
232 is_mir_available,
233 mir_callgraph_cyclic: inline::cycle::mir_callgraph_cyclic,
234 mir_inliner_callees: inline::cycle::mir_inliner_callees,
235 promoted_mir,
236 deduced_param_attrs: deduce_param_attrs::deduced_param_attrs,
237 coroutine_by_move_body_def_id: coroutine::coroutine_by_move_body_def_id,
238 trivial_const: trivial_const::trivial_const_provider,
239 ..providers.queries
240 };
241}
242
243fn remap_mir_for_const_eval_select<'tcx>(
244 tcx: TyCtxt<'tcx>,
245 mut body: Body<'tcx>,
246 context: hir::Constness,
247) -> Body<'tcx> {
248 for bb in body.basic_blocks.as_mut().iter_mut() {
249 let terminator = bb.terminator.as_mut().expect("invalid terminator");
250 match terminator.kind {
251 TerminatorKind::Call {
252 func: Operand::Constant(box ConstOperand { ref const_, .. }),
253 ref mut args,
254 destination,
255 target,
256 unwind,
257 fn_span,
258 ..
259 } if let ty::FnDef(def_id, _) = *const_.ty().kind()
260 && tcx.is_intrinsic(def_id, sym::const_eval_select) =>
261 {
262 let Ok([tupled_args, called_in_const, called_at_rt]) = take_array(args) else {
263 ::core::panicking::panic("internal error: entered unreachable code")unreachable!()
264 };
265 let ty = tupled_args.node.ty(&body.local_decls, tcx);
266 let fields = ty.tuple_fields();
267 let num_args = fields.len();
268 let func =
269 if context == hir::Constness::Const { called_in_const } else { called_at_rt };
270 let (method, place): (fn(Place<'tcx>) -> Operand<'tcx>, Place<'tcx>) =
271 match tupled_args.node {
272 Operand::Constant(_) | Operand::RuntimeChecks(_) => {
273 let local = body.local_decls.push(LocalDecl::new(ty, fn_span));
277 bb.statements.push(Statement::new(
278 SourceInfo::outermost(fn_span),
279 StatementKind::Assign(Box::new((
280 local.into(),
281 Rvalue::Use(tupled_args.node.clone()),
282 ))),
283 ));
284 (Operand::Move, local.into())
285 }
286 Operand::Move(place) => (Operand::Move, place),
287 Operand::Copy(place) => (Operand::Copy, place),
288 };
289 let place_elems = place.projection;
290 let arguments = (0..num_args)
291 .map(|x| {
292 let mut place_elems = place_elems.to_vec();
293 place_elems.push(ProjectionElem::Field(x.into(), fields[x]));
294 let projection = tcx.mk_place_elems(&place_elems);
295 let place = Place { local: place.local, projection };
296 Spanned { node: method(place), span: DUMMY_SP }
297 })
298 .collect();
299 terminator.kind = TerminatorKind::Call {
300 func: func.node,
301 args: arguments,
302 destination,
303 target,
304 unwind,
305 call_source: CallSource::Misc,
306 fn_span,
307 };
308 }
309 _ => {}
310 }
311 }
312 body
313}
314
/// Moves the contents of `b` out as a fixed-size array of length `N`, leaving
/// `b` empty. If `b` does not have exactly `N` elements, returns the original
/// (untouched) boxed slice as the error and `b` is still left empty.
fn take_array<T, const N: usize>(b: &mut Box<[T]>) -> Result<[T; N], Box<[T]>> {
    let b: Box<[T; N]> = std::mem::take(b).try_into()?;
    Ok(*b)
}
319
320fn is_mir_available(tcx: TyCtxt<'_>, def_id: LocalDefId) -> bool {
321 tcx.mir_keys(()).contains(&def_id)
322}
323
324fn mir_keys(tcx: TyCtxt<'_>, (): ()) -> FxIndexSet<LocalDefId> {
327 let mut set: FxIndexSet<_> = tcx.hir_body_owners().collect();
329
330 set.retain(|&def_id| !#[allow(non_exhaustive_omitted_patterns)] match tcx.def_kind(def_id) {
DefKind::GlobalAsm => true,
_ => false,
}matches!(tcx.def_kind(def_id), DefKind::GlobalAsm));
333
334 for body_owner in tcx.hir_body_owners() {
337 if let DefKind::Closure = tcx.def_kind(body_owner)
338 && tcx.needs_coroutine_by_move_body_def_id(body_owner.to_def_id())
339 {
340 set.insert(tcx.coroutine_by_move_body_def_id(body_owner).expect_local());
341 }
342 }
343
344 for item in tcx.hir_crate_items(()).free_items() {
347 if let DefKind::Struct | DefKind::Enum = tcx.def_kind(item.owner_id) {
348 for variant in tcx.adt_def(item.owner_id).variants() {
349 if let Some((CtorKind::Fn, ctor_def_id)) = variant.ctor {
350 set.insert(ctor_def_id.expect_local());
351 }
352 }
353 }
354 }
355
356 set
357}
358
359fn mir_const_qualif(tcx: TyCtxt<'_>, def: LocalDefId) -> ConstQualifs {
360 let body = &tcx.mir_built(def).borrow();
365 let ccx = check_consts::ConstCx::new(tcx, body);
366 match ccx.const_kind {
368 Some(ConstContext::Const { .. } | ConstContext::Static(_) | ConstContext::ConstFn) => {}
369 None => ::rustc_middle::util::bug::span_bug_fmt(tcx.def_span(def),
format_args!("`mir_const_qualif` should only be called on const fns and const items"))span_bug!(
370 tcx.def_span(def),
371 "`mir_const_qualif` should only be called on const fns and const items"
372 ),
373 }
374
375 if body.return_ty().references_error() {
376 tcx.dcx().span_delayed_bug(body.span, "mir_const_qualif: MIR had errors");
378 return Default::default();
379 }
380
381 let mut validator = check_consts::check::Checker::new(&ccx);
382 validator.check_body();
383
384 validator.qualifs_in_return_place()
387}
388
/// Builds the initial MIR for `def` and runs the early lint-style passes on it.
fn mir_built(tcx: TyCtxt<'_>, def: LocalDefId) -> &Steal<Body<'_>> {
    let mut body = tcx.build_mir_inner_impl(def);

    // Trivial consts skip the lint passes entirely: allocate, dump, and return.
    if trivial_const::trivial_const(tcx, def, || &body).is_some() {
        let body = tcx.alloc_steal_mir(body);
        pass_manager::dump_mir_for_phase_change(tcx, &body.borrow());
        return body;
    }

    pass_manager::dump_mir_for_phase_change(tcx, &body);

    // Mostly `Lint` wrappers: these check and report but (apart from
    // `SimplifyCfg::Initial`) do not transform the body.
    pm::run_passes(
        tcx,
        &mut body,
        &[
            &Lint(check_inline::CheckForceInline),
            &Lint(check_call_recursion::CheckCallRecursion),
            &Lint(check_inline_always_target_features::CheckInlineAlwaysTargetFeature),
            &Lint(check_packed_ref::CheckPackedRef),
            &Lint(check_const_item_mutation::CheckConstItemMutation),
            &Lint(function_item_references::FunctionItemReferences),
            &simplify::SimplifyCfg::Initial,
            &Lint(sanity_check::SanityCheck),
        ],
        None,
        pm::Optimizations::Allowed,
    );
    tcx.alloc_steal_mir(body)
}
429
430fn mir_promoted(
432 tcx: TyCtxt<'_>,
433 def: LocalDefId,
434) -> (&Steal<Body<'_>>, &Steal<IndexVec<Promoted, Body<'_>>>) {
435 if true {
if !!tcx.is_trivial_const(def) {
{
::core::panicking::panic_fmt(format_args!("Tried to get mir_promoted of a trivial const"));
}
};
};debug_assert!(!tcx.is_trivial_const(def), "Tried to get mir_promoted of a trivial const");
436
437 let const_qualifs = match tcx.def_kind(def) {
442 DefKind::Fn | DefKind::AssocFn | DefKind::Closure
443 if tcx.constness(def) == hir::Constness::Const =>
444 {
445 tcx.mir_const_qualif(def)
446 }
447 DefKind::AssocConst { .. }
448 | DefKind::Const { .. }
449 | DefKind::Static { .. }
450 | DefKind::InlineConst
451 | DefKind::AnonConst => tcx.mir_const_qualif(def),
452 _ => ConstQualifs::default(),
453 };
454
455 tcx.ensure_done().has_ffi_unwind_calls(def);
457
458 if tcx.needs_coroutine_by_move_body_def_id(def.to_def_id()) {
460 tcx.ensure_done().coroutine_by_move_body_def_id(def);
461 }
462
463 tcx.ensure_done().trivial_const(def);
465
466 let mut body = tcx.mir_built(def).steal();
467 if let Some(error_reported) = const_qualifs.tainted_by_errors {
468 body.tainted_by_errors = Some(error_reported);
469 }
470
471 RequiredConstsVisitor::compute_required_consts(&mut body);
474
475 let promote_pass = promote_consts::PromoteTemps::default();
477 pm::run_passes(
478 tcx,
479 &mut body,
480 &[&promote_pass, &simplify::SimplifyCfg::PromoteConsts, &coverage::InstrumentCoverage],
481 Some(MirPhase::Analysis(AnalysisPhase::Initial)),
482 pm::Optimizations::Allowed,
483 );
484
485 lint_tail_expr_drop_order::run_lint(tcx, def, &body);
486
487 let promoted = promote_pass.promoted_fragments.into_inner();
488 (tcx.alloc_steal_mir(body), tcx.alloc_steal_promoted(promoted))
489}
490
491fn mir_for_ctfe(tcx: TyCtxt<'_>, def_id: LocalDefId) -> &Body<'_> {
493 if true {
if !!tcx.is_trivial_const(def_id) {
{
::core::panicking::panic_fmt(format_args!("Tried to get mir_for_ctfe of a trivial const"));
}
};
};debug_assert!(!tcx.is_trivial_const(def_id), "Tried to get mir_for_ctfe of a trivial const");
494 tcx.arena.alloc(inner_mir_for_ctfe(tcx, def_id))
495}
496
497fn inner_mir_for_ctfe(tcx: TyCtxt<'_>, def: LocalDefId) -> Body<'_> {
498 if tcx.is_constructor(def.to_def_id()) {
500 return shim::build_adt_ctor(tcx, def.to_def_id());
505 }
506
507 let body = tcx.mir_drops_elaborated_and_const_checked(def);
508 let body = match tcx.hir_body_const_context(def) {
509 Some(hir::ConstContext::Const { .. } | hir::ConstContext::Static(_)) => body.steal(),
512 Some(hir::ConstContext::ConstFn) => body.borrow().clone(),
513 None => ::rustc_middle::util::bug::bug_fmt(format_args!("`mir_for_ctfe` called on non-const {0:?}",
def))bug!("`mir_for_ctfe` called on non-const {def:?}"),
514 };
515
516 let mut body = remap_mir_for_const_eval_select(tcx, body, hir::Constness::Const);
517 pm::run_passes(tcx, &mut body, &[&ctfe_limit::CtfeLimit], None, pm::Optimizations::Allowed);
518
519 body
520}
521
/// Runs borrowck and assorted prerequisite queries, then lowers the promoted
/// MIR through the analysis-to-runtime pipeline.
fn mir_drops_elaborated_and_const_checked(tcx: TyCtxt<'_>, def: LocalDefId) -> &Steal<Body<'_>> {
    // Force coroutine-witness computation first — it reads MIR that is stolen below.
    if tcx.is_coroutine(def.to_def_id()) {
        tcx.ensure_done().mir_coroutine_witnesses(def);
    }

    // Borrowck runs on the typeck root; synthetic MIR (shims) is not borrowchecked.
    let tainted_by_errors = if !tcx.is_synthetic_mir(def) {
        tcx.mir_borrowck(tcx.typeck_root_def_id(def.to_def_id()).expect_local()).err()
    } else {
        None
    };

    let is_fn_like = tcx.def_kind(def).is_fn_like();
    if is_fn_like {
        // If inlining may run on callers of this body, precompute the callee
        // list now, while the promoted MIR is still available.
        if pm::should_run_pass(tcx, &inline::Inline, pm::Optimizations::Allowed)
            || inline::ForceInline::should_run_pass_for_callee(tcx, def.to_def_id())
        {
            tcx.ensure_done().mir_inliner_callees(ty::InstanceKind::Item(def.to_def_id()));
        }
    }

    tcx.ensure_done().check_liveness(def);

    let (body, _) = tcx.mir_promoted(def);
    let mut body = body.steal();

    // Propagate borrowck errors into the body so downstream consumers can bail.
    if let Some(error_reported) = tainted_by_errors {
        body.tainted_by_errors = Some(error_reported);
    }

    // Also taint the body if well-formedness checking of the root item failed.
    let root = tcx.typeck_root_def_id(def.to_def_id());
    match tcx.def_kind(root) {
        DefKind::Fn
        | DefKind::AssocFn
        | DefKind::Static { .. }
        | DefKind::Const { .. }
        | DefKind::AssocConst { .. } => {
            if let Err(guar) = tcx.ensure_ok().check_well_formed(root.expect_local()) {
                body.tainted_by_errors = Some(guar);
            }
        }
        _ => {}
    }

    run_analysis_to_runtime_passes(tcx, &mut body);

    tcx.alloc_steal_mir(body)
}
578
579pub fn run_analysis_to_runtime_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
582 if !(body.phase == MirPhase::Analysis(AnalysisPhase::Initial)) {
::core::panicking::panic("assertion failed: body.phase == MirPhase::Analysis(AnalysisPhase::Initial)")
};assert!(body.phase == MirPhase::Analysis(AnalysisPhase::Initial));
583 let did = body.source.def_id();
584
585 {
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_mir_transform/src/lib.rs:585",
"rustc_mir_transform", ::tracing::Level::DEBUG,
::tracing_core::__macro_support::Option::Some("compiler/rustc_mir_transform/src/lib.rs"),
::tracing_core::__macro_support::Option::Some(585u32),
::tracing_core::__macro_support::Option::Some("rustc_mir_transform"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::DEBUG <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::DEBUG <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("analysis_mir_cleanup({0:?})",
did) as &dyn Value))])
});
} else { ; }
};debug!("analysis_mir_cleanup({:?})", did);
586 run_analysis_cleanup_passes(tcx, body);
587 if !(body.phase == MirPhase::Analysis(AnalysisPhase::PostCleanup)) {
::core::panicking::panic("assertion failed: body.phase == MirPhase::Analysis(AnalysisPhase::PostCleanup)")
};assert!(body.phase == MirPhase::Analysis(AnalysisPhase::PostCleanup));
588
589 if check_consts::post_drop_elaboration::checking_enabled(&ConstCx::new(tcx, body)) {
591 pm::run_passes(
592 tcx,
593 body,
594 &[
595 &remove_uninit_drops::RemoveUninitDrops,
596 &simplify::SimplifyCfg::RemoveFalseEdges,
597 &Lint(post_drop_elaboration::CheckLiveDrops),
598 ],
599 None,
600 pm::Optimizations::Allowed,
601 );
602 }
603
604 {
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_mir_transform/src/lib.rs:604",
"rustc_mir_transform", ::tracing::Level::DEBUG,
::tracing_core::__macro_support::Option::Some("compiler/rustc_mir_transform/src/lib.rs"),
::tracing_core::__macro_support::Option::Some(604u32),
::tracing_core::__macro_support::Option::Some("rustc_mir_transform"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::DEBUG <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::DEBUG <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("runtime_mir_lowering({0:?})",
did) as &dyn Value))])
});
} else { ; }
};debug!("runtime_mir_lowering({:?})", did);
605 run_runtime_lowering_passes(tcx, body);
606 if !(body.phase == MirPhase::Runtime(RuntimePhase::Initial)) {
::core::panicking::panic("assertion failed: body.phase == MirPhase::Runtime(RuntimePhase::Initial)")
};assert!(body.phase == MirPhase::Runtime(RuntimePhase::Initial));
607
608 {
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_mir_transform/src/lib.rs:608",
"rustc_mir_transform", ::tracing::Level::DEBUG,
::tracing_core::__macro_support::Option::Some("compiler/rustc_mir_transform/src/lib.rs"),
::tracing_core::__macro_support::Option::Some(608u32),
::tracing_core::__macro_support::Option::Some("rustc_mir_transform"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::DEBUG <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::DEBUG <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("runtime_mir_cleanup({0:?})",
did) as &dyn Value))])
});
} else { ; }
};debug!("runtime_mir_cleanup({:?})", did);
609 run_runtime_cleanup_passes(tcx, body);
610 if !(body.phase == MirPhase::Runtime(RuntimePhase::PostCleanup)) {
::core::panicking::panic("assertion failed: body.phase == MirPhase::Runtime(RuntimePhase::PostCleanup)")
};assert!(body.phase == MirPhase::Runtime(RuntimePhase::PostCleanup));
611}
612
613fn run_analysis_cleanup_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
617 let passes: &[&dyn MirPass<'tcx>] = &[
618 &impossible_predicates::ImpossiblePredicates,
619 &cleanup_post_borrowck::CleanupPostBorrowck,
620 &remove_noop_landing_pads::RemoveNoopLandingPads,
621 &simplify::SimplifyCfg::PostAnalysis,
622 &deref_separator::Derefer,
623 ];
624
625 pm::run_passes(
626 tcx,
627 body,
628 passes,
629 Some(MirPhase::Analysis(AnalysisPhase::PostCleanup)),
630 pm::Optimizations::Allowed,
631 );
632}
633
/// Lowers analysis-phase MIR to runtime MIR, advancing the body to
/// `Runtime(Initial)`.
fn run_runtime_lowering_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
    let passes: &[&dyn MirPass<'tcx>] = &[
        &add_call_guards::CriticalCallEdges,
        &post_analysis_normalize::PostAnalysisNormalize,
        &add_subtyping_projections::Subtyper,
        &elaborate_drops::ElaborateDrops,
        &Lint(check_call_recursion::CheckDropRecursion),
        &abort_unwinding_calls::AbortUnwindingCalls,
        &add_moves_for_packed_drops::AddMovesForPackedDrops,
        &add_retag::AddRetag,
        &erase_deref_temps::EraseDerefTemps,
        &elaborate_box_derefs::ElaborateBoxDerefs,
        &coroutine::StateTransform,
        &Lint(known_panics_lint::KnownPanicsLint),
    ];
    // NOTE(review): uses `run_passes_no_validate` — presumably the body is not
    // in a validatable state between some of these passes; confirm before
    // switching to `run_passes`.
    pm::run_passes_no_validate(tcx, body, passes, Some(MirPhase::Runtime(RuntimePhase::Initial)));
}
663
664fn run_runtime_cleanup_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
666 let passes: &[&dyn MirPass<'tcx>] = &[
667 &lower_intrinsics::LowerIntrinsics,
668 &remove_place_mention::RemovePlaceMention,
669 &simplify::SimplifyCfg::PreOptimizations,
670 ];
671
672 pm::run_passes(
673 tcx,
674 body,
675 passes,
676 Some(MirPhase::Runtime(RuntimePhase::PostCleanup)),
677 pm::Optimizations::Allowed,
678 );
679
680 for decl in &mut body.local_decls {
683 decl.local_info = ClearCrossCrate::Clear;
684 }
685}
686
687pub(crate) fn run_optimization_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
688 fn o1<T>(x: T) -> WithMinOptLevel<T> {
689 WithMinOptLevel(1, x)
690 }
691
692 let def_id = body.source.def_id();
693 let optimizations = if tcx.def_kind(def_id).has_codegen_attrs()
694 && tcx.codegen_fn_attrs(def_id).optimize.do_not_optimize()
695 {
696 pm::Optimizations::Suppressed
697 } else {
698 pm::Optimizations::Allowed
699 };
700
701 pm::run_passes(
703 tcx,
704 body,
705 &[
706 &check_alignment::CheckAlignment,
708 &check_null::CheckNull,
709 &check_enums::CheckEnums,
710 &lower_slice_len::LowerSliceLenCalls,
715 &instsimplify::InstSimplify::BeforeInline,
718 &inline::ForceInline,
720 &inline::Inline,
722 &remove_storage_markers::RemoveStorageMarkers,
727 &remove_zsts::RemoveZsts,
729 &remove_unneeded_drops::RemoveUnneededDrops,
730 &unreachable_enum_branching::UnreachableEnumBranching,
733 &unreachable_prop::UnreachablePropagation,
734 &o1(simplify::SimplifyCfg::AfterUnreachableEnumBranching),
735 &multiple_return_terminators::MultipleReturnTerminators,
736 &instsimplify::InstSimplify::AfterSimplifyCfg,
740 &o1(simplify_branches::SimplifyConstCondition::AfterInstSimplify),
749 &ref_prop::ReferencePropagation,
750 &sroa::ScalarReplacementOfAggregates,
751 &simplify::SimplifyLocals::BeforeConstProp,
752 &dead_store_elimination::DeadStoreElimination::Initial,
753 &gvn::GVN,
754 &simplify::SimplifyLocals::AfterGVN,
755 &ssa_range_prop::SsaRangePropagation,
758 &match_branches::MatchBranchSimplification,
759 &dataflow_const_prop::DataflowConstProp,
760 &single_use_consts::SingleUseConsts,
761 &o1(simplify_branches::SimplifyConstCondition::AfterConstProp),
762 &jump_threading::JumpThreading,
763 &early_otherwise_branch::EarlyOtherwiseBranch,
764 &simplify_comparison_integral::SimplifyComparisonIntegral,
765 &o1(simplify_branches::SimplifyConstCondition::Final),
766 &o1(remove_noop_landing_pads::RemoveNoopLandingPads),
767 &o1(simplify::SimplifyCfg::Final),
768 &strip_debuginfo::StripDebugInfo,
770 ©_prop::CopyProp,
771 &dead_store_elimination::DeadStoreElimination::Final,
772 &dest_prop::DestinationPropagation,
773 &simplify::SimplifyLocals::Final,
774 &multiple_return_terminators::MultipleReturnTerminators,
775 &large_enums::EnumSizeOpt { discrepancy: 128 },
776 &add_call_guards::CriticalCallEdges,
778 &prettify::ReorderBasicBlocks,
780 &prettify::ReorderLocals,
781 &dump_mir::Marker("PreCodegen"),
783 ],
784 Some(MirPhase::Runtime(RuntimePhase::Optimized)),
785 optimizations,
786 );
787}
788
789fn optimized_mir(tcx: TyCtxt<'_>, did: LocalDefId) -> &Body<'_> {
791 tcx.arena.alloc(inner_optimized_mir(tcx, did))
792}
793
794fn inner_optimized_mir(tcx: TyCtxt<'_>, did: LocalDefId) -> Body<'_> {
795 if tcx.is_constructor(did.to_def_id()) {
796 return shim::build_adt_ctor(tcx, did.to_def_id());
801 }
802
803 match tcx.hir_body_const_context(did) {
804 Some(hir::ConstContext::ConstFn) => tcx.ensure_done().mir_for_ctfe(did),
808 None => {}
809 Some(other) => {
::core::panicking::panic_fmt(format_args!("do not use `optimized_mir` for constants: {0:?}",
other));
}panic!("do not use `optimized_mir` for constants: {other:?}"),
810 }
811 {
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_mir_transform/src/lib.rs:811",
"rustc_mir_transform", ::tracing::Level::DEBUG,
::tracing_core::__macro_support::Option::Some("compiler/rustc_mir_transform/src/lib.rs"),
::tracing_core::__macro_support::Option::Some(811u32),
::tracing_core::__macro_support::Option::Some("rustc_mir_transform"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::DEBUG <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::DEBUG <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("about to call mir_drops_elaborated...")
as &dyn Value))])
});
} else { ; }
};debug!("about to call mir_drops_elaborated...");
812 let body = tcx.mir_drops_elaborated_and_const_checked(did).steal();
813 let mut body = remap_mir_for_const_eval_select(tcx, body, hir::Constness::NotConst);
814
815 if body.tainted_by_errors.is_some() {
816 return body;
817 }
818
819 mentioned_items::MentionedItems.run_pass(tcx, &mut body);
823
824 if let TerminatorKind::Unreachable = body.basic_blocks[START_BLOCK].terminator().kind
828 && body.basic_blocks[START_BLOCK].statements.is_empty()
829 {
830 return body;
831 }
832
833 run_optimization_passes(tcx, &mut body);
834
835 body
836}
837
838fn promoted_mir(tcx: TyCtxt<'_>, def: LocalDefId) -> &IndexVec<Promoted, Body<'_>> {
841 if tcx.is_constructor(def.to_def_id()) {
842 return tcx.arena.alloc(IndexVec::new());
843 }
844
845 if !tcx.is_synthetic_mir(def) {
846 tcx.ensure_done().mir_borrowck(tcx.typeck_root_def_id(def.to_def_id()).expect_local());
847 }
848 let mut promoted = tcx.mir_promoted(def).1.steal();
849
850 for body in &mut promoted {
851 run_analysis_to_runtime_passes(tcx, body);
852 }
853
854 tcx.arena.alloc(promoted)
855}