rustc_mir_transform/inline/cycle.rs

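//! Call-graph cycle detection for the MIR inliner: walks a function's callees to find
//! the local functions through which it could (transitively) call back into itself,
//! since inlining such callees could cause a query cycle.
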
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexSet};
use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_data_structures::unord::UnordSet;
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_hir::limit::Limit;
use rustc_middle::mir::TerminatorKind;
use rustc_middle::ty::{self, GenericArgsRef, InstanceKind, TyCtxt, TypeVisitableExt};
use rustc_span::sym;
use tracing::{instrument, trace};

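/// Returns whether the cycle walk should recurse into `callee`: it must have MIR of its
/// own that inlining could pull in, and an inlining pass (regular or forced) must
/// actually run for it.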
#[instrument(level = "debug", skip(tcx), ret)]
fn should_recurse<'tcx>(tcx: TyCtxt<'tcx>, callee: ty::Instance<'tcx>) -> bool {
    match callee.def {
        // If there is no MIR available (either because it was not in metadata or
        // because it's an extern function with no MIR of its own), then the inliner
        // won't cause cycles on this.
        InstanceKind::Item(_) => {
            if !tcx.is_mir_available(callee.def_id()) {
                return false;
            }
        }

        // These have no callable MIR of their own.
        InstanceKind::Intrinsic(_) | InstanceKind::Virtual(..) => return false,

        // These have MIR, and if that MIR is inlined, instantiated, and then inlining is
        // run again, a function item can end up getting inlined. Thus we'd be able to
        // cause a cycle that way.
        InstanceKind::VTableShim(_)
        | InstanceKind::ReifyShim(..)
        | InstanceKind::FnPtrShim(..)
        | InstanceKind::ClosureOnceShim { .. }
        | InstanceKind::ConstructCoroutineInClosureShim { .. }
        | InstanceKind::ThreadLocalShim { .. }
        | InstanceKind::CloneShim(..) => {}

        // This shim does not call any other functions, thus there can be no recursion.
        InstanceKind::FnPtrAddrShim(..) => return false,

        // FIXME: A drop shim that is not fully instantiated can cause ICEs if one attempts
        // to have its MIR built. Likely oli-obk just screwed up the `ParamEnv`s, so this
        // needs some more analysis.
        InstanceKind::DropGlue(..)
        | InstanceKind::FutureDropPollShim(..)
        | InstanceKind::AsyncDropGlue(..)
        | InstanceKind::AsyncDropGlueCtorShim(..) => {
            if callee.has_param() {
                return false;
            }
        }
    }

    crate::pm::should_run_pass(tcx, &crate::inline::Inline, crate::pm::Optimizations::Allowed)
        || crate::inline::ForceInline::should_run_pass_for_callee(tcx, callee.def.def_id())
}

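/// Recursively walks the callees of `caller`, looking for a path back to `target`.
///
/// Returns `Some(true)` if `target` is reachable, `Some(false)` if it is not, and `None`
/// if the walk gave up because the recursion limit was exceeded. Every local callee that
/// lies on a path back to `target` is recorded in `involved`.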
#[instrument(
    level = "debug",
    skip(tcx, typing_env, seen, involved, recursion_limiter, recursion_limit),
    ret
)]
fn process<'tcx>(
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    caller: ty::Instance<'tcx>,
    target: LocalDefId,
    seen: &mut FxHashMap<ty::Instance<'tcx>, bool>,
    involved: &mut FxHashSet<LocalDefId>,
    recursion_limiter: &mut FxHashMap<DefId, usize>,
    recursion_limit: Limit,
) -> Option<bool> {
    trace!(%caller);
    let mut reaches_root = false;

    for &(callee_def_id, args) in tcx.mir_inliner_callees(caller.def) {
        let Ok(args) = caller.try_instantiate_mir_and_normalize_erasing_regions(
            tcx,
            typing_env,
            ty::EarlyBinder::bind(args),
        ) else {
            trace!(?caller, ?typing_env, ?args, "cannot normalize, skipping");
            continue;
        };
        let Ok(Some(callee)) = ty::Instance::try_resolve(tcx, typing_env, callee_def_id, args)
        else {
            trace!(?callee_def_id, "cannot resolve, skipping");
            continue;
        };

        // Found a path.
        if callee.def_id() == target.to_def_id() {
            reaches_root = true;
            seen.insert(callee, true);
            continue;
        }

        if tcx.is_constructor(callee.def_id()) {
            trace!("constructors always have MIR");
            // Constructor functions cannot cause a query cycle.
            continue;
        }

        if !should_recurse(tcx, callee) {
            continue;
        }

        let callee_reaches_root = if let Some(&c) = seen.get(&callee) {
            // Even if we have seen this callee before, and thus don't need
            // to recurse into it, we still need to propagate whether it reaches
            // the root so that we can mark all the involved callers, in case we
            // end up reaching that same recursive callee through some *other* cycle.
            c
        } else {
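            // Mark the callee as seen (provisionally not reaching the root) before recursing,
            // so that hitting it again during this walk does not recurse into it a second time.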
            seen.insert(callee, false);
            let recursion = recursion_limiter.entry(callee.def_id()).or_default();
            trace!(?callee, recursion = *recursion);
            let callee_reaches_root = if recursion_limit.value_within_limit(*recursion) {
                *recursion += 1;
                ensure_sufficient_stack(|| {
                    process(
                        tcx,
                        typing_env,
                        callee,
                        target,
                        seen,
                        involved,
                        recursion_limiter,
                        recursion_limit,
                    )
                })?
            } else {
                return None;
            };
            seen.insert(callee, callee_reaches_root);
            callee_reaches_root
        };
        if callee_reaches_root {
            if let Some(callee_def_id) = callee.def_id().as_local() {
                // Calling `optimized_mir` of a non-local definition cannot cycle.
                involved.insert(callee_def_id);
            }
            reaches_root = true;
        }
    }

    Some(reaches_root)
}

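/// Returns the set of local functions through which `root` can (transitively) end up
/// calling itself, i.e. the functions involved in call-graph cycles through `root`, or
/// `None` if the walk hit the recursion limit and had to give up.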
#[instrument(level = "debug", skip(tcx), ret)]
pub(crate) fn mir_callgraph_cyclic<'tcx>(
    tcx: TyCtxt<'tcx>,
    root: LocalDefId,
) -> Option<UnordSet<LocalDefId>> {
    assert!(
        !tcx.is_constructor(root.to_def_id()),
        "you should not call `mir_callgraph_cyclic` on enum/struct constructor functions"
    );

    // FIXME(-Znext-solver=no): Remove this hack when trait solver overflow can return an error.
    // In code like that pointed out in #128887, the type complexity we ask the solver to deal with
    // grows as we recurse into the call graph. If we use the same recursion limit here and in the
    // solver, the solver hits the limit first and emits a fatal error. But if we use a reduced
    // limit, we will hit the limit first and give up on looking for inlining. And in any case,
    // the default recursion limits are quite generous for us. If we need to recurse 64 times
    // into the call graph, we're probably not going to find any useful MIR inlining.
    let recursion_limit = tcx.recursion_limit() / 2;
    let mut involved = FxHashSet::default();
    let typing_env = ty::TypingEnv::post_analysis(tcx, root);
    let root_instance =
        ty::Instance::new_raw(root.to_def_id(), ty::GenericArgs::identity_for_item(tcx, root));
    if !should_recurse(tcx, root_instance) {
        trace!("cannot walk, skipping");
        return Some(involved.into());
    }
    match process(
        tcx,
        typing_env,
        root_instance,
        root,
        &mut FxHashMap::default(),
        &mut involved,
        &mut FxHashMap::default(),
        recursion_limit,
    ) {
        Some(_) => Some(involved.into()),
        _ => None,
    }
}

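/// Returns the deduplicated `(DefId, GenericArgsRef)` pairs for all functions called from
/// the body of `instance`. Local items are inspected via their `mir_promoted` MIR so that
/// this query does not depend on `optimized_mir`, which is what the inliner itself produces.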
pub(crate) fn mir_inliner_callees<'tcx>(
    tcx: TyCtxt<'tcx>,
    instance: ty::InstanceKind<'tcx>,
) -> &'tcx [(DefId, GenericArgsRef<'tcx>)] {
    let steal;
    let guard;
    let body = match (instance, instance.def_id().as_local()) {
        (InstanceKind::Item(_), Some(def_id)) => {
            steal = tcx.mir_promoted(def_id).0;
            guard = steal.borrow();
            &*guard
        }
        // Functions from other crates and MIR shims
        _ => tcx.instance_mir(instance),
    };
    let mut calls = FxIndexSet::default();
    for bb_data in body.basic_blocks.iter() {
        let terminator = bb_data.terminator();
        if let TerminatorKind::Call { func, args: call_args, .. } = &terminator.kind {
            let ty = func.ty(&body.local_decls, tcx);
            let ty::FnDef(def_id, generic_args) = ty.kind() else {
                continue;
            };
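            // `const_eval_select` just dispatches to one of its function arguments, so treat
            // its runtime function (the third call argument) as the real callee.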
            let call = if tcx.is_intrinsic(*def_id, sym::const_eval_select) {
                let func = &call_args[2].node;
                let ty = func.ty(&body.local_decls, tcx);
                let ty::FnDef(def_id, generic_args) = ty.kind() else {
                    continue;
                };
                (*def_id, *generic_args)
            } else {
                (*def_id, *generic_args)
            };
            calls.insert(call);
        }
    }
    tcx.arena.alloc_from_iter(calls.iter().copied())
}