rustc_mir_transform/coverage/query.rs

use rustc_data_structures::captures::Captures;
use rustc_index::bit_set::DenseBitSet;
use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
use rustc_middle::mir::coverage::{BasicCoverageBlock, CoverageIdsInfo, CoverageKind, MappingKind};
use rustc_middle::mir::{Body, Statement, StatementKind};
use rustc_middle::ty::{self, TyCtxt};
use rustc_middle::util::Providers;
use rustc_span::def_id::LocalDefId;
use rustc_span::sym;
use tracing::trace;

use crate::coverage::counters::node_flow::make_node_counters;
use crate::coverage::counters::{CoverageCounters, transcribe_counters};

/// Registers query/hook implementations related to coverage.
pub(crate) fn provide(providers: &mut Providers) {
    providers.hooks.is_eligible_for_coverage = is_eligible_for_coverage;
    providers.queries.coverage_attr_on = coverage_attr_on;
    providers.queries.coverage_ids_info = coverage_ids_info;
}
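// Note: `is_eligible_for_coverage` is installed as a *hook* rather than a
// query, so it behaves as a plain function call on `TyCtxt` without the
// query system's caching and dependency tracking; the other two providers
// are real queries.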
/// Hook implementation for [`TyCtxt::is_eligible_for_coverage`].
fn is_eligible_for_coverage(tcx: TyCtxt<'_>, def_id: LocalDefId) -> bool {
    // Only instrument functions, methods, and closures (not constants, since they are evaluated
    // at compile time by Miri).
    // FIXME(#73156): Handle source code coverage in const eval, but note: if and when const
    // expressions get coverage spans, we will probably have to "carve out" space for const
    // expressions from coverage spans in enclosing MIRs, like we do for closures. (That might
    // be tricky if const expressions have no corresponding statements in the enclosing MIR.
    // Closures are carved out by their initial `Assign` statement.)
    if !tcx.def_kind(def_id).is_fn_like() {
        trace!("InstrumentCoverage skipped for {def_id:?} (not fn-like)");
        return false;
    }

    // Don't instrument functions with `#[automatically_derived]` on their
    // enclosing impl block, on the assumption that most users won't care about
    // coverage for derived impls.
    if let Some(impl_of) = tcx.impl_of_method(def_id.to_def_id())
        && tcx.is_automatically_derived(impl_of)
    {
        trace!("InstrumentCoverage skipped for {def_id:?} (automatically derived)");
        return false;
    }

    if tcx.codegen_fn_attrs(def_id).flags.contains(CodegenFnAttrFlags::NAKED) {
        trace!("InstrumentCoverage skipped for {def_id:?} (`#[naked]`)");
        return false;
    }

    if !tcx.coverage_attr_on(def_id) {
        trace!("InstrumentCoverage skipped for {def_id:?} (`#[coverage(off)]`)");
        return false;
    }

    true
}

/// Query implementation for `coverage_attr_on`.
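///
/// Walks outward from the given def until it finds a `#[coverage(..)]`
/// attribute, so the nearest enclosing attribute wins. A sketch of the
/// resulting behaviour (item names here are illustrative, not real):
///
/// ```ignore (illustrative)
/// #[coverage(off)]
/// mod quiet {
///     fn helper() {}      // inherits `off` from the enclosing module
///
///     #[coverage(on)]
///     fn audited() {}     // an explicit `on` overrides the parent's `off`
/// }
/// ```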
fn coverage_attr_on(tcx: TyCtxt<'_>, def_id: LocalDefId) -> bool {
    // Check for annotations directly on this def.
    if let Some(attr) = tcx.get_attr(def_id, sym::coverage) {
        match attr.meta_item_list().as_deref() {
            Some([item]) if item.has_name(sym::off) => return false,
            Some([item]) if item.has_name(sym::on) => return true,
            Some(_) | None => {
                // Other possibilities should have been rejected by `rustc_parse::validate_attr`.
                // Use `span_delayed_bug` to avoid an ICE in failing builds (#127880).
                tcx.dcx().span_delayed_bug(attr.span, "unexpected value of coverage attribute");
            }
        }
    }

    match tcx.opt_local_parent(def_id) {
        // Check the parent def (and so on recursively) until we find an
        // enclosing attribute or reach the crate root.
        Some(parent) => tcx.coverage_attr_on(parent),
        // We reached the crate root without seeing a coverage attribute, so
        // allow coverage instrumentation by default.
        None => true,
    }
}

/// Query implementation for `coverage_ids_info`.
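///
/// Starting from the instrumentation-time coverage info attached to the MIR
/// body, this determines which basic coverage blocks (BCBs) still exist after
/// MIR optimizations, then assigns the final counters and counter expressions.
/// Returns `None` for instances whose MIR was never instrumented for coverage
/// (i.e. whose body carries no `function_coverage_info`).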
fn coverage_ids_info<'tcx>(
    tcx: TyCtxt<'tcx>,
    instance_def: ty::InstanceKind<'tcx>,
) -> Option<CoverageIdsInfo> {
    let mir_body = tcx.instance_mir(instance_def);
    let fn_cov_info = mir_body.function_coverage_info.as_deref()?;

    // Scan through the final MIR to see which BCBs survived MIR opts.
    // Any BCB not in this set was optimized away.
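    // (During instrumentation, each BCB of interest was marked with a
    // `CoverageKind::VirtualCounter` statement; if MIR opts removed the
    // surrounding block, that marker statement disappeared with it.)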
    let mut bcbs_seen = DenseBitSet::new_empty(fn_cov_info.priority_list.len());
    for kind in all_coverage_in_mir_body(mir_body) {
        match *kind {
            CoverageKind::VirtualCounter { bcb } => {
                bcbs_seen.insert(bcb);
            }
            _ => {}
        }
    }

    // Determine the set of BCBs that are referred to by mappings, and therefore
    // need a counter. Any node not in this set will only get a counter if it
    // is part of the counter expression for a node that is in the set.
    let mut bcb_needs_counter =
        DenseBitSet::<BasicCoverageBlock>::new_empty(fn_cov_info.priority_list.len());
    for mapping in &fn_cov_info.mappings {
        match mapping.kind {
            MappingKind::Code { bcb } => {
                bcb_needs_counter.insert(bcb);
            }
            MappingKind::Branch { true_bcb, false_bcb } => {
                bcb_needs_counter.insert(true_bcb);
                bcb_needs_counter.insert(false_bcb);
            }
            MappingKind::MCDCBranch { true_bcb, false_bcb, mcdc_params: _ } => {
                bcb_needs_counter.insert(true_bcb);
                bcb_needs_counter.insert(false_bcb);
            }
            MappingKind::MCDCDecision(_) => {}
        }
    }

    // Clone the priority list so that we can re-sort it.
    let mut priority_list = fn_cov_info.priority_list.clone();
    // The first ID in the priority list represents the synthetic "sink" node,
    // and must remain first so that it _never_ gets a physical counter.
    debug_assert_eq!(priority_list[0], priority_list.iter().copied().max().unwrap());
    assert!(!bcbs_seen.contains(priority_list[0]));
    // Partition the priority list, so that unreachable nodes (removed by MIR opts)
    // are sorted later and therefore are _more_ likely to get a physical counter.
    // This is counter-intuitive, but it means that `transcribe_counters` can
    // easily skip those unused physical counters and replace them with zero.
    // (The original ordering remains in effect within both partitions.)
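    //
    // For example (illustrative): if the original priority list is
    // `[s, a, b, c]` and MIR opts removed `b`, the stable partition below
    // yields `[s, a, c, b]`, moving `b` toward the tail positions that are
    // favoured for physical counters.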
    priority_list[1..].sort_by_key(|&bcb| !bcbs_seen.contains(bcb));

    let node_counters = make_node_counters(&fn_cov_info.node_flow_data, &priority_list);
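    // Transcribe the per-node counters into the final counters and
    // expressions, skipping unused physical counters and replacing them
    // with zero (see the partition above).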
    let coverage_counters = transcribe_counters(&node_counters, &bcb_needs_counter, &bcbs_seen);

    let CoverageCounters {
        phys_counter_for_node, next_counter_id, node_counters, expressions, ..
    } = coverage_counters;

    Some(CoverageIdsInfo {
        num_counters: next_counter_id.as_u32(),
        phys_counter_for_node,
        term_for_bcb: node_counters,
        expressions,
    })
}
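/// Iterates over every coverage statement in the given MIR body, skipping
/// statements that were introduced by inlining (see `is_inlined` below).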
fn all_coverage_in_mir_body<'a, 'tcx>(
    body: &'a Body<'tcx>,
) -> impl Iterator<Item = &'a CoverageKind> + Captures<'tcx> {
    body.basic_blocks.iter().flat_map(|bb_data| &bb_data.statements).filter_map(|statement| {
        match statement.kind {
            StatementKind::Coverage(ref kind) if !is_inlined(body, statement) => Some(kind),
            _ => None,
        }
    })
}
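/// Returns true if this statement's scope was inlined from another function.
/// Such coverage statements belong to the callee's instrumentation, so they
/// must not be attributed to the enclosing body's own coverage info.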
fn is_inlined(body: &Body<'_>, statement: &Statement<'_>) -> bool {
    let scope_data = &body.source_scopes[statement.source_info.scope];
    scope_data.inlined.is_some() || scope_data.inlined_parent_scope.is_some()
}