rustc_mir_transform/coverage/query.rs

use rustc_index::bit_set::DenseBitSet;
use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
use rustc_middle::mir::coverage::{BasicCoverageBlock, CoverageIdsInfo, CoverageKind, MappingKind};
use rustc_middle::mir::{Body, Statement, StatementKind};
use rustc_middle::ty::{self, TyCtxt};
use rustc_middle::util::Providers;
use rustc_span::def_id::LocalDefId;
use rustc_span::sym;
use tracing::trace;

use crate::coverage::counters::node_flow::make_node_counters;
use crate::coverage::counters::{CoverageCounters, transcribe_counters};

/// Registers query/hook implementations related to coverage.
pub(crate) fn provide(providers: &mut Providers) {
    providers.hooks.is_eligible_for_coverage = is_eligible_for_coverage;
    providers.queries.coverage_attr_on = coverage_attr_on;
    providers.queries.coverage_ids_info = coverage_ids_info;
}

/// Hook implementation for [`TyCtxt::is_eligible_for_coverage`].
fn is_eligible_for_coverage(tcx: TyCtxt<'_>, def_id: LocalDefId) -> bool {
    // Only instrument functions, methods, and closures (not constants, since they are
    // evaluated at compile time by Miri).
    // FIXME(#73156): Handle source code coverage in const eval, but note that if and when
    // const expressions get coverage spans, we will probably have to "carve out" space for
    // const expressions from coverage spans in enclosing MIRs, like we do for closures.
    // (That might be tricky if const expressions have no corresponding statements in the
    // enclosing MIR. Closures are carved out by their initial `Assign` statement.)
    if !tcx.def_kind(def_id).is_fn_like() {
        trace!("InstrumentCoverage skipped for {def_id:?} (not an fn-like)");
        return false;
    }

    // Don't instrument functions with `#[automatically_derived]` on their
    // enclosing impl block, on the assumption that most users won't care about
    // coverage for derived impls.
    if let Some(impl_of) = tcx.impl_of_method(def_id.to_def_id())
        && tcx.is_automatically_derived(impl_of)
    {
        trace!("InstrumentCoverage skipped for {def_id:?} (automatically derived)");
        return false;
    }

    if tcx.codegen_fn_attrs(def_id).flags.contains(CodegenFnAttrFlags::NAKED) {
        trace!("InstrumentCoverage skipped for {def_id:?} (`#[naked]`)");
        return false;
    }

    if !tcx.coverage_attr_on(def_id) {
        trace!("InstrumentCoverage skipped for {def_id:?} (`#[coverage(off)]`)");
        return false;
    }

    true
}

/// Query implementation for `coverage_attr_on`.
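///
/// The attribute is resolved hierarchically: an explicit `#[coverage(on)]` or
/// `#[coverage(off)]` on the item itself wins; otherwise the nearest enclosing
/// item's setting is inherited, and if no enclosing item has the attribute,
/// coverage defaults to on. A rough illustration (item names are invented for
/// the example):
///
/// ```ignore (illustrative)
/// #[coverage(off)]
/// mod helpers {
///     fn skipped() {}     // inherits "off" from the enclosing module
///
///     #[coverage(on)]
///     fn measured() {}    // explicit "on" overrides the inherited "off"
/// }
/// ```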
fn coverage_attr_on(tcx: TyCtxt<'_>, def_id: LocalDefId) -> bool {
    // Check for annotations directly on this def.
    if let Some(attr) = tcx.get_attr(def_id, sym::coverage) {
        match attr.meta_item_list().as_deref() {
            Some([item]) if item.has_name(sym::off) => return false,
            Some([item]) if item.has_name(sym::on) => return true,
            Some(_) | None => {
                // Other possibilities should have been rejected by `rustc_parse::validate_attr`.
                // Use `span_delayed_bug` to avoid an ICE in failing builds (#127880).
                tcx.dcx().span_delayed_bug(attr.span(), "unexpected value of coverage attribute");
            }
        }
    }

    match tcx.opt_local_parent(def_id) {
        // Check the parent def (and so on recursively) until we find an
        // enclosing attribute or reach the crate root.
        Some(parent) => tcx.coverage_attr_on(parent),
        // We reached the crate root without seeing a coverage attribute, so
        // allow coverage instrumentation by default.
        None => true,
    }
}

/// Query implementation for `coverage_ids_info`.
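///
/// Determines the number of physical counters needed, the counter/expression
/// term for each coverage node, and the expression table, based on which
/// coverage blocks survived MIR optimizations. Returns `None` if this body has
/// no function coverage info (i.e. it was not instrumented).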
fn coverage_ids_info<'tcx>(
    tcx: TyCtxt<'tcx>,
    instance_def: ty::InstanceKind<'tcx>,
) -> Option<CoverageIdsInfo> {
    let mir_body = tcx.instance_mir(instance_def);
    let fn_cov_info = mir_body.function_coverage_info.as_deref()?;

    // Scan through the final MIR to see which BCBs survived MIR opts.
    // Any BCB not in this set was optimized away.
    let mut bcbs_seen = DenseBitSet::new_empty(fn_cov_info.priority_list.len());
    for kind in all_coverage_in_mir_body(mir_body) {
        match *kind {
            CoverageKind::VirtualCounter { bcb } => {
                bcbs_seen.insert(bcb);
            }
            _ => {}
        }
    }

    // Determine the set of BCBs that are referred to by mappings, and therefore
    // need a counter. Any node not in this set will only get a counter if it
    // is part of the counter expression for a node that is in the set.
    let mut bcb_needs_counter =
        DenseBitSet::<BasicCoverageBlock>::new_empty(fn_cov_info.priority_list.len());
    for mapping in &fn_cov_info.mappings {
        match mapping.kind {
            MappingKind::Code { bcb } => {
                bcb_needs_counter.insert(bcb);
            }
            MappingKind::Branch { true_bcb, false_bcb } => {
                bcb_needs_counter.insert(true_bcb);
                bcb_needs_counter.insert(false_bcb);
            }
            MappingKind::MCDCBranch { true_bcb, false_bcb, mcdc_params: _ } => {
                bcb_needs_counter.insert(true_bcb);
                bcb_needs_counter.insert(false_bcb);
            }
            MappingKind::MCDCDecision(_) => {}
        }
    }

    // Clone the priority list so that we can re-sort it.
    let mut priority_list = fn_cov_info.priority_list.clone();
    // The first ID in the priority list represents the synthetic "sink" node,
    // and must remain first so that it _never_ gets a physical counter.
    debug_assert_eq!(priority_list[0], priority_list.iter().copied().max().unwrap());
    assert!(!bcbs_seen.contains(priority_list[0]));
    // Partition the priority list, so that unreachable nodes (removed by MIR opts)
    // are sorted later and therefore are _more_ likely to get a physical counter.
    // This is counter-intuitive, but it means that `transcribe_counters` can
    // easily skip those unused physical counters and replace them with zero.
    // (The original ordering remains in effect within both partitions.)
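    // For illustration (hypothetical node order): if `priority_list` is
    // `[sink, a, b, c]` and only `a` and `c` still appear in the final MIR,
    // the partition below yields `[sink, a, c, b]`: the unseen `b` moves to
    // the back, and everything else keeps its relative order.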
    priority_list[1..].sort_by_key(|&bcb| !bcbs_seen.contains(bcb));

    let node_counters = make_node_counters(&fn_cov_info.node_flow_data, &priority_list);
    let coverage_counters = transcribe_counters(&node_counters, &bcb_needs_counter, &bcbs_seen);

    let CoverageCounters {
        phys_counter_for_node, next_counter_id, node_counters, expressions, ..
    } = coverage_counters;

    Some(CoverageIdsInfo {
        num_counters: next_counter_id.as_u32(),
        phys_counter_for_node,
        term_for_bcb: node_counters,
        expressions,
    })
}

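/// Yields the kind of every coverage statement in this body, skipping any
/// statements that were introduced by inlining (those belong to the inlined
/// callee's instrumentation, not to this function's).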
fn all_coverage_in_mir_body<'a, 'tcx>(
    body: &'a Body<'tcx>,
) -> impl Iterator<Item = &'a CoverageKind> {
    body.basic_blocks.iter().flat_map(|bb_data| &bb_data.statements).filter_map(|statement| {
        match statement.kind {
            StatementKind::Coverage(ref kind) if !is_inlined(body, statement) => Some(kind),
            _ => None,
        }
    })
}

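/// Returns true if this statement's source scope was created by inlining,
/// i.e. the statement originally came from a different function's MIR.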
fn is_inlined(body: &Body<'_>, statement: &Statement<'_>) -> bool {
    let scope_data = &body.source_scopes[statement.source_info.scope];
    scope_data.inlined.is_some() || scope_data.inlined_parent_scope.is_some()
}