rustc_mir_transform/coverage/spans.rs
1use std::collections::VecDeque;
2
3use rustc_data_structures::fx::FxHashSet;
4use rustc_middle::mir;
5use rustc_span::{DesugaringKind, ExpnKind, MacroKind, Span};
6use tracing::{debug, debug_span, instrument};
7
8use crate::coverage::graph::{BasicCoverageBlock, CoverageGraph};
9use crate::coverage::spans::from_mir::{Hole, RawSpanFromMir, SpanFromMir};
10use crate::coverage::{ExtractedHirInfo, mappings, unexpand};
11
12mod from_mir;
13
/// Extracts coverage-relevant spans from the function's MIR, refines them
/// (expansion-span filtering, deduplication, splitting around "holes",
/// overlap removal), and pushes the surviving spans into `code_mappings`
/// as ordinary code mappings.
pub(super) fn extract_refined_covspans(
    mir_body: &mir::Body<'_>,
    hir_info: &ExtractedHirInfo,
    graph: &CoverageGraph,
    code_mappings: &mut impl Extend<mappings::CodeMapping>,
) {
    let &ExtractedHirInfo { body_span, .. } = hir_info;

    let raw_spans = from_mir::extract_raw_spans_from_mir(mir_body, graph);
    let mut covspans = raw_spans
        .into_iter()
        // (`try` block: any `?` failure below yields `None`, dropping that span.)
        .filter_map(|RawSpanFromMir { raw_span, bcb }| try {
            let (span, expn_kind) =
                unexpand::unexpand_into_body_span_with_expn_kind(raw_span, body_span)?;
            // Discard any spans that fill the entire body, because they tend
            // to represent compiler-inserted code, e.g. implicitly returning `()`.
            if span.source_equal(body_span) {
                return None;
            };
            SpanFromMir { span, expn_kind, bcb }
        })
        .collect::<Vec<_>>();

    // Only proceed if we found at least one usable span.
    if covspans.is_empty() {
        return;
    }

    // Also add the adjusted function signature span, if available.
    // Otherwise, add a fake span at the start of the body, to avoid an ugly
    // gap between the start of the body and the first real span.
    // FIXME: Find a more principled way to solve this problem.
    covspans.push(SpanFromMir::for_fn_sig(
        hir_info.fn_sig_span_extended.unwrap_or_else(|| body_span.shrink_to_lo()),
    ));

    // First, perform the passes that need macro information.
    // (`remove_unwanted_expansion_spans` expects dominator order; see its docs.)
    covspans.sort_by(|a, b| graph.cmp_in_dominator_order(a.bcb, b.bcb));
    remove_unwanted_expansion_spans(&mut covspans);
    split_visible_macro_spans(&mut covspans);

    // We no longer need the extra information in `SpanFromMir`, so convert to `Covspan`.
    let mut covspans = covspans.into_iter().map(SpanFromMir::into_covspan).collect::<Vec<_>>();

    let compare_covspans = |a: &Covspan, b: &Covspan| {
        compare_spans(a.span, b.span)
            // After deduplication, we want to keep only the most-dominated BCB.
            .then_with(|| graph.cmp_in_dominator_order(a.bcb, b.bcb).reverse())
    };
    covspans.sort_by(compare_covspans);

    // Among covspans with the same span, keep only one,
    // preferring the one with the most-dominated BCB.
    // (Ideally we should try to preserve _all_ non-dominating BCBs, but that
    // requires a lot more complexity in the span refiner, for little benefit.)
    covspans.dedup_by(|b, a| a.span.source_equal(b.span));

    // Sort the holes, and merge overlapping/adjacent holes.
    let mut holes = hir_info
        .hole_spans
        .iter()
        .copied()
        // Discard any holes that aren't directly visible within the body span.
        .filter(|&hole_span| body_span.contains(hole_span) && body_span.eq_ctxt(hole_span))
        .map(|span| Hole { span })
        .collect::<Vec<_>>();
    holes.sort_by(|a, b| compare_spans(a.span, b.span));
    holes.dedup_by(|b, a| a.merge_if_overlapping_or_adjacent(b));

    // Split the covspans into separate buckets that don't overlap any holes.
    let buckets = divide_spans_into_buckets(covspans, &holes);

    for mut covspans in buckets {
        // Make sure each individual bucket is internally sorted.
        // (Bucketing guarantees inter-bucket order, not intra-bucket order.)
        covspans.sort_by(compare_covspans);
        let _span = debug_span!("processing bucket", ?covspans).entered();

        let mut covspans = remove_unwanted_overlapping_spans(covspans);
        debug!(?covspans, "after removing overlaps");

        // Do one last merge pass, to simplify the output.
        covspans.dedup_by(|b, a| a.merge_if_eligible(b));
        debug!(?covspans, "after merge");

        code_mappings.extend(covspans.into_iter().map(|Covspan { span, bcb }| {
            // Each span produced by the refiner represents an ordinary code region.
            mappings::CodeMapping { span, bcb }
        }));
    }
}
104
105/// Macros that expand into branches (e.g. `assert!`, `trace!`) tend to generate
106/// multiple condition/consequent blocks that have the span of the whole macro
107/// invocation, which is unhelpful. Keeping only the first such span seems to
108/// give better mappings, so remove the others.
109///
110/// Similarly, `await` expands to a branch on the discriminant of `Poll`, which
111/// leads to incorrect coverage if the `Future` is immediately ready (#98712).
112///
113/// (The input spans should be sorted in BCB dominator order, so that the
114/// retained "first" span is likely to dominate the others.)
115fn remove_unwanted_expansion_spans(covspans: &mut Vec<SpanFromMir>) {
116 let mut deduplicated_spans = FxHashSet::default();
117
118 covspans.retain(|covspan| {
119 match covspan.expn_kind {
120 // Retain only the first await-related or macro-expanded covspan with this span.
121 Some(ExpnKind::Desugaring(DesugaringKind::Await)) => {
122 deduplicated_spans.insert(covspan.span)
123 }
124 Some(ExpnKind::Macro(MacroKind::Bang, _)) => deduplicated_spans.insert(covspan.span),
125 // Ignore (retain) other spans.
126 _ => true,
127 }
128 });
129}
130
131/// When a span corresponds to a macro invocation that is visible from the
132/// function body, split it into two parts. The first part covers just the
133/// macro name plus `!`, and the second part covers the rest of the macro
134/// invocation. This seems to give better results for code that uses macros.
135fn split_visible_macro_spans(covspans: &mut Vec<SpanFromMir>) {
136 let mut extra_spans = vec![];
137
138 covspans.retain(|covspan| {
139 let Some(ExpnKind::Macro(MacroKind::Bang, visible_macro)) = covspan.expn_kind else {
140 return true;
141 };
142
143 let split_len = visible_macro.as_str().len() as u32 + 1;
144 let (before, after) = covspan.span.split_at(split_len);
145 if !covspan.span.contains(before) || !covspan.span.contains(after) {
146 // Something is unexpectedly wrong with the split point.
147 // The debug assertion in `split_at` will have already caught this,
148 // but in release builds it's safer to do nothing and maybe get a
149 // bug report for unexpected coverage, rather than risk an ICE.
150 return true;
151 }
152
153 extra_spans.push(SpanFromMir::new(before, covspan.expn_kind.clone(), covspan.bcb));
154 extra_spans.push(SpanFromMir::new(after, covspan.expn_kind.clone(), covspan.bcb));
155 false // Discard the original covspan that we just split.
156 });
157
158 // The newly-split spans are added at the end, so any previous sorting
159 // is not preserved.
160 covspans.extend(extra_spans);
161}
162
163/// Uses the holes to divide the given covspans into buckets, such that:
164/// - No span in any hole overlaps a bucket (truncating the spans if necessary).
165/// - The spans in each bucket are strictly after all spans in previous buckets,
166/// and strictly before all spans in subsequent buckets.
167///
168/// The resulting buckets are sorted relative to each other, but might not be
169/// internally sorted.
170#[instrument(level = "debug")]
171fn divide_spans_into_buckets(input_covspans: Vec<Covspan>, holes: &[Hole]) -> Vec<Vec<Covspan>> {
172 debug_assert!(input_covspans.is_sorted_by(|a, b| compare_spans(a.span, b.span).is_le()));
173 debug_assert!(holes.is_sorted_by(|a, b| compare_spans(a.span, b.span).is_le()));
174
175 // Now we're ready to start carving holes out of the initial coverage spans,
176 // and grouping them in buckets separated by the holes.
177
178 let mut input_covspans = VecDeque::from(input_covspans);
179 let mut fragments = vec![];
180
181 // For each hole:
182 // - Identify the spans that are entirely or partly before the hole.
183 // - Put those spans in a corresponding bucket, truncated to the start of the hole.
184 // - If one of those spans also extends after the hole, put the rest of it
185 // in a "fragments" vector that is processed by the next hole.
186 let mut buckets = (0..holes.len()).map(|_| vec![]).collect::<Vec<_>>();
187 for (hole, bucket) in holes.iter().zip(&mut buckets) {
188 let fragments_from_prev = std::mem::take(&mut fragments);
189
190 // Only inspect spans that precede or overlap this hole,
191 // leaving the rest to be inspected by later holes.
192 // (This relies on the spans and holes both being sorted.)
193 let relevant_input_covspans =
194 drain_front_while(&mut input_covspans, |c| c.span.lo() < hole.span.hi());
195
196 for covspan in fragments_from_prev.into_iter().chain(relevant_input_covspans) {
197 let (before, after) = covspan.split_around_hole_span(hole.span);
198 bucket.extend(before);
199 fragments.extend(after);
200 }
201 }
202
203 // After finding the spans before each hole, any remaining fragments/spans
204 // form their own final bucket, after the final hole.
205 // (If there were no holes, this will just be all of the initial spans.)
206 fragments.extend(input_covspans);
207 buckets.push(fragments);
208
209 buckets
210}
211
212/// Similar to `.drain(..)`, but stops just before it would remove an item not
213/// satisfying the predicate.
214fn drain_front_while<'a, T>(
215 queue: &'a mut VecDeque<T>,
216 mut pred_fn: impl FnMut(&T) -> bool,
217) -> impl Iterator<Item = T> {
218 std::iter::from_fn(move || if pred_fn(queue.front()?) { queue.pop_front() } else { None })
219}
220
221/// Takes one of the buckets of (sorted) spans extracted from MIR, and "refines"
222/// those spans by removing spans that overlap in unwanted ways.
223#[instrument(level = "debug")]
224fn remove_unwanted_overlapping_spans(sorted_spans: Vec<Covspan>) -> Vec<Covspan> {
225 debug_assert!(sorted_spans.is_sorted_by(|a, b| compare_spans(a.span, b.span).is_le()));
226
227 // Holds spans that have been read from the input vector, but haven't yet
228 // been committed to the output vector.
229 let mut pending = vec![];
230 let mut refined = vec![];
231
232 for curr in sorted_spans {
233 pending.retain(|prev: &Covspan| {
234 if prev.span.hi() <= curr.span.lo() {
235 // There's no overlap between the previous/current covspans,
236 // so move the previous one into the refined list.
237 refined.push(prev.clone());
238 false
239 } else {
240 // Otherwise, retain the previous covspan only if it has the
241 // same BCB. This tends to discard long outer spans that enclose
242 // smaller inner spans with different control flow.
243 prev.bcb == curr.bcb
244 }
245 });
246 pending.push(curr);
247 }
248
249 // Drain the rest of the pending list into the refined list.
250 refined.extend(pending);
251 refined
252}
253
/// A coverage span, together with the coverage-graph node (BCB) that it is
/// attributed to.
#[derive(Clone, Debug)]
struct Covspan {
    span: Span,
    bcb: BasicCoverageBlock,
}
259
260impl Covspan {
261 /// Splits this covspan into 0-2 parts:
262 /// - The part that is strictly before the hole span, if any.
263 /// - The part that is strictly after the hole span, if any.
264 fn split_around_hole_span(&self, hole_span: Span) -> (Option<Self>, Option<Self>) {
265 let before = try {
266 let span = self.span.trim_end(hole_span)?;
267 Self { span, ..*self }
268 };
269 let after = try {
270 let span = self.span.trim_start(hole_span)?;
271 Self { span, ..*self }
272 };
273
274 (before, after)
275 }
276
277 /// If `self` and `other` can be merged (i.e. they have the same BCB),
278 /// mutates `self.span` to also include `other.span` and returns true.
279 ///
280 /// Note that compatible covspans can be merged even if their underlying
281 /// spans are not overlapping/adjacent; any space between them will also be
282 /// part of the merged covspan.
283 fn merge_if_eligible(&mut self, other: &Self) -> bool {
284 if self.bcb != other.bcb {
285 return false;
286 }
287
288 self.span = self.span.to(other.span);
289 true
290 }
291}
292
293/// Compares two spans in (lo ascending, hi descending) order.
294fn compare_spans(a: Span, b: Span) -> std::cmp::Ordering {
295 // First sort by span start.
296 Ord::cmp(&a.lo(), &b.lo())
297 // If span starts are the same, sort by span end in reverse order.
298 // This ensures that if spans A and B are adjacent in the list,
299 // and they overlap but are not equal, then either:
300 // - Span A extends further left, or
301 // - Both have the same start and span A extends further right
302 .then_with(|| Ord::cmp(&a.hi(), &b.hi()).reverse())
303}