use super::debug::term_type;
use super::graph::{BasicCoverageBlock, BasicCoverageBlockData, CoverageGraph, START_BCB};

use crate::util::spanview::source_range_no_file;

use rustc_data_structures::graph::WithNumNodes;
use rustc_middle::mir::{
    self, AggregateKind, BasicBlock, FakeReadCause, Rvalue, Statement, StatementKind, Terminator,
    TerminatorKind,
};
use rustc_middle::ty::TyCtxt;

use rustc_span::source_map::original_sp;
use rustc_span::{BytePos, Span};

use std::cmp::Ordering;

#[derive(Debug, Copy, Clone)]
pub(super) enum CoverageStatement {
    Statement(BasicBlock, Span, usize),
    Terminator(BasicBlock, Span),
}

impl CoverageStatement {
    pub fn format(&self, tcx: TyCtxt<'tcx>, mir_body: &'a mir::Body<'tcx>) -> String {
        match *self {
            Self::Statement(bb, span, stmt_index) => {
                let stmt = &mir_body[bb].statements[stmt_index];
                format!(
                    "{}: @{}[{}]: {:?}",
                    source_range_no_file(tcx, &span),
                    bb.index(),
                    stmt_index,
                    stmt
                )
            }
            Self::Terminator(bb, span) => {
                let term = mir_body[bb].terminator();
                format!(
                    "{}: @{}.{}: {:?}",
                    source_range_no_file(tcx, &span),
                    bb.index(),
                    term_type(&term.kind),
                    term.kind
                )
            }
        }
    }

    pub fn span(&self) -> &Span {
        match self {
            Self::Statement(_, span, _) | Self::Terminator(_, span) => span,
        }
    }
}

/// A BCB is deconstructed into one or more `Span`s. Each `Span` maps to a `CoverageSpan` that
/// references the originating BCB and one or more MIR `Statement`s and/or `Terminator`s.
/// Initially, the `Span`s come from the `Statement`s and `Terminator`s, but subsequent
/// transforms can combine adjacent `Span`s and `CoverageSpan`s from the same BCB, merging the
/// `CoverageStatement` vectors, and extending the `Span` to cover the extent of the combined
/// `Span`s.
///
/// Note: A `CoverageStatement` merged into another `CoverageSpan` may come from a `BasicBlock`
/// that is not part of the `CoverageSpan` bcb if the statement was included because its `Span`
/// matches or is subsumed by the `Span` associated with this `CoverageSpan`, and its `BasicBlock`
/// `is_dominated_by()` the `BasicBlock`s in this `CoverageSpan`.
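///
/// For example (illustrative), two statements in the same straight-line BCB each produce an
/// initial `CoverageSpan`:
///
/// ```text
/// let a = 1;      // initial CoverageSpan #1
/// let b = a + 2;  // initial CoverageSpan #2 (same BCB)
/// ```
///
/// After merging, a single `CoverageSpan` covers both statements, and its `coverage_statements`
/// vector holds both `CoverageStatement`s.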
#[derive(Debug, Clone)]
pub(super) struct CoverageSpan {
    pub span: Span,
    pub bcb: BasicCoverageBlock,
    pub coverage_statements: Vec<CoverageStatement>,
    pub is_closure: bool,
}

impl CoverageSpan {
    pub fn for_fn_sig(fn_sig_span: Span) -> Self {
        Self { span: fn_sig_span, bcb: START_BCB, coverage_statements: vec![], is_closure: false }
    }

    pub fn for_statement(
        statement: &Statement<'tcx>,
        span: Span,
        bcb: BasicCoverageBlock,
        bb: BasicBlock,
        stmt_index: usize,
    ) -> Self {
        let is_closure = match statement.kind {
            StatementKind::Assign(box (_, Rvalue::Aggregate(box ref kind, _))) => match kind {
                AggregateKind::Closure(_, _) | AggregateKind::Generator(_, _, _) => true,
                _ => false,
            },
            _ => false,
        };

        Self {
            span,
            bcb,
            coverage_statements: vec![CoverageStatement::Statement(bb, span, stmt_index)],
            is_closure,
        }
    }

    pub fn for_terminator(span: Span, bcb: BasicCoverageBlock, bb: BasicBlock) -> Self {
        Self {
            span,
            bcb,
            coverage_statements: vec![CoverageStatement::Terminator(bb, span)],
            is_closure: false,
        }
    }

    pub fn merge_from(&mut self, mut other: CoverageSpan) {
        debug_assert!(self.is_mergeable(&other));
        self.span = self.span.to(other.span);
        self.coverage_statements.append(&mut other.coverage_statements);
    }

    pub fn cutoff_statements_at(&mut self, cutoff_pos: BytePos) {
        self.coverage_statements.retain(|covstmt| covstmt.span().hi() <= cutoff_pos);
        if let Some(highest_covstmt) =
            self.coverage_statements.iter().max_by_key(|covstmt| covstmt.span().hi())
        {
            self.span = self.span.with_hi(highest_covstmt.span().hi());
        }
    }

    #[inline]
    pub fn is_mergeable(&self, other: &Self) -> bool {
        self.is_in_same_bcb(other) && !(self.is_closure || other.is_closure)
    }

    #[inline]
    pub fn is_in_same_bcb(&self, other: &Self) -> bool {
        self.bcb == other.bcb
    }

    pub fn format(&self, tcx: TyCtxt<'tcx>, mir_body: &'a mir::Body<'tcx>) -> String {
        format!(
            "{}\n    {}",
            source_range_no_file(tcx, &self.span),
            self.format_coverage_statements(tcx, mir_body).replace("\n", "\n    "),
        )
    }

    pub fn format_coverage_statements(
        &self,
        tcx: TyCtxt<'tcx>,
        mir_body: &'a mir::Body<'tcx>,
    ) -> String {
        let mut sorted_coverage_statements = self.coverage_statements.clone();
        sorted_coverage_statements.sort_unstable_by_key(|covstmt| match *covstmt {
            CoverageStatement::Statement(bb, _, index) => (bb, index),
            CoverageStatement::Terminator(bb, _) => (bb, usize::MAX),
        });
        sorted_coverage_statements
            .iter()
            .map(|covstmt| covstmt.format(tcx, mir_body))
            .collect::<Vec<_>>()
            .join("\n")
    }
}

/// Converts the initial set of `CoverageSpan`s (one per MIR `Statement` or `Terminator`) into a
/// minimal set of `CoverageSpan`s, using the BCB CFG to determine where it is safe and useful to:
///
/// * Remove duplicate source code coverage regions
/// * Merge spans that represent continuous (both in source code and control flow), non-branching
///   execution
/// * Carve out (leave uncovered) any span that will be counted by another MIR (notably, closures)
pub struct CoverageSpans<'a, 'tcx> {
    /// The MIR, used to look up `BasicBlockData`.
    mir_body: &'a mir::Body<'tcx>,

    /// A `Span` covering the signature of the function for the MIR.
    fn_sig_span: Span,

    /// A `Span` covering the function body of the MIR (typically from left curly brace to right
    /// curly brace).
    body_span: Span,

    /// The BasicCoverageBlock Control Flow Graph (BCB CFG).
    basic_coverage_blocks: &'a CoverageGraph,

    /// The initial set of `CoverageSpan`s, sorted by `Span` (`lo` and `hi`) and by relative
    /// dominance between the `BasicCoverageBlock`s of equal `Span`s.
    sorted_spans_iter: Option<std::vec::IntoIter<CoverageSpan>>,

    /// The current `CoverageSpan` to compare to its `prev`, to possibly merge, discard, force the
    /// discard of the `prev` (and/or `pending_dups`), or keep both (with `prev` moved to
    /// `pending_dups`). If `curr` is not discarded or merged, it becomes `prev` for the next
    /// iteration.
    some_curr: Option<CoverageSpan>,

    /// The original `span` for `curr`, in case the `curr` span is modified.
    curr_original_span: Span,

    /// The `CoverageSpan` from a prior iteration; typically assigned from that iteration's `curr`.
    /// If that `curr` was discarded, `prev` retains its value from the previous iteration.
    some_prev: Option<CoverageSpan>,

    /// Assigned from `curr_original_span` from the previous iteration.
    prev_original_span: Span,

    /// One or more `CoverageSpan`s with the same `Span` but different `BasicCoverageBlock`s, where
    /// no `BasicCoverageBlock` in the list dominates another `BasicCoverageBlock` in the list.
    /// If a new `curr` span also fits these criteria (compared to an existing list of
    /// `pending_dups`), that `curr` `CoverageSpan` moves to `prev` before possibly being added to
    /// the `pending_dups` list, on the next iteration. As a result, if `prev` and `pending_dups`
    /// have the same `Span`, the criteria for `pending_dups` hold for `prev` as well: a `prev`
    /// with a matching `Span` does not dominate any `pending_dup`, and no `pending_dup` dominates
    /// a `prev` with a matching `Span`.
    pending_dups: Vec<CoverageSpan>,

    /// The final `CoverageSpan`s to add to the coverage map. A `Counter` or `Expression`
    /// will also be injected into the MIR for each `CoverageSpan`.
    refined_spans: Vec<CoverageSpan>,
}

impl<'a, 'tcx> CoverageSpans<'a, 'tcx> {
    /// Generate a minimal set of `CoverageSpan`s, each representing a contiguous code region to be
    /// counted.
    ///
    /// The basic steps are:
    ///
    /// 1. Extract an initial set of spans from the `Statement`s and `Terminator`s of each
    ///    `BasicCoverageBlockData`.
    /// 2. Sort the spans by span.lo() (starting position). Spans that start at the same position
    ///    are sorted with longer spans before shorter spans; and equal spans are sorted
    ///    (deterministically) based on "dominator" relationship (if any).
    /// 3. Traverse the spans in sorted order to identify spans that can be dropped (for instance,
    ///    if another span or spans are already counting the same code region), or should be merged
    ///    into a broader combined span (because it represents a contiguous, non-branching, and
    ///    uninterrupted region of source code).
    ///
    /// Closures are exposed in their enclosing functions as `Assign` `Rvalue`s, and since
    /// closures have their own MIR, their `Span` in their enclosing function should be left
    /// "uncovered".
    ///
    /// Note the resulting vector of `CoverageSpan`s may not be fully sorted (and does not need
    /// to be).
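    ///
    /// For example (an illustrative sketch), a small function such as:
    ///
    /// ```text
    /// fn demo(n: u32) -> u32 {
    ///     if n > 10 { n } else { n + 1 }
    /// }
    /// ```
    ///
    /// starts with one span per `Statement`/`Terminator` (the `n > 10` condition, each arm's
    /// expression, the implicit return, and so on). After sorting and refinement, only a few
    /// `CoverageSpan`s remain: roughly one covering the signature/condition region and one per
    /// arm, each attributed to the BCB that executes it.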
    pub(super) fn generate_coverage_spans(
        mir_body: &'a mir::Body<'tcx>,
        fn_sig_span: Span, // Ensured to be same SourceFile and SyntaxContext as `body_span`
        body_span: Span,
        basic_coverage_blocks: &'a CoverageGraph,
    ) -> Vec<CoverageSpan> {
        let mut coverage_spans = CoverageSpans {
            mir_body,
            fn_sig_span,
            body_span,
            basic_coverage_blocks,
            sorted_spans_iter: None,
            refined_spans: Vec::with_capacity(basic_coverage_blocks.num_nodes() * 2),
            some_curr: None,
            curr_original_span: Span::with_root_ctxt(BytePos(0), BytePos(0)),
            some_prev: None,
            prev_original_span: Span::with_root_ctxt(BytePos(0), BytePos(0)),
            pending_dups: Vec::new(),
        };

        let sorted_spans = coverage_spans.mir_to_initial_sorted_coverage_spans();

        coverage_spans.sorted_spans_iter = Some(sorted_spans.into_iter());
        coverage_spans.some_prev = coverage_spans.sorted_spans_iter.as_mut().unwrap().next();
        coverage_spans.prev_original_span =
            coverage_spans.some_prev.as_ref().expect("at least one span").span;

        coverage_spans.to_refined_spans()
    }

    fn mir_to_initial_sorted_coverage_spans(&self) -> Vec<CoverageSpan> {
        let mut initial_spans = Vec::<CoverageSpan>::with_capacity(self.mir_body.num_nodes() * 2);
        for (bcb, bcb_data) in self.basic_coverage_blocks.iter_enumerated() {
            for coverage_span in self.bcb_to_initial_coverage_spans(bcb, bcb_data) {
                initial_spans.push(coverage_span);
            }
        }

        if initial_spans.is_empty() {
            // This can happen if, for example, the function is unreachable (contains only
            // `BasicBlock`s with an `Unreachable` terminator).
            return initial_spans;
        }

        initial_spans.push(CoverageSpan::for_fn_sig(self.fn_sig_span));

        initial_spans.sort_unstable_by(|a, b| {
            if a.span.lo() == b.span.lo() {
                if a.span.hi() == b.span.hi() {
                    if a.is_in_same_bcb(b) {
                        Some(Ordering::Equal)
                    } else {
                        // Sort equal spans by dominator relationship, in reverse order (so
                        // dominators always come after the dominated equal spans). When later
                        // comparing two spans in order, the first will either dominate the second,
                        // or they will have no dominator relationship.
                        self.basic_coverage_blocks.dominators().rank_partial_cmp(b.bcb, a.bcb)
                    }
                } else {
                    // Sort hi() in reverse order so shorter spans are attempted after longer spans.
                    // This guarantees that, if a `prev` span overlaps, and is not equal to, a
                    // `curr` span, the prev span either extends further left of the curr span, or
                    // they start at the same position and the prev span extends further right of
                    // the end of the curr span.
                    b.span.hi().partial_cmp(&a.span.hi())
                }
            } else {
                a.span.lo().partial_cmp(&b.span.lo())
            }
            .unwrap()
        });
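
        // For example (illustrative `lo..hi` byte offsets), the sorted order looks like:
        //
        //     [0..20)   -- among spans starting at offset 0, the longest comes first
        //     [0..10)   -- same `lo`, shorter span comes after
        //     [5..10)   -- two equal spans from different BCBs are ordered deterministically
        //     [5..10)   --     by the dominator relationship between their BCBs (see above)
        //     [12..15)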

        initial_spans
    }

    /// Iterate through the sorted `CoverageSpan`s, and return the refined list of merged and
    /// de-duplicated `CoverageSpan`s.
    fn to_refined_spans(mut self) -> Vec<CoverageSpan> {
        while self.next_coverage_span() {
            if self.curr().is_mergeable(self.prev()) {
                debug!("  same bcb (and neither is a closure), merge with prev={:?}", self.prev());
                let prev = self.take_prev();
                self.curr_mut().merge_from(prev);
            // Note that curr.span may now differ from curr_original_span
            } else if self.prev_ends_before_curr() {
                debug!(
                    "  different bcbs and disjoint spans, so keep curr for next iter, and add \
                    prev={:?}",
                    self.prev()
                );
                let prev = self.take_prev();
                self.refined_spans.push(prev);
            } else if self.prev().is_closure {
                // drop any equal or overlapping span (`curr`) and keep `prev` to test again in the
                // next iter
                debug!(
                    "  curr overlaps a closure (prev). Drop curr and keep prev for next iter. \
                    prev={:?}",
                    self.prev()
                );
                self.take_curr();
            } else if self.curr().is_closure {
                self.carve_out_span_for_closure();
            } else if self.prev_original_span == self.curr().span {
                // Note that this compares the new span to `prev_original_span`, which may not
                // be the full `prev.span` (if merged during the previous iteration).
                self.hold_pending_dups_unless_dominated();
            } else {
                self.cutoff_prev_at_overlapping_curr();
            }
        }

        debug!("    AT END, adding last prev={:?}", self.prev());
        let prev = self.take_prev();
        let CoverageSpans { pending_dups, mut refined_spans, .. } = self;
        for dup in pending_dups {
            debug!("    ...adding at least one pending dup={:?}", dup);
            refined_spans.push(dup);
        }

        // Async functions wrap a closure that implements the body to be executed. The enclosing
        // function is called and returns an `impl Future` without initially executing any of the
        // body. To avoid showing the return from the enclosing function as a "covered" return from
        // the closure, the enclosing function's `TerminatorKind::Return`s `CoverageSpan` is
        // excluded. The closure's `Return` is the only one that will be counted. This provides
        // adequate coverage, and more intuitive counts. (Avoids double-counting the closing brace
        // of the function body.)
        let body_ends_with_closure = if let Some(last_covspan) = refined_spans.last() {
            last_covspan.is_closure && last_covspan.span.hi() == self.body_span.hi()
        } else {
            false
        };

        if !body_ends_with_closure {
            refined_spans.push(prev);
        }

        // Remove `CoverageSpan`s derived from closures, originally added to ensure the coverage
        // regions for the current function leave room for the closure's own coverage regions
        // (injected separately, from the closure's own MIR).
        refined_spans.retain(|covspan| !covspan.is_closure);
        refined_spans
    }

    // Generate a set of `CoverageSpan`s from the filtered set of `Statement`s and `Terminator`s of
    // the `BasicBlock`(s) in the given `BasicCoverageBlockData`. One `CoverageSpan` is generated
    // for each `Statement` and `Terminator`. (Note that subsequent stages of coverage analysis will
    // merge some `CoverageSpan`s, at which point a `CoverageSpan` may represent multiple
    // `Statement`s and/or `Terminator`s.)
    fn bcb_to_initial_coverage_spans(
        &self,
        bcb: BasicCoverageBlock,
        bcb_data: &'a BasicCoverageBlockData,
    ) -> Vec<CoverageSpan> {
        bcb_data
            .basic_blocks
            .iter()
            .flat_map(|&bb| {
                let data = &self.mir_body[bb];
                data.statements
                    .iter()
                    .enumerate()
                    .filter_map(move |(index, statement)| {
                        filtered_statement_span(statement, self.body_span).map(|span| {
                            CoverageSpan::for_statement(statement, span, bcb, bb, index)
                        })
                    })
                    .chain(
                        filtered_terminator_span(data.terminator(), self.body_span)
                            .map(|span| CoverageSpan::for_terminator(span, bcb, bb)),
                    )
            })
            .collect()
    }

    fn curr(&self) -> &CoverageSpan {
        self.some_curr
            .as_ref()
            .unwrap_or_else(|| bug!("invalid attempt to unwrap a None some_curr"))
    }

    fn curr_mut(&mut self) -> &mut CoverageSpan {
        self.some_curr
            .as_mut()
            .unwrap_or_else(|| bug!("invalid attempt to unwrap a None some_curr"))
    }

    fn prev(&self) -> &CoverageSpan {
        self.some_prev
            .as_ref()
            .unwrap_or_else(|| bug!("invalid attempt to unwrap a None some_prev"))
    }

    fn prev_mut(&mut self) -> &mut CoverageSpan {
        self.some_prev
            .as_mut()
            .unwrap_or_else(|| bug!("invalid attempt to unwrap a None some_prev"))
    }

    fn take_prev(&mut self) -> CoverageSpan {
        self.some_prev.take().unwrap_or_else(|| bug!("invalid attempt to unwrap a None some_prev"))
    }

    /// If there are `pending_dups` but `prev` is not a matching dup (`prev.span` doesn't match the
    /// `pending_dups` spans), then one of the following two things happened during the previous
    /// iteration:
    ///
    /// * the previous `curr` span (which is now `prev`) was not a duplicate of the `pending_dups`
    ///   (in which case there should be at least two spans in `pending_dups`); or
    /// * the `span` of `prev` was modified by `curr_mut().merge_from(prev)` (in which case
    ///   `pending_dups` could have as few as one span).
    ///
    /// In either case, no more spans will match the span of `pending_dups`, so
    /// add the `pending_dups` if they don't overlap `curr`, and clear the list.
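    ///
    /// For example (illustrative byte offsets), if `pending_dups` holds spans covering `[5..10)`
    /// and `prev` now covers `[5..12)` (because it was merged last iteration), the pending spans
    /// can no longer grow: they are flushed to `refined_spans` if `curr` starts at or after
    /// offset 10, and simply dropped if `curr` still overlaps them.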
    fn check_pending_dups(&mut self) {
        if let Some(dup) = self.pending_dups.last() {
            if dup.span != self.prev().span {
                debug!(
                    "    SAME spans, but pending_dups are NOT THE SAME, so BCBs matched on \
                    previous iteration, or prev started a new disjoint span"
                );
                if dup.span.hi() <= self.curr().span.lo() {
                    let pending_dups = self.pending_dups.split_off(0);
                    for dup in pending_dups.into_iter() {
                        debug!("    ...adding at least one pending={:?}", dup);
                        self.refined_spans.push(dup);
                    }
                } else {
                    self.pending_dups.clear();
                }
            }
        }
    }

    /// Advance `prev` to `curr` (if any), and `curr` to the next `CoverageSpan` in sorted order.
    fn next_coverage_span(&mut self) -> bool {
        if let Some(curr) = self.some_curr.take() {
            self.some_prev = Some(curr);
            self.prev_original_span = self.curr_original_span;
        }
        while let Some(curr) = self.sorted_spans_iter.as_mut().unwrap().next() {
            debug!("FOR curr={:?}", curr);
            if self.prev_starts_after_next(&curr) {
                debug!(
                    "  prev.span starts after curr.span, so curr will be dropped (skipping past \
                    closure?); prev={:?}",
                    self.prev()
                );
            } else {
                // Save a copy of the original span for `curr` in case the `CoverageSpan` is changed
                // by `self.curr_mut().merge_from(prev)`.
                self.curr_original_span = curr.span;
                self.some_curr.replace(curr);
                self.check_pending_dups();
                return true;
            }
        }
        false
    }

    /// If called, then the next call to `next_coverage_span()` will *not* update `prev` with the
    /// `curr` coverage span.
    fn take_curr(&mut self) -> CoverageSpan {
        self.some_curr.take().unwrap_or_else(|| bug!("invalid attempt to unwrap a None some_curr"))
    }

    /// Returns true if the curr span should be skipped because prev has already advanced beyond the
    /// end of curr. This can only happen if a prior iteration updated `prev` to skip past a region
    /// of code, such as skipping past a closure.
    fn prev_starts_after_next(&self, next_curr: &CoverageSpan) -> bool {
        self.prev().span.lo() > next_curr.span.lo()
    }

    /// Returns true if the curr span starts past the end of the prev span, which means they don't
    /// overlap, so we now know the prev can be added to the refined coverage spans.
    fn prev_ends_before_curr(&self) -> bool {
        self.prev().span.hi() <= self.curr().span.lo()
    }

    /// If `prev`'s span extends left of the closure (`curr`), carve out the closure's span from
    /// `prev`'s span. (The closure's coverage counters will be injected when processing the
    /// closure's own MIR.) Add the portion of the span to the left of the closure; and if the span
    /// extends to the right of the closure, update `prev` to that portion of the span. For any
    /// `pending_dups`, repeat the same process.
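    ///
    /// For example (an illustrative sketch), given a `prev` span covering a statement that
    /// contains a closure:
    ///
    /// ```text
    /// let total = init() + nums.iter().map(|n| n * n).sum::<u32>();
    /// ```
    ///
    /// the closure `|n| n * n` is carved out: the portion of `prev` to its left is pushed to
    /// `refined_spans`, and `prev` is narrowed to start at the closure's end so the rest of the
    /// statement can still be refined on later iterations.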
    fn carve_out_span_for_closure(&mut self) {
        let curr_span = self.curr().span;
        let left_cutoff = curr_span.lo();
        let right_cutoff = curr_span.hi();
        let has_pre_closure_span = self.prev().span.lo() < right_cutoff;
        let has_post_closure_span = self.prev().span.hi() > right_cutoff;
        let mut pending_dups = self.pending_dups.split_off(0);
        if has_pre_closure_span {
            let mut pre_closure = self.prev().clone();
            pre_closure.span = pre_closure.span.with_hi(left_cutoff);
            debug!("  prev overlaps a closure. Adding span for pre_closure={:?}", pre_closure);
            if !pending_dups.is_empty() {
                for mut dup in pending_dups.iter().cloned() {
                    dup.span = dup.span.with_hi(left_cutoff);
                    debug!("    ...and at least one pre_closure dup={:?}", dup);
                    self.refined_spans.push(dup);
                }
            }
            self.refined_spans.push(pre_closure);
        }
        if has_post_closure_span {
            // Update prev.span to start after the closure (and discard curr)
            self.prev_mut().span = self.prev().span.with_lo(right_cutoff);
            self.prev_original_span = self.prev().span;
            for dup in pending_dups.iter_mut() {
                dup.span = dup.span.with_lo(right_cutoff);
            }
            self.pending_dups.append(&mut pending_dups);
            let closure_covspan = self.take_curr();
            self.refined_spans.push(closure_covspan); // since self.prev() was already updated
        } else {
            pending_dups.clear();
        }
    }

    /// Called if `curr.span` equals `prev_original_span` (and potentially equal to all
    /// `pending_dups` spans, if any); but keep in mind, `prev.span` may start at a `Span.lo()` that
    /// is less than (further left of) `prev_original_span.lo()`.
    ///
    /// When two `CoverageSpan`s have the same `Span`, dominated spans can be discarded; but if
    /// neither `CoverageSpan` dominates the other, both (or possibly more than two) are held,
    /// until their disposition is determined. In this latter case, the `prev` dup is moved into
    /// `pending_dups` so the new `curr` dup can be moved to `prev` for the next iteration.
    fn hold_pending_dups_unless_dominated(&mut self) {
        // Equal coverage spans are ordered by dominators before dominated (if any), so it should be
        // impossible for `curr` to dominate any previous `CoverageSpan`.
        debug_assert!(!self.span_bcb_is_dominated_by(self.prev(), self.curr()));

        let initial_pending_count = self.pending_dups.len();
        if initial_pending_count > 0 {
            let mut pending_dups = self.pending_dups.split_off(0);
            pending_dups.retain(|dup| !self.span_bcb_is_dominated_by(self.curr(), dup));
            self.pending_dups.append(&mut pending_dups);
            if self.pending_dups.len() < initial_pending_count {
                debug!(
                    "  discarded {} of {} pending_dups that dominated curr",
                    initial_pending_count - self.pending_dups.len(),
                    initial_pending_count
                );
            }
        }

        if self.span_bcb_is_dominated_by(self.curr(), self.prev()) {
            debug!(
                "  different bcbs but SAME spans, and prev dominates curr. Discard prev={:?}",
                self.prev()
            );
            self.cutoff_prev_at_overlapping_curr();
            // If one span dominates the other, associate the span with the code from the dominated
            // block only (`curr`), and discard the overlapping portion of the `prev` span. (Note
            // that if `prev.span` is wider than `prev_original_span`, a `CoverageSpan` will still
            // be created for `prev`'s block, for the non-overlapping portion, left of `curr.span`.)
            //
            // For example:
            //     match somenum {
            //         x if x < 1 => { ... }
            //     }...
            //
            // The span for the first `x` is referenced by both the pattern block (every time it is
            // evaluated) and the arm code (only when matched). The counter will be applied only to
            // the dominated block. This allows coverage to track and highlight things like the
            // assignment of `x` above, if the branch is matched, making `x` available to the arm
            // code; and to track and highlight the question mark `?` "try" operator at the end of
            // a function call returning a `Result`, so the `?` is covered when the function returns
            // an `Err`, and not counted as covered if the function always returns `Ok`.
        } else {
            // Save `prev` in `pending_dups`. (`curr` will become `prev` in the next iteration.)
            // If the `curr` CoverageSpan is later discarded, `pending_dups` can be discarded as
            // well; but if `curr` is added to refined_spans, the `pending_dups` will also be added.
            debug!(
                "  different bcbs but SAME spans, and neither dominates, so keep curr for \
                next iter, and, pending upcoming spans (unless overlapping) add prev={:?}",
                self.prev()
            );
            let prev = self.take_prev();
            self.pending_dups.push(prev);
        }
    }

    /// `curr` overlaps `prev`. If `prev`'s span extends left of `curr`'s span, keep _only_
    /// statements that end before `curr.lo()` (if any), and add the portion of the
    /// combined span for those statements. Any other statements have overlapping spans
    /// that can be ignored because `curr` and/or other upcoming statements/spans inside
    /// the overlap area will produce their own counters. This disambiguation process
    /// avoids injecting multiple counters for overlapping spans, and the potential for
    /// double-counting.
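    ///
    /// For example (illustrative byte offsets), if `prev` covers `[10..30)` and `curr` covers
    /// `[20..40)`, `prev` keeps only the statements whose spans end at or before offset 20; if
    /// any remain, a shortened `CoverageSpan` ending at the last such statement is added,
    /// otherwise `prev` is dropped and the overlapping region is left to `curr` (and later
    /// spans) to count.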
    fn cutoff_prev_at_overlapping_curr(&mut self) {
        debug!(
            "  different bcbs, overlapping spans, so ignore/drop pending and only add prev \
            if it has statements that end before curr; prev={:?}",
            self.prev()
        );
        if self.pending_dups.is_empty() {
            let curr_span = self.curr().span;
            self.prev_mut().cutoff_statements_at(curr_span.lo());
            if self.prev().coverage_statements.is_empty() {
                debug!("  ... no non-overlapping statements to add");
            } else {
                debug!("  ... adding modified prev={:?}", self.prev());
                let prev = self.take_prev();
                self.refined_spans.push(prev);
            }
        } else {
            // with `pending_dups`, `prev` cannot have any statements that don't overlap
            self.pending_dups.clear();
        }
    }

    fn span_bcb_is_dominated_by(&self, covspan: &CoverageSpan, dom_covspan: &CoverageSpan) -> bool {
        self.basic_coverage_blocks.is_dominated_by(covspan.bcb, dom_covspan.bcb)
    }
}

pub(super) fn filtered_statement_span(
    statement: &'a Statement<'tcx>,
    body_span: Span,
) -> Option<Span> {
    match statement.kind {
        // These statements have spans that are often outside the scope of the executed source code
        // for their parent `BasicBlock`.
        StatementKind::StorageLive(_)
        | StatementKind::StorageDead(_)
        // Coverage statements should not be encountered here, but if they are, don't inject
        // coverage spans for them
        | StatementKind::Coverage(_)
        // Ignore `Nop`s
        | StatementKind::Nop => None,

        // FIXME(#78546): MIR InstrumentCoverage - Can the source_info.span for `FakeRead`
        // statements be more consistent?
        //
        // FakeReadCause::ForGuardBinding, in this example:
        //     match somenum {
        //         x if x < 1 => { ... }
        //     }...
        // The BasicBlock within the match arm code included one of these statements, but the span
        // for it covered the `1` in this source. The actual statements have nothing to do with that
        // source span:
        //     FakeRead(ForGuardBinding, _4);
        // where `_4` is:
        //     _4 = &_1; (at the span for the first `x`)
        // and `_1` is the `Place` for `somenum`.
        //
        // If and when the Issue is resolved, remove this special case match pattern:
        StatementKind::FakeRead(box (cause, _)) if cause == FakeReadCause::ForGuardBinding => None,

        // Retain spans from all other statements
        StatementKind::FakeRead(box (_, _)) // Not including `ForGuardBinding`
        | StatementKind::CopyNonOverlapping(..)
        | StatementKind::Assign(_)
        | StatementKind::SetDiscriminant { .. }
        | StatementKind::LlvmInlineAsm(_)
        | StatementKind::Retag(_, _)
        | StatementKind::AscribeUserType(_, _) => {
            Some(function_source_span(statement.source_info.span, body_span))
        }
    }
}

pub(super) fn filtered_terminator_span(
    terminator: &'a Terminator<'tcx>,
    body_span: Span,
) -> Option<Span> {
    match terminator.kind {
        // These terminators have spans that don't positively contribute to computing a reasonable
        // span of actually executed source code. (For example, SwitchInt terminators extracted from
        // an `if condition { block }` have a span that includes the executed block, if true,
        // but for coverage, the code region executed, up to *and* through the SwitchInt,
        // actually stops before the if's block.)
        TerminatorKind::Unreachable // Unreachable blocks are not connected to the MIR CFG
        | TerminatorKind::Assert { .. }
        | TerminatorKind::Drop { .. }
        | TerminatorKind::DropAndReplace { .. }
        | TerminatorKind::SwitchInt { .. }
        // For `FalseEdge`, only the `real` branch is taken, so it is similar to a `Goto`.
        | TerminatorKind::FalseEdge { .. }
        | TerminatorKind::Goto { .. } => None,

        // Call `func` operand can have a more specific span when part of a chain of calls
        TerminatorKind::Call { ref func, .. } => {
            let mut span = terminator.source_info.span;
            if let mir::Operand::Constant(box constant) = func {
                if constant.span.lo() > span.lo() {
                    span = span.with_lo(constant.span.lo());
                }
            }
            Some(function_source_span(span, body_span))
        }

        // Retain spans from all other terminators
        TerminatorKind::Resume
        | TerminatorKind::Abort
        | TerminatorKind::Return
        | TerminatorKind::Yield { .. }
        | TerminatorKind::GeneratorDrop
        | TerminatorKind::FalseUnwind { .. }
        | TerminatorKind::InlineAsm { .. } => {
            Some(function_source_span(terminator.source_info.span, body_span))
        }
    }
}

#[inline]
fn function_source_span(span: Span, body_span: Span) -> Span {
    let span = original_sp(span, body_span).with_ctxt(body_span.ctxt());
    if body_span.contains(span) { span } else { body_span }
}