//! Support code for rustc's built-in unit-test and micro-benchmarking
//! framework.
//!
//! Almost all user code will only be interested in `Bencher` and
//! `black_box`. All other interactions (such as writing tests and
//! benchmarks themselves) should be done via the `#[test]` and
//! `#[bench]` attributes.
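//!
//! For example, a benchmark using both looks like this (a minimal sketch;
//! `#[bench]` requires a nightly compiler):
//!
//! ```ignore (requires nightly and the `test` feature)
//! #![feature(test)]
//! extern crate test;
//!
//! use test::{Bencher, black_box};
//!
//! #[bench]
//! fn bench_sum(b: &mut Bencher) {
//!     // `black_box` keeps the optimizer from removing the computation.
//!     b.iter(|| (0..1000u64).fold(0, |acc, x| black_box(acc + x)));
//! }
//! ```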
//!
//! See the [Testing Chapter](../book/ch11-00-testing.html) of the book for more
//! details.

// Currently, not much of this is meant for users. It is intended to
// support the simplest interface possible for representing and
// running tests while providing a base that other test frameworks may
// build off of.

#![unstable(feature = "test", issue = "50297")]
#![doc(test(attr(deny(warnings))))]
#![doc(rust_logo)]
#![feature(rustdoc_internals)]
#![feature(file_buffered)]
#![feature(internal_output_capture)]
#![feature(staged_api)]
#![feature(process_exitcode_internals)]
#![feature(panic_can_unwind)]
#![feature(test)]
#![allow(internal_features)]
#![warn(rustdoc::unescaped_backticks)]

pub use cli::TestOpts;

pub use self::ColorConfig::*;
pub use self::bench::{Bencher, black_box};
pub use self::console::run_tests_console;
pub use self::options::{ColorConfig, Options, OutputFormat, RunIgnored, ShouldPanic};
pub use self::types::TestName::*;
pub use self::types::*;

// Module used by the test harness code that rustc generates; it re-exports
// everything that generated code refers to under the `test` path.
pub mod test {
    pub use crate::bench::Bencher;
    pub use crate::cli::{TestOpts, parse_opts};
    pub use crate::helpers::metrics::{Metric, MetricMap};
    pub use crate::options::{Options, RunIgnored, RunStrategy, ShouldPanic};
    pub use crate::test_result::{TestResult, TrFailed, TrFailedMsg, TrIgnored, TrOk};
    pub use crate::time::{TestExecTime, TestTimeOptions};
    pub use crate::types::{
        DynTestFn, DynTestName, StaticBenchFn, StaticTestFn, StaticTestName, TestDesc,
        TestDescAndFn, TestId, TestName, TestType,
    };
    pub use crate::{assert_test_result, filter_tests, run_test, test_main, test_main_static};
}

use std::collections::VecDeque;
use std::io::prelude::Write;
use std::mem::ManuallyDrop;
use std::panic::{self, AssertUnwindSafe, PanicHookInfo, catch_unwind};
use std::process::{self, Command, Termination};
use std::sync::mpsc::{Sender, channel};
use std::sync::{Arc, Mutex};
use std::time::{Duration, Instant};
use std::{env, io, thread};

pub mod bench;
mod cli;
mod console;
mod event;
mod formatters;
mod helpers;
mod options;
pub mod stats;
mod term;
mod test_result;
mod time;
mod types;

#[cfg(test)]
mod tests;

use core::any::Any;

use event::{CompletedTest, TestEvent};
use helpers::concurrency::get_concurrency;
use helpers::shuffle::{get_shuffle_seed, shuffle_tests};
use options::RunStrategy;
use test_result::*;
use time::TestExecTime;

// Process exit code used to indicate test failures.
const ERROR_EXIT_CODE: i32 = 101;

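// Environment variables used to re-invoke the current test binary as a
// subprocess that runs exactly one test (see `test_main_static_abort` and
// `spawn_test_subprocess`).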
const SECONDARY_TEST_INVOKER_VAR: &str = "__RUST_TEST_INVOKE";
const SECONDARY_TEST_BENCH_BENCHMARKS_VAR: &str = "__RUST_TEST_BENCH_BENCHMARKS";

// The default console test runner. It accepts the command-line arguments and
// a vector of `TestDescAndFn` values describing the tests to run.
pub fn test_main(args: &[String], tests: Vec<TestDescAndFn>, options: Option<Options>) {
    let mut opts = match cli::parse_opts(args) {
        Some(Ok(o)) => o,
        Some(Err(msg)) => {
            eprintln!("error: {msg}");
            process::exit(ERROR_EXIT_CODE);
        }
        None => return,
    };
    if let Some(options) = options {
        opts.options = options;
    }
    if opts.list {
        if let Err(e) = console::list_tests_console(&opts, tests) {
            eprintln!("error: io error when listing tests: {e:?}");
            process::exit(ERROR_EXIT_CODE);
        }
    } else {
        if !opts.nocapture {
            // If we encounter a non-unwinding panic, flush any captured output from the current test,
            // and stop capturing output to ensure that the non-unwinding panic message is visible.
            // We also acquire the locks for both output streams to prevent output from other threads
            // from interleaving with the panic message or appearing after it.
            let builtin_panic_hook = panic::take_hook();
            let hook = Box::new({
                move |info: &'_ PanicHookInfo<'_>| {
                    if !info.can_unwind() {
                        std::mem::forget(std::io::stderr().lock());
                        let mut stdout = ManuallyDrop::new(std::io::stdout().lock());
                        if let Some(captured) = io::set_output_capture(None) {
                            if let Ok(data) = captured.lock() {
                                let _ = stdout.write_all(&data);
                                let _ = stdout.flush();
                            }
                        }
                    }
                    builtin_panic_hook(info);
                }
            });
            panic::set_hook(hook);
        }
        let res = console::run_tests_console(&opts, tests);
        // Prevent Valgrind from reporting reachable blocks in users' unit tests.
        drop(panic::take_hook());
        match res {
            Ok(true) => {}
            Ok(false) => process::exit(ERROR_EXIT_CODE),
            Err(e) => {
                eprintln!("error: io error when listing tests: {e:?}");
                process::exit(ERROR_EXIT_CODE);
            }
        }
    }
}

/// A variant optimized for invocation with a static test vector.
/// This will panic (intentionally) when fed any dynamic tests.
///
/// This is the entry point for the main function generated by `rustc --test`
/// when panic=unwind.
pub fn test_main_static(tests: &[&TestDescAndFn]) {
    let args = env::args().collect::<Vec<_>>();
    let owned_tests: Vec<_> = tests.iter().map(make_owned_test).collect();
    test_main(&args, owned_tests, None)
}

/// A variant optimized for invocation with a static test vector.
/// This will panic (intentionally) when fed any dynamic tests.
///
/// Runs tests in panic=abort mode, which involves spawning subprocesses for
/// tests.
///
/// This is the entry point for the main function generated by `rustc --test`
/// when panic=abort.
pub fn test_main_static_abort(tests: &[&TestDescAndFn]) {
    // If we're being run as the spawned secondary process, run the requested
    // test here; run_test_in_spawned_subprocess will then exit the process.
    if let Ok(name) = env::var(SECONDARY_TEST_INVOKER_VAR) {
        env::remove_var(SECONDARY_TEST_INVOKER_VAR);

        // Convert benchmarks to tests if we're not benchmarking.
        let mut tests = tests.iter().map(make_owned_test).collect::<Vec<_>>();
        if env::var(SECONDARY_TEST_BENCH_BENCHMARKS_VAR).is_ok() {
            env::remove_var(SECONDARY_TEST_BENCH_BENCHMARKS_VAR);
        } else {
            tests = convert_benchmarks_to_tests(tests);
        }

        let test = tests
            .into_iter()
            .find(|test| test.desc.name.as_slice() == name)
            .unwrap_or_else(|| panic!("couldn't find a test with the provided name '{name}'"));
        let TestDescAndFn { desc, testfn } = test;
        match testfn.into_runnable() {
            Runnable::Test(runnable_test) => {
                if runnable_test.is_dynamic() {
                    panic!("only static tests are supported");
                }
                run_test_in_spawned_subprocess(desc, runnable_test);
            }
            Runnable::Bench(_) => {
                panic!("benchmarks should not be executed into child processes")
            }
        }
    }

    let args = env::args().collect::<Vec<_>>();
    let owned_tests: Vec<_> = tests.iter().map(make_owned_test).collect();
    test_main(&args, owned_tests, Some(Options::new().panic_abort(true)))
}

/// Clones static test values into owned ones for a dynamic vector, which
/// test_main() needs in order to hand ownership of each test to a parallel
/// test runner.
///
/// This will panic when fed any dynamic tests, because they cannot be cloned.
fn make_owned_test(test: &&TestDescAndFn) -> TestDescAndFn {
    match test.testfn {
        StaticTestFn(f) => TestDescAndFn { testfn: StaticTestFn(f), desc: test.desc.clone() },
        StaticBenchFn(f) => TestDescAndFn { testfn: StaticBenchFn(f), desc: test.desc.clone() },
        _ => panic!("non-static tests passed to test::test_main_static"),
    }
}

/// Invoked when a unit test terminates. Returns `Err` if the test is
/// considered a failure: it invokes `report()` on the returned value and
/// treats any non-zero status code as a failure.
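///
/// A minimal sketch of the contract (illustrative only):
///
/// ```ignore (illustrative)
/// assert!(assert_test_result(()).is_ok()); // `()` reports success
/// assert!(assert_test_result(Err::<(), String>("boom".to_string())).is_err());
/// ```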
pub fn assert_test_result<T: Termination>(result: T) -> Result<(), String> {
    let code = result.report().to_i32();
    if code == 0 {
        Ok(())
    } else {
        Err(format!(
            "the test returned a termination value with a non-zero status code \
             ({code}) which indicates a failure"
        ))
    }
}

struct FilteredTests {
    tests: Vec<(TestId, TestDescAndFn)>,
    benches: Vec<(TestId, TestDescAndFn)>,
    next_id: usize,
}

impl FilteredTests {
    fn add_bench(&mut self, desc: TestDesc, testfn: TestFn) {
        let test = TestDescAndFn { desc, testfn };
        self.benches.push((TestId(self.next_id), test));
        self.next_id += 1;
    }
    fn add_test(&mut self, desc: TestDesc, testfn: TestFn) {
        let test = TestDescAndFn { desc, testfn };
        self.tests.push((TestId(self.next_id), test));
        self.next_id += 1;
    }
    fn total_len(&self) -> usize {
        self.tests.len() + self.benches.len()
    }
}

pub fn run_tests<F>(
    opts: &TestOpts,
    tests: Vec<TestDescAndFn>,
    mut notify_about_test_event: F,
) -> io::Result<()>
where
    F: FnMut(TestEvent) -> io::Result<()>,
{
    use std::collections::HashMap;
    use std::hash::{BuildHasherDefault, DefaultHasher};
    use std::sync::mpsc::RecvTimeoutError;

    struct RunningTest {
        join_handle: Option<thread::JoinHandle<()>>,
    }

    impl RunningTest {
        fn join(self, completed_test: &mut CompletedTest) {
            if let Some(join_handle) = self.join_handle {
            if join_handle.join().is_err() {
                    if let TrOk = completed_test.result {
                        completed_test.result =
                            TrFailedMsg("panicked after reporting success".to_string());
                    }
                }
            }
        }
    }

    // Use a deterministic hasher (instead of the default randomly seeded one)
    // so behavior is reproducible from run to run.
    type TestMap = HashMap<TestId, RunningTest, BuildHasherDefault<DefaultHasher>>;

    struct TimeoutEntry {
        id: TestId,
        desc: TestDesc,
        timeout: Instant,
    }

    let tests_len = tests.len();

    let mut filtered = FilteredTests { tests: Vec::new(), benches: Vec::new(), next_id: 0 };

    let mut filtered_tests = filter_tests(opts, tests);
    if !opts.bench_benchmarks {
        filtered_tests = convert_benchmarks_to_tests(filtered_tests);
    }

    for test in filtered_tests {
        let mut desc = test.desc;
        desc.name = desc.name.with_padding(test.testfn.padding());

        match test.testfn {
            DynBenchFn(_) | StaticBenchFn(_) => {
                filtered.add_bench(desc, test.testfn);
            }
            testfn => {
                filtered.add_test(desc, testfn);
            }
        }
    }

    let filtered_out = tests_len - filtered.total_len();
    let event = TestEvent::TeFilteredOut(filtered_out);
    notify_about_test_event(event)?;

    let shuffle_seed = get_shuffle_seed(opts);

    let event = TestEvent::TeFiltered(filtered.total_len(), shuffle_seed);
    notify_about_test_event(event)?;

    let concurrency = opts.test_threads.unwrap_or_else(get_concurrency);

    let mut remaining = filtered.tests;
    if let Some(shuffle_seed) = shuffle_seed {
        shuffle_tests(shuffle_seed, &mut remaining);
    }
    // Store the tests in a VecDeque so we can efficiently remove the first element to run the
    // tests in the order they were passed (unless shuffled).
    let mut remaining = VecDeque::from(remaining);
    let mut pending = 0;

    let (tx, rx) = channel::<CompletedTest>();
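    // With panic=abort a failing test cannot unwind past the panic, so each
    // test runs in its own subprocess unless the user forces in-process runs.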
    let run_strategy = if opts.options.panic_abort && !opts.force_run_in_process {
        RunStrategy::SpawnPrimary
    } else {
        RunStrategy::InProcess
    };

    let mut running_tests: TestMap = HashMap::default();
    let mut timeout_queue: VecDeque<TimeoutEntry> = VecDeque::new();

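    // Pops every entry whose deadline has passed. Entries for tests that have
    // already finished (and were removed from `running_tests`) are discarded
    // without being reported as timed out.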
    fn get_timed_out_tests(
        running_tests: &TestMap,
        timeout_queue: &mut VecDeque<TimeoutEntry>,
    ) -> Vec<TestDesc> {
        let now = Instant::now();
        let mut timed_out = Vec::new();
        while let Some(timeout_entry) = timeout_queue.front() {
            if now < timeout_entry.timeout {
                break;
            }
            let timeout_entry = timeout_queue.pop_front().unwrap();
            if running_tests.contains_key(&timeout_entry.id) {
                timed_out.push(timeout_entry.desc);
            }
        }
        timed_out
    }

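    // How long `recv_timeout` may block before the earliest pending deadline
    // expires (zero if it has already passed); `None` when no test is being
    // timed.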
    fn calc_timeout(timeout_queue: &VecDeque<TimeoutEntry>) -> Option<Duration> {
        timeout_queue.front().map(|&TimeoutEntry { timeout: next_timeout, .. }| {
            let now = Instant::now();
            if next_timeout >= now { next_timeout - now } else { Duration::new(0, 0) }
        })
    }

    if concurrency == 1 {
        while !remaining.is_empty() {
            let (id, test) = remaining.pop_front().unwrap();
            let event = TestEvent::TeWait(test.desc.clone());
            notify_about_test_event(event)?;
            let join_handle = run_test(opts, !opts.run_tests, id, test, run_strategy, tx.clone());
            // Wait for the test to complete.
            let mut completed_test = rx.recv().unwrap();
            RunningTest { join_handle }.join(&mut completed_test);

            let fail_fast = match completed_test.result {
                TrIgnored | TrOk | TrBench(_) => false,
                TrFailed | TrFailedMsg(_) | TrTimedFail => opts.fail_fast,
            };

            let event = TestEvent::TeResult(completed_test);
            notify_about_test_event(event)?;

            if fail_fast {
                return Ok(());
            }
        }
    } else {
        while pending > 0 || !remaining.is_empty() {
            while pending < concurrency && !remaining.is_empty() {
                let (id, test) = remaining.pop_front().unwrap();
                let timeout = time::get_default_test_timeout();
                let desc = test.desc.clone();

                let event = TestEvent::TeWait(desc.clone());
                notify_about_test_event(event)?;
                let join_handle =
                    run_test(opts, !opts.run_tests, id, test, run_strategy, tx.clone());
                running_tests.insert(id, RunningTest { join_handle });
                timeout_queue.push_back(TimeoutEntry { id, desc, timeout });
                pending += 1;
            }

            let mut res;
            loop {
                if let Some(timeout) = calc_timeout(&timeout_queue) {
                    res = rx.recv_timeout(timeout);
                    for test in get_timed_out_tests(&running_tests, &mut timeout_queue) {
                        let event = TestEvent::TeTimeout(test);
                        notify_about_test_event(event)?;
                    }

                    match res {
                        Err(RecvTimeoutError::Timeout) => {
                            // Result is not yet ready, continue waiting.
                        }
                        _ => {
                            // We've got a result, stop the loop.
                            break;
                        }
                    }
                } else {
                    res = rx.recv().map_err(|_| RecvTimeoutError::Disconnected);
                    break;
                }
            }

            let mut completed_test = res.unwrap();
            let running_test = running_tests.remove(&completed_test.id).unwrap();
            running_test.join(&mut completed_test);

            let fail_fast = match completed_test.result {
                TrIgnored | TrOk | TrBench(_) => false,
                TrFailed | TrFailedMsg(_) | TrTimedFail => opts.fail_fast,
            };

            let event = TestEvent::TeResult(completed_test);
            notify_about_test_event(event)?;
            pending -= 1;

            if fail_fast {
                // Leak the receiver so the remaining test threads' sends
                // still succeed; dropping it would make their
                // `send(..).unwrap()` calls panic.
                std::mem::forget(rx);
                return Ok(());
            }
        }
    }

    if opts.bench_benchmarks {
        // All benchmarks run at the end, in serial.
        for (id, b) in filtered.benches {
            let event = TestEvent::TeWait(b.desc.clone());
            notify_about_test_event(event)?;
            let join_handle = run_test(opts, false, id, b, run_strategy, tx.clone());
            // Wait for the test to complete.
            let mut completed_test = rx.recv().unwrap();
            RunningTest { join_handle }.join(&mut completed_test);

            let event = TestEvent::TeResult(completed_test);
            notify_about_test_event(event)?;
        }
    }
    Ok(())
}

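/// Applies the filtering options in `opts`: name filters (substring match,
/// or exact match when `filter_exact` is set), skip patterns, optional
/// exclusion of `#[should_panic]` tests, and the ignored-test handling mode.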
pub fn filter_tests(opts: &TestOpts, tests: Vec<TestDescAndFn>) -> Vec<TestDescAndFn> {
    let mut filtered = tests;
    let matches_filter = |test: &TestDescAndFn, filter: &str| {
        let test_name = test.desc.name.as_slice();

        match opts.filter_exact {
            true => test_name == filter,
            false => test_name.contains(filter),
        }
    };

    // Remove tests that don't match the test filter
    if !opts.filters.is_empty() {
        filtered.retain(|test| opts.filters.iter().any(|filter| matches_filter(test, filter)));
    }

    // Skip tests that match any of the skip filters
    if !opts.skip.is_empty() {
        filtered.retain(|test| !opts.skip.iter().any(|sf| matches_filter(test, sf)));
    }

    // Exclude #[should_panic] tests
    if opts.exclude_should_panic {
        filtered.retain(|test| test.desc.should_panic == ShouldPanic::No);
    }

    // Maybe clear the `ignore` flag, depending on the run-ignored mode
    match opts.run_ignored {
        RunIgnored::Yes => {
            filtered.iter_mut().for_each(|test| test.desc.ignore = false);
        }
        RunIgnored::Only => {
            filtered.retain(|test| test.desc.ignore);
            filtered.iter_mut().for_each(|test| test.desc.ignore = false);
        }
        RunIgnored::No => {}
    }

    filtered
}

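/// Rewraps each benchmark so that its body runs once as an ordinary test;
/// non-benchmark entries are passed through unchanged.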
pub fn convert_benchmarks_to_tests(tests: Vec<TestDescAndFn>) -> Vec<TestDescAndFn> {
    // Convert benchmarks to tests, if we're not benchmarking them
    tests
        .into_iter()
        .map(|x| {
            let testfn = match x.testfn {
                DynBenchFn(benchfn) => DynBenchAsTestFn(benchfn),
                StaticBenchFn(benchfn) => StaticBenchAsTestFn(benchfn),
                f => f,
            };
            TestDescAndFn { desc: x.desc, testfn }
        })
        .collect()
}

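/// Runs a single test: on a freshly spawned thread (returning its
/// `JoinHandle`), in a spawned subprocess, or synchronously on the current
/// thread (returning `None`). The outcome is reported through `monitor_ch`.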
pub fn run_test(
    opts: &TestOpts,
    force_ignore: bool,
    id: TestId,
    test: TestDescAndFn,
    strategy: RunStrategy,
    monitor_ch: Sender<CompletedTest>,
) -> Option<thread::JoinHandle<()>> {
    let TestDescAndFn { desc, testfn } = test;

    // Emscripten can catch panics, but other wasm targets and zkvm cannot,
    // so `should_panic` tests cannot run there and are ignored.
    let ignore_because_no_process_support = desc.should_panic != ShouldPanic::No
        && (cfg!(target_family = "wasm") || cfg!(target_os = "zkvm"))
        && !cfg!(target_os = "emscripten");

    if force_ignore || desc.ignore || ignore_because_no_process_support {
        let message = CompletedTest::new(id, desc, TrIgnored, None, Vec::new());
        monitor_ch.send(message).unwrap();
        return None;
    }

    match testfn.into_runnable() {
        Runnable::Test(runnable_test) => {
            if runnable_test.is_dynamic() {
                match strategy {
                    RunStrategy::InProcess => (),
                    _ => panic!("Cannot run dynamic test fn out-of-process"),
                }
            }

            let name = desc.name.clone();
            let nocapture = opts.nocapture;
            let time_options = opts.time_options;
            let bench_benchmarks = opts.bench_benchmarks;

            let runtest = move || match strategy {
                RunStrategy::InProcess => run_test_in_process(
                    id,
                    desc,
                    nocapture,
                    time_options.is_some(),
                    runnable_test,
                    monitor_ch,
                    time_options,
                ),
                RunStrategy::SpawnPrimary => spawn_test_subprocess(
                    id,
                    desc,
                    nocapture,
                    time_options.is_some(),
                    monitor_ch,
                    time_options,
                    bench_benchmarks,
                ),
            };

            // If the platform is single-threaded we're just going to run
            // the test synchronously, regardless of the concurrency
            // level.
            let supports_threads = !cfg!(target_os = "emscripten")
                && !cfg!(target_family = "wasm")
                && !cfg!(target_os = "zkvm");
            if supports_threads {
                let cfg = thread::Builder::new().name(name.as_slice().to_owned());
                let mut runtest = Arc::new(Mutex::new(Some(runtest)));
                let runtest2 = runtest.clone();
                match cfg.spawn(move || runtest2.lock().unwrap().take().unwrap()()) {
                    Ok(handle) => Some(handle),
                    Err(e) if e.kind() == io::ErrorKind::WouldBlock => {
                        // `ErrorKind::WouldBlock` means hitting the thread limit on some
                        // platforms, so run the test synchronously here instead.
                        Arc::get_mut(&mut runtest).unwrap().get_mut().unwrap().take().unwrap()();
                        None
                    }
                    Err(e) => panic!("failed to spawn thread to run test: {e}"),
                }
            } else {
                runtest();
                None
            }
        }
        Runnable::Bench(runnable_bench) => {
            // Benchmarks aren't expected to panic, so we run them all in-process.
            runnable_bench.run(id, &desc, &monitor_ch, opts.nocapture);
            None
        }
    }
}

/// Fixed frame used to clean the backtrace with `RUST_BACKTRACE=1`.
#[inline(never)]
fn __rust_begin_short_backtrace<T, F: FnOnce() -> T>(f: F) -> T {
    let result = f();

    // prevent this frame from being tail-call optimised away
    black_box(result)
}

fn run_test_in_process(
    id: TestId,
    desc: TestDesc,
    nocapture: bool,
    report_time: bool,
    runnable_test: RunnableTest,
    monitor_ch: Sender<CompletedTest>,
    time_opts: Option<time::TestTimeOptions>,
) {
    // Buffer for capturing standard I/O
    let data = Arc::new(Mutex::new(Vec::new()));

    if !nocapture {
        io::set_output_capture(Some(data.clone()));
    }

    let start = report_time.then(Instant::now);
    let result = fold_err(catch_unwind(AssertUnwindSafe(|| runnable_test.run())));
    let exec_time = start.map(|start| {
        let duration = start.elapsed();
        TestExecTime(duration)
    });

    io::set_output_capture(None);

    let test_result = match result {
        Ok(()) => calc_result(&desc, Ok(()), time_opts.as_ref(), exec_time.as_ref()),
        Err(e) => calc_result(&desc, Err(e.as_ref()), time_opts.as_ref(), exec_time.as_ref()),
    };
    let stdout = data.lock().unwrap_or_else(|e| e.into_inner()).to_vec();
    let message = CompletedTest::new(id, desc, test_result, exec_time, stdout);
    monitor_ch.send(message).unwrap();
}

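/// Flattens the nested result of `catch_unwind` around a fallible test body:
/// both a caught panic and an `Err` returned by the test become the outer
/// `Err`, boxing the latter so it matches the panic payload type.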
fn fold_err<T, E>(
    result: Result<Result<T, E>, Box<dyn Any + Send>>,
) -> Result<T, Box<dyn Any + Send>>
where
    E: Send + 'static,
{
    match result {
        Ok(Err(e)) => Err(Box::new(e)),
        Ok(Ok(v)) => Ok(v),
        Err(e) => Err(e),
    }
}

fn spawn_test_subprocess(
    id: TestId,
    desc: TestDesc,
    nocapture: bool,
    report_time: bool,
    monitor_ch: Sender<CompletedTest>,
    time_opts: Option<time::TestTimeOptions>,
    bench_benchmarks: bool,
) {
    let (result, test_output, exec_time) = (|| {
        let args = env::args().collect::<Vec<_>>();
        let current_exe = &args[0];

        let mut command = Command::new(current_exe);
        command.env(SECONDARY_TEST_INVOKER_VAR, desc.name.as_slice());
        if bench_benchmarks {
            command.env(SECONDARY_TEST_BENCH_BENCHMARKS_VAR, "1");
        }
        if nocapture {
            command.stdout(process::Stdio::inherit());
            command.stderr(process::Stdio::inherit());
        }

        let start = report_time.then(Instant::now);
        let output = match command.output() {
            Ok(out) => out,
            Err(e) => {
                let err = format!("Failed to spawn {} as child for test: {:?}", args[0], e);
                return (TrFailed, err.into_bytes(), None);
            }
        };
        let exec_time = start.map(|start| {
            let duration = start.elapsed();
            TestExecTime(duration)
        });

        let std::process::Output { stdout, stderr, status } = output;
        let mut test_output = stdout;
        formatters::write_stderr_delimiter(&mut test_output, &desc.name);
        test_output.extend_from_slice(&stderr);

        let result =
            get_result_from_exit_code(&desc, status, time_opts.as_ref(), exec_time.as_ref());
        (result, test_output, exec_time)
    })();

    let message = CompletedTest::new(id, desc, result, exec_time, test_output);
    monitor_ch.send(message).unwrap();
}

fn run_test_in_spawned_subprocess(desc: TestDesc, runnable_test: RunnableTest) -> ! {
    let builtin_panic_hook = panic::take_hook();
    let record_result = Arc::new(move |panic_info: Option<&'_ PanicHookInfo<'_>>| {
        let test_result = match panic_info {
            Some(info) => calc_result(&desc, Err(info.payload()), None, None),
            None => calc_result(&desc, Ok(()), None, None),
        };

        // We don't support serializing TrFailedMsg, so just
        // print the message out to stderr.
        if let TrFailedMsg(msg) = &test_result {
            eprintln!("{msg}");
        }

        if let Some(info) = panic_info {
            builtin_panic_hook(info);
        }

        if let TrOk = test_result {
            process::exit(test_result::TR_OK);
        } else {
            process::abort();
        }
    });
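    // The closure serves both as the panic hook and as the normal-return
    // path below, so it is shared through an `Arc`.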
    let record_result2 = record_result.clone();
    panic::set_hook(Box::new(move |info| record_result2(Some(info))));
    if let Err(message) = runnable_test.run() {
        panic!("{}", message);
    }
    record_result(None);
    unreachable!("panic=abort callback should have exited the process")
}