#![unstable(feature = "test", issue = "50297")]
#![doc(test(attr(deny(warnings))))]
#![doc(rust_logo)]
#![feature(rustdoc_internals)]
#![feature(internal_output_capture)]
#![feature(staged_api)]
#![feature(process_exitcode_internals)]
#![feature(panic_can_unwind)]
#![feature(test)]
#![allow(internal_features)]
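//! Support code for rustc's built-in unit-test and micro-benchmarking
//! framework.
//!
//! Almost all user code will only be interested in `Bencher` and `black_box`;
//! tests and benchmarks themselves are normally written with the `#[test]`
//! and `#[bench]` attributes rather than by calling into this crate directly.

// Public reexports.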
pub use self::bench::{black_box, Bencher};
pub use self::console::run_tests_console;
pub use self::options::{ColorConfig, Options, OutputFormat, RunIgnored, ShouldPanic};
pub use self::types::TestName::*;
pub use self::types::*;
pub use self::ColorConfig::*;
pub use cli::TestOpts;
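
// Items grouped under a `test` module for the harness code that rustc
// generates for crates compiled with `--test`.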
pub mod test {
pub use crate::{
assert_test_result,
bench::Bencher,
cli::{parse_opts, TestOpts},
filter_tests,
helpers::metrics::{Metric, MetricMap},
options::{Options, RunIgnored, RunStrategy, ShouldPanic},
run_test, test_main, test_main_static,
test_result::{TestResult, TrFailed, TrFailedMsg, TrIgnored, TrOk},
time::{TestExecTime, TestTimeOptions},
types::{
DynTestFn, DynTestName, StaticBenchFn, StaticTestFn, StaticTestName, TestDesc,
TestDescAndFn, TestId, TestName, TestType,
},
};
}
use std::{
collections::VecDeque,
env, io,
io::prelude::Write,
mem::ManuallyDrop,
panic::{self, catch_unwind, AssertUnwindSafe, PanicInfo},
process::{self, Command, Termination},
sync::mpsc::{channel, Sender},
sync::{Arc, Mutex},
thread,
time::{Duration, Instant},
};
pub mod bench;
mod cli;
mod console;
mod event;
mod formatters;
mod helpers;
mod options;
pub mod stats;
mod term;
mod test_result;
mod time;
mod types;
#[cfg(test)]
mod tests;
use core::any::Any;
use event::{CompletedTest, TestEvent};
use helpers::concurrency::get_concurrency;
use helpers::exit_code::get_exit_code;
use helpers::shuffle::{get_shuffle_seed, shuffle_tests};
use options::RunStrategy;
use test_result::*;
use time::TestExecTime;
/// Exit code used when the harness fails to parse options, hits an I/O error,
/// or any test fails.
const ERROR_EXIT_CODE: i32 = 101;

/// Environment variable naming the single test to run when this binary is
/// re-invoked as a subprocess under the panic=abort strategy.
const SECONDARY_TEST_INVOKER_VAR: &str = "__RUST_TEST_INVOKE";

/// Environment variable telling the subprocess that benchmarks are being run
/// as benchmarks rather than converted to tests.
const SECONDARY_TEST_BENCH_BENCHMARKS_VAR: &str = "__RUST_TEST_BENCH_BENCHMARKS";
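
/// The default console test runner. It accepts the command-line arguments and
/// a vector of tests to run.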
pub fn test_main(args: &[String], tests: Vec<TestDescAndFn>, options: Option<Options>) {
let mut opts = match cli::parse_opts(args) {
Some(Ok(o)) => o,
Some(Err(msg)) => {
eprintln!("error: {msg}");
process::exit(ERROR_EXIT_CODE);
}
None => return,
};
if let Some(options) = options {
opts.options = options;
}
if opts.list {
if let Err(e) = console::list_tests_console(&opts, tests) {
eprintln!("error: io error when listing tests: {e:?}");
process::exit(ERROR_EXIT_CODE);
}
} else {
if !opts.nocapture {
let builtin_panic_hook = panic::take_hook();
let hook = Box::new({
move |info: &'_ PanicInfo<'_>| {
if !info.can_unwind() {
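                        // This panic cannot unwind (e.g. under `-C panic=abort`), so the
                        // process will die before the harness can print the captured test
                        // output. Leak the stderr lock so other threads cannot interleave
                        // output, then dump whatever has been captured so far to stdout.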
std::mem::forget(std::io::stderr().lock());
let mut stdout = ManuallyDrop::new(std::io::stdout().lock());
if let Some(captured) = io::set_output_capture(None) {
if let Ok(data) = captured.lock() {
let _ = stdout.write_all(&data);
let _ = stdout.flush();
}
}
}
builtin_panic_hook(info);
}
});
panic::set_hook(hook);
}
match console::run_tests_console(&opts, tests) {
Ok(true) => {}
Ok(false) => process::exit(ERROR_EXIT_CODE),
Err(e) => {
eprintln!("error: io error when listing tests: {e:?}");
process::exit(ERROR_EXIT_CODE);
}
}
}
}
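
/// A variant of `test_main` optimized for invocation with a static test
/// vector. This will panic (intentionally) when fed any dynamic tests.
///
/// This is the entry point for the `main` function that rustc generates for
/// crates compiled with `--test`.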
pub fn test_main_static(tests: &[&TestDescAndFn]) {
let args = env::args().collect::<Vec<_>>();
let owned_tests: Vec<_> = tests.iter().map(make_owned_test).collect();
test_main(&args, owned_tests, None)
}
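
/// A variant of `test_main_static` for the panic=abort strategy, where each
/// test runs in its own subprocess. When `SECONDARY_TEST_INVOKER_VAR` is set,
/// this process is such a subprocess: it runs exactly the named test and
/// exits; otherwise it acts as the primary harness and spawns the children.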
pub fn test_main_static_abort(tests: &[&TestDescAndFn]) {
if let Ok(name) = env::var(SECONDARY_TEST_INVOKER_VAR) {
env::remove_var(SECONDARY_TEST_INVOKER_VAR);
let mut tests = tests.iter().map(make_owned_test).collect::<Vec<_>>();
if env::var(SECONDARY_TEST_BENCH_BENCHMARKS_VAR).is_ok() {
env::remove_var(SECONDARY_TEST_BENCH_BENCHMARKS_VAR);
} else {
tests = convert_benchmarks_to_tests(tests);
};
let test = tests
.into_iter()
.find(|test| test.desc.name.as_slice() == name)
.unwrap_or_else(|| panic!("couldn't find a test with the provided name '{name}'"));
let TestDescAndFn { desc, testfn } = test;
match testfn.into_runnable() {
Runnable::Test(runnable_test) => {
if runnable_test.is_dynamic() {
panic!("only static tests are supported");
}
run_test_in_spawned_subprocess(desc, runnable_test);
}
Runnable::Bench(_) => {
panic!("benchmarks should not be executed into child processes")
}
}
}
let args = env::args().collect::<Vec<_>>();
let owned_tests: Vec<_> = tests.iter().map(make_owned_test).collect();
test_main(&args, owned_tests, Some(Options::new().panic_abort(true)))
}
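
/// Clones static test values so they can be put into an owned vector, which
/// `test_main` needs in order to hand ownership of tests to the runner.
///
/// Panics when fed a dynamic test, because those cannot be cloned.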
fn make_owned_test(test: &&TestDescAndFn) -> TestDescAndFn {
match test.testfn {
StaticTestFn(f) => TestDescAndFn { testfn: StaticTestFn(f), desc: test.desc.clone() },
StaticBenchFn(f) => TestDescAndFn { testfn: StaticBenchFn(f), desc: test.desc.clone() },
_ => panic!("non-static tests passed to test::test_main_static"),
}
}
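
/// Invoked when a unit test's return value is checked: converts the
/// `Termination` value into `Ok(())` for a zero exit code and an error message
/// otherwise.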
pub fn assert_test_result<T: Termination>(result: T) -> Result<(), String> {
let code = result.report().to_i32();
if code == 0 {
Ok(())
} else {
Err(format!(
"the test returned a termination value with a non-zero status code \
({code}) which indicates a failure"
))
}
}
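
/// Tests and benchmarks that survived filtering, each paired with a unique
/// `TestId` assigned in insertion order.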
struct FilteredTests {
tests: Vec<(TestId, TestDescAndFn)>,
benches: Vec<(TestId, TestDescAndFn)>,
next_id: usize,
}
impl FilteredTests {
fn add_bench(&mut self, desc: TestDesc, testfn: TestFn) {
let test = TestDescAndFn { desc, testfn };
self.benches.push((TestId(self.next_id), test));
self.next_id += 1;
}
fn add_test(&mut self, desc: TestDesc, testfn: TestFn) {
let test = TestDescAndFn { desc, testfn };
self.tests.push((TestId(self.next_id), test));
self.next_id += 1;
}
fn total_len(&self) -> usize {
self.tests.len() + self.benches.len()
}
}
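
/// Runs the filtered tests (and, when `opts.bench_benchmarks` is set, the
/// benchmarks), reporting progress and results through
/// `notify_about_test_event`. Tests run either sequentially or on up to
/// `opts.test_threads` worker threads; benchmarks always run sequentially
/// afterwards.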
pub fn run_tests<F>(
opts: &TestOpts,
tests: Vec<TestDescAndFn>,
mut notify_about_test_event: F,
) -> io::Result<()>
where
F: FnMut(TestEvent) -> io::Result<()>,
{
use std::collections::HashMap;
use std::hash::{BuildHasherDefault, DefaultHasher};
use std::sync::mpsc::RecvTimeoutError;
struct RunningTest {
join_handle: Option<thread::JoinHandle<()>>,
}
impl RunningTest {
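        /// Waits for the test thread to finish. If the thread panicked after
        /// the test had already reported success, the result is downgraded to
        /// a failure.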
fn join(self, completed_test: &mut CompletedTest) {
if let Some(join_handle) = self.join_handle {
if let Err(_) = join_handle.join() {
if let TrOk = completed_test.result {
completed_test.result =
TrFailedMsg("panicked after reporting success".to_string());
}
}
}
}
}
type TestMap = HashMap<TestId, RunningTest, BuildHasherDefault<DefaultHasher>>;
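    /// A queued deadline: if the test is still running at `timeout`, it is
    /// reported as timed out (but not cancelled).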
struct TimeoutEntry {
id: TestId,
desc: TestDesc,
timeout: Instant,
}
let tests_len = tests.len();
let mut filtered = FilteredTests { tests: Vec::new(), benches: Vec::new(), next_id: 0 };
let mut filtered_tests = filter_tests(opts, tests);
if !opts.bench_benchmarks {
filtered_tests = convert_benchmarks_to_tests(filtered_tests);
}
for test in filtered_tests {
let mut desc = test.desc;
desc.name = desc.name.with_padding(test.testfn.padding());
match test.testfn {
DynBenchFn(_) | StaticBenchFn(_) => {
filtered.add_bench(desc, test.testfn);
}
testfn => {
filtered.add_test(desc, testfn);
}
};
}
let filtered_out = tests_len - filtered.total_len();
let event = TestEvent::TeFilteredOut(filtered_out);
notify_about_test_event(event)?;
let shuffle_seed = get_shuffle_seed(opts);
let event = TestEvent::TeFiltered(filtered.total_len(), shuffle_seed);
notify_about_test_event(event)?;
let concurrency = opts.test_threads.unwrap_or_else(get_concurrency);
let mut remaining = filtered.tests;
if let Some(shuffle_seed) = shuffle_seed {
shuffle_tests(shuffle_seed, &mut remaining);
}
let mut remaining = VecDeque::from(remaining);
let mut pending = 0;
let (tx, rx) = channel::<CompletedTest>();
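    // Under panic=abort a failing test would take the whole harness down, so
    // each test is normally run in a spawned subprocess instead (unless the
    // caller forces in-process execution).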
let run_strategy = if opts.options.panic_abort && !opts.force_run_in_process {
RunStrategy::SpawnPrimary
} else {
RunStrategy::InProcess
};
let mut running_tests: TestMap = HashMap::default();
let mut timeout_queue: VecDeque<TimeoutEntry> = VecDeque::new();
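    /// Pops every entry whose deadline has passed and returns the descriptions
    /// of those tests that are still running.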
fn get_timed_out_tests(
running_tests: &TestMap,
timeout_queue: &mut VecDeque<TimeoutEntry>,
) -> Vec<TestDesc> {
let now = Instant::now();
let mut timed_out = Vec::new();
while let Some(timeout_entry) = timeout_queue.front() {
if now < timeout_entry.timeout {
break;
}
let timeout_entry = timeout_queue.pop_front().unwrap();
if running_tests.contains_key(&timeout_entry.id) {
timed_out.push(timeout_entry.desc);
}
}
timed_out
}
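    /// Time remaining until the earliest queued deadline, or `None` if the
    /// queue is empty.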
fn calc_timeout(timeout_queue: &VecDeque<TimeoutEntry>) -> Option<Duration> {
timeout_queue.front().map(|&TimeoutEntry { timeout: next_timeout, .. }| {
let now = Instant::now();
if next_timeout >= now { next_timeout - now } else { Duration::new(0, 0) }
})
}
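    // With a single test thread, run each test to completion before starting
    // the next one.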
if concurrency == 1 {
while !remaining.is_empty() {
let (id, test) = remaining.pop_front().unwrap();
let event = TestEvent::TeWait(test.desc.clone());
notify_about_test_event(event)?;
let join_handle = run_test(opts, !opts.run_tests, id, test, run_strategy, tx.clone());
let mut completed_test = rx.recv().unwrap();
RunningTest { join_handle }.join(&mut completed_test);
let fail_fast = match completed_test.result {
TrIgnored | TrOk | TrBench(_) => false,
TrFailed | TrFailedMsg(_) | TrTimedFail => opts.fail_fast,
};
let event = TestEvent::TeResult(completed_test);
notify_about_test_event(event)?;
if fail_fast {
return Ok(());
}
}
} else {
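        // Keep up to `concurrency` tests in flight, tracking a soft deadline
        // for each so that slow tests can be reported as timed out.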
while pending > 0 || !remaining.is_empty() {
while pending < concurrency && !remaining.is_empty() {
let (id, test) = remaining.pop_front().unwrap();
let timeout = time::get_default_test_timeout();
let desc = test.desc.clone();
let event = TestEvent::TeWait(desc.clone());
                notify_about_test_event(event)?;
                let join_handle =
                    run_test(opts, !opts.run_tests, id, test, run_strategy, tx.clone());
running_tests.insert(id, RunningTest { join_handle });
timeout_queue.push_back(TimeoutEntry { id, desc, timeout });
pending += 1;
}
let mut res;
loop {
if let Some(timeout) = calc_timeout(&timeout_queue) {
res = rx.recv_timeout(timeout);
for test in get_timed_out_tests(&running_tests, &mut timeout_queue) {
let event = TestEvent::TeTimeout(test);
notify_about_test_event(event)?;
}
match res {
Err(RecvTimeoutError::Timeout) => {
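                            // The receive timed out because a deadline passed
                            // (reported above); keep waiting for a result.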
}
_ => {
break;
}
}
} else {
res = rx.recv().map_err(|_| RecvTimeoutError::Disconnected);
break;
}
}
let mut completed_test = res.unwrap();
let running_test = running_tests.remove(&completed_test.id).unwrap();
running_test.join(&mut completed_test);
let fail_fast = match completed_test.result {
TrIgnored | TrOk | TrBench(_) => false,
TrFailed | TrFailedMsg(_) | TrTimedFail => opts.fail_fast,
};
let event = TestEvent::TeResult(completed_test);
notify_about_test_event(event)?;
pending -= 1;
if fail_fast {
std::mem::forget(rx);
return Ok(());
}
}
}
if opts.bench_benchmarks {
for (id, b) in filtered.benches {
let event = TestEvent::TeWait(b.desc.clone());
notify_about_test_event(event)?;
let join_handle = run_test(opts, false, id, b, run_strategy, tx.clone());
let mut completed_test = rx.recv().unwrap();
RunningTest { join_handle }.join(&mut completed_test);
let event = TestEvent::TeResult(completed_test);
notify_about_test_event(event)?;
}
}
Ok(())
}
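
/// Applies the name filters, skip filters, the `exclude_should_panic` option,
/// and the ignored-test mode from `opts`, returning the tests that remain.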
pub fn filter_tests(opts: &TestOpts, tests: Vec<TestDescAndFn>) -> Vec<TestDescAndFn> {
let mut filtered = tests;
let matches_filter = |test: &TestDescAndFn, filter: &str| {
let test_name = test.desc.name.as_slice();
match opts.filter_exact {
true => test_name == filter,
false => test_name.contains(filter),
}
};
if !opts.filters.is_empty() {
filtered.retain(|test| opts.filters.iter().any(|filter| matches_filter(test, filter)));
}
if !opts.skip.is_empty() {
filtered.retain(|test| !opts.skip.iter().any(|sf| matches_filter(test, sf)));
}
if opts.exclude_should_panic {
filtered.retain(|test| test.desc.should_panic == ShouldPanic::No);
}
match opts.run_ignored {
RunIgnored::Yes => {
filtered.iter_mut().for_each(|test| test.desc.ignore = false);
}
RunIgnored::Only => {
filtered.retain(|test| test.desc.ignore);
filtered.iter_mut().for_each(|test| test.desc.ignore = false);
}
RunIgnored::No => {}
}
filtered
}
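
/// Rewrites each benchmark so that it runs once as an ordinary test instead of
/// being measured; non-benchmark entries are passed through unchanged.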
pub fn convert_benchmarks_to_tests(tests: Vec<TestDescAndFn>) -> Vec<TestDescAndFn> {
tests
.into_iter()
.map(|x| {
let testfn = match x.testfn {
DynBenchFn(benchfn) => DynBenchAsTestFn(benchfn),
StaticBenchFn(benchfn) => StaticBenchAsTestFn(benchfn),
f => f,
};
TestDescAndFn { desc: x.desc, testfn }
})
.collect()
}
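
/// Runs a single test or benchmark and sends the `CompletedTest` on
/// `monitor_ch`. Returns the `JoinHandle` of the spawned test thread, or
/// `None` when the test was ignored, ran on the current thread, or was a
/// benchmark.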
pub fn run_test(
opts: &TestOpts,
force_ignore: bool,
id: TestId,
test: TestDescAndFn,
strategy: RunStrategy,
monitor_ch: Sender<CompletedTest>,
) -> Option<thread::JoinHandle<()>> {
let TestDescAndFn { desc, testfn } = test;
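    // `#[should_panic]` needs a way to observe the panic; most wasm targets
    // and zkvm cannot provide one (Emscripten can), so such tests are ignored
    // there.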
let ignore_because_no_process_support = desc.should_panic != ShouldPanic::No
&& (cfg!(target_family = "wasm") || cfg!(target_os = "zkvm"))
&& !cfg!(target_os = "emscripten");
if force_ignore || desc.ignore || ignore_because_no_process_support {
let message = CompletedTest::new(id, desc, TrIgnored, None, Vec::new());
monitor_ch.send(message).unwrap();
return None;
}
match testfn.into_runnable() {
Runnable::Test(runnable_test) => {
if runnable_test.is_dynamic() {
match strategy {
RunStrategy::InProcess => (),
_ => panic!("Cannot run dynamic test fn out-of-process"),
};
}
let name = desc.name.clone();
let nocapture = opts.nocapture;
let time_options = opts.time_options;
let bench_benchmarks = opts.bench_benchmarks;
let runtest = move || match strategy {
RunStrategy::InProcess => run_test_in_process(
id,
desc,
nocapture,
time_options.is_some(),
runnable_test,
monitor_ch,
time_options,
),
RunStrategy::SpawnPrimary => spawn_test_subprocess(
id,
desc,
nocapture,
time_options.is_some(),
monitor_ch,
time_options,
bench_benchmarks,
),
};
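            // Run the test on a dedicated, named thread where possible; fall
            // back to the current thread if the target has no threads or the
            // spawn fails with `WouldBlock`.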
let supports_threads = !cfg!(target_os = "emscripten") && !cfg!(target_family = "wasm");
if supports_threads {
let cfg = thread::Builder::new().name(name.as_slice().to_owned());
let mut runtest = Arc::new(Mutex::new(Some(runtest)));
let runtest2 = runtest.clone();
match cfg.spawn(move || runtest2.lock().unwrap().take().unwrap()()) {
Ok(handle) => Some(handle),
Err(e) if e.kind() == io::ErrorKind::WouldBlock => {
Arc::get_mut(&mut runtest).unwrap().get_mut().unwrap().take().unwrap()();
None
}
Err(e) => panic!("failed to spawn thread to run test: {e}"),
}
} else {
runtest();
None
}
}
Runnable::Bench(runnable_bench) => {
runnable_bench.run(id, &desc, &monitor_ch, opts.nocapture);
None
}
}
}
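
/// Fixed frame used to clean the backtrace with `RUST_BACKTRACE=1`.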
#[inline(never)]
fn __rust_begin_short_backtrace<T, F: FnOnce() -> T>(f: F) -> T {
let result = f();
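    // Prevent this frame from being tail-call optimized away.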
black_box(result)
}
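
/// Runs the test on the current thread, capturing output unless `nocapture`
/// is set, and sends the `CompletedTest` on `monitor_ch`.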
fn run_test_in_process(
id: TestId,
desc: TestDesc,
nocapture: bool,
report_time: bool,
runnable_test: RunnableTest,
monitor_ch: Sender<CompletedTest>,
time_opts: Option<time::TestTimeOptions>,
) {
let data = Arc::new(Mutex::new(Vec::new()));
if !nocapture {
io::set_output_capture(Some(data.clone()));
}
let start = report_time.then(Instant::now);
let result = fold_err(catch_unwind(AssertUnwindSafe(|| runnable_test.run())));
let exec_time = start.map(|start| {
let duration = start.elapsed();
TestExecTime(duration)
});
io::set_output_capture(None);
let test_result = match result {
Ok(()) => calc_result(&desc, Ok(()), &time_opts, &exec_time),
Err(e) => calc_result(&desc, Err(e.as_ref()), &time_opts, &exec_time),
};
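    // Recover the captured output even if the capture mutex was poisoned by a
    // panicking test.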
let stdout = data.lock().unwrap_or_else(|e| e.into_inner()).to_vec();
let message = CompletedTest::new(id, desc, test_result, exec_time, stdout);
monitor_ch.send(message).unwrap();
}
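
/// Flattens the result of `catch_unwind` over a fallible test body so that a
/// returned error is treated like a caught panic payload.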
fn fold_err<T, E>(
result: Result<Result<T, E>, Box<dyn Any + Send>>,
) -> Result<T, Box<dyn Any + Send>>
where
E: Send + 'static,
{
match result {
Ok(Err(e)) => Err(Box::new(e)),
Ok(Ok(v)) => Ok(v),
Err(e) => Err(e),
}
}
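
/// Re-invokes the current executable with `SECONDARY_TEST_INVOKER_VAR` set to
/// the test's name, collects the child's combined output and exit status, and
/// sends the `CompletedTest` on `monitor_ch`.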
fn spawn_test_subprocess(
id: TestId,
desc: TestDesc,
nocapture: bool,
report_time: bool,
monitor_ch: Sender<CompletedTest>,
time_opts: Option<time::TestTimeOptions>,
bench_benchmarks: bool,
) {
let (result, test_output, exec_time) = (|| {
let args = env::args().collect::<Vec<_>>();
let current_exe = &args[0];
let mut command = Command::new(current_exe);
command.env(SECONDARY_TEST_INVOKER_VAR, desc.name.as_slice());
if bench_benchmarks {
command.env(SECONDARY_TEST_BENCH_BENCHMARKS_VAR, "1");
}
if nocapture {
command.stdout(process::Stdio::inherit());
command.stderr(process::Stdio::inherit());
}
let start = report_time.then(Instant::now);
let output = match command.output() {
Ok(out) => out,
Err(e) => {
let err = format!("Failed to spawn {} as child for test: {:?}", args[0], e);
return (TrFailed, err.into_bytes(), None);
}
};
let exec_time = start.map(|start| {
let duration = start.elapsed();
TestExecTime(duration)
});
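        // Combine the child's stdout and stderr, separated by a delimiter, so
        // both appear in the captured output for this test.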
let std::process::Output { stdout, stderr, status } = output;
let mut test_output = stdout;
formatters::write_stderr_delimiter(&mut test_output, &desc.name);
test_output.extend_from_slice(&stderr);
let result = match (|| -> Result<TestResult, String> {
let exit_code = get_exit_code(status)?;
Ok(get_result_from_exit_code(&desc, exit_code, &time_opts, &exec_time))
})() {
Ok(r) => r,
Err(e) => {
write!(&mut test_output, "Unexpected error: {e}").unwrap();
TrFailed
}
};
(result, test_output, exec_time)
})();
let message = CompletedTest::new(id, desc, result, exec_time, test_output);
monitor_ch.send(message).unwrap();
}
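
/// Runs a single test inside a subprocess spawned by `spawn_test_subprocess`
/// and exits with `TR_OK` or `TR_FAILED`. The result is recorded from a panic
/// hook because, under panic=abort, a failing test terminates the process.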
fn run_test_in_spawned_subprocess(desc: TestDesc, runnable_test: RunnableTest) -> ! {
let builtin_panic_hook = panic::take_hook();
let record_result = Arc::new(move |panic_info: Option<&'_ PanicInfo<'_>>| {
let test_result = match panic_info {
Some(info) => calc_result(&desc, Err(info.payload()), &None, &None),
None => calc_result(&desc, Ok(()), &None, &None),
};
if let TrFailedMsg(msg) = &test_result {
eprintln!("{msg}");
}
if let Some(info) = panic_info {
builtin_panic_hook(info);
}
if let TrOk = test_result {
process::exit(test_result::TR_OK);
} else {
process::exit(test_result::TR_FAILED);
}
});
let record_result2 = record_result.clone();
panic::set_hook(Box::new(move |info| record_result2(Some(info))));
if let Err(message) = runnable_test.run() {
panic!("{}", message);
}
record_result(None);
unreachable!("panic=abort callback should have exited the process")
}