diff --git a/fixture-data/src/nextest_tests.rs b/fixture-data/src/nextest_tests.rs index 343f3d152ac..cdcac5eee3a 100644 --- a/fixture-data/src/nextest_tests.rs +++ b/fixture-data/src/nextest_tests.rs @@ -47,6 +47,10 @@ pub static EXPECTED_TEST_SUITES: LazyLock> = LazyLock "test_slow_timeout_subprocess", TestCaseFixtureStatus::IgnoredPass, ), + TestCaseFixture::new( + "test_flaky_slow_timeout_mod_3", + TestCaseFixtureStatus::IgnoredFail, + ), TestCaseFixture::new("test_stdin_closed", TestCaseFixtureStatus::Pass), TestCaseFixture::new("test_subprocess_doesnt_exit", TestCaseFixtureStatus::Leak), TestCaseFixture::new("test_subprocess_doesnt_exit_fail", TestCaseFixtureStatus::FailLeak), diff --git a/fixtures/nextest-tests/.config/nextest.toml b/fixtures/nextest-tests/.config/nextest.toml index f5881d6b882..27e1ef4d2ae 100644 --- a/fixtures/nextest-tests/.config/nextest.toml +++ b/fixtures/nextest-tests/.config/nextest.toml @@ -88,6 +88,22 @@ filter = 'test(=test_slow_timeout_2)' slow-timeout = { period = "500ms", terminate-after = 2 } test-group = '@global' +[profile.with-timeout-success] +slow-timeout = { period = "10ms", terminate-after = 2, on-timeout = "pass" } + +[[profile.with-timeout-success.overrides]] +filter = 'test(=test_slow_timeout_2)' +slow-timeout = { period = "500ms", terminate-after = 2, on-timeout = "fail" } +test-group = '@global' + +[[profile.with-timeout-success.overrides]] +filter = 'test(=test_slow_timeout_subprocess)' +slow-timeout = { period = "10ms", terminate-after = 2, on-timeout = "fail" } + +[profile.with-timeout-retries-success] +slow-timeout = { period = "10ms", terminate-after = 2, on-timeout = "pass" } +retries = 2 + [profile.with-junit] retries = 2 diff --git a/fixtures/nextest-tests/tests/basic.rs b/fixtures/nextest-tests/tests/basic.rs index 0ab3ccfbb16..f232b473dd4 100644 --- a/fixtures/nextest-tests/tests/basic.rs +++ b/fixtures/nextest-tests/tests/basic.rs @@ -319,6 +319,17 @@ fn test_slow_timeout_subprocess() { cmd.output().unwrap(); } +#[test] +#[ignore] +fn test_flaky_slow_timeout_mod_3() { + let nextest_attempt = nextest_attempt(); + if nextest_attempt % 3 != 0 { + panic!("Failed because attempt {} % 3 != 0", nextest_attempt) + } + // Sleep long enough to exceed the slow timeout: the with-timeout-* profiles above use a 10ms period with terminate-after = 2. + std::thread::sleep(std::time::Duration::from_secs(4)); +} + #[test] fn test_result_failure() -> Result<(), std::io::Error> { Err(std::io::Error::new( diff --git a/nextest-runner/default-config.toml b/nextest-runner/default-config.toml index fd759accb47..8576bea3862 100644 --- a/nextest-runner/default-config.toml +++ b/nextest-runner/default-config.toml @@ -88,7 +88,11 @@ fail-fast = true # which will cause slow tests to be terminated after the specified number of # periods have passed. # Example: slow-timeout = { period = "60s", terminate-after = 2 } -slow-timeout = { period = "60s" } +# +# The 'on-timeout' parameter controls whether timeouts are treated as failures (the default) +# or successes. +# Example: slow-timeout = { period = "60s", on-timeout = "pass" } +slow-timeout = { period = "60s", on-timeout = "fail" } # Treat a test as leaky if after the process is shut down, standard output and standard error # aren't closed within this duration.
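For context before the implementation hunks: a minimal sketch of how a downstream project might adopt the new `on-timeout` parameter in its own `.config/nextest.toml`. The `fuzz` profile name and the `fuzz-targets` package are hypothetical (chosen to mirror the docs updated at the end of this patch); only the `slow-timeout` keys come from this change:

```toml
# Hypothetical profile for long-running fuzz tests.
[profile.fuzz]
# Default behavior for everything else: a timeout is still a failure.
slow-timeout = { period = "60s", terminate-after = 2, on-timeout = "fail" }

[[profile.fuzz.overrides]]
# Fuzz tests are expected to run indefinitely: stop them after one
# 30-second period and record the timeout as a pass.
filter = 'package(fuzz-targets)'
slow-timeout = { period = "30s", terminate-after = 1, on-timeout = "pass" }
```

Selecting this profile with `cargo nextest run --profile fuzz` would let every other profile keep the fail-on-timeout default.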
diff --git a/nextest-runner/src/config/elements/slow_timeout.rs b/nextest-runner/src/config/elements/slow_timeout.rs index 03e04b98931..ff8fb760120 100644 --- a/nextest-runner/src/config/elements/slow_timeout.rs +++ b/nextest-runner/src/config/elements/slow_timeout.rs @@ -15,6 +15,8 @@ pub struct SlowTimeout { pub(crate) terminate_after: Option<NonZeroUsize>, #[serde(with = "humantime_serde", default = "default_grace_period")] pub(crate) grace_period: Duration, + #[serde(default)] + pub(crate) on_timeout: SlowTimeoutResult, } impl SlowTimeout { @@ -24,6 +26,7 @@ impl SlowTimeout { period: far_future_duration(), terminate_after: None, grace_period: Duration::from_secs(10), + on_timeout: SlowTimeoutResult::Fail, }; } @@ -61,6 +64,7 @@ where period, terminate_after: None, grace_period: default_grace_period(), + on_timeout: SlowTimeoutResult::Fail, })) } } @@ -76,6 +80,25 @@ where deserializer.deserialize_any(V) } +/// How a test that reaches its slow timeout is reported. +/// +/// In most situations a timed-out test should be marked as failed. However, certain +/// classes of tests, such as fuzz tests exploring a huge state space, are expected to +/// run indefinitely. Since these tests check invariants and other properties of the +/// code under test while they run, it can be useful to treat a timeout as a success: +/// a timeout doesn't mean that there are no failing inputs, only that none were found +/// before the deadline, which is still valuable information. +#[derive(Clone, Copy, Debug, Deserialize, Default, PartialEq, Eq)] +#[serde(rename_all = "kebab-case")] +pub enum SlowTimeoutResult { + /// The test is marked as failed. + #[default] + Fail, + + /// The test is marked as passed. + Pass, +} + #[cfg(test)] mod tests { use super::*; @@ -87,7 +110,7 @@ mod tests { #[test_case( "", - Ok(SlowTimeout { period: Duration::from_secs(60), terminate_after: None, grace_period: Duration::from_secs(10) }), + Ok(SlowTimeout { period: Duration::from_secs(60), terminate_after: None, grace_period: Duration::from_secs(10), on_timeout: SlowTimeoutResult::Fail }), None ; "empty config is expected to use the hardcoded values" )] #[test_case( indoc! {r#" [profile.default] slow-timeout = "30s" "#}, - Ok(SlowTimeout { period: Duration::from_secs(30), terminate_after: None, grace_period: Duration::from_secs(10) }), + Ok(SlowTimeout { period: Duration::from_secs(30), terminate_after: None, grace_period: Duration::from_secs(10), on_timeout: SlowTimeoutResult::Fail }), None ; "overrides the default profile" )] #[test_case( indoc! {r#" [profile.default] slow-timeout = { period = "30s" } [profile.ci] slow-timeout = { period = "60s", terminate-after = 3 } "#}, - Ok(SlowTimeout { period: Duration::from_secs(30), terminate_after: None, grace_period: Duration::from_secs(10) }), - Some(SlowTimeout { period: Duration::from_secs(60), terminate_after: Some(NonZeroUsize::new(3).unwrap()), grace_period: Duration::from_secs(10) }) + Ok(SlowTimeout { period: Duration::from_secs(30), terminate_after: None, grace_period: Duration::from_secs(10), on_timeout: SlowTimeoutResult::Fail }), + Some(SlowTimeout { period: Duration::from_secs(60), terminate_after: Some(NonZeroUsize::new(3).unwrap()), grace_period: Duration::from_secs(10), on_timeout: SlowTimeoutResult::Fail }) ; "adds a custom profile 'ci'" )] #[test_case( indoc! {r#" [profile.default] slow-timeout = { period = "60s", terminate-after = 3 } [profile.ci] slow-timeout = "30s" "#}, - Ok(SlowTimeout { period: Duration::from_secs(60), terminate_after: Some(NonZeroUsize::new(3).unwrap()), grace_period: 
Duration::from_secs(10) }), - Some(SlowTimeout { period: Duration::from_secs(30), terminate_after: None, grace_period: Duration::from_secs(10) }) + Ok(SlowTimeout { period: Duration::from_secs(60), terminate_after: Some(NonZeroUsize::new(3).unwrap()), grace_period: Duration::from_secs(10), on_timeout: SlowTimeoutResult::Fail }), + Some(SlowTimeout { period: Duration::from_secs(30), terminate_after: None, grace_period: Duration::from_secs(10), on_timeout: SlowTimeoutResult::Fail }) ; "ci profile uses string notation" )] @@ -136,8 +159,8 @@ mod tests { [profile.ci] slow-timeout = "30s" "#}, - Ok(SlowTimeout { period: Duration::from_secs(60), terminate_after: Some(NonZeroUsize::new(3).unwrap()), grace_period: Duration::from_secs(1) }), - Some(SlowTimeout { period: Duration::from_secs(30), terminate_after: None, grace_period: Duration::from_secs(10) }) + Ok(SlowTimeout { period: Duration::from_secs(60), terminate_after: Some(NonZeroUsize::new(3).unwrap()), grace_period: Duration::from_secs(1), on_timeout: SlowTimeoutResult::Fail }), + Some(SlowTimeout { period: Duration::from_secs(30), terminate_after: None, grace_period: Duration::from_secs(10), on_timeout: SlowTimeoutResult::Fail }) ; "timeout grace period" )] @@ -146,7 +169,7 @@ mod tests { [profile.default] slow-timeout = { period = "60s" } "#}, - Ok(SlowTimeout { period: Duration::from_secs(60), terminate_after: None, grace_period: Duration::from_secs(10) }), + Ok(SlowTimeout { period: Duration::from_secs(60), terminate_after: None, grace_period: Duration::from_secs(10), on_timeout: SlowTimeoutResult::Fail }), None ; "partial table" @@ -161,6 +184,39 @@ mod tests { ; "zero terminate-after should fail" )] + #[test_case( + indoc! {r#" + [profile.default] + slow-timeout = { period = "60s", on-timeout = "pass" } + "#}, + Ok(SlowTimeout { period: Duration::from_secs(60), terminate_after: None, grace_period: Duration::from_secs(10), on_timeout: SlowTimeoutResult::Pass }), + None + + ; "timeout result success" + )] + #[test_case( + indoc! {r#" + [profile.default] + slow-timeout = { period = "60s", on-timeout = "fail" } + "#}, + Ok(SlowTimeout { period: Duration::from_secs(60), terminate_after: None, grace_period: Duration::from_secs(10), on_timeout: SlowTimeoutResult::Fail }), + None + + ; "timeout result failure" + )] + #[test_case( + indoc! {r#" + [profile.default] + slow-timeout = { period = "60s", on-timeout = "pass" } + + [profile.ci] + slow-timeout = { period = "30s", on-timeout = "fail" } + "#}, + Ok(SlowTimeout { period: Duration::from_secs(60), terminate_after: None, grace_period: Duration::from_secs(10), on_timeout: SlowTimeoutResult::Pass }), + Some(SlowTimeout { period: Duration::from_secs(30), terminate_after: None, grace_period: Duration::from_secs(10), on_timeout: SlowTimeoutResult::Fail }) + + ; "override on-timeout option" + )] #[test_case( indoc! 
{r#" [profile.default] diff --git a/nextest-runner/src/config/overrides/imp.rs b/nextest-runner/src/config/overrides/imp.rs index 83720c57cfc..58e6252252f 100644 --- a/nextest-runner/src/config/overrides/imp.rs +++ b/nextest-runner/src/config/overrides/imp.rs @@ -1019,7 +1019,11 @@ impl<'de> Deserialize<'de> for PlatformStrings { #[cfg(test)] mod tests { use super::*; - use crate::config::{core::NextestConfig, elements::LeakTimeoutResult, utils::test_helpers::*}; + use crate::config::{ + core::NextestConfig, + elements::{LeakTimeoutResult, SlowTimeoutResult}, + utils::test_helpers::*, + }; use camino_tempfile::tempdir; use indoc::indoc; use std::{num::NonZeroUsize, time::Duration}; @@ -1066,6 +1070,11 @@ mod tests { filter = "test(override5)" retries = 8 + # Override 6 -- timeout result success + [[profile.default.overrides]] + filter = "test(timeout_success)" + slow-timeout = { period = "30s", on-timeout = "pass" } + [profile.default.junit] path = "my-path.xml" @@ -1108,6 +1117,7 @@ mod tests { overrides.slow_timeout(), SlowTimeout { period: Duration::from_secs(60), + on_timeout: SlowTimeoutResult::default(), terminate_after: None, grace_period: Duration::from_secs(10), } @@ -1159,6 +1169,7 @@ mod tests { period: Duration::from_secs(120), terminate_after: Some(NonZeroUsize::new(1).unwrap()), grace_period: Duration::ZERO, + on_timeout: SlowTimeoutResult::default(), } ); assert_eq!( @@ -1197,6 +1208,22 @@ mod tests { let overrides = profile.settings_for(&query); assert_eq!(overrides.retries(), RetryPolicy::new_without_delay(8)); + // This query matches override 6. + let query = TestQuery { + binary_query: target_binary_query.to_query(), + test_name: "timeout_success", + }; + let overrides = profile.settings_for(&query); + assert_eq!( + overrides.slow_timeout(), + SlowTimeout { + period: Duration::from_secs(30), + on_timeout: SlowTimeoutResult::Pass, + terminate_after: None, + grace_period: Duration::from_secs(10), + } + ); + // This query does not match any overrides. 
let query = TestQuery { binary_query: target_binary_query.to_query(), diff --git a/nextest-runner/src/reporter/aggregator/junit.rs b/nextest-runner/src/reporter/aggregator/junit.rs index a1d3d947f2f..6bd753e37eb 100644 --- a/nextest-runner/src/reporter/aggregator/junit.rs +++ b/nextest-runner/src/reporter/aggregator/junit.rs @@ -5,7 +5,7 @@ use crate::{ config::{ - elements::{JunitConfig, LeakTimeoutResult}, + elements::{JunitConfig, LeakTimeoutResult, SlowTimeoutResult}, scripts::ScriptId, }, errors::{DisplayErrorChain, WriteEventError}, @@ -339,7 +339,9 @@ fn non_success_kind_and_type(kind: UnitKind, result: ExecutionResult) -> (NonSuc NonSuccessKind::Failure, format!("{kind} failure with exit code {code}"), ), - ExecutionResult::Timeout => (NonSuccessKind::Failure, format!("{kind} timeout")), + ExecutionResult::Timeout { + result: SlowTimeoutResult::Fail, + } => (NonSuccessKind::Failure, format!("{kind} timeout")), ExecutionResult::ExecFail => (NonSuccessKind::Error, "execution failure".to_owned()), ExecutionResult::Leak { result: LeakTimeoutResult::Pass, @@ -353,7 +355,10 @@ fn non_success_kind_and_type(kind: UnitKind, result: ExecutionResult) -> (NonSuc NonSuccessKind::Error, format!("{kind} exited with code 0, but leaked handles so was marked failed"), ), - ExecutionResult::Pass => { + ExecutionResult::Pass + | ExecutionResult::Timeout { + result: SlowTimeoutResult::Pass, + } => { unreachable!("this is a failure status") } } diff --git a/nextest-runner/src/reporter/displayer/imp.rs b/nextest-runner/src/reporter/displayer/imp.rs index fc84ed316e1..c0e464e5bf0 100644 --- a/nextest-runner/src/reporter/displayer/imp.rs +++ b/nextest-runner/src/reporter/displayer/imp.rs @@ -19,7 +19,11 @@ use super::{ }; use crate::{ cargo_config::CargoConfigs, - config::{elements::LeakTimeoutResult, overrides::CompiledDefaultFilter, scripts::ScriptId}, + config::{ + elements::{LeakTimeoutResult, SlowTimeoutResult}, + overrides::CompiledDefaultFilter, + scripts::ScriptId, + }, errors::WriteEventError, helpers::{ DisplayCounterIndex, DisplayScriptInstance, DisplayTestInstance, plural, @@ -787,14 +791,21 @@ impl<'a> DisplayReporterImpl<'a> { let last_status = run_statuses.last_status(); let test_output_display = match last_status.result { ExecutionResult::Pass + | ExecutionResult::Timeout { + result: SlowTimeoutResult::Pass, + } | ExecutionResult::Leak { result: LeakTimeoutResult::Pass, } => self.unit_output.success_output(*success_output), ExecutionResult::Leak { result: LeakTimeoutResult::Fail, } - | ExecutionResult::Fail { .. } - | ExecutionResult::Timeout => self.unit_output.failure_output(*failure_output), + | ExecutionResult::Timeout { + result: SlowTimeoutResult::Fail, + } + | ExecutionResult::Fail { .. } => { + self.unit_output.failure_output(*failure_output) + } ExecutionResult::ExecFail => self.unit_output.exec_fail_output(*failure_output), }; @@ -1923,7 +1934,14 @@ impl<'a> DisplayReporterImpl<'a> { "{}: exited with code 0, but leaked handles", "failed".style(self.styles.fail), ), - Some(ExecutionResult::Timeout) => { + Some(ExecutionResult::Timeout { + result: SlowTimeoutResult::Pass, + }) => { + write!(writer, "{}", "passed with timeout".style(self.styles.skip)) + } + Some(ExecutionResult::Timeout { + result: SlowTimeoutResult::Fail, + }) => { write!(writer, "{}", "timed out".style(self.styles.fail)) } Some(ExecutionResult::Fail { @@ -2190,7 +2208,7 @@ fn show_finished_status_info_line(result: ExecutionResult) -> bool { // again. 
false } - ExecutionResult::Timeout => { + ExecutionResult::Timeout { .. } => { // Show this to be clear what happened. true } @@ -2235,7 +2253,12 @@ fn status_str(result: ExecutionResult) -> Cow<'static, str> { ExecutionResult::Leak { result: LeakTimeoutResult::Fail, } => "LEAK-FAIL".into(), - ExecutionResult::Timeout => "TIMEOUT".into(), + ExecutionResult::Timeout { + result: SlowTimeoutResult::Pass, + } => "TIMEOUT-PASS".into(), + ExecutionResult::Timeout { + result: SlowTimeoutResult::Fail, + } => "TIMEOUT".into(), } } @@ -2273,7 +2296,12 @@ fn short_status_str(result: ExecutionResult) -> Cow<'static, str> { ExecutionResult::Leak { result: LeakTimeoutResult::Fail, } => "LKFAIL".into(), - ExecutionResult::Timeout => "TMT".into(), + ExecutionResult::Timeout { + result: SlowTimeoutResult::Pass, + } => "TMPASS".into(), + ExecutionResult::Timeout { + result: SlowTimeoutResult::Fail, + } => "TMT".into(), } } @@ -2521,10 +2549,11 @@ mod tests { setup_scripts_timed_out: 0, passed: 5, passed_slow: 0, + passed_timed_out: 0, flaky: 0, failed: 0, failed_slow: 0, - timed_out: 0, + failed_timed_out: 0, leaky: 0, leaky_failed: 0, exec_failed: 0, @@ -2557,10 +2586,11 @@ mod tests { setup_scripts_timed_out: 0, passed: 5, passed_slow: 1, + passed_timed_out: 2, flaky: 1, failed: 2, failed_slow: 0, - timed_out: 1, + failed_timed_out: 1, leaky: 1, leaky_failed: 0, exec_failed: 1, @@ -2644,10 +2674,11 @@ mod tests { setup_scripts_timed_out: 0, passed: 0, passed_slow: 0, + passed_timed_out: 0, flaky: 0, failed: 0, failed_slow: 0, - timed_out: 0, + failed_timed_out: 0, leaky: 0, leaky_failed: 0, exec_failed: 0, @@ -2704,10 +2735,11 @@ mod tests { setup_scripts_timed_out: 0, passed: 17, passed_slow: 4, + passed_timed_out: 3, flaky: 2, failed: 2, failed_slow: 1, - timed_out: 1, + failed_timed_out: 1, leaky: 1, leaky_failed: 2, exec_failed: 1, diff --git a/nextest-runner/src/reporter/displayer/progress.rs b/nextest-runner/src/reporter/displayer/progress.rs index 47ef2764fc5..ffbb9c0725b 100644 --- a/nextest-runner/src/reporter/displayer/progress.rs +++ b/nextest-runner/src/reporter/displayer/progress.rs @@ -720,10 +720,11 @@ pub(super) fn write_summary_str(run_stats: &RunStats, styles: &Styles, out: &mut setup_scripts_timed_out: _, passed, passed_slow, + passed_timed_out: _, flaky, failed, failed_slow: _, - timed_out, + failed_timed_out, leaky, leaky_failed, exec_failed, @@ -792,11 +793,11 @@ pub(super) fn write_summary_str(run_stats: &RunStats, styles: &Styles, out: &mut ); } - if timed_out > 0 { + if failed_timed_out > 0 { swrite!( out, "{} {}, ", - timed_out.style(styles.count), + failed_timed_out.style(styles.count), "timed out".style(styles.fail), ); } @@ -1051,7 +1052,7 @@ mod tests { RunStats { initial_run_count: 20, finished_count: 10, - timed_out: 1, + failed_timed_out: 1, cancel_reason: Some(CancelReason::TestFailure), ..RunStats::default() }, diff --git a/nextest-runner/src/reporter/displayer/snapshots/nextest_runner__reporter__displayer__progress__tests__one_timed_out_success_one_failure_with_cancel_reason.snap b/nextest-runner/src/reporter/displayer/snapshots/nextest_runner__reporter__displayer__progress__tests__one_timed_out_success_one_failure_with_cancel_reason.snap new file mode 100644 index 00000000000..8874a43805f --- /dev/null +++ b/nextest-runner/src/reporter/displayer/snapshots/nextest_runner__reporter__displayer__progress__tests__one_timed_out_success_one_failure_with_cancel_reason.snap @@ -0,0 +1,5 @@ +--- +source: nextest-runner/src/reporter/displayer/progress.rs +expression: s +--- + 
Cancelling [ 34:17:36] 10/20: 10 running, 0 passed, 1 timed out, 0 skipped diff --git a/nextest-runner/src/reporter/displayer/snapshots/nextest_runner__reporter__displayer__progress__tests__one_timed_out_success_one_failure_without_cancel_reason.snap b/nextest-runner/src/reporter/displayer/snapshots/nextest_runner__reporter__displayer__progress__tests__one_timed_out_success_one_failure_without_cancel_reason.snap new file mode 100644 index 00000000000..381dcd41040 --- /dev/null +++ b/nextest-runner/src/reporter/displayer/snapshots/nextest_runner__reporter__displayer__progress__tests__one_timed_out_success_one_failure_without_cancel_reason.snap @@ -0,0 +1,5 @@ +--- +source: nextest-runner/src/reporter/displayer/progress.rs +expression: s +--- + Running [ 34:17:36] 10/20: 10 running, 0 passed, 1 timed out, 0 skipped diff --git a/nextest-runner/src/reporter/events.rs b/nextest-runner/src/reporter/events.rs index 50e2b285b4c..be225d0c43d 100644 --- a/nextest-runner/src/reporter/events.rs +++ b/nextest-runner/src/reporter/events.rs @@ -8,7 +8,10 @@ use super::{FinalStatusLevel, StatusLevel, TestOutputDisplay}; use crate::{ - config::{elements::LeakTimeoutResult, scripts::ScriptId}, + config::{ + elements::{LeakTimeoutResult, SlowTimeoutResult}, + scripts::ScriptId, + }, list::{TestInstance, TestInstanceId, TestList}, runner::{StressCondition, StressCount}, test_output::ChildExecutionOutput, @@ -519,23 +522,26 @@ pub struct RunStats { /// The number of setup scripts that timed out. pub setup_scripts_timed_out: usize, - /// The number of tests that passed. Includes `passed_slow`, `flaky` and `leaky`. + /// The number of tests that passed. Includes `passed_slow`, `passed_timed_out`, `flaky` and `leaky`. pub passed: usize, /// The number of slow tests that passed. pub passed_slow: usize, + /// The number of timed out tests that passed. + pub passed_timed_out: usize, + /// The number of tests that passed on retry. pub flaky: usize, - /// The number of tests that failed. + /// The number of tests that failed. Includes `leaky_failed`. pub failed: usize, /// The number of failed tests that were slow. pub failed_slow: usize, - /// The number of tests that timed out. - pub timed_out: usize, + /// The number of timed out tests that failed. + pub failed_timed_out: usize, /// The number of tests that passed but leaked handles. pub leaky: usize, @@ -567,7 +573,7 @@ impl RunStats { /// Returns count of tests that did not pass. pub fn failed_count(&self) -> usize { - self.failed + self.exec_failed + self.timed_out + self.failed + self.exec_failed + self.failed_timed_out } /// Summarizes the stats as an enum at the end of a test run. @@ -640,7 +646,8 @@ impl RunStats { ExecutionResult::ExecFail => { self.setup_scripts_exec_failed += 1; } - ExecutionResult::Timeout => { + // Timed out setup scripts are always treated as failures + ExecutionResult::Timeout { .. } => { self.setup_scripts_timed_out += 1; } } @@ -694,7 +701,20 @@ impl RunStats { self.failed_slow += 1; } } - ExecutionResult::Timeout => self.timed_out += 1, + ExecutionResult::Timeout { + result: SlowTimeoutResult::Pass, + } => { + self.passed += 1; + self.passed_timed_out += 1; + if run_statuses.len() > 1 { + self.flaky += 1; + } + } + ExecutionResult::Timeout { + result: SlowTimeoutResult::Fail, + } => { + self.failed_timed_out += 1; + } ExecutionResult::ExecFail => self.exec_failed += 1, } } @@ -1038,7 +1058,10 @@ pub enum ExecutionResult { /// An error occurred while executing the test. ExecFail, /// The test was terminated due to a timeout. 
- Timeout, + Timeout { + /// Whether this timeout is reported as a pass or a failure. + result: SlowTimeoutResult, + }, } impl ExecutionResult { @@ -1046,6 +1069,9 @@ pub fn is_success(self) -> bool { match self { ExecutionResult::Pass + | ExecutionResult::Timeout { + result: SlowTimeoutResult::Pass, + } | ExecutionResult::Leak { result: LeakTimeoutResult::Pass, } => true, @@ -1054,7 +1080,9 @@ } | ExecutionResult::Fail { .. } | ExecutionResult::ExecFail - | ExecutionResult::Timeout => false, + | ExecutionResult::Timeout { + result: SlowTimeoutResult::Fail, + } => false, } } @@ -1090,7 +1118,7 @@ ExecutionResult::Leak { .. } => "leak", ExecutionResult::Fail { .. } => "fail", ExecutionResult::ExecFail => "exec-fail", - ExecutionResult::Timeout => "timeout", + ExecutionResult::Timeout { .. } => "timeout", } } } @@ -1574,7 +1602,7 @@ mod tests { RunStats { initial_run_count: 42, finished_count: 42, - timed_out: 1, + failed_timed_out: 1, ..RunStats::default() } .summarize_final(), FinalRunStats::Failed(RunStatsFailureKind::Test { initial_run_count: 42, not_run: 0 }), "timed out => failure" ); assert_eq!( RunStats { diff --git a/nextest-runner/src/reporter/structured/libtest.rs b/nextest-runner/src/reporter/structured/libtest.rs index b09e1c71738..d4205fbcd15 100644 --- a/nextest-runner/src/reporter/structured/libtest.rs +++ b/nextest-runner/src/reporter/structured/libtest.rs @@ -23,7 +23,7 @@ //! using before use crate::{ - config::elements::LeakTimeoutResult, + config::elements::{LeakTimeoutResult, SlowTimeoutResult}, errors::{DisplayErrorChain, FormatVersionError, FormatVersionErrorInner, WriteEventError}, list::RustTestSuite, reporter::events::{ExecutionResult, StressIndex, TestEvent, TestEventKind}, @@ -268,6 +268,9 @@ impl<'cfg> LibtestReporter<'cfg> { KIND_TEST, match run_statuses.last_status().result { ExecutionResult::Pass + | ExecutionResult::Timeout { + result: SlowTimeoutResult::Pass, + } | ExecutionResult::Leak { result: LeakTimeoutResult::Pass, } => EVENT_OK, @@ -276,7 +279,9 @@ } | ExecutionResult::Fail { ..
} | ExecutionResult::ExecFail - | ExecutionResult::Timeout => EVENT_FAILED, + | ExecutionResult::Timeout { + result: SlowTimeoutResult::Fail, + } => EVENT_FAILED, }, stress_index, test_instance, @@ -425,7 +430,9 @@ impl<'cfg> LibtestReporter<'cfg> { )?; out.extend_from_slice(b"\""); } - ExecutionResult::Timeout => { + ExecutionResult::Timeout { + result: SlowTimeoutResult::Fail, + } => { test_suite_mut.failed += 1; out.extend_from_slice(br#","reason":"time limit exceeded""#); } diff --git a/nextest-runner/src/runner/executor.rs b/nextest-runner/src/runner/executor.rs index eae65a53c87..13c2c676745 100644 --- a/nextest-runner/src/runner/executor.rs +++ b/nextest-runner/src/runner/executor.rs @@ -490,7 +490,9 @@ impl<'a> ExecutorContext<'a> { job.as_ref(), slow_timeout.grace_period, ).await; - status = Some(ExecutionResult::Timeout); + status = Some(ExecutionResult::Timeout { + result: slow_timeout.on_timeout, + }); if slow_timeout.grace_period.is_zero() { break child.wait().await; } @@ -874,7 +876,7 @@ impl<'a> ExecutorContext<'a> { job.as_ref(), slow_timeout.grace_period, ).await; - status = Some(ExecutionResult::Timeout); + status = Some(ExecutionResult::Timeout { result: slow_timeout.on_timeout }); if slow_timeout.grace_period.is_zero() { break child.wait().await; } diff --git a/nextest-runner/tests/integration/basic.rs b/nextest-runner/tests/integration/basic.rs index c951648cc7a..0f4afa5ca71 100644 --- a/nextest-runner/tests/integration/basic.rs +++ b/nextest-runner/tests/integration/basic.rs @@ -14,7 +14,7 @@ use nextest_metadata::{FilterMatch, MismatchReason}; use nextest_runner::{ config::{ core::NextestConfig, - elements::{LeakTimeoutResult, RetryPolicy}, + elements::{LeakTimeoutResult, RetryPolicy, SlowTimeoutResult}, }, double_spawn::DoubleSpawnInfo, input::InputHandlerKind, @@ -71,6 +71,182 @@ fn test_list_binaries() -> Result<()> { Ok(()) } +#[test] +fn test_timeout_with_retries() -> Result<()> { + test_init(); + let pcx = ParseContext::new(&PACKAGE_GRAPH); + let expr = Filterset::parse( + "test(/^test_slow_timeout/)".to_owned(), + &pcx, + FiltersetKind::Test, + ) + .unwrap(); + let test_filter = TestFilterBuilder::new( + RunIgnored::Only, + None, + TestFilterPatterns::default(), + vec![expr], + ) + .unwrap(); + + let test_list = FIXTURE_TARGETS.make_test_list( + "with-timeout-retries-success", + &test_filter, + &TargetRunner::empty(), + )?; + let config = load_config(); + let profile = config + .profile("with-timeout-retries-success") + .expect("with-timeout-retries-success config is valid"); + let build_platforms = BuildPlatforms::new_with_no_target().unwrap(); + let profile = profile.apply_build_platforms(&build_platforms); + + let profile_retries = profile.retries(); + assert_eq!( + profile_retries, + RetryPolicy::new_without_delay(2), + "retries set in with-timeout-retries-success profile" + ); + + let runner = TestRunnerBuilder::default() + .build( + &test_list, + &profile, + vec![], + SignalHandlerKind::Noop, + InputHandlerKind::Noop, + DoubleSpawnInfo::disabled(), + TargetRunner::empty(), + ) + .unwrap(); + + let (instance_statuses, _run_stats) = execute_collect(runner); + + // With retries and on-timeout=pass, timed out tests should not be retried. + for test_name in [ + "test_slow_timeout", + "test_slow_timeout_2", + "test_slow_timeout_subprocess", + ] { + let (_, instance_value) = instance_statuses + .iter() + .find(|&(&(_, name), _)| name == test_name) + .unwrap_or_else(|| panic!("{test_name} should be present")); + + match &instance_value.status { +
InstanceStatus::Skipped(_) => panic!("{test_name} should have been run"), + InstanceStatus::Finished(run_statuses) => { + assert_eq!( + run_statuses.len(), + 1, + "{test_name} should have been run exactly once: a timeout treated as a pass is not retried", + ); + + let status = run_statuses.last_status(); + assert_eq!( + status.result, + ExecutionResult::Timeout { + result: SlowTimeoutResult::Pass + } + ); + } + }; + } + + Ok(()) +} + +#[test] +fn test_timeout_with_flaky() -> Result<()> { + test_init(); + + let pcx = ParseContext::new(&PACKAGE_GRAPH); + let expr = Filterset::parse( + "test(test_flaky_slow_timeout_mod_3)".to_owned(), + &pcx, + FiltersetKind::Test, + ) + .unwrap(); + let test_filter = TestFilterBuilder::new( + RunIgnored::Only, + None, + TestFilterPatterns::default(), + vec![expr], + ) + .unwrap(); + + let test_list = FIXTURE_TARGETS.make_test_list( + "with-timeout-retries-success", + &test_filter, + &TargetRunner::empty(), + )?; + let config = load_config(); + let profile = config + .profile("with-timeout-retries-success") + .expect("with-timeout-retries-success config is valid"); + let build_platforms = BuildPlatforms::new_with_no_target().unwrap(); + let profile = profile.apply_build_platforms(&build_platforms); + + let runner = TestRunnerBuilder::default() + .build( + &test_list, + &profile, + vec![], + SignalHandlerKind::Noop, + InputHandlerKind::Noop, + DoubleSpawnInfo::disabled(), + TargetRunner::empty(), + ) + .unwrap(); + + let (instance_statuses, _run_stats) = execute_collect(runner); + + let (_, instance_value) = instance_statuses + .iter() + .find(|&(&(_, name), _)| name == "test_flaky_slow_timeout_mod_3") + .unwrap_or_else(|| panic!("test_flaky_slow_timeout_mod_3 should be present")); + + match &instance_value.status { + InstanceStatus::Skipped(_) => panic!("test_flaky_slow_timeout_mod_3 should have been run"), + InstanceStatus::Finished(run_statuses) => { + assert!( + run_statuses.len() == 3, + "test_flaky_slow_timeout_mod_3 should have been run 3 times, was run {} times", + run_statuses.len() + ); + + match run_statuses.describe() { + ExecutionDescription::Flaky { + last_status, + prior_statuses, + } => { + for (i, prior_status) in prior_statuses.iter().enumerate() { + assert!( + matches!(prior_status.result, ExecutionResult::Fail { ..
}), + "prior attempt {} should have failed, got {:?}", + i + 1, + prior_status.result + ); + } + assert!( + matches!( + last_status.result, + ExecutionResult::Timeout { + result: SlowTimeoutResult::Pass + } + ), + "last attempt should be a successful timeout, was {:?}", + last_status.result + ); + } + _ => panic!("test_flaky_slow_timeout_mod_3 should be flaky"), + } + } + }; + + Ok(()) +} + #[test] fn test_list_tests() -> Result<()> { test_init(); @@ -705,7 +881,7 @@ fn test_termination() -> Result<()> { .unwrap(); let (instance_statuses, run_stats) = execute_collect(runner); - assert_eq!(run_stats.timed_out, 3, "3 tests timed out"); + assert_eq!(run_stats.failed_timed_out, 3, "3 tests timed out"); for test_name in [ "test_slow_timeout", "test_slow_timeout_2", "test_slow_timeout_subprocess", ] { @@ -733,7 +909,10 @@ fn test_termination() -> Result<()> { "{test_name} should have taken less than 5 seconds, actually took {:?}", run_status.time_taken ); - run_status.result == ExecutionResult::Timeout + run_status.result + == ExecutionResult::Timeout { + result: SlowTimeoutResult::Fail, + } } }; if !valid { @@ -746,3 +925,107 @@ Ok(()) } + +#[test] +fn test_override_timeout_result() -> Result<()> { + test_init(); + + let pcx = ParseContext::new(&PACKAGE_GRAPH); + let expr = Filterset::parse( + "test(/^test_slow_timeout/)".to_owned(), + &pcx, + FiltersetKind::Test, + ) + .unwrap(); + let test_filter = TestFilterBuilder::new( + RunIgnored::Only, + None, + TestFilterPatterns::default(), + vec![expr], + ) + .unwrap(); + + let test_list = FIXTURE_TARGETS.make_test_list( + "with-timeout-success", + &test_filter, + &TargetRunner::empty(), + )?; + let config = load_config(); + let profile = config + .profile("with-timeout-success") + .expect("with-timeout-success config is valid"); + let build_platforms = BuildPlatforms::new_with_no_target().unwrap(); + let profile = profile.apply_build_platforms(&build_platforms); + + let runner = TestRunnerBuilder::default() + .build( + &test_list, + &profile, + vec![], + SignalHandlerKind::Noop, + InputHandlerKind::Noop, + DoubleSpawnInfo::disabled(), + TargetRunner::empty(), + ) + .unwrap(); + + let (instance_statuses, run_stats) = execute_collect(runner); + + println!("{instance_statuses:?}"); + assert_eq!(run_stats.finished_count, 3, "3 tests should have finished"); + assert_eq!( + run_stats.passed_timed_out, 1, + "1 test should pass with timeout" + ); + assert_eq!(run_stats.failed_timed_out, 2, "2 tests should fail"); + + for test_name in [ + "test_slow_timeout", + "test_slow_timeout_2", + "test_slow_timeout_subprocess", + ] { + let (_, instance_value) = instance_statuses + .iter() + .find(|&(&(_, name), _)| name == test_name) + .unwrap_or_else(|| panic!("{test_name} should be present")); + let (expected, actual) = match &instance_value.status { + InstanceStatus::Skipped(_) => panic!("{test_name} should have been run"), + InstanceStatus::Finished(run_statuses) => { + assert_eq!( + run_statuses.len(), + 1, + "{test_name} should have been run exactly once", + ); + let run_status = run_statuses.last_status(); + assert!( + run_status.time_taken < Duration::from_secs(5), + "{test_name} should have taken less than 5 seconds, actually took {:?}", + run_status.time_taken + ); + if matches!(test_name, "test_slow_timeout") { + ( + ExecutionResult::Timeout { + result: SlowTimeoutResult::Pass, + }, + run_status.result, + ) + } else { + ( + ExecutionResult::Timeout { + result: SlowTimeoutResult::Fail, + }, + run_status.result, + ) + } + } + }; + + assert_eq!( + expected,
actual, "{test_name}: mismatch in status: expected {:?}, actual {:?}", expected, actual ); + } + + Ok(()) +} diff --git a/site/src/docs/configuration/reference.md b/site/src/docs/configuration/reference.md index 86bc7bda647..d3fcada5ea8 100644 --- a/site/src/docs/configuration/reference.md +++ b/site/src/docs/configuration/reference.md @@ -145,8 +145,16 @@ Profiles are configured under `[profile.<name>]`. The default profile is called slow-timeout = "60s" # or slow-timeout = { period = "120s", terminate-after = 2, grace-period = "10s" } + # or + slow-timeout = { period = "30s", terminate-after = 4, on-timeout = "pass" } ``` +The `slow-timeout` object accepts the following parameters: + +- `period`: Time period after which a test is considered slow +- `terminate-after`: Number of periods after which to terminate the test +- `grace-period`: Time to wait for graceful shutdown before force termination +- `on-timeout`: What to do when a test times out (`"fail"` or `"pass"`) + #### `profile.<name>.leak-timeout` - **Type**: String (duration) or object diff --git a/site/src/docs/features/slow-tests.md b/site/src/docs/features/slow-tests.md index 0474834163e..9f977971b49 100644 --- a/site/src/docs/features/slow-tests.md +++ b/site/src/docs/features/slow-tests.md @@ -43,6 +43,29 @@ Nextest lets you optionally specify a number of `slow-timeout` periods after whi slow-timeout = { period = "30s", terminate-after = 4 } ``` +### Configuring timeout behavior + + + +By default, tests that time out are treated as failures. However, for fuzz tests with very large state spaces (or in a constrained environment like CI), it may be useful to treat timeouts as successes, since such tests are usually not expected to run to completion. A timeout in this context means that no failing input was found up to that point. + +For such tests, you can configure timeouts to be marked as successes. For example, to run tests in the `fuzz-targets` crate for 30 seconds, then mark them as successes: + +```toml title="Timeouts as successes" +[[profile.default.overrides]] +filter = 'package(fuzz-targets)' +slow-timeout = { period = "30s", terminate-after = 1, on-timeout = "pass" } +``` + +The possible values for `on-timeout` are: + +`fail` + +: (default) Tests that time out are treated as failures + +`pass` + +: Tests that time out are treated as successes + ### Example The run below is configured with: