1
0
mirror of https://github.com/Kitware/CMake.git synced 2025-10-15 12:16:40 +08:00

ctest: add option --stop-on-failure

To stop the tests once one has failed

Fixes: #16628
This commit is contained in:
Johnny Jazeix
2020-04-20 23:05:15 +02:00
parent df2d39bc51
commit e89aeba5c4
17 changed files with 106 additions and 1 deletion

View File

@@ -20,6 +20,7 @@ Perform the :ref:`CTest Test Step` as a :ref:`Dashboard Client`.
[RESOURCE_SPEC_FILE <file>]
[TEST_LOAD <threshold>]
[SCHEDULE_RANDOM <ON|OFF>]
[STOP_ON_FAILURE]
[STOP_TIME <time-of-day>]
[RETURN_VALUE <result-var>]
[CAPTURE_CMAKE_ERROR <result-var>]
@@ -119,6 +120,9 @@ The options are:
Launch tests in a random order. This may be useful for detecting
implicit test dependencies.
``STOP_ON_FAILURE``
Stop the execution of the tests once one has failed.
``STOP_TIME <time-of-day>``
Specify a time of day at which the tests should all stop running.

View File

@@ -72,6 +72,9 @@ Options
This option can also be enabled by setting the
:envvar:`CTEST_OUTPUT_ON_FAILURE` environment variable.
``--stop-on-failure``
Stop running the tests when the first failure happens.
``-F``
Enable failover.

View File

@@ -0,0 +1,8 @@
ctest_stop_on_failure
---------------------
* :manual:`ctest(1)` gained a ``--stop-on-failure`` option,
which can be used to stop running the tests once one has failed.
* The :command:`ctest_test` command gained a ``STOP_ON_FAILURE`` option
which can be used to stop running the tests once one has failed.

View File

@@ -137,7 +137,7 @@ void cmCTestMultiProcessHandler::RunTests()
uv_run(&this->Loop, UV_RUN_DEFAULT);
uv_loop_close(&this->Loop);
if (!this->StopTimePassed) {
if (!this->StopTimePassed && !this->CheckStopOnFailure()) {
assert(this->Completed == this->Total);
assert(this->Tests.empty());
}
@@ -367,6 +367,11 @@ void cmCTestMultiProcessHandler::CheckResourcesAvailable()
}
}
// Report whether the test run should stop at the first failure, as
// requested via the --stop-on-failure command-line option or the
// ctest_test() STOP_ON_FAILURE option (both forwarded through cmCTest).
bool cmCTestMultiProcessHandler::CheckStopOnFailure()
{
  return this->CTest->GetStopOnFailure();
}
bool cmCTestMultiProcessHandler::CheckStopTimePassed()
{
if (!this->StopTimePassed) {
@@ -483,6 +488,10 @@ void cmCTestMultiProcessHandler::StartNextTests()
return;
}
if (this->CheckStopOnFailure() && !this->Failed->empty()) {
return;
}
size_t numToStart = 0;
if (this->RunningCount < this->ParallelLevel) {

View File

@@ -138,6 +138,8 @@ protected:
inline size_t GetProcessorsUsed(int index);
std::string GetName(int index);
bool CheckStopOnFailure();
bool CheckStopTimePassed();
void SetStopTimePassed();

View File

@@ -34,6 +34,7 @@ void cmCTestTestCommand::BindArguments()
this->Bind("STOP_TIME"_s, this->StopTime);
this->Bind("TEST_LOAD"_s, this->TestLoad);
this->Bind("RESOURCE_SPEC_FILE"_s, this->ResourceSpecFile);
this->Bind("STOP_ON_FAILURE"_s, this->StopOnFailure);
}
cmCTestGenericHandler* cmCTestTestCommand::InitializeHandler()
@@ -90,6 +91,9 @@ cmCTestGenericHandler* cmCTestTestCommand::InitializeHandler()
handler->SetOption("ExcludeFixtureCleanupRegularExpression",
this->ExcludeFixtureCleanup.c_str());
}
if (this->StopOnFailure) {
handler->SetOption("StopOnFailure", "ON");
}
if (!this->ParallelLevel.empty()) {
handler->SetOption("ParallelLevel", this->ParallelLevel.c_str());
}

View File

@@ -60,6 +60,7 @@ protected:
std::string StopTime;
std::string TestLoad;
std::string ResourceSpecFile;
bool StopOnFailure = false;
};
#endif

View File

@@ -514,6 +514,10 @@ bool cmCTestTestHandler::ProcessOptions()
this->CTest->SetParallelLevel(atoi(this->GetOption("ParallelLevel")));
}
if (this->GetOption("StopOnFailure")) {
this->CTest->SetStopOnFailure(true);
}
const char* val;
val = this->GetOption("LabelRegularExpression");
if (val) {

View File

@@ -92,6 +92,7 @@ struct cmCTest::Private
std::string ConfigType;
std::string ScheduleType;
std::chrono::system_clock::time_point StopTime;
bool StopOnFailure = false;
bool TestProgressOutput = false;
bool Verbose = false;
bool ExtraVerbose = false;
@@ -1932,6 +1933,10 @@ bool cmCTest::HandleCommandLineArguments(size_t& i,
this->SetStopTime(args[i]);
}
else if (this->CheckArgument(arg, "--stop-on-failure"_s)) {
this->Impl->StopOnFailure = true;
}
else if (this->CheckArgument(arg, "-C"_s, "--build-config") &&
i < args.size() - 1) {
i++;
@@ -2493,6 +2498,16 @@ void cmCTest::SetNotesFiles(const char* notes)
this->Impl->NotesFiles = notes;
}
// Return true when the user asked to stop running tests after the first
// failure (set from --stop-on-failure or ctest_test's STOP_ON_FAILURE).
bool cmCTest::GetStopOnFailure() const
{
  return this->Impl->StopOnFailure;
}
// Enable or disable stopping the test run after the first failed test.
void cmCTest::SetStopOnFailure(bool stop)
{
  this->Impl->StopOnFailure = stop;
}
std::chrono::system_clock::time_point cmCTest::GetStopTime() const
{
return this->Impl->StopTime;

View File

@@ -204,6 +204,9 @@ public:
bool ShouldCompressTestOutput();
bool CompressString(std::string& str);
bool GetStopOnFailure() const;
void SetStopOnFailure(bool stop);
std::chrono::system_clock::time_point GetStopTime() const;
void SetStopTime(std::string const& time);

View File

@@ -35,6 +35,7 @@ static const char* cmDocumentationOptions[][2] = {
{ "--output-on-failure",
"Output anything outputted by the test program "
"if the test should fail." },
{ "--stop-on-failure", "Stop running the tests after one has failed." },
{ "--test-output-size-passed <size>",
"Limit the output for passed tests "
"to <size> bytes" },

View File

@@ -242,6 +242,20 @@ function(run_TestOutputSize)
endfunction()
run_TestOutputSize()
# Test --stop-on-failure
# Verify that `ctest --stop-on-failure` stops the run at the first failure:
# test1 fails, so test2 must never be started (see the -stdout check file).
function(run_stop_on_failure)
# Run in a dedicated, freshly created binary directory.
set(RunCMake_TEST_BINARY_DIR ${RunCMake_BINARY_DIR}/stop-on-failure)
set(RunCMake_TEST_NO_CLEAN 1)
file(REMOVE_RECURSE "${RunCMake_TEST_BINARY_DIR}")
file(MAKE_DIRECTORY "${RunCMake_TEST_BINARY_DIR}")
# test1 fails (`cmake -E false`); test2 would only echo if it ever ran.
file(WRITE "${RunCMake_TEST_BINARY_DIR}/CTestTestfile.cmake" "
add_test(test1 \"${CMAKE_COMMAND}\" -E false)
add_test(test2 \"${CMAKE_COMMAND}\" -E echo \"not running\")
")
run_cmake_command(stop-on-failure ${CMAKE_CTEST_COMMAND} --stop-on-failure)
endfunction()
run_stop_on_failure()
function(run_TestAffinity)
set(RunCMake_TEST_BINARY_DIR ${RunCMake_BINARY_DIR}/TestAffinity)
set(RunCMake_TEST_NO_CLEAN 1)

View File

@@ -0,0 +1 @@
8

View File

@@ -0,0 +1 @@
Errors while running CTest

View File

@@ -0,0 +1,10 @@
^Test project .*/Tests/RunCMake/CTestCommandLine/stop-on-failure
Start 1: test1
1/2 Test #1: test1 ............................\*\*\*Failed +[0-9.]+ sec
+
0% tests passed, 1 tests failed out of 1
+
Total Test time \(real\) = +[0-9.]+ sec
+
The following tests FAILED:
[ ]+1 - test1 \(Failed\)$

View File

@@ -95,3 +95,15 @@ endfunction()
run_TestRepeat(UntilFail REPEAT UNTIL_FAIL:3)
run_TestRepeat(UntilPass REPEAT UNTIL_PASS:3)
run_TestRepeat(AfterTimeout REPEAT AFTER_TIMEOUT:3)
# test --stop-on-failure
# Verify ctest_test(... STOP_ON_FAILURE): the run stops at the first failing
# test (see the stop-on-failure-*-stdout.txt check file for expected output).
function(run_stop_on_failure)
set(CASE_CTEST_TEST_ARGS EXCLUDE RunCMakeVersion)
# StoppingTest fails (`cmake -E false`); NotRunTest should not be started.
set(CASE_CMAKELISTS_SUFFIX_CODE [[
add_test(NAME StoppingTest COMMAND ${CMAKE_COMMAND} -E false)
add_test(NAME NotRunTest COMMAND ${CMAKE_COMMAND} -E true)
]])
run_ctest_test(stop-on-failure STOP_ON_FAILURE)
endfunction()
run_stop_on_failure()

View File

@@ -0,0 +1,13 @@
Test project [^
]*/Tests/RunCMake/ctest_test/stop-on-failure-build
Start 1: RunCMakeVersion
1/3 Test #1: RunCMakeVersion .................. Passed +[0-9.]+ sec
Start 2: StoppingTest
2/3 Test #2: StoppingTest .....................\*\*\*Failed +[0-9.]+ sec
+
50% tests passed, 1 tests failed out of 2
+
Total Test time \(real\) = +[0-9.]+ sec
+
The following tests FAILED:
[ ]+2 - StoppingTest \(Failed\)$