Merge topic 'ctest-disable-tests'

b070947d Add 'DISABLED' test property
42c68aad CTest: Improve 'Completion Status' reported to CDash for 'Not Run' tests

Acked-by: Kitware Robot <kwrobot@kitware.com>
Reviewed-by: Brad King <brad.king@kitware.com>
Merge-request: !571
commit 1cc918a770
Brad King, 2017-03-30 13:01:00 +00:00; committed by Kitware Robot
25 changed files with 304 additions and 19 deletions

@ -325,6 +325,7 @@ Properties on Tests
/prop_test/ATTACHED_FILES
/prop_test/COST
/prop_test/DEPENDS
/prop_test/DISABLED
/prop_test/ENVIRONMENT
/prop_test/FAIL_REGULAR_EXPRESSION
/prop_test/FIXTURES_CLEANUP

@ -0,0 +1,15 @@
DISABLED
--------

If set to true, the test will be skipped and its status will be 'Not Run'. A
DISABLED test will not be counted in the total number of tests and its
completion status will be reported to CDash as 'Disabled'.

A DISABLED test does not participate in test fixture dependency resolution.
If a DISABLED test has fixture requirements defined in its
:prop_test:`FIXTURES_REQUIRED` property, it will not cause setup or cleanup
tests for those fixtures to be added to the test set.

If a test with the :prop_test:`FIXTURES_SETUP` property set is DISABLED, the
fixture behavior will be as though that setup test was passing and any test
case requiring that fixture will still run.
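
As a quick illustration of the property described above (not part of the
patch; the test name and its intent are hypothetical), a minimal CMakeLists
sketch might look like:

```cmake
# Keep a known-flaky test registered so it stays visible in the test list,
# but mark it DISABLED so CTest reports it as "Not Run (Disabled)" and
# CDash receives the completion status "Disabled".
add_test(NAME flaky_network_test COMMAND "${CMAKE_COMMAND}" --version)
set_tests_properties(flaky_network_test PROPERTIES DISABLED true)
```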

@ -0,0 +1,5 @@
ctest-disable-tests
-------------------
* A :prop_test:`DISABLED` test property was added to mark tests that
are configured but explicitly disabled so they do not run.

@ -163,7 +163,9 @@ void cmCTestMultiProcessHandler::StartTestProcess(int test)
this->TestRunningMap[test] = false;
this->RunningCount -= GetProcessorsUsed(test);
testRun->EndTest(this->Completed, this->Total, false);
this->Failed->push_back(this->Properties[test]->Name);
if (!this->Properties[test]->Disabled) {
this->Failed->push_back(this->Properties[test]->Name);
}
delete testRun;
}
}

@ -215,6 +215,9 @@ bool cmCTestRunTest::EndTest(size_t completed, size_t total, bool started)
if (this->TestProperties->SkipReturnCode >= 0 &&
this->TestProperties->SkipReturnCode == retVal) {
this->TestResult.Status = cmCTestTestHandler::NOT_RUN;
std::ostringstream s;
s << "SKIP_RETURN_CODE=" << this->TestProperties->SkipReturnCode;
this->TestResult.CompletionStatus = s.str();
cmCTestLog(this->CTest, HANDLER_OUTPUT, "***Skipped ");
} else if ((success && !this->TestProperties->WillFail) ||
(!success && this->TestProperties->WillFail)) {
@ -253,6 +256,8 @@ bool cmCTestRunTest::EndTest(size_t completed, size_t total, bool started)
cmCTestLog(this->CTest, HANDLER_OUTPUT, "Other");
this->TestResult.Status = cmCTestTestHandler::OTHER_FAULT;
}
} else if ("Disabled" == this->TestResult.CompletionStatus) {
cmCTestLog(this->CTest, HANDLER_OUTPUT, "***Not Run (Disabled) ");
} else // cmsysProcess_State_Error
{
cmCTestLog(this->CTest, HANDLER_OUTPUT, "***Not Run ");
@ -413,6 +418,24 @@ bool cmCTestRunTest::StartTest(size_t total)
<< this->TestProperties->Index << ": "
<< this->TestProperties->Name << std::endl);
this->ProcessOutput.clear();
// Return immediately if test is disabled
if (this->TestProperties->Disabled) {
this->TestResult.Properties = this->TestProperties;
this->TestResult.ExecutionTime = 0;
this->TestResult.CompressOutput = false;
this->TestResult.ReturnValue = -1;
this->TestResult.CompletionStatus = "Disabled";
this->TestResult.Status = cmCTestTestHandler::NOT_RUN;
this->TestResult.TestCount = this->TestProperties->Index;
this->TestResult.Name = this->TestProperties->Name;
this->TestResult.Path = this->TestProperties->Directory;
this->TestProcess = new cmProcess;
this->TestResult.Output = "Disabled";
this->TestResult.FullCommandLine = "";
return false;
}
this->ComputeArguments();
std::vector<std::string>& args = this->TestProperties->Args;
this->TestResult.Properties = this->TestProperties;
@ -437,7 +460,7 @@ bool cmCTestRunTest::StartTest(size_t total)
cmCTestLog(this->CTest, HANDLER_OUTPUT, msg << std::endl);
this->TestResult.Output = msg;
this->TestResult.FullCommandLine = "";
this->TestResult.CompletionStatus = "Not Run";
this->TestResult.CompletionStatus = "Fixture dependency failed";
this->TestResult.Status = cmCTestTestHandler::NOT_RUN;
return false;
}
@ -457,7 +480,7 @@ bool cmCTestRunTest::StartTest(size_t total)
cmCTestLog(this->CTest, ERROR_MESSAGE, msg << std::endl);
this->TestResult.Output = msg;
this->TestResult.FullCommandLine = "";
this->TestResult.CompletionStatus = "Not Run";
this->TestResult.CompletionStatus = "Missing Configuration";
this->TestResult.Status = cmCTestTestHandler::NOT_RUN;
return false;
}
@ -477,7 +500,7 @@ bool cmCTestRunTest::StartTest(size_t total)
"Unable to find required file: " << file << std::endl);
this->TestResult.Output = "Unable to find required file: " + file;
this->TestResult.FullCommandLine = "";
this->TestResult.CompletionStatus = "Not Run";
this->TestResult.CompletionStatus = "Required Files Missing";
this->TestResult.Status = cmCTestTestHandler::NOT_RUN;
return false;
}
@ -493,7 +516,7 @@ bool cmCTestRunTest::StartTest(size_t total)
"Unable to find executable: " << args[1] << std::endl);
this->TestResult.Output = "Unable to find executable: " + args[1];
this->TestResult.FullCommandLine = "";
this->TestResult.CompletionStatus = "Not Run";
this->TestResult.CompletionStatus = "Unable to find executable";
this->TestResult.Status = cmCTestTestHandler::NOT_RUN;
return false;
}
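
Aside (not part of the patch): the hunks above keep the existing run-time skip
path (SKIP_RETURN_CODE) and add the up-front Disabled path. A hedged sketch of
how the two look from the CMakeLists side, with hypothetical test and command
names and an arbitrary return-code value:

```cmake
# Run-time skip: the test is started and signals "skip" through its exit code;
# CTest records "SKIP_RETURN_CODE=<value>" as the completion status.
# (my_tool and the value 125 are illustrative only.)
add_test(NAME maybe_skipped_test COMMAND my_tool --self-check)
set_tests_properties(maybe_skipped_test PROPERTIES SKIP_RETURN_CODE 125)

# Disabled: the test is never started; StartTest() returns immediately and the
# completion status reported to CDash is "Disabled".
add_test(NAME disabled_test COMMAND my_tool --full-run)
set_tests_properties(disabled_test PROPERTIES DISABLED true)
```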

@ -487,6 +487,19 @@ int cmCTestTestHandler::ProcessHandler()
}
}
typedef std::set<cmCTestTestHandler::cmCTestTestResult,
cmCTestTestResultLess>
SetOfTests;
SetOfTests resultsSet(this->TestResults.begin(), this->TestResults.end());
std::vector<cmCTestTestHandler::cmCTestTestResult> disabledTests;
for (SetOfTests::iterator ftit = resultsSet.begin();
ftit != resultsSet.end(); ++ftit) {
if (ftit->CompletionStatus == "Disabled") {
disabledTests.push_back(*ftit);
}
}
float percent = float(passed.size()) * 100.0f / float(total);
if (!failed.empty() && percent > 99) {
percent = 99;
@ -505,21 +518,33 @@ int cmCTestTestHandler::ProcessHandler()
"\nTotal Test time (real) = " << realBuf << "\n",
this->Quiet);
if (!disabledTests.empty()) {
cmGeneratedFileStream ofs;
cmCTestLog(this->CTest, HANDLER_OUTPUT, std::endl
<< "The following tests are disabled and did not run:"
<< std::endl);
this->StartLogFile("TestsDisabled", ofs);
for (std::vector<cmCTestTestHandler::cmCTestTestResult>::iterator dtit =
disabledTests.begin();
dtit != disabledTests.end(); ++dtit) {
ofs << dtit->TestCount << ":" << dtit->Name << std::endl;
cmCTestLog(this->CTest, HANDLER_OUTPUT, "\t"
<< std::setw(3) << dtit->TestCount << " - " << dtit->Name
<< std::endl);
}
}
if (!failed.empty()) {
cmGeneratedFileStream ofs;
cmCTestLog(this->CTest, HANDLER_OUTPUT, std::endl
<< "The following tests FAILED:" << std::endl);
this->StartLogFile("TestsFailed", ofs);
typedef std::set<cmCTestTestHandler::cmCTestTestResult,
cmCTestTestResultLess>
SetOfTests;
SetOfTests resultsSet(this->TestResults.begin(),
this->TestResults.end());
for (SetOfTests::iterator ftit = resultsSet.begin();
ftit != resultsSet.end(); ++ftit) {
if (ftit->Status != cmCTestTestHandler::COMPLETED) {
if (ftit->Status != cmCTestTestHandler::COMPLETED &&
ftit->CompletionStatus != "Disabled") {
ofs << ftit->TestCount << ":" << ftit->Name << std::endl;
cmCTestLog(
this->CTest, HANDLER_OUTPUT, "\t"
@ -841,6 +866,11 @@ void cmCTestTestHandler::UpdateForFixtures(ListOfTests& tests) const
size_t fixtureTestsAdded = 0;
std::set<std::string> addedFixtures;
for (size_t i = 0; i < tests.size(); ++i) {
// Skip disabled tests
if (tests[i].Disabled) {
continue;
}
// There are two things to do for each test:
// 1. For every fixture required by this test, record that fixture as
// being required and create dependencies on that fixture's setup
@ -1200,6 +1230,7 @@ void cmCTestTestHandler::GenerateDartOutput(cmXMLWriter& xml)
cmCTestTestResult* result = &this->TestResults[cc];
this->WriteTestResultHeader(xml, result);
xml.StartElement("Results");
if (result->Status != cmCTestTestHandler::NOT_RUN) {
if (result->Status != cmCTestTestHandler::COMPLETED ||
result->ReturnValue) {
@ -1208,6 +1239,7 @@ void cmCTestTestHandler::GenerateDartOutput(cmXMLWriter& xml)
xml.Attribute("name", "Exit Code");
xml.Element("Value", this->GetTestStatus(result->Status));
xml.EndElement(); // NamedMeasurement
xml.StartElement("NamedMeasurement");
xml.Attribute("type", "text/string");
xml.Attribute("name", "Exit Value");
@ -1222,8 +1254,7 @@ void cmCTestTestHandler::GenerateDartOutput(cmXMLWriter& xml)
xml.EndElement(); // NamedMeasurement
if (!result->Reason.empty()) {
const char* reasonType = "Pass Reason";
if (result->Status != cmCTestTestHandler::COMPLETED &&
result->Status != cmCTestTestHandler::NOT_RUN) {
if (result->Status != cmCTestTestHandler::COMPLETED) {
reasonType = "Fail Reason";
}
xml.StartElement("NamedMeasurement");
@ -1232,12 +1263,14 @@ void cmCTestTestHandler::GenerateDartOutput(cmXMLWriter& xml)
xml.Element("Value", result->Reason);
xml.EndElement(); // NamedMeasurement
}
xml.StartElement("NamedMeasurement");
xml.Attribute("type", "text/string");
xml.Attribute("name", "Completion Status");
xml.Element("Value", result->CompletionStatus);
xml.EndElement(); // NamedMeasurement
}
xml.StartElement("NamedMeasurement");
xml.Attribute("type", "text/string");
xml.Attribute("name", "Completion Status");
xml.Element("Value", result->CompletionStatus);
xml.EndElement(); // NamedMeasurement
xml.StartElement("NamedMeasurement");
xml.Attribute("type", "text/string");
xml.Attribute("name", "Command Line");
@ -2000,6 +2033,9 @@ bool cmCTestTestHandler::SetTestsProperties(
if (key == "WILL_FAIL") {
rtit->WillFail = cmSystemTools::IsOn(val.c_str());
}
if (key == "DISABLED") {
rtit->Disabled = cmSystemTools::IsOn(val.c_str());
}
if (key == "ATTACHED_FILES") {
cmSystemTools::ExpandListArgument(val, rtit->AttachedFiles);
}
@ -2178,6 +2214,7 @@ bool cmCTestTestHandler::AddTest(const std::vector<std::string>& args)
test.IsInBasedOnREOptions = true;
test.WillFail = false;
test.Disabled = false;
test.RunSerial = false;
test.Timeout = 0;
test.ExplicitTimeout = false;
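
Aside (not part of the patch): with the UpdateForFixtures change above, a
disabled test is skipped during fixture dependency resolution. A sketch of the
resulting behaviour, using hypothetical test and fixture names that mirror the
DisableAllTests case exercised further down:

```cmake
# Only one test requires the "Db" fixture, and that test is disabled.
add_test(NAME db_setup COMMAND "${CMAKE_COMMAND}" --version)
add_test(NAME db_cleanup COMMAND "${CMAKE_COMMAND}" --version)
add_test(NAME db_query_test COMMAND "${CMAKE_COMMAND}" --version)

set_tests_properties(db_setup PROPERTIES FIXTURES_SETUP "Db")
set_tests_properties(db_cleanup PROPERTIES FIXTURES_CLEANUP "Db")
set_tests_properties(db_query_test PROPERTIES DISABLED true
                                              FIXTURES_REQUIRED "Db")

# Because db_query_test is DISABLED, it does not cause db_setup or db_cleanup
# to be added to the set of tests to run.
```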

@ -114,6 +114,7 @@ public:
std::map<std::string, std::string> Measurements;
bool IsInBasedOnREOptions;
bool WillFail;
bool Disabled;
float Cost;
int PreviousRuns;
bool RunSerial;

@ -202,6 +202,7 @@ endif()
add_RunCMake_test(ctest_start)
add_RunCMake_test(ctest_submit)
add_RunCMake_test(ctest_test)
add_RunCMake_test(ctest_disabled_test)
add_RunCMake_test(ctest_upload)
add_RunCMake_test(ctest_fixtures)
add_RunCMake_test(file)

@ -0,0 +1,6 @@
cmake_minimum_required(VERSION 3.7)
project(@CASE_NAME@ NONE)
include(CTest)
add_test(NAME SuccessfulTest COMMAND "${CMAKE_COMMAND}" --version)
@CASE_CMAKELISTS_SUFFIX_CODE@

@ -0,0 +1 @@
set(CTEST_PROJECT_NAME "@CASE_NAME@")

@ -0,0 +1 @@
(-1|255)

@ -0,0 +1 @@
No tests were found!!!

@ -0,0 +1,2 @@
Start 1: SuccessfulTest
1/1 Test #1: SuccessfulTest ...................\*\*\*\Not Run \(Disabled\) +[0-9.]+ sec

@ -0,0 +1,11 @@
Start 1: SuccessfulTest
1/2 Test #1: SuccessfulTest ................... Passed +[0-9.]+ sec
Start 2: CleanupTest
2/2 Test #2: CleanupTest ......................\*\*\*\Not Run \(Disabled\) +[0-9.]+ sec
+
100% tests passed, 0 tests failed out of 1
+
Total Test time \(real\) = +[0-9.]+ sec
+
The following tests are disabled and did not run:
.*2 \- CleanupTest

@ -0,0 +1,9 @@
50% tests passed, 1 tests failed out of 2
+
Total Test time \(real\) = +[0-9.]+ sec
+
The following tests are disabled and did not run:
.*3 \- DisabledFailingTest
+
The following tests FAILED:
.*2 \- FailingTest \(Failed\)

@ -0,0 +1 @@
(-1|255)

@ -0,0 +1 @@
Unable to find executable: invalidCommand

@ -0,0 +1,17 @@
Start 1: SuccessfulTest
1/3 Test #1: SuccessfulTest ................... Passed +[0-9.]+ sec
Start 2: DisabledTest
2/3 Test #2: DisabledTest .....................\*\*\*\Not Run \(Disabled\) +[0-9.]+ sec
Start 3: NotRunTest
.*
3/3 Test #3: NotRunTest .......................\*\*\*\Not Run +[0-9.]+ sec
+
50% tests passed, 1 tests failed out of 2
+
Total Test time \(real\) = +[0-9.]+ sec
+
The following tests are disabled and did not run:
.*2 \- DisabledTest
+
The following tests FAILED:
.*3 - NotRunTest \(Not Run\)

@ -0,0 +1,13 @@
Start 1: SuccessfulTest
1/3 Test #1: SuccessfulTest ................... Passed +[0-9.]+ sec
Start 2: DisabledTest
2/3 Test #2: DisabledTest .....................\*\*\*\Not Run \(Disabled\) +[0-9.]+ sec
Start 3: SuccessfulCleanupTest
3/3 Test #3: SuccessfulCleanupTest ............ Passed +[0-9.]+ sec
+
100% tests passed, 0 tests failed out of 2
+
Total Test time \(real\) = +[0-9.]+ sec
+
The following tests are disabled and did not run:
.*2 \- DisabledTest

@ -0,0 +1,13 @@
Start 2: DisabledTest
1/3 Test #2: DisabledTest .....................\*\*\*\Not Run \(Disabled\) +[0-9.]+ sec
Start 1: SuccessfulTest
2/3 Test #1: SuccessfulTest ................... Passed +[0-9.]+ sec
Start 3: SuccessfulCleanupTest
3/3 Test #3: SuccessfulCleanupTest ............ Passed +[0-9.]+ sec
+
100% tests passed, 0 tests failed out of 2
+
Total Test time \(real\) = +[0-9.]+ sec
+
The following tests are disabled and did not run:
.*2 \- DisabledTest

@ -0,0 +1 @@
(-1|255)

@ -0,0 +1 @@
Unable to find executable: invalidCommand

@ -0,0 +1,17 @@
Start 1: SuccessfulTest
1/3 Test #1: SuccessfulTest ................... Passed +[0-9.]+ sec
Start 2: DisabledTest
2/3 Test #2: DisabledTest .....................\*\*\*\Not Run \(Disabled\) +[0-9.]+ sec
Start 3: NotRunTest
.*
3/3 Test #3: NotRunTest .......................\*\*\*\Not Run +[0-9.]+ sec
+
50% tests passed, 1 tests failed out of 2
+
Total Test time \(real\) = +[0-9.]+ sec
+
The following tests are disabled and did not run:
.*2 \- DisabledTest
+
The following tests FAILED:
.*3 - NotRunTest \(Not Run\)

@ -0,0 +1,89 @@
include(RunCTest)

function(run_DisableNotRunTest)
  set(CASE_CMAKELISTS_SUFFIX_CODE [[
add_test(NAME DisabledTest COMMAND notACommand --version)
add_test(NAME NotRunTest COMMAND invalidCommand --version)
set_tests_properties(SuccessfulTest PROPERTIES DISABLED false)
set_tests_properties(DisabledTest PROPERTIES DISABLED true)
]])
  run_ctest(DisableNotRunTest)
endfunction()
run_DisableNotRunTest()

function(run_DisableFailingTest)
  set(CASE_CMAKELISTS_SUFFIX_CODE [[
set(someFile "${CMAKE_CURRENT_SOURCE_DIR}/test.cmake")
add_test(NAME FailingTest
         COMMAND ${CMAKE_COMMAND} -E compare_files "${someFile}" "${someFile}xxx")
add_test(NAME DisabledFailingTest
         COMMAND ${CMAKE_COMMAND} -E compare_files "${someFile}" "${someFile}xxx")
set_tests_properties(FailingTest PROPERTIES DISABLED false)
set_tests_properties(DisabledFailingTest PROPERTIES DISABLED true)
]])
  run_ctest(DisableFailingTest)
endfunction()
run_DisableFailingTest()

function(run_DisableSetupTest)
  set(CASE_CMAKELISTS_SUFFIX_CODE [[
add_test(NAME DisabledTest COMMAND "${CMAKE_COMMAND}" --version)
add_test(NAME SuccessfulCleanupTest COMMAND "${CMAKE_COMMAND}" --version)
set_tests_properties(DisabledTest PROPERTIES DISABLED true
                                             FIXTURES_SETUP "Foo")
set_tests_properties(SuccessfulTest PROPERTIES FIXTURES_REQUIRED "Foo")
set_tests_properties(SuccessfulCleanupTest PROPERTIES FIXTURES_CLEANUP "Foo")
]])
  run_ctest(DisableSetupTest)
endfunction()
run_DisableSetupTest()

function(run_DisableRequiredTest)
  set(CASE_CMAKELISTS_SUFFIX_CODE [[
add_test(NAME DisabledTest COMMAND "${CMAKE_COMMAND}" --version)
add_test(NAME SuccessfulCleanupTest COMMAND "${CMAKE_COMMAND}" --version)
set_tests_properties(SuccessfulTest PROPERTIES FIXTURES_SETUP "Foo")
set_tests_properties(DisabledTest PROPERTIES DISABLED true
                                             FIXTURES_REQUIRED "Foo")
set_tests_properties(SuccessfulCleanupTest PROPERTIES FIXTURES_CLEANUP "Foo")
]])
  run_ctest(DisableRequiredTest)
endfunction()
run_DisableRequiredTest()

function(run_DisableCleanupTest)
  set(CASE_CMAKELISTS_SUFFIX_CODE [[
add_test(NAME CleanupTest COMMAND "${CMAKE_COMMAND}" --version)
set_tests_properties(SuccessfulTest PROPERTIES FIXTURES_REQUIRED "Foo")
set_tests_properties(CleanupTest PROPERTIES DISABLED true
                                            FIXTURES_CLEANUP "Foo")
]])
  run_ctest(DisableCleanupTest)
endfunction()
run_DisableCleanupTest()

# Consider a fixture that has a setup test, a cleanup test and a disabled test
# which requires that fixture. Limit the test list with a regular expression
# that matches the disabled test but not the setup or cleanup tests, so the
# initial set of tests to be executed contains just the disabled test. Since
# the only test requiring the fixture is disabled, CTest should not
# automatically add in the setup and cleanup tests for the fixture, since no
# enabled test requires them.
function(run_DisableAllTests)
  set(CASE_CMAKELISTS_SUFFIX_CODE [[
add_test(NAME SetupTest COMMAND "${CMAKE_COMMAND}" --version)
add_test(NAME CleanupTest COMMAND "${CMAKE_COMMAND}" --version)
set_tests_properties(SetupTest PROPERTIES FIXTURES_SETUP "Foo")
set_tests_properties(SuccessfulTest PROPERTIES DISABLED true
                                               FIXTURES_REQUIRED "Foo")
set_tests_properties(CleanupTest PROPERTIES FIXTURES_CLEANUP "Foo")
]])
  run_ctest(DisableAllTests -R Successful)
endfunction()
run_DisableAllTests()

@ -0,0 +1,16 @@
cmake_minimum_required(VERSION 3.7)
set(CTEST_SITE "test-site")
set(CTEST_BUILD_NAME "test-build-name")
set(CTEST_SOURCE_DIRECTORY "@RunCMake_BINARY_DIR@/@CASE_NAME@")
set(CTEST_BINARY_DIRECTORY "@RunCMake_BINARY_DIR@/@CASE_NAME@-build")
set(CTEST_CMAKE_GENERATOR "@RunCMake_GENERATOR@")
set(CTEST_CMAKE_GENERATOR_PLATFORM "@RunCMake_GENERATOR_PLATFORM@")
set(CTEST_CMAKE_GENERATOR_TOOLSET "@RunCMake_GENERATOR_TOOLSET@")
set(CTEST_BUILD_CONFIGURATION "$ENV{CMAKE_CONFIG_TYPE}")
set(ctest_test_args "@CASE_CTEST_TEST_ARGS@")
ctest_start(Experimental)
ctest_configure()
ctest_build()
ctest_test(${ctest_test_args})