googletest: backport GTEST_SKIP to googletest 1.8.1

This commit backports revisions 00938b2b228f3b70d3d9e51f29a1505bdad43f1e and
59f90a338bce2376b540ee239cf4e269bf6d68ad from googletest's master branch to
our included version of googletest, which is based on 1.8.1. It adds the
GTEST_SKIP feature, which is very useful for a project like FreeBSD where
some tests depend on particular system configurations.
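
A rough sketch of the intended usage (the test name and the root check
below are illustrative, not part of this commit):

    #include <unistd.h>

    #include "gtest/gtest.h"

    // Skip, rather than fail, when the environment lacks something the
    // test needs; here, root privileges.
    TEST(ExampleTest, RequiresRoot) {
      if (geteuid() != 0)
        GTEST_SKIP() << "This test must be run as root";
      // ... exercise the privileged functionality here ...
    }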

Reviewed by:	ngie
Obtained from:	github.com/google/googletest
MFC after:	2 months
Sponsored by:	The FreeBSD Foundation
Differential Revision:	https://reviews.freebsd.org/rS345331
commit bda54b8f96
Author: Alan Somers
Date:   2019-03-20 23:15:58 +00:00
Notes:  svn2git 2020-12-20 02:59:44 +00:00
        svn path=/head/; revision=345353

11 changed files with 225 additions and 30 deletions

@@ -217,6 +217,7 @@ if (gtest_build_tests)
test/gtest-typed-test2_test.cc)
cxx_test(gtest_unittest gtest_main)
cxx_test(gtest-unittest-api_test gtest)
cxx_test(gtest_skip_test gtest_main)
############################################################
# C++ tests built with non-standard compiler flags.

@@ -53,7 +53,8 @@ class GTEST_API_ TestPartResult {
enum Type {
kSuccess, // Succeeded.
kNonFatalFailure, // Failed but the test can continue.
kFatalFailure // Failed and the test should be terminated.
kFatalFailure, // Failed and the test should be terminated.
kSkip // Skipped.
};
// C'tor. TestPartResult does NOT have a default constructor.
@@ -89,18 +90,21 @@ class GTEST_API_ TestPartResult {
// Gets the message associated with the test part.
const char* message() const { return message_.c_str(); }
// Returns true iff the test part was skipped.
bool skipped() const { return type_ == kSkip; }
// Returns true iff the test part passed.
bool passed() const { return type_ == kSuccess; }
// Returns true iff the test part failed.
bool failed() const { return type_ != kSuccess; }
// Returns true iff the test part non-fatally failed.
bool nonfatally_failed() const { return type_ == kNonFatalFailure; }
// Returns true iff the test part fatally failed.
bool fatally_failed() const { return type_ == kFatalFailure; }
// Returns true iff the test part failed.
bool failed() const { return fatally_failed() || nonfatally_failed(); }
private:
Type type_;

@@ -440,6 +440,9 @@ class GTEST_API_ Test {
// Returns true iff the current test has a non-fatal failure.
static bool HasNonfatalFailure();
// Returns true iff the current test was skipped.
static bool IsSkipped();
// Returns true iff the current test has a (either fatal or
// non-fatal) failure.
static bool HasFailure() { return HasFatalFailure() || HasNonfatalFailure(); }
@@ -574,7 +577,10 @@ class GTEST_API_ TestResult {
int test_property_count() const;
// Returns true iff the test passed (i.e. no test part failed).
bool Passed() const { return !Failed(); }
bool Passed() const { return !Skipped() && !Failed(); }
// Returns true iff the test was skipped.
bool Skipped() const;
// Returns true iff the test failed.
bool Failed() const;
@@ -854,6 +860,9 @@ class GTEST_API_ TestCase {
// Gets the number of successful tests in this test case.
int successful_test_count() const;
// Gets the number of skipped tests in this test case.
int skipped_test_count() const;
// Gets the number of failed tests in this test case.
int failed_test_count() const;
@@ -936,6 +945,11 @@ class GTEST_API_ TestCase {
return test_info->should_run() && test_info->result()->Passed();
}
// Returns true iff test skipped.
static bool TestSkipped(const TestInfo* test_info) {
return test_info->should_run() && test_info->result()->Skipped();
}
// Returns true iff test failed.
static bool TestFailed(const TestInfo* test_info) {
return test_info->should_run() && test_info->result()->Failed();
@@ -1258,6 +1272,9 @@ class GTEST_API_ UnitTest {
// Gets the number of successful tests.
int successful_test_count() const;
// Gets the number of skipped tests.
int skipped_test_count() const;
// Gets the number of failed tests.
int failed_test_count() const;
@@ -1835,6 +1852,11 @@ class TestWithParam : public Test, public WithParamInterface<T> {
// Macros for indicating success/failure in test code.
// Skips test in runtime.
// Skipping test aborts current function.
// Skipped tests are neither successful nor failed.
#define GTEST_SKIP() GTEST_SKIP_("Skipped")
// ADD_FAILURE unconditionally adds a failure to the current test.
// SUCCEED generates a success - it doesn't automatically make the
// current test successful, as a test is only successful when it has

@@ -1208,7 +1208,10 @@ class NativeArray {
#define GTEST_SUCCESS_(message) \
GTEST_MESSAGE_(message, ::testing::TestPartResult::kSuccess)
// Suppress MSVC warning 4702 (unreachable code) for the code following
#define GTEST_SKIP_(message) \
return GTEST_MESSAGE_(message, ::testing::TestPartResult::kSkip)
// Suppress MSVC warning 4072 (unreachable code) for the code following
// statement if it returns or throws (or doesn't return or throw in some
// situations).
#define GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement) \
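
Note that GTEST_SKIP_() expands to a return statement, so GTEST_SKIP()
can only appear in a function returning void, and it aborts only the
function that invokes it. A hypothetical sketch of the consequence (the
helper below is illustrative, not from this commit):

    // The kSkip test part result is recorded for the current test, but
    // only MaybeSkip() itself returns early; the test body keeps running
    // after the call.
    void MaybeSkip(bool unsupported) {
      if (unsupported)
        GTEST_SKIP() << "feature not supported on this system";
    }

    TEST(SkipSemantics, HelperOnlyReturnsFromItself) {
      MaybeSkip(true);
      // Still reached.  Absent any later failure, the test is reported
      // as skipped because of the kSkip result recorded above.
    }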

@@ -544,6 +544,9 @@ class GTEST_API_ UnitTestImpl {
// Gets the number of successful tests.
int successful_test_count() const;
// Gets the number of skipped tests.
int skipped_test_count() const;
// Gets the number of failed tests.
int failed_test_count() const;

@@ -47,12 +47,16 @@ std::string TestPartResult::ExtractSummary(const char* message) {
// Prints a TestPartResult object.
std::ostream& operator<<(std::ostream& os, const TestPartResult& result) {
return os
<< result.file_name() << ":" << result.line_number() << ": "
<< (result.type() == TestPartResult::kSuccess ? "Success" :
result.type() == TestPartResult::kFatalFailure ? "Fatal failure" :
"Non-fatal failure") << ":\n"
<< result.message() << std::endl;
return os << result.file_name() << ":" << result.line_number() << ": "
<< (result.type() == TestPartResult::kSuccess
? "Success"
: result.type() == TestPartResult::kSkip
? "Skipped"
: result.type() == TestPartResult::kFatalFailure
? "Fatal failure"
: "Non-fatal failure")
<< ":\n"
<< result.message() << std::endl;
}
// Appends a TestPartResult to the array.

@@ -796,6 +796,11 @@ int UnitTestImpl::successful_test_count() const {
return SumOverTestCaseList(test_cases_, &TestCase::successful_test_count);
}
// Gets the number of skipped tests.
int UnitTestImpl::skipped_test_count() const {
return SumOverTestCaseList(test_cases_, &TestCase::skipped_test_count);
}
// Gets the number of failed tests.
int UnitTestImpl::failed_test_count() const {
return SumOverTestCaseList(test_cases_, &TestCase::failed_test_count);
@@ -2207,6 +2212,16 @@ void TestResult::Clear() {
elapsed_time_ = 0;
}
// Returns true iff the test part was skipped.
static bool TestPartSkipped(const TestPartResult& result) {
return result.skipped();
}
// Returns true iff the test was skipped.
bool TestResult::Skipped() const {
return !Failed() && CountIf(test_part_results_, TestPartSkipped) > 0;
}
// Returns true iff the test failed.
bool TestResult::Failed() const {
for (int i = 0; i < total_part_count(); ++i) {
@@ -2511,8 +2526,9 @@ void Test::Run() {
internal::UnitTestImpl* const impl = internal::GetUnitTestImpl();
impl->os_stack_trace_getter()->UponLeavingGTest();
internal::HandleExceptionsInMethodIfSupported(this, &Test::SetUp, "SetUp()");
// We will run the test only if SetUp() was successful.
if (!HasFatalFailure()) {
// We will run the test only if SetUp() was successful and didn't call
// GTEST_SKIP().
if (!HasFatalFailure() && !IsSkipped()) {
impl->os_stack_trace_getter()->UponLeavingGTest();
internal::HandleExceptionsInMethodIfSupported(
this, &Test::TestBody, "the test body");
@@ -2537,6 +2553,11 @@ bool Test::HasNonfatalFailure() {
HasNonfatalFailure();
}
// Returns true iff the current test was skipped.
bool Test::IsSkipped() {
return internal::GetUnitTestImpl()->current_test_result()->Skipped();
}
// class TestInfo
// Constructs a TestInfo object. It assumes ownership of the test factory
@@ -2685,9 +2706,10 @@ void TestInfo::Run() {
factory_, &internal::TestFactoryBase::CreateTest,
"the test fixture's constructor");
// Runs the test if the constructor didn't generate a fatal failure.
// Runs the test if the constructor didn't generate a fatal failure or invoke
// GTEST_SKIP().
// Note that the object will not be null
if (!Test::HasFatalFailure()) {
if (!Test::HasFatalFailure() && !Test::IsSkipped()) {
// This doesn't throw as all user code that can throw are wrapped into
// exception handling code.
test->Run();
@@ -2715,6 +2737,11 @@ int TestCase::successful_test_count() const {
return CountIf(test_info_list_, TestPassed);
}
// Gets the number of skipped tests in this test case.
int TestCase::skipped_test_count() const {
return CountIf(test_info_list_, TestSkipped);
}
// Gets the number of failed tests in this test case.
int TestCase::failed_test_count() const {
return CountIf(test_info_list_, TestFailed);
@@ -2866,6 +2893,8 @@ static std::string FormatTestCaseCount(int test_case_count) {
// between the two when viewing the test result.
static const char * TestPartResultTypeToString(TestPartResult::Type type) {
switch (type) {
case TestPartResult::kSkip:
return "Skipped";
case TestPartResult::kSuccess:
return "Success";
@@ -3119,6 +3148,7 @@ class PrettyUnitTestResultPrinter : public TestEventListener {
private:
static void PrintFailedTests(const UnitTest& unit_test);
static void PrintSkippedTests(const UnitTest& unit_test);
};
// Fired before each iteration of tests starts.
@@ -3187,18 +3217,25 @@ void PrettyUnitTestResultPrinter::OnTestStart(const TestInfo& test_info) {
// Called after an assertion failure.
void PrettyUnitTestResultPrinter::OnTestPartResult(
const TestPartResult& result) {
// If the test part succeeded, we don't need to do anything.
if (result.type() == TestPartResult::kSuccess)
return;
// Print failure message from the assertion (e.g. expected this and got that).
PrintTestPartResult(result);
fflush(stdout);
switch (result.type()) {
// If the test part succeeded, or was skipped,
// we don't need to do anything.
case TestPartResult::kSkip:
case TestPartResult::kSuccess:
return;
default:
// Print failure message from the assertion
// (e.g. expected this and got that).
PrintTestPartResult(result);
fflush(stdout);
}
}
void PrettyUnitTestResultPrinter::OnTestEnd(const TestInfo& test_info) {
if (test_info.result()->Passed()) {
ColoredPrintf(COLOR_GREEN, "[ OK ] ");
} else if (test_info.result()->Skipped()) {
ColoredPrintf(COLOR_GREEN, "[ SKIPPED ] ");
} else {
ColoredPrintf(COLOR_RED, "[ FAILED ] ");
}
@@ -3248,7 +3285,7 @@ void PrettyUnitTestResultPrinter::PrintFailedTests(const UnitTest& unit_test) {
}
for (int j = 0; j < test_case.total_test_count(); ++j) {
const TestInfo& test_info = *test_case.GetTestInfo(j);
if (!test_info.should_run() || test_info.result()->Passed()) {
if (!test_info.should_run() || !test_info.result()->Failed()) {
continue;
}
ColoredPrintf(COLOR_RED, "[ FAILED ] ");
@@ -3259,6 +3296,30 @@ void PrettyUnitTestResultPrinter::PrintFailedTests(const UnitTest& unit_test) {
}
}
// Internal helper for printing the list of skipped tests.
void PrettyUnitTestResultPrinter::PrintSkippedTests(const UnitTest& unit_test) {
const int skipped_test_count = unit_test.skipped_test_count();
if (skipped_test_count == 0) {
return;
}
for (int i = 0; i < unit_test.total_test_case_count(); ++i) {
const TestCase& test_case = *unit_test.GetTestCase(i);
if (!test_case.should_run() || (test_case.skipped_test_count() == 0)) {
continue;
}
for (int j = 0; j < test_case.total_test_count(); ++j) {
const TestInfo& test_info = *test_case.GetTestInfo(j);
if (!test_info.should_run() || !test_info.result()->Skipped()) {
continue;
}
ColoredPrintf(COLOR_GREEN, "[ SKIPPED ] ");
printf("%s.%s", test_case.name(), test_info.name());
printf("\n");
}
}
}
void PrettyUnitTestResultPrinter::OnTestIterationEnd(const UnitTest& unit_test,
int /*iteration*/) {
ColoredPrintf(COLOR_GREEN, "[==========] ");
@@ -3273,6 +3334,13 @@ void PrettyUnitTestResultPrinter::OnTestIterationEnd(const UnitTest& unit_test,
ColoredPrintf(COLOR_GREEN, "[ PASSED ] ");
printf("%s.\n", FormatTestCount(unit_test.successful_test_count()).c_str());
const int skipped_test_count = unit_test.skipped_test_count();
if (skipped_test_count > 0) {
ColoredPrintf(COLOR_GREEN, "[ SKIPPED ] ");
printf("%s, listed below:\n", FormatTestCount(skipped_test_count).c_str());
PrintSkippedTests(unit_test);
}
int num_failures = unit_test.failed_test_count();
if (!unit_test.Passed()) {
const int failed_test_count = unit_test.failed_test_count();
@@ -4540,6 +4608,11 @@ int UnitTest::successful_test_count() const {
return impl()->successful_test_count();
}
// Gets the number of skipped tests.
int UnitTest::skipped_test_count() const {
return impl()->skipped_test_count();
}
// Gets the number of failed tests.
int UnitTest::failed_test_count() const { return impl()->failed_test_count(); }
@@ -4660,7 +4733,8 @@ void UnitTest::AddTestPartResult(
impl_->GetTestPartResultReporterForCurrentThread()->
ReportTestPartResult(result);
if (result_type != TestPartResult::kSuccess) {
if (result_type != TestPartResult::kSuccess &&
result_type != TestPartResult::kSkip) {
// gtest_break_on_failure takes precedence over
// gtest_throw_on_failure. This allows a user to set the latter
// in the code (perhaps in order to use Google Test assertions

@@ -46,9 +46,10 @@ class TestPartResultTest : public Test {
TestPartResultTest()
: r1_(TestPartResult::kSuccess, "foo/bar.cc", 10, "Success!"),
r2_(TestPartResult::kNonFatalFailure, "foo/bar.cc", -1, "Failure!"),
r3_(TestPartResult::kFatalFailure, NULL, -1, "Failure!") {}
r3_(TestPartResult::kFatalFailure, nullptr, -1, "Failure!"),
r4_(TestPartResult::kSkip, "foo/bar.cc", 2, "Skipped!") {}
TestPartResult r1_, r2_, r3_;
TestPartResult r1_, r2_, r3_, r4_;
};
@@ -79,6 +80,7 @@ TEST_F(TestPartResultTest, ResultAccessorsWork) {
EXPECT_FALSE(success.failed());
EXPECT_FALSE(success.nonfatally_failed());
EXPECT_FALSE(success.fatally_failed());
EXPECT_FALSE(success.skipped());
const TestPartResult nonfatal_failure(TestPartResult::kNonFatalFailure,
"file.cc",
@@ -88,6 +90,7 @@ TEST_F(TestPartResultTest, ResultAccessorsWork) {
EXPECT_TRUE(nonfatal_failure.failed());
EXPECT_TRUE(nonfatal_failure.nonfatally_failed());
EXPECT_FALSE(nonfatal_failure.fatally_failed());
EXPECT_FALSE(nonfatal_failure.skipped());
const TestPartResult fatal_failure(TestPartResult::kFatalFailure,
"file.cc",
@@ -97,6 +100,14 @@ TEST_F(TestPartResultTest, ResultAccessorsWork) {
EXPECT_TRUE(fatal_failure.failed());
EXPECT_FALSE(fatal_failure.nonfatally_failed());
EXPECT_TRUE(fatal_failure.fatally_failed());
EXPECT_FALSE(fatal_failure.skipped());
const TestPartResult skip(TestPartResult::kSkip, "file.cc", 42, "message");
EXPECT_FALSE(skip.passed());
EXPECT_FALSE(skip.failed());
EXPECT_FALSE(skip.nonfatally_failed());
EXPECT_FALSE(skip.fatally_failed());
EXPECT_TRUE(skip.skipped());
}
// Tests TestPartResult::type().
@@ -104,23 +115,27 @@ TEST_F(TestPartResultTest, type) {
EXPECT_EQ(TestPartResult::kSuccess, r1_.type());
EXPECT_EQ(TestPartResult::kNonFatalFailure, r2_.type());
EXPECT_EQ(TestPartResult::kFatalFailure, r3_.type());
EXPECT_EQ(TestPartResult::kSkip, r4_.type());
}
// Tests TestPartResult::file_name().
TEST_F(TestPartResultTest, file_name) {
EXPECT_STREQ("foo/bar.cc", r1_.file_name());
EXPECT_STREQ(NULL, r3_.file_name());
EXPECT_STREQ("foo/bar.cc", r4_.file_name());
}
// Tests TestPartResult::line_number().
TEST_F(TestPartResultTest, line_number) {
EXPECT_EQ(10, r1_.line_number());
EXPECT_EQ(-1, r2_.line_number());
EXPECT_EQ(2, r4_.line_number());
}
// Tests TestPartResult::message().
TEST_F(TestPartResultTest, message) {
EXPECT_STREQ("Success!", r1_.message());
EXPECT_STREQ("Skipped!", r4_.message());
}
// Tests TestPartResult::passed().
@@ -128,6 +143,7 @@ TEST_F(TestPartResultTest, Passed) {
EXPECT_TRUE(r1_.passed());
EXPECT_FALSE(r2_.passed());
EXPECT_FALSE(r3_.passed());
EXPECT_FALSE(r4_.passed());
}
// Tests TestPartResult::failed().
@@ -135,6 +151,15 @@ TEST_F(TestPartResultTest, Failed) {
EXPECT_FALSE(r1_.failed());
EXPECT_TRUE(r2_.failed());
EXPECT_TRUE(r3_.failed());
EXPECT_FALSE(r4_.failed());
}
// Tests TestPartResult::skipped().
TEST_F(TestPartResultTest, Skipped) {
EXPECT_FALSE(r1_.skipped());
EXPECT_FALSE(r2_.skipped());
EXPECT_FALSE(r3_.skipped());
EXPECT_TRUE(r4_.skipped());
}
// Tests TestPartResult::fatally_failed().
@@ -142,6 +167,7 @@ TEST_F(TestPartResultTest, FatallyFailed) {
EXPECT_FALSE(r1_.fatally_failed());
EXPECT_FALSE(r2_.fatally_failed());
EXPECT_TRUE(r3_.fatally_failed());
EXPECT_FALSE(r4_.fatally_failed());
}
// Tests TestPartResult::nonfatally_failed().
@@ -149,6 +175,7 @@ TEST_F(TestPartResultTest, NonfatallyFailed) {
EXPECT_FALSE(r1_.nonfatally_failed());
EXPECT_TRUE(r2_.nonfatally_failed());
EXPECT_FALSE(r3_.nonfatally_failed());
EXPECT_FALSE(r4_.nonfatally_failed());
}
// Tests the TestPartResultArray class.

@@ -37,10 +37,11 @@
#include "test/googletest-message-test.cc"
#include "test/googletest-options-test.cc"
#include "test/googletest-port-test.cc"
#include "test/googletest-test-part-test.cc"
#include "test/gtest-typed-test2_test.cc"
#include "test/gtest-typed-test_test.cc"
#include "test/gtest_pred_impl_unittest.cc"
#include "test/gtest_prod_test.cc"
#include "test/googletest-test-part-test.cc"
#include "test/gtest-typed-test_test.cc"
#include "test/gtest-typed-test2_test.cc"
#include "test/gtest_skip_test.cc"
#include "test/gtest_unittest.cc"
#include "test/production.cc"

@@ -0,0 +1,55 @@
// Copyright 2008 Google Inc.
// All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Author: arseny.aprelev@gmail.com (Arseny Aprelev)
//
#include "gtest/gtest.h"
using ::testing::Test;
TEST(SkipTest, DoesSkip) {
GTEST_SKIP();
EXPECT_EQ(0, 1);
}
class Fixture : public Test {
protected:
void SetUp() override {
GTEST_SKIP() << "skipping all tests for this fixture";
}
};
TEST_F(Fixture, SkipsOneTest) {
EXPECT_EQ(5, 7);
}
TEST_F(Fixture, SkipsAnotherTest) {
EXPECT_EQ(99, 100);
}
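
Since GTEST_SKIP() in SetUp() prevents the test body from running, a
fixture's TearDown() may want to know whether setup completed. The new
Test::IsSkipped() accessor supports that; a hypothetical sketch (the
device path and fixture are illustrative only):

    #include <fcntl.h>
    #include <unistd.h>

    #include "gtest/gtest.h"

    class DeviceTest : public ::testing::Test {
     protected:
      void SetUp() override {
        fd_ = open("/dev/md0", O_RDWR);
        if (fd_ < 0)
          GTEST_SKIP() << "test device not available";
      }
      void TearDown() override {
        if (IsSkipped())
          return;  // SetUp() bailed out; there is no descriptor to close.
        close(fd_);
      }
      int fd_ = -1;
    };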

@@ -18,6 +18,7 @@ GTESTS+= gtest_prod_test
GTESTS+= gtest_sole_header_test
GTESTS+= googletest-test-part-test
GTESTS+= gtest-typed-test_test
GTESTS+= gtest_skip_test
GTESTS+= gtest_unittest
CXXFLAGS+= -I${GOOGLETEST_SRCROOT}/include