Diffstat (limited to 'googletest/src')
-rw-r--r--  googletest/src/gtest-death-test.cc   |  119
-rw-r--r--  googletest/src/gtest-filepath.cc     |   14
-rw-r--r--  googletest/src/gtest-internal-inl.h  |  180
-rw-r--r--  googletest/src/gtest-matchers.cc     |   55
-rw-r--r--  googletest/src/gtest-port.cc         |  144
-rw-r--r--  googletest/src/gtest-printers.cc     |   29
-rw-r--r--  googletest/src/gtest-test-part.cc    |    2
-rw-r--r--  googletest/src/gtest-typed-test.cc   |    4
-rw-r--r--  googletest/src/gtest.cc              | 1127
-rw-r--r--  googletest/src/gtest_main.cc         |   10
10 files changed, 924 insertions(+), 760 deletions(-)
diff --git a/googletest/src/gtest-death-test.cc b/googletest/src/gtest-death-test.cc
index 44247e81..e5ec287d 100644
--- a/googletest/src/gtest-death-test.cc
+++ b/googletest/src/gtest-death-test.cc
@@ -65,12 +65,13 @@
# endif // GTEST_OS_QNX
# if GTEST_OS_FUCHSIA
+# include <lib/fdio/fd.h>
# include <lib/fdio/io.h>
# include <lib/fdio/spawn.h>
-# include <lib/fdio/util.h>
-# include <lib/zx/socket.h>
+# include <lib/zx/channel.h>
# include <lib/zx/port.h>
# include <lib/zx/process.h>
+# include <lib/zx/socket.h>
# include <zircon/processargs.h>
# include <zircon/syscalls.h>
# include <zircon/syscalls/policy.h>
@@ -274,8 +275,6 @@ static const int kFuchsiaReadPipeFd = 3;
// statement, which is not allowed; THREW means that the test statement
// returned control by throwing an exception. IN_PROGRESS means the test
// has not yet concluded.
-// FIXME: Unify names and possibly values for
-// AbortReason, DeathTestOutcome, and flag characters above.
enum DeathTestOutcome { IN_PROGRESS, DIED, LIVED, RETURNED, THREW };
// Routine for aborting the program which is safe to call from an
@@ -564,7 +563,7 @@ static ::std::string FormatDeathTestOutput(const ::std::string& output) {
// status_ok: true if exit_status is acceptable in the context of
// this particular death test, which fails if it is false
//
-// Returns true iff all of the above conditions are met. Otherwise, the
+// Returns true if all of the above conditions are met. Otherwise, the
// first failing condition, in the order given above, is the one that is
// reported. Also sets the last death test message string.
bool DeathTestImpl::Passed(bool status_ok) {
@@ -755,9 +754,9 @@ DeathTest::TestRole WindowsDeathTest::AssumeRole() {
FALSE, // The initial state is non-signalled.
nullptr)); // The event is unnamed.
GTEST_DEATH_TEST_CHECK_(event_handle_.Get() != nullptr);
- const std::string filter_flag =
- std::string("--") + GTEST_FLAG_PREFIX_ + kFilterFlag + "=" +
- info->test_case_name() + "." + info->name();
+ const std::string filter_flag = std::string("--") + GTEST_FLAG_PREFIX_ +
+ kFilterFlag + "=" + info->test_suite_name() +
+ "." + info->name();
const std::string internal_flag =
std::string("--") + GTEST_FLAG_PREFIX_ + kInternalRunDeathTestFlag +
"=" + file_ + "|" + StreamableToString(line_) + "|" +
@@ -833,7 +832,7 @@ class FuchsiaDeathTest : public DeathTestImpl {
std::string captured_stderr_;
zx::process child_process_;
- zx::port port_;
+ zx::channel exception_channel_;
zx::socket stderr_socket_;
};
@@ -878,43 +877,53 @@ class Arguments {
int FuchsiaDeathTest::Wait() {
const int kProcessKey = 0;
const int kSocketKey = 1;
+ const int kExceptionKey = 2;
if (!spawned())
return 0;
- // Register to wait for the child process to terminate.
+ // Create a port to wait for socket/task/exception events.
zx_status_t status_zx;
+ zx::port port;
+ status_zx = zx::port::create(0, &port);
+ GTEST_DEATH_TEST_CHECK_(status_zx == ZX_OK);
+
+ // Register to wait for the child process to terminate.
status_zx = child_process_.wait_async(
- port_, kProcessKey, ZX_PROCESS_TERMINATED, ZX_WAIT_ASYNC_ONCE);
+ port, kProcessKey, ZX_PROCESS_TERMINATED, ZX_WAIT_ASYNC_ONCE);
GTEST_DEATH_TEST_CHECK_(status_zx == ZX_OK);
+
// Register to wait for the socket to be readable or closed.
status_zx = stderr_socket_.wait_async(
- port_, kSocketKey, ZX_SOCKET_READABLE | ZX_SOCKET_PEER_CLOSED,
- ZX_WAIT_ASYNC_REPEATING);
+ port, kSocketKey, ZX_SOCKET_READABLE | ZX_SOCKET_PEER_CLOSED,
+ ZX_WAIT_ASYNC_ONCE);
+ GTEST_DEATH_TEST_CHECK_(status_zx == ZX_OK);
+
+ // Register to wait for an exception.
+ status_zx = exception_channel_.wait_async(
+ port, kExceptionKey, ZX_CHANNEL_READABLE, ZX_WAIT_ASYNC_ONCE);
GTEST_DEATH_TEST_CHECK_(status_zx == ZX_OK);
bool process_terminated = false;
bool socket_closed = false;
do {
zx_port_packet_t packet = {};
- status_zx = port_.wait(zx::time::infinite(), &packet);
+ status_zx = port.wait(zx::time::infinite(), &packet);
GTEST_DEATH_TEST_CHECK_(status_zx == ZX_OK);
- if (packet.key == kProcessKey) {
- if (ZX_PKT_IS_EXCEPTION(packet.type)) {
- // Process encountered an exception. Kill it directly rather than
- // letting other handlers process the event. We will get a second
- // kProcessKey event when the process actually terminates.
- status_zx = child_process_.kill();
- GTEST_DEATH_TEST_CHECK_(status_zx == ZX_OK);
- } else {
- // Process terminated.
- GTEST_DEATH_TEST_CHECK_(ZX_PKT_IS_SIGNAL_ONE(packet.type));
- GTEST_DEATH_TEST_CHECK_(packet.signal.observed & ZX_PROCESS_TERMINATED);
- process_terminated = true;
- }
+ if (packet.key == kExceptionKey) {
+ // Process encountered an exception. Kill it directly rather than
+ // letting other handlers process the event. We will get a kProcessKey
+ // event when the process actually terminates.
+ status_zx = child_process_.kill();
+ GTEST_DEATH_TEST_CHECK_(status_zx == ZX_OK);
+ } else if (packet.key == kProcessKey) {
+ // Process terminated.
+ GTEST_DEATH_TEST_CHECK_(ZX_PKT_IS_SIGNAL_ONE(packet.type));
+ GTEST_DEATH_TEST_CHECK_(packet.signal.observed & ZX_PROCESS_TERMINATED);
+ process_terminated = true;
} else if (packet.key == kSocketKey) {
- GTEST_DEATH_TEST_CHECK_(ZX_PKT_IS_SIGNAL_REP(packet.type));
+ GTEST_DEATH_TEST_CHECK_(ZX_PKT_IS_SIGNAL_ONE(packet.type));
if (packet.signal.observed & ZX_SOCKET_READABLE) {
// Read data from the socket.
constexpr size_t kBufferSize = 1024;
@@ -931,6 +940,10 @@ int FuchsiaDeathTest::Wait() {
socket_closed = true;
} else {
GTEST_DEATH_TEST_CHECK_(status_zx == ZX_ERR_SHOULD_WAIT);
+ status_zx = stderr_socket_.wait_async(
+ port, kSocketKey, ZX_SOCKET_READABLE | ZX_SOCKET_PEER_CLOSED,
+ ZX_WAIT_ASYNC_ONCE);
+ GTEST_DEATH_TEST_CHECK_(status_zx == ZX_OK);
}
} else {
GTEST_DEATH_TEST_CHECK_(packet.signal.observed & ZX_SOCKET_PEER_CLOSED);
@@ -974,9 +987,9 @@ DeathTest::TestRole FuchsiaDeathTest::AssumeRole() {
FlushInfoLog();
// Build the child process command line.
- const std::string filter_flag =
- std::string("--") + GTEST_FLAG_PREFIX_ + kFilterFlag + "="
- + info->test_case_name() + "." + info->name();
+ const std::string filter_flag = std::string("--") + GTEST_FLAG_PREFIX_ +
+ kFilterFlag + "=" + info->test_suite_name() +
+ "." + info->name();
const std::string internal_flag =
std::string("--") + GTEST_FLAG_PREFIX_ + kInternalRunDeathTestFlag + "="
+ file_ + "|"
@@ -990,16 +1003,16 @@ DeathTest::TestRole FuchsiaDeathTest::AssumeRole() {
// Build the pipe for communication with the child.
zx_status_t status;
zx_handle_t child_pipe_handle;
- uint32_t type;
- status = fdio_pipe_half(&child_pipe_handle, &type);
- GTEST_DEATH_TEST_CHECK_(status >= 0);
- set_read_fd(status);
+ int child_pipe_fd;
+ status = fdio_pipe_half(&child_pipe_fd, &child_pipe_handle);
+ GTEST_DEATH_TEST_CHECK_(status == ZX_OK);
+ set_read_fd(child_pipe_fd);
// Set the pipe handle for the child.
fdio_spawn_action_t spawn_actions[2] = {};
fdio_spawn_action_t* add_handle_action = &spawn_actions[0];
add_handle_action->action = FDIO_SPAWN_ACTION_ADD_HANDLE;
- add_handle_action->h.id = PA_HND(type, kFuchsiaReadPipeFd);
+ add_handle_action->h.id = PA_HND(PA_FD, kFuchsiaReadPipeFd);
add_handle_action->h.handle = child_pipe_handle;
// Create a socket pair that will be used to receive the child process' stderr.
@@ -1008,10 +1021,8 @@ DeathTest::TestRole FuchsiaDeathTest::AssumeRole() {
zx::socket::create(0, &stderr_producer_socket, &stderr_socket_);
GTEST_DEATH_TEST_CHECK_(status >= 0);
int stderr_producer_fd = -1;
- zx_handle_t producer_handle[1] = { stderr_producer_socket.release() };
- uint32_t producer_handle_type[1] = { PA_FDIO_SOCKET };
- status = fdio_create_fd(
- producer_handle, producer_handle_type, 1, &stderr_producer_fd);
+ status =
+ fdio_fd_create(stderr_producer_socket.release(), &stderr_producer_fd);
GTEST_DEATH_TEST_CHECK_(status >= 0);
// Make the stderr socket nonblocking.
@@ -1033,12 +1044,11 @@ DeathTest::TestRole FuchsiaDeathTest::AssumeRole() {
child_job, ZX_JOB_POL_RELATIVE, ZX_JOB_POL_BASIC, &policy, 1);
GTEST_DEATH_TEST_CHECK_(status == ZX_OK);
- // Create an exception port and attach it to the |child_job|, to allow
+ // Create an exception channel attached to the |child_job|, to allow
// us to suppress the system default exception handler from firing.
- status = zx::port::create(0, &port_);
- GTEST_DEATH_TEST_CHECK_(status == ZX_OK);
- status = zx_task_bind_exception_port(
- child_job, port_.get(), 0 /* key */, 0 /*options */);
+ status =
+ zx_task_create_exception_channel(
+ child_job, 0, exception_channel_.reset_and_get_address());
GTEST_DEATH_TEST_CHECK_(status == ZX_OK);
// Spawn the child process.
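A minimal sketch of the exception-channel setup this hunk switches to, assuming the same lib/zx wrappers the file already uses; the helper name, signature, and key parameter are illustrative rather than anything googletest defines. It shows the two calls involved: creating the channel on the job (which also suppresses the system crash handler) and arming a one-shot port wait so the caller's loop sees a packet when a crash report arrives.

```cpp
#include <lib/zx/channel.h>
#include <lib/zx/job.h>
#include <lib/zx/port.h>
#include <zircon/syscalls.h>

// Attach an exception channel to |job| and queue a one-shot packet with
// |exception_key| on |port| once the channel becomes readable (i.e. a task
// in the job has crashed).
zx_status_t WatchJobForCrashes(const zx::job& job, const zx::port& port,
                               uint64_t exception_key,
                               zx::channel* exception_channel) {
  zx_status_t status = zx_task_create_exception_channel(
      job.get(), 0 /* options */, exception_channel->reset_and_get_address());
  if (status != ZX_OK) return status;
  return exception_channel->wait_async(port, exception_key,
                                       ZX_CHANNEL_READABLE, ZX_WAIT_ASYNC_ONCE);
}
```

In the patch itself the channel is created here in AssumeRole() and the wait is armed later in Wait(), but the calls are the same.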
@@ -1274,6 +1284,9 @@ static int ExecDeathTestChildMain(void* child_arg) {
// correct answer.
static void StackLowerThanAddress(const void* ptr,
bool* result) GTEST_NO_INLINE_;
+// HWAddressSanitizer adds a random tag to the MSB of the local variable address,
+// making the comparison result unpredictable.
+GTEST_ATTRIBUTE_NO_SANITIZE_HWADDRESS_
static void StackLowerThanAddress(const void* ptr, bool* result) {
int dummy;
*result = (&dummy < ptr);
@@ -1281,6 +1294,7 @@ static void StackLowerThanAddress(const void* ptr, bool* result) {
// Make sure AddressSanitizer does not tamper with the stack here.
GTEST_ATTRIBUTE_NO_SANITIZE_ADDRESS_
+GTEST_ATTRIBUTE_NO_SANITIZE_HWADDRESS_
static bool StackGrowsDown() {
int dummy;
bool result;
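For context on why these no-sanitize annotations are needed: the probe compares the address of a local variable in a non-inlined callee against an address taken in the caller, and HWAddressSanitizer stores a random tag in the upper bits of each address, which would make that comparison meaningless. Below is a standalone sketch of the technique, using plain GCC/Clang attribute syntax instead of the GTEST_ wrapper macros and omitting the sanitizer attributes for brevity.

```cpp
#include <cstdio>

// Kept out-of-line so |dummy| really lives in a deeper stack frame than the
// caller's local. googletest additionally disables ASan/HWASan here via its
// attribute macros; those are omitted in this sketch.
__attribute__((noinline))
static void StackLowerThanAddress(const void* ptr, bool* result) {
  int dummy = 0;
  *result = (&dummy < ptr);  // callee frame below caller frame => grows down
}

static bool StackGrowsDown() {
  int dummy = 0;
  bool result = false;
  StackLowerThanAddress(&dummy, &result);
  return result;
}

int main() {
  std::printf("stack grows %s\n", StackGrowsDown() ? "down" : "up");
  return 0;
}
```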
@@ -1350,7 +1364,7 @@ static pid_t ExecDeathTestSpawnChild(char* const* argv, int close_fd) {
if (!use_fork) {
static const bool stack_grows_down = StackGrowsDown();
- const size_t stack_size = getpagesize();
+ const auto stack_size = static_cast<size_t>(getpagesize());
// MMAP_ANONYMOUS is not defined on Mac, so we use MAP_ANON instead.
void* const stack = mmap(nullptr, stack_size, PROT_READ | PROT_WRITE,
MAP_ANON | MAP_PRIVATE, -1, 0);
@@ -1366,8 +1380,9 @@ static pid_t ExecDeathTestSpawnChild(char* const* argv, int close_fd) {
void* const stack_top =
static_cast<char*>(stack) +
(stack_grows_down ? stack_size - kMaxStackAlignment : 0);
- GTEST_DEATH_TEST_CHECK_(stack_size > kMaxStackAlignment &&
- reinterpret_cast<intptr_t>(stack_top) % kMaxStackAlignment == 0);
+ GTEST_DEATH_TEST_CHECK_(
+ static_cast<size_t>(stack_size) > kMaxStackAlignment &&
+ reinterpret_cast<uintptr_t>(stack_top) % kMaxStackAlignment == 0);
child_pid = clone(&ExecDeathTestChildMain, stack_top, SIGCHLD, &args);
@@ -1413,9 +1428,9 @@ DeathTest::TestRole ExecDeathTest::AssumeRole() {
// it be closed when the child process does an exec:
GTEST_DEATH_TEST_CHECK_(fcntl(pipe_fd[1], F_SETFD, 0) != -1);
- const std::string filter_flag =
- std::string("--") + GTEST_FLAG_PREFIX_ + kFilterFlag + "="
- + info->test_case_name() + "." + info->name();
+ const std::string filter_flag = std::string("--") + GTEST_FLAG_PREFIX_ +
+ kFilterFlag + "=" + info->test_suite_name() +
+ "." + info->name();
const std::string internal_flag =
std::string("--") + GTEST_FLAG_PREFIX_ + kInternalRunDeathTestFlag + "="
+ file_ + "|" + StreamableToString(line_) + "|"
@@ -1523,8 +1538,6 @@ static int GetStatusFileDescriptor(unsigned int parent_process_id,
StreamableToString(parent_process_id));
}
- // FIXME: Replace the following check with a
- // compile-time assertion when available.
GTEST_CHECK_(sizeof(HANDLE) <= sizeof(size_t));
const HANDLE write_handle =
diff --git a/googletest/src/gtest-filepath.cc b/googletest/src/gtest-filepath.cc
index d708b337..322fbb1b 100644
--- a/googletest/src/gtest-filepath.cc
+++ b/googletest/src/gtest-filepath.cc
@@ -38,9 +38,6 @@
#elif GTEST_OS_WINDOWS
# include <direct.h>
# include <io.h>
-#elif GTEST_OS_SYMBIAN
-// Symbian OpenC has PATH_MAX in sys/syslimits.h
-# include <sys/syslimits.h>
#else
# include <limits.h>
# include <climits> // Some Linux distributions define PATH_MAX here.
@@ -95,8 +92,9 @@ static bool IsPathSeparator(char c) {
// Returns the current working directory, or "" if unsuccessful.
FilePath FilePath::GetCurrentDir() {
-#if GTEST_OS_WINDOWS_MOBILE || GTEST_OS_WINDOWS_PHONE || GTEST_OS_WINDOWS_RT
- // Windows CE doesn't have a current directory, so we just return
+#if GTEST_OS_WINDOWS_MOBILE || GTEST_OS_WINDOWS_PHONE || \
+ GTEST_OS_WINDOWS_RT || ARDUINO
+ // Windows CE and Arduino don't have a current directory, so we just return
// something reasonable.
return FilePath(kCurrentDirectoryString);
#elif GTEST_OS_WINDOWS
@@ -165,7 +163,7 @@ FilePath FilePath::RemoveFileName() const {
const char* const last_sep = FindLastPathSeparator();
std::string dir;
if (last_sep) {
- dir = std::string(c_str(), last_sep + 1 - c_str());
+ dir = std::string(c_str(), static_cast<size_t>(last_sep + 1 - c_str()));
} else {
dir = kCurrentDirectoryString;
}
@@ -250,9 +248,6 @@ bool FilePath::DirectoryExists() const {
// root directory per disk drive.)
bool FilePath::IsRootDirectory() const {
#if GTEST_OS_WINDOWS
- // FIXME: on Windows a network share like
- // \\server\share can be a root directory, although it cannot be the
- // current directory. Handle this properly.
return pathname_.length() == 3 && IsAbsolutePath();
#else
return pathname_.length() == 1 && IsPathSeparator(pathname_.c_str()[0]);
@@ -350,7 +345,6 @@ FilePath FilePath::RemoveTrailingPathSeparator() const {
// Removes any redundant separators that might be in the pathname.
// For example, "bar///foo" becomes "bar/foo". Does not eliminate other
// redundancies that might be in a pathname involving "." or "..".
-// FIXME: handle Windows network shares (e.g. \\server\share).
void FilePath::Normalize() {
if (pathname_.c_str() == nullptr) {
pathname_ = "";
diff --git a/googletest/src/gtest-internal-inl.h b/googletest/src/gtest-internal-inl.h
index 55d3a694..e29d9927 100644
--- a/googletest/src/gtest-internal-inl.h
+++ b/googletest/src/gtest-internal-inl.h
@@ -99,14 +99,14 @@ const char kFlagfileFlag[] = "flagfile";
// A valid random seed must be in [1, kMaxRandomSeed].
const int kMaxRandomSeed = 99999;
-// g_help_flag is true iff the --help flag or an equivalent form is
+// g_help_flag is true if the --help flag or an equivalent form is
// specified on the command line.
GTEST_API_ extern bool g_help_flag;
// Returns the current time in milliseconds.
GTEST_API_ TimeInMillis GetTimeInMillis();
-// Returns true iff Google Test should use colors in the output.
+// Returns true if Google Test should use colors in the output.
GTEST_API_ bool ShouldUseColor(bool stdout_is_tty);
// Formats the given time in milliseconds as seconds.
@@ -231,7 +231,7 @@ GTEST_API_ std::string CodePointToUtf8(UInt32 code_point);
// Converts a wide string to a narrow string in UTF-8 encoding.
// The wide string is assumed to have the following encoding:
-// UTF-16 if sizeof(wchar_t) == 2 (on Windows, Cygwin, Symbian OS)
+// UTF-16 if sizeof(wchar_t) == 2 (on Windows, Cygwin)
// UTF-32 if sizeof(wchar_t) == 4 (on Linux)
// Parameter str points to a null-terminated wide string.
// Parameter num_chars may additionally limit the number
@@ -266,7 +266,7 @@ GTEST_API_ bool ShouldShard(const char* total_shards_str,
GTEST_API_ Int32 Int32FromEnvOrDie(const char* env_var, Int32 default_val);
// Given the total number of shards, the shard index, and the test id,
-// returns true iff the test should be run on this shard. The test id is
+// returns true if the test should be run on this shard. The test id is
// some arbitrary but unique non-negative integer assigned to each test
// method. Assumes that 0 <= shard_index < total_shards.
GTEST_API_ bool ShouldRunTestOnShard(
@@ -298,7 +298,8 @@ void ForEach(const Container& c, Functor functor) {
// in range [0, v.size()).
template <typename E>
inline E GetElementOr(const std::vector<E>& v, int i, E default_value) {
- return (i < 0 || i >= static_cast<int>(v.size())) ? default_value : v[i];
+ return (i < 0 || i >= static_cast<int>(v.size())) ? default_value
+ : v[static_cast<size_t>(i)];
}
// Performs an in-place shuffle of a range of the vector's elements.
@@ -320,8 +321,11 @@ void ShuffleRange(internal::Random* random, int begin, int end,
// http://en.wikipedia.org/wiki/Fisher-Yates_shuffle
for (int range_width = end - begin; range_width >= 2; range_width--) {
const int last_in_range = begin + range_width - 1;
- const int selected = begin + random->Generate(range_width);
- std::swap((*v)[selected], (*v)[last_in_range]);
+ const int selected =
+ begin +
+ static_cast<int>(random->Generate(static_cast<UInt32>(range_width)));
+ std::swap((*v)[static_cast<size_t>(selected)],
+ (*v)[static_cast<size_t>(last_in_range)]);
}
}
@@ -348,7 +352,7 @@ class TestPropertyKeyIs {
// TestPropertyKeyIs has NO default constructor.
explicit TestPropertyKeyIs(const std::string& key) : key_(key) {}
- // Returns true iff the test name of test property matches on key_.
+ // Returns true if the test name of test property matches on key_.
bool operator()(const TestProperty& test_property) const {
return test_property.key() == key_;
}
@@ -381,17 +385,17 @@ class GTEST_API_ UnitTestOptions {
// Functions for processing the gtest_filter flag.
- // Returns true iff the wildcard pattern matches the string. The
+ // Returns true if the wildcard pattern matches the string. The
// first ':' or '\0' character in pattern marks the end of it.
//
// This recursive algorithm isn't very efficient, but is clear and
// works well enough for matching test names, which are short.
static bool PatternMatchesString(const char *pattern, const char *str);
- // Returns true iff the user-specified filter matches the test case
+ // Returns true if the user-specified filter matches the test suite
// name and the test name.
- static bool FilterMatchesTest(const std::string &test_case_name,
- const std::string &test_name);
+ static bool FilterMatchesTest(const std::string& test_suite_name,
+ const std::string& test_name);
#if GTEST_OS_WINDOWS
// Function for supporting the gtest_catch_exception flag.
@@ -529,18 +533,18 @@ class GTEST_API_ UnitTestImpl {
void SetTestPartResultReporterForCurrentThread(
TestPartResultReporterInterface* reporter);
- // Gets the number of successful test cases.
- int successful_test_case_count() const;
+ // Gets the number of successful test suites.
+ int successful_test_suite_count() const;
- // Gets the number of failed test cases.
- int failed_test_case_count() const;
+ // Gets the number of failed test suites.
+ int failed_test_suite_count() const;
- // Gets the number of all test cases.
- int total_test_case_count() const;
+ // Gets the number of all test suites.
+ int total_test_suite_count() const;
- // Gets the number of all test cases that contain at least one test
+ // Gets the number of all test suites that contain at least one test
// that should run.
- int test_case_to_run_count() const;
+ int test_suite_to_run_count() const;
// Gets the number of successful tests.
int successful_test_count() const;
@@ -573,27 +577,32 @@ class GTEST_API_ UnitTestImpl {
// Gets the elapsed time, in milliseconds.
TimeInMillis elapsed_time() const { return elapsed_time_; }
- // Returns true iff the unit test passed (i.e. all test cases passed).
+ // Returns true if the unit test passed (i.e. all test suites passed).
bool Passed() const { return !Failed(); }
- // Returns true iff the unit test failed (i.e. some test case failed
+ // Returns true if the unit test failed (i.e. some test suite failed
// or something outside of all tests failed).
bool Failed() const {
- return failed_test_case_count() > 0 || ad_hoc_test_result()->Failed();
+ return failed_test_suite_count() > 0 || ad_hoc_test_result()->Failed();
}
- // Gets the i-th test case among all the test cases. i can range from 0 to
- // total_test_case_count() - 1. If i is not in that range, returns NULL.
- const TestCase* GetTestCase(int i) const {
- const int index = GetElementOr(test_case_indices_, i, -1);
- return index < 0 ? nullptr : test_cases_[i];
+ // Gets the i-th test suite among all the test suites. i can range from 0 to
+ // total_test_suite_count() - 1. If i is not in that range, returns NULL.
+ const TestSuite* GetTestSuite(int i) const {
+ const int index = GetElementOr(test_suite_indices_, i, -1);
+ return index < 0 ? nullptr : test_suites_[static_cast<size_t>(i)];
}
- // Gets the i-th test case among all the test cases. i can range from 0 to
- // total_test_case_count() - 1. If i is not in that range, returns NULL.
- TestCase* GetMutableTestCase(int i) {
- const int index = GetElementOr(test_case_indices_, i, -1);
- return index < 0 ? nullptr : test_cases_[index];
+ // Legacy API is deprecated but still available
+#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_
+ const TestCase* GetTestCase(int i) const { return GetTestSuite(i); }
+#endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_
+
+ // Gets the i-th test suite among all the test suites. i can range from 0 to
+ // total_test_suite_count() - 1. If i is not in that range, returns NULL.
+ TestSuite* GetMutableSuiteCase(int i) {
+ const int index = GetElementOr(test_suite_indices_, i, -1);
+ return index < 0 ? nullptr : test_suites_[static_cast<size_t>(index)];
}
// Provides access to the event listener list.
@@ -630,30 +639,38 @@ class GTEST_API_ UnitTestImpl {
// trace but Bar() and CurrentOsStackTraceExceptTop() won't.
std::string CurrentOsStackTraceExceptTop(int skip_count) GTEST_NO_INLINE_;
- // Finds and returns a TestCase with the given name. If one doesn't
+ // Finds and returns a TestSuite with the given name. If one doesn't
// exist, creates one and returns it.
//
// Arguments:
//
- // test_case_name: name of the test case
+ // test_suite_name: name of the test suite
// type_param: the name of the test's type parameter, or NULL if
// this is not a typed or a type-parameterized test.
- // set_up_tc: pointer to the function that sets up the test case
- // tear_down_tc: pointer to the function that tears down the test case
- TestCase* GetTestCase(const char* test_case_name,
- const char* type_param,
- Test::SetUpTestCaseFunc set_up_tc,
- Test::TearDownTestCaseFunc tear_down_tc);
+ // set_up_tc: pointer to the function that sets up the test suite
+ // tear_down_tc: pointer to the function that tears down the test suite
+ TestSuite* GetTestSuite(const char* test_suite_name, const char* type_param,
+ internal::SetUpTestSuiteFunc set_up_tc,
+ internal::TearDownTestSuiteFunc tear_down_tc);
+
+// Legacy API is deprecated but still available
+#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_
+ TestCase* GetTestCase(const char* test_case_name, const char* type_param,
+ internal::SetUpTestSuiteFunc set_up_tc,
+ internal::TearDownTestSuiteFunc tear_down_tc) {
+ return GetTestSuite(test_case_name, type_param, set_up_tc, tear_down_tc);
+ }
+#endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_
// Adds a TestInfo to the unit test.
//
// Arguments:
//
- // set_up_tc: pointer to the function that sets up the test case
- // tear_down_tc: pointer to the function that tears down the test case
+ // set_up_tc: pointer to the function that sets up the test suite
+ // tear_down_tc: pointer to the function that tears down the test suite
// test_info: the TestInfo object
- void AddTestInfo(Test::SetUpTestCaseFunc set_up_tc,
- Test::TearDownTestCaseFunc tear_down_tc,
+ void AddTestInfo(internal::SetUpTestSuiteFunc set_up_tc,
+ internal::TearDownTestSuiteFunc tear_down_tc,
TestInfo* test_info) {
// In order to support thread-safe death tests, we need to
// remember the original working directory when the test program
@@ -668,21 +685,20 @@ class GTEST_API_ UnitTestImpl {
<< "Failed to get the current working directory.";
}
- GetTestCase(test_info->test_case_name(),
- test_info->type_param(),
- set_up_tc,
- tear_down_tc)->AddTestInfo(test_info);
+ GetTestSuite(test_info->test_suite_name(), test_info->type_param(),
+ set_up_tc, tear_down_tc)
+ ->AddTestInfo(test_info);
}
- // Returns ParameterizedTestCaseRegistry object used to keep track of
+ // Returns ParameterizedTestSuiteRegistry object used to keep track of
// value-parameterized tests and instantiate and register them.
- internal::ParameterizedTestCaseRegistry& parameterized_test_registry() {
+ internal::ParameterizedTestSuiteRegistry& parameterized_test_registry() {
return parameterized_test_registry_;
}
- // Sets the TestCase object for the test that's currently running.
- void set_current_test_case(TestCase* a_current_test_case) {
- current_test_case_ = a_current_test_case;
+ // Sets the TestSuite object for the test that's currently running.
+ void set_current_test_suite(TestSuite* a_current_test_suite) {
+ current_test_suite_ = a_current_test_suite;
}
// Sets the TestInfo object for the test that's currently running. If
@@ -693,7 +709,7 @@ class GTEST_API_ UnitTestImpl {
}
// Registers all parameterized tests defined using TEST_P and
- // INSTANTIATE_TEST_CASE_P, creating regular tests for each test/parameter
+ // INSTANTIATE_TEST_SUITE_P, creating regular tests for each test/parameter
// combination. This method can be called more than once; it has guards
// protecting from registering the tests more than once. If
// value-parameterized tests are disabled, RegisterParameterizedTests is
@@ -708,7 +724,7 @@ class GTEST_API_ UnitTestImpl {
// Clears the results of all tests, except the ad hoc tests.
void ClearNonAdHocTestResult() {
- ForEach(test_cases_, TestCase::ClearTestCaseResult);
+ ForEach(test_suites_, TestSuite::ClearTestSuiteResult);
}
// Clears the results of ad-hoc test assertions.
@@ -717,7 +733,7 @@ class GTEST_API_ UnitTestImpl {
}
// Adds a TestProperty to the current TestResult object when invoked in a
- // context of a test or a test case, or to the global property set. If the
+ // context of a test or a test suite, or to the global property set. If the
// result already contains a property with the same key, the value will be
// updated.
void RecordProperty(const TestProperty& test_property);
@@ -729,7 +745,7 @@ class GTEST_API_ UnitTestImpl {
// Matches the full name of each test against the user-specified
// filter to decide whether the test should run, then records the
- // result in each TestCase and TestInfo object.
+ // result in each TestSuite and TestInfo object.
// If shard_tests == HONOR_SHARDING_PROTOCOL, further filters tests
// based on sharding variables in the environment.
// Returns the number of tests that should run.
@@ -738,7 +754,7 @@ class GTEST_API_ UnitTestImpl {
// Prints the names of the tests matching the user-specified filter flag.
void ListTestsMatchingFilter();
- const TestCase* current_test_case() const { return current_test_case_; }
+ const TestSuite* current_test_suite() const { return current_test_suite_; }
TestInfo* current_test_info() { return current_test_info_; }
const TestInfo* current_test_info() const { return current_test_info_; }
@@ -799,11 +815,11 @@ class GTEST_API_ UnitTestImpl {
// Gets the random number generator.
internal::Random* random() { return &random_; }
- // Shuffles all test cases, and the tests within each test case,
+ // Shuffles all test suites, and the tests within each test suite,
// making sure that death tests are still run first.
void ShuffleTests();
- // Restores the test cases and tests to their order before the first shuffle.
+ // Restores the test suites and tests to their order before the first shuffle.
void UnshuffleTests();
// Returns the value of GTEST_FLAG(catch_exceptions) at the moment
@@ -843,31 +859,31 @@ class GTEST_API_ UnitTestImpl {
// before/after the tests are run.
std::vector<Environment*> environments_;
- // The vector of TestCases in their original order. It owns the
+ // The vector of TestSuites in their original order. It owns the
// elements in the vector.
- std::vector<TestCase*> test_cases_;
+ std::vector<TestSuite*> test_suites_;
- // Provides a level of indirection for the test case list to allow
- // easy shuffling and restoring the test case order. The i-th
- // element of this vector is the index of the i-th test case in the
+ // Provides a level of indirection for the test suite list to allow
+ // easy shuffling and restoring the test suite order. The i-th
+ // element of this vector is the index of the i-th test suite in the
// shuffled order.
- std::vector<int> test_case_indices_;
+ std::vector<int> test_suite_indices_;
// ParameterizedTestRegistry object used to register value-parameterized
// tests.
- internal::ParameterizedTestCaseRegistry parameterized_test_registry_;
+ internal::ParameterizedTestSuiteRegistry parameterized_test_registry_;
// Indicates whether RegisterParameterizedTests() has been called already.
bool parameterized_tests_registered_;
- // Index of the last death test case registered. Initially -1.
- int last_death_test_case_;
+ // Index of the last death test suite registered. Initially -1.
+ int last_death_test_suite_;
- // This points to the TestCase for the currently running test. It
- // changes as Google Test goes through one test case after another.
+ // This points to the TestSuite for the currently running test. It
+ // changes as Google Test goes through one test suite after another.
// When no test is running, this is set to NULL and Google Test
// stores assertion results in ad_hoc_test_result_. Initially NULL.
- TestCase* current_test_case_;
+ TestSuite* current_test_suite_;
// This points to the TestInfo for the currently running test. It
// changes as Google Test goes through one test after another. When
@@ -895,7 +911,7 @@ class GTEST_API_ UnitTestImpl {
// desired.
OsStackTraceGetterInterface* os_stack_trace_getter_;
- // True iff PostFlagParsingInit() has been called.
+ // True if PostFlagParsingInit() has been called.
bool post_flag_parse_init_performed_;
// The random number seed used at the beginning of the test run.
@@ -998,8 +1014,6 @@ bool ParseNaturalNumber(const ::std::string& str, Integer* number) {
const bool parse_success = *end == '\0' && errno == 0;
- // FIXME: Convert this to compile time assertion when it is
- // available.
GTEST_CHECK_(sizeof(Integer) <= sizeof(parsed));
const Integer result = static_cast<Integer>(parsed);
@@ -1073,8 +1087,8 @@ class StreamingListener : public EmptyTestEventListener {
GTEST_CHECK_(sockfd_ != -1)
<< "Send() can be called only when there is a connection.";
- const int len = static_cast<int>(message.length());
- if (write(sockfd_, message.c_str(), len) != len) {
+ const auto len = static_cast<size_t>(message.length());
+ if (write(sockfd_, message.c_str(), len) != static_cast<ssize_t>(len)) {
GTEST_LOG_(WARNING)
<< "stream_result_to: failed to stream to "
<< host_name_ << ":" << port_num_;
@@ -1138,14 +1152,18 @@ class StreamingListener : public EmptyTestEventListener {
StreamableToString(unit_test.elapsed_time()) + "ms");
}
+ // Note that "event=TestCaseStart" is a wire format and has to remain
+ // "case" for compatibilty
void OnTestCaseStart(const TestCase& test_case) override {
SendLn(std::string("event=TestCaseStart&name=") + test_case.name());
}
+ // Note that "event=TestCaseEnd" is a wire format and has to remain
+ // "case" for compatibilty
void OnTestCaseEnd(const TestCase& test_case) override {
- SendLn("event=TestCaseEnd&passed=" + FormatBool(test_case.Passed())
- + "&elapsed_time=" + StreamableToString(test_case.elapsed_time())
- + "ms");
+ SendLn("event=TestCaseEnd&passed=" + FormatBool(test_case.Passed()) +
+ "&elapsed_time=" + StreamableToString(test_case.elapsed_time()) +
+ "ms");
}
void OnTestStart(const TestInfo& test_info) override {
diff --git a/googletest/src/gtest-matchers.cc b/googletest/src/gtest-matchers.cc
index de275d36..7d2fb685 100644
--- a/googletest/src/gtest-matchers.cc
+++ b/googletest/src/gtest-matchers.cc
@@ -44,14 +44,6 @@ namespace testing {
// equal to s.
Matcher<const std::string&>::Matcher(const std::string& s) { *this = Eq(s); }
-#if GTEST_HAS_GLOBAL_STRING
-// Constructs a matcher that matches a const std::string& whose value is
-// equal to s.
-Matcher<const std::string&>::Matcher(const ::string& s) {
- *this = Eq(static_cast<std::string>(s));
-}
-#endif // GTEST_HAS_GLOBAL_STRING
-
// Constructs a matcher that matches a const std::string& whose value is
// equal to s.
Matcher<const std::string&>::Matcher(const char* s) {
@@ -62,45 +54,10 @@ Matcher<const std::string&>::Matcher(const char* s) {
// s.
Matcher<std::string>::Matcher(const std::string& s) { *this = Eq(s); }
-#if GTEST_HAS_GLOBAL_STRING
-// Constructs a matcher that matches a std::string whose value is equal to
-// s.
-Matcher<std::string>::Matcher(const ::string& s) {
- *this = Eq(static_cast<std::string>(s));
-}
-#endif // GTEST_HAS_GLOBAL_STRING
-
// Constructs a matcher that matches a std::string whose value is equal to
// s.
Matcher<std::string>::Matcher(const char* s) { *this = Eq(std::string(s)); }
-#if GTEST_HAS_GLOBAL_STRING
-// Constructs a matcher that matches a const ::string& whose value is
-// equal to s.
-Matcher<const ::string&>::Matcher(const std::string& s) {
- *this = Eq(static_cast<::string>(s));
-}
-
-// Constructs a matcher that matches a const ::string& whose value is
-// equal to s.
-Matcher<const ::string&>::Matcher(const ::string& s) { *this = Eq(s); }
-
-// Constructs a matcher that matches a const ::string& whose value is
-// equal to s.
-Matcher<const ::string&>::Matcher(const char* s) { *this = Eq(::string(s)); }
-
-// Constructs a matcher that matches a ::string whose value is equal to s.
-Matcher<::string>::Matcher(const std::string& s) {
- *this = Eq(static_cast<::string>(s));
-}
-
-// Constructs a matcher that matches a ::string whose value is equal to s.
-Matcher<::string>::Matcher(const ::string& s) { *this = Eq(s); }
-
-// Constructs a matcher that matches a string whose value is equal to s.
-Matcher<::string>::Matcher(const char* s) { *this = Eq(::string(s)); }
-#endif // GTEST_HAS_GLOBAL_STRING
-
#if GTEST_HAS_ABSL
// Constructs a matcher that matches a const absl::string_view& whose value is
// equal to s.
@@ -108,12 +65,6 @@ Matcher<const absl::string_view&>::Matcher(const std::string& s) {
*this = Eq(s);
}
-#if GTEST_HAS_GLOBAL_STRING
-// Constructs a matcher that matches a const absl::string_view& whose value is
-// equal to s.
-Matcher<const absl::string_view&>::Matcher(const ::string& s) { *this = Eq(s); }
-#endif // GTEST_HAS_GLOBAL_STRING
-
// Constructs a matcher that matches a const absl::string_view& whose value is
// equal to s.
Matcher<const absl::string_view&>::Matcher(const char* s) {
@@ -130,12 +81,6 @@ Matcher<const absl::string_view&>::Matcher(absl::string_view s) {
// s.
Matcher<absl::string_view>::Matcher(const std::string& s) { *this = Eq(s); }
-#if GTEST_HAS_GLOBAL_STRING
-// Constructs a matcher that matches a absl::string_view whose value is equal to
-// s.
-Matcher<absl::string_view>::Matcher(const ::string& s) { *this = Eq(s); }
-#endif // GTEST_HAS_GLOBAL_STRING
-
// Constructs a matcher that matches a absl::string_view whose value is equal to
// s.
Matcher<absl::string_view>::Matcher(const char* s) {
diff --git a/googletest/src/gtest-port.cc b/googletest/src/gtest-port.cc
index 950c16b6..9024f030 100644
--- a/googletest/src/gtest-port.cc
+++ b/googletest/src/gtest-port.cc
@@ -55,6 +55,14 @@
# include <mach/vm_map.h>
#endif // GTEST_OS_MAC
+#if GTEST_OS_DRAGONFLY || GTEST_OS_FREEBSD || GTEST_OS_GNU_KFREEBSD || \
+ GTEST_OS_NETBSD || GTEST_OS_OPENBSD
+# include <sys/sysctl.h>
+# if GTEST_OS_DRAGONFLY || GTEST_OS_FREEBSD || GTEST_OS_GNU_KFREEBSD
+# include <sys/user.h>
+# endif
+#endif
+
#if GTEST_OS_QNX
# include <devctl.h>
# include <fcntl.h>
@@ -109,7 +117,7 @@ T ReadProcFileField(const std::string& filename, int field) {
size_t GetThreadCount() {
const std::string filename =
(Message() << "/proc/" << getpid() << "/stat").GetString();
- return ReadProcFileField<int>(filename, 19);
+ return ReadProcFileField<size_t>(filename, 19);
}
#elif GTEST_OS_MAC
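The Linux branch above pulls field 20 (zero-based index 19) of /proc/<pid>/stat, which is the kernel's num_threads counter. A rough standalone equivalent is sketched below; the function name is made up for illustration, and like the original it would be confused by a process name containing spaces.

```cpp
#include <unistd.h>
#include <cstddef>
#include <fstream>
#include <string>

// Returns the thread count reported by /proc/<pid>/stat, or 0 on failure.
static size_t GetThreadCountFromProc() {
  std::ifstream stat_file("/proc/" + std::to_string(getpid()) + "/stat");
  std::string token;
  for (int i = 0; i < 19; ++i) {   // skip pid, comm, state, ... (fields 1-19)
    if (!(stat_file >> token)) return 0;
  }
  size_t num_threads = 0;
  stat_file >> num_threads;        // field 20: num_threads
  return stat_file ? num_threads : 0;
}
```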
@@ -131,6 +139,81 @@ size_t GetThreadCount() {
}
}
+#elif GTEST_OS_DRAGONFLY || GTEST_OS_FREEBSD || GTEST_OS_GNU_KFREEBSD || \
+ GTEST_OS_NETBSD
+
+#if GTEST_OS_NETBSD
+#undef KERN_PROC
+#define KERN_PROC KERN_PROC2
+#define kinfo_proc kinfo_proc2
+#endif
+
+#if GTEST_OS_DRAGONFLY
+#define KP_NLWP(kp) (kp.kp_nthreads)
+#elif GTEST_OS_FREEBSD || GTEST_OS_GNU_KFREEBSD
+#define KP_NLWP(kp) (kp.ki_numthreads)
+#elif GTEST_OS_NETBSD
+#define KP_NLWP(kp) (kp.p_nlwps)
+#endif
+
+// Returns the number of threads running in the process, or 0 to indicate that
+// we cannot detect it.
+size_t GetThreadCount() {
+ int mib[] = {
+ CTL_KERN,
+ KERN_PROC,
+ KERN_PROC_PID,
+ getpid(),
+#if GTEST_OS_NETBSD
+ sizeof(struct kinfo_proc),
+ 1,
+#endif
+ };
+ u_int miblen = sizeof(mib) / sizeof(mib[0]);
+ struct kinfo_proc info;
+ size_t size = sizeof(info);
+ if (sysctl(mib, miblen, &info, &size, NULL, 0)) {
+ return 0;
+ }
+ return static_cast<size_t>(KP_NLWP(info));
+}
+#elif GTEST_OS_OPENBSD
+
+// Returns the number of threads running in the process, or 0 to indicate that
+// we cannot detect it.
+size_t GetThreadCount() {
+ int mib[] = {
+ CTL_KERN,
+ KERN_PROC,
+ KERN_PROC_PID | KERN_PROC_SHOW_THREADS,
+ getpid(),
+ sizeof(struct kinfo_proc),
+ 0,
+ };
+ u_int miblen = sizeof(mib) / sizeof(mib[0]);
+
+ // get number of structs
+ size_t size;
+ if (sysctl(mib, miblen, NULL, &size, NULL, 0)) {
+ return 0;
+ }
+ mib[5] = size / mib[4];
+
+ // populate array of structs
+ struct kinfo_proc info[mib[5]];
+ if (sysctl(mib, miblen, &info, &size, NULL, 0)) {
+ return 0;
+ }
+
+ // exclude empty members
+ int nthreads = 0;
+ for (int i = 0; i < size / mib[4]; i++) {
+ if (info[i].p_tid != -1)
+ nthreads++;
+ }
+ return nthreads;
+}
+
#elif GTEST_OS_QNX
// Returns the number of threads running in the process, or 0 to indicate that
@@ -196,7 +279,7 @@ size_t GetThreadCount() {
#if GTEST_IS_THREADSAFE && GTEST_OS_WINDOWS
void SleepMilliseconds(int n) {
- ::Sleep(n);
+ ::Sleep(static_cast<DWORD>(n));
}
AutoHandle::AutoHandle()
@@ -265,9 +348,6 @@ Mutex::Mutex()
Mutex::~Mutex() {
// Static mutexes are leaked intentionally. It is not thread-safe to try
// to clean them up.
- // FIXME: Switch to Slim Reader/Writer (SRW) Locks, which requires
- // nothing to clean it up but is available only on Vista and later.
- // https://docs.microsoft.com/en-us/windows/desktop/Sync/slim-reader-writer--srw--locks
if (type_ == kDynamic) {
::DeleteCriticalSection(critical_section_);
delete critical_section_;
@@ -300,6 +380,7 @@ void Mutex::AssertHeld() {
namespace {
+#ifdef _MSC_VER
// Use the RAII idiom to flag mem allocs that are intentionally never
// deallocated. The motivation is to silence the false positive mem leaks
// that are reported by the debug version of MS's CRT which can only detect
@@ -312,19 +393,15 @@ class MemoryIsNotDeallocated
{
public:
MemoryIsNotDeallocated() : old_crtdbg_flag_(0) {
-#ifdef _MSC_VER
old_crtdbg_flag_ = _CrtSetDbgFlag(_CRTDBG_REPORT_FLAG);
// Set heap allocation block type to _IGNORE_BLOCK so that MS debug CRT
// doesn't report mem leak if there's no matching deallocation.
_CrtSetDbgFlag(old_crtdbg_flag_ & ~_CRTDBG_ALLOC_MEM_DF);
-#endif // _MSC_VER
}
~MemoryIsNotDeallocated() {
-#ifdef _MSC_VER
// Restore the original _CRTDBG_ALLOC_MEM_DF flag
_CrtSetDbgFlag(old_crtdbg_flag_);
-#endif // _MSC_VER
}
private:
@@ -332,6 +409,7 @@ class MemoryIsNotDeallocated
GTEST_DISALLOW_COPY_AND_ASSIGN_(MemoryIsNotDeallocated);
};
+#endif // _MSC_VER
} // namespace
@@ -347,7 +425,9 @@ void Mutex::ThreadSafeLazyInit() {
owner_thread_id_ = 0;
{
// Use RAII to flag that following mem alloc is never deallocated.
+#ifdef _MSC_VER
MemoryIsNotDeallocated memory_is_not_deallocated;
+#endif // _MSC_VER
critical_section_ = new CRITICAL_SECTION;
}
::InitializeCriticalSection(critical_section_);
@@ -388,7 +468,6 @@ class ThreadWithParamSupport : public ThreadWithParamBase {
Notification* thread_can_start) {
ThreadMainParam* param = new ThreadMainParam(runnable, thread_can_start);
DWORD thread_id;
- // FIXME: Consider to use _beginthreadex instead.
HANDLE thread_handle = ::CreateThread(
nullptr, // Default security.
0, // Default stack size.
@@ -591,7 +670,9 @@ class ThreadLocalRegistryImpl {
// Returns map of thread local instances.
static ThreadIdToThreadLocals* GetThreadLocalsMapLocked() {
mutex_.AssertHeld();
+#ifdef _MSC_VER
MemoryIsNotDeallocated memory_is_not_deallocated;
+#endif // _MSC_VER
static ThreadIdToThreadLocals* map = new ThreadIdToThreadLocals();
return map;
}
@@ -634,7 +715,7 @@ RE::~RE() {
free(const_cast<char*>(pattern_));
}
-// Returns true iff regular expression re matches the entire str.
+// Returns true if regular expression re matches the entire str.
bool RE::FullMatch(const char* str, const RE& re) {
if (!re.is_valid_) return false;
@@ -642,7 +723,7 @@ bool RE::FullMatch(const char* str, const RE& re) {
return regexec(&re.full_regex_, str, 1, &match, 0) == 0;
}
-// Returns true iff regular expression re matches a substring of str
+// Returns true if regular expression re matches a substring of str
// (including str itself).
bool RE::PartialMatch(const char* str, const RE& re) {
if (!re.is_valid_) return false;
@@ -683,13 +764,13 @@ void RE::Init(const char* regex) {
#elif GTEST_USES_SIMPLE_RE
-// Returns true iff ch appears anywhere in str (excluding the
+// Returns true if ch appears anywhere in str (excluding the
// terminating '\0' character).
bool IsInSet(char ch, const char* str) {
return ch != '\0' && strchr(str, ch) != nullptr;
}
-// Returns true iff ch belongs to the given classification. Unlike
+// Returns true if ch belongs to the given classification. Unlike
// similar functions in <ctype.h>, these aren't affected by the
// current locale.
bool IsAsciiDigit(char ch) { return '0' <= ch && ch <= '9'; }
@@ -703,12 +784,12 @@ bool IsAsciiWordChar(char ch) {
('0' <= ch && ch <= '9') || ch == '_';
}
-// Returns true iff "\\c" is a supported escape sequence.
+// Returns true if "\\c" is a supported escape sequence.
bool IsValidEscape(char c) {
return (IsAsciiPunct(c) || IsInSet(c, "dDfnrsStvwW"));
}
-// Returns true iff the given atom (specified by escaped and pattern)
+// Returns true if the given atom (specified by escaped and pattern)
// matches ch. The result is undefined if the atom is invalid.
bool AtomMatchesChar(bool escaped, char pattern_char, char ch) {
if (escaped) { // "\\p" where p is pattern_char.
@@ -741,16 +822,13 @@ static std::string FormatRegexSyntaxError(const char* regex, int index) {
// otherwise returns true.
bool ValidateRegex(const char* regex) {
if (regex == nullptr) {
- // FIXME: fix the source file location in the
- // assertion failures to match where the regex is used in user
- // code.
ADD_FAILURE() << "NULL is not a valid simple regular expression.";
return false;
}
bool is_valid = true;
- // True iff ?, *, or + can follow the previous atom.
+ // True if ?, *, or + can follow the previous atom.
bool prev_repeatable = false;
for (int i = 0; regex[i]; i++) {
if (regex[i] == '\\') { // An escape sequence
@@ -826,7 +904,7 @@ bool MatchRepetitionAndRegexAtHead(
return false;
}
-// Returns true iff regex matches a prefix of str. regex must be a
+// Returns true if regex matches a prefix of str. regex must be a
// valid simple regular expression and not start with "^", or the
// result is undefined.
bool MatchRegexAtHead(const char* regex, const char* str) {
@@ -857,7 +935,7 @@ bool MatchRegexAtHead(const char* regex, const char* str) {
}
}
-// Returns true iff regex matches any substring of str. regex must be
+// Returns true if regex matches any substring of str. regex must be
// a valid simple regular expression, or the result is undefined.
//
// The algorithm is recursive, but the recursion depth doesn't exceed
@@ -886,12 +964,12 @@ RE::~RE() {
free(const_cast<char*>(full_pattern_));
}
-// Returns true iff regular expression re matches the entire str.
+// Returns true if regular expression re matches the entire str.
bool RE::FullMatch(const char* str, const RE& re) {
return re.is_valid_ && MatchRegexAnywhere(re.full_pattern_, str);
}
-// Returns true iff regular expression re matches a substring of str
+// Returns true if regular expression re matches a substring of str
// (including str itself).
bool RE::PartialMatch(const char* str, const RE& re) {
return re.is_valid_ && MatchRegexAnywhere(re.pattern_, str);
@@ -1035,6 +1113,11 @@ class CapturedStream {
char name_template[] = "/tmp/captured_stream.XXXXXX";
# endif // GTEST_OS_LINUX_ANDROID
const int captured_fd = mkstemp(name_template);
+ if (captured_fd == -1) {
+ GTEST_LOG_(WARNING)
+ << "Failed to create tmp file " << name_template
+ << " for test; does the test have access to the /tmp directory?";
+ }
filename_ = name_template;
# endif // GTEST_OS_WINDOWS
fflush(nullptr);
@@ -1056,6 +1139,10 @@ class CapturedStream {
}
FILE* const file = posix::FOpen(filename_.c_str(), "r");
+ if (file == nullptr) {
+ GTEST_LOG_(FATAL) << "Failed to open tmp file " << filename_
+ << " for capturing stream.";
+ }
const std::string content = ReadEntireFile(file);
posix::FClose(file);
return content;
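For readers who have not seen CapturedStream (the class both new checks are added to): its POSIX path boils down to redirecting the stream's file descriptor into a mkstemp() file and later reading that file back. A compressed sketch of that shape, not googletest's actual code and with error handling mostly elided:

```cpp
#include <unistd.h>
#include <cstdio>
#include <cstdlib>
#include <string>

// Captures everything written to stderr between construction and StopAndGet().
class ScopedStderrCapture {
 public:
  ScopedStderrCapture() {
    char name_template[] = "/tmp/captured_stream.XXXXXX";
    const int captured_fd = mkstemp(name_template);  // backing file
    filename_ = name_template;
    fflush(nullptr);
    saved_fd_ = dup(STDERR_FILENO);                  // remember the real stderr
    dup2(captured_fd, STDERR_FILENO);                // redirect it into the file
    close(captured_fd);
  }
  std::string StopAndGet() {
    fflush(nullptr);
    dup2(saved_fd_, STDERR_FILENO);                  // restore the real stderr
    close(saved_fd_);
    std::string content;
    if (FILE* file = fopen(filename_.c_str(), "r")) {
      char buf[4096];
      size_t n;
      while ((n = fread(buf, 1, sizeof(buf), file)) > 0) content.append(buf, n);
      fclose(file);
    }
    remove(filename_.c_str());
    return content;
  }

 private:
  int saved_fd_ = -1;
  std::string filename_;
};
```

The two checks added in the hunks above guard exactly the mkstemp() and fopen() steps of this flow.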
@@ -1169,13 +1256,6 @@ void SetInjectableArgvs(const std::vector<std::string>& new_argvs) {
new std::vector<std::string>(new_argvs.begin(), new_argvs.end()));
}
-#if GTEST_HAS_GLOBAL_STRING
-void SetInjectableArgvs(const std::vector< ::string>& new_argvs) {
- SetInjectableArgvs(
- new std::vector<std::string>(new_argvs.begin(), new_argvs.end()));
-}
-#endif // GTEST_HAS_GLOBAL_STRING
-
void ClearInjectableArgvs() {
delete g_injected_test_argvs;
g_injected_test_argvs = nullptr;
@@ -1250,7 +1330,7 @@ bool ParseInt32(const Message& src_text, const char* str, Int32* value) {
// Reads and returns the Boolean environment variable corresponding to
// the given flag; if it's not set, returns default_value.
//
-// The value is considered true iff it's not "0".
+// The value is considered true if it's not "0".
bool BoolFromGTestEnv(const char* flag, bool default_value) {
#if defined(GTEST_GET_BOOL_FROM_ENV_)
return GTEST_GET_BOOL_FROM_ENV_(flag, default_value);
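The BSD thread-count paths added above all share the same sysctl shape. Specialized to FreeBSD for brevity (DragonFly reads kp_nthreads, and NetBSD uses KERN_PROC2 with kinfo_proc2::p_nlwps instead), the query looks roughly like this; the function name is illustrative:

```cpp
#include <sys/types.h>
#include <sys/sysctl.h>
#include <sys/user.h>
#include <unistd.h>

// Fetch this process's kinfo_proc record and return its thread count, or 0
// if the sysctl fails (callers treat 0 as "cannot detect").
static size_t GetThreadCountFreeBSD() {
  int mib[] = {CTL_KERN, KERN_PROC, KERN_PROC_PID, getpid()};
  struct kinfo_proc info;
  size_t size = sizeof(info);
  if (sysctl(mib, 4, &info, &size, nullptr, 0) != 0) {
    return 0;
  }
  return static_cast<size_t>(info.ki_numthreads);
}
```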
diff --git a/googletest/src/gtest-printers.cc b/googletest/src/gtest-printers.cc
index 12e6dbb6..3337be31 100644
--- a/googletest/src/gtest-printers.cc
+++ b/googletest/src/gtest-printers.cc
@@ -59,6 +59,7 @@ using ::std::ostream;
// Prints a segment of bytes in the given object.
GTEST_ATTRIBUTE_NO_SANITIZE_MEMORY_
GTEST_ATTRIBUTE_NO_SANITIZE_ADDRESS_
+GTEST_ATTRIBUTE_NO_SANITIZE_HWADDRESS_
GTEST_ATTRIBUTE_NO_SANITIZE_THREAD_
void PrintByteSegmentInObjectTo(const unsigned char* obj_bytes, size_t start,
size_t count, ostream* os) {
@@ -89,7 +90,6 @@ void PrintBytesInObjectToImpl(const unsigned char* obj_bytes, size_t count,
// If the object size is bigger than kThreshold, we'll have to omit
// some details by printing only the first and the last kChunkSize
// bytes.
- // FIXME: let the user control the threshold using a flag.
if (count < kThreshold) {
PrintByteSegmentInObjectTo(obj_bytes, 0, count, os);
} else {
@@ -144,7 +144,8 @@ inline bool IsPrintableAscii(wchar_t c) {
// which is the type of c.
template <typename UnsignedChar, typename Char>
static CharFormat PrintAsCharLiteralTo(Char c, ostream* os) {
- switch (static_cast<wchar_t>(c)) {
+ wchar_t w_c = static_cast<wchar_t>(c);
+ switch (w_c) {
case L'\0':
*os << "\\0";
break;
@@ -176,7 +177,7 @@ static CharFormat PrintAsCharLiteralTo(Char c, ostream* os) {
*os << "\\v";
break;
default:
- if (IsPrintableAscii(c)) {
+ if (IsPrintableAscii(w_c)) {
*os << static_cast<char>(c);
return kAsIs;
} else {
@@ -236,7 +237,7 @@ void PrintCharAndCodeTo(Char c, ostream* os) {
if (format == kHexEscape || (1 <= c && c <= 9)) {
// Do nothing.
} else {
- *os << ", 0x" << String::FormatHexInt(static_cast<UnsignedChar>(c));
+ *os << ", 0x" << String::FormatHexInt(static_cast<int>(c));
}
*os << ")";
}
@@ -261,6 +262,7 @@ void PrintTo(wchar_t wc, ostream* os) {
template <typename CharType>
GTEST_ATTRIBUTE_NO_SANITIZE_MEMORY_
GTEST_ATTRIBUTE_NO_SANITIZE_ADDRESS_
+GTEST_ATTRIBUTE_NO_SANITIZE_HWADDRESS_
GTEST_ATTRIBUTE_NO_SANITIZE_THREAD_
static CharFormat PrintCharsAsStringTo(
const CharType* begin, size_t len, ostream* os) {
@@ -291,6 +293,7 @@ static CharFormat PrintCharsAsStringTo(
template <typename CharType>
GTEST_ATTRIBUTE_NO_SANITIZE_MEMORY_
GTEST_ATTRIBUTE_NO_SANITIZE_ADDRESS_
+GTEST_ATTRIBUTE_NO_SANITIZE_HWADDRESS_
GTEST_ATTRIBUTE_NO_SANITIZE_THREAD_
static void UniversalPrintCharArray(
const CharType* begin, size_t len, ostream* os) {
@@ -420,17 +423,6 @@ void ConditionalPrintAsText(const char* str, size_t length, ostream* os) {
} // anonymous namespace
-// Prints a ::string object.
-#if GTEST_HAS_GLOBAL_STRING
-void PrintStringTo(const ::string& s, ostream* os) {
- if (PrintCharsAsStringTo(s.data(), s.size(), os) == kHexEscape) {
- if (GTEST_FLAG(print_utf8)) {
- ConditionalPrintAsText(s.data(), s.size(), os);
- }
- }
-}
-#endif // GTEST_HAS_GLOBAL_STRING
-
void PrintStringTo(const ::std::string& s, ostream* os) {
if (PrintCharsAsStringTo(s.data(), s.size(), os) == kHexEscape) {
if (GTEST_FLAG(print_utf8)) {
@@ -439,13 +431,6 @@ void PrintStringTo(const ::std::string& s, ostream* os) {
}
}
-// Prints a ::wstring object.
-#if GTEST_HAS_GLOBAL_WSTRING
-void PrintWideStringTo(const ::wstring& s, ostream* os) {
- PrintCharsAsStringTo(s.data(), s.size(), os);
-}
-#endif // GTEST_HAS_GLOBAL_WSTRING
-
#if GTEST_HAS_STD_WSTRING
void PrintWideStringTo(const ::std::wstring& s, ostream* os) {
PrintCharsAsStringTo(s.data(), s.size(), os);
diff --git a/googletest/src/gtest-test-part.cc b/googletest/src/gtest-test-part.cc
index 515b3084..178317a6 100644
--- a/googletest/src/gtest-test-part.cc
+++ b/googletest/src/gtest-test-part.cc
@@ -70,7 +70,7 @@ const TestPartResult& TestPartResultArray::GetTestPartResult(int index) const {
internal::posix::Abort();
}
- return array_[index];
+ return array_[static_cast<size_t>(index)];
}
// Returns the number of TestPartResult objects in the array.
diff --git a/googletest/src/gtest-typed-test.cc b/googletest/src/gtest-typed-test.cc
index a305ed1a..8677caf7 100644
--- a/googletest/src/gtest-typed-test.cc
+++ b/googletest/src/gtest-typed-test.cc
@@ -57,7 +57,7 @@ static std::vector<std::string> SplitIntoTestNames(const char* src) {
// Verifies that registered_tests match the test names in
// registered_tests_; returns registered_tests if successful, or
// aborts the program otherwise.
-const char* TypedTestCasePState::VerifyRegisteredTestNames(
+const char* TypedTestSuitePState::VerifyRegisteredTestNames(
const char* file, int line, const char* registered_tests) {
typedef RegisteredTestsMap::const_iterator RegisteredTestIter;
registered_ = true;
@@ -89,7 +89,7 @@ const char* TypedTestCasePState::VerifyRegisteredTestNames(
tests.insert(name);
} else {
errors << "No test named " << name
- << " can be found in this test case.\n";
+ << " can be found in this test suite.\n";
}
}
diff --git a/googletest/src/gtest.cc b/googletest/src/gtest.cc
index 34641af3..cbd48d31 100644
--- a/googletest/src/gtest.cc
+++ b/googletest/src/gtest.cc
@@ -54,8 +54,6 @@
#if GTEST_OS_LINUX
-// FIXME: Use autoconf to detect availability of
-// gettimeofday().
# define GTEST_HAS_GETTIMEOFDAY_ 1
# include <fcntl.h> // NOLINT
@@ -68,10 +66,6 @@
# include <unistd.h> // NOLINT
# include <string>
-#elif GTEST_OS_SYMBIAN
-# define GTEST_HAS_GETTIMEOFDAY_ 1
-# include <sys/time.h> // NOLINT
-
#elif GTEST_OS_ZOS
# define GTEST_HAS_GETTIMEOFDAY_ 1
# include <sys/time.h> // NOLINT
@@ -86,6 +80,11 @@
#elif GTEST_OS_WINDOWS // We are on Windows proper.
+# include <windows.h> // NOLINT
+# undef min
+
+# include <crtdbg.h> // NOLINT
+# include <debugapi.h> // NOLINT
# include <io.h> // NOLINT
# include <sys/timeb.h> // NOLINT
# include <sys/types.h> // NOLINT
@@ -93,25 +92,13 @@
# if GTEST_OS_WINDOWS_MINGW
// MinGW has gettimeofday() but not _ftime64().
-// FIXME: Use autoconf to detect availability of
-// gettimeofday().
-// FIXME: There are other ways to get the time on
-// Windows, like GetTickCount() or GetSystemTimeAsFileTime(). MinGW
-// supports these. consider using them instead.
# define GTEST_HAS_GETTIMEOFDAY_ 1
# include <sys/time.h> // NOLINT
# endif // GTEST_OS_WINDOWS_MINGW
-// cpplint thinks that the header is already included, so we want to
-// silence it.
-# include <windows.h> // NOLINT
-# undef min
-
#else
// Assume other platforms have gettimeofday().
-// FIXME: Use autoconf to detect availability of
-// gettimeofday().
# define GTEST_HAS_GETTIMEOFDAY_ 1
// cpplint thinks that the header is already included, so we want to
@@ -160,14 +147,14 @@ using internal::Shuffle;
// Constants.
-// A test whose test case name or test name matches this filter is
+// A test whose test suite name or test name matches this filter is
// disabled and not run.
static const char kDisableTestFilter[] = "DISABLED_*:*/DISABLED_*";
-// A test case whose name matches this filter is considered a death
-// test case and will be run before test cases whose name doesn't
+// A test suite whose name matches this filter is considered a death
+// test suite and will be run before test suites whose name doesn't
// match this filter.
-static const char kDeathTestCaseFilter[] = "*DeathTest:*DeathTest/*";
+static const char kDeathTestSuiteFilter[] = "*DeathTest:*DeathTest/*";
// A test filter that matches everything.
static const char kUniversalFilter[] = "*";
@@ -190,7 +177,7 @@ namespace internal {
// stack trace.
const char kStackTraceMarker[] = "\nStack trace:\n";
-// g_help_flag is true iff the --help flag or an equivalent form is
+// g_help_flag is true if the --help flag or an equivalent form is
// specified on the command line.
bool g_help_flag = false;
@@ -230,12 +217,12 @@ GTEST_DEFINE_bool_(
GTEST_DEFINE_bool_(
break_on_failure,
internal::BoolFromGTestEnv("break_on_failure", false),
- "True iff a failed assertion should be a debugger break-point.");
+ "True if a failed assertion should be a debugger break-point.");
GTEST_DEFINE_bool_(
catch_exceptions,
internal::BoolFromGTestEnv("catch_exceptions", true),
- "True iff " GTEST_NAME_
+ "True if " GTEST_NAME_
" should catch exceptions and treat them as test failures.");
GTEST_DEFINE_string_(
@@ -286,13 +273,13 @@ GTEST_DEFINE_string_(
GTEST_DEFINE_bool_(
print_time,
internal::BoolFromGTestEnv("print_time", true),
- "True iff " GTEST_NAME_
+ "True if " GTEST_NAME_
" should display elapsed time in text output.");
GTEST_DEFINE_bool_(
print_utf8,
internal::BoolFromGTestEnv("print_utf8", true),
- "True iff " GTEST_NAME_
+ "True if " GTEST_NAME_
" prints UTF8 characters as text.");
GTEST_DEFINE_int32_(
@@ -309,13 +296,13 @@ GTEST_DEFINE_int32_(
GTEST_DEFINE_bool_(
show_internal_stack_frames, false,
- "True iff " GTEST_NAME_ " should include internal stack frames when "
+ "True if " GTEST_NAME_ " should include internal stack frames when "
"printing test failure stack traces.");
GTEST_DEFINE_bool_(
shuffle,
internal::BoolFromGTestEnv("shuffle", false),
- "True iff " GTEST_NAME_
+ "True if " GTEST_NAME_
" should randomize tests' order on every run.");
GTEST_DEFINE_int32_(
@@ -367,16 +354,16 @@ UInt32 Random::Generate(UInt32 range) {
return state_ % range;
}
-// GTestIsInitialized() returns true iff the user has initialized
+// GTestIsInitialized() returns true if the user has initialized
// Google Test. Useful for catching the user mistake of not initializing
// Google Test before calling RUN_ALL_TESTS().
static bool GTestIsInitialized() { return GetArgvs().size() > 0; }
-// Iterates over a vector of TestCases, keeping a running sum of the
+// Iterates over a vector of TestSuites, keeping a running sum of the
// results of calling a given int-returning method on each.
// Returns the sum.
-static int SumOverTestCaseList(const std::vector<TestCase*>& case_list,
- int (TestCase::*method)() const) {
+static int SumOverTestSuiteList(const std::vector<TestSuite*>& case_list,
+ int (TestSuite::*method)() const) {
int sum = 0;
for (size_t i = 0; i < case_list.size(); i++) {
sum += (case_list[i]->*method)();
@@ -384,20 +371,20 @@ static int SumOverTestCaseList(const std::vector<TestCase*>& case_list,
return sum;
}
-// Returns true iff the test case passed.
-static bool TestCasePassed(const TestCase* test_case) {
- return test_case->should_run() && test_case->Passed();
+// Returns true if the test suite passed.
+static bool TestSuitePassed(const TestSuite* test_suite) {
+ return test_suite->should_run() && test_suite->Passed();
}
-// Returns true iff the test case failed.
-static bool TestCaseFailed(const TestCase* test_case) {
- return test_case->should_run() && test_case->Failed();
+// Returns true if the test suite failed.
+static bool TestSuiteFailed(const TestSuite* test_suite) {
+ return test_suite->should_run() && test_suite->Failed();
}
-// Returns true iff test_case contains at least one test that should
+// Returns true if test_suite contains at least one test that should
// run.
-static bool ShouldRunTestCase(const TestCase* test_case) {
- return test_case->should_run();
+static bool ShouldRunTestSuite(const TestSuite* test_suite) {
+ return test_suite->should_run();
}
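The counting helpers above rely on a pointer-to-member-function parameter; the idiom in isolation, as a standalone sketch with invented names, looks like this:

    #include <cstdio>
    #include <vector>

    // Sums the result of calling one const member function on every element,
    // mirroring the SumOverTestSuiteList pattern above.
    struct Counter {
      int value;
      int count() const { return value; }
    };

    static int SumOver(const std::vector<Counter*>& list,
                       int (Counter::*method)() const) {
      int sum = 0;
      for (const Counter* c : list) sum += (c->*method)();
      return sum;
    }

    int main() {
      Counter a{2}, b{3};
      std::vector<Counter*> items = {&a, &b};
      std::printf("%d\n", SumOver(items, &Counter::count));  // prints 5
    }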
// AssertHelper constructor.
@@ -459,7 +446,8 @@ std::string UnitTestOptions::GetOutputFormat() {
const char* const colon = strchr(gtest_output_flag, ':');
return (colon == nullptr)
? std::string(gtest_output_flag)
- : std::string(gtest_output_flag, colon - gtest_output_flag);
+ : std::string(gtest_output_flag,
+ static_cast<size_t>(colon - gtest_output_flag));
}
// Returns the name of the requested output file, or the default if none
@@ -481,10 +469,6 @@ std::string UnitTestOptions::GetAbsolutePathToOutputFile() {
internal::FilePath output_name(colon + 1);
if (!output_name.IsAbsolutePath())
- // FIXME: on Windows \some\path is not an absolute
- // path (as its meaning depends on the current drive), yet the
- // following logic for turning it into an absolute path is wrong.
- // Fix it.
output_name = internal::FilePath::ConcatPaths(
internal::FilePath(UnitTest::GetInstance()->original_working_dir()),
internal::FilePath(colon + 1));
@@ -498,7 +482,7 @@ std::string UnitTestOptions::GetAbsolutePathToOutputFile() {
return result.string();
}
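A minimal sketch of the --gtest_output=<format>[:<path>] parsing performed by the two functions above (the flag value here is invented for illustration):

    #include <cstdio>
    #include <cstring>
    #include <string>

    // Everything before the first ':' is the output format; the remainder,
    // if any, is the requested output path.
    int main() {
      const char* flag = "xml:reports/results.xml";  // hypothetical flag value
      const char* colon = std::strchr(flag, ':');
      const std::string format =
          colon ? std::string(flag, static_cast<size_t>(colon - flag))
                : std::string(flag);
      const std::string path = colon ? std::string(colon + 1) : std::string();
      std::printf("format=%s path=%s\n", format.c_str(), path.c_str());
    }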
-// Returns true iff the wildcard pattern matches the string. The
+// Returns true if the wildcard pattern matches the string. The
// first ':' or '\0' character in pattern marks the end of it.
//
// This recursive algorithm isn't very efficient, but is clear and
@@ -541,11 +525,11 @@ bool UnitTestOptions::MatchesFilter(
}
}
-// Returns true iff the user-specified filter matches the test case
+// Returns true if the user-specified filter matches the test suite
// name and the test name.
-bool UnitTestOptions::FilterMatchesTest(const std::string &test_case_name,
- const std::string &test_name) {
- const std::string& full_name = test_case_name + "." + test_name.c_str();
+bool UnitTestOptions::FilterMatchesTest(const std::string& test_suite_name,
+ const std::string& test_name) {
+ const std::string& full_name = test_suite_name + "." + test_name.c_str();
// Split --gtest_filter at '-', if there is one, to separate into
// positive filter and negative filter portions
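The recursive wildcard matching described above ('?' matches any single character, '*' any run of characters, ':' or '\0' ends a pattern) can be sketched standalone as follows; this is an illustrative stand-in, not the library's exact implementation:

    #include <cstdio>

    static bool PatternMatches(const char* pattern, const char* str) {
      switch (*pattern) {
        case ':':   // end of this pattern in a ':'-separated list
        case '\0':
          return *str == '\0';
        case '?':   // matches any single character
          return *str != '\0' && PatternMatches(pattern + 1, str + 1);
        case '*':   // matches any (possibly empty) run of characters
          return (*str != '\0' && PatternMatches(pattern, str + 1)) ||
                 PatternMatches(pattern + 1, str);
        default:
          return *pattern == *str && PatternMatches(pattern + 1, str + 1);
      }
    }

    int main() {
      std::printf("%d\n", PatternMatches("*DeathTest", "FooDeathTest"));  // 1
      std::printf("%d\n", PatternMatches("Foo?Test.*", "FooXTest.Bar"));  // 1
      std::printf("%d\n", PatternMatches("Foo*", "Bar"));                 // 0
    }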
@@ -767,66 +751,66 @@ void UnitTestImpl::SetTestPartResultReporterForCurrentThread(
per_thread_test_part_result_reporter_.set(reporter);
}
-// Gets the number of successful test cases.
-int UnitTestImpl::successful_test_case_count() const {
- return CountIf(test_cases_, TestCasePassed);
+// Gets the number of successful test suites.
+int UnitTestImpl::successful_test_suite_count() const {
+ return CountIf(test_suites_, TestSuitePassed);
}
-// Gets the number of failed test cases.
-int UnitTestImpl::failed_test_case_count() const {
- return CountIf(test_cases_, TestCaseFailed);
+// Gets the number of failed test suites.
+int UnitTestImpl::failed_test_suite_count() const {
+ return CountIf(test_suites_, TestSuiteFailed);
}
-// Gets the number of all test cases.
-int UnitTestImpl::total_test_case_count() const {
- return static_cast<int>(test_cases_.size());
+// Gets the number of all test suites.
+int UnitTestImpl::total_test_suite_count() const {
+ return static_cast<int>(test_suites_.size());
}
-// Gets the number of all test cases that contain at least one test
+// Gets the number of all test suites that contain at least one test
// that should run.
-int UnitTestImpl::test_case_to_run_count() const {
- return CountIf(test_cases_, ShouldRunTestCase);
+int UnitTestImpl::test_suite_to_run_count() const {
+ return CountIf(test_suites_, ShouldRunTestSuite);
}
// Gets the number of successful tests.
int UnitTestImpl::successful_test_count() const {
- return SumOverTestCaseList(test_cases_, &TestCase::successful_test_count);
+ return SumOverTestSuiteList(test_suites_, &TestSuite::successful_test_count);
}
// Gets the number of skipped tests.
int UnitTestImpl::skipped_test_count() const {
- return SumOverTestCaseList(test_cases_, &TestCase::skipped_test_count);
+ return SumOverTestSuiteList(test_suites_, &TestSuite::skipped_test_count);
}
// Gets the number of failed tests.
int UnitTestImpl::failed_test_count() const {
- return SumOverTestCaseList(test_cases_, &TestCase::failed_test_count);
+ return SumOverTestSuiteList(test_suites_, &TestSuite::failed_test_count);
}
// Gets the number of disabled tests that will be reported in the XML report.
int UnitTestImpl::reportable_disabled_test_count() const {
- return SumOverTestCaseList(test_cases_,
- &TestCase::reportable_disabled_test_count);
+ return SumOverTestSuiteList(test_suites_,
+ &TestSuite::reportable_disabled_test_count);
}
// Gets the number of disabled tests.
int UnitTestImpl::disabled_test_count() const {
- return SumOverTestCaseList(test_cases_, &TestCase::disabled_test_count);
+ return SumOverTestSuiteList(test_suites_, &TestSuite::disabled_test_count);
}
// Gets the number of tests to be printed in the XML report.
int UnitTestImpl::reportable_test_count() const {
- return SumOverTestCaseList(test_cases_, &TestCase::reportable_test_count);
+ return SumOverTestSuiteList(test_suites_, &TestSuite::reportable_test_count);
}
// Gets the number of all tests.
int UnitTestImpl::total_test_count() const {
- return SumOverTestCaseList(test_cases_, &TestCase::total_test_count);
+ return SumOverTestSuiteList(test_suites_, &TestSuite::total_test_count);
}
// Gets the number of tests that should run.
int UnitTestImpl::test_to_run_count() const {
- return SumOverTestCaseList(test_cases_, &TestCase::test_to_run_count);
+ return SumOverTestSuiteList(test_suites_, &TestSuite::test_to_run_count);
}
// Returns the current OS stack trace as an std::string.
@@ -860,8 +844,6 @@ TimeInMillis GetTimeInMillis() {
SYSTEMTIME now_systime;
FILETIME now_filetime;
ULARGE_INTEGER now_int64;
- // FIXME: Shouldn't this just use
- // GetSystemTimeAsFileTime()?
GetSystemTime(&now_systime);
if (SystemTimeToFileTime(&now_systime, &now_filetime)) {
now_int64.LowPart = now_filetime.dwLowDateTime;
@@ -876,8 +858,6 @@ TimeInMillis GetTimeInMillis() {
// MSVC 8 deprecates _ftime64(), so we want to suppress warning 4996
// (deprecated function) there.
- // FIXME: Use GetTickCount()? Or use
- // SystemTimeToFileTime()
GTEST_DISABLE_MSC_DEPRECATED_PUSH_()
_ftime64(&now);
GTEST_DISABLE_MSC_DEPRECATED_POP_()
@@ -930,7 +910,7 @@ const char* String::Utf16ToAnsi(LPCWSTR utf16_str) {
#endif // GTEST_OS_WINDOWS_MOBILE
-// Compares two C strings. Returns true iff they have the same content.
+// Compares two C strings. Returns true if they have the same content.
//
// Unlike strcmp(), this function can handle NULL argument(s). A NULL
// C string is considered different to any non-NULL C string,
@@ -943,7 +923,7 @@ bool String::CStringEquals(const char * lhs, const char * rhs) {
return strcmp(lhs, rhs) == 0;
}
-#if GTEST_HAS_STD_WSTRING || GTEST_HAS_GLOBAL_WSTRING
+#if GTEST_HAS_STD_WSTRING
// Converts an array of wide chars to a narrow string using the UTF-8
// encoding, and streams the result to the given Message object.
@@ -961,7 +941,7 @@ static void StreamWideCharsToMessage(const wchar_t* wstr, size_t length,
}
}
-#endif // GTEST_HAS_STD_WSTRING || GTEST_HAS_GLOBAL_WSTRING
+#endif // GTEST_HAS_STD_WSTRING
void SplitString(const ::std::string& str, char delimiter,
::std::vector< ::std::string>* dest) {
@@ -1011,15 +991,6 @@ Message& Message::operator <<(const ::std::wstring& wstr) {
}
#endif // GTEST_HAS_STD_WSTRING
-#if GTEST_HAS_GLOBAL_WSTRING
-// Converts the given wide string to a narrow string using the UTF-8
-// encoding, and streams the result to this Message object.
-Message& Message::operator <<(const ::wstring& wstr) {
- internal::StreamWideCharsToMessage(wstr.c_str(), wstr.length(), this);
- return *this;
-}
-#endif // GTEST_HAS_GLOBAL_WSTRING
-
// Gets the text streamed to this object so far as an std::string.
// Each '\0' character in the buffer is replaced with "\\0".
std::string Message::GetString() const {
@@ -1270,9 +1241,10 @@ std::string CreateUnifiedDiff(const std::vector<std::string>& left,
for (; edit_i < edits.size(); ++edit_i) {
if (n_suffix >= context) {
// Continue only if the next hunk is very close.
- std::vector<EditType>::const_iterator it = edits.begin() + edit_i;
+ auto it = edits.begin() + static_cast<int>(edit_i);
while (it != edits.end() && *it == kMatch) ++it;
- if (it == edits.end() || (it - edits.begin()) - edit_i >= context) {
+ if (it == edits.end() ||
+ static_cast<size_t>(it - edits.begin()) - edit_i >= context) {
// There is no next edit or it is too far away.
break;
}
@@ -1348,7 +1320,7 @@ std::vector<std::string> SplitEscapedString(const std::string& str) {
// lhs_value: "5"
// rhs_value: "6"
//
-// The ignoring_case parameter is true iff the assertion is a
+// The ignoring_case parameter is true if the assertion is a
// *_STRCASEEQ*. When it's true, the string "Ignoring case" will
// be inserted into the message.
AssertionResult EqFailure(const char* lhs_expression,
@@ -1411,8 +1383,6 @@ AssertionResult DoubleNearPredFormat(const char* expr1,
const double diff = fabs(val1 - val2);
if (diff <= abs_error) return AssertionSuccess();
- // FIXME: do not print the value of an expression if it's
- // already a literal.
return AssertionFailure()
<< "The difference between " << expr1 << " and " << expr2
<< " is " << diff << ", which exceeds " << abs_error_expr << ", where\n"
@@ -1593,7 +1563,7 @@ namespace {
// Helper functions for implementing IsSubString() and IsNotSubstring().
-// This group of overloaded functions return true iff needle is a
+// This group of overloaded functions return true if needle is a
// substring of haystack. NULL is considered a substring of itself
// only.
@@ -1721,7 +1691,7 @@ AssertionResult HRESULTFailureHelper(const char* expr,
char error_text[kBufSize] = { '\0' };
DWORD message_length = ::FormatMessageA(kFlags,
0, // no source, we're asking system
- hr, // the error
+ static_cast<DWORD>(hr), // the error
0, // no line width restrictions
error_text, // output buffer
kBufSize, // buf size
@@ -1799,7 +1769,7 @@ inline UInt32 ChopLowBits(UInt32* bits, int n) {
// to "(Invalid Unicode 0xXXXXXXXX)".
std::string CodePointToUtf8(UInt32 code_point) {
if (code_point > kMaxCodePoint4) {
- return "(Invalid Unicode 0x" + String::FormatHexInt(code_point) + ")";
+ return "(Invalid Unicode 0x" + String::FormatHexUInt32(code_point) + ")";
}
char str[5]; // Big enough for the largest valid code point.
@@ -1827,7 +1797,7 @@ std::string CodePointToUtf8(UInt32 code_point) {
// The following two functions only make sense if the system
// uses UTF-16 for wide string encoding. All supported systems
-// with 16 bit wchar_t (Windows, Cygwin, Symbian OS) do use UTF-16.
+// with 16 bit wchar_t (Windows, Cygwin) do use UTF-16.
// Determines if the arguments constitute UTF-16 surrogate pair
// and thus should be combined into a single Unicode code point
@@ -1840,17 +1810,20 @@ inline bool IsUtf16SurrogatePair(wchar_t first, wchar_t second) {
// Creates a Unicode code point from UTF16 surrogate pair.
inline UInt32 CreateCodePointFromUtf16SurrogatePair(wchar_t first,
wchar_t second) {
+ const auto first_u = static_cast<UInt32>(first);
+ const auto second_u = static_cast<UInt32>(second);
const UInt32 mask = (1 << 10) - 1;
- return (sizeof(wchar_t) == 2) ?
- (((first & mask) << 10) | (second & mask)) + 0x10000 :
- // This function should not be called when the condition is
- // false, but we provide a sensible default in case it is.
- static_cast<UInt32>(first);
+ return (sizeof(wchar_t) == 2)
+ ? (((first_u & mask) << 10) | (second_u & mask)) + 0x10000
+ :
+ // This function should not be called when the condition is
+ // false, but we provide a sensible default in case it is.
+ first_u;
}
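A worked example of the surrogate-pair arithmetic above, reusing the same mask-and-shift formula in a standalone sketch:

    #include <cstdint>
    #include <cstdio>

    // Combines a UTF-16 high/low surrogate pair into one Unicode code point,
    // using the same formula as CreateCodePointFromUtf16SurrogatePair above.
    static std::uint32_t CombineSurrogates(std::uint16_t high, std::uint16_t low) {
      const std::uint32_t mask = (1u << 10) - 1;  // keep the low 10 bits of each
      return (((high & mask) << 10) | (low & mask)) + 0x10000;
    }

    int main() {
      // U+1F600 (a grinning-face emoji) is encoded in UTF-16 as 0xD83D 0xDE00.
      std::printf("U+%X\n",
                  static_cast<unsigned>(CombineSurrogates(0xD83D, 0xDE00)));
    }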
// Converts a wide string to a narrow string in UTF-8 encoding.
// The wide string is assumed to have the following encoding:
-// UTF-16 if sizeof(wchar_t) == 2 (on Windows, Cygwin, Symbian OS)
+// UTF-16 if sizeof(wchar_t) == 2 (on Windows, Cygwin)
// UTF-32 if sizeof(wchar_t) == 4 (on Linux)
// Parameter str points to a null-terminated wide string.
// Parameter num_chars may additionally limit the number
@@ -1892,7 +1865,7 @@ std::string String::ShowWideCString(const wchar_t * wide_c_str) {
return internal::WideStringToUtf8(wide_c_str, -1);
}
-// Compares two wide C strings. Returns true iff they have the same
+// Compares two wide C strings. Returns true if they have the same
// content.
//
// Unlike wcscmp(), this function can handle NULL argument(s). A NULL
@@ -1937,7 +1910,7 @@ AssertionResult CmpHelperSTRNE(const char* s1_expression,
<< " vs " << PrintToString(s2);
}
-// Compares two C strings, ignoring case. Returns true iff they have
+// Compares two C strings, ignoring case. Returns true if they have
// the same content.
//
// Unlike strcasecmp(), this function can handle NULL argument(s). A
@@ -1949,7 +1922,7 @@ bool String::CaseInsensitiveCStringEquals(const char * lhs, const char * rhs) {
return posix::StrCaseCmp(lhs, rhs) == 0;
}
- // Compares two wide C strings, ignoring case. Returns true iff they
+ // Compares two wide C strings, ignoring case. Returns true if they
// have the same content.
//
// Unlike wcscasecmp(), this function can handle NULL argument(s).
@@ -1976,14 +1949,14 @@ bool String::CaseInsensitiveWideCStringEquals(const wchar_t* lhs,
// Other unknown OSes may not define it either.
wint_t left, right;
do {
- left = towlower(*lhs++);
- right = towlower(*rhs++);
+ left = towlower(static_cast<wint_t>(*lhs++));
+ right = towlower(static_cast<wint_t>(*rhs++));
} while (left && left == right);
return left == right;
#endif // OS selector
}
-// Returns true iff str ends with the given suffix, ignoring case.
+// Returns true if str ends with the given suffix, ignoring case.
// Any string is considered to end with an empty suffix.
bool String::EndsWithCaseInsensitive(
const std::string& str, const std::string& suffix) {
@@ -2002,12 +1975,17 @@ std::string String::FormatIntWidth2(int value) {
}
// Formats an int value as "%X".
-std::string String::FormatHexInt(int value) {
+std::string String::FormatHexUInt32(UInt32 value) {
std::stringstream ss;
ss << std::hex << std::uppercase << value;
return ss.str();
}
+// Formats an int value as "%X".
+std::string String::FormatHexInt(int value) {
+ return FormatHexUInt32(static_cast<UInt32>(value));
+}
+
// Formats a byte as "%02X".
std::string String::FormatByte(unsigned char value) {
std::stringstream ss;
@@ -2024,7 +2002,7 @@ std::string StringStreamToString(::std::stringstream* ss) {
const char* const end = start + str.length();
std::string result;
- result.reserve(2 * (end - start));
+ result.reserve(static_cast<size_t>(2 * (end - start)));
for (const char* ch = start; ch != end; ++ch) {
if (*ch == '\0') {
result += "\\0"; // Replaces NUL with "\\0";
@@ -2054,9 +2032,7 @@ std::string AppendUserMessage(const std::string& gtest_msg,
// Creates an empty TestResult.
TestResult::TestResult()
- : death_test_count_(0),
- elapsed_time_(0) {
-}
+ : death_test_count_(0), start_timestamp_(0), elapsed_time_(0) {}
// D'tor.
TestResult::~TestResult() {
@@ -2068,7 +2044,7 @@ TestResult::~TestResult() {
const TestPartResult& TestResult::GetTestPartResult(int i) const {
if (i < 0 || i >= total_part_count())
internal::posix::Abort();
- return test_part_results_.at(i);
+ return test_part_results_.at(static_cast<size_t>(i));
}
// Returns the i-th test property. i can range from 0 to
@@ -2077,7 +2053,7 @@ const TestPartResult& TestResult::GetTestPartResult(int i) const {
const TestProperty& TestResult::GetTestProperty(int i) const {
if (i < 0 || i >= test_property_count())
internal::posix::Abort();
- return test_properties_.at(i);
+ return test_properties_.at(static_cast<size_t>(i));
}
// Clears the test part results.
@@ -2125,18 +2101,18 @@ static const char* const kReservedTestSuitesAttributes[] = {
// The list of reserved attributes used in the <testsuite> element of XML
// output.
static const char* const kReservedTestSuiteAttributes[] = {
- "disabled",
- "errors",
- "failures",
- "name",
- "tests",
- "time"
-};
+ "disabled", "errors", "failures", "name", "tests", "time", "timestamp"};
// The list of reserved attributes used in the <testcase> element of XML output.
static const char* const kReservedTestCaseAttributes[] = {
- "classname", "name", "status", "time",
- "type_param", "value_param", "file", "line"};
+ "classname", "name", "status", "time", "type_param",
+ "value_param", "file", "line"};
+
+// Use a slightly different set for allowed output to ensure existing tests can
+// still call RecordProperty("result") or RecordProperty("timestamp").
+static const char* const kReservedOutputTestCaseAttributes[] = {
+ "classname", "name", "status", "time", "type_param",
+ "value_param", "file", "line", "result", "timestamp"};
template <int kSize>
std::vector<std::string> ArrayAsVector(const char* const (&array)[kSize]) {
@@ -2158,6 +2134,22 @@ static std::vector<std::string> GetReservedAttributesForElement(
return std::vector<std::string>();
}
+// TODO(jdesprez): Merge the two GetReserved*AttributesForElement helpers once skip reporting is improved
+static std::vector<std::string> GetReservedOutputAttributesForElement(
+ const std::string& xml_element) {
+ if (xml_element == "testsuites") {
+ return ArrayAsVector(kReservedTestSuitesAttributes);
+ } else if (xml_element == "testsuite") {
+ return ArrayAsVector(kReservedTestSuiteAttributes);
+ } else if (xml_element == "testcase") {
+ return ArrayAsVector(kReservedOutputTestCaseAttributes);
+ } else {
+ GTEST_CHECK_(false) << "Unrecognized xml_element provided: " << xml_element;
+ }
+  // This code is unreachable but some compilers may not realize that.
+ return std::vector<std::string>();
+}
+
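The reserved sets above gate both RecordProperty() keys and the attributes the XML/JSON printers emit themselves; a minimal RecordProperty() usage sketch (test and key names invented, link against gtest_main):

    #include "gtest/gtest.h"

    TEST(WidgetTest, RecordsCustomProperty) {
      // Custom keys are emitted as extra attributes on this test's <testcase>
      // element in the XML/JSON report.
      RecordProperty("MaximumWidgets", 12);
      // Reserved keys such as "name" or "status" would instead trigger a
      // non-fatal failure when recorded from a test.
    }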
static std::string FormatWordList(const std::vector<std::string>& words) {
Message word_list;
for (size_t i = 0; i < words.size(); ++i) {
@@ -2206,12 +2198,12 @@ static bool TestPartSkipped(const TestPartResult& result) {
return result.skipped();
}
-// Returns true iff the test was skipped.
+// Returns true if the test was skipped.
bool TestResult::Skipped() const {
return !Failed() && CountIf(test_part_results_, TestPartSkipped) > 0;
}
-// Returns true iff the test failed.
+// Returns true if the test failed.
bool TestResult::Failed() const {
for (int i = 0; i < total_part_count(); ++i) {
if (GetTestPartResult(i).failed())
@@ -2220,22 +2212,22 @@ bool TestResult::Failed() const {
return false;
}
-// Returns true iff the test part fatally failed.
+// Returns true if the test part fatally failed.
static bool TestPartFatallyFailed(const TestPartResult& result) {
return result.fatally_failed();
}
-// Returns true iff the test fatally failed.
+// Returns true if the test fatally failed.
bool TestResult::HasFatalFailure() const {
return CountIf(test_part_results_, TestPartFatallyFailed) > 0;
}
-// Returns true iff the test part non-fatally failed.
+// Returns true if the test part non-fatally failed.
static bool TestPartNonfatallyFailed(const TestPartResult& result) {
return result.nonfatally_failed();
}
-// Returns true iff the test has a non-fatal failure.
+// Returns true if the test has a non-fatal failure.
bool TestResult::HasNonfatalFailure() const {
return CountIf(test_part_results_, TestPartNonfatallyFailed) > 0;
}
@@ -2306,17 +2298,17 @@ void ReportFailureInUnknownLocation(TestPartResult::Type result_type,
} // namespace internal
-// Google Test requires all tests in the same test case to use the same test
+// Google Test requires all tests in the same test suite to use the same test
// fixture class. This function checks if the current test has the
-// same fixture class as the first test in the current test case. If
+// same fixture class as the first test in the current test suite. If
// yes, it returns true; otherwise it generates a Google Test failure and
// returns false.
bool Test::HasSameFixtureClass() {
internal::UnitTestImpl* const impl = internal::GetUnitTestImpl();
- const TestCase* const test_case = impl->current_test_case();
+ const TestSuite* const test_suite = impl->current_test_suite();
- // Info about the first test in the current test case.
- const TestInfo* const first_test_info = test_case->test_info_list()[0];
+ // Info about the first test in the current test suite.
+ const TestInfo* const first_test_info = test_suite->test_info_list()[0];
const internal::TypeId first_fixture_id = first_test_info->fixture_class_id_;
const char* const first_test_name = first_test_info->name();
@@ -2332,7 +2324,7 @@ bool Test::HasSameFixtureClass() {
const bool this_is_TEST = this_fixture_id == internal::GetTestTypeId();
if (first_is_TEST || this_is_TEST) {
- // Both TEST and TEST_F appear in same test case, which is incorrect.
+ // Both TEST and TEST_F appear in same test suite, which is incorrect.
// Tell the user how to fix this.
// Gets the name of the TEST and the name of the TEST_F. Note
@@ -2344,9 +2336,9 @@ bool Test::HasSameFixtureClass() {
first_is_TEST ? this_test_name : first_test_name;
ADD_FAILURE()
- << "All tests in the same test case must use the same test fixture\n"
- << "class, so mixing TEST_F and TEST in the same test case is\n"
- << "illegal. In test case " << this_test_info->test_case_name()
+ << "All tests in the same test suite must use the same test fixture\n"
+ << "class, so mixing TEST_F and TEST in the same test suite is\n"
+ << "illegal. In test suite " << this_test_info->test_suite_name()
<< ",\n"
<< "test " << TEST_F_name << " is defined using TEST_F but\n"
<< "test " << TEST_name << " is defined using TEST. You probably\n"
@@ -2356,15 +2348,15 @@ bool Test::HasSameFixtureClass() {
// Two fixture classes with the same name appear in two different
// namespaces, which is not allowed. Tell the user how to fix this.
ADD_FAILURE()
- << "All tests in the same test case must use the same test fixture\n"
- << "class. However, in test case "
- << this_test_info->test_case_name() << ",\n"
- << "you defined test " << first_test_name
- << " and test " << this_test_name << "\n"
+ << "All tests in the same test suite must use the same test fixture\n"
+ << "class. However, in test suite "
+ << this_test_info->test_suite_name() << ",\n"
+ << "you defined test " << first_test_name << " and test "
+ << this_test_name << "\n"
<< "using two different test fixture classes. This can happen if\n"
<< "the two classes are from different namespaces or translation\n"
<< "units and have the same name. You should probably rename one\n"
- << "of the classes to put the tests into different test cases.";
+ << "of the classes to put the tests into different test suites.";
}
return false;
}
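A minimal sketch of the mistake this check diagnoses (suite and test names invented): both macros compile, but the run fails with the message above because TEST and TEST_F place their tests in one suite with two different fixture classes.

    #include "gtest/gtest.h"

    class CoolTest : public ::testing::Test {};

    // Uses the CoolTest fixture.
    TEST_F(CoolTest, UsesFixture) { SUCCEED(); }

    // Same suite name, but TEST generates its own implicit fixture class, so
    // HasSameFixtureClass() reports a failure at run time.
    TEST(CoolTest, NoFixture) { SUCCEED(); }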
@@ -2531,18 +2523,18 @@ void Test::Run() {
this, &Test::TearDown, "TearDown()");
}
-// Returns true iff the current test has a fatal failure.
+// Returns true if the current test has a fatal failure.
bool Test::HasFatalFailure() {
return internal::GetUnitTestImpl()->current_test_result()->HasFatalFailure();
}
-// Returns true iff the current test has a non-fatal failure.
+// Returns true if the current test has a non-fatal failure.
bool Test::HasNonfatalFailure() {
return internal::GetUnitTestImpl()->current_test_result()->
HasNonfatalFailure();
}
-// Returns true iff the current test was skipped.
+// Returns true if the current test was skipped.
bool Test::IsSkipped() {
return internal::GetUnitTestImpl()->current_test_result()->Skipped();
}
@@ -2551,13 +2543,13 @@ bool Test::IsSkipped() {
// Constructs a TestInfo object. It assumes ownership of the test factory
// object.
-TestInfo::TestInfo(const std::string& a_test_case_name,
+TestInfo::TestInfo(const std::string& a_test_suite_name,
const std::string& a_name, const char* a_type_param,
const char* a_value_param,
internal::CodeLocation a_code_location,
internal::TypeId fixture_class_id,
internal::TestFactoryBase* factory)
- : test_case_name_(a_test_case_name),
+ : test_suite_name_(a_test_suite_name),
name_(a_name),
type_param_(a_type_param ? new std::string(a_type_param) : nullptr),
value_param_(a_value_param ? new std::string(a_value_param) : nullptr),
@@ -2579,7 +2571,7 @@ namespace internal {
//
// Arguments:
//
-// test_case_name: name of the test case
+// test_suite_name: name of the test suite
// name: name of the test
// type_param: the name of the test's type parameter, or NULL if
// this is not a typed or a type-parameterized test.
@@ -2587,40 +2579,35 @@ namespace internal {
// or NULL if this is not a value-parameterized test.
// code_location: code location where the test is defined
// fixture_class_id: ID of the test fixture class
-// set_up_tc: pointer to the function that sets up the test case
-// tear_down_tc: pointer to the function that tears down the test case
+// set_up_tc: pointer to the function that sets up the test suite
+// tear_down_tc: pointer to the function that tears down the test suite
// factory: pointer to the factory that creates a test object.
// The newly created TestInfo instance will assume
// ownership of the factory object.
TestInfo* MakeAndRegisterTestInfo(
- const char* test_case_name,
- const char* name,
- const char* type_param,
- const char* value_param,
- CodeLocation code_location,
- TypeId fixture_class_id,
- SetUpTestCaseFunc set_up_tc,
- TearDownTestCaseFunc tear_down_tc,
- TestFactoryBase* factory) {
+ const char* test_suite_name, const char* name, const char* type_param,
+ const char* value_param, CodeLocation code_location,
+ TypeId fixture_class_id, SetUpTestSuiteFunc set_up_tc,
+ TearDownTestSuiteFunc tear_down_tc, TestFactoryBase* factory) {
TestInfo* const test_info =
- new TestInfo(test_case_name, name, type_param, value_param,
+ new TestInfo(test_suite_name, name, type_param, value_param,
code_location, fixture_class_id, factory);
GetUnitTestImpl()->AddTestInfo(set_up_tc, tear_down_tc, test_info);
return test_info;
}
-void ReportInvalidTestCaseType(const char* test_case_name,
- CodeLocation code_location) {
+void ReportInvalidTestSuiteType(const char* test_suite_name,
+ CodeLocation code_location) {
Message errors;
errors
- << "Attempted redefinition of test case " << test_case_name << ".\n"
- << "All tests in the same test case must use the same test fixture\n"
- << "class. However, in test case " << test_case_name << ", you tried\n"
+ << "Attempted redefinition of test suite " << test_suite_name << ".\n"
+ << "All tests in the same test suite must use the same test fixture\n"
+ << "class. However, in test suite " << test_suite_name << ", you tried\n"
<< "to define a test using a fixture class different from the one\n"
<< "used earlier. This can happen if the two fixture classes are\n"
<< "from different namespaces and have the same name. You should\n"
<< "probably rename one of the classes to put the tests into different\n"
- << "test cases.";
+ << "test suites.";
GTEST_LOG_(ERROR) << FormatFileLocation(code_location.file.c_str(),
code_location.line)
@@ -2633,7 +2620,7 @@ namespace {
// A predicate that checks the test name of a TestInfo against a known
// value.
//
-// This is used for implementation of the TestCase class only. We put
+// This is used for implementation of the TestSuite class only. We put
// it in the anonymous namespace to prevent polluting the outer
// namespace.
//
@@ -2646,7 +2633,7 @@ class TestNameIs {
explicit TestNameIs(const char* name)
: name_(name) {}
- // Returns true iff the test name of test_info matches name_.
+ // Returns true if the test name of test_info matches name_.
bool operator()(const TestInfo * test_info) const {
return test_info && test_info->name() == name_;
}
@@ -2660,7 +2647,7 @@ class TestNameIs {
namespace internal {
// This method expands all parameterized tests registered with macros TEST_P
-// and INSTANTIATE_TEST_CASE_P into regular tests and registers those.
+// and INSTANTIATE_TEST_SUITE_P into regular tests and registers those.
// This will be done just once during the program runtime.
void UnitTestImpl::RegisterParameterizedTests() {
if (!parameterized_tests_registered_) {
@@ -2703,11 +2690,14 @@ void TestInfo::Run() {
test->Run();
}
+ if (test != nullptr) {
// Deletes the test object.
impl->os_stack_trace_getter()->UponLeavingGTest();
internal::HandleExceptionsInMethodIfSupported(
test, &Test::DeleteSelf_, "the test fixture's destructor");
+ }
+ result_.set_start_timestamp(start);
result_.set_elapsed_time(internal::GetTimeInMillis() - start);
// Notifies the unit test event listener that a test has just finished.
@@ -2718,135 +2708,148 @@ void TestInfo::Run() {
impl->set_current_test_info(nullptr);
}
-// class TestCase
+// class TestSuite
-// Gets the number of successful tests in this test case.
-int TestCase::successful_test_count() const {
+// Gets the number of successful tests in this test suite.
+int TestSuite::successful_test_count() const {
return CountIf(test_info_list_, TestPassed);
}
-// Gets the number of successful tests in this test case.
-int TestCase::skipped_test_count() const {
+// Gets the number of successful tests in this test suite.
+int TestSuite::skipped_test_count() const {
return CountIf(test_info_list_, TestSkipped);
}
-// Gets the number of failed tests in this test case.
-int TestCase::failed_test_count() const {
+// Gets the number of failed tests in this test suite.
+int TestSuite::failed_test_count() const {
return CountIf(test_info_list_, TestFailed);
}
// Gets the number of disabled tests that will be reported in the XML report.
-int TestCase::reportable_disabled_test_count() const {
+int TestSuite::reportable_disabled_test_count() const {
return CountIf(test_info_list_, TestReportableDisabled);
}
-// Gets the number of disabled tests in this test case.
-int TestCase::disabled_test_count() const {
+// Gets the number of disabled tests in this test suite.
+int TestSuite::disabled_test_count() const {
return CountIf(test_info_list_, TestDisabled);
}
// Gets the number of tests to be printed in the XML report.
-int TestCase::reportable_test_count() const {
+int TestSuite::reportable_test_count() const {
return CountIf(test_info_list_, TestReportable);
}
-// Get the number of tests in this test case that should run.
-int TestCase::test_to_run_count() const {
+// Get the number of tests in this test suite that should run.
+int TestSuite::test_to_run_count() const {
return CountIf(test_info_list_, ShouldRunTest);
}
// Gets the number of all tests.
-int TestCase::total_test_count() const {
+int TestSuite::total_test_count() const {
return static_cast<int>(test_info_list_.size());
}
-// Creates a TestCase with the given name.
+// Creates a TestSuite with the given name.
//
// Arguments:
//
-// name: name of the test case
-// a_type_param: the name of the test case's type parameter, or NULL if
-// this is not a typed or a type-parameterized test case.
-// set_up_tc: pointer to the function that sets up the test case
-// tear_down_tc: pointer to the function that tears down the test case
-TestCase::TestCase(const char* a_name, const char* a_type_param,
- Test::SetUpTestCaseFunc set_up_tc,
- Test::TearDownTestCaseFunc tear_down_tc)
+// name: name of the test suite
+// a_type_param: the name of the test suite's type parameter, or NULL if
+// this is not a typed or a type-parameterized test suite.
+// set_up_tc: pointer to the function that sets up the test suite
+// tear_down_tc: pointer to the function that tears down the test suite
+TestSuite::TestSuite(const char* a_name, const char* a_type_param,
+ internal::SetUpTestSuiteFunc set_up_tc,
+ internal::TearDownTestSuiteFunc tear_down_tc)
: name_(a_name),
type_param_(a_type_param ? new std::string(a_type_param) : nullptr),
set_up_tc_(set_up_tc),
tear_down_tc_(tear_down_tc),
should_run_(false),
+ start_timestamp_(0),
elapsed_time_(0) {}
-// Destructor of TestCase.
-TestCase::~TestCase() {
+// Destructor of TestSuite.
+TestSuite::~TestSuite() {
// Deletes every Test in the collection.
ForEach(test_info_list_, internal::Delete<TestInfo>);
}
// Returns the i-th test among all the tests. i can range from 0 to
// total_test_count() - 1. If i is not in that range, returns NULL.
-const TestInfo* TestCase::GetTestInfo(int i) const {
+const TestInfo* TestSuite::GetTestInfo(int i) const {
const int index = GetElementOr(test_indices_, i, -1);
- return index < 0 ? nullptr : test_info_list_[index];
+ return index < 0 ? nullptr : test_info_list_[static_cast<size_t>(index)];
}
// Returns the i-th test among all the tests. i can range from 0 to
// total_test_count() - 1. If i is not in that range, returns NULL.
-TestInfo* TestCase::GetMutableTestInfo(int i) {
+TestInfo* TestSuite::GetMutableTestInfo(int i) {
const int index = GetElementOr(test_indices_, i, -1);
- return index < 0 ? nullptr : test_info_list_[index];
+ return index < 0 ? nullptr : test_info_list_[static_cast<size_t>(index)];
}
-// Adds a test to this test case. Will delete the test upon
-// destruction of the TestCase object.
-void TestCase::AddTestInfo(TestInfo * test_info) {
+// Adds a test to this test suite. Will delete the test upon
+// destruction of the TestSuite object.
+void TestSuite::AddTestInfo(TestInfo* test_info) {
test_info_list_.push_back(test_info);
test_indices_.push_back(static_cast<int>(test_indices_.size()));
}
-// Runs every test in this TestCase.
-void TestCase::Run() {
+// Runs every test in this TestSuite.
+void TestSuite::Run() {
if (!should_run_) return;
internal::UnitTestImpl* const impl = internal::GetUnitTestImpl();
- impl->set_current_test_case(this);
+ impl->set_current_test_suite(this);
TestEventListener* repeater = UnitTest::GetInstance()->listeners().repeater();
+ // Call both legacy and the new API
+ repeater->OnTestSuiteStart(*this);
+// Legacy API is deprecated but still available
+#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI
repeater->OnTestCaseStart(*this);
+#endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI
+
impl->os_stack_trace_getter()->UponLeavingGTest();
internal::HandleExceptionsInMethodIfSupported(
- this, &TestCase::RunSetUpTestCase, "SetUpTestCase()");
+ this, &TestSuite::RunSetUpTestSuite, "SetUpTestSuite()");
- const internal::TimeInMillis start = internal::GetTimeInMillis();
+ start_timestamp_ = internal::GetTimeInMillis();
for (int i = 0; i < total_test_count(); i++) {
GetMutableTestInfo(i)->Run();
}
- elapsed_time_ = internal::GetTimeInMillis() - start;
+ elapsed_time_ = internal::GetTimeInMillis() - start_timestamp_;
impl->os_stack_trace_getter()->UponLeavingGTest();
internal::HandleExceptionsInMethodIfSupported(
- this, &TestCase::RunTearDownTestCase, "TearDownTestCase()");
+ this, &TestSuite::RunTearDownTestSuite, "TearDownTestSuite()");
+ // Call both legacy and the new API
+ repeater->OnTestSuiteEnd(*this);
+// Legacy API is deprecated but still available
+#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI
repeater->OnTestCaseEnd(*this);
- impl->set_current_test_case(nullptr);
+#endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI
+
+ impl->set_current_test_suite(nullptr);
}
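Since TestSuite::Run now fires both the new TestSuite hooks and the legacy TestCase ones, a custom listener can target the new callbacks; a minimal sketch (listener name invented):

    #include <cstdio>
    #include "gtest/gtest.h"

    // Logs suite boundaries using the new TestSuite callbacks.
    class SuiteLogger : public ::testing::EmptyTestEventListener {
      void OnTestSuiteStart(const ::testing::TestSuite& suite) override {
        std::printf("[suite] %s starting\n", suite.name());
      }
      void OnTestSuiteEnd(const ::testing::TestSuite& suite) override {
        std::printf("[suite] %s finished in %lld ms\n", suite.name(),
                    static_cast<long long>(suite.elapsed_time()));
      }
    };

    int main(int argc, char** argv) {
      ::testing::InitGoogleTest(&argc, argv);
      // The listener list takes ownership of the appended listener.
      ::testing::UnitTest::GetInstance()->listeners().Append(new SuiteLogger);
      return RUN_ALL_TESTS();
    }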
-// Clears the results of all tests in this test case.
-void TestCase::ClearResult() {
+// Clears the results of all tests in this test suite.
+void TestSuite::ClearResult() {
ad_hoc_test_result_.Clear();
ForEach(test_info_list_, TestInfo::ClearTestResult);
}
-// Shuffles the tests in this test case.
-void TestCase::ShuffleTests(internal::Random* random) {
+// Shuffles the tests in this test suite.
+void TestSuite::ShuffleTests(internal::Random* random) {
Shuffle(random, &test_indices_);
}
// Restores the test order to before the first shuffle.
-void TestCase::UnshuffleTests() {
+void TestSuite::UnshuffleTests() {
for (size_t i = 0; i < test_indices_.size(); i++) {
test_indices_[i] = static_cast<int>(i);
}
@@ -2869,9 +2872,9 @@ static std::string FormatTestCount(int test_count) {
return FormatCountableNoun(test_count, "test", "tests");
}
-// Formats the count of test cases.
-static std::string FormatTestCaseCount(int test_case_count) {
- return FormatCountableNoun(test_case_count, "test case", "test cases");
+// Formats the count of test suites.
+static std::string FormatTestSuiteCount(int test_suite_count) {
+ return FormatCountableNoun(test_suite_count, "test suite", "test suites");
}
// Converts a TestPartResult::Type enum to human-friendly string
@@ -2984,12 +2987,12 @@ static const char* GetAnsiColorCode(GTestColor color) {
case COLOR_YELLOW: return "3";
default:
return nullptr;
- };
+ }
}
#endif // GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MOBILE
-// Returns true iff Google Test should use colors in the output.
+// Returns true if Google Test should use colors in the output.
bool ShouldUseColor(bool stdout_is_tty) {
const char* const gtest_color = GTEST_FLAG(color).c_str();
@@ -3034,15 +3037,14 @@ void ColoredPrintf(GTestColor color, const char* fmt, ...) {
va_list args;
va_start(args, fmt);
-#if GTEST_OS_WINDOWS_MOBILE || GTEST_OS_SYMBIAN || GTEST_OS_ZOS || \
- GTEST_OS_IOS || GTEST_OS_WINDOWS_PHONE || GTEST_OS_WINDOWS_RT
+#if GTEST_OS_WINDOWS_MOBILE || GTEST_OS_ZOS || GTEST_OS_IOS || \
+ GTEST_OS_WINDOWS_PHONE || GTEST_OS_WINDOWS_RT
const bool use_color = AlwaysFalse();
#else
static const bool in_color_mode =
ShouldUseColor(posix::IsATTY(posix::FileNo(stdout)) != 0);
const bool use_color = in_color_mode && (color != COLOR_DEFAULT);
-#endif // GTEST_OS_WINDOWS_MOBILE || GTEST_OS_SYMBIAN || GTEST_OS_ZOS
- // The '!= 0' comparison is necessary to satisfy MSVC 7.1.
+#endif // GTEST_OS_WINDOWS_MOBILE || GTEST_OS_ZOS
if (!use_color) {
vprintf(fmt, args);
@@ -3106,8 +3108,8 @@ static void PrintFullTestCommentIfPresent(const TestInfo& test_info) {
class PrettyUnitTestResultPrinter : public TestEventListener {
public:
PrettyUnitTestResultPrinter() {}
- static void PrintTestName(const char * test_case, const char * test) {
- printf("%s.%s", test_case, test);
+ static void PrintTestName(const char* test_suite, const char* test) {
+ printf("%s.%s", test_suite, test);
}
// The following methods override what's in the TestEventListener class.
@@ -3115,11 +3117,22 @@ class PrettyUnitTestResultPrinter : public TestEventListener {
void OnTestIterationStart(const UnitTest& unit_test, int iteration) override;
void OnEnvironmentsSetUpStart(const UnitTest& unit_test) override;
void OnEnvironmentsSetUpEnd(const UnitTest& /*unit_test*/) override {}
+#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_
void OnTestCaseStart(const TestCase& test_case) override;
+#else
+ void OnTestSuiteStart(const TestSuite& test_suite) override;
+#endif // OnTestCaseStart
+
void OnTestStart(const TestInfo& test_info) override;
+
void OnTestPartResult(const TestPartResult& result) override;
void OnTestEnd(const TestInfo& test_info) override;
+#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_
void OnTestCaseEnd(const TestCase& test_case) override;
+#else
+ void OnTestSuiteEnd(const TestSuite& test_suite) override;
+#endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_
+
void OnEnvironmentsTearDownStart(const UnitTest& unit_test) override;
void OnEnvironmentsTearDownEnd(const UnitTest& /*unit_test*/) override {}
void OnTestIterationEnd(const UnitTest& unit_test, int iteration) override;
@@ -3162,7 +3175,7 @@ void PrettyUnitTestResultPrinter::OnTestIterationStart(
ColoredPrintf(COLOR_GREEN, "[==========] ");
printf("Running %s from %s.\n",
FormatTestCount(unit_test.test_to_run_count()).c_str(),
- FormatTestCaseCount(unit_test.test_case_to_run_count()).c_str());
+ FormatTestSuiteCount(unit_test.test_suite_to_run_count()).c_str());
fflush(stdout);
}
@@ -3173,6 +3186,7 @@ void PrettyUnitTestResultPrinter::OnEnvironmentsSetUpStart(
fflush(stdout);
}
+#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_
void PrettyUnitTestResultPrinter::OnTestCaseStart(const TestCase& test_case) {
const std::string counts =
FormatCountableNoun(test_case.test_to_run_count(), "test", "tests");
@@ -3185,10 +3199,25 @@ void PrettyUnitTestResultPrinter::OnTestCaseStart(const TestCase& test_case) {
}
fflush(stdout);
}
+#else
+void PrettyUnitTestResultPrinter::OnTestSuiteStart(
+ const TestSuite& test_suite) {
+ const std::string counts =
+ FormatCountableNoun(test_suite.test_to_run_count(), "test", "tests");
+ ColoredPrintf(COLOR_GREEN, "[----------] ");
+ printf("%s from %s", counts.c_str(), test_suite.name());
+ if (test_suite.type_param() == nullptr) {
+ printf("\n");
+ } else {
+ printf(", where %s = %s\n", kTypeParamLabel, test_suite.type_param());
+ }
+ fflush(stdout);
+}
+#endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_
void PrettyUnitTestResultPrinter::OnTestStart(const TestInfo& test_info) {
ColoredPrintf(COLOR_GREEN, "[ RUN ] ");
- PrintTestName(test_info.test_case_name(), test_info.name());
+ PrintTestName(test_info.test_suite_name(), test_info.name());
printf("\n");
fflush(stdout);
}
@@ -3218,7 +3247,7 @@ void PrettyUnitTestResultPrinter::OnTestEnd(const TestInfo& test_info) {
} else {
ColoredPrintf(COLOR_RED, "[ FAILED ] ");
}
- PrintTestName(test_info.test_case_name(), test_info.name());
+ PrintTestName(test_info.test_suite_name(), test_info.name());
if (test_info.result()->Failed())
PrintFullTestCommentIfPresent(test_info);
@@ -3231,17 +3260,29 @@ void PrettyUnitTestResultPrinter::OnTestEnd(const TestInfo& test_info) {
fflush(stdout);
}
+#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_
void PrettyUnitTestResultPrinter::OnTestCaseEnd(const TestCase& test_case) {
if (!GTEST_FLAG(print_time)) return;
const std::string counts =
FormatCountableNoun(test_case.test_to_run_count(), "test", "tests");
ColoredPrintf(COLOR_GREEN, "[----------] ");
- printf("%s from %s (%s ms total)\n\n",
- counts.c_str(), test_case.name(),
+ printf("%s from %s (%s ms total)\n\n", counts.c_str(), test_case.name(),
internal::StreamableToString(test_case.elapsed_time()).c_str());
fflush(stdout);
}
+#else
+void PrettyUnitTestResultPrinter::OnTestSuiteEnd(const TestSuite& test_suite) {
+ if (!GTEST_FLAG(print_time)) return;
+
+ const std::string counts =
+ FormatCountableNoun(test_suite.test_to_run_count(), "test", "tests");
+ ColoredPrintf(COLOR_GREEN, "[----------] ");
+ printf("%s from %s (%s ms total)\n\n", counts.c_str(), test_suite.name(),
+ internal::StreamableToString(test_suite.elapsed_time()).c_str());
+ fflush(stdout);
+}
+#endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_
void PrettyUnitTestResultPrinter::OnEnvironmentsTearDownStart(
const UnitTest& /*unit_test*/) {
@@ -3257,18 +3298,18 @@ void PrettyUnitTestResultPrinter::PrintFailedTests(const UnitTest& unit_test) {
return;
}
- for (int i = 0; i < unit_test.total_test_case_count(); ++i) {
- const TestCase& test_case = *unit_test.GetTestCase(i);
- if (!test_case.should_run() || (test_case.failed_test_count() == 0)) {
+ for (int i = 0; i < unit_test.total_test_suite_count(); ++i) {
+ const TestSuite& test_suite = *unit_test.GetTestSuite(i);
+ if (!test_suite.should_run() || (test_suite.failed_test_count() == 0)) {
continue;
}
- for (int j = 0; j < test_case.total_test_count(); ++j) {
- const TestInfo& test_info = *test_case.GetTestInfo(j);
+ for (int j = 0; j < test_suite.total_test_count(); ++j) {
+ const TestInfo& test_info = *test_suite.GetTestInfo(j);
if (!test_info.should_run() || !test_info.result()->Failed()) {
continue;
}
ColoredPrintf(COLOR_RED, "[ FAILED ] ");
- printf("%s.%s", test_case.name(), test_info.name());
+ printf("%s.%s", test_suite.name(), test_info.name());
PrintFullTestCommentIfPresent(test_info);
printf("\n");
}
@@ -3282,18 +3323,18 @@ void PrettyUnitTestResultPrinter::PrintSkippedTests(const UnitTest& unit_test) {
return;
}
- for (int i = 0; i < unit_test.total_test_case_count(); ++i) {
- const TestCase& test_case = *unit_test.GetTestCase(i);
- if (!test_case.should_run() || (test_case.skipped_test_count() == 0)) {
+ for (int i = 0; i < unit_test.total_test_suite_count(); ++i) {
+ const TestSuite& test_suite = *unit_test.GetTestSuite(i);
+ if (!test_suite.should_run() || (test_suite.skipped_test_count() == 0)) {
continue;
}
- for (int j = 0; j < test_case.total_test_count(); ++j) {
- const TestInfo& test_info = *test_case.GetTestInfo(j);
+ for (int j = 0; j < test_suite.total_test_count(); ++j) {
+ const TestInfo& test_info = *test_suite.GetTestInfo(j);
if (!test_info.should_run() || !test_info.result()->Skipped()) {
continue;
}
ColoredPrintf(COLOR_GREEN, "[ SKIPPED ] ");
- printf("%s.%s", test_case.name(), test_info.name());
+ printf("%s.%s", test_suite.name(), test_info.name());
printf("\n");
}
}
@@ -3304,7 +3345,7 @@ void PrettyUnitTestResultPrinter::OnTestIterationEnd(const UnitTest& unit_test,
ColoredPrintf(COLOR_GREEN, "[==========] ");
printf("%s from %s ran.",
FormatTestCount(unit_test.test_to_run_count()).c_str(),
- FormatTestCaseCount(unit_test.test_case_to_run_count()).c_str());
+ FormatTestSuiteCount(unit_test.test_suite_to_run_count()).c_str());
if (GTEST_FLAG(print_time)) {
printf(" (%s ms total)",
internal::StreamableToString(unit_test.elapsed_time()).c_str());
@@ -3365,11 +3406,19 @@ class TestEventRepeater : public TestEventListener {
void OnTestIterationStart(const UnitTest& unit_test, int iteration) override;
void OnEnvironmentsSetUpStart(const UnitTest& unit_test) override;
void OnEnvironmentsSetUpEnd(const UnitTest& unit_test) override;
- void OnTestCaseStart(const TestCase& test_case) override;
+// Legacy API is deprecated but still available
+#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_
+ void OnTestCaseStart(const TestSuite& parameter) override;
+#endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_
+ void OnTestSuiteStart(const TestSuite& parameter) override;
void OnTestStart(const TestInfo& test_info) override;
void OnTestPartResult(const TestPartResult& result) override;
void OnTestEnd(const TestInfo& test_info) override;
- void OnTestCaseEnd(const TestCase& test_case) override;
+// Legacy API is deprecated but still available
+#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_
+ void OnTestCaseEnd(const TestCase& parameter) override;
+#endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_
+ void OnTestSuiteEnd(const TestSuite& parameter) override;
void OnEnvironmentsTearDownStart(const UnitTest& unit_test) override;
void OnEnvironmentsTearDownEnd(const UnitTest& unit_test) override;
void OnTestIterationEnd(const UnitTest& unit_test, int iteration) override;
@@ -3393,11 +3442,10 @@ void TestEventRepeater::Append(TestEventListener *listener) {
listeners_.push_back(listener);
}
-// FIXME: Factor the search functionality into Vector::Find.
TestEventListener* TestEventRepeater::Release(TestEventListener *listener) {
for (size_t i = 0; i < listeners_.size(); ++i) {
if (listeners_[i] == listener) {
- listeners_.erase(listeners_.begin() + i);
+ listeners_.erase(listeners_.begin() + static_cast<int>(i));
return listener;
}
}
@@ -3417,25 +3465,33 @@ void TestEventRepeater::Name(const Type& parameter) { \
}
// This defines a member that forwards the call to all listeners in reverse
// order.
-#define GTEST_REVERSE_REPEATER_METHOD_(Name, Type) \
-void TestEventRepeater::Name(const Type& parameter) { \
- if (forwarding_enabled_) { \
- for (int i = static_cast<int>(listeners_.size()) - 1; i >= 0; i--) { \
- listeners_[i]->Name(parameter); \
- } \
- } \
-}
+#define GTEST_REVERSE_REPEATER_METHOD_(Name, Type) \
+ void TestEventRepeater::Name(const Type& parameter) { \
+ if (forwarding_enabled_) { \
+ for (size_t i = listeners_.size(); i != 0; i--) { \
+ listeners_[i - 1]->Name(parameter); \
+ } \
+ } \
+ }
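The rewritten reverse loop drops the signed index entirely; the same count-down idiom in isolation, as a trivial standalone sketch:

    #include <cstdio>
    #include <vector>

    int main() {
      std::vector<int> listeners = {1, 2, 3};
      // Count an unsigned index down without ever letting it go negative.
      for (size_t i = listeners.size(); i != 0; i--) {
        std::printf("%d\n", listeners[i - 1]);  // prints 3 2 1
      }
    }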
GTEST_REPEATER_METHOD_(OnTestProgramStart, UnitTest)
GTEST_REPEATER_METHOD_(OnEnvironmentsSetUpStart, UnitTest)
-GTEST_REPEATER_METHOD_(OnTestCaseStart, TestCase)
+// Legacy API is deprecated but still available
+#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_
+GTEST_REPEATER_METHOD_(OnTestCaseStart, TestSuite)
+#endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_
+GTEST_REPEATER_METHOD_(OnTestSuiteStart, TestSuite)
GTEST_REPEATER_METHOD_(OnTestStart, TestInfo)
GTEST_REPEATER_METHOD_(OnTestPartResult, TestPartResult)
GTEST_REPEATER_METHOD_(OnEnvironmentsTearDownStart, UnitTest)
GTEST_REVERSE_REPEATER_METHOD_(OnEnvironmentsSetUpEnd, UnitTest)
GTEST_REVERSE_REPEATER_METHOD_(OnEnvironmentsTearDownEnd, UnitTest)
GTEST_REVERSE_REPEATER_METHOD_(OnTestEnd, TestInfo)
-GTEST_REVERSE_REPEATER_METHOD_(OnTestCaseEnd, TestCase)
+// Legacy API is deprecated but still available
+#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_
+GTEST_REVERSE_REPEATER_METHOD_(OnTestCaseEnd, TestSuite)
+#endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_
+GTEST_REVERSE_REPEATER_METHOD_(OnTestSuiteEnd, TestSuite)
GTEST_REVERSE_REPEATER_METHOD_(OnTestProgramEnd, UnitTest)
#undef GTEST_REPEATER_METHOD_
@@ -3453,8 +3509,8 @@ void TestEventRepeater::OnTestIterationStart(const UnitTest& unit_test,
void TestEventRepeater::OnTestIterationEnd(const UnitTest& unit_test,
int iteration) {
if (forwarding_enabled_) {
- for (int i = static_cast<int>(listeners_.size()) - 1; i >= 0; i--) {
- listeners_[i]->OnTestIterationEnd(unit_test, iteration);
+ for (size_t i = listeners_.size(); i > 0; i--) {
+ listeners_[i - 1]->OnTestIterationEnd(unit_test, iteration);
}
}
}
@@ -3467,11 +3523,11 @@ class XmlUnitTestResultPrinter : public EmptyTestEventListener {
explicit XmlUnitTestResultPrinter(const char* output_file);
void OnTestIterationEnd(const UnitTest& unit_test, int iteration) override;
- void ListTestsMatchingFilter(const std::vector<TestCase*>& test_cases);
+ void ListTestsMatchingFilter(const std::vector<TestSuite*>& test_suites);
// Prints an XML summary of all unit tests.
static void PrintXmlTestsList(std::ostream* stream,
- const std::vector<TestCase*>& test_cases);
+ const std::vector<TestSuite*>& test_suites);
private:
// Is c a whitespace character that is normalized to a space character
@@ -3516,12 +3572,12 @@ class XmlUnitTestResultPrinter : public EmptyTestEventListener {
// Streams an XML representation of a TestInfo object.
static void OutputXmlTestInfo(::std::ostream* stream,
- const char* test_case_name,
+ const char* test_suite_name,
const TestInfo& test_info);
- // Prints an XML representation of a TestCase object
- static void PrintXmlTestCase(::std::ostream* stream,
- const TestCase& test_case);
+ // Prints an XML representation of a TestSuite object
+ static void PrintXmlTestSuite(::std::ostream* stream,
+ const TestSuite& test_suite);
// Prints an XML summary of unit_test to output stream out.
static void PrintXmlUnitTest(::std::ostream* stream,
@@ -3563,10 +3619,10 @@ void XmlUnitTestResultPrinter::OnTestIterationEnd(const UnitTest& unit_test,
}
void XmlUnitTestResultPrinter::ListTestsMatchingFilter(
- const std::vector<TestCase*>& test_cases) {
+ const std::vector<TestSuite*>& test_suites) {
FILE* xmlout = OpenFileForWriting(output_file_);
std::stringstream stream;
- PrintXmlTestsList(&stream, test_cases);
+ PrintXmlTestsList(&stream, test_suites);
fprintf(xmlout, "%s", StringStreamToString(&stream).c_str());
fclose(xmlout);
}
@@ -3581,8 +3637,6 @@ void XmlUnitTestResultPrinter::ListTestsMatchingFilter(
// module will consist of ordinary English text.
// If this module is ever modified to produce version 1.1 XML output,
// most invalid characters can be retained using character references.
-// FIXME: It might be nice to have a minimally invasive, human-readable
-// escaping scheme for invalid characters, rather than dropping them.
std::string XmlUnitTestResultPrinter::EscapeXml(
const std::string& str, bool is_attribute) {
Message m;
@@ -3647,7 +3701,7 @@ std::string XmlUnitTestResultPrinter::RemoveInvalidXmlCharacters(
// This is how Google Test concepts map to the DTD:
//
// <testsuites name="AllTests"> <-- corresponds to a UnitTest object
-// <testsuite name="testcase-name"> <-- corresponds to a TestCase object
+// <testsuite name="testcase-name"> <-- corresponds to a TestSuite object
// <testcase name="test-name"> <-- corresponds to a TestInfo object
// <failure message="...">...</failure>
// <failure message="...">...</failure>
@@ -3720,7 +3774,7 @@ void XmlUnitTestResultPrinter::OutputXmlAttribute(
const std::string& name,
const std::string& value) {
const std::vector<std::string>& allowed_names =
- GetReservedAttributesForElement(element_name);
+ GetReservedOutputAttributesForElement(element_name);
GTEST_CHECK_(std::find(allowed_names.begin(), allowed_names.end(), name) !=
allowed_names.end())
@@ -3731,41 +3785,47 @@ void XmlUnitTestResultPrinter::OutputXmlAttribute(
}
// Prints an XML representation of a TestInfo object.
-// FIXME: There is also value in printing properties with the plain printer.
void XmlUnitTestResultPrinter::OutputXmlTestInfo(::std::ostream* stream,
- const char* test_case_name,
+ const char* test_suite_name,
const TestInfo& test_info) {
const TestResult& result = *test_info.result();
- const std::string kTestcase = "testcase";
+ const std::string kTestsuite = "testcase";
if (test_info.is_in_another_shard()) {
return;
}
*stream << " <testcase";
- OutputXmlAttribute(stream, kTestcase, "name", test_info.name());
+ OutputXmlAttribute(stream, kTestsuite, "name", test_info.name());
if (test_info.value_param() != nullptr) {
- OutputXmlAttribute(stream, kTestcase, "value_param",
+ OutputXmlAttribute(stream, kTestsuite, "value_param",
test_info.value_param());
}
if (test_info.type_param() != nullptr) {
- OutputXmlAttribute(stream, kTestcase, "type_param", test_info.type_param());
+ OutputXmlAttribute(stream, kTestsuite, "type_param",
+ test_info.type_param());
}
if (GTEST_FLAG(list_tests)) {
- OutputXmlAttribute(stream, kTestcase, "file", test_info.file());
- OutputXmlAttribute(stream, kTestcase, "line",
+ OutputXmlAttribute(stream, kTestsuite, "file", test_info.file());
+ OutputXmlAttribute(stream, kTestsuite, "line",
StreamableToString(test_info.line()));
*stream << " />\n";
return;
}
- OutputXmlAttribute(stream, kTestcase, "status",
- result.Skipped() ? "skipped" :
- test_info.should_run() ? "run" : "notrun");
- OutputXmlAttribute(stream, kTestcase, "time",
+ OutputXmlAttribute(stream, kTestsuite, "status",
+ test_info.should_run() ? "run" : "notrun");
+ OutputXmlAttribute(stream, kTestsuite, "result",
+ test_info.should_run()
+ ? (result.Skipped() ? "skipped" : "completed")
+ : "suppressed");
+ OutputXmlAttribute(stream, kTestsuite, "time",
FormatTimeInMillisAsSeconds(result.elapsed_time()));
- OutputXmlAttribute(stream, kTestcase, "classname", test_case_name);
+ OutputXmlAttribute(
+ stream, kTestsuite, "timestamp",
+ FormatEpochTimeInMillisAsIso8601(result.start_timestamp()));
+ OutputXmlAttribute(stream, kTestsuite, "classname", test_suite_name);
int failures = 0;
for (int i = 0; i < result.total_part_count(); ++i) {
@@ -3798,29 +3858,32 @@ void XmlUnitTestResultPrinter::OutputXmlTestInfo(::std::ostream* stream,
}
}
-// Prints an XML representation of a TestCase object
-void XmlUnitTestResultPrinter::PrintXmlTestCase(std::ostream* stream,
- const TestCase& test_case) {
+// Prints an XML representation of a TestSuite object
+void XmlUnitTestResultPrinter::PrintXmlTestSuite(std::ostream* stream,
+ const TestSuite& test_suite) {
const std::string kTestsuite = "testsuite";
*stream << " <" << kTestsuite;
- OutputXmlAttribute(stream, kTestsuite, "name", test_case.name());
+ OutputXmlAttribute(stream, kTestsuite, "name", test_suite.name());
OutputXmlAttribute(stream, kTestsuite, "tests",
- StreamableToString(test_case.reportable_test_count()));
+ StreamableToString(test_suite.reportable_test_count()));
if (!GTEST_FLAG(list_tests)) {
OutputXmlAttribute(stream, kTestsuite, "failures",
- StreamableToString(test_case.failed_test_count()));
+ StreamableToString(test_suite.failed_test_count()));
OutputXmlAttribute(
stream, kTestsuite, "disabled",
- StreamableToString(test_case.reportable_disabled_test_count()));
+ StreamableToString(test_suite.reportable_disabled_test_count()));
OutputXmlAttribute(stream, kTestsuite, "errors", "0");
OutputXmlAttribute(stream, kTestsuite, "time",
- FormatTimeInMillisAsSeconds(test_case.elapsed_time()));
- *stream << TestPropertiesAsXmlAttributes(test_case.ad_hoc_test_result());
+ FormatTimeInMillisAsSeconds(test_suite.elapsed_time()));
+ OutputXmlAttribute(
+ stream, kTestsuite, "timestamp",
+ FormatEpochTimeInMillisAsIso8601(test_suite.start_timestamp()));
+ *stream << TestPropertiesAsXmlAttributes(test_suite.ad_hoc_test_result());
}
*stream << ">\n";
- for (int i = 0; i < test_case.total_test_count(); ++i) {
- if (test_case.GetTestInfo(i)->is_reportable())
- OutputXmlTestInfo(stream, test_case.name(), *test_case.GetTestInfo(i));
+ for (int i = 0; i < test_suite.total_test_count(); ++i) {
+ if (test_suite.GetTestInfo(i)->is_reportable())
+ OutputXmlTestInfo(stream, test_suite.name(), *test_suite.GetTestInfo(i));
}
*stream << " </" << kTestsuite << ">\n";
}
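Putting the new attributes together, a run with one passing test would produce output roughly like the following sample (values invented; attribute order follows the code above):

    <?xml version="1.0" encoding="UTF-8"?>
    <testsuites tests="1" failures="0" disabled="0" errors="0" time="0.005"
                timestamp="2019-01-30T13:00:00" name="AllTests">
      <testsuite name="WidgetTest" tests="1" failures="0" disabled="0" errors="0"
                 time="0.004" timestamp="2019-01-30T13:00:00">
        <testcase name="RecordsCustomProperty" status="run" result="completed"
                  time="0.003" timestamp="2019-01-30T13:00:00"
                  classname="WidgetTest" />
      </testsuite>
    </testsuites>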
@@ -3841,11 +3904,11 @@ void XmlUnitTestResultPrinter::PrintXmlUnitTest(std::ostream* stream,
stream, kTestsuites, "disabled",
StreamableToString(unit_test.reportable_disabled_test_count()));
OutputXmlAttribute(stream, kTestsuites, "errors", "0");
+ OutputXmlAttribute(stream, kTestsuites, "time",
+ FormatTimeInMillisAsSeconds(unit_test.elapsed_time()));
OutputXmlAttribute(
stream, kTestsuites, "timestamp",
FormatEpochTimeInMillisAsIso8601(unit_test.start_timestamp()));
- OutputXmlAttribute(stream, kTestsuites, "time",
- FormatTimeInMillisAsSeconds(unit_test.elapsed_time()));
if (GTEST_FLAG(shuffle)) {
OutputXmlAttribute(stream, kTestsuites, "random_seed",
@@ -3856,31 +3919,31 @@ void XmlUnitTestResultPrinter::PrintXmlUnitTest(std::ostream* stream,
OutputXmlAttribute(stream, kTestsuites, "name", "AllTests");
*stream << ">\n";
- for (int i = 0; i < unit_test.total_test_case_count(); ++i) {
- if (unit_test.GetTestCase(i)->reportable_test_count() > 0)
- PrintXmlTestCase(stream, *unit_test.GetTestCase(i));
+ for (int i = 0; i < unit_test.total_test_suite_count(); ++i) {
+ if (unit_test.GetTestSuite(i)->reportable_test_count() > 0)
+ PrintXmlTestSuite(stream, *unit_test.GetTestSuite(i));
}
*stream << "</" << kTestsuites << ">\n";
}
void XmlUnitTestResultPrinter::PrintXmlTestsList(
- std::ostream* stream, const std::vector<TestCase*>& test_cases) {
+ std::ostream* stream, const std::vector<TestSuite*>& test_suites) {
const std::string kTestsuites = "testsuites";
*stream << "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n";
*stream << "<" << kTestsuites;
int total_tests = 0;
- for (size_t i = 0; i < test_cases.size(); ++i) {
- total_tests += test_cases[i]->total_test_count();
+ for (auto test_suite : test_suites) {
+ total_tests += test_suite->total_test_count();
}
OutputXmlAttribute(stream, kTestsuites, "tests",
StreamableToString(total_tests));
OutputXmlAttribute(stream, kTestsuites, "name", "AllTests");
*stream << ">\n";
- for (size_t i = 0; i < test_cases.size(); ++i) {
- PrintXmlTestCase(stream, *test_cases[i]);
+ for (auto test_suite : test_suites) {
+ PrintXmlTestSuite(stream, *test_suite);
}
*stream << "</" << kTestsuites << ">\n";
}
@@ -3929,7 +3992,7 @@ class JsonUnitTestResultPrinter : public EmptyTestEventListener {
// Prints an JSON summary of all unit tests.
static void PrintJsonTestList(::std::ostream* stream,
- const std::vector<TestCase*>& test_cases);
+ const std::vector<TestSuite*>& test_suites);
private:
// Returns an JSON-escaped copy of the input string str.
@@ -3952,12 +4015,12 @@ class JsonUnitTestResultPrinter : public EmptyTestEventListener {
// Streams a JSON representation of a TestInfo object.
static void OutputJsonTestInfo(::std::ostream* stream,
- const char* test_case_name,
+ const char* test_suite_name,
const TestInfo& test_info);
- // Prints a JSON representation of a TestCase object
- static void PrintJsonTestCase(::std::ostream* stream,
- const TestCase& test_case);
+ // Prints a JSON representation of a TestSuite object
+ static void PrintJsonTestSuite(::std::ostream* stream,
+ const TestSuite& test_suite);
// Prints a JSON summary of unit_test to output stream out.
static void PrintJsonUnitTest(::std::ostream* stream,
@@ -4056,7 +4119,7 @@ static std::string FormatEpochTimeInMillisAsRFC3339(TimeInMillis ms) {
String::FormatIntWidth2(time_struct.tm_sec) + "Z";
}
-static inline std::string Indent(int width) {
+static inline std::string Indent(size_t width) {
return std::string(width, ' ');
}
@@ -4068,7 +4131,7 @@ void JsonUnitTestResultPrinter::OutputJsonKey(
const std::string& indent,
bool comma) {
const std::vector<std::string>& allowed_names =
- GetReservedAttributesForElement(element_name);
+ GetReservedOutputAttributesForElement(element_name);
GTEST_CHECK_(std::find(allowed_names.begin(), allowed_names.end(), name) !=
allowed_names.end())
@@ -4088,7 +4151,7 @@ void JsonUnitTestResultPrinter::OutputJsonKey(
const std::string& indent,
bool comma) {
const std::vector<std::string>& allowed_names =
- GetReservedAttributesForElement(element_name);
+ GetReservedOutputAttributesForElement(element_name);
GTEST_CHECK_(std::find(allowed_names.begin(), allowed_names.end(), name) !=
allowed_names.end())
@@ -4102,36 +4165,44 @@ void JsonUnitTestResultPrinter::OutputJsonKey(
// Prints a JSON representation of a TestInfo object.
void JsonUnitTestResultPrinter::OutputJsonTestInfo(::std::ostream* stream,
- const char* test_case_name,
+ const char* test_suite_name,
const TestInfo& test_info) {
const TestResult& result = *test_info.result();
- const std::string kTestcase = "testcase";
+ const std::string kTestsuite = "testcase";
const std::string kIndent = Indent(10);
*stream << Indent(8) << "{\n";
- OutputJsonKey(stream, kTestcase, "name", test_info.name(), kIndent);
+ OutputJsonKey(stream, kTestsuite, "name", test_info.name(), kIndent);
if (test_info.value_param() != nullptr) {
- OutputJsonKey(stream, kTestcase, "value_param",
- test_info.value_param(), kIndent);
+ OutputJsonKey(stream, kTestsuite, "value_param", test_info.value_param(),
+ kIndent);
}
if (test_info.type_param() != nullptr) {
- OutputJsonKey(stream, kTestcase, "type_param", test_info.type_param(),
+ OutputJsonKey(stream, kTestsuite, "type_param", test_info.type_param(),
kIndent);
}
if (GTEST_FLAG(list_tests)) {
- OutputJsonKey(stream, kTestcase, "file", test_info.file(), kIndent);
- OutputJsonKey(stream, kTestcase, "line", test_info.line(), kIndent, false);
+ OutputJsonKey(stream, kTestsuite, "file", test_info.file(), kIndent);
+ OutputJsonKey(stream, kTestsuite, "line", test_info.line(), kIndent, false);
*stream << "\n" << Indent(8) << "}";
return;
}
- OutputJsonKey(stream, kTestcase, "status",
- result.Skipped() ? "SKIPPED" :
+ OutputJsonKey(stream, kTestsuite, "status",
test_info.should_run() ? "RUN" : "NOTRUN", kIndent);
- OutputJsonKey(stream, kTestcase, "time",
+ OutputJsonKey(stream, kTestsuite, "result",
+ test_info.should_run()
+ ? (result.Skipped() ? "SKIPPED" : "COMPLETED")
+ : "SUPPRESSED",
+ kIndent);
+ OutputJsonKey(stream, kTestsuite, "timestamp",
+ FormatEpochTimeInMillisAsRFC3339(result.start_timestamp()),
+ kIndent);
+ OutputJsonKey(stream, kTestsuite, "time",
FormatTimeInMillisAsDuration(result.elapsed_time()), kIndent);
- OutputJsonKey(stream, kTestcase, "classname", test_case_name, kIndent, false);
+ OutputJsonKey(stream, kTestsuite, "classname", test_suite_name, kIndent,
+ false);
*stream << TestPropertiesAsJson(result, kIndent);
int failures = 0;
@@ -4158,40 +4229,44 @@ void JsonUnitTestResultPrinter::OutputJsonTestInfo(::std::ostream* stream,
*stream << "\n" << Indent(8) << "}";
}
-// Prints an JSON representation of a TestCase object
-void JsonUnitTestResultPrinter::PrintJsonTestCase(std::ostream* stream,
- const TestCase& test_case) {
+// Prints a JSON representation of a TestSuite object
+void JsonUnitTestResultPrinter::PrintJsonTestSuite(
+ std::ostream* stream, const TestSuite& test_suite) {
const std::string kTestsuite = "testsuite";
const std::string kIndent = Indent(6);
*stream << Indent(4) << "{\n";
- OutputJsonKey(stream, kTestsuite, "name", test_case.name(), kIndent);
- OutputJsonKey(stream, kTestsuite, "tests", test_case.reportable_test_count(),
+ OutputJsonKey(stream, kTestsuite, "name", test_suite.name(), kIndent);
+ OutputJsonKey(stream, kTestsuite, "tests", test_suite.reportable_test_count(),
kIndent);
if (!GTEST_FLAG(list_tests)) {
- OutputJsonKey(stream, kTestsuite, "failures", test_case.failed_test_count(),
- kIndent);
+ OutputJsonKey(stream, kTestsuite, "failures",
+ test_suite.failed_test_count(), kIndent);
OutputJsonKey(stream, kTestsuite, "disabled",
- test_case.reportable_disabled_test_count(), kIndent);
+ test_suite.reportable_disabled_test_count(), kIndent);
OutputJsonKey(stream, kTestsuite, "errors", 0, kIndent);
+ OutputJsonKey(
+ stream, kTestsuite, "timestamp",
+ FormatEpochTimeInMillisAsRFC3339(test_suite.start_timestamp()),
+ kIndent);
OutputJsonKey(stream, kTestsuite, "time",
- FormatTimeInMillisAsDuration(test_case.elapsed_time()),
+ FormatTimeInMillisAsDuration(test_suite.elapsed_time()),
kIndent, false);
- *stream << TestPropertiesAsJson(test_case.ad_hoc_test_result(), kIndent)
+ *stream << TestPropertiesAsJson(test_suite.ad_hoc_test_result(), kIndent)
<< ",\n";
}
*stream << kIndent << "\"" << kTestsuite << "\": [\n";
bool comma = false;
- for (int i = 0; i < test_case.total_test_count(); ++i) {
- if (test_case.GetTestInfo(i)->is_reportable()) {
+ for (int i = 0; i < test_suite.total_test_count(); ++i) {
+ if (test_suite.GetTestInfo(i)->is_reportable()) {
if (comma) {
*stream << ",\n";
} else {
comma = true;
}
- OutputJsonTestInfo(stream, test_case.name(), *test_case.GetTestInfo(i));
+ OutputJsonTestInfo(stream, test_suite.name(), *test_suite.GetTestInfo(i));
}
}
*stream << "\n" << kIndent << "]\n" << Indent(4) << "}";
@@ -4229,14 +4304,14 @@ void JsonUnitTestResultPrinter::PrintJsonUnitTest(std::ostream* stream,
*stream << kIndent << "\"" << kTestsuites << "\": [\n";
bool comma = false;
- for (int i = 0; i < unit_test.total_test_case_count(); ++i) {
- if (unit_test.GetTestCase(i)->reportable_test_count() > 0) {
+ for (int i = 0; i < unit_test.total_test_suite_count(); ++i) {
+ if (unit_test.GetTestSuite(i)->reportable_test_count() > 0) {
if (comma) {
*stream << ",\n";
} else {
comma = true;
}
- PrintJsonTestCase(stream, *unit_test.GetTestCase(i));
+ PrintJsonTestSuite(stream, *unit_test.GetTestSuite(i));
}
}
@@ -4244,24 +4319,24 @@ void JsonUnitTestResultPrinter::PrintJsonUnitTest(std::ostream* stream,
}
void JsonUnitTestResultPrinter::PrintJsonTestList(
- std::ostream* stream, const std::vector<TestCase*>& test_cases) {
+ std::ostream* stream, const std::vector<TestSuite*>& test_suites) {
const std::string kTestsuites = "testsuites";
const std::string kIndent = Indent(2);
*stream << "{\n";
int total_tests = 0;
- for (size_t i = 0; i < test_cases.size(); ++i) {
- total_tests += test_cases[i]->total_test_count();
+ for (auto test_suite : test_suites) {
+ total_tests += test_suite->total_test_count();
}
OutputJsonKey(stream, kTestsuites, "tests", total_tests, kIndent);
OutputJsonKey(stream, kTestsuites, "name", "AllTests", kIndent);
*stream << kIndent << "\"" << kTestsuites << "\": [\n";
- for (size_t i = 0; i < test_cases.size(); ++i) {
+ for (size_t i = 0; i < test_suites.size(); ++i) {
if (i != 0) {
*stream << ",\n";
}
- PrintJsonTestCase(stream, *test_cases[i]);
+ PrintJsonTestSuite(stream, *test_suites[i]);
}
*stream << "\n"
@@ -4552,26 +4627,42 @@ UnitTest* UnitTest::GetInstance() {
#endif // defined(__BORLANDC__)
}
-// Gets the number of successful test cases.
-int UnitTest::successful_test_case_count() const {
- return impl()->successful_test_case_count();
+// Gets the number of successful test suites.
+int UnitTest::successful_test_suite_count() const {
+ return impl()->successful_test_suite_count();
}
-// Gets the number of failed test cases.
-int UnitTest::failed_test_case_count() const {
- return impl()->failed_test_case_count();
+// Gets the number of failed test suites.
+int UnitTest::failed_test_suite_count() const {
+ return impl()->failed_test_suite_count();
}
-// Gets the number of all test cases.
-int UnitTest::total_test_case_count() const {
- return impl()->total_test_case_count();
+// Gets the number of all test suites.
+int UnitTest::total_test_suite_count() const {
+ return impl()->total_test_suite_count();
}
-// Gets the number of all test cases that contain at least one test
+// Gets the number of all test suites that contain at least one test
// that should run.
+int UnitTest::test_suite_to_run_count() const {
+ return impl()->test_suite_to_run_count();
+}
+
+// Legacy API is deprecated but still available
+#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_
+int UnitTest::successful_test_case_count() const {
+ return impl()->successful_test_suite_count();
+}
+int UnitTest::failed_test_case_count() const {
+ return impl()->failed_test_suite_count();
+}
+int UnitTest::total_test_case_count() const {
+ return impl()->total_test_suite_count();
+}
int UnitTest::test_case_to_run_count() const {
- return impl()->test_case_to_run_count();
+ return impl()->test_suite_to_run_count();
}
+#endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_
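
As a rough usage sketch of the renamed counters (the helper function below is invented, not part of the patch), callers can prefer the *_test_suite_* accessors and keep the deprecated *_test_case_* spellings behind the legacy guard:

    #include <cstdio>
    #include "gtest/gtest.h"

    void PrintRunCounts() {
      const testing::UnitTest& unit_test = *testing::UnitTest::GetInstance();
    #ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_
      // Deprecated spelling; forwards to the suite-based counter.
      std::printf("legacy total: %d\n", unit_test.total_test_case_count());
    #endif
      std::printf("suites to run: %d\n", unit_test.test_suite_to_run_count());
    }
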
// Gets the number of successful tests.
int UnitTest::successful_test_count() const {
@@ -4618,29 +4709,36 @@ internal::TimeInMillis UnitTest::elapsed_time() const {
return impl()->elapsed_time();
}
-// Returns true iff the unit test passed (i.e. all test cases passed).
+// Returns true if the unit test passed (i.e. all test suites passed).
bool UnitTest::Passed() const { return impl()->Passed(); }
-// Returns true iff the unit test failed (i.e. some test case failed
+// Returns true if the unit test failed (i.e. some test suite failed
// or something outside of all tests failed).
bool UnitTest::Failed() const { return impl()->Failed(); }
-// Gets the i-th test case among all the test cases. i can range from 0 to
-// total_test_case_count() - 1. If i is not in that range, returns NULL.
+// Gets the i-th test suite among all the test suites. i can range from 0 to
+// total_test_suite_count() - 1. If i is not in that range, returns NULL.
+const TestSuite* UnitTest::GetTestSuite(int i) const {
+ return impl()->GetTestSuite(i);
+}
+
+// Legacy API is deprecated but still available
+#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_
const TestCase* UnitTest::GetTestCase(int i) const {
return impl()->GetTestCase(i);
}
+#endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_
// Returns the TestResult containing information on test failures and
-// properties logged outside of individual test cases.
+// properties logged outside of individual test suites.
const TestResult& UnitTest::ad_hoc_test_result() const {
return *impl()->ad_hoc_test_result();
}
-// Gets the i-th test case among all the test cases. i can range from 0 to
-// total_test_case_count() - 1. If i is not in that range, returns NULL.
-TestCase* UnitTest::GetMutableTestCase(int i) {
- return impl()->GetMutableTestCase(i);
+// Gets the i-th test suite among all the test suites. i can range from 0 to
+// total_test_suite_count() - 1. If i is not in that range, returns NULL.
+TestSuite* UnitTest::GetMutableTestSuite(int i) {
+ return impl()->GetMutableSuiteCase(i);
}
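
An illustrative sketch of walking the results through the new suite-based accessors (DumpSuiteResults is an invented name), e.g. from a test event listener or after RUN_ALL_TESTS():

    #include <cstdio>
    #include "gtest/gtest.h"

    void DumpSuiteResults() {
      const testing::UnitTest& unit_test = *testing::UnitTest::GetInstance();
      for (int i = 0; i < unit_test.total_test_suite_count(); ++i) {
        const testing::TestSuite* test_suite = unit_test.GetTestSuite(i);
        std::printf("%s: %d failed of %d\n", test_suite->name(),
                    test_suite->failed_test_count(),
                    test_suite->total_test_count());
      }
    }
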
// Returns the list of event listeners that can be used to track events
@@ -4685,8 +4783,7 @@ void UnitTest::AddTestPartResult(
if (impl_->gtest_trace_stack().size() > 0) {
msg << "\n" << GTEST_NAME_ << " trace:";
- for (int i = static_cast<int>(impl_->gtest_trace_stack().size());
- i > 0; --i) {
+ for (size_t i = impl_->gtest_trace_stack().size(); i > 0; --i) {
const internal::TraceInfo& trace = impl_->gtest_trace_stack()[i - 1];
msg << "\n" << internal::FormatFileLocation(trace.file, trace.line)
<< " " << trace.message;
@@ -4723,8 +4820,7 @@ void UnitTest::AddTestPartResult(
#else
// Dereference nullptr through a volatile pointer to prevent the compiler
// from removing. We use this rather than abort() or __builtin_trap() for
- // portability: Symbian doesn't implement abort() well, and some debuggers
- // don't correctly trap abort().
+ // portability: some debuggers don't correctly trap abort().
*static_cast<volatile int*>(nullptr) = 1;
#endif // GTEST_OS_WINDOWS
} else if (GTEST_FLAG(throw_on_failure)) {
@@ -4740,8 +4836,8 @@ void UnitTest::AddTestPartResult(
}
// Adds a TestProperty to the current TestResult object when invoked from
-// inside a test, to current TestCase's ad_hoc_test_result_ when invoked
-// from SetUpTestCase or TearDownTestCase, or to the global property set
+// inside a test, to current TestSuite's ad_hoc_test_result_ when invoked
+// from SetUpTestSuite or TearDownTestSuite, or to the global property set
// when invoked elsewhere. If the result already contains a property with
// the same key, the value will be updated.
void UnitTest::RecordProperty(const std::string& key,
@@ -4818,6 +4914,16 @@ int UnitTest::Run() {
0x0, // Clear the following flags:
_WRITE_ABORT_MSG | _CALL_REPORTFAULT); // pop-up window, core dump.
# endif
+
+ // In debug mode, the Windows CRT can crash with an assertion over invalid
+ // input (e.g. passing an invalid file descriptor). The default handling
+ // for these assertions is to pop up a dialog and wait for user input.
+ // Instead ask the CRT to dump such assertions to stderr non-interactively.
+ if (!IsDebuggerPresent()) {
+ (void)_CrtSetReportMode(_CRT_ASSERT,
+ _CRTDBG_MODE_FILE | _CRTDBG_MODE_DEBUG);
+ (void)_CrtSetReportFile(_CRT_ASSERT, _CRTDBG_FILE_STDERR);
+ }
}
#endif // GTEST_OS_WINDOWS
@@ -4833,13 +4939,22 @@ const char* UnitTest::original_working_dir() const {
return impl_->original_working_dir_.c_str();
}
-// Returns the TestCase object for the test that's currently running,
+// Returns the TestSuite object for the test that's currently running,
// or NULL if no test is running.
+const TestSuite* UnitTest::current_test_suite() const
+ GTEST_LOCK_EXCLUDED_(mutex_) {
+ internal::MutexLock lock(&mutex_);
+ return impl_->current_test_suite();
+}
+
+// Legacy API is still available but deprecated
+#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_
const TestCase* UnitTest::current_test_case() const
GTEST_LOCK_EXCLUDED_(mutex_) {
internal::MutexLock lock(&mutex_);
- return impl_->current_test_case();
+ return impl_->current_test_suite();
}
+#endif
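
A minimal sketch of the new accessor used from inside a test body (the suite and test names are invented); current_test_case() remains available only as the deprecated alias:

    #include "gtest/gtest.h"

    TEST(CurrentSuiteDemo, QueryName) {
      const testing::TestSuite* suite =
          testing::UnitTest::GetInstance()->current_test_suite();
      ASSERT_NE(suite, nullptr);
      EXPECT_STREQ("CurrentSuiteDemo", suite->name());
    }
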
// Returns the TestInfo object for the test that's currently running,
// or NULL if no test is running.
@@ -4852,11 +4967,10 @@ const TestInfo* UnitTest::current_test_info() const
// Returns the random seed used at the start of the current test run.
int UnitTest::random_seed() const { return impl_->random_seed(); }
-// Returns ParameterizedTestCaseRegistry object used to keep track of
+// Returns ParameterizedTestSuiteRegistry object used to keep track of
// value-parameterized tests and instantiate and register them.
-internal::ParameterizedTestCaseRegistry&
- UnitTest::parameterized_test_registry()
- GTEST_LOCK_EXCLUDED_(mutex_) {
+internal::ParameterizedTestSuiteRegistry&
+UnitTest::parameterized_test_registry() GTEST_LOCK_EXCLUDED_(mutex_) {
return impl_->parameterized_test_registry();
}
@@ -4898,8 +5012,8 @@ UnitTestImpl::UnitTestImpl(UnitTest* parent)
&default_per_thread_test_part_result_reporter_),
parameterized_test_registry_(),
parameterized_tests_registered_(false),
- last_death_test_case_(-1),
- current_test_case_(nullptr),
+ last_death_test_suite_(-1),
+ current_test_suite_(nullptr),
current_test_info_(nullptr),
ad_hoc_test_result_(),
os_stack_trace_getter_(nullptr),
@@ -4917,8 +5031,8 @@ UnitTestImpl::UnitTestImpl(UnitTest* parent)
}
UnitTestImpl::~UnitTestImpl() {
- // Deletes every TestCase.
- ForEach(test_cases_, internal::Delete<TestCase>);
+ // Deletes every TestSuite.
+ ForEach(test_suites_, internal::Delete<TestSuite>);
// Deletes every Environment.
ForEach(environments_, internal::Delete<Environment>);
@@ -4927,8 +5041,8 @@ UnitTestImpl::~UnitTestImpl() {
}
// Adds a TestProperty to the current TestResult object when invoked in a
-// context of a test, to current test case's ad_hoc_test_result when invoke
-// from SetUpTestCase/TearDownTestCase, or to the global property set
+// context of a test, to current test suite's ad_hoc_test_result when invoked
+// from SetUpTestSuite/TearDownTestSuite, or to the global property set
// otherwise. If the result already contains a property with the same key,
// the value will be updated.
void UnitTestImpl::RecordProperty(const TestProperty& test_property) {
@@ -4938,9 +5052,9 @@ void UnitTestImpl::RecordProperty(const TestProperty& test_property) {
if (current_test_info_ != nullptr) {
xml_element = "testcase";
test_result = &(current_test_info_->result_);
- } else if (current_test_case_ != nullptr) {
+ } else if (current_test_suite_ != nullptr) {
xml_element = "testsuite";
- test_result = &(current_test_case_->ad_hoc_test_result_);
+ test_result = &(current_test_suite_->ad_hoc_test_result_);
} else {
xml_element = "testsuites";
test_result = &ad_hoc_test_result_;
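
To illustrate the routing above, a hedged example (fixture and property keys are made up): a property recorded from SetUpTestSuite lands on the suite's ad hoc result (the "testsuite" element), while one recorded inside a test body lands on that test's result (the "testcase" element):

    #include "gtest/gtest.h"

    class PropertyDemo : public ::testing::Test {
     public:
      static void SetUpTestSuite() {
        // Goes to the suite's ad_hoc_test_result_ ("testsuite" element).
        ::testing::Test::RecordProperty("suite_setup_key", "value");
      }
    };

    TEST_F(PropertyDemo, InTest) {
      // Goes to the current test's result ("testcase" element).
      RecordProperty("test_key", 42);
    }
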
@@ -5034,75 +5148,73 @@ void UnitTestImpl::PostFlagParsingInit() {
}
}
-// A predicate that checks the name of a TestCase against a known
+// A predicate that checks the name of a TestSuite against a known
// value.
//
// This is used for implementation of the UnitTest class only. We put
// it in the anonymous namespace to prevent polluting the outer
// namespace.
//
-// TestCaseNameIs is copyable.
-class TestCaseNameIs {
+// TestSuiteNameIs is copyable.
+class TestSuiteNameIs {
public:
// Constructor.
- explicit TestCaseNameIs(const std::string& name)
- : name_(name) {}
+ explicit TestSuiteNameIs(const std::string& name) : name_(name) {}
- // Returns true iff the name of test_case matches name_.
- bool operator()(const TestCase* test_case) const {
- return test_case != nullptr &&
- strcmp(test_case->name(), name_.c_str()) == 0;
+ // Returns true if the name of test_suite matches name_.
+ bool operator()(const TestSuite* test_suite) const {
+ return test_suite != nullptr &&
+ strcmp(test_suite->name(), name_.c_str()) == 0;
}
private:
std::string name_;
};
-// Finds and returns a TestCase with the given name. If one doesn't
+// Finds and returns a TestSuite with the given name. If one doesn't
// exist, creates one and returns it. It's the CALLER'S
// RESPONSIBILITY to ensure that this function is only called WHEN THE
// TESTS ARE NOT SHUFFLED.
//
// Arguments:
//
-// test_case_name: name of the test case
-// type_param: the name of the test case's type parameter, or NULL if
-// this is not a typed or a type-parameterized test case.
-// set_up_tc: pointer to the function that sets up the test case
-// tear_down_tc: pointer to the function that tears down the test case
-TestCase* UnitTestImpl::GetTestCase(const char* test_case_name,
- const char* type_param,
- Test::SetUpTestCaseFunc set_up_tc,
- Test::TearDownTestCaseFunc tear_down_tc) {
- // Can we find a TestCase with the given name?
- const std::vector<TestCase*>::const_reverse_iterator test_case =
- std::find_if(test_cases_.rbegin(), test_cases_.rend(),
- TestCaseNameIs(test_case_name));
-
- if (test_case != test_cases_.rend())
- return *test_case;
+// test_suite_name: name of the test suite
+// type_param: the name of the test suite's type parameter, or NULL if
+// this is not a typed or a type-parameterized test suite.
+// set_up_tc: pointer to the function that sets up the test suite
+// tear_down_tc: pointer to the function that tears down the test suite
+TestSuite* UnitTestImpl::GetTestSuite(
+ const char* test_suite_name, const char* type_param,
+ internal::SetUpTestSuiteFunc set_up_tc,
+ internal::TearDownTestSuiteFunc tear_down_tc) {
+ // Can we find a TestSuite with the given name?
+ const auto test_suite =
+ std::find_if(test_suites_.rbegin(), test_suites_.rend(),
+ TestSuiteNameIs(test_suite_name));
+
+ if (test_suite != test_suites_.rend()) return *test_suite;
// No. Let's create one.
- TestCase* const new_test_case =
- new TestCase(test_case_name, type_param, set_up_tc, tear_down_tc);
-
- // Is this a death test case?
- if (internal::UnitTestOptions::MatchesFilter(test_case_name,
- kDeathTestCaseFilter)) {
- // Yes. Inserts the test case after the last death test case
- // defined so far. This only works when the test cases haven't
+ auto* const new_test_suite =
+ new TestSuite(test_suite_name, type_param, set_up_tc, tear_down_tc);
+
+ // Is this a death test suite?
+ if (internal::UnitTestOptions::MatchesFilter(test_suite_name,
+ kDeathTestSuiteFilter)) {
+ // Yes. Inserts the test suite after the last death test suite
+ // defined so far. This only works when the test suites haven't
// been shuffled. Otherwise we may end up running a death test
// after a non-death test.
- ++last_death_test_case_;
- test_cases_.insert(test_cases_.begin() + last_death_test_case_,
- new_test_case);
+ ++last_death_test_suite_;
+ test_suites_.insert(test_suites_.begin() + last_death_test_suite_,
+ new_test_suite);
} else {
// No. Appends to the end of the list.
- test_cases_.push_back(new_test_case);
+ test_suites_.push_back(new_test_suite);
}
- test_case_indices_.push_back(static_cast<int>(test_case_indices_.size()));
- return new_test_case;
+ test_suite_indices_.push_back(static_cast<int>(test_suite_indices_.size()));
+ return new_test_suite;
}
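
The find-or-create pattern used above can be summarized with a stand-alone sketch (Suite and GetOrCreateSuite are illustrative stand-ins, not gtest internals): a reverse search returns an existing suite by name, and new suites are appended, except that death test suites are inserted right after the last death suite so they keep running first:

    #include <algorithm>
    #include <string>
    #include <vector>

    struct Suite { std::string name; };

    Suite* GetOrCreateSuite(std::vector<Suite*>& suites,
                            const std::string& name) {
      auto it = std::find_if(suites.rbegin(), suites.rend(),
                             [&name](const Suite* s) { return s->name == name; });
      if (it != suites.rend()) return *it;  // Reuse the existing suite.
      suites.push_back(new Suite{name});    // Otherwise register a new one.
      return suites.back();
    }
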
// Helpers for setting up / tearing down the given environment. They
@@ -5120,7 +5232,7 @@ static void TearDownEnvironment(Environment* env) { env->TearDown(); }
// All other functions called from RunAllTests() may safely assume that
// parameterized tests are ready to be counted and run.
bool UnitTestImpl::RunAllTests() {
- // True iff Google Test is initialized before RUN_ALL_TESTS() is called.
+ // True if Google Test is initialized before RUN_ALL_TESTS() is called.
const bool gtest_is_initialized_before_run_all_tests = GTestIsInitialized();
// Do not run any test if the --help flag was specified.
@@ -5136,7 +5248,7 @@ bool UnitTestImpl::RunAllTests() {
// protocol.
internal::WriteToShardStatusFileIfNeeded();
- // True iff we are in a subprocess for running a thread-safe-style
+ // True if we are in a subprocess for running a thread-safe-style
// death test.
bool in_subprocess_for_death_test = false;
@@ -5169,7 +5281,7 @@ bool UnitTestImpl::RunAllTests() {
random_seed_ = GTEST_FLAG(shuffle) ?
GetRandomSeedFromFlag(GTEST_FLAG(random_seed)) : 0;
- // True iff at least one test has failed.
+ // True if at least one test has failed.
bool failed = false;
TestEventListener* repeater = listeners()->repeater();
@@ -5181,17 +5293,17 @@ bool UnitTestImpl::RunAllTests() {
// when we are inside the subprocess of a death test.
const int repeat = in_subprocess_for_death_test ? 1 : GTEST_FLAG(repeat);
// Repeats forever if the repeat count is negative.
- const bool forever = repeat < 0;
- for (int i = 0; forever || i != repeat; i++) {
+ const bool gtest_repeat_forever = repeat < 0;
+ for (int i = 0; gtest_repeat_forever || i != repeat; i++) {
// We want to preserve failures generated by ad-hoc test
// assertions executed before RUN_ALL_TESTS().
ClearNonAdHocTestResult();
const TimeInMillis start = GetTimeInMillis();
- // Shuffles test cases and tests if requested.
+ // Shuffles test suites and tests if requested.
if (has_tests_to_run && GTEST_FLAG(shuffle)) {
- random()->Reseed(random_seed_);
+ random()->Reseed(static_cast<UInt32>(random_seed_));
// This should be done before calling OnTestIterationStart(),
// such that a test event listener can see the actual test order
// in the event.
@@ -5201,19 +5313,33 @@ bool UnitTestImpl::RunAllTests() {
// Tells the unit test event listeners that the tests are about to start.
repeater->OnTestIterationStart(*parent_, i);
- // Runs each test case if there is at least one test to run.
+ // Runs each test suite if there is at least one test to run.
if (has_tests_to_run) {
// Sets up all environments beforehand.
repeater->OnEnvironmentsSetUpStart(*parent_);
ForEach(environments_, SetUpEnvironment);
repeater->OnEnvironmentsSetUpEnd(*parent_);
- // Runs the tests only if there was no fatal failure during global
- // set-up.
- if (!Test::HasFatalFailure()) {
- for (int test_index = 0; test_index < total_test_case_count();
+ // Runs the tests only if there was no fatal failure or skip triggered
+ // during global set-up.
+ if (Test::IsSkipped()) {
+      // Emit diagnostics when global set-up calls skip, as they will not be
+ // emitted by default.
+ TestResult& test_result =
+ *internal::GetUnitTestImpl()->current_test_result();
+ for (int j = 0; j < test_result.total_part_count(); ++j) {
+ const TestPartResult& test_part_result =
+ test_result.GetTestPartResult(j);
+ if (test_part_result.type() == TestPartResult::kSkip) {
+ const std::string& result = test_part_result.message();
+ printf("%s\n", result.c_str());
+ }
+ }
+ fflush(stdout);
+ } else if (!Test::HasFatalFailure()) {
+ for (int test_index = 0; test_index < total_test_suite_count();
test_index++) {
- GetMutableTestCase(test_index)->Run();
+ GetMutableSuiteCase(test_index)->Run();
}
}
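
The new handling means a GTEST_SKIP() raised while global environments are being set up suppresses the test bodies and still surfaces its message. A hedged example of triggering that path (the environment class and its helper are invented):

    #include "gtest/gtest.h"

    class RequiresDatabase : public ::testing::Environment {
     public:
      void SetUp() override {
        if (!ConnectedToDatabase())
          GTEST_SKIP() << "no database available; skipping the whole run";
      }
      // Placeholder check; a real project would probe its own dependency.
      static bool ConnectedToDatabase() { return false; }
    };

    // Typically registered in main() before RUN_ALL_TESTS():
    //   ::testing::AddGlobalTestEnvironment(new RequiresDatabase);
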
@@ -5353,7 +5479,7 @@ Int32 Int32FromEnvOrDie(const char* var, Int32 default_val) {
}
// Given the total number of shards, the shard index, and the test id,
-// returns true iff the test should be run on this shard. The test id is
+// returns true if the test should be run on this shard. The test id is
// some arbitrary but unique non-negative integer assigned to each test
// method. Assumes that 0 <= shard_index < total_shards.
bool ShouldRunTestOnShard(int total_shards, int shard_index, int test_id) {
@@ -5362,7 +5488,7 @@ bool ShouldRunTestOnShard(int total_shards, int shard_index, int test_id) {
// Compares the name of each test with the user-specified filter to
// decide whether the test should be run, then records the result in
-// each TestCase and TestInfo object.
+// each TestSuite and TestInfo object.
// If shard_tests == true, further filters tests based on sharding
// variables in the environment - see
// https://github.com/google/googletest/blob/master/googletest/docs/advanced.md
@@ -5379,26 +5505,23 @@ int UnitTestImpl::FilterTests(ReactionToSharding shard_tests) {
// this shard.
int num_runnable_tests = 0;
int num_selected_tests = 0;
- for (size_t i = 0; i < test_cases_.size(); i++) {
- TestCase* const test_case = test_cases_[i];
- const std::string &test_case_name = test_case->name();
- test_case->set_should_run(false);
+ for (auto* test_suite : test_suites_) {
+ const std::string& test_suite_name = test_suite->name();
+ test_suite->set_should_run(false);
- for (size_t j = 0; j < test_case->test_info_list().size(); j++) {
- TestInfo* const test_info = test_case->test_info_list()[j];
+ for (size_t j = 0; j < test_suite->test_info_list().size(); j++) {
+ TestInfo* const test_info = test_suite->test_info_list()[j];
const std::string test_name(test_info->name());
- // A test is disabled if test case name or test name matches
+ // A test is disabled if test suite name or test name matches
// kDisableTestFilter.
- const bool is_disabled =
- internal::UnitTestOptions::MatchesFilter(test_case_name,
- kDisableTestFilter) ||
- internal::UnitTestOptions::MatchesFilter(test_name,
- kDisableTestFilter);
+ const bool is_disabled = internal::UnitTestOptions::MatchesFilter(
+ test_suite_name, kDisableTestFilter) ||
+ internal::UnitTestOptions::MatchesFilter(
+ test_name, kDisableTestFilter);
test_info->is_disabled_ = is_disabled;
- const bool matches_filter =
- internal::UnitTestOptions::FilterMatchesTest(test_case_name,
- test_name);
+ const bool matches_filter = internal::UnitTestOptions::FilterMatchesTest(
+ test_suite_name, test_name);
test_info->matches_filter_ = matches_filter;
const bool is_runnable =
@@ -5415,7 +5538,7 @@ int UnitTestImpl::FilterTests(ReactionToSharding shard_tests) {
num_selected_tests += is_selected;
test_info->should_run_ = is_selected;
- test_case->set_should_run(test_case->should_run() || is_selected);
+ test_suite->set_should_run(test_suite->should_run() || is_selected);
}
}
return num_selected_tests;
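
For reference, the disable filter matched above is driven purely by naming: a test is excluded from a normal run when either its suite name or its test name carries the DISABLED_ prefix. A tiny example (names invented):

    #include "gtest/gtest.h"

    // Disabled because the test name matches the disable filter.
    TEST(FilterDemo, DISABLED_Flaky) { SUCCEED(); }

    // Disabled because the suite name matches it.
    TEST(DISABLED_FilterDemo, Anything) { SUCCEED(); }
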
@@ -5448,22 +5571,20 @@ void UnitTestImpl::ListTestsMatchingFilter() {
// Print at most this many characters for each type/value parameter.
const int kMaxParamLength = 250;
- for (size_t i = 0; i < test_cases_.size(); i++) {
- const TestCase* const test_case = test_cases_[i];
- bool printed_test_case_name = false;
+ for (auto* test_suite : test_suites_) {
+ bool printed_test_suite_name = false;
- for (size_t j = 0; j < test_case->test_info_list().size(); j++) {
- const TestInfo* const test_info =
- test_case->test_info_list()[j];
+ for (size_t j = 0; j < test_suite->test_info_list().size(); j++) {
+ const TestInfo* const test_info = test_suite->test_info_list()[j];
if (test_info->matches_filter_) {
- if (!printed_test_case_name) {
- printed_test_case_name = true;
- printf("%s.", test_case->name());
- if (test_case->type_param() != nullptr) {
+ if (!printed_test_suite_name) {
+ printed_test_suite_name = true;
+ printf("%s.", test_suite->name());
+ if (test_suite->type_param() != nullptr) {
printf(" # %s = ", kTypeParamLabel);
// We print the type parameter on a single line to make
// the output easy to parse by a program.
- PrintOnOneLine(test_case->type_param(), kMaxParamLength);
+ PrintOnOneLine(test_suite->type_param(), kMaxParamLength);
}
printf("\n");
}
@@ -5487,11 +5608,11 @@ void UnitTestImpl::ListTestsMatchingFilter() {
if (output_format == "xml") {
XmlUnitTestResultPrinter(
UnitTestOptions::GetAbsolutePathToOutputFile().c_str())
- .PrintXmlTestsList(&stream, test_cases_);
+ .PrintXmlTestsList(&stream, test_suites_);
} else if (output_format == "json") {
JsonUnitTestResultPrinter(
UnitTestOptions::GetAbsolutePathToOutputFile().c_str())
- .PrintJsonTestList(&stream, test_cases_);
+ .PrintJsonTestList(&stream, test_suites_);
}
fprintf(fileout, "%s", StringStreamToString(&stream).c_str());
fclose(fileout);
@@ -5531,35 +5652,35 @@ TestResult* UnitTestImpl::current_test_result() {
if (current_test_info_ != nullptr) {
return &current_test_info_->result_;
}
- if (current_test_case_ != nullptr) {
- return &current_test_case_->ad_hoc_test_result_;
+ if (current_test_suite_ != nullptr) {
+ return &current_test_suite_->ad_hoc_test_result_;
}
return &ad_hoc_test_result_;
}
-// Shuffles all test cases, and the tests within each test case,
+// Shuffles all test suites, and the tests within each test suite,
// making sure that death tests are still run first.
void UnitTestImpl::ShuffleTests() {
- // Shuffles the death test cases.
- ShuffleRange(random(), 0, last_death_test_case_ + 1, &test_case_indices_);
+ // Shuffles the death test suites.
+ ShuffleRange(random(), 0, last_death_test_suite_ + 1, &test_suite_indices_);
- // Shuffles the non-death test cases.
- ShuffleRange(random(), last_death_test_case_ + 1,
- static_cast<int>(test_cases_.size()), &test_case_indices_);
+ // Shuffles the non-death test suites.
+ ShuffleRange(random(), last_death_test_suite_ + 1,
+ static_cast<int>(test_suites_.size()), &test_suite_indices_);
- // Shuffles the tests inside each test case.
- for (size_t i = 0; i < test_cases_.size(); i++) {
- test_cases_[i]->ShuffleTests(random());
+ // Shuffles the tests inside each test suite.
+ for (auto& test_suite : test_suites_) {
+ test_suite->ShuffleTests(random());
}
}
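
Shuffling is normally requested with --gtest_shuffle / --gtest_random_seed; a minimal sketch of setting the same flags programmatically in a custom main() (the seed value is arbitrary):

    #include "gtest/gtest.h"

    int main(int argc, char** argv) {
      ::testing::InitGoogleTest(&argc, argv);
      // Same effect as --gtest_shuffle --gtest_random_seed=12345; death test
      // suites are still shuffled only within their own leading range.
      ::testing::GTEST_FLAG(shuffle) = true;
      ::testing::GTEST_FLAG(random_seed) = 12345;
      return RUN_ALL_TESTS();
    }
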
-// Restores the test cases and tests to their order before the first shuffle.
+// Restores the test suites and tests to their order before the first shuffle.
void UnitTestImpl::UnshuffleTests() {
- for (size_t i = 0; i < test_cases_.size(); i++) {
- // Unshuffles the tests in each test case.
- test_cases_[i]->UnshuffleTests();
- // Resets the index of each test case.
- test_case_indices_[i] = static_cast<int>(i);
+ for (size_t i = 0; i < test_suites_.size(); i++) {
+ // Unshuffles the tests in each test suite.
+ test_suites_[i]->UnshuffleTests();
+ // Resets the index of each test suite.
+ test_suite_indices_[i] = static_cast<int>(i);
}
}
@@ -5723,8 +5844,6 @@ static bool HasGoogleTestFlagPrefix(const char* str) {
// @Y changes the color to yellow.
// @D changes to the default terminal text color.
//
-// FIXME: Write tests for this once we add stdout
-// capturing to Google Test.
static void PrintColorEncoded(const char* str) {
GTestColor color = COLOR_DEFAULT; // The current color.
@@ -5934,7 +6053,7 @@ void ParseGoogleTestFlagsOnlyImpl(int* argc, CharType** argv) {
void ParseGoogleTestFlagsOnly(int* argc, char** argv) {
ParseGoogleTestFlagsOnlyImpl(argc, argv);
- // Fix the value of *_NSGetArgc() on macOS, but iff
+  // Fix the value of *_NSGetArgc() on macOS, but only if
// *_NSGetArgv() == argv
// Only applicable to char** version of argv
#if GTEST_OS_MAC
@@ -6002,6 +6121,22 @@ void InitGoogleTest(int* argc, wchar_t** argv) {
#endif // defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_)
}
+// This overloaded version can be used on Arduino/embedded platforms where
+// there is no argc/argv.
+void InitGoogleTest() {
+ // Since Arduino doesn't have a command line, fake out the argc/argv arguments
+ int argc = 1;
+ const auto arg0 = "dummy";
+ char* argv0 = const_cast<char*>(arg0);
+ char** argv = &argv0;
+
+#if defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_)
+ GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_(&argc, argv);
+#else // defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_)
+ internal::InitGoogleTestImpl(&argc, argv);
+#endif // defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_)
+}
+
std::string TempDir() {
#if defined(GTEST_CUSTOM_TEMPDIR_FUNCTION_)
return GTEST_CUSTOM_TEMPDIR_FUNCTION_();
diff --git a/googletest/src/gtest_main.cc b/googletest/src/gtest_main.cc
index ee1ae52b..f6e1dd96 100644
--- a/googletest/src/gtest_main.cc
+++ b/googletest/src/gtest_main.cc
@@ -27,18 +27,12 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#include <stdio.h>
+#include <cstdio>
#include "gtest/gtest.h"
#ifdef ARDUINO
void setup() {
- // Since Arduino doesn't have a command line, fake out the argc/argv arguments
- int argc = 1;
- const auto arg0 = "PlatformIO";
- char* argv0 = const_cast<char*>(arg0);
- char** argv = &argv0;
-
- testing::InitGoogleTest(&argc, argv);
+ testing::InitGoogleTest();
}
void loop() { RUN_ALL_TESTS(); }