Add a way to terminate repeated runs on failure.

When running with the --gtest_repeat=XX option, the standard gtest
framework terminates the run after any test fails if either the
--gtest_break_on_failure or the --gtest_throw_on_failure option is
specified. Make the new runner do the same; otherwise there is no
stop-on-error logic for repeated runs.

Add new unit and system tests to verify this behavior.
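
For context, a minimal, self-contained sketch of the control flow this
change introduces is included below. The Options struct, RunIteration,
and RunRepeated are simplified stand-ins for illustration only, not the
real Isolate/Options code: a failing iteration sets the exit code, and
the repeat loop is broken early only when stop-on-error was derived
from --gtest_break_on_failure or --gtest_throw_on_failure.

  #include <cstdio>

  namespace sketch {

  // Stand-in for the runner's Options class: the real code derives
  // stop_on_error_ from the gtest_break_on_failure /
  // gtest_throw_on_failure bools while processing arguments and
  // environment variables.
  struct Options {
    int num_iterations = 1;
    bool break_on_failure = false;
    bool throw_on_failure = false;
    bool stop_on_error() const { return break_on_failure || throw_on_failure; }
  };

  // Pretend iteration: returns the number of failing tests. In the
  // real runner this is where the isolated child processes run and
  // are tallied.
  int RunIteration(int iteration) {
    return iteration == 0 ? 1 : 0;  // first iteration "fails"
  }

  int RunRepeated(const Options& options) {
    int exit_code = 0;
    for (int i = 0; i < options.num_iterations; i++) {
      if (RunIteration(i) != 0) {
        exit_code = 1;
        // Mirrors the check added to Isolate.cpp: only a repeated run
        // with stop-on-error enabled terminates before all iterations
        // finish.
        if (options.stop_on_error() && options.num_iterations > 1) {
          printf("\nTerminating repeat run due to failing tests (iteration %d).\n",
                 i + 1);
          break;
        }
      }
    }
    return exit_code;
  }

  }  // namespace sketch

  int main() {
    sketch::Options options;
    options.num_iterations = 2;       // as with --gtest_repeat=2
    options.break_on_failure = true;  // as with --gtest_break_on_failure
    return sketch::RunRepeated(options);
  }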

Test: All unit tests pass.
Change-Id: If4d1e3eafc028d8850c73b58c72d9c138a2198d4
diff --git a/Isolate.cpp b/Isolate.cpp
index 8f76f54..f7ea4e3 100644
--- a/Isolate.cpp
+++ b/Isolate.cpp
@@ -808,6 +808,10 @@
 
     if (total_pass_tests_ + total_skipped_tests_ + total_xfail_tests_ != tests_.size()) {
       exit_code = 1;
+      if (options_.stop_on_error() && options_.num_iterations() > 1) {
+        printf("\nTerminating repeat run due to failing tests (iteration %d).\n", i + 1);
+        break;
+      }
     }
   }
 
diff --git a/Options.cpp b/Options.cpp
index 96842a1..7e54259 100644
--- a/Options.cpp
+++ b/Options.cpp
@@ -64,12 +64,12 @@
      {FLAG_ENVIRONMENT_VARIABLE | FLAG_REQUIRES_VALUE | FLAG_CHILD, &Options::SetString}},
     {"gtest_death_test_style",
      {FLAG_ENVIRONMENT_VARIABLE | FLAG_REQUIRES_VALUE | FLAG_CHILD, nullptr}},
-    {"gtest_break_on_failure", {FLAG_ENVIRONMENT_VARIABLE | FLAG_INCOMPATIBLE, nullptr}},
+    {"gtest_break_on_failure", {FLAG_ENVIRONMENT_VARIABLE, &Options::SetBool}},
     {"gtest_catch_exceptions", {FLAG_ENVIRONMENT_VARIABLE | FLAG_INCOMPATIBLE, nullptr}},
     {"gtest_random_seed", {FLAG_ENVIRONMENT_VARIABLE | FLAG_INCOMPATIBLE, nullptr}},
     {"gtest_shuffle", {FLAG_ENVIRONMENT_VARIABLE | FLAG_INCOMPATIBLE, nullptr}},
     {"gtest_stream_result_to", {FLAG_ENVIRONMENT_VARIABLE | FLAG_INCOMPATIBLE, nullptr}},
-    {"gtest_throw_on_failure", {FLAG_ENVIRONMENT_VARIABLE | FLAG_INCOMPATIBLE, nullptr}},
+    {"gtest_throw_on_failure", {FLAG_ENVIRONMENT_VARIABLE, &Options::SetBool}},
     {"gtest_shard_index",
      {FLAG_ENVIRONMENT_VARIABLE | FLAG_REQUIRES_VALUE, &Options::SetNumericEnvOnly}},
     {"gtest_total_shards",
@@ -311,6 +311,7 @@
   // Initialize the variables.
   job_count_ = static_cast<size_t>(sysconf(_SC_NPROCESSORS_ONLN));
   num_iterations_ = ::testing::GTEST_FLAG(repeat);
+  stop_on_error_ = false;
   numerics_.clear();
   numerics_["deadline_threshold_ms"] = kDefaultDeadlineThresholdMs;
   numerics_["slow_threshold_ms"] = kDefaultSlowThresholdMs;
@@ -325,6 +326,8 @@
   bools_["gtest_print_time"] = ::testing::GTEST_FLAG(print_time);
   bools_["gtest_also_run_disabled_tests"] = ::testing::GTEST_FLAG(also_run_disabled_tests);
   bools_["gtest_list_tests"] = false;
+  bools_["gtest_break_on_failure"] = false;
+  bools_["gtest_throw_on_failure"] = false;
 
   // This does nothing, only added so that passing this option does not exit.
   bools_["gtest_format"] = true;
@@ -373,6 +376,10 @@
     }
   }
 
+  if (bools_["gtest_break_on_failure"] || bools_["gtest_throw_on_failure"]) {
+    stop_on_error_ = true;
+  }
+
   return true;
 }
 
diff --git a/Options.h b/Options.h
index 883940a..ad51e18 100644
--- a/Options.h
+++ b/Options.h
@@ -36,6 +36,7 @@
 
   size_t job_count() const { return job_count_; }
   int num_iterations() const { return num_iterations_; }
+  bool stop_on_error() const { return stop_on_error_; }
 
   uint64_t deadline_threshold_ms() const { return numerics_.at("deadline_threshold_ms"); }
   uint64_t slow_threshold_ms() const { return numerics_.at("slow_threshold_ms"); }
@@ -54,6 +55,7 @@
  private:
   size_t job_count_;
   int num_iterations_;
+  bool stop_on_error_;
 
   std::unordered_map<std::string, bool> bools_;
   std::unordered_map<std::string, std::string> strings_;
diff --git a/tests/OptionsTest.cpp b/tests/OptionsTest.cpp
index 97089a3..434c4df 100644
--- a/tests/OptionsTest.cpp
+++ b/tests/OptionsTest.cpp
@@ -95,6 +95,7 @@
   EXPECT_EQ("", options.xml_file());
   EXPECT_EQ("", options.filter());
   EXPECT_EQ(1, options.num_iterations());
+  EXPECT_FALSE(options.stop_on_error());
   EXPECT_TRUE(options.print_time());
   EXPECT_FALSE(options.allow_disabled_tests());
   EXPECT_FALSE(options.list_tests());
@@ -628,6 +629,27 @@
   EXPECT_EQ("Unable to read data from file /this/does/not/exist\n", capture.str());
 }
 
+TEST_F(OptionsTest, stop_on_error) {
+  std::vector<const char*> cur_args{"ignore", "--gtest_break_on_failure"};
+  Options options;
+  ASSERT_TRUE(options.Process(cur_args, &child_args_));
+  EXPECT_TRUE(options.stop_on_error());
+  EXPECT_THAT(child_args_, ElementsAre(StrEq("ignore")));
+
+  ClearChildArgs();
+  cur_args = std::vector<const char*>{"ignore", "--gtest_throw_on_failure"};
+  ASSERT_TRUE(options.Process(cur_args, &child_args_));
+  EXPECT_TRUE(options.stop_on_error());
+  EXPECT_THAT(child_args_, ElementsAre(StrEq("ignore")));
+
+  ClearChildArgs();
+  cur_args =
+      std::vector<const char*>{"ignore", "--gtest_break_on_failure", "--gtest_throw_on_failure"};
+  ASSERT_TRUE(options.Process(cur_args, &child_args_));
+  EXPECT_TRUE(options.stop_on_error());
+  EXPECT_THAT(child_args_, ElementsAre(StrEq("ignore")));
+}
+
 void OptionsTest::CheckIncompatible(const std::string arg) {
   CapturedStdout capture;
   std::vector<const char*> cur_args{"ignore", arg.c_str()};
@@ -639,12 +661,10 @@
 }
 
 TEST_F(OptionsTest, incompatible) {
-  ASSERT_NO_FATAL_FAILURE(CheckIncompatible("--gtest_break_on_failure"));
   ASSERT_NO_FATAL_FAILURE(CheckIncompatible("--gtest_catch_exceptions"));
   ASSERT_NO_FATAL_FAILURE(CheckIncompatible("--gtest_random_seed"));
   ASSERT_NO_FATAL_FAILURE(CheckIncompatible("--gtest_shuffle"));
   ASSERT_NO_FATAL_FAILURE(CheckIncompatible("--gtest_stream_result_to"));
-  ASSERT_NO_FATAL_FAILURE(CheckIncompatible("--gtest_throw_on_failure"));
 }
 
 TEST_F(OptionsTest, verify_non_env_variables) {
@@ -663,6 +683,7 @@
   EXPECT_EQ("", options.xml_file());
   EXPECT_EQ("", options.filter());
   EXPECT_EQ(1, options.num_iterations());
+  EXPECT_FALSE(options.stop_on_error());
   EXPECT_TRUE(options.print_time());
   EXPECT_FALSE(options.allow_disabled_tests());
   EXPECT_FALSE(options.list_tests());
@@ -901,6 +922,38 @@
   ASSERT_NE(-1, unsetenv("GTEST_DEATH_TEST_STYLE"));
 }
 
+TEST_F(OptionsTest, stop_on_error_from_env) {
+  ASSERT_NE(-1, setenv("GTEST_BREAK_ON_FAILURE", "", 1));
+
+  std::vector<const char*> cur_args{"ignore"};
+  Options options;
+  ASSERT_TRUE(options.Process(cur_args, &child_args_));
+  EXPECT_TRUE(options.stop_on_error());
+  EXPECT_THAT(child_args_, ElementsAre(StrEq("ignore")));
+
+  ASSERT_NE(-1, unsetenv("GTEST_BREAK_ON_FAILURE"));
+
+  ASSERT_NE(-1, setenv("GTEST_THROW_ON_FAILURE", "", 1));
+
+  ClearChildArgs();
+  ASSERT_TRUE(options.Process(cur_args, &child_args_));
+  EXPECT_TRUE(options.stop_on_error());
+  EXPECT_THAT(child_args_, ElementsAre(StrEq("ignore")));
+
+  ASSERT_NE(-1, unsetenv("GTEST_THROW_ON_FAILURE"));
+
+  ASSERT_NE(-1, setenv("GTEST_BREAK_ON_FAILURE", "", 1));
+  ASSERT_NE(-1, setenv("GTEST_THROW_ON_FAILURE", "", 1));
+
+  ClearChildArgs();
+  ASSERT_TRUE(options.Process(cur_args, &child_args_));
+  EXPECT_TRUE(options.stop_on_error());
+  EXPECT_THAT(child_args_, ElementsAre(StrEq("ignore")));
+
+  ASSERT_NE(-1, unsetenv("GTEST_BREAK_ON_FAILURE"));
+  ASSERT_NE(-1, unsetenv("GTEST_THROW_ON_FAILURE"));
+}
+
 void OptionsTest::CheckIncompatibleFromEnv(const std::string env_var) {
   ASSERT_NE(-1, setenv(env_var.c_str(), "", 1));
 
@@ -926,12 +979,10 @@
 }
 
 TEST_F(OptionsTest, incompatible_from_env) {
-  ASSERT_NO_FATAL_FAILURE(CheckIncompatibleFromEnv("GTEST_BREAK_ON_FAILURE"));
   ASSERT_NO_FATAL_FAILURE(CheckIncompatibleFromEnv("GTEST_CATCH_EXCEPTIONS"));
   ASSERT_NO_FATAL_FAILURE(CheckIncompatibleFromEnv("GTEST_RANDOM_SEED"));
   ASSERT_NO_FATAL_FAILURE(CheckIncompatibleFromEnv("GTEST_SHUFFLE"));
   ASSERT_NO_FATAL_FAILURE(CheckIncompatibleFromEnv("GTEST_STREAM_RESULT_TO"));
-  ASSERT_NO_FATAL_FAILURE(CheckIncompatibleFromEnv("GTEST_THROW_ON_FAILURE"));
 }
 
 }  // namespace gtest_extras
diff --git a/tests/SystemTests.cpp b/tests/SystemTests.cpp
index 821c055..2b434a9 100644
--- a/tests/SystemTests.cpp
+++ b/tests/SystemTests.cpp
@@ -1328,6 +1328,89 @@
       Verify("*.DISABLED_pass", expected, 0, std::vector<const char*>{flagfile.c_str()}));
 }
 
+TEST_F(SystemTests, verify_repeat_stop_on_error) {
+  std::string expected =
+      "Note: Google Test filter = *.DISABLED_fail\n"
+      "[==========] Running 1 test from 1 test suite (20 jobs).\n"
+      "[ RUN      ] SystemTests.DISABLED_fail\n"
+      "file:(XX) Failure in test SystemTests.DISABLED_fail\n"
+      "Expected equality of these values:\n"
+      "  1\n"
+      "  0\n"
+      "SystemTests.DISABLED_fail exited with exitcode 1.\n"
+      "[  FAILED  ] SystemTests.DISABLED_fail (XX ms)\n"
+      "[==========] 1 test from 1 test suite ran. (XX ms total)\n"
+      "[  PASSED  ] 0 tests.\n"
+      "[  FAILED  ] 1 test, listed below:\n"
+      "[  FAILED  ] SystemTests.DISABLED_fail\n"
+      "\n"
+      " 1 FAILED TEST\n"
+      "\n"
+      "Terminating repeat run due to failing tests (iteration 1).\n";
+  ASSERT_NO_FATAL_FAILURE(
+      Verify("*.DISABLED_fail", expected, 1,
+             std::vector<const char*>{"--gtest_repeat=2", "--gtest_break_on_failure"}));
+}
+
+TEST_F(SystemTests, verify_repeat_no_stop_on_error) {
+  std::string expected =
+      "Note: Google Test filter = *.DISABLED_fail\n"
+      "[==========] Running 1 test from 1 test suite (20 jobs).\n"
+      "[ RUN      ] SystemTests.DISABLED_fail\n"
+      "file:(XX) Failure in test SystemTests.DISABLED_fail\n"
+      "Expected equality of these values:\n"
+      "  1\n"
+      "  0\n"
+      "SystemTests.DISABLED_fail exited with exitcode 1.\n"
+      "[  FAILED  ] SystemTests.DISABLED_fail (XX ms)\n"
+      "[==========] 1 test from 1 test suite ran. (XX ms total)\n"
+      "[  PASSED  ] 0 tests.\n"
+      "[  FAILED  ] 1 test, listed below:\n"
+      "[  FAILED  ] SystemTests.DISABLED_fail\n"
+      "\n"
+      " 1 FAILED TEST\n"
+      "\n"
+      "Repeating all tests (iteration 2) . . .\n"
+      "\n"
+      "[==========] Running 1 test from 1 test suite (20 jobs).\n"
+      "[ RUN      ] SystemTests.DISABLED_fail\n"
+      "file:(XX) Failure in test SystemTests.DISABLED_fail\n"
+      "Expected equality of these values:\n"
+      "  1\n"
+      "  0\n"
+      "SystemTests.DISABLED_fail exited with exitcode 1.\n"
+      "[  FAILED  ] SystemTests.DISABLED_fail (XX ms)\n"
+      "[==========] 1 test from 1 test suite ran. (XX ms total)\n"
+      "[  PASSED  ] 0 tests.\n"
+      "[  FAILED  ] 1 test, listed below:\n"
+      "[  FAILED  ] SystemTests.DISABLED_fail\n"
+      "\n"
+      " 1 FAILED TEST\n";
+  ASSERT_NO_FATAL_FAILURE(
+      Verify("*.DISABLED_fail", expected, 1, std::vector<const char*>{"--gtest_repeat=2"}));
+}
+
+TEST_F(SystemTests, verify_single_no_terminate_message) {
+  std::string expected =
+      "Note: Google Test filter = *.DISABLED_fail\n"
+      "[==========] Running 1 test from 1 test suite (20 jobs).\n"
+      "[ RUN      ] SystemTests.DISABLED_fail\n"
+      "file:(XX) Failure in test SystemTests.DISABLED_fail\n"
+      "Expected equality of these values:\n"
+      "  1\n"
+      "  0\n"
+      "SystemTests.DISABLED_fail exited with exitcode 1.\n"
+      "[  FAILED  ] SystemTests.DISABLED_fail (XX ms)\n"
+      "[==========] 1 test from 1 test suite ran. (XX ms total)\n"
+      "[  PASSED  ] 0 tests.\n"
+      "[  FAILED  ] 1 test, listed below:\n"
+      "[  FAILED  ] SystemTests.DISABLED_fail\n"
+      "\n"
+      " 1 FAILED TEST\n";
+  ASSERT_NO_FATAL_FAILURE(
+      Verify("*.DISABLED_fail", expected, 1, std::vector<const char*>{"--gtest_break_on_failure"}));
+}
+
 // These tests are used by the verify_disabled tests.
 TEST_F(SystemTests, always_pass) {}