Snap for 8570526 from 65b44751cc8cc809145850c4f66c2976085a306f to mainline-media-swcodec-release

Change-Id: I66177b0aca259c90f78be854ae1471e4f60850db
diff --git a/Android.bp b/Android.bp
index c728f22..ef855f2 100644
--- a/Android.bp
+++ b/Android.bp
@@ -19,7 +19,7 @@
 cc_library_static {
     name: "libgtest_isolated",
     host_supported: true,
-    cflags: ["-Wall", "-Werror"],
+    cflags: ["-Wall", "-Werror", "-Wextra", "-Wconversion"],
     export_include_dirs: ["include"],
 
     srcs: [
@@ -45,7 +45,7 @@
 cc_library_static {
     name: "libgtest_isolated_main",
     host_supported: true,
-    cflags: ["-Wall", "-Werror"],
+    cflags: ["-Wall", "-Werror", "-Wextra"],
     srcs: [
         "Main.cpp",
     ],
@@ -68,7 +68,7 @@
         "tests/OptionsTest.cpp",
         "tests/SystemTests.cpp",
     ],
-    cflags: ["-Wall", "-Werror"],
+    cflags: ["-Wall", "-Werror", "-Wextra"],
 
     shared_libs: [
         "libbase",
diff --git a/Isolate.cpp b/Isolate.cpp
index 8f76f54..689c3ce 100644
--- a/Isolate.cpp
+++ b/Isolate.cpp
@@ -105,9 +105,9 @@
     FATAL_PLOG("Unexpected failure from popen");
   }
 
-  size_t total_shards = options_.total_shards();
+  uint64_t total_shards = options_.total_shards();
   bool sharded = total_shards > 1;
-  size_t test_count = 0;
+  uint64_t test_count = 0;
   if (sharded) {
     test_count = options_.shard_index() + 1;
   }
@@ -187,7 +187,7 @@
   std::string filter("--gtest_filter=" + GetTestName(test));
   args.push_back(filter.data());
 
-  int argc = args.size();
+  int argc = static_cast<int>(args.size());
   // Add the null terminator.
   args.push_back(nullptr);
   ::testing::InitGoogleTest(&argc, args.data());
@@ -262,7 +262,7 @@
 }
 
 void Isolate::ReadTestsOutput() {
-  int ready = poll(running_pollfds_.data(), running_pollfds_.size(), 0);
+  int ready = poll(running_pollfds_.data(), static_cast<nfds_t>(running_pollfds_.size()), 0);
   if (ready <= 0) {
     return;
   }
@@ -285,7 +285,7 @@
   size_t finished_tests = 0;
   int status;
   pid_t pid;
-  while ((pid = TEMP_FAILURE_RETRY(waitpid(-1, &status, WNOHANG))) > 0) {
+  while ((pid = static_cast<pid_t>(TEMP_FAILURE_RETRY(waitpid(-1, &status, WNOHANG)))) > 0) {
     if (pid == -1) {
       FATAL_PLOG("Unexpected failure from waitpid");
     }
@@ -808,6 +808,10 @@
 
     if (total_pass_tests_ + total_skipped_tests_ + total_xfail_tests_ != tests_.size()) {
       exit_code = 1;
+      if (options_.stop_on_error() && options_.num_iterations() > 1) {
+        printf("\nTerminating repeat run due to failing tests (iteration %d).\n", i + 1);
+        break;
+      }
     }
   }
 
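The final Isolate.cpp hunk is the behavioural core of the patch: when stop_on_error() is set and an iteration finishes with failing tests, the --gtest_repeat loop is cut short. A minimal sketch of that control flow follows; OptionsSketch and RunOneIteration are hypothetical stand-ins for the real Options object and test runner, while the condition and message match the hunk above.

#include <cstdio>

struct OptionsSketch {  // hypothetical stand-in for Options
  int num_iterations() const { return 3; }
  bool stop_on_error() const { return true; }
};

// Pretend every iteration has a failing test; the real runner forks the tests.
bool RunOneIteration(int /*iteration*/) { return false; }

int main() {
  OptionsSketch options_;
  int exit_code = 0;
  for (int i = 0; i < options_.num_iterations(); i++) {
    if (!RunOneIteration(i)) {
      exit_code = 1;
      // Same condition and message as the Isolate.cpp hunk above.
      if (options_.stop_on_error() && options_.num_iterations() > 1) {
        printf("\nTerminating repeat run due to failing tests (iteration %d).\n", i + 1);
        break;
      }
    }
  }
  return exit_code;
}

With this in place, a run such as --gtest_repeat=2 --gtest_break_on_failure stops after the first failing iteration (exercised by verify_repeat_stop_on_error below), while the same repeat run without the flag still executes every iteration (verify_repeat_no_stop_on_error).
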
diff --git a/IsolateMain.cpp b/IsolateMain.cpp
index 5b34ef5..77db51b 100644
--- a/IsolateMain.cpp
+++ b/IsolateMain.cpp
@@ -15,10 +15,10 @@
  */
 
 #include <errno.h>
+#include <libgen.h>
 #include <limits.h>
 #include <stdio.h>
 #include <stdlib.h>
-#include <string.h>
 #include <unistd.h>
 
 #include <cstring>
@@ -77,7 +77,7 @@
 }
 
 static int GtestRun(std::vector<const char*>* args) {
-  int argc = args->size();
+  int argc = static_cast<int>(args->size());
   args->push_back(nullptr);
   ::testing::InitGoogleTest(&argc, const_cast<char**>(args->data()));
   return RUN_ALL_TESTS();
@@ -102,7 +102,7 @@
     pid_t ppid = getppid();
     std::string exe_path = std::string("/proc/") + std::to_string(ppid) + "/exe";
     char buf[PATH_MAX + 1];
-    size_t len;
+    ssize_t len;
     // NB We can't use things like android::base::* or std::filesystem::* due to linking
     // issues.
     // Since PATH_MAX is the longest a symlink can be in posix we don't need to
diff --git a/Options.cpp b/Options.cpp
index 96842a1..36f618d 100644
--- a/Options.cpp
+++ b/Options.cpp
@@ -64,12 +64,12 @@
      {FLAG_ENVIRONMENT_VARIABLE | FLAG_REQUIRES_VALUE | FLAG_CHILD, &Options::SetString}},
     {"gtest_death_test_style",
      {FLAG_ENVIRONMENT_VARIABLE | FLAG_REQUIRES_VALUE | FLAG_CHILD, nullptr}},
-    {"gtest_break_on_failure", {FLAG_ENVIRONMENT_VARIABLE | FLAG_INCOMPATIBLE, nullptr}},
+    {"gtest_break_on_failure", {FLAG_ENVIRONMENT_VARIABLE, &Options::SetBool}},
     {"gtest_catch_exceptions", {FLAG_ENVIRONMENT_VARIABLE | FLAG_INCOMPATIBLE, nullptr}},
     {"gtest_random_seed", {FLAG_ENVIRONMENT_VARIABLE | FLAG_INCOMPATIBLE, nullptr}},
     {"gtest_shuffle", {FLAG_ENVIRONMENT_VARIABLE | FLAG_INCOMPATIBLE, nullptr}},
     {"gtest_stream_result_to", {FLAG_ENVIRONMENT_VARIABLE | FLAG_INCOMPATIBLE, nullptr}},
-    {"gtest_throw_on_failure", {FLAG_ENVIRONMENT_VARIABLE | FLAG_INCOMPATIBLE, nullptr}},
+    {"gtest_throw_on_failure", {FLAG_ENVIRONMENT_VARIABLE, &Options::SetBool}},
     {"gtest_shard_index",
      {FLAG_ENVIRONMENT_VARIABLE | FLAG_REQUIRES_VALUE, &Options::SetNumericEnvOnly}},
     {"gtest_total_shards",
@@ -213,14 +213,14 @@
 }
 
 static bool ReadFileToString(const std::string& file, std::string* contents) {
-  int fd = TEMP_FAILURE_RETRY(open(file.c_str(), O_RDONLY | O_CLOEXEC));
+  int fd = static_cast<int>(TEMP_FAILURE_RETRY(open(file.c_str(), O_RDONLY | O_CLOEXEC)));
   if (fd == -1) {
     return false;
   }
   char buf[4096];
   ssize_t bytes_read;
   while ((bytes_read = TEMP_FAILURE_RETRY(read(fd, &buf, sizeof(buf)))) > 0) {
-    contents->append(buf, bytes_read);
+    contents->append(buf, static_cast<size_t>(bytes_read));
   }
   close(fd);
   return true;
@@ -311,6 +311,7 @@
   // Initialize the variables.
   job_count_ = static_cast<size_t>(sysconf(_SC_NPROCESSORS_ONLN));
   num_iterations_ = ::testing::GTEST_FLAG(repeat);
+  stop_on_error_ = false;
   numerics_.clear();
   numerics_["deadline_threshold_ms"] = kDefaultDeadlineThresholdMs;
   numerics_["slow_threshold_ms"] = kDefaultSlowThresholdMs;
@@ -325,6 +326,8 @@
   bools_["gtest_print_time"] = ::testing::GTEST_FLAG(print_time);
   bools_["gtest_also_run_disabled_tests"] = ::testing::GTEST_FLAG(also_run_disabled_tests);
   bools_["gtest_list_tests"] = false;
+  bools_["gtest_break_on_failure"] = false;
+  bools_["gtest_throw_on_failure"] = false;
 
   // This does nothing, only added so that passing this option does not exit.
   bools_["gtest_format"] = true;
@@ -373,6 +376,10 @@
     }
   }
 
+  if (bools_["gtest_break_on_failure"] || bools_["gtest_throw_on_failure"]) {
+    stop_on_error_ = true;
+  }
+
   return true;
 }
 
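The Options.cpp hunks stop rejecting --gtest_break_on_failure and --gtest_throw_on_failure as incompatible: both become plain booleans (default false, also settable through GTEST_BREAK_ON_FAILURE / GTEST_THROW_ON_FAILURE), and either one turns on the new stop_on_error_ field exposed by Options.h below. A condensed sketch of that derivation, with the flag table and real argument parsing elided:

#include <cstdio>
#include <string>
#include <unordered_map>

int main() {
  std::unordered_map<std::string, bool> bools_;
  bool stop_on_error_ = false;

  // Defaults added by the patch.
  bools_["gtest_break_on_failure"] = false;
  bools_["gtest_throw_on_failure"] = false;

  // In the real code SetBool flips these when the flag appears on the command
  // line or the matching GTEST_* environment variable is set; simulated here.
  bools_["gtest_break_on_failure"] = true;

  // Either flag alone is enough to request early termination of a repeat run.
  if (bools_["gtest_break_on_failure"] || bools_["gtest_throw_on_failure"]) {
    stop_on_error_ = true;
  }
  printf("stop_on_error = %s\n", stop_on_error_ ? "true" : "false");
  return 0;
}
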
diff --git a/Options.h b/Options.h
index 883940a..ad51e18 100644
--- a/Options.h
+++ b/Options.h
@@ -36,6 +36,7 @@
 
   size_t job_count() const { return job_count_; }
   int num_iterations() const { return num_iterations_; }
+  bool stop_on_error() const { return stop_on_error_; }
 
   uint64_t deadline_threshold_ms() const { return numerics_.at("deadline_threshold_ms"); }
   uint64_t slow_threshold_ms() const { return numerics_.at("slow_threshold_ms"); }
@@ -54,6 +55,7 @@
  private:
   size_t job_count_;
   int num_iterations_;
+  bool stop_on_error_;
 
   std::unordered_map<std::string, bool> bools_;
   std::unordered_map<std::string, std::string> strings_;
diff --git a/TEST_MAPPING b/TEST_MAPPING
index c4002f2..90cb945 100644
--- a/TEST_MAPPING
+++ b/TEST_MAPPING
@@ -3,5 +3,10 @@
     {
       "name": "gtest_isolated_tests"
     }
+  ],
+  "hwasan-postsubmit": [
+    {
+      "name": "gtest_isolated_tests"
+    }
   ]
 }
diff --git a/tests/OptionsTest.cpp b/tests/OptionsTest.cpp
index 97089a3..36f5bef 100644
--- a/tests/OptionsTest.cpp
+++ b/tests/OptionsTest.cpp
@@ -95,6 +95,7 @@
   EXPECT_EQ("", options.xml_file());
   EXPECT_EQ("", options.filter());
   EXPECT_EQ(1, options.num_iterations());
+  EXPECT_FALSE(options.stop_on_error());
   EXPECT_TRUE(options.print_time());
   EXPECT_FALSE(options.allow_disabled_tests());
   EXPECT_FALSE(options.list_tests());
@@ -560,7 +561,7 @@
   ASSERT_TRUE(options.Process(cur_args, &child_args_));
   EXPECT_EQ("no", options.color());
   EXPECT_FALSE(options.print_time());
-  EXPECT_EQ(10U, options.num_iterations());
+  EXPECT_EQ(10, options.num_iterations());
   EXPECT_THAT(child_args_, ElementsAre(StrEq("ignore"), StrEq("--gtest_color=no")));
 }
 
@@ -628,6 +629,27 @@
   EXPECT_EQ("Unable to read data from file /this/does/not/exist\n", capture.str());
 }
 
+TEST_F(OptionsTest, stop_on_error) {
+  std::vector<const char*> cur_args{"ignore", "--gtest_break_on_failure"};
+  Options options;
+  ASSERT_TRUE(options.Process(cur_args, &child_args_));
+  EXPECT_TRUE(options.stop_on_error());
+  EXPECT_THAT(child_args_, ElementsAre(StrEq("ignore")));
+
+  ClearChildArgs();
+  cur_args = std::vector<const char*>{"ignore", "--gtest_throw_on_failure"};
+  ASSERT_TRUE(options.Process(cur_args, &child_args_));
+  EXPECT_TRUE(options.stop_on_error());
+  EXPECT_THAT(child_args_, ElementsAre(StrEq("ignore")));
+
+  ClearChildArgs();
+  cur_args =
+      std::vector<const char*>{"ignore", "--gtest_break_on_failure", "--gtest_throw_on_failure"};
+  ASSERT_TRUE(options.Process(cur_args, &child_args_));
+  EXPECT_TRUE(options.stop_on_error());
+  EXPECT_THAT(child_args_, ElementsAre(StrEq("ignore")));
+}
+
 void OptionsTest::CheckIncompatible(const std::string arg) {
   CapturedStdout capture;
   std::vector<const char*> cur_args{"ignore", arg.c_str()};
@@ -639,12 +661,10 @@
 }
 
 TEST_F(OptionsTest, incompatible) {
-  ASSERT_NO_FATAL_FAILURE(CheckIncompatible("--gtest_break_on_failure"));
   ASSERT_NO_FATAL_FAILURE(CheckIncompatible("--gtest_catch_exceptions"));
   ASSERT_NO_FATAL_FAILURE(CheckIncompatible("--gtest_random_seed"));
   ASSERT_NO_FATAL_FAILURE(CheckIncompatible("--gtest_shuffle"));
   ASSERT_NO_FATAL_FAILURE(CheckIncompatible("--gtest_stream_result_to"));
-  ASSERT_NO_FATAL_FAILURE(CheckIncompatible("--gtest_throw_on_failure"));
 }
 
 TEST_F(OptionsTest, verify_non_env_variables) {
@@ -663,6 +683,7 @@
   EXPECT_EQ("", options.xml_file());
   EXPECT_EQ("", options.filter());
   EXPECT_EQ(1, options.num_iterations());
+  EXPECT_FALSE(options.stop_on_error());
   EXPECT_TRUE(options.print_time());
   EXPECT_FALSE(options.allow_disabled_tests());
   EXPECT_FALSE(options.list_tests());
@@ -901,6 +922,38 @@
   ASSERT_NE(-1, unsetenv("GTEST_DEATH_TEST_STYLE"));
 }
 
+TEST_F(OptionsTest, stop_on_error_from_env) {
+  ASSERT_NE(-1, setenv("GTEST_BREAK_ON_FAILURE", "", 1));
+
+  std::vector<const char*> cur_args{"ignore"};
+  Options options;
+  ASSERT_TRUE(options.Process(cur_args, &child_args_));
+  EXPECT_TRUE(options.stop_on_error());
+  EXPECT_THAT(child_args_, ElementsAre(StrEq("ignore")));
+
+  ASSERT_NE(-1, unsetenv("GTEST_BREAK_ON_FAILURE"));
+
+  ASSERT_NE(-1, setenv("GTEST_THROW_ON_FAILURE", "", 1));
+
+  ClearChildArgs();
+  ASSERT_TRUE(options.Process(cur_args, &child_args_));
+  EXPECT_TRUE(options.stop_on_error());
+  EXPECT_THAT(child_args_, ElementsAre(StrEq("ignore")));
+
+  ASSERT_NE(-1, unsetenv("GTEST_THROW_ON_FAILURE"));
+
+  ASSERT_NE(-1, setenv("GTEST_BREAK_ON_FAILURE", "", 1));
+  ASSERT_NE(-1, setenv("GTEST_THROW_ON_FAILURE", "", 1));
+
+  ClearChildArgs();
+  ASSERT_TRUE(options.Process(cur_args, &child_args_));
+  EXPECT_TRUE(options.stop_on_error());
+  EXPECT_THAT(child_args_, ElementsAre(StrEq("ignore")));
+
+  ASSERT_NE(-1, unsetenv("GTEST_BREAK_ON_FAILURE"));
+  ASSERT_NE(-1, unsetenv("GTEST_THROW_ON_FAILURE"));
+}
+
 void OptionsTest::CheckIncompatibleFromEnv(const std::string env_var) {
   ASSERT_NE(-1, setenv(env_var.c_str(), "", 1));
 
@@ -926,12 +979,10 @@
 }
 
 TEST_F(OptionsTest, incompatible_from_env) {
-  ASSERT_NO_FATAL_FAILURE(CheckIncompatibleFromEnv("GTEST_BREAK_ON_FAILURE"));
   ASSERT_NO_FATAL_FAILURE(CheckIncompatibleFromEnv("GTEST_CATCH_EXCEPTIONS"));
   ASSERT_NO_FATAL_FAILURE(CheckIncompatibleFromEnv("GTEST_RANDOM_SEED"));
   ASSERT_NO_FATAL_FAILURE(CheckIncompatibleFromEnv("GTEST_SHUFFLE"));
   ASSERT_NO_FATAL_FAILURE(CheckIncompatibleFromEnv("GTEST_STREAM_RESULT_TO"));
-  ASSERT_NO_FATAL_FAILURE(CheckIncompatibleFromEnv("GTEST_THROW_ON_FAILURE"));
 }
 
 }  // namespace gtest_extras
diff --git a/tests/SystemTests.cpp b/tests/SystemTests.cpp
index 821c055..25470b0 100644
--- a/tests/SystemTests.cpp
+++ b/tests/SystemTests.cpp
@@ -1200,7 +1200,7 @@
       memory_iteration++;
     }
   }
-  ASSERT_EQ(400, memory_iteration)
+  ASSERT_EQ(400U, memory_iteration)
       << "Did not find the expected 400 lines of memory data." << std::endl
       << "Raw output:" << std::endl
       << raw_output_;
@@ -1328,6 +1328,89 @@
       Verify("*.DISABLED_pass", expected, 0, std::vector<const char*>{flagfile.c_str()}));
 }
 
+TEST_F(SystemTests, verify_repeat_stop_on_error) {
+  std::string expected =
+      "Note: Google Test filter = *.DISABLED_fail\n"
+      "[==========] Running 1 test from 1 test suite (20 jobs).\n"
+      "[ RUN      ] SystemTests.DISABLED_fail\n"
+      "file:(XX) Failure in test SystemTests.DISABLED_fail\n"
+      "Expected equality of these values:\n"
+      "  1\n"
+      "  0\n"
+      "SystemTests.DISABLED_fail exited with exitcode 1.\n"
+      "[  FAILED  ] SystemTests.DISABLED_fail (XX ms)\n"
+      "[==========] 1 test from 1 test suite ran. (XX ms total)\n"
+      "[  PASSED  ] 0 tests.\n"
+      "[  FAILED  ] 1 test, listed below:\n"
+      "[  FAILED  ] SystemTests.DISABLED_fail\n"
+      "\n"
+      " 1 FAILED TEST\n"
+      "\n"
+      "Terminating repeat run due to failing tests (iteration 1).\n";
+  ASSERT_NO_FATAL_FAILURE(
+      Verify("*.DISABLED_fail", expected, 1,
+             std::vector<const char*>{"--gtest_repeat=2", "--gtest_break_on_failure"}));
+}
+
+TEST_F(SystemTests, verify_repeat_no_stop_on_error) {
+  std::string expected =
+      "Note: Google Test filter = *.DISABLED_fail\n"
+      "[==========] Running 1 test from 1 test suite (20 jobs).\n"
+      "[ RUN      ] SystemTests.DISABLED_fail\n"
+      "file:(XX) Failure in test SystemTests.DISABLED_fail\n"
+      "Expected equality of these values:\n"
+      "  1\n"
+      "  0\n"
+      "SystemTests.DISABLED_fail exited with exitcode 1.\n"
+      "[  FAILED  ] SystemTests.DISABLED_fail (XX ms)\n"
+      "[==========] 1 test from 1 test suite ran. (XX ms total)\n"
+      "[  PASSED  ] 0 tests.\n"
+      "[  FAILED  ] 1 test, listed below:\n"
+      "[  FAILED  ] SystemTests.DISABLED_fail\n"
+      "\n"
+      " 1 FAILED TEST\n"
+      "\n"
+      "Repeating all tests (iteration 2) . . .\n"
+      "\n"
+      "[==========] Running 1 test from 1 test suite (20 jobs).\n"
+      "[ RUN      ] SystemTests.DISABLED_fail\n"
+      "file:(XX) Failure in test SystemTests.DISABLED_fail\n"
+      "Expected equality of these values:\n"
+      "  1\n"
+      "  0\n"
+      "SystemTests.DISABLED_fail exited with exitcode 1.\n"
+      "[  FAILED  ] SystemTests.DISABLED_fail (XX ms)\n"
+      "[==========] 1 test from 1 test suite ran. (XX ms total)\n"
+      "[  PASSED  ] 0 tests.\n"
+      "[  FAILED  ] 1 test, listed below:\n"
+      "[  FAILED  ] SystemTests.DISABLED_fail\n"
+      "\n"
+      " 1 FAILED TEST\n";
+  ASSERT_NO_FATAL_FAILURE(
+      Verify("*.DISABLED_fail", expected, 1, std::vector<const char*>{"--gtest_repeat=2"}));
+}
+
+TEST_F(SystemTests, verify_single_no_terminate_message) {
+  std::string expected =
+      "Note: Google Test filter = *.DISABLED_fail\n"
+      "[==========] Running 1 test from 1 test suite (20 jobs).\n"
+      "[ RUN      ] SystemTests.DISABLED_fail\n"
+      "file:(XX) Failure in test SystemTests.DISABLED_fail\n"
+      "Expected equality of these values:\n"
+      "  1\n"
+      "  0\n"
+      "SystemTests.DISABLED_fail exited with exitcode 1.\n"
+      "[  FAILED  ] SystemTests.DISABLED_fail (XX ms)\n"
+      "[==========] 1 test from 1 test suite ran. (XX ms total)\n"
+      "[  PASSED  ] 0 tests.\n"
+      "[  FAILED  ] 1 test, listed below:\n"
+      "[  FAILED  ] SystemTests.DISABLED_fail\n"
+      "\n"
+      " 1 FAILED TEST\n";
+  ASSERT_NO_FATAL_FAILURE(
+      Verify("*.DISABLED_fail", expected, 1, std::vector<const char*>{"--gtest_break_on_failure"}));
+}
+
 // These tests are used by the verify_disabled tests.
 TEST_F(SystemTests, always_pass) {}
 
@@ -1341,7 +1424,7 @@
   ASSERT_EQ(1, 0);
 }
 
-TEST_F(SystemTests, DISABLED_crash) {
+TEST_F(SystemTests, DISABLED_crash) __attribute__((optnone)) {
   char* p = reinterpret_cast<char*>(static_cast<intptr_t>(atoi("0")));
   *p = 3;
 }