Update from https://crrev.com/312600

TBR=jamesr@chromium.org

Review URL: https://codereview.chromium.org/863253002
diff --git a/base/test/launcher/unit_test_launcher.cc b/base/test/launcher/unit_test_launcher.cc
index 9a8b41f..a31b6db 100644
--- a/base/test/launcher/unit_test_launcher.cc
+++ b/base/test/launcher/unit_test_launcher.cc
@@ -244,325 +244,302 @@
 
 bool UnitTestLauncherDelegate::ShouldRunTest(const std::string& test_case_name,
                                              const std::string& test_name) {
-    DCHECK(thread_checker_.CalledOnValidThread());
+  DCHECK(thread_checker_.CalledOnValidThread());
 
-    // There is no additional logic to disable specific tests.
-    return true;
+  // There is no additional logic to disable specific tests.
+  return true;
+}
+
+size_t UnitTestLauncherDelegate::RunTests(
+    TestLauncher* test_launcher,
+    const std::vector<std::string>& test_names) {
+  DCHECK(thread_checker_.CalledOnValidThread());
+
+  std::vector<std::string> batch;
+  for (size_t i = 0; i < test_names.size(); i++) {
+    batch.push_back(test_names[i]);
+
+    if (batch.size() >= batch_limit_) {
+      RunBatch(test_launcher, batch);
+      batch.clear();
+    }
   }
 
-  size_t UnitTestLauncherDelegate::RunTests(
-      TestLauncher* test_launcher,
-      const std::vector<std::string>& test_names) {
-    DCHECK(thread_checker_.CalledOnValidThread());
+  RunBatch(test_launcher, batch);
 
+  return test_names.size();
+}
+
+size_t UnitTestLauncherDelegate::RetryTests(
+    TestLauncher* test_launcher,
+    const std::vector<std::string>& test_names) {
+  MessageLoop::current()->PostTask(
+      FROM_HERE, Bind(&UnitTestLauncherDelegate::RunSerially, Unretained(this),
+                      test_launcher, test_names));
+  return test_names.size();
+}
+
+void UnitTestLauncherDelegate::RunSerially(
+    TestLauncher* test_launcher,
+    const std::vector<std::string>& test_names) {
+  if (test_names.empty())
+    return;
+
+  std::vector<std::string> new_test_names(test_names);
+  std::string test_name(new_test_names.back());
+  new_test_names.pop_back();
+
+  // Create a dedicated temporary directory to store the xml result data
+  // per run to ensure clean state and make it possible to launch multiple
+  // processes in parallel.
+  base::FilePath output_file;
+  CHECK(CreateNewTempDirectory(FilePath::StringType(), &output_file));
+  output_file = output_file.AppendASCII("test_results.xml");
+
+  std::vector<std::string> current_test_names;
+  current_test_names.push_back(test_name);
+  CommandLine cmd_line(
+      GetCommandLineForChildGTestProcess(current_test_names, output_file));
+
+  GTestCallbackState callback_state;
+  callback_state.test_launcher = test_launcher;
+  callback_state.test_names = current_test_names;
+  callback_state.output_file = output_file;
+
+  test_launcher->LaunchChildGTestProcess(
+      cmd_line, std::string(), TestTimeouts::test_launcher_timeout(),
+      use_job_objects_ ? TestLauncher::USE_JOB_OBJECTS : 0,
+      Bind(&UnitTestLauncherDelegate::SerialGTestCallback, Unretained(this),
+           callback_state, new_test_names));
+}
+
+void UnitTestLauncherDelegate::RunBatch(
+    TestLauncher* test_launcher,
+    const std::vector<std::string>& test_names) {
+  DCHECK(thread_checker_.CalledOnValidThread());
+
+  if (test_names.empty())
+    return;
+
+  // Create a dedicated temporary directory to store the xml result data
+  // per run to ensure clean state and make it possible to launch multiple
+  // processes in parallel.
+  base::FilePath output_file;
+  CHECK(CreateNewTempDirectory(FilePath::StringType(), &output_file));
+  output_file = output_file.AppendASCII("test_results.xml");
+
+  CommandLine cmd_line(
+      GetCommandLineForChildGTestProcess(test_names, output_file));
+
+  // Adjust the timeout depending on how many tests we're running
+  // (note that e.g. the last batch of tests will be smaller).
+  // TODO(phajdan.jr): Consider an adaptive timeout, which can change
+  // depending on how many tests ran and how many remain.
+  // Note: do NOT parse the child's stdout to do that; it's known to be
+  // unreliable (e.g. buffering issues can mix up the output).
+  base::TimeDelta timeout =
+      test_names.size() * TestTimeouts::test_launcher_timeout();
+
+  GTestCallbackState callback_state;
+  callback_state.test_launcher = test_launcher;
+  callback_state.test_names = test_names;
+  callback_state.output_file = output_file;
+
+  test_launcher->LaunchChildGTestProcess(
+      cmd_line, std::string(), timeout,
+      use_job_objects_ ? TestLauncher::USE_JOB_OBJECTS : 0,
+      Bind(&UnitTestLauncherDelegate::GTestCallback, Unretained(this),
+           callback_state));
+}
+
+void UnitTestLauncherDelegate::GTestCallback(
+    const GTestCallbackState& callback_state,
+    int exit_code,
+    const TimeDelta& elapsed_time,
+    bool was_timeout,
+    const std::string& output) {
+  DCHECK(thread_checker_.CalledOnValidThread());
+  std::vector<std::string> tests_to_relaunch;
+  ProcessTestResults(callback_state.test_launcher, callback_state.test_names,
+                     callback_state.output_file, output, exit_code, was_timeout,
+                     &tests_to_relaunch);
+
+  // Relaunch requested tests in parallel, but use only a single
+  // test per batch for more precise results (crashes, tests that
+  // pass but with a non-zero exit code, etc.).
+  for (size_t i = 0; i < tests_to_relaunch.size(); i++) {
     std::vector<std::string> batch;
+    batch.push_back(tests_to_relaunch[i]);
+    RunBatch(callback_state.test_launcher, batch);
+  }
+
+  // The temporary file's directory is also temporary.
+  DeleteFile(callback_state.output_file.DirName(), true);
+}
+
+void UnitTestLauncherDelegate::SerialGTestCallback(
+    const GTestCallbackState& callback_state,
+    const std::vector<std::string>& test_names,
+    int exit_code,
+    const TimeDelta& elapsed_time,
+    bool was_timeout,
+    const std::string& output) {
+  DCHECK(thread_checker_.CalledOnValidThread());
+  std::vector<std::string> tests_to_relaunch;
+  bool called_any_callbacks =
+      ProcessTestResults(callback_state.test_launcher,
+                         callback_state.test_names, callback_state.output_file,
+                         output, exit_code, was_timeout, &tests_to_relaunch);
+
+  // There is only one test, so there cannot be other tests to relaunch
+  // due to a crash.
+  DCHECK(tests_to_relaunch.empty());
+
+  // There is only one test, so we should have called back with its result.
+  DCHECK(called_any_callbacks);
+
+  // The temporary file's directory is also temporary.
+  DeleteFile(callback_state.output_file.DirName(), true);
+
+  MessageLoop::current()->PostTask(
+      FROM_HERE, Bind(&UnitTestLauncherDelegate::RunSerially, Unretained(this),
+                      callback_state.test_launcher, test_names));
+}
+
+// static
+bool UnitTestLauncherDelegate::ProcessTestResults(
+    TestLauncher* test_launcher,
+    const std::vector<std::string>& test_names,
+    const base::FilePath& output_file,
+    const std::string& output,
+    int exit_code,
+    bool was_timeout,
+    std::vector<std::string>* tests_to_relaunch) {
+  std::vector<TestResult> test_results;
+  bool crashed = false;
+  bool have_test_results =
+      ProcessGTestOutput(output_file, &test_results, &crashed);
+
+  bool called_any_callback = false;
+
+  if (have_test_results) {
+    // TODO(phajdan.jr): Check for duplicates and mismatches between
+    // the results we got from XML file and tests we intended to run.
+    std::map<std::string, TestResult> results_map;
+    for (size_t i = 0; i < test_results.size(); i++)
+      results_map[test_results[i].full_name] = test_results[i];
+
+    bool had_interrupted_test = false;
+
+    // Results to be reported back to the test launcher.
+    std::vector<TestResult> final_results;
+
     for (size_t i = 0; i < test_names.size(); i++) {
-      batch.push_back(test_names[i]);
+      if (ContainsKey(results_map, test_names[i])) {
+        TestResult test_result = results_map[test_names[i]];
+        if (test_result.status == TestResult::TEST_CRASH) {
+          had_interrupted_test = true;
 
-      if (batch.size() >= batch_limit_) {
-        RunBatch(test_launcher, batch);
-        batch.clear();
-      }
-    }
-
-    RunBatch(test_launcher, batch);
-
-    return test_names.size();
-  }
-
-  size_t UnitTestLauncherDelegate::RetryTests(
-      TestLauncher* test_launcher,
-      const std::vector<std::string>& test_names) {
-    MessageLoop::current()->PostTask(
-        FROM_HERE,
-        Bind(&UnitTestLauncherDelegate::RunSerially,
-             Unretained(this),
-             test_launcher,
-             test_names));
-    return test_names.size();
-  }
-
-  void UnitTestLauncherDelegate::RunSerially(
-      TestLauncher* test_launcher,
-      const std::vector<std::string>& test_names) {
-    if (test_names.empty())
-      return;
-
-    std::vector<std::string> new_test_names(test_names);
-    std::string test_name(new_test_names.back());
-    new_test_names.pop_back();
-
-    // Create a dedicated temporary directory to store the xml result data
-    // per run to ensure clean state and make it possible to launch multiple
-    // processes in parallel.
-    base::FilePath output_file;
-    CHECK(CreateNewTempDirectory(FilePath::StringType(), &output_file));
-    output_file = output_file.AppendASCII("test_results.xml");
-
-    std::vector<std::string> current_test_names;
-    current_test_names.push_back(test_name);
-    CommandLine cmd_line(
-        GetCommandLineForChildGTestProcess(current_test_names, output_file));
-
-    GTestCallbackState callback_state;
-    callback_state.test_launcher = test_launcher;
-    callback_state.test_names = current_test_names;
-    callback_state.output_file = output_file;
-
-    test_launcher->LaunchChildGTestProcess(
-        cmd_line,
-        std::string(),
-        TestTimeouts::test_launcher_timeout(),
-        use_job_objects_ ? TestLauncher::USE_JOB_OBJECTS : 0,
-        Bind(&UnitTestLauncherDelegate::SerialGTestCallback,
-             Unretained(this),
-             callback_state,
-             new_test_names));
-  }
-
-  void UnitTestLauncherDelegate::RunBatch(
-      TestLauncher* test_launcher,
-      const std::vector<std::string>& test_names) {
-    DCHECK(thread_checker_.CalledOnValidThread());
-
-    if (test_names.empty())
-      return;
-
-    // Create a dedicated temporary directory to store the xml result data
-    // per run to ensure clean state and make it possible to launch multiple
-    // processes in parallel.
-    base::FilePath output_file;
-    CHECK(CreateNewTempDirectory(FilePath::StringType(), &output_file));
-    output_file = output_file.AppendASCII("test_results.xml");
-
-    CommandLine cmd_line(
-        GetCommandLineForChildGTestProcess(test_names, output_file));
-
-    // Adjust the timeout depending on how many tests we're running
-    // (note that e.g. the last batch of tests will be smaller).
-    // TODO(phajdan.jr): Consider an adaptive timeout, which can change
-    // depending on how many tests ran and how many remain.
-    // Note: do NOT parse child's stdout to do that, it's known to be
-    // unreliable (e.g. buffering issues can mix up the output).
-    base::TimeDelta timeout =
-        test_names.size() * TestTimeouts::test_launcher_timeout();
-
-    GTestCallbackState callback_state;
-    callback_state.test_launcher = test_launcher;
-    callback_state.test_names = test_names;
-    callback_state.output_file = output_file;
-
-    test_launcher->LaunchChildGTestProcess(
-        cmd_line,
-        std::string(),
-        timeout,
-        use_job_objects_ ? TestLauncher::USE_JOB_OBJECTS : 0,
-        Bind(&UnitTestLauncherDelegate::GTestCallback,
-             Unretained(this),
-             callback_state));
-  }
-
-  void UnitTestLauncherDelegate::GTestCallback(
-      const GTestCallbackState& callback_state,
-      int exit_code,
-      const TimeDelta& elapsed_time,
-      bool was_timeout,
-      const std::string& output) {
-    DCHECK(thread_checker_.CalledOnValidThread());
-    std::vector<std::string> tests_to_relaunch;
-    ProcessTestResults(callback_state.test_launcher,
-                       callback_state.test_names,
-                       callback_state.output_file,
-                       output,
-                       exit_code,
-                       was_timeout,
-                       &tests_to_relaunch);
-
-    // Relaunch requested tests in parallel, but only use single
-    // test per batch for more precise results (crashes, test passes
-    // but non-zero exit codes etc).
-    for (size_t i = 0; i < tests_to_relaunch.size(); i++) {
-      std::vector<std::string> batch;
-      batch.push_back(tests_to_relaunch[i]);
-      RunBatch(callback_state.test_launcher, batch);
-    }
-
-    // The temporary file's directory is also temporary.
-    DeleteFile(callback_state.output_file.DirName(), true);
-  }
-
-  void UnitTestLauncherDelegate::SerialGTestCallback(
-      const GTestCallbackState& callback_state,
-      const std::vector<std::string>& test_names,
-      int exit_code,
-      const TimeDelta& elapsed_time,
-      bool was_timeout,
-      const std::string& output) {
-    DCHECK(thread_checker_.CalledOnValidThread());
-    std::vector<std::string> tests_to_relaunch;
-    bool called_any_callbacks =
-        ProcessTestResults(callback_state.test_launcher,
-                           callback_state.test_names,
-                           callback_state.output_file,
-                           output,
-                           exit_code,
-                           was_timeout,
-                           &tests_to_relaunch);
-
-    // There is only one test, there cannot be other tests to relaunch
-    // due to a crash.
-    DCHECK(tests_to_relaunch.empty());
-
-    // There is only one test, we should have called back with its result.
-    DCHECK(called_any_callbacks);
-
-    // The temporary file's directory is also temporary.
-    DeleteFile(callback_state.output_file.DirName(), true);
-
-    MessageLoop::current()->PostTask(
-        FROM_HERE,
-        Bind(&UnitTestLauncherDelegate::RunSerially,
-             Unretained(this),
-             callback_state.test_launcher,
-             test_names));
-  }
-
-  // static
-  bool UnitTestLauncherDelegate::ProcessTestResults(
-      TestLauncher* test_launcher,
-      const std::vector<std::string>& test_names,
-      const base::FilePath& output_file,
-      const std::string& output,
-      int exit_code,
-      bool was_timeout,
-      std::vector<std::string>* tests_to_relaunch) {
-    std::vector<TestResult> test_results;
-    bool crashed = false;
-    bool have_test_results =
-        ProcessGTestOutput(output_file, &test_results, &crashed);
-
-    bool called_any_callback = false;
-
-    if (have_test_results) {
-      // TODO(phajdan.jr): Check for duplicates and mismatches between
-      // the results we got from XML file and tests we intended to run.
-      std::map<std::string, TestResult> results_map;
-      for (size_t i = 0; i < test_results.size(); i++)
-        results_map[test_results[i].full_name] = test_results[i];
-
-      bool had_interrupted_test = false;
-
-      // Results to be reported back to the test launcher.
-      std::vector<TestResult> final_results;
-
-      for (size_t i = 0; i < test_names.size(); i++) {
-        if (ContainsKey(results_map, test_names[i])) {
-          TestResult test_result = results_map[test_names[i]];
-          if (test_result.status == TestResult::TEST_CRASH) {
-            had_interrupted_test = true;
-
-            if (was_timeout) {
-              // Fix up the test status: we forcibly kill the child process
-              // after the timeout, so from XML results it looks just like
-              // a crash.
-              test_result.status = TestResult::TEST_TIMEOUT;
-            }
-          } else if (test_result.status == TestResult::TEST_SUCCESS ||
-                     test_result.status == TestResult::TEST_FAILURE) {
-            // We run multiple tests in a batch with a timeout applied
-            // to the entire batch. It is possible that with other tests
-            // running quickly some tests take longer than the per-test timeout.
-            // For consistent handling of tests independent of order and other
-            // factors, mark them as timing out.
-            if (test_result.elapsed_time >
-                TestTimeouts::test_launcher_timeout()) {
-              test_result.status = TestResult::TEST_TIMEOUT;
-            }
+          if (was_timeout) {
+            // Fix up the test status: we forcibly kill the child process
+            // after the timeout, so from XML results it looks just like
+            // a crash.
+            test_result.status = TestResult::TEST_TIMEOUT;
           }
-          test_result.output_snippet =
-              GetTestOutputSnippet(test_result, output);
-          final_results.push_back(test_result);
-        } else if (had_interrupted_test) {
-          tests_to_relaunch->push_back(test_names[i]);
-        } else {
-          // TODO(phajdan.jr): Explicitly pass the info that the test didn't
-          // run for a mysterious reason.
-          LOG(ERROR) << "no test result for " << test_names[i];
-          TestResult test_result;
-          test_result.full_name = test_names[i];
-          test_result.status = TestResult::TEST_UNKNOWN;
-          test_result.output_snippet =
-              GetTestOutputSnippet(test_result, output);
-          final_results.push_back(test_result);
+        } else if (test_result.status == TestResult::TEST_SUCCESS ||
+                   test_result.status == TestResult::TEST_FAILURE) {
+          // We run multiple tests in a batch with a timeout applied
+          // to the entire batch. It is possible that, with other tests
+          // running quickly, some tests take longer than the per-test
+          // timeout. For consistent handling of tests independent of order
+          // and other factors, mark them as timing out.
+          if (test_result.elapsed_time >
+              TestTimeouts::test_launcher_timeout()) {
+            test_result.status = TestResult::TEST_TIMEOUT;
+          }
         }
-      }
-
-      // TODO(phajdan.jr): Handle the case where processing XML output
-      // indicates a crash but none of the test results is marked as crashing.
-
-      if (final_results.empty())
-        return false;
-
-      bool has_non_success_test = false;
-      for (size_t i = 0; i < final_results.size(); i++) {
-        if (final_results[i].status != TestResult::TEST_SUCCESS) {
-          has_non_success_test = true;
-          break;
-        }
-      }
-
-      if (!has_non_success_test && exit_code != 0) {
-        // This is a bit surprising case: all tests are marked as successful,
-        // but the exit code was not zero. This can happen e.g. under memory
-        // tools that report leaks this way.
-
-        if (final_results.size() == 1) {
-          // Easy case. One test only so we know the non-zero exit code
-          // was caused by that one test.
-          final_results[0].status = TestResult::TEST_FAILURE_ON_EXIT;
-        } else {
-          // Harder case. Discard the results and request relaunching all
-          // tests without batching. This will trigger above branch on
-          // relaunch leading to more precise results.
-          LOG(WARNING) << "Not sure which test caused non-zero exit code, "
-                       << "relaunching all of them without batching.";
-
-          for (size_t i = 0; i < final_results.size(); i++)
-            tests_to_relaunch->push_back(final_results[i].full_name);
-
-          return false;
-        }
-      }
-
-      for (size_t i = 0; i < final_results.size(); i++) {
-        // Fix the output snippet after possible changes to the test result.
-        final_results[i].output_snippet =
-            GetTestOutputSnippet(final_results[i], output);
-        test_launcher->OnTestFinished(final_results[i]);
-        called_any_callback = true;
-      }
-    } else {
-      fprintf(stdout,
-              "Failed to get out-of-band test success data, "
-              "dumping full stdio below:\n%s\n",
-              output.c_str());
-      fflush(stdout);
-
-      // We do not have reliable details about test results (parsing test
-      // stdout is known to be unreliable), apply the executable exit code
-      // to all tests.
-      // TODO(phajdan.jr): Be smarter about this, e.g. retry each test
-      // individually.
-      for (size_t i = 0; i < test_names.size(); i++) {
+        test_result.output_snippet = GetTestOutputSnippet(test_result, output);
+        final_results.push_back(test_result);
+      } else if (had_interrupted_test) {
+        tests_to_relaunch->push_back(test_names[i]);
+      } else {
+        // TODO(phajdan.jr): Explicitly pass the info that the test didn't
+        // run for a mysterious reason.
+        LOG(ERROR) << "no test result for " << test_names[i];
         TestResult test_result;
         test_result.full_name = test_names[i];
         test_result.status = TestResult::TEST_UNKNOWN;
-        test_launcher->OnTestFinished(test_result);
-        called_any_callback = true;
+        test_result.output_snippet = GetTestOutputSnippet(test_result, output);
+        final_results.push_back(test_result);
       }
     }
 
-    return called_any_callback;
+    // TODO(phajdan.jr): Handle the case where processing XML output
+    // indicates a crash but none of the test results is marked as crashing.
+
+    if (final_results.empty())
+      return false;
+
+    bool has_non_success_test = false;
+    for (size_t i = 0; i < final_results.size(); i++) {
+      if (final_results[i].status != TestResult::TEST_SUCCESS) {
+        has_non_success_test = true;
+        break;
+      }
+    }
+
+    if (!has_non_success_test && exit_code != 0) {
+      // This is a somewhat surprising case: all tests are marked as
+      // successful, but the exit code was not zero. This can happen e.g.
+      // under memory tools that report leaks this way.
+
+      if (final_results.size() == 1) {
+        // Easy case. One test only so we know the non-zero exit code
+        // was caused by that one test.
+        final_results[0].status = TestResult::TEST_FAILURE_ON_EXIT;
+      } else {
+        // Harder case. Discard the results and request relaunching all
+        // tests without batching. This will trigger the branch above on
+        // relaunch, leading to more precise results.
+        LOG(WARNING) << "Not sure which test caused non-zero exit code, "
+                     << "relaunching all of them without batching.";
+
+        for (size_t i = 0; i < final_results.size(); i++)
+          tests_to_relaunch->push_back(final_results[i].full_name);
+
+        return false;
+      }
+    }
+
+    for (size_t i = 0; i < final_results.size(); i++) {
+      // Fix the output snippet after possible changes to the test result.
+      final_results[i].output_snippet =
+          GetTestOutputSnippet(final_results[i], output);
+      test_launcher->OnTestFinished(final_results[i]);
+      called_any_callback = true;
+    }
+  } else {
+    fprintf(stdout,
+            "Failed to get out-of-band test success data, "
+            "dumping full stdio below:\n%s\n",
+            output.c_str());
+    fflush(stdout);
+
+    // We do not have reliable details about test results (parsing test
+    // stdout is known to be unreliable), so apply the executable exit code
+    // to all tests.
+    // TODO(phajdan.jr): Be smarter about this, e.g. retry each test
+    // individually.
+    for (size_t i = 0; i < test_names.size(); i++) {
+      TestResult test_result;
+      test_result.full_name = test_names[i];
+      test_result.status = TestResult::TEST_UNKNOWN;
+      test_launcher->OnTestFinished(test_result);
+      called_any_callback = true;
+    }
   }
 
+  return called_any_callback;
+}
+
 }  // namespace base
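
Two patterns in the reformatted code are worth calling out. First, RunTests()
is a plain accumulate-and-flush loop: it flushes whenever the batch reaches
batch_limit_, then flushes the (possibly smaller) remainder, relying on
RunBatch()'s early return to make the trailing flush safe when the remainder
is empty. A minimal standalone sketch of the same pattern (plain C++, no
Chromium dependencies; the names mirror the methods above, but the bodies are
illustrative only):

#include <iostream>
#include <string>
#include <vector>

// Stand-in for UnitTestLauncherDelegate::RunBatch(); the real method
// launches one child process that runs every test in |batch|.
void RunBatch(const std::vector<std::string>& batch) {
  if (batch.empty())
    return;  // Mirrors the early return in the real RunBatch().
  std::cout << "launching a batch of " << batch.size() << " tests\n";
}

// The accumulate-and-flush loop from RunTests() above.
size_t RunTests(const std::vector<std::string>& test_names,
                size_t batch_limit) {
  std::vector<std::string> batch;
  for (const std::string& name : test_names) {
    batch.push_back(name);
    if (batch.size() >= batch_limit) {
      RunBatch(batch);
      batch.clear();
    }
  }
  RunBatch(batch);  // Flush the trailing partial batch, if any.
  return test_names.size();
}

int main() {
  // With a limit of 2 this launches batches of 2, 2 and 1.
  RunTests({"A.One", "A.Two", "B.One", "B.Two", "C.One"}, 2);
  return 0;
}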
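Second, the retry path never loops: RetryTests() posts RunSerially(), which
launches the last test on the list alone and hands the remaining names to
SerialGTestCallback(), which in turn re-posts RunSerially() on the message
loop. A toy model of that self-rescheduling chain, with a std::deque standing
in for base::MessageLoop (again illustrative, not the Chromium API):

#include <deque>
#include <functional>
#include <iostream>
#include <string>
#include <vector>

// Toy stand-in for the MessageLoop that RunSerially() posts to.
std::deque<std::function<void()>> task_queue;

void RunSerially(std::vector<std::string> test_names) {
  if (test_names.empty())
    return;  // The chain is done.

  // Take one test off the end, as RunSerially() does above.
  std::string test_name = test_names.back();
  test_names.pop_back();
  std::cout << "running " << test_name << " in its own process\n";

  // The real code re-posts from SerialGTestCallback() once the child
  // process has exited; this toy version re-posts immediately.
  task_queue.push_back([test_names] { RunSerially(test_names); });
}

int main() {
  task_queue.push_back([] { RunSerially({"A.Foo", "A.Bar", "B.Baz"}); });
  while (!task_queue.empty()) {  // Drain the "message loop".
    std::function<void()> task = task_queue.front();
    task_queue.pop_front();
    task();
  }
  return 0;
}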