// test_launcher.cc revision c2e0dbddbe15c98d52c4786dac06cb8952a8ae6d
1// Copyright (c) 2012 The Chromium Authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "content/public/test/test_launcher.h"
6
#include <set>
#include <string>
#include <vector>
9
10#include "base/command_line.h"
11#include "base/environment.h"
12#include "base/file_util.h"
13#include "base/files/scoped_temp_dir.h"
14#include "base/hash_tables.h"
15#include "base/logging.h"
16#include "base/memory/linked_ptr.h"
17#include "base/memory/scoped_ptr.h"
18#include "base/process_util.h"
19#include "base/string_number_conversions.h"
20#include "base/string_util.h"
21#include "base/test/test_suite.h"
22#include "base/test/test_timeouts.h"
23#include "base/time.h"
24#include "base/utf_string_conversions.h"
25#include "content/public/app/content_main.h"
26#include "content/public/app/content_main_delegate.h"
27#include "content/public/app/startup_helper_win.h"
28#include "content/public/common/content_switches.h"
29#include "content/public/common/sandbox_init.h"
30#include "content/public/test/browser_test.h"
31#include "net/base/escape.h"
32#include "testing/gtest/include/gtest/gtest.h"
33
34#if defined(OS_WIN)
35#include "base/base_switches.h"
36#include "content/common/sandbox_win.h"
37#include "sandbox/win/src/sandbox_factory.h"
38#include "sandbox/win/src/sandbox_types.h"
39#elif defined(OS_MACOSX)
40#include "base/mac/scoped_nsautorelease_pool.h"
41#endif
42
43namespace content {
44
namespace {

// Tests with this prefix run before the same test without it, and use the same
// profile. i.e. Foo.PRE_Test runs and then Foo.Test. This allows writing tests
// that span browser restarts.
const char kPreTestPrefix[] = "PRE_";

// Manual tests only run when --run-manual is specified. This allows writing
// tests that don't run automatically but are still in the same test binary.
// This is useful so that a team that wants to run a few tests doesn't have to
// add a new binary that must be compiled on all builds.
const char kManualTestPrefix[] = "MANUAL_";

// Tests with this suffix are expected to crash, so it won't count as a failure.
// A test that uses this must have a PRE_ prefix.
const char kCrashTestSuffix[] = "_CRASH";

// The delegate registered by LaunchTests() and exposed through
// GetCurrentTestLauncherDelegate().
TestLauncherDelegate* g_launcher_delegate;
}  // namespace

// The environment variable name for the total number of test shards.
const char kTestTotalShards[] = "GTEST_TOTAL_SHARDS";
// The environment variable name for the test shard index.
const char kTestShardIndex[] = "GTEST_SHARD_INDEX";

// The default output file for XML output.
const base::FilePath::CharType kDefaultOutputFile[] = FILE_PATH_LITERAL(
    "test_detail.xml");

// Quit test execution after this number of tests has timed out.
const int kMaxTimeouts = 5;  // 45s timeout * (5 + 1) = 270s max run time.
76
77namespace {
78
79// Parses the environment variable var as an Int32.  If it is unset, returns
80// default_val.  If it is set, unsets it then converts it to Int32 before
81// returning it.  If unsetting or converting to an Int32 fails, print an
82// error and exit with failure.
83int32 Int32FromEnvOrDie(const char* const var, int32 default_val) {
84  scoped_ptr<base::Environment> env(base::Environment::Create());
85  std::string str_val;
86  int32 result;
87  if (!env->GetVar(var, &str_val))
88    return default_val;
89  if (!env->UnSetVar(var)) {
90    LOG(ERROR) << "Invalid environment: we could not unset " << var << ".\n";
91    exit(EXIT_FAILURE);
92  }
93  if (!base::StringToInt(str_val, &result)) {
94    LOG(ERROR) << "Invalid environment: " << var << " is not an integer.\n";
95    exit(EXIT_FAILURE);
96  }
97  return result;
98}
99
100// Checks whether sharding is enabled by examining the relevant
101// environment variable values.  If the variables are present,
102// but inconsistent (i.e., shard_index >= total_shards), prints
103// an error and exits.
104bool ShouldShard(int32* total_shards, int32* shard_index) {
105  *total_shards = Int32FromEnvOrDie(kTestTotalShards, -1);
106  *shard_index = Int32FromEnvOrDie(kTestShardIndex, -1);
107
108  if (*total_shards == -1 && *shard_index == -1) {
109    return false;
110  } else if (*total_shards == -1 && *shard_index != -1) {
111    LOG(ERROR) << "Invalid environment variables: you have "
112               << kTestShardIndex << " = " << *shard_index
113               << ", but have left " << kTestTotalShards << " unset.\n";
114    exit(EXIT_FAILURE);
115  } else if (*total_shards != -1 && *shard_index == -1) {
116    LOG(ERROR) << "Invalid environment variables: you have "
117               << kTestTotalShards << " = " << *total_shards
118               << ", but have left " << kTestShardIndex << " unset.\n";
119    exit(EXIT_FAILURE);
120  } else if (*shard_index < 0 || *shard_index >= *total_shards) {
121    LOG(ERROR) << "Invalid environment variables: we require 0 <= "
122               << kTestShardIndex << " < " << kTestTotalShards
123               << ", but you have " << kTestShardIndex << "=" << *shard_index
124               << ", " << kTestTotalShards << "=" << *total_shards << ".\n";
125    exit(EXIT_FAILURE);
126  }
127
128  return *total_shards > 1;
129}
130
// Returns true iff the test with sequential id |test_id| is assigned to the
// shard |shard_index| out of |total_shards|. The id is an arbitrary but
// unique non-negative integer given to each test by the launcher. The shard
// values are assumed to have been validated already (0 <= shard_index <
// total_shards).
bool ShouldRunTestOnShard(int total_shards, int shard_index, int test_id) {
  const int assigned_shard = test_id % total_shards;
  return assigned_shard == shard_index;
}
139
// A helper class to output results.
// Note: as currently XML is the only supported format by gtest, we don't
// check output format (e.g. "xml:" prefix) here and output an XML file
// unconditionally.
// Note: we don't output per-test-case or total summary info like
// total failed_test_count, disabled_test_count, elapsed_time and so on.
// Only each test (testcase element in the XML) will have the correct
// failed/disabled/elapsed_time information. Each test won't include
// detailed failure messages either.
class ResultsPrinter {
 public:
  // Opens the file named by --gtest_output (if present) and writes the
  // <testsuites> preamble. When the flag is absent or the file cannot be
  // opened, all later calls become no-ops.
  explicit ResultsPrinter(const CommandLine& command_line);
  // Writes the closing </testsuites> tag and closes the file.
  ~ResultsPrinter();
  // Emits the opening <testsuite> element for a test case (a gtest test case
  // maps to an XML testsuite).
  void OnTestCaseStart(const char* name, int test_count) const;
  // Emits the matching closing </testsuite> tag.
  void OnTestCaseEnd() const;

  // Emits one <testcase> element. |run| is false for skipped tests; |failed|
  // adds a nested <failure> child; |elapsed_time| is in milliseconds.
  void OnTestEnd(const char* name, const char* case_name, bool run,
                 bool failed, bool failure_ignored, double elapsed_time) const;
 private:
  // Output file handle; NULL when no XML output was requested or the file
  // could not be opened.
  FILE* out_;

  DISALLOW_COPY_AND_ASSIGN(ResultsPrinter);
};
163
// Parses the --gtest_output switch ("xml:<path>" form) and opens the XML
// output file, writing the <testsuites> preamble. Without the switch, |out_|
// stays NULL and every printing method is a no-op.
ResultsPrinter::ResultsPrinter(const CommandLine& command_line) : out_(NULL) {
  if (!command_line.HasSwitch(kGTestOutputFlag))
    return;
  // Only the part after the first ':' is used as the path; the format prefix
  // is assumed to be "xml" and is not validated.
  std::string flag = command_line.GetSwitchValueASCII(kGTestOutputFlag);
  size_t colon_pos = flag.find(':');
  base::FilePath path;
  if (colon_pos != std::string::npos) {
    // Re-read the switch as a native path so non-ASCII paths survive; the
    // colon offset from the ASCII copy is assumed to line up -- NOTE(review):
    // verify this holds if the format prefix ever contains non-ASCII bytes.
    base::FilePath flag_path =
        command_line.GetSwitchValuePath(kGTestOutputFlag);
    base::FilePath::StringType path_string = flag_path.value();
    path = base::FilePath(path_string.substr(colon_pos + 1));
    // If the given path ends with '/', consider it is a directory.
    // Note: This does NOT check that a directory (or file) actually exists
    // (the behavior is same as what gtest does).
    if (path.EndsWithSeparator()) {
      // Directory given: use "<executable basename>.xml" inside it.
      base::FilePath executable = command_line.GetProgram().BaseName();
      path = path.Append(executable.ReplaceExtension(
          base::FilePath::StringType(FILE_PATH_LITERAL("xml"))));
    }
  }
  // Fall back to the default file name when no path was supplied.
  if (path.value().empty())
    path = base::FilePath(kDefaultOutputFile);
  base::FilePath dir_name = path.DirName();
  if (!file_util::DirectoryExists(dir_name)) {
    LOG(WARNING) << "The output directory does not exist. "
                 << "Creating the directory: " << dir_name.value();
    // Create the directory if necessary (because the gtest does the same).
    file_util::CreateDirectory(dir_name);
  }
  out_ = file_util::OpenFile(path, "w");
  if (!out_) {
    LOG(ERROR) << "Cannot open output file: "
               << path.value() << ".";
    return;
  }
  // Summary attributes are intentionally left blank; totals are not tracked
  // (see the class comment).
  fprintf(out_, "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n");
  fprintf(out_, "<testsuites name=\"AllTests\" tests=\"\" failures=\"\""
          " disabled=\"\" errors=\"\" time=\"\">\n");
}
203
204ResultsPrinter::~ResultsPrinter() {
205  if (!out_)
206    return;
207  fprintf(out_, "</testsuites>\n");
208  fclose(out_);
209}
210
211void ResultsPrinter::OnTestCaseStart(const char* name, int test_count) const {
212  if (!out_)
213    return;
214  fprintf(out_, "  <testsuite name=\"%s\" tests=\"%d\" failures=\"\""
215          " disabled=\"\" errors=\"\" time=\"\">\n", name, test_count);
216}
217
218void ResultsPrinter::OnTestCaseEnd() const {
219  if (!out_)
220    return;
221  fprintf(out_, "  </testsuite>\n");
222}
223
224void ResultsPrinter::OnTestEnd(const char* name,
225                               const char* case_name,
226                               bool run,
227                               bool failed,
228                               bool failure_ignored,
229                               double elapsed_time) const {
230  if (!out_)
231    return;
232  fprintf(out_, "    <testcase name=\"%s\" status=\"%s\" time=\"%.3f\""
233          " classname=\"%s\"",
234          name, run ? "run" : "notrun", elapsed_time / 1000.0, case_name);
235  if (!failed) {
236    fprintf(out_, " />\n");
237    return;
238  }
239  fprintf(out_, ">\n");
240  fprintf(out_, "      <failure message=\"\" type=\"\"%s></failure>\n",
241          failure_ignored ? " ignored=\"true\"" : "");
242  fprintf(out_, "    </testcase>\n");
243}
244
245class TestCasePrinterHelper {
246 public:
247  TestCasePrinterHelper(const ResultsPrinter& printer,
248                        const char* name,
249                        int total_test_count)
250      : printer_(printer) {
251    printer_.OnTestCaseStart(name, total_test_count);
252  }
253  ~TestCasePrinterHelper() {
254    printer_.OnTestCaseEnd();
255  }
256 private:
257  const ResultsPrinter& printer_;
258
259  DISALLOW_COPY_AND_ASSIGN(TestCasePrinterHelper);
260};
261
// For a basic pattern matching for gtest_filter options.  (Copied from
// gtest.cc, see the comment below and http://crbug.com/44497)
// '?' matches any single character, '*' matches any (possibly empty) run of
// characters, and ':' ends a pattern exactly like '\0' does.
bool PatternMatchesString(const char* pattern, const char* str) {
  const char p = *pattern;
  // Either ':' or '\0' marks the end of the pattern; only an exhausted
  // string matches there.
  if (p == '\0' || p == ':')
    return *str == '\0';
  // '?' consumes exactly one character of the string.
  if (p == '?')
    return *str != '\0' && PatternMatchesString(pattern + 1, str + 1);
  // '*' either consumes one more character or matches the empty run.
  if (p == '*') {
    if (*str != '\0' && PatternMatchesString(pattern, str + 1))
      return true;
    return PatternMatchesString(pattern + 1, str);
  }
  // Non-special character: must match itself.
  return p == *str && PatternMatchesString(pattern + 1, str + 1);
}
279
280// TODO(phajdan.jr): Avoid duplicating gtest code. (http://crbug.com/44497)
281// For basic pattern matching for gtest_filter options.  (Copied from
282// gtest.cc)
283bool MatchesFilter(const std::string& name, const std::string& filter) {
284  const char *cur_pattern = filter.c_str();
285  for (;;) {
286    if (PatternMatchesString(cur_pattern, name.c_str())) {
287      return true;
288    }
289
290    // Finds the next pattern in the filter.
291    cur_pattern = strchr(cur_pattern, ':');
292
293    // Returns if no more pattern can be found.
294    if (cur_pattern == NULL) {
295      return false;
296    }
297
298    // Skips the pattern separater (the ':' character).
299    cur_pattern++;
300  }
301}
302
// Launches |test_name| in a child process built from |command_line| and
// returns the child's exit code. If |test_case| is non-NULL, a matching
// PRE_-prefixed counterpart (Foo.PRE_Test for Foo.Test) is recursively run
// first so tests that span browser restarts share state. On timeout the
// child (and, on POSIX, its process group) is killed, |was_timeout| is set
// when non-NULL, and -1 is returned.
int RunTestInternal(const testing::TestCase* test_case,
                    const std::string& test_name,
                    CommandLine* command_line,
                    base::TimeDelta default_timeout,
                    bool* was_timeout) {
  if (test_case) {
    // Derive "Case.PRE_Test" from "Case.Test" and run it first if such a
    // test exists in this test case.
    std::string pre_test_name = test_name;
    std::string replace_string = std::string(".") + kPreTestPrefix;
    ReplaceFirstSubstringAfterOffset(&pre_test_name, 0, ".", replace_string);
    for (int i = 0; i < test_case->total_test_count(); ++i) {
      const testing::TestInfo* test_info = test_case->GetTestInfo(i);
      std::string cur_test_name = test_info->test_case_name();
      cur_test_name.append(".");
      cur_test_name.append(test_info->name());
      if (cur_test_name == pre_test_name) {
        // The recursion also handles chained PRE_PRE_... tests.
        int exit_code = RunTestInternal(test_case, pre_test_name, command_line,
                                        default_timeout, was_timeout);
        // A _CRASH pre-test is expected to die, so its failure is not fatal.
        if (exit_code != 0 &&
            !EndsWith(pre_test_name, kCrashTestSuffix, true)) {
          return exit_code;
        }
      }
    }
  }

  CommandLine new_cmd_line(*command_line);

  // Always enable disabled tests.  This method is not called with disabled
  // tests unless this flag was specified to the browser test executable.
  new_cmd_line.AppendSwitch("gtest_also_run_disabled_tests");
  new_cmd_line.AppendSwitchASCII("gtest_filter", test_name);
  new_cmd_line.AppendSwitch(kSingleProcessTestsFlag);

  // Optionally wrap the child invocation in an external tool (e.g. a
  // debugger or memory checker) named by BROWSER_WRAPPER.
  const char* browser_wrapper = getenv("BROWSER_WRAPPER");
  if (browser_wrapper) {
#if defined(OS_WIN)
    new_cmd_line.PrependWrapper(ASCIIToWide(browser_wrapper));
#elif defined(OS_POSIX)
    new_cmd_line.PrependWrapper(browser_wrapper);
#endif
    VLOG(1) << "BROWSER_WRAPPER was set, prefixing command_line with "
            << browser_wrapper;
  }

  base::ProcessHandle process_handle;
  base::LaunchOptions options;

#if defined(OS_POSIX)
  // On POSIX, we launch the test in a new process group with pgid equal to
  // its pid. Any child processes that the test may create will inherit the
  // same pgid. This way, if the test is abruptly terminated, we can clean up
  // any orphaned child processes it may have left behind.
  options.new_process_group = true;
#endif

  if (!base::LaunchProcess(new_cmd_line, options, &process_handle))
    return -1;

  int exit_code = 0;
  if (!base::WaitForExitCodeWithTimeout(process_handle,
                                        &exit_code,
                                        default_timeout)) {
    LOG(ERROR) << "Test timeout (" << default_timeout.InMilliseconds()
               << " ms) exceeded for " << test_name;

    if (was_timeout)
      *was_timeout = true;
    exit_code = -1;  // Set a non-zero exit code to signal a failure.

    // Ensure that the process terminates.
    base::KillProcess(process_handle, -1, true);
  }

#if defined(OS_POSIX)
  if (exit_code != 0) {
    // On POSIX, in case the test does not exit cleanly, either due to a crash
    // or due to it timing out, we need to clean up any child processes that
    // it might have created. On Windows, child processes are automatically
    // cleaned up using JobObjects.
    base::KillProcessGroup(process_handle);
  }
#endif

  base::CloseProcessHandle(process_handle);

  return exit_code;
}
390
// Runs test specified by |test_name| in a child process,
// and returns the exit code. Builds the child command line from the
// launcher's own switches (minus output/repeat), gives the child a fresh
// temporary data directory, and delegates the actual launch to
// RunTestInternal().
int RunTest(TestLauncherDelegate* launcher_delegate,
            const testing::TestCase* test_case,
            const std::string& test_name,
            base::TimeDelta default_timeout,
            bool* was_timeout) {
  if (was_timeout)
    *was_timeout = false;

#if defined(OS_MACOSX)
  // Some of the below method calls will leak objects if there is no
  // autorelease pool in place.
  base::mac::ScopedNSAutoreleasePool pool;
#endif

  // Start from the launcher's own command line so the child inherits every
  // switch except the ones stripped below.
  const CommandLine* cmd_line = CommandLine::ForCurrentProcess();
  CommandLine new_cmd_line(cmd_line->GetProgram());
  CommandLine::SwitchMap switches = cmd_line->GetSwitches();

  // Strip out gtest_output flag because otherwise we would overwrite results
  // of the previous test. We will generate the final output file later
  // in RunTests().
  switches.erase(kGTestOutputFlag);

  // Strip out gtest_repeat flag because we can only run one test in the child
  // process (restarting the browser in the same process is illegal after it
  // has been shut down and will actually crash).
  switches.erase(kGTestRepeatFlag);

  for (CommandLine::SwitchMap::const_iterator iter = switches.begin();
       iter != switches.end(); ++iter) {
    new_cmd_line.AppendSwitchNative((*iter).first, (*iter).second);
  }

  base::ScopedTempDir temp_dir;
  // Create a new data dir and pass it to the child.
  if (!temp_dir.CreateUniqueTempDir() || !temp_dir.IsValid()) {
    LOG(ERROR) << "Error creating temp data directory";
    return -1;
  }

  // Let the embedder point the child at the fresh data dir (and make any
  // other embedder-specific command-line adjustments).
  if (!launcher_delegate->AdjustChildProcessCommandLine(&new_cmd_line,
                                                        temp_dir.path())) {
    return -1;
  }

  // |temp_dir| stays alive until after the child has exited, then deletes
  // itself when this function returns.
  return RunTestInternal(
      test_case, test_name, &new_cmd_line, default_timeout, was_timeout);
}
441
442bool RunTests(TestLauncherDelegate* launcher_delegate,
443              bool should_shard,
444              int total_shards,
445              int shard_index) {
446  const CommandLine* command_line = CommandLine::ForCurrentProcess();
447
448  DCHECK(!command_line->HasSwitch(kGTestListTestsFlag));
449
450  testing::UnitTest* const unit_test = testing::UnitTest::GetInstance();
451
452  std::string filter = command_line->GetSwitchValueASCII(kGTestFilterFlag);
453
454  // Split --gtest_filter at '-', if there is one, to separate into
455  // positive filter and negative filter portions.
456  std::string positive_filter = filter;
457  std::string negative_filter;
458  size_t dash_pos = filter.find('-');
459  if (dash_pos != std::string::npos) {
460    positive_filter = filter.substr(0, dash_pos);  // Everything up to the dash.
461    negative_filter = filter.substr(dash_pos + 1); // Everything after the dash.
462  }
463
464  int num_runnable_tests = 0;
465  int test_run_count = 0;
466  int timeout_count = 0;
467  std::vector<std::string> failed_tests;
468  std::set<std::string> ignored_tests;
469
470  ResultsPrinter printer(*command_line);
471  for (int i = 0; i < unit_test->total_test_case_count(); ++i) {
472    const testing::TestCase* test_case = unit_test->GetTestCase(i);
473    TestCasePrinterHelper helper(printer, test_case->name(),
474                                 test_case->total_test_count());
475    for (int j = 0; j < test_case->total_test_count(); ++j) {
476      const testing::TestInfo* test_info = test_case->GetTestInfo(j);
477      std::string test_name = test_info->test_case_name();
478      test_name.append(".");
479      test_name.append(test_info->name());
480
481      // Skip our special test so it's not run twice. That confuses the log
482      // parser.
483      if (test_name == launcher_delegate->GetEmptyTestName())
484        continue;
485
486      // Skip disabled tests.
487      if (test_name.find("DISABLED") != std::string::npos &&
488          !command_line->HasSwitch(kGTestRunDisabledTestsFlag)) {
489        printer.OnTestEnd(test_info->name(), test_case->name(),
490                          false, false, false, 0);
491        continue;
492      }
493
494      if (StartsWithASCII(test_info->name(), kPreTestPrefix, true))
495        continue;
496
497      if (StartsWithASCII(test_info->name(), kManualTestPrefix, true) &&
498          !command_line->HasSwitch(kRunManualTestsFlag)) {
499        continue;
500      }
501
502      // Skip the test that doesn't match the filter string (if given).
503      if ((!positive_filter.empty() &&
504           !MatchesFilter(test_name, positive_filter)) ||
505          MatchesFilter(test_name, negative_filter)) {
506        printer.OnTestEnd(test_info->name(), test_case->name(),
507                          false, false, false, 0);
508        continue;
509      }
510
511      // Decide if this test should be run.
512      bool should_run = true;
513      if (should_shard) {
514        should_run = ShouldRunTestOnShard(total_shards, shard_index,
515                                          num_runnable_tests);
516      }
517      num_runnable_tests += 1;
518      // If sharding is enabled and the test should not be run, skip it.
519      if (!should_run) {
520        continue;
521      }
522
523      base::TimeTicks start_time = base::TimeTicks::Now();
524      ++test_run_count;
525      bool was_timeout = false;
526      int exit_code = RunTest(launcher_delegate,
527                              test_case,
528                              test_name,
529                              TestTimeouts::action_max_timeout(),
530                              &was_timeout);
531      if (exit_code == 0) {
532        // Test passed.
533        printer.OnTestEnd(
534            test_info->name(), test_case->name(), true, false,
535            false,
536            (base::TimeTicks::Now() - start_time).InMillisecondsF());
537      } else {
538        failed_tests.push_back(test_name);
539
540        bool ignore_failure = false;
541        printer.OnTestEnd(
542            test_info->name(), test_case->name(), true, true,
543            ignore_failure,
544            (base::TimeTicks::Now() - start_time).InMillisecondsF());
545        if (ignore_failure)
546          ignored_tests.insert(test_name);
547
548        if (was_timeout)
549          ++timeout_count;
550      }
551
552      if (timeout_count > kMaxTimeouts) {
553        printf("More than %d timeouts, aborting test case\n", kMaxTimeouts);
554        break;
555      }
556    }
557    if (timeout_count > kMaxTimeouts) {
558      printf("More than %d timeouts, aborting test\n", kMaxTimeouts);
559      break;
560    }
561  }
562
563  printf("%d test%s run\n", test_run_count, test_run_count > 1 ? "s" : "");
564  printf("%d test%s failed (%d ignored)\n",
565         static_cast<int>(failed_tests.size()),
566         failed_tests.size() != 1 ? "s" : "",
567         static_cast<int>(ignored_tests.size()));
568  if (failed_tests.size() == ignored_tests.size())
569    return true;
570
571  printf("Failing tests:\n");
572  for (std::vector<std::string>::const_iterator iter = failed_tests.begin();
573       iter != failed_tests.end(); ++iter) {
574    bool is_ignored = ignored_tests.find(*iter) != ignored_tests.end();
575    printf("%s%s\n", iter->c_str(), is_ignored ? " (ignored)" : "");
576  }
577
578  return false;
579}
580
// Prints the launcher's command-line help text to stdout.
void PrintUsage() {
  static const char kUsage[] =
      "Runs tests using the gtest framework, each test being run in its own\n"
      "process.  Any gtest flags can be specified.\n"
      "  --single_process\n"
      "    Runs the tests and the launcher in the same process. Useful for \n"
      "    debugging a specific test in a debugger.\n"
      "  --single-process\n"
      "    Same as above, and also runs Chrome in single-process mode.\n"
      "  --help\n"
      "    Shows this message.\n"
      "  --gtest_help\n"
      "    Shows the gtest help message.\n";
  fputs(kUsage, stdout);
}
595
596}  // namespace
597
// The following is kept for historical reasons (so people that are used to
// using it don't get surprised).
const char kChildProcessFlag[]   = "child";

// Standard gtest switches recognized (and partly intercepted) by the
// launcher.
const char kGTestFilterFlag[] = "gtest_filter";
const char kGTestHelpFlag[]   = "gtest_help";
const char kGTestListTestsFlag[] = "gtest_list_tests";
const char kGTestRepeatFlag[] = "gtest_repeat";
const char kGTestRunDisabledTestsFlag[] = "gtest_also_run_disabled_tests";
const char kGTestOutputFlag[] = "gtest_output";

// Prints the launcher usage message (see PrintUsage()).
const char kHelpFlag[]   = "help";

// Makes the launcher hand control to ContentMain() as a browser process
// (see ShouldRunContentMain()).
const char kLaunchAsBrowser[] = "as-browser";

// See kManualTestPrefix above.
const char kRunManualTestsFlag[] = "run-manual";

// Runs the tests and the launcher in a single process (see PrintUsage()).
const char kSingleProcessTestsFlag[]   = "single_process";

// Runs only the warm-up empty test and exits (see LaunchTests()).
const char kWarmupFlag[] = "warmup";
619
620
// Out-of-line empty destructor for the delegate interface; nothing to
// clean up here.
TestLauncherDelegate::~TestLauncherDelegate() {
}
623
// Returns true when this process should hand control to ContentMain()
// instead of acting as the test launcher: either it carries a content
// process-type switch or it was explicitly launched as a browser. Always
// false on platforms other than Windows and Linux.
bool ShouldRunContentMain() {
#if defined(OS_WIN) || defined(OS_LINUX)
  CommandLine* command_line = CommandLine::ForCurrentProcess();
  if (command_line->HasSwitch(switches::kProcessType))
    return true;
  return command_line->HasSwitch(kLaunchAsBrowser);
#else
  return false;
#endif  // defined(OS_WIN) || defined(OS_LINUX)
}
633
// Hands control to ContentMain() using a delegate created by
// |launcher_delegate|. Windows requires sandbox interface info and the
// module handle; Linux passes argc/argv through. Hitting the code past the
// #if block means the platform has no ContentMain path here.
int RunContentMain(int argc, char** argv,
                   TestLauncherDelegate* launcher_delegate) {
#if defined(OS_WIN)
  sandbox::SandboxInterfaceInfo sandbox_info = {0};
  InitializeSandboxInfo(&sandbox_info);
  scoped_ptr<ContentMainDelegate> chrome_main_delegate(
      launcher_delegate->CreateContentMainDelegate());
  return ContentMain(GetModuleHandle(NULL),
                     &sandbox_info,
                     chrome_main_delegate.get());
#elif defined(OS_LINUX)
  scoped_ptr<ContentMainDelegate> chrome_main_delegate(
      launcher_delegate->CreateContentMainDelegate());
  return ContentMain(argc, const_cast<const char**>(argv),
                     chrome_main_delegate.get());
#endif  // defined(OS_WIN)
  // Reached only on platforms without a ContentMain branch above.
  NOTREACHED();
  return 0;
}
653
// Main entry point for the test launcher. Dispatches between: printing
// usage, running the whole suite in this process (--single_process and
// friends), handing control to ContentMain() for browser/child processes,
// and the default mode of launching every test in its own child process
// (with optional sharding, warm-up run, and --gtest_repeat cycles).
int LaunchTests(TestLauncherDelegate* launcher_delegate,
                int argc,
                char** argv) {
  DCHECK(!g_launcher_delegate);
  g_launcher_delegate = launcher_delegate;

  CommandLine::Init(argc, argv);
  const CommandLine* command_line = CommandLine::ForCurrentProcess();

  if (command_line->HasSwitch(kHelpFlag)) {
    PrintUsage();
    return 0;
  }

  // In-process modes: run the gtest suite directly inside this process.
  if (command_line->HasSwitch(kSingleProcessTestsFlag) ||
      (command_line->HasSwitch(switches::kSingleProcess) &&
       command_line->HasSwitch(kGTestFilterFlag)) ||
      command_line->HasSwitch(kGTestListTestsFlag) ||
      command_line->HasSwitch(kGTestHelpFlag)) {
#if defined(OS_WIN)
    if (command_line->HasSwitch(kSingleProcessTestsFlag)) {
      // The in-process browser needs sandbox infrastructure set up here.
      sandbox::SandboxInterfaceInfo sandbox_info;
      InitializeSandboxInfo(&sandbox_info);
      InitializeSandbox(&sandbox_info);
    }
#endif
    return launcher_delegate->RunTestSuite(argc, argv);
  }

  // Browser/child processes hand control to ContentMain() instead.
  if (ShouldRunContentMain())
    return RunContentMain(argc, argv, launcher_delegate);

  base::AtExitManager at_exit;

  int32 total_shards;
  int32 shard_index;
  bool should_shard = ShouldShard(&total_shards, &shard_index);

  fprintf(stdout,
      "Starting tests...\n"
      "IMPORTANT DEBUGGING NOTE: each test is run inside its own process.\n"
      "For debugging a test inside a debugger, use the\n"
      "--gtest_filter=<your_test_name> flag along with either\n"
      "--single_process (to run the test in one launcher/browser process) or\n"
      "--single-process (to do the above, and also run Chrome in single-"
      "process mode).\n");

  testing::InitGoogleTest(&argc, argv);
  TestTimeouts::Initialize();
  int exit_code = 0;

  std::string empty_test = launcher_delegate->GetEmptyTestName();
  if (!empty_test.empty()) {
    // Make sure the entire browser code is loaded into memory. Reading it
    // from disk may be slow on a busy bot, and can easily exceed the default
    // timeout causing flaky test failures. Use an empty test that only starts
    // and closes a browser with a long timeout to avoid those problems.
    // NOTE: We don't do this when specifying a filter because this slows down
    // the common case of running one test locally, and also on trybots when
    // sharding as this one test runs ~200 times and wastes a few minutes.
    bool warmup = command_line->HasSwitch(kWarmupFlag);
    bool has_filter = command_line->HasSwitch(kGTestFilterFlag);
    if (warmup || (!should_shard && !has_filter)) {
      exit_code = RunTest(launcher_delegate,
                          NULL,
                          empty_test,
                          TestTimeouts::large_test_timeout(),
                          NULL);
      // With --warmup, the warm-up run is all that was requested.
      if (exit_code != 0 || warmup)
        return exit_code;
    }
  }

  // --gtest_repeat controls how many full passes over the suite to run.
  int cycles = 1;
  if (command_line->HasSwitch(kGTestRepeatFlag)) {
    base::StringToInt(command_line->GetSwitchValueASCII(kGTestRepeatFlag),
                      &cycles);
  }

  while (cycles != 0) {
    if (!RunTests(launcher_delegate,
                  should_shard,
                  total_shards,
                  shard_index)) {
      exit_code = 1;
      break;
    }

    // Special value "-1" means "repeat indefinitely".
    if (cycles != -1)
      cycles--;
  }
  return exit_code;
}
748
// Returns the delegate registered by LaunchTests(), or NULL before
// LaunchTests() has been called.
TestLauncherDelegate* GetCurrentTestLauncherDelegate() {
  return g_launcher_delegate;
}
752
753}  // namespace content
754