// test_launcher.cc revision 868fa2fe829687343ffae624259930155e16dbd8
1// Copyright (c) 2012 The Chromium Authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "content/public/test/test_launcher.h"
6
7#include <string>
8#include <vector>
9
10#include "base/command_line.h"
11#include "base/environment.h"
12#include "base/file_util.h"
13#include "base/files/scoped_temp_dir.h"
14#include "base/hash_tables.h"
15#include "base/logging.h"
16#include "base/memory/linked_ptr.h"
17#include "base/memory/scoped_ptr.h"
18#include "base/process_util.h"
19#include "base/string_number_conversions.h"
20#include "base/string_util.h"
21#include "base/strings/utf_string_conversions.h"
22#include "base/test/test_suite.h"
23#include "base/test/test_timeouts.h"
24#include "base/time.h"
25#include "content/public/app/content_main.h"
26#include "content/public/app/content_main_delegate.h"
27#include "content/public/app/startup_helper_win.h"
28#include "content/public/common/content_switches.h"
29#include "content/public/common/sandbox_init.h"
30#include "content/public/test/browser_test.h"
31#include "net/base/escape.h"
32#include "testing/gtest/include/gtest/gtest.h"
33
34#if defined(OS_WIN)
35#include "base/base_switches.h"
36#include "content/common/sandbox_win.h"
37#include "sandbox/win/src/sandbox_factory.h"
38#include "sandbox/win/src/sandbox_types.h"
39#elif defined(OS_MACOSX)
40#include "base/mac/scoped_nsautorelease_pool.h"
41#endif
42
43namespace content {
44
namespace {

// Tests with this prefix run before the same test without it, and use the same
// profile. i.e. Foo.PRE_Test runs and then Foo.Test. This allows writing tests
// that span browser restarts.
const char kPreTestPrefix[] = "PRE_";

// Manual tests only run when --run-manual is specified. This allows writing
// tests that don't run automatically but are still in the same test binary.
// This is useful so that a team that wants to run a few tests doesn't have to
// add a new binary that must be compiled on all builds.
const char kManualTestPrefix[] = "MANUAL_";

// Set once by LaunchTests() and returned by GetCurrentTestLauncherDelegate().
// At most one delegate is registered per process (DCHECKed in LaunchTests).
TestLauncherDelegate* g_launcher_delegate;
}  // namespace
60
// The environment variable name for the total number of test shards.
// These two names follow gtest's sharding protocol so bots can shard
// this launcher the same way they shard plain gtest binaries.
const char kTestTotalShards[] = "GTEST_TOTAL_SHARDS";
// The environment variable name for the test shard index.
const char kTestShardIndex[] = "GTEST_SHARD_INDEX";

// The default output file for XML output.
const base::FilePath::CharType kDefaultOutputFile[] = FILE_PATH_LITERAL(
    "test_detail.xml");

// Quit test execution after this number of tests has timed out.
const int kMaxTimeouts = 5;  // 45s timeout * (5 + 1) = 270s max run time.
72
73namespace {
74
75// Parses the environment variable var as an Int32.  If it is unset, returns
76// default_val.  If it is set, unsets it then converts it to Int32 before
77// returning it.  If unsetting or converting to an Int32 fails, print an
78// error and exit with failure.
79int32 Int32FromEnvOrDie(const char* const var, int32 default_val) {
80  scoped_ptr<base::Environment> env(base::Environment::Create());
81  std::string str_val;
82  int32 result;
83  if (!env->GetVar(var, &str_val))
84    return default_val;
85  if (!env->UnSetVar(var)) {
86    LOG(ERROR) << "Invalid environment: we could not unset " << var << ".\n";
87    exit(EXIT_FAILURE);
88  }
89  if (!base::StringToInt(str_val, &result)) {
90    LOG(ERROR) << "Invalid environment: " << var << " is not an integer.\n";
91    exit(EXIT_FAILURE);
92  }
93  return result;
94}
95
// Checks whether sharding is enabled by examining the relevant
// environment variable values.  If the variables are present,
// but inconsistent (i.e., shard_index >= total_shards), prints
// an error and exits.  Note that Int32FromEnvOrDie also unsets each
// variable it reads, so the child test processes never see them.
bool ShouldShard(int32* total_shards, int32* shard_index) {
  *total_shards = Int32FromEnvOrDie(kTestTotalShards, -1);
  *shard_index = Int32FromEnvOrDie(kTestShardIndex, -1);

  if (*total_shards == -1 && *shard_index == -1) {
    // Neither variable set: sharding is simply disabled.
    return false;
  } else if (*total_shards == -1 && *shard_index != -1) {
    LOG(ERROR) << "Invalid environment variables: you have "
               << kTestShardIndex << " = " << *shard_index
               << ", but have left " << kTestTotalShards << " unset.\n";
    exit(EXIT_FAILURE);
  } else if (*total_shards != -1 && *shard_index == -1) {
    LOG(ERROR) << "Invalid environment variables: you have "
               << kTestTotalShards << " = " << *total_shards
               << ", but have left " << kTestShardIndex << " unset.\n";
    exit(EXIT_FAILURE);
  } else if (*shard_index < 0 || *shard_index >= *total_shards) {
    LOG(ERROR) << "Invalid environment variables: we require 0 <= "
               << kTestShardIndex << " < " << kTestTotalShards
               << ", but you have " << kTestShardIndex << "=" << *shard_index
               << ", " << kTestTotalShards << "=" << *total_shards << ".\n";
    exit(EXIT_FAILURE);
  }

  // A single shard behaves exactly like no sharding at all.
  return *total_shards > 1;
}
126
// Given the total number of shards, the shard index, and the test id, returns
// true iff the test should be run on this shard.  The test id is some arbitrary
// but unique non-negative integer assigned by this launcher to each test
// method.  Assumes that 0 <= shard_index < total_shards, which is first
// verified in ShouldShard().
bool ShouldRunTestOnShard(int total_shards, int shard_index, int test_id) {
  // Tests are distributed round-robin across shards.
  const int assigned_shard = test_id % total_shards;
  return assigned_shard == shard_index;
}
135
// A helper class to output results.
// Note: as currently XML is the only supported format by gtest, we don't
// check output format (e.g. "xml:" prefix) here and output an XML file
// unconditionally.
// Note: we don't output per-test-case or total summary info like
// total failed_test_count, disabled_test_count, elapsed_time and so on.
// Only each test (testcase element in the XML) will have the correct
// failed/disabled/elapsed_time information. Each test won't include
// detailed failure messages either.
class ResultsPrinter {
 public:
  // Opens the file named by --gtest_output (or the default) and writes the
  // XML preamble.  If the flag is absent or the file cannot be opened, all
  // the On* methods below become no-ops.
  explicit ResultsPrinter(const CommandLine& command_line);
  ~ResultsPrinter();
  // Emit the opening / closing <testsuite> element for a test case.
  void OnTestCaseStart(const char* name, int test_count) const;
  void OnTestCaseEnd() const;

  // Emit one <testcase> element.  |elapsed_time| is in milliseconds.
  void OnTestEnd(const char* name, const char* case_name, bool run,
                 bool failed, bool failure_ignored, double elapsed_time) const;
 private:
  // Output stream; NULL when no output file was requested or it failed to
  // open.
  FILE* out_;

  DISALLOW_COPY_AND_ASSIGN(ResultsPrinter);
};
159
// Resolves the --gtest_output destination (handling the "xml:" prefix and
// trailing-slash directory form like gtest does), creates the output
// directory if needed, opens the file, and writes the <testsuites> header.
// On any failure |out_| stays NULL so later calls silently do nothing.
ResultsPrinter::ResultsPrinter(const CommandLine& command_line) : out_(NULL) {
  if (!command_line.HasSwitch(kGTestOutputFlag))
    return;
  std::string flag = command_line.GetSwitchValueASCII(kGTestOutputFlag);
  size_t colon_pos = flag.find(':');
  base::FilePath path;
  if (colon_pos != std::string::npos) {
    // NOTE(review): |colon_pos| is found in the ASCII copy of the flag but
    // used to slice the native path string; this lines up as long as the
    // prefix before the ':' is plain ASCII — presumably always "xml".
    base::FilePath flag_path =
        command_line.GetSwitchValuePath(kGTestOutputFlag);
    base::FilePath::StringType path_string = flag_path.value();
    path = base::FilePath(path_string.substr(colon_pos + 1));
    // If the given path ends with '/', consider it is a directory.
    // Note: This does NOT check that a directory (or file) actually exists
    // (the behavior is same as what gtest does).
    if (path.EndsWithSeparator()) {
      base::FilePath executable = command_line.GetProgram().BaseName();
      path = path.Append(executable.ReplaceExtension(
          base::FilePath::StringType(FILE_PATH_LITERAL("xml"))));
    }
  }
  if (path.value().empty())
    path = base::FilePath(kDefaultOutputFile);
  base::FilePath dir_name = path.DirName();
  if (!file_util::DirectoryExists(dir_name)) {
    LOG(WARNING) << "The output directory does not exist. "
                 << "Creating the directory: " << dir_name.value();
    // Create the directory if necessary (because the gtest does the same).
    file_util::CreateDirectory(dir_name);
  }
  out_ = file_util::OpenFile(path, "w");
  if (!out_) {
    LOG(ERROR) << "Cannot open output file: "
               << path.value() << ".";
    return;
  }
  // Summary attributes are intentionally left blank; see the class comment.
  fprintf(out_, "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n");
  fprintf(out_, "<testsuites name=\"AllTests\" tests=\"\" failures=\"\""
          " disabled=\"\" errors=\"\" time=\"\">\n");
}
199
200ResultsPrinter::~ResultsPrinter() {
201  if (!out_)
202    return;
203  fprintf(out_, "</testsuites>\n");
204  fclose(out_);
205}
206
207void ResultsPrinter::OnTestCaseStart(const char* name, int test_count) const {
208  if (!out_)
209    return;
210  fprintf(out_, "  <testsuite name=\"%s\" tests=\"%d\" failures=\"\""
211          " disabled=\"\" errors=\"\" time=\"\">\n", name, test_count);
212}
213
214void ResultsPrinter::OnTestCaseEnd() const {
215  if (!out_)
216    return;
217  fprintf(out_, "  </testsuite>\n");
218}
219
220void ResultsPrinter::OnTestEnd(const char* name,
221                               const char* case_name,
222                               bool run,
223                               bool failed,
224                               bool failure_ignored,
225                               double elapsed_time) const {
226  if (!out_)
227    return;
228  fprintf(out_, "    <testcase name=\"%s\" status=\"%s\" time=\"%.3f\""
229          " classname=\"%s\"",
230          name, run ? "run" : "notrun", elapsed_time / 1000.0, case_name);
231  if (!failed) {
232    fprintf(out_, " />\n");
233    return;
234  }
235  fprintf(out_, ">\n");
236  fprintf(out_, "      <failure message=\"\" type=\"\"%s></failure>\n",
237          failure_ignored ? " ignored=\"true\"" : "");
238  fprintf(out_, "    </testcase>\n");
239}
240
// RAII helper that brackets one test case in the XML output: emits the
// opening <testsuite> element on construction and the closing tag when it
// goes out of scope, so the document stays well-formed on every exit path.
class TestCasePrinterHelper {
 public:
  TestCasePrinterHelper(const ResultsPrinter& printer,
                        const char* name,
                        int total_test_count)
      : printer_(printer) {
    printer_.OnTestCaseStart(name, total_test_count);
  }
  ~TestCasePrinterHelper() {
    printer_.OnTestCaseEnd();
  }
 private:
  const ResultsPrinter& printer_;  // Not owned; must outlive this helper.

  DISALLOW_COPY_AND_ASSIGN(TestCasePrinterHelper);
};
257
// For a basic pattern matching for gtest_filter options.  (Behaviorally
// identical to the matcher in gtest.cc, see http://crbug.com/44497.)
// Either '\0' or ':' terminates the pattern; '?' matches any one character
// and '*' matches any (possibly empty) run of characters.
bool PatternMatchesString(const char* pattern, const char* str) {
  for (;;) {
    const char p = *pattern;
    if (p == '\0' || p == ':')
      return *str == '\0';
    if (p == '?') {
      if (*str == '\0')
        return false;
      ++pattern;
      ++str;
      continue;
    }
    if (p == '*') {
      // Either let '*' absorb one more character of |str|, or let it match
      // the empty string and move past it in the pattern.
      return (*str != '\0' && PatternMatchesString(pattern, str + 1)) ||
             PatternMatchesString(pattern + 1, str);
    }
    // Ordinary character: must match exactly.
    if (p != *str)
      return false;
    ++pattern;
    ++str;
  }
}
275
276// TODO(phajdan.jr): Avoid duplicating gtest code. (http://crbug.com/44497)
277// For basic pattern matching for gtest_filter options.  (Copied from
278// gtest.cc)
279bool MatchesFilter(const std::string& name, const std::string& filter) {
280  const char *cur_pattern = filter.c_str();
281  for (;;) {
282    if (PatternMatchesString(cur_pattern, name.c_str())) {
283      return true;
284    }
285
286    // Finds the next pattern in the filter.
287    cur_pattern = strchr(cur_pattern, ':');
288
289    // Returns if no more pattern can be found.
290    if (cur_pattern == NULL) {
291      return false;
292    }
293
294    // Skips the pattern separater (the ':' character).
295    cur_pattern++;
296  }
297}
298
// Launches a child process running exactly |test_name| and waits for it with
// |default_timeout|.  When |test_case| is given, a matching PRE_ variant of
// the test (Foo.PRE_Test for Foo.Test) is run first via recursion, so chains
// like PRE_PRE_Test also work and a PRE_ failure aborts the sequence.
// Returns the child's exit code; -1 on launch failure or timeout.  When the
// timeout is hit, *was_timeout (if non-NULL) is set to true.
int RunTestInternal(const testing::TestCase* test_case,
                    const std::string& test_name,
                    CommandLine* command_line,
                    base::TimeDelta default_timeout,
                    bool* was_timeout) {
  if (test_case) {
    // Build "Case.PRE_Test" from "Case.Test" and run it first if it exists.
    std::string pre_test_name = test_name;
    std::string replace_string = std::string(".") + kPreTestPrefix;
    ReplaceFirstSubstringAfterOffset(&pre_test_name, 0, ".", replace_string);
    for (int i = 0; i < test_case->total_test_count(); ++i) {
      const testing::TestInfo* test_info = test_case->GetTestInfo(i);
      std::string cur_test_name = test_info->test_case_name();
      cur_test_name.append(".");
      cur_test_name.append(test_info->name());
      if (cur_test_name == pre_test_name) {
        int exit_code = RunTestInternal(test_case, pre_test_name, command_line,
                                        default_timeout, was_timeout);
        if (exit_code != 0)
          return exit_code;
      }
    }
  }

  CommandLine new_cmd_line(*command_line);

  // Always enable disabled tests.  This method is not called with disabled
  // tests unless this flag was specified to the browser test executable.
  new_cmd_line.AppendSwitch("gtest_also_run_disabled_tests");
  new_cmd_line.AppendSwitchASCII("gtest_filter", test_name);
  new_cmd_line.AppendSwitch(kSingleProcessTestsFlag);

  // Optionally wrap the child command line (e.g. with a debugger or
  // valgrind) via the BROWSER_WRAPPER environment variable.
  const char* browser_wrapper = getenv("BROWSER_WRAPPER");
  if (browser_wrapper) {
#if defined(OS_WIN)
    new_cmd_line.PrependWrapper(ASCIIToWide(browser_wrapper));
#elif defined(OS_POSIX)
    new_cmd_line.PrependWrapper(browser_wrapper);
#endif
    VLOG(1) << "BROWSER_WRAPPER was set, prefixing command_line with "
            << browser_wrapper;
  }

  base::ProcessHandle process_handle;
  base::LaunchOptions options;

#if defined(OS_POSIX)
  // On POSIX, we launch the test in a new process group with pgid equal to
  // its pid. Any child processes that the test may create will inherit the
  // same pgid. This way, if the test is abruptly terminated, we can clean up
  // any orphaned child processes it may have left behind.
  options.new_process_group = true;
#endif

  if (!base::LaunchProcess(new_cmd_line, options, &process_handle))
    return -1;

  int exit_code = 0;
  if (!base::WaitForExitCodeWithTimeout(process_handle,
                                        &exit_code,
                                        default_timeout)) {
    LOG(ERROR) << "Test timeout (" << default_timeout.InMilliseconds()
               << " ms) exceeded for " << test_name;

    if (was_timeout)
      *was_timeout = true;
    exit_code = -1;  // Set a non-zero exit code to signal a failure.

    // Ensure that the process terminates.
    base::KillProcess(process_handle, -1, true);
  }

#if defined(OS_POSIX)
  if (exit_code != 0) {
    // On POSIX, in case the test does not exit cleanly, either due to a crash
    // or due to it timing out, we need to clean up any child processes that
    // it might have created. On Windows, child processes are automatically
    // cleaned up using JobObjects.
    base::KillProcessGroup(process_handle);
  }
#endif

  base::CloseProcessHandle(process_handle);

  return exit_code;
}
384
// Runs test specified by |test_name| in a child process,
// and returns the exit code.  Builds the child command line from the
// launcher's own switches (minus gtest_output/gtest_repeat), gives the child
// a fresh temporary data directory, lets the delegate adjust the command
// line, then delegates to RunTestInternal().  *was_timeout (if non-NULL) is
// reset to false up front and set by RunTestInternal on timeout.
int RunTest(TestLauncherDelegate* launcher_delegate,
            const testing::TestCase* test_case,
            const std::string& test_name,
            base::TimeDelta default_timeout,
            bool* was_timeout) {
  if (was_timeout)
    *was_timeout = false;

#if defined(OS_MACOSX)
  // Some of the below method calls will leak objects if there is no
  // autorelease pool in place.
  base::mac::ScopedNSAutoreleasePool pool;
#endif

  const CommandLine* cmd_line = CommandLine::ForCurrentProcess();
  CommandLine new_cmd_line(cmd_line->GetProgram());
  CommandLine::SwitchMap switches = cmd_line->GetSwitches();

  // Strip out gtest_output flag because otherwise we would overwrite results
  // of the previous test. We will generate the final output file later
  // in RunTests().
  switches.erase(kGTestOutputFlag);

  // Strip out gtest_repeat flag because we can only run one test in the child
  // process (restarting the browser in the same process is illegal after it
  // has been shut down and will actually crash).
  switches.erase(kGTestRepeatFlag);

  // Forward every remaining launcher switch to the child.
  for (CommandLine::SwitchMap::const_iterator iter = switches.begin();
       iter != switches.end(); ++iter) {
    new_cmd_line.AppendSwitchNative((*iter).first, (*iter).second);
  }

  base::ScopedTempDir temp_dir;
  // Create a new data dir and pass it to the child.
  if (!temp_dir.CreateUniqueTempDir() || !temp_dir.IsValid()) {
    LOG(ERROR) << "Error creating temp data directory";
    return -1;
  }

  if (!launcher_delegate->AdjustChildProcessCommandLine(&new_cmd_line,
                                                        temp_dir.path())) {
    return -1;
  }

  return RunTestInternal(
      test_case, test_name, &new_cmd_line, default_timeout, was_timeout);
}
435
// Iterates over every registered gtest test, applies the PRE_/MANUAL_/
// DISABLED_/--gtest_filter/sharding skip rules, runs each remaining test in
// its own child process, and records results to the optional XML printer.
// Aborts early after more than kMaxTimeouts timeouts.  Returns true when
// every failure was ignorable (currently: when there were no real failures).
bool RunTests(TestLauncherDelegate* launcher_delegate,
              bool should_shard,
              int total_shards,
              int shard_index) {
  const CommandLine* command_line = CommandLine::ForCurrentProcess();

  DCHECK(!command_line->HasSwitch(kGTestListTestsFlag));

  testing::UnitTest* const unit_test = testing::UnitTest::GetInstance();

  std::string filter = command_line->GetSwitchValueASCII(kGTestFilterFlag);

  // Split --gtest_filter at '-', if there is one, to separate into
  // positive filter and negative filter portions.
  std::string positive_filter = filter;
  std::string negative_filter;
  size_t dash_pos = filter.find('-');
  if (dash_pos != std::string::npos) {
    positive_filter = filter.substr(0, dash_pos);  // Everything up to the dash.
    negative_filter = filter.substr(dash_pos + 1); // Everything after the dash.
  }

  int num_runnable_tests = 0;  // Counter used as the test id for sharding.
  int test_run_count = 0;
  int timeout_count = 0;
  std::vector<std::string> failed_tests;
  std::set<std::string> ignored_tests;  // Subset of |failed_tests|.

  ResultsPrinter printer(*command_line);
  for (int i = 0; i < unit_test->total_test_case_count(); ++i) {
    const testing::TestCase* test_case = unit_test->GetTestCase(i);
    // Emits <testsuite> open/close tags around this case's results.
    TestCasePrinterHelper helper(printer, test_case->name(),
                                 test_case->total_test_count());
    for (int j = 0; j < test_case->total_test_count(); ++j) {
      const testing::TestInfo* test_info = test_case->GetTestInfo(j);
      std::string test_name = test_info->test_case_name();
      test_name.append(".");
      test_name.append(test_info->name());

      // Skip our special test so it's not run twice. That confuses the log
      // parser.
      if (test_name == launcher_delegate->GetEmptyTestName())
        continue;

      // Skip disabled tests.
      if (test_name.find("DISABLED") != std::string::npos &&
          !command_line->HasSwitch(kGTestRunDisabledTestsFlag)) {
        printer.OnTestEnd(test_info->name(), test_case->name(),
                          false, false, false, 0);
        continue;
      }

      // PRE_ tests are run implicitly before their main test (see
      // RunTestInternal), never on their own.
      if (StartsWithASCII(test_info->name(), kPreTestPrefix, true))
        continue;

      if (StartsWithASCII(test_info->name(), kManualTestPrefix, true) &&
          !command_line->HasSwitch(kRunManualTestsFlag)) {
        continue;
      }

      // Skip the test that doesn't match the filter string (if given).
      if ((!positive_filter.empty() &&
           !MatchesFilter(test_name, positive_filter)) ||
          MatchesFilter(test_name, negative_filter)) {
        printer.OnTestEnd(test_info->name(), test_case->name(),
                          false, false, false, 0);
        continue;
      }

      // Decide if this test should be run.
      bool should_run = true;
      if (should_shard) {
        // Note: the pre-increment value of |num_runnable_tests| is the id.
        should_run = ShouldRunTestOnShard(total_shards, shard_index,
                                          num_runnable_tests);
      }
      num_runnable_tests += 1;
      // If sharding is enabled and the test should not be run, skip it.
      if (!should_run) {
        continue;
      }

      base::TimeTicks start_time = base::TimeTicks::Now();
      ++test_run_count;
      bool was_timeout = false;
      int exit_code = RunTest(launcher_delegate,
                              test_case,
                              test_name,
                              TestTimeouts::action_max_timeout(),
                              &was_timeout);
      if (exit_code == 0) {
        // Test passed.
        printer.OnTestEnd(
            test_info->name(), test_case->name(), true, false,
            false,
            (base::TimeTicks::Now() - start_time).InMillisecondsF());
      } else {
        failed_tests.push_back(test_name);

        // NOTE(review): |ignore_failure| is currently hard-wired to false,
        // so the ignored-failure paths below are dormant.
        bool ignore_failure = false;
        printer.OnTestEnd(
            test_info->name(), test_case->name(), true, true,
            ignore_failure,
            (base::TimeTicks::Now() - start_time).InMillisecondsF());
        if (ignore_failure)
          ignored_tests.insert(test_name);

        if (was_timeout)
          ++timeout_count;
      }

      if (timeout_count > kMaxTimeouts) {
        printf("More than %d timeouts, aborting test case\n", kMaxTimeouts);
        break;
      }
    }
    if (timeout_count > kMaxTimeouts) {
      printf("More than %d timeouts, aborting test\n", kMaxTimeouts);
      break;
    }
  }

  printf("%d test%s run\n", test_run_count, test_run_count > 1 ? "s" : "");
  printf("%d test%s failed (%d ignored)\n",
         static_cast<int>(failed_tests.size()),
         failed_tests.size() != 1 ? "s" : "",
         static_cast<int>(ignored_tests.size()));
  // Every ignored test is also in |failed_tests|, so equal sizes means every
  // failure was ignored.
  if (failed_tests.size() == ignored_tests.size())
    return true;

  printf("Failing tests:\n");
  for (std::vector<std::string>::const_iterator iter = failed_tests.begin();
       iter != failed_tests.end(); ++iter) {
    bool is_ignored = ignored_tests.find(*iter) != ignored_tests.end();
    printf("%s%s\n", iter->c_str(), is_ignored ? " (ignored)" : "");
  }

  return false;
}
574
// Prints the launcher's command-line help to stdout.
void PrintUsage() {
  static const char kUsage[] =
      "Runs tests using the gtest framework, each test being run in its own\n"
      "process.  Any gtest flags can be specified.\n"
      "  --single_process\n"
      "    Runs the tests and the launcher in the same process. Useful for \n"
      "    debugging a specific test in a debugger.\n"
      "  --single-process\n"
      "    Same as above, and also runs Chrome in single-process mode.\n"
      "  --help\n"
      "    Shows this message.\n"
      "  --gtest_help\n"
      "    Shows the gtest help message.\n";
  fputs(kUsage, stdout);
}
589
590}  // namespace
591
// The following is kept for historical reasons (so people that are used to
// using it don't get surprised).
const char kChildProcessFlag[]   = "child";

// Standard gtest switches recognized (and sometimes rewritten) by the
// launcher; declared in the corresponding header for use by embedders.
const char kGTestFilterFlag[] = "gtest_filter";
const char kGTestHelpFlag[]   = "gtest_help";
const char kGTestListTestsFlag[] = "gtest_list_tests";
const char kGTestRepeatFlag[] = "gtest_repeat";
const char kGTestRunDisabledTestsFlag[] = "gtest_also_run_disabled_tests";
const char kGTestOutputFlag[] = "gtest_output";

const char kHelpFlag[]   = "help";

// Forces the process through ContentMain as a browser (see
// ShouldRunContentMain).
const char kLaunchAsBrowser[] = "as-browser";

// See kManualTestPrefix above.
const char kRunManualTestsFlag[] = "run-manual";

// Runs tests and launcher in one process (see PrintUsage).
const char kSingleProcessTestsFlag[]   = "single_process";

// Runs only the warmup empty test, then exits (see LaunchTests).
const char kWarmupFlag[] = "warmup";
613
614
// Out-of-line destructor definition for the interface class.
TestLauncherDelegate::~TestLauncherDelegate() {
}
617
// Returns true when this process should be handed to ContentMain instead of
// acting as the test launcher: i.e. it was spawned as a content child
// process (--type=...) or explicitly launched as a browser. Only supported
// on Windows and Linux.
bool ShouldRunContentMain() {
#if defined(OS_WIN) || defined(OS_LINUX)
  const CommandLine& command_line = *CommandLine::ForCurrentProcess();
  if (command_line.HasSwitch(switches::kProcessType))
    return true;
  return command_line.HasSwitch(kLaunchAsBrowser);
#else
  return false;
#endif  // defined(OS_WIN) || defined(OS_LINUX)
}
627
// Hands the process over to ContentMain with the delegate supplied by the
// launcher delegate.  Windows additionally sets up sandbox info; platforms
// other than Windows/Linux are unsupported (NOTREACHED).
int RunContentMain(int argc, char** argv,
                   TestLauncherDelegate* launcher_delegate) {
#if defined(OS_WIN)
  sandbox::SandboxInterfaceInfo sandbox_info = {0};
  InitializeSandboxInfo(&sandbox_info);
  scoped_ptr<ContentMainDelegate> chrome_main_delegate(
      launcher_delegate->CreateContentMainDelegate());
  return ContentMain(GetModuleHandle(NULL),
                     &sandbox_info,
                     chrome_main_delegate.get());
#elif defined(OS_LINUX)
  scoped_ptr<ContentMainDelegate> chrome_main_delegate(
      launcher_delegate->CreateContentMainDelegate());
  return ContentMain(argc, const_cast<const char**>(argv),
                     chrome_main_delegate.get());
#endif  // defined(OS_WIN)
  NOTREACHED();
  return 0;
}
647
// Main entry point for the launcher.  Depending on the command line this
// either: prints usage; runs the test suite in-process (single-process /
// list / help modes); hands the process to ContentMain (child or browser
// processes); or acts as the out-of-process launcher, optionally sharded and
// repeated per --gtest_repeat.  Returns the process exit code.
int LaunchTests(TestLauncherDelegate* launcher_delegate,
                int argc,
                char** argv) {
  DCHECK(!g_launcher_delegate);
  g_launcher_delegate = launcher_delegate;

  CommandLine::Init(argc, argv);
  const CommandLine* command_line = CommandLine::ForCurrentProcess();

  if (command_line->HasSwitch(kHelpFlag)) {
    PrintUsage();
    return 0;
  }

  // In-process modes: run the suite directly instead of spawning children.
  if (command_line->HasSwitch(kSingleProcessTestsFlag) ||
      (command_line->HasSwitch(switches::kSingleProcess) &&
       command_line->HasSwitch(kGTestFilterFlag)) ||
      command_line->HasSwitch(kGTestListTestsFlag) ||
      command_line->HasSwitch(kGTestHelpFlag)) {
#if defined(OS_WIN)
    if (command_line->HasSwitch(kSingleProcessTestsFlag)) {
      sandbox::SandboxInterfaceInfo sandbox_info;
      InitializeSandboxInfo(&sandbox_info);
      InitializeSandbox(&sandbox_info);
    }
#endif
    return launcher_delegate->RunTestSuite(argc, argv);
  }

  if (ShouldRunContentMain())
    return RunContentMain(argc, argv, launcher_delegate);

  base::AtExitManager at_exit;

  int32 total_shards;
  int32 shard_index;
  bool should_shard = ShouldShard(&total_shards, &shard_index);

  fprintf(stdout,
      "Starting tests...\n"
      "IMPORTANT DEBUGGING NOTE: each test is run inside its own process.\n"
      "For debugging a test inside a debugger, use the\n"
      "--gtest_filter=<your_test_name> flag along with either\n"
      "--single_process (to run the test in one launcher/browser process) or\n"
      "--single-process (to do the above, and also run Chrome in single-"
      "process mode).\n");

  testing::InitGoogleTest(&argc, argv);
  TestTimeouts::Initialize();
  int exit_code = 0;

  std::string empty_test = launcher_delegate->GetEmptyTestName();
  if (!empty_test.empty()) {
    // Make sure the entire browser code is loaded into memory. Reading it
    // from disk may be slow on a busy bot, and can easily exceed the default
    // timeout causing flaky test failures. Use an empty test that only starts
    // and closes a browser with a long timeout to avoid those problems.
    // NOTE: We don't do this when specifying a filter because this slows down
    // the common case of running one test locally, and also on trybots when
    // sharding as this one test runs ~200 times and wastes a few minutes.
    bool warmup = command_line->HasSwitch(kWarmupFlag);
    bool has_filter = command_line->HasSwitch(kGTestFilterFlag);
    if (warmup || (!should_shard && !has_filter)) {
      exit_code = RunTest(launcher_delegate,
                          NULL,
                          empty_test,
                          TestTimeouts::large_test_timeout(),
                          NULL);
      // With --warmup the warmup run is the entire job.
      if (exit_code != 0 || warmup)
        return exit_code;
    }
  }

  int cycles = 1;
  if (command_line->HasSwitch(kGTestRepeatFlag)) {
    base::StringToInt(command_line->GetSwitchValueASCII(kGTestRepeatFlag),
                      &cycles);
  }

  while (cycles != 0) {
    if (!RunTests(launcher_delegate,
                  should_shard,
                  total_shards,
                  shard_index)) {
      exit_code = 1;
      break;
    }

    // Special value "-1" means "repeat indefinitely".
    if (cycles != -1)
      cycles--;
  }
  return exit_code;
}
742
// Returns the delegate registered by LaunchTests(), or NULL before that.
TestLauncherDelegate* GetCurrentTestLauncherDelegate() {
  return g_launcher_delegate;
}
746
747}  // namespace content
748