1#!/usr/bin/env python
2# Copyright 2016 the V8 project authors. All rights reserved.
3# Use of this source code is governed by a BSD-style license that can be
4# found in the LICENSE file.
5'''
6Usage: callstats.py [-h] <command> ...
7
8Optional arguments:
9  -h, --help  show this help message and exit
10
11Commands:
12  run         run chrome with --runtime-call-stats and generate logs
13  stats       process logs and print statistics
14  json        process logs from several versions and generate JSON
15  help        help information
16
For each command, you can try ./callstats.py help <command>.
18'''
19
20import argparse
21import json
22import os
23import re
24import shutil
25import subprocess
26import sys
27import tempfile
28import operator
29
30import numpy
31import scipy
32import scipy.stats
33from math import sqrt
34
35
36# Run benchmarks.
37
def print_command(cmd_args):
  """Echo a command line, shell-quoting arguments that need it.

  Option values containing spaces or starting with '-' are wrapped in
  single quotes so the printed line can be copy-pasted into a shell.
  """
  def fix_for_printing(arg):
    m = re.match(r'^--([^=]+)=(.*)$', arg)
    if m and (' ' in m.group(2) or m.group(2).startswith('-')):
      # Quote only the value part of --option=value.
      arg = "--{}='{}'".format(m.group(1), m.group(2))
    elif ' ' in arg:
      arg = "'{}'".format(arg)
    return arg
  # Use the function form of print (single argument), which behaves the
  # same under Python 2 and 3 and matches the style used elsewhere in
  # this file (e.g. stop_replay_server).
  print(" ".join(map(fix_for_printing, cmd_args)))
47
48
def start_replay_server(args, sites, discard_output=True):
  """Start the web-page-replay server for the given sites.

  Writes the JS injection snippet to a temp file, then launches
  args.replay_bin serving args.replay_wpr on ports derived from
  args.port_offset. Returns {'process': Popen, 'injection': path};
  pass the dict to stop_replay_server() for cleanup.
  """
  with tempfile.NamedTemporaryFile(prefix='callstats-inject-', suffix='.js',
                                   mode='wt', delete=False) as f:
    injection = f.name
    generate_injection(f, sites, args.refresh)
  http_port = 4080 + args.port_offset
  https_port = 4443 + args.port_offset
  cmd_args = [
      args.replay_bin,
      "--port=%s" % http_port,
      "--ssl_port=%s" % https_port,
      "--no-dns_forwarding",
      "--use_closest_match",
      "--no-diff_unknown_requests",
      "--inject_scripts=deterministic.js,{}".format(injection),
      args.replay_wpr,
  ]
  # print(...) with a single argument works identically on Python 2 and 3;
  # this matches the function-call style already used in this file.
  print("=" * 80)
  print_command(cmd_args)
  if discard_output:
    with open(os.devnull, 'w') as null:
      server = subprocess.Popen(cmd_args, stdout=null, stderr=null)
  else:
    # Fixed over-indented else branch from the original.
    server = subprocess.Popen(cmd_args)
  print("RUNNING REPLAY SERVER: %s with PID=%s" % (args.replay_bin, server.pid))
  print("=" * 80)
  return {'process': server, 'injection': injection}
76
77
def stop_replay_server(server):
  """Terminate the replay server process and delete its injection script."""
  process = server['process']
  print("SHUTTING DOWN REPLAY SERVER %s" % process.pid)
  process.terminate()
  # The injection file was created with delete=False; clean it up here.
  os.remove(server['injection'])
82
83
def generate_injection(f, sites, refreshes=0):
  """Write the JS snippet injected into every replayed page to file f.

  The script reloads each matching page `refreshes` times (tracked in
  sessionStorage) and dumps V8 runtime call stats via the
  %GetAndResetRuntimeCallStats native once the per-site timeout expires.

  The original code used the Python-2-only `print >> f, a, b, ...`
  statement; this writes the identical bytes (arguments joined by single
  spaces, trailing newline) explicitly, so it works on Python 2 and 3.
  """
  header = """\
(function() {
  var s = window.sessionStorage.getItem("refreshCounter");
  var refreshTotal = """
  middle = """;
  var refreshCounter = s ? parseInt(s) : refreshTotal;
  var refreshId = refreshTotal - refreshCounter;
  if (refreshCounter > 0) {
    window.sessionStorage.setItem("refreshCounter", refreshCounter-1);
  }
  function match(url, item) {
    if ('regexp' in item) { return url.match(item.regexp) !== null };
    var url_wanted = item.url;
    /* Allow automatic redirections from http to https. */
    if (url_wanted.startsWith("http://") && url.startsWith("https://")) {
      url_wanted = "https://" + url_wanted.substr(7);
    }
    return url.startsWith(url_wanted);
  };
  function onLoad(url) {
    for (var item of sites) {
      if (!match(url, item)) continue;
      var timeout = 'timeline' in item ? 2000 * item.timeline
                  : 'timeout'  in item ? 1000 * (item.timeout - 3)
                  : 10000;
      console.log("Setting time out of " + timeout + " for: " + url);
      window.setTimeout(function() {
        console.log("Time is out for: " + url);
        var msg = "STATS: (" + refreshId + ") " + url;
        %GetAndResetRuntimeCallStats(1, msg);
        if (refreshCounter > 0) {
          console.log(
              "Refresh counter is " + refreshCounter + ", refreshing: " + url);
          window.location.reload();
        }
      }, timeout);
      return;
    }
    console.log("Ignoring: " + url);
  };
  var sites =
    """
  footer = """;
  onLoad(window.location.href);
})();"""
  f.write(" ".join(
      [header, str(refreshes), middle, json.dumps(sites), footer]) + "\n")
128
def get_chrome_flags(js_flags, user_data_dir):
  """Build the chrome flags common to every benchmarking run."""
  flags = [
      "--no-default-browser-check",
      "--no-sandbox",
      "--disable-translate",
      "--enable-benchmarking",
  ]
  flags.append("--js-flags={}".format(js_flags))
  flags.append("--no-first-run")
  flags.append("--user-data-dir={}".format(user_data_dir))
  return flags
139
def get_chrome_replay_flags(args):
  """Build the chrome flags that route all traffic to the replay server."""
  http_port = 4080 + args.port_offset
  https_port = 4443 + args.port_offset
  # Map every host to the local replay ports, except localhost itself.
  resolver_rules = ("--host-resolver-rules="
                    "MAP *:80 localhost:{}, "
                    "MAP *:443 localhost:{}, "
                    "EXCLUDE localhost").format(http_port, https_port)
  return [
      resolver_rules,
      "--ignore-certificate-errors",
      "--disable-seccomp-sandbox",
      "--disable-web-security",
      "--reduce-security-for-testing",
      "--allow-insecure-localhost",
    ]
154
def run_site(site, domain, args, timeout=None):
  """Benchmark a single site, writing chrome's stats output to a file.

  The output file is "<domain>.txt" ("<domain>#<count>.txt" when
  args.repeat is set). A run is retried -- up to args.retries times, or
  forever when args.retries is None -- while chrome crashes or produces an
  empty stats file.
  """
  print "="*80
  print "RUNNING DOMAIN %s" % domain
  print "="*80
  # One result file per iteration when repeating.
  result_template = "{domain}#{count}.txt" if args.repeat else "{domain}.txt"
  count = 0
  if timeout is None: timeout = args.timeout
  if args.replay_wpr:
    # Leave room for the page refreshes performed by the injected script,
    # plus a second of slack.
    timeout *= 1 + args.refresh
    timeout += 1
  retries_since_good_run = 0
  # Run once, or args.repeat times when requested.
  while count == 0 or args.repeat is not None and count < args.repeat:
    count += 1
    result = result_template.format(domain=domain, count=count)
    retries = 0
    while args.retries is None or retries < args.retries:
      retries += 1
      try:
        if args.user_data_dir:
          user_data_dir = args.user_data_dir
        else:
          # Fresh profile per attempt; removed in the finally clause below.
          user_data_dir = tempfile.mkdtemp(prefix="chr_")
        js_flags = "--runtime-call-stats"
        # The injected script calls %GetAndResetRuntimeCallStats, which
        # requires natives syntax.
        if args.replay_wpr: js_flags += " --allow-natives-syntax"
        if args.js_flags: js_flags += " " + args.js_flags
        chrome_flags = get_chrome_flags(js_flags, user_data_dir)
        if args.replay_wpr:
          chrome_flags += get_chrome_replay_flags(args)
        else:
          chrome_flags += [ "--single-process", ]
        if args.chrome_flags:
          chrome_flags += args.chrome_flags.split()
        # timeout(1) kills chrome once the allotted time is up.
        cmd_args = [
            "timeout", str(timeout),
            args.with_chrome
        ] + chrome_flags + [ site ]
        print "- " * 40
        print_command(cmd_args)
        print "- " * 40
        with open(result, "wt") as f:
          with open(args.log_stderr or os.devnull, 'at') as err:
            status = subprocess.call(cmd_args, stdout=f, stderr=err)
        # 124 means timeout killed chrome, 0 means the user was bored first!
        # If none of these two happened, then chrome apparently crashed, so
        # it must be called again.
        if status != 124 and status != 0:
          print("CHROME CRASHED, REPEATING RUN");
          continue
        # If the stats file is empty, chrome must be called again.
        if os.path.isfile(result) and os.path.getsize(result) > 0:
          if args.print_url:
            with open(result, "at") as f:
              print >> f
              print >> f, "URL: {}".format(site)
          retries_since_good_run = 0
          break
        # Empty result: grow the timeout exponentially (capped at 2**5
        # extra seconds per step) before retrying.
        if retries_since_good_run < 6:
          timeout += 2 ** retries_since_good_run
          retries_since_good_run += 1
        print("EMPTY RESULT, REPEATING RUN ({})".format(
            retries_since_good_run));
      finally:
        # Only remove profiles we created ourselves.
        if not args.user_data_dir:
          shutil.rmtree(user_data_dir)
219
220
def read_sites_file(args):
  """Read benchmark sites from args.sites_file.

  The file is either a JSON list of site dicts or a plain text file with
  one URL per line ('#' starts a comment line). On I/O failure, reports
  via args.error and exits.
  """
  try:
    results = []
    try:
      # First attempt: parse the file as a JSON list of site descriptions.
      with open(args.sites_file, "rt") as f:
        for entry in json.load(f):
          if 'timeout' not in entry:
            # This is more-or-less arbitrary.
            entry['timeout'] = int(1.5 * entry['timeline'] + 7)
          # Never exceed the global timeout.
          entry['timeout'] = min(entry['timeout'], args.timeout)
          results.append(entry)
    except ValueError:
      # Fallback: plain text with one URL per line.
      with open(args.sites_file, "rt") as f:
        for raw in f:
          url = raw.strip()
          if not url or url.startswith('#'):
            continue
          results.append({'url': url, 'timeout': args.timeout})
    return results
  except IOError as e:
    args.error("Cannot read from {}. {}.".format(args.sites_file, e.strerror))
    sys.exit(1)
242
243
def read_sites(args):
  """Build the list of site descriptions to benchmark."""
  # A sites file takes precedence over URLs given on the command line.
  if args.sites_file:
    return read_sites_file(args)
  sites = []
  for url in args.sites:
    sites.append({'url': url, 'timeout': args.timeout})
  return sites
249
def do_run(args):
  """Entry point for the 'run' command: benchmark all configured sites."""
  sites = read_sites(args)
  replay_server = start_replay_server(args, sites) if args.replay_wpr else None
  # Disambiguate domains, if needed.
  L = []
  domains = {}
  for item in sites:
    site = item['url']
    domain = None
    if args.domain:
      domain = args.domain
    elif 'domain' in item:
      domain = item['domain']
    else:
      # Fall back to the host part of the URL.
      m = re.match(r'^(https?://)?([^/]+)(/.*)?$', site)
      if not m:
        args.error("Invalid URL {}.".format(site))
        continue
      domain = m.group(2)
    # entry = [site, domain, disambiguating counter or None, timeout].
    entry = [site, domain, None, item['timeout']]
    if domain not in domains:
      # First occurrence: remember the entry itself so we can number it
      # retroactively if a duplicate shows up later.
      domains[domain] = entry
    else:
      if not isinstance(domains[domain], int):
        # Second occurrence: give the first entry counter 1 and switch
        # domains[domain] to an occurrence count.
        domains[domain][2] = 1
        domains[domain] = 1
      domains[domain] += 1
      entry[2] = domains[domain]
    L.append(entry)
  try:
    # Run them.
    for site, domain, count, timeout in L:
      # Duplicate domains get a "%<n>" suffix in their result file name.
      if count is not None: domain = "{}%{}".format(domain, count)
      print(site, domain, timeout)
      run_site(site, domain, args, timeout)
  finally:
    if replay_server:
      stop_replay_server(replay_server)
288
289
def do_run_replay_server(args):
  """Entry point for the 'replay' command: run only the replay server.

  Prints the available URLs and a ready-to-paste chromium command line for
  manual debugging, then blocks until the replay server exits.
  """
  sites = read_sites(args)
  print("- " * 40)
  print("Available URLs:")
  for site in sites:
    print("    " + site['url'])
  print("- " * 40)
  print("Launch chromium with the following commands for debugging:")
  flags = get_chrome_flags("'--runtime-call-stats --allow-natives-syntax'",
                           "/var/tmp/`date +%s`")
  flags += get_chrome_replay_flags(args)
  # Fixed typo in the printed binary name ("chomium" -> "chromium").
  print("    $CHROMIUM_DIR/out/Release/chromium " + (" ".join(flags)) + " <URL>")
  print("- " * 40)
  replay_server = start_replay_server(args, sites, discard_output=False)
  try:
    replay_server['process'].wait()
  finally:
    stop_replay_server(replay_server)
308
309
310# Calculate statistics.
311
def statistics(data):
  """Compute summary statistics for a list of samples.

  Returns a dict with the sample count, average, median, stddev, min, max
  and a 95% confidence interval 'ci' ('abs' half-width, 'low'/'high'
  bounds, and 'perc' half-width as a percentage of the average).

  Fix: 'perc' used to be computed after the N>1/else branches, referencing
  t_bounds which is only defined in the N>1 branch; it only avoided a
  NameError because stddev happens to be 0 for N==1. The computation now
  lives inside the branch that defines t_bounds.
  """
  N = len(data)
  average = numpy.average(data)
  median = numpy.median(data)
  low = numpy.min(data)
  high = numpy.max(data)
  if N > 1:
    # Sample (not population) standard deviation: ddof=1 divides by N-1.
    stddev = numpy.std(data, ddof=1)
    # Endpoints of the symmetric interval containing 95% of Student's
    # t-distribution with N-1 degrees of freedom.
    t_bounds = scipy.stats.t.interval(0.95, N - 1)
    half_width = t_bounds[1] * stddev / sqrt(N)
    # Add the mean to the interval endpoints.
    ci = {
        'abs': half_width,
        'low': average + t_bounds[0] * stddev / sqrt(N),
        'high': average + half_width,
    }
    # The relative interval is only meaningful for non-degenerate samples.
    if abs(stddev) > 0.0001 and abs(average) > 0.0001:
      ci['perc'] = half_width / average * 100
    else:
      ci['perc'] = 0
  else:
    stddev = 0
    ci = { 'abs': 0, 'low': average, 'high': average, 'perc': 0 }
  return { 'samples': N, 'average': average, 'median': median,
           'stddev': stddev, 'min': low, 'max': high, 'ci': ci }
340
341
def read_stats(path, domain, args):
  """Parse one runtime-call-stats log and append its totals to `domain`.

  `domain` maps entry name -> {'time_list': [...], 'count_list': [...]};
  one element is appended to each list per call, i.e. per log file. With
  args.aggregate, Group-* aggregate entries and Group-Total-V8 are
  computed as well.
  """
  groups = [];
  if args.aggregate:
    # Order matters: each entry is attributed only to the FIRST group
    # whose regexp matches (see the `break` below); Group-Runtime is the
    # catch-all at the end.
    groups = [
        ('Group-IC', re.compile(".*IC.*")),
        ('Group-Optimize',
         re.compile("StackGuard|.*Optimize.*|.*Deoptimize.*|Recompile.*")),
        ('Group-Compile', re.compile(".*Compile.*")),
        ('Group-Parse', re.compile(".*Parse.*")),
        ('Group-Callback', re.compile(".*Callback.*")),
        ('Group-API', re.compile(".*API.*")),
        ('Group-GC', re.compile("GC|AllocateInTargetSpace")),
        ('Group-JavaScript', re.compile("JS_Execution")),
        ('Group-Runtime', re.compile(".*"))]
  with open(path, "rt") as f:
    # Process the whole file and sum repeating entries.
    entries = { 'Sum': {'time': 0, 'count': 0} }
    for group_name, regexp in groups:
      entries[group_name] = { 'time': 0, 'count': 0 }
    for line in f:
      line = line.strip()
      # Discard headers and footers.
      if not line: continue
      if line.startswith("Runtime Function"): continue
      if line.startswith("===="): continue
      if line.startswith("----"): continue
      if line.startswith("URL:"): continue
      if line.startswith("STATS:"): continue
      # We have a regular data line: column 0 is the entry name, column 1
      # the time with an "ms" suffix, column 3 the call count.
      fields = line.split()
      key = fields[0]
      time = float(fields[1].replace("ms", ""))
      count = int(fields[3])
      if key not in entries: entries[key] = { 'time': 0, 'count': 0 }
      entries[key]['time'] += time
      entries[key]['count'] += count
      # We calculate the sum, if it's not the "total" line.
      if key != "Total":
        entries['Sum']['time'] += time
        entries['Sum']['count'] += count
        for group_name, regexp in groups:
          if not regexp.match(key): continue
          entries[group_name]['time'] += time
          entries[group_name]['count'] += count
          break
    # Calculate the V8-Total (all groups except Callback)
    total_v8 = { 'time': 0, 'count': 0 }
    for group_name, regexp in groups:
      if group_name == 'Group-Callback': continue
      total_v8['time'] += entries[group_name]['time']
      total_v8['count'] += entries[group_name]['count']
    entries['Group-Total-V8'] = total_v8
    # Append the sums as single entries to domain.
    for key in entries:
      if key not in domain: domain[key] = { 'time_list': [], 'count_list': [] }
      domain[key]['time_list'].append(entries[key]['time'])
      domain[key]['count_list'].append(entries[key]['count'])
399
400
def print_stats(S, args):
  """Pretty-print per-entry time/count statistics.

  S maps entry name -> dict with 'time_stat'/'count_stat' (from
  statistics()) and raw 'time_list'/'count_list'. Honors args.sort
  ('asc'/'desc'), args.limit (0 = unlimited), args.totals and
  args.aggregate.

  Fixes: args.limit larger than the number of entries used to raise an
  IndexError (desc) or print tail entries twice via a negative slice start
  (asc); `partial` used to be referenced before assignment when totals
  were requested but no entry was printed.
  """
  # Sort by ascending/descending time average, then by ascending/descending
  # count average, then by ascending name.
  def sort_asc_func(item):
    return (item[1]['time_stat']['average'],
            item[1]['count_stat']['average'],
            item[0])
  def sort_desc_func(item):
    return (-item[1]['time_stat']['average'],
            -item[1]['count_stat']['average'],
            item[0])
  # Sorting order comes from the command-line arguments.
  sort_func = sort_asc_func if args.sort == "asc" else sort_desc_func
  # "Total" and "Sum" are printed separately below.
  L = [item for item in sorted(S.items(), key=sort_func)
       if item[0] not in ["Total", "Sum"]]
  N = len(L)
  # Possibly limit how many elements to print, clamped to [0, N].
  if args.limit == 0:
    low, high = 0, N
  elif args.sort == "desc":
    low, high = 0, min(args.limit, N)
  else:
    low, high = max(N - args.limit, 0), N
  # How to print entries.
  def print_entry(key, value):
    def stats(s, units=""):
      conf = "{:0.1f}({:0.2f}%)".format(s['ci']['abs'], s['ci']['perc'])
      return "{:8.1f}{} +/- {:15s}".format(s['average'], units, conf)
    print("{:>50s}  {}  {}".format(
      key,
      stats(value['time_stat'], units="ms"),
      stats(value['count_stat'])
    ))
  # Print and calculate partial sums, if necessary.
  partial = None
  for i in range(low, high):
    print_entry(*L[i])
    if args.totals and args.limit != 0 and not args.aggregate:
      if partial is None:
        # Element-wise accumulators across the printed entries.
        partial = { 'time_list': [0] * len(L[i][1]['time_list']),
                    'count_list': [0] * len(L[i][1]['count_list']) }
      assert len(partial['time_list']) == len(L[i][1]['time_list'])
      assert len(partial['count_list']) == len(L[i][1]['count_list'])
      for j, v in enumerate(L[i][1]['time_list']):
        partial['time_list'][j] += v
      for j, v in enumerate(L[i][1]['count_list']):
        partial['count_list'][j] += v
  # Print totals, if necessary.
  if args.totals:
    print('-' * 80)
    if partial is not None:
      partial['time_stat'] = statistics(partial['time_list'])
      partial['count_stat'] = statistics(partial['count_list'])
      print_entry("Partial", partial)
    print_entry("Sum", S["Sum"])
    print_entry("Total", S["Total"])
456
457
def do_stats(args):
  """Entry point for the 'stats' command: aggregate log files and print."""
  domains = {}
  for path in args.logfiles:
    filename = os.path.basename(path)
    # Log files are named "<domain>[#<count>]..."; group them by domain.
    m = re.match(r'^([^#]+)(#.*)?$', filename)
    domain = m.group(1)
    if domain not in domains: domains[domain] = {}
    read_stats(path, domains[domain], args)
  if args.aggregate:
    create_total_page_stats(domains, args)
  for i, domain in enumerate(sorted(domains)):
    # Print a header per domain when there is more than one.
    if len(domains) > 1:
      if i > 0: print
      print "{}:".format(domain)
      print '=' * 80
    domain_stats = domains[domain]
    # Turn the raw sample lists into statistics for printing.
    for key in domain_stats:
      domain_stats[key]['time_stat'] = \
          statistics(domain_stats[key]['time_list'])
      domain_stats[key]['count_stat'] = \
          statistics(domain_stats[key]['count_list'])
    print_stats(domain_stats, args)
480
481
482# Create a Total page with all entries summed up.
def create_total_page_stats(domains, args):
  """Add a synthetic 'Total' domain summing each metric across all domains."""
  total = {}
  def accumulate(acc, values):
    # Element-wise addition, growing acc as needed; None items are skipped.
    if len(acc) < len(values):
      acc.extend([0] * (len(values) - len(acc)))
    for i, v in enumerate(values):
      if v is not None:
        acc[i] += v
  # Sum up all the entries/metrics from all domains.
  for entries in domains.values():
    for key, domain_stats in entries.items():
      if key in total:
        accumulate(total[key]['time_list'], domain_stats['time_list'])
        accumulate(total[key]['count_list'], domain_stats['count_list'])
      else:
        total[key] = {'time_list': list(domain_stats['time_list']),
                      'count_list': list(domain_stats['count_list'])}
  # Add a new "Total" page containing the summed up metrics.
  domains['Total'] = total
504
505
506# Generate JSON file.
507
def do_json(args):
  """Entry point for the 'json' command: print all collected stats as JSON.

  Walks each log directory; the basename of every visited directory
  becomes a version name, and the "<domain>[#<count>].txt" files inside
  contribute samples for that version/domain.
  """
  versions = {}
  for path in args.logdirs:
    if os.path.isdir(path):
      for root, dirs, files in os.walk(path):
        version = os.path.basename(root)
        if version not in versions: versions[version] = {}
        for filename in files:
          if filename.endswith(".txt"):
            m = re.match(r'^([^#]+)(#.*)?\.txt$', filename)
            domain = m.group(1)
            if domain not in versions[version]: versions[version][domain] = {}
            read_stats(os.path.join(root, filename),
                       versions[version][domain], args)
  for version, domains in versions.items():
    if args.aggregate:
      create_total_page_stats(domains, args)
    for domain, entries in domains.items():
      stats = []
      for name, value in entries.items():
        # We don't want the calculated sum in the JSON file.
        if name == "Sum": continue
        # Flatten each entry to [name, avg, ci_abs, ci_perc] for the time
        # samples followed by the same triple for the count samples.
        entry = [name]
        for x in ['time_list', 'count_list']:
          s = statistics(entries[name][x])
          entry.append(round(s['average'], 1))
          entry.append(round(s['ci']['abs'], 1))
          entry.append(round(s['ci']['perc'], 2))
        stats.append(entry)
      domains[domain] = stats
  print json.dumps(versions, separators=(',', ':'))
539
540
541# Help.
542
def do_help(parser, subparsers, args):
  """Print help for the requested sub-command, or the global help."""
  cmd = args.help_cmd
  if not cmd:
    parser.print_help()
    return
  if cmd in subparsers:
    subparsers[cmd].print_help()
  else:
    args.error("Unknown command '{}'".format(cmd))
551
552
553# Main program, parse command line and execute.
554
def coexist(*l):
  """Return True iff the given values are all truthy or all falsy."""
  truthy = [bool(x) for x in l]
  return all(truthy) or not any(truthy)
558
def main():
  """Parse the command line and dispatch to the selected sub-command.

  String fixes relative to the original: "Analize the results file create
  by" -> "Analyze the results files created by", and the 'json' help text
  previously rendered as "...command intoa single json file." because of a
  missing space between two concatenated string literals.
  """
  parser = argparse.ArgumentParser()
  subparser_adder = parser.add_subparsers(title="commands", dest="command",
                                          metavar="<command>")
  subparsers = {}
  # Command: run.
  subparsers["run"] = subparser_adder.add_parser(
      "run", help="Replay websites and collect runtime stats data.")
  subparsers["run"].set_defaults(
      func=do_run, error=subparsers["run"].error)
  subparsers["run"].add_argument(
      "--chrome-flags", type=str, default="",
      help="specify additional chrome flags")
  subparsers["run"].add_argument(
      "--js-flags", type=str, default="",
      help="specify additional V8 flags")
  subparsers["run"].add_argument(
      "-u", "--user-data-dir", type=str, metavar="<path>",
      help="specify user data dir (default is temporary)")
  subparsers["run"].add_argument(
      "-c", "--with-chrome", type=str, metavar="<path>",
      default="/usr/bin/google-chrome",
      help="specify chrome executable to use")
  subparsers["run"].add_argument(
      "-r", "--retries", type=int, metavar="<num>",
      help="specify retries if website is down (default: forever)")
  subparsers["run"].add_argument(
      "--no-url", dest="print_url", action="store_false", default=True,
      help="do not include url in statistics file")
  subparsers["run"].add_argument(
      "--domain", type=str, default="",
      help="specify the output file domain name")
  subparsers["run"].add_argument(
      "-n", "--repeat", type=int, metavar="<num>",
      help="specify iterations for each website (default: once)")

  # Arguments shared by the 'run' and 'replay' commands.
  def add_replay_args(subparser):
    subparser.add_argument(
        "-k", "--refresh", type=int, metavar="<num>", default=0,
        help="specify refreshes for each iteration (default: 0)")
    subparser.add_argument(
        "--replay-wpr", type=str, metavar="<path>",
        help="use the specified web page replay (.wpr) archive")
    subparser.add_argument(
        "--replay-bin", type=str, metavar="<path>",
        help="specify the replay.py script typically located in " \
             "$CHROMIUM/src/third_party/webpagereplay/replay.py")
    subparser.add_argument(
        "-f", "--sites-file", type=str, metavar="<path>",
        help="specify file containing benchmark websites")
    subparser.add_argument(
        "-t", "--timeout", type=int, metavar="<seconds>", default=60,
        help="specify seconds before chrome is killed")
    subparser.add_argument(
        "-p", "--port-offset", type=int, metavar="<offset>", default=0,
        help="specify the offset for the replay server's default ports")
    subparser.add_argument(
        "-l", "--log-stderr", type=str, metavar="<path>",
        help="specify where chrome's stderr should go (default: /dev/null)")
    subparser.add_argument(
        "sites", type=str, metavar="<URL>", nargs="*",
        help="specify benchmark website")
  add_replay_args(subparsers["run"])

  # Command: replay-server
  subparsers["replay"] = subparser_adder.add_parser(
      "replay", help="Run the replay server for debugging purposes")
  subparsers["replay"].set_defaults(
      func=do_run_replay_server, error=subparsers["replay"].error)
  add_replay_args(subparsers["replay"])

  # Command: stats.
  subparsers["stats"] = subparser_adder.add_parser(
      "stats", help="Analyze the results files created by the 'run' command.")
  subparsers["stats"].set_defaults(
      func=do_stats, error=subparsers["stats"].error)
  subparsers["stats"].add_argument(
      "-l", "--limit", type=int, metavar="<num>", default=0,
      help="limit how many items to print (default: none)")
  subparsers["stats"].add_argument(
      "-s", "--sort", choices=["asc", "desc"], default="asc",
      help="specify sorting order (default: ascending)")
  subparsers["stats"].add_argument(
      "-n", "--no-total", dest="totals", action="store_false", default=True,
      help="do not print totals")
  subparsers["stats"].add_argument(
      "logfiles", type=str, metavar="<logfile>", nargs="*",
      help="specify log files to parse")
  subparsers["stats"].add_argument(
      "--aggregate", dest="aggregate", action="store_true", default=False,
      help="Create aggregated entries. Adds Group-* entries at the toplevel. " \
      "Additionally creates a Total page with all entries.")

  # Command: json.
  subparsers["json"] = subparser_adder.add_parser(
      "json", help="Collect results files created by the 'run' command " \
          "into a single json file.")
  subparsers["json"].set_defaults(
      func=do_json, error=subparsers["json"].error)
  subparsers["json"].add_argument(
      "logdirs", type=str, metavar="<logdir>", nargs="*",
      help="specify directories with log files to parse")
  subparsers["json"].add_argument(
      "--aggregate", dest="aggregate", action="store_true", default=False,
      help="Create aggregated entries. Adds Group-* entries at the toplevel. " \
      "Additionally creates a Total page with all entries.")

  # Command: help.
  subparsers["help"] = subparser_adder.add_parser(
      "help", help="help information")
  subparsers["help"].set_defaults(
      func=lambda args: do_help(parser, subparsers, args),
      error=subparsers["help"].error)
  subparsers["help"].add_argument(
      "help_cmd", type=str, metavar="<command>", nargs="?",
      help="command for which to display help")

  # Execute the command.
  args = parser.parse_args()
  setattr(args, 'script_path', os.path.dirname(sys.argv[0]))
  # coexist() is True when both or neither are given; exactly one source
  # of URLs (file or command line) is required for 'run'.
  if args.command == "run" and coexist(args.sites_file, args.sites):
    args.error("use either option --sites-file or site URLs")
    sys.exit(1)
  elif args.command == "run" and not coexist(args.replay_wpr, args.replay_bin):
    args.error("options --replay-wpr and --replay-bin must be used together")
    sys.exit(1)
  else:
    args.func(args)
687
# Script entry point: propagate main()'s return value (implicitly None,
# i.e. exit status 0) to the shell.
if __name__ == "__main__":
  sys.exit(main())
690