#!/usr/bin/env python
# Copyright 2016 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''
Usage: callstats.py [-h] <command> ...

Optional arguments:
  -h, --help  show this help message and exit

Commands:
  run         run chrome with --runtime-call-stats and generate logs
  replay      run the replay server on its own for debugging
  stats       process logs and print statistics
  json        process logs from several versions and generate JSON
  help        help information

For each command, you can try ./callstats.py help <command>.
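
Example invocations (file and directory names are illustrative):
  ./callstats.py run -f sites.json -n 5 -c /usr/bin/google-chrome
  ./callstats.py stats --aggregate *.txt
  ./callstats.py json --aggregate logs-v1 logs-v2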
'''

import argparse
import json
import os
import re
import shutil
import subprocess
import sys
import tempfile

import numpy
import scipy
import scipy.stats
from math import sqrt


# Run benchmarks.

def print_command(cmd_args):
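  # Print the command in a shell copy-pastable form: quote any argument (or
  # any --flag=value payload) that contains spaces or starts with a dash.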
  def fix_for_printing(arg):
    m = re.match(r'^--([^=]+)=(.*)$', arg)
    if m and (' ' in m.group(2) or m.group(2).startswith('-')):
      arg = "--{}='{}'".format(m.group(1), m.group(2))
    elif ' ' in arg:
      arg = "'{}'".format(arg)
    return arg
  print " ".join(map(fix_for_printing, cmd_args))


def start_replay_server(args, sites, discard_output=True):
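  # Write the generated injection script to a temporary file, then launch the
  # web-page-replay server on the (offset) HTTP/HTTPS ports. Returns a dict
  # with the server 'process' and the 'injection' file path for later cleanup.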
  with tempfile.NamedTemporaryFile(prefix='callstats-inject-', suffix='.js',
                                   mode='wt', delete=False) as f:
    injection = f.name
    generate_injection(f, sites, args.refresh)
  http_port = 4080 + args.port_offset
  https_port = 4443 + args.port_offset
  cmd_args = [
      args.replay_bin,
      "--port=%s" % http_port,
      "--ssl_port=%s" % https_port,
      "--no-dns_forwarding",
      "--use_closest_match",
      "--no-diff_unknown_requests",
      "--inject_scripts=deterministic.js,{}".format(injection),
      args.replay_wpr,
  ]
  print "=" * 80
  print_command(cmd_args)
  if discard_output:
    with open(os.devnull, 'w') as null:
      server = subprocess.Popen(cmd_args, stdout=null, stderr=null)
  else:
    server = subprocess.Popen(cmd_args)
  print "RUNNING REPLAY SERVER: %s with PID=%s" % (args.replay_bin, server.pid)
  print "=" * 80
  return {'process': server, 'injection': injection}


def stop_replay_server(server):
  print("SHUTTING DOWN REPLAY SERVER %s" % server['process'].pid)
  server['process'].terminate()
  os.remove(server['injection'])


def generate_injection(f, sites, refreshes=0):
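  # Emit the JavaScript snippet that web-page-replay injects into every page.
  # When it runs on a matching site, it schedules a timer based on the site's
  # 'timeline'/'timeout' entry; when the timer fires it dumps the stats via
  # the %GetAndResetRuntimeCallStats native (hence --allow-natives-syntax)
  # and reloads the page while refreshes remain (tracked in sessionStorage).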
  print >> f, """\
(function() {
  var s = window.sessionStorage.getItem("refreshCounter");
  var refreshTotal = """, refreshes, """;
  var refreshCounter = s ? parseInt(s) : refreshTotal;
  var refreshId = refreshTotal - refreshCounter;
  if (refreshCounter > 0) {
    window.sessionStorage.setItem("refreshCounter", refreshCounter-1);
  }
  function match(url, item) {
    if ('regexp' in item) { return url.match(item.regexp) !== null };
    var url_wanted = item.url;
    /* Allow automatic redirections from http to https. */
    if (url_wanted.startsWith("http://") && url.startsWith("https://")) {
      url_wanted = "https://" + url_wanted.substr(7);
    }
    return url.startsWith(url_wanted);
  };
  function onLoad(url) {
    for (var item of sites) {
      if (!match(url, item)) continue;
      var timeout = 'timeline' in item ? 2000 * item.timeline
                  : 'timeout'  in item ? 1000 * (item.timeout - 3)
                  : 10000;
      console.log("Setting time out of " + timeout + " for: " + url);
      window.setTimeout(function() {
        console.log("Time is out for: " + url);
        var msg = "STATS: (" + refreshId + ") " + url;
        %GetAndResetRuntimeCallStats(1, msg);
        if (refreshCounter > 0) {
          console.log(
              "Refresh counter is " + refreshCounter + ", refreshing: " + url);
          window.location.reload();
        }
      }, timeout);
      return;
    }
    console.log("Ignoring: " + url);
  };
  var sites =
    """, json.dumps(sites), """;
  onLoad(window.location.href);
})();"""

def get_chrome_flags(js_flags, user_data_dir, arg_delimiter=""):
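  # Base Chrome flags for benchmarking; arg_delimiter optionally wraps values
  # in quotes so the flags can be printed in a shell copy-pastable form.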
  return [
      "--no-default-browser-check",
      "--no-sandbox",
      "--disable-translate",
      "--enable-benchmarking",
      "--enable-stats-table",
      "--js-flags={}{}{}".format(arg_delimiter, js_flags, arg_delimiter),
      "--no-first-run",
      "--user-data-dir={}{}{}".format(arg_delimiter, user_data_dir,
                                      arg_delimiter),
    ]

def get_chrome_replay_flags(args, arg_delimiter=""):
  http_port = 4080 + args.port_offset
  https_port = 4443 + args.port_offset
  return [
      "--host-resolver-rules=%sMAP *:80 localhost:%s, "  \
                              "MAP *:443 localhost:%s, " \
                              "EXCLUDE localhost%s" % (
                               arg_delimiter, http_port, https_port,
                               arg_delimiter),
      "--ignore-certificate-errors",
      "--disable-seccomp-sandbox",
      "--disable-web-security",
      "--reduce-security-for-testing",
      "--allow-insecure-localhost",
    ]

def run_site(site, domain, args, timeout=None):
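  # Run chrome on a single site, writing the stats log to <domain>.txt (or
  # <domain>#<count>.txt when repeating). A run is retried if chrome exits
  # with a status other than 0 or 124 (the `timeout` kill), or if the result
  # file comes out empty; after each empty result the timeout grows by
  # 2**retries_since_good_run, doubling at most six times.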
  print "="*80
  print "RUNNING DOMAIN %s" % domain
  print "="*80
  result_template = "{domain}#{count}.txt" if args.repeat else "{domain}.txt"
  count = 0
  if timeout is None: timeout = args.timeout
  if args.replay_wpr:
    timeout *= 1 + args.refresh
    timeout += 1
  retries_since_good_run = 0
  while count == 0 or args.repeat is not None and count < args.repeat:
    count += 1
    result = result_template.format(domain=domain, count=count)
    retries = 0
    while args.retries is None or retries < args.retries:
      retries += 1
      try:
        if args.user_data_dir:
          user_data_dir = args.user_data_dir
        else:
          user_data_dir = tempfile.mkdtemp(prefix="chr_")
        js_flags = "--runtime-call-stats --noconcurrent-recompilation"
        if args.replay_wpr: js_flags += " --allow-natives-syntax"
        if args.js_flags: js_flags += " " + args.js_flags
        chrome_flags = get_chrome_flags(js_flags, user_data_dir)
        if args.replay_wpr:
          chrome_flags += get_chrome_replay_flags(args)
        else:
          chrome_flags += [ "--single-process", ]
        if args.chrome_flags:
          chrome_flags += args.chrome_flags.split()
        cmd_args = [
            "timeout", str(timeout),
            args.with_chrome
        ] + chrome_flags + [ site ]
        print "- " * 40
        print_command(cmd_args)
        print "- " * 40
        with open(result, "wt") as f:
          with open(args.log_stderr or os.devnull, 'at') as err:
            status = subprocess.call(cmd_args, stdout=f, stderr=err)
        # 124 means timeout killed chrome, 0 means the user was bored first!
        # If neither of these happened, then chrome apparently crashed, so
        # it must be called again.
        if status != 124 and status != 0:
          print("CHROME CRASHED, REPEATING RUN")
          continue
        # If the stats file is empty, chrome must be called again.
        if os.path.isfile(result) and os.path.getsize(result) > 0:
          if args.print_url:
            with open(result, "at") as f:
              print >> f
              print >> f, "URL: {}".format(site)
          retries_since_good_run = 0
          break
        if retries_since_good_run < 6:
          timeout += 2 ** retries_since_good_run
          retries_since_good_run += 1
        print("EMPTY RESULT, REPEATING RUN ({})".format(
            retries_since_good_run))
      finally:
        if not args.user_data_dir:
          shutil.rmtree(user_data_dir)


def read_sites_file(args):
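  # Read the benchmark sites, first trying the file as JSON (a list of
  # objects with 'url' and optional 'timeout'/'timeline'/'domain' keys) and
  # falling back to plain text with one URL per line and '#' comments.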
  try:
    sites = []
    try:
      with open(args.sites_file, "rt") as f:
        for item in json.load(f):
          if 'timeout' not in item:
            # This is more-or-less arbitrary.
            item['timeout'] = int(1.5 * item['timeline'] + 7)
          if item['timeout'] > args.timeout: item['timeout'] = args.timeout
          sites.append(item)
    except ValueError:
      with open(args.sites_file, "rt") as f:
        for line in f:
          line = line.strip()
          if not line or line.startswith('#'): continue
          sites.append({'url': line, 'timeout': args.timeout})
    return sites
  except IOError as e:
    args.error("Cannot read from {}. {}.".format(args.sites_file, e.strerror))
    sys.exit(1)


def read_sites(args):
  # Determine the websites to benchmark.
  if args.sites_file:
    return read_sites_file(args)
  return [{'url': site, 'timeout': args.timeout} for site in args.sites]

def do_run(args):
  sites = read_sites(args)
  replay_server = start_replay_server(args, sites) if args.replay_wpr else None
  # Disambiguate domains, if needed.
  L = []
  domains = {}
  for item in sites:
    site = item['url']
    domain = None
    if args.domain:
      domain = args.domain
    elif 'domain' in item:
      domain = item['domain']
    else:
      m = re.match(r'^(https?://)?([^/]+)(/.*)?$', site)
      if not m:
        args.error("Invalid URL {}.".format(site))
        continue
      domain = m.group(2)
    entry = [site, domain, None, item['timeout']]
    if domain not in domains:
      domains[domain] = entry
    else:
      if not isinstance(domains[domain], int):
        domains[domain][2] = 1
        domains[domain] = 1
      domains[domain] += 1
      entry[2] = domains[domain]
    L.append(entry)
  try:
    # Run them.
    for site, domain, count, timeout in L:
      if count is not None: domain = "{}%{}".format(domain, count)
      print(site, domain, timeout)
      run_site(site, domain, args, timeout)
  finally:
    if replay_server:
      stop_replay_server(replay_server)


def do_run_replay_server(args):
  sites = read_sites(args)
  print("- " * 40)
  print("Available URLs:")
  for site in sites:
    print("    "+site['url'])
  print("- " * 40)
  print("Launch chromium with the following command for debugging:")
  flags = get_chrome_flags("--runtime-call-stats --allow-natives-syntax",
                           "/var/tmp/`date +%s`", '"')
  flags += get_chrome_replay_flags(args, "'")
  print("    $CHROMIUM_DIR/out/Release/chrome " + (" ".join(flags)) + " <URL>")
  print("- " * 40)
  replay_server = start_replay_server(args, sites, discard_output=False)
  try:
    replay_server['process'].wait()
  finally:
    stop_replay_server(replay_server)


# Calculate statistics.

def statistics(data):
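  # Summarize a list of samples: average, median, min/max, sample standard
  # deviation, and a 95% confidence interval for the mean based on Student's
  # t distribution with N-1 degrees of freedom.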
  N = len(data)
  average = numpy.average(data)
  median = numpy.median(data)
  low = numpy.min(data)
  high = numpy.max(data)
  if N > 1:
    # Compute the sample standard deviation by setting delta degrees of
    # freedom (ddof) to 1; the divisor used is then N - ddof.
    stddev = numpy.std(data, ddof=1)
    # Get the endpoints of the range that contains 95% of the distribution.
    t_bounds = scipy.stats.t.interval(0.95, N-1)
    # assert abs(t_bounds[0] + t_bounds[1]) < 1e-6
    # Add the mean to obtain the confidence interval endpoints.
    ci = {
        'abs': t_bounds[1] * stddev / sqrt(N),
        'low': average + t_bounds[0] * stddev / sqrt(N),
        'high': average + t_bounds[1] * stddev / sqrt(N)
    }
  else:
    stddev = 0
    ci = { 'abs': 0, 'low': average, 'high': average }
  if abs(stddev) > 0.0001 and abs(average) > 0.0001:
    ci['perc'] = t_bounds[1] * stddev / sqrt(N) / average * 100
  else:
    ci['perc'] = 0
  return { 'samples': N, 'average': average, 'median': median,
           'stddev': stddev, 'min': low, 'max': high, 'ci': ci }


def read_stats(path, domain, args):
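  # Parse one --runtime-call-stats log file. Each data line looks like
  # "<name> <time>ms <time%> <count> <count%>"; repeated names are summed.
  # Adds a 'Sum' entry, optional Group-* aggregates, and appends the per-file
  # totals to domain[key]['time_list'] / ['count_list'].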
  groups = []
  if args.aggregate:
    groups = [
        ('Group-IC', re.compile(".*IC_.*")),
        ('Group-Optimize',
         re.compile("StackGuard|.*Optimize.*|.*Deoptimize.*|Recompile.*")),
        ('Group-CompileBackground', re.compile("(.*CompileBackground.*)")),
        ('Group-Compile', re.compile("(^Compile.*)|(.*_Compile.*)")),
        ('Group-ParseBackground', re.compile(".*ParseBackground.*")),
        ('Group-Parse', re.compile(".*Parse.*")),
        ('Group-Callback', re.compile(".*Callback.*")),
        ('Group-API', re.compile(".*API.*")),
        ('Group-GC', re.compile("GC|AllocateInTargetSpace")),
        ('Group-JavaScript', re.compile("JS_Execution")),
        ('Group-Runtime', re.compile(".*"))]
  with open(path, "rt") as f:
    # Process the whole file and sum repeating entries.
    entries = { 'Sum': {'time': 0, 'count': 0} }
    for group_name, regexp in groups:
      entries[group_name] = { 'time': 0, 'count': 0 }
    for line in f:
      line = line.strip()
      # Discard headers and footers.
      if not line: continue
      if line.startswith("Runtime Function"): continue
      if line.startswith("===="): continue
      if line.startswith("----"): continue
      if line.startswith("URL:"): continue
      if line.startswith("STATS:"): continue
      # We have a regular line.
      fields = line.split()
      key = fields[0]
      time = float(fields[1].replace("ms", ""))
      count = int(fields[3])
      if key not in entries: entries[key] = { 'time': 0, 'count': 0 }
      entries[key]['time'] += time
      entries[key]['count'] += count
      # Add to the sum and to the first matching group, unless this is the
      # "Total" line.
      if key != "Total":
        entries['Sum']['time'] += time
        entries['Sum']['count'] += count
        for group_name, regexp in groups:
          if not regexp.match(key): continue
          entries[group_name]['time'] += time
          entries[group_name]['count'] += count
          break
    # Calculate the V8-Total (all groups except Callback).
    group_data = { 'time': 0, 'count': 0 }
    for group_name, regexp in groups:
      if group_name == 'Group-Callback': continue
      group_data['time'] += entries[group_name]['time']
      group_data['count'] += entries[group_name]['count']
    entries['Group-Total-V8'] = group_data
    # Calculate the Parse-Total group.
    group_data = { 'time': 0, 'count': 0 }
    for group_name, regexp in groups:
      if not group_name.startswith('Group-Parse'): continue
      group_data['time'] += entries[group_name]['time']
      group_data['count'] += entries[group_name]['count']
    entries['Group-Parse-Total'] = group_data
    # Calculate the Compile-Total group.
    group_data = { 'time': 0, 'count': 0 }
    for group_name, regexp in groups:
      if not group_name.startswith('Group-Compile'): continue
      group_data['time'] += entries[group_name]['time']
      group_data['count'] += entries[group_name]['count']
    entries['Group-Compile-Total'] = group_data
    # Append the sums as single entries to domain.
    for key in entries:
      if key not in domain: domain[key] = { 'time_list': [], 'count_list': [] }
      domain[key]['time_list'].append(entries[key]['time'])
      domain[key]['count_list'].append(entries[key]['count'])


def print_stats(S, args):
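  # Pretty-print per-entry statistics, sorted by average time, then average
  # count, then name. With --limit, only the top/bottom entries are shown
  # and, when totals are enabled, a "Partial" line sums the printed entries.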
  # Sort by ascending/descending time average, then by ascending/descending
  # count average, then by ascending name.
  def sort_asc_func(item):
    return (item[1]['time_stat']['average'],
            item[1]['count_stat']['average'],
            item[0])
  def sort_desc_func(item):
    return (-item[1]['time_stat']['average'],
            -item[1]['count_stat']['average'],
            item[0])
  # Sorting order is given in the command-line arguments.
  sort_func = sort_asc_func if args.sort == "asc" else sort_desc_func
  # Possibly limit how many elements to print.
  L = [item for item in sorted(S.items(), key=sort_func)
       if item[0] not in ["Total", "Sum"]]
  N = len(L)
  if args.limit == 0:
    low, high = 0, N
  elif args.sort == "desc":
    low, high = 0, args.limit
  else:
    low, high = N-args.limit, N
  # How to print entries.
  def print_entry(key, value):
    def stats(s, units=""):
      conf = "{:0.1f}({:0.2f}%)".format(s['ci']['abs'], s['ci']['perc'])
      return "{:8.1f}{} +/- {:15s}".format(s['average'], units, conf)
    print "{:>50s}  {}  {}".format(
      key,
      stats(value['time_stat'], units="ms"),
      stats(value['count_stat'])
    )
  # Print and calculate partial sums, if necessary.
  for i in range(low, high):
    print_entry(*L[i])
    if args.totals and args.limit != 0 and not args.aggregate:
      if i == low:
        partial = { 'time_list': [0] * len(L[i][1]['time_list']),
                    'count_list': [0] * len(L[i][1]['count_list']) }
      assert len(partial['time_list']) == len(L[i][1]['time_list'])
      assert len(partial['count_list']) == len(L[i][1]['count_list'])
      for j, v in enumerate(L[i][1]['time_list']):
        partial['time_list'][j] += v
      for j, v in enumerate(L[i][1]['count_list']):
        partial['count_list'][j] += v
  # Print totals, if necessary.
  if args.totals:
    print '-' * 80
    if args.limit != 0 and not args.aggregate:
      partial['time_stat'] = statistics(partial['time_list'])
      partial['count_stat'] = statistics(partial['count_list'])
      print_entry("Partial", partial)
    print_entry("Sum", S["Sum"])
    print_entry("Total", S["Total"])


def do_stats(args):
  domains = {}
  for path in args.logfiles:
    filename = os.path.basename(path)
    m = re.match(r'^([^#]+)(#.*)?$', filename)
    domain = m.group(1)
    if domain not in domains: domains[domain] = {}
    read_stats(path, domains[domain], args)
  if args.aggregate:
    create_total_page_stats(domains, args)
  for i, domain in enumerate(sorted(domains)):
    if len(domains) > 1:
      if i > 0: print
      print "{}:".format(domain)
      print '=' * 80
    domain_stats = domains[domain]
    for key in domain_stats:
      domain_stats[key]['time_stat'] = \
          statistics(domain_stats[key]['time_list'])
      domain_stats[key]['count_stat'] = \
          statistics(domain_stats[key]['count_list'])
    print_stats(domain_stats, args)


# Create a "Total" page with all entries summed up.
def create_total_page_stats(domains, args):
  total = {}
  def sum_up(parent, key, other):
    sums = parent[key]
    for i, item in enumerate(other[key]):
      if i >= len(sums):
        sums.extend([0] * (i - len(sums) + 1))
      if item is not None:
        sums[i] += item
  # Sum up all the entries/metrics from all domains.
  for domain, entries in domains.items():
    for key, domain_stats in entries.items():
      if key not in total:
        total[key] = {}
        total[key]['time_list'] = list(domain_stats['time_list'])
        total[key]['count_list'] = list(domain_stats['count_list'])
      else:
        sum_up(total[key], 'time_list', domain_stats)
        sum_up(total[key], 'count_list', domain_stats)
  # Add a new "Total" page containing the summed up metrics.
  domains['Total'] = total


# Generate JSON file.

def do_json(args):
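  # Walk the given log directories; each subdirectory name is treated as a
  # version, and every "<domain>[#<n>].txt" file inside it is parsed. Emits
  # a compact JSON object mapping version -> domain -> list of
  # [name, time avg, time CI, time CI%, count avg, count CI, count CI%].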
  versions = {}
  for path in args.logdirs:
    if os.path.isdir(path):
      for root, dirs, files in os.walk(path):
        version = os.path.basename(root)
        if version not in versions: versions[version] = {}
        for filename in files:
          if filename.endswith(".txt"):
            m = re.match(r'^([^#]+)(#.*)?\.txt$', filename)
            domain = m.group(1)
            if domain not in versions[version]: versions[version][domain] = {}
            read_stats(os.path.join(root, filename),
                       versions[version][domain], args)
  for version, domains in versions.items():
    if args.aggregate:
      create_total_page_stats(domains, args)
    for domain, entries in domains.items():
      stats = []
      for name, value in entries.items():
        # We don't want the calculated sum in the JSON file.
        if name == "Sum": continue
        entry = [name]
        for x in ['time_list', 'count_list']:
          s = statistics(entries[name][x])
          entry.append(round(s['average'], 1))
          entry.append(round(s['ci']['abs'], 1))
          entry.append(round(s['ci']['perc'], 2))
        stats.append(entry)
      domains[domain] = stats
  print json.dumps(versions, separators=(',', ':'))


# Help.

def do_help(parser, subparsers, args):
  if args.help_cmd:
    if args.help_cmd in subparsers:
      subparsers[args.help_cmd].print_help()
    else:
      args.error("Unknown command '{}'".format(args.help_cmd))
  else:
    parser.print_help()


# Main program, parse command line and execute.

def coexist(*l):
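  # True when all of the given values are set or none of them is; used to
  # check that mutually dependent (or mutually exclusive) options agree.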
  given = sum(1 for x in l if x)
  return given == 0 or given == len(l)

def main():
  parser = argparse.ArgumentParser()
  subparser_adder = parser.add_subparsers(title="commands", dest="command",
                                          metavar="<command>")
  subparsers = {}
  # Command: run.
  subparsers["run"] = subparser_adder.add_parser(
      "run", help="Replay websites and collect runtime stats data.")
  subparsers["run"].set_defaults(
      func=do_run, error=subparsers["run"].error)
  subparsers["run"].add_argument(
      "--chrome-flags", type=str, default="",
      help="specify additional chrome flags")
  subparsers["run"].add_argument(
      "--js-flags", type=str, default="",
      help="specify additional V8 flags")
  subparsers["run"].add_argument(
      "-u", "--user-data-dir", type=str, metavar="<path>",
      help="specify user data dir (default is temporary)")
  subparsers["run"].add_argument(
      "-c", "--with-chrome", type=str, metavar="<path>",
      default="/usr/bin/google-chrome",
      help="specify chrome executable to use")
  subparsers["run"].add_argument(
      "-r", "--retries", type=int, metavar="<num>",
      help="specify retries if website is down (default: forever)")
  subparsers["run"].add_argument(
      "--no-url", dest="print_url", action="store_false", default=True,
      help="do not include url in statistics file")
  subparsers["run"].add_argument(
      "--domain", type=str, default="",
      help="specify the output file domain name")
  subparsers["run"].add_argument(
      "-n", "--repeat", type=int, metavar="<num>",
      help="specify iterations for each website (default: once)")

  def add_replay_args(subparser):
    subparser.add_argument(
        "-k", "--refresh", type=int, metavar="<num>", default=0,
        help="specify refreshes for each iteration (default: 0)")
    subparser.add_argument(
        "--replay-wpr", type=str, metavar="<path>",
        help="use the specified web page replay (.wpr) archive")
    subparser.add_argument(
        "--replay-bin", type=str, metavar="<path>",
        help="specify the replay.py script typically located in " \
             "$CHROMIUM/src/third_party/webpagereplay/replay.py")
    subparser.add_argument(
        "-f", "--sites-file", type=str, metavar="<path>",
        help="specify file containing benchmark websites")
    subparser.add_argument(
        "-t", "--timeout", type=int, metavar="<seconds>", default=60,
        help="specify seconds before chrome is killed")
    subparser.add_argument(
        "-p", "--port-offset", type=int, metavar="<offset>", default=0,
        help="specify the offset for the replay server's default ports")
    subparser.add_argument(
        "-l", "--log-stderr", type=str, metavar="<path>",
        help="specify where chrome's stderr should go (default: /dev/null)")
    subparser.add_argument(
        "sites", type=str, metavar="<URL>", nargs="*",
        help="specify benchmark website")
  add_replay_args(subparsers["run"])

  # Command: replay.
  subparsers["replay"] = subparser_adder.add_parser(
      "replay", help="Run the replay server for debugging purposes")
  subparsers["replay"].set_defaults(
      func=do_run_replay_server, error=subparsers["replay"].error)
  add_replay_args(subparsers["replay"])

  # Command: stats.
  subparsers["stats"] = subparser_adder.add_parser(
      "stats", help="Analyze the result files created by the 'run' command.")
  subparsers["stats"].set_defaults(
      func=do_stats, error=subparsers["stats"].error)
  subparsers["stats"].add_argument(
      "-l", "--limit", type=int, metavar="<num>", default=0,
      help="limit how many items to print (default: none)")
  subparsers["stats"].add_argument(
      "-s", "--sort", choices=["asc", "desc"], default="asc",
      help="specify sorting order (default: ascending)")
  subparsers["stats"].add_argument(
      "-n", "--no-total", dest="totals", action="store_false", default=True,
      help="do not print totals")
  subparsers["stats"].add_argument(
      "logfiles", type=str, metavar="<logfile>", nargs="*",
      help="specify log files to parse")
  subparsers["stats"].add_argument(
      "--aggregate", dest="aggregate", action="store_true", default=False,
      help="Create aggregated entries. Adds Group-* entries at the toplevel. " \
      "Additionally creates a Total page with all entries.")

  # Command: json.
  subparsers["json"] = subparser_adder.add_parser(
      "json", help="Collect result files created by the 'run' command into " \
          "a single JSON file.")
  subparsers["json"].set_defaults(
      func=do_json, error=subparsers["json"].error)
  subparsers["json"].add_argument(
      "logdirs", type=str, metavar="<logdir>", nargs="*",
      help="specify directories with log files to parse")
  subparsers["json"].add_argument(
      "--aggregate", dest="aggregate", action="store_true", default=False,
      help="Create aggregated entries. Adds Group-* entries at the toplevel. " \
      "Additionally creates a Total page with all entries.")

  # Command: help.
  subparsers["help"] = subparser_adder.add_parser(
      "help", help="help information")
  subparsers["help"].set_defaults(
      func=lambda args: do_help(parser, subparsers, args),
      error=subparsers["help"].error)
  subparsers["help"].add_argument(
      "help_cmd", type=str, metavar="<command>", nargs="?",
      help="command for which to display help")

  # Execute the command.
  args = parser.parse_args()
  setattr(args, 'script_path', os.path.dirname(sys.argv[0]))
  if args.command == "run" and coexist(args.sites_file, args.sites):
    args.error("use either option --sites-file or site URLs")
    sys.exit(1)
  elif args.command == "run" and not coexist(args.replay_wpr, args.replay_bin):
    args.error("options --replay-wpr and --replay-bin must be used together")
    sys.exit(1)
  else:
    args.func(args)

if __name__ == "__main__":
  sys.exit(main())