# dom_perf.py, revision 23730a6e56a168d1879203e4b3819bb36e3d8f1f
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import json
import math
import os

from telemetry import test
from telemetry.core import util
from telemetry.page import page_measurement
from telemetry.page import page_set
from telemetry.value import merge_values
from telemetry.value import scalar


def _GeometricMean(values):
  """Compute a rounded geometric mean from an array of values."""
  if not values:
    return None
  # Clamp values at a minimum of 0.001 so that math.log never sees a zero or
  # negative input.
  new_values = []
  for value in values:
    if value > 0.001:
      new_values.append(value)
    else:
      new_values.append(0.001)
  # Compute the sum of the logs of the values.
  log_sum = sum(map(math.log, new_values))
  # Raise e to that sum divided by the number of values.
  mean = math.pow(math.e, (log_sum / len(new_values)))
  # Return the rounded mean.
  return int(round(mean))


SCORE_UNIT = 'score (bigger is better)'
SCORE_TRACE_NAME = 'score'


class _DomPerfMeasurement(page_measurement.PageMeasurement):
  @property
  def results_are_the_same_on_every_page(self):
    return False

  def MeasurePage(self, page, tab, results):
    try:
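      # The benchmark page is expected to set the '__domperf_finished' cookie
      # to '1' when it finishes; poll for that before reading the results.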
      def _IsDone():
        return tab.GetCookieByName('__domperf_finished') == '1'
      util.WaitFor(_IsDone, 600)

      data = json.loads(tab.EvaluateJavaScript('__domperf_result'))
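      # '__domperf_result' is assumed to decode to something shaped roughly
      # like {'BenchmarkSuites': [{'name': ..., 'score': ...,
      # 'Benchmarks': [...]}, ...]}; those are the only fields read below.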
      for suite in data['BenchmarkSuites']:
        # Skip benchmarks that we didn't actually run this time around.
        if len(suite['Benchmarks']) or suite['score']:
          results.Add(SCORE_TRACE_NAME, SCORE_UNIT,
                      suite['score'], suite['name'], 'unimportant')
    finally:
      tab.EvaluateJavaScript('document.cookie = "__domperf_finished=0"')

  def DidRunTest(self, browser, results):
    # Report the geometric mean of the per-page scores as the combined total.
    combined = merge_values.MergeLikeValuesFromDifferentPages(
        results.all_page_specific_values,
        group_by_name_suffix=True)
    combined_score = [x for x in combined if x.name == SCORE_TRACE_NAME][0]
    total = _GeometricMean(combined_score.values)
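    # For example, per-page scores of 10000, 20000 and 40000 would combine to
    # a 'Total.score' of 20000.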
    results.AddSummaryValue(
        scalar.ScalarValue(None, 'Total.' + SCORE_TRACE_NAME, SCORE_UNIT,
                           total))


@test.Disabled('android', 'linux')
class DomPerf(test.Test):
  """A suite of JavaScript benchmarks for exercising the browser's DOM.

  The final score is computed as the geometric mean of the individual results.
  Scores are not comparable across benchmark suite versions, and higher scores
  mean better performance: Bigger is better!"""
  test = _DomPerfMeasurement

  def CreatePageSet(self, options):
    dom_perf_dir = os.path.join(util.GetChromiumSrcDir(), 'data', 'dom_perf')
    base_page = 'file://run.html?reportInJS=1&run='
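    # Each page runs a single suite; for example, the first entry below
    # resolves to file://run.html?reportInJS=1&run=Accessors.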
    return page_set.PageSet.FromDict({
        'pages': [
          { 'url': base_page + 'Accessors' },
          { 'url': base_page + 'CloneNodes' },
          { 'url': base_page + 'CreateNodes' },
          { 'url': base_page + 'DOMDivWalk' },
          { 'url': base_page + 'DOMTable' },
          { 'url': base_page + 'DOMWalk' },
          { 'url': base_page + 'Events' },
          { 'url': base_page + 'Get+Elements' },
          { 'url': base_page + 'GridSort' },
          { 'url': base_page + 'Template' }
          ]
        }, dom_perf_dir)