experiment_factory.py revision 9847df92a2b5f76ccddc4bf10288819712a8ca47
#!/usr/bin/python

# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""A module to generate experiments."""

import os
import re
import socket

from benchmark import Benchmark
import config
from experiment import Experiment
from label import Label
from label import MockLabel
from results_cache import CacheConditions
import test_flag

# Users may want to run Telemetry tests either individually, or in
# specified sets.  Here we define sets of tests that users may want
# to run together.

telemetry_perfv2_tests = [ 'dromaeo.domcoreattr',
                           'dromaeo.domcoremodify',
                           'dromaeo.domcorequery',
                           'dromaeo.domcoretraverse',
                           'kraken',
# The following benchmark is extremely flaky, so omit it for now.
#                          'memory.top_25',
                           'octane',
                           'robohornet_pro',
                           'smoothness.top_25',
                           'sunspider',
                           ]

telemetry_pagecycler_tests = [ 'page_cycler.indexed_db.basic_insert',
                               'page_cycler.bloat',
                               'page_cycler.dhtml',
                               'page_cycler.intl_ar_fa_he',
                               'page_cycler.intl_es_fr_pt-BR',
                               'page_cycler.intl_hi_ru',
                               'page_cycler.intl_ja_zh',
                               'page_cycler.intl_ko_th_vi',
                               'page_cycler.morejs',
                               'page_cycler.moz',
                               'page_cycler.netsim.top_10',
                               'page_cycler.tough_layout_cases',
                               'page_cycler.typical_25',
                               ]
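# In GetExperiment() below, a benchmark whose test_name is 'all_perfv2' or
# 'all_pagecyclers' is expanded into the corresponding set defined above.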

class ExperimentFactory(object):
  """Factory class for building an Experiment, given an ExperimentFile as input.

  This factory is currently hardcoded to produce an experiment for running
  ChromeOS benchmarks, but the idea is that in the future, other types
  of experiments could be produced.
  """
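  # Example use (a sketch; assumes `experiment_file` is an already-parsed
  # ExperimentFile, and the directory paths are illustrative):
  #
  #   factory = ExperimentFactory()
  #   experiment = factory.GetExperiment(experiment_file,
  #                                      working_directory='/tmp/crosperf',
  #                                      log_dir='/tmp/crosperf/logs')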

  def _AppendBenchmarkSet(self, benchmarks, benchmark_list, test_args,
                          iterations, outlier_range, key_results_only,
                          rm_chroot_tmp, perf_args, suite, use_test_that,
                          show_all_results):
    """Add all the tests in a set to the benchmarks list."""
    for test_name in benchmark_list:
      telemetry_benchmark = Benchmark(test_name, test_name, test_args,
                                      iterations, outlier_range,
                                      key_results_only, rm_chroot_tmp,
                                      perf_args, suite, use_test_that,
                                      show_all_results)
      benchmarks.append(telemetry_benchmark)


  def GetExperiment(self, experiment_file, working_directory, log_dir):
    """Construct an experiment from an experiment file."""
    global_settings = experiment_file.GetGlobalSettings()
    experiment_name = global_settings.GetField("name")
    remote = global_settings.GetField("remote")
    # Strip any double or single quotes that the user may have put around
    # the remote names.
    new_remote = []
    for i in remote:
      c = re.sub('["\']', '', i)
      new_remote.append(c)
    remote = new_remote
    chromeos_root = global_settings.GetField("chromeos_root")
    rm_chroot_tmp = global_settings.GetField("rm_chroot_tmp")
    key_results_only = global_settings.GetField("key_results_only")
    acquire_timeout = global_settings.GetField("acquire_timeout")
    cache_dir = global_settings.GetField("cache_dir")
    config.AddConfig("no_email", global_settings.GetField("no_email"))
    share_users = global_settings.GetField("share_users")
    results_dir = global_settings.GetField("results_dir")
    chrome_src = global_settings.GetField("chrome_src")
    use_test_that = global_settings.GetField("use_test_that")
    show_all_results = global_settings.GetField("show_all_results")
    # Default cache hit conditions. The image checksum in the cache and the
    # computed checksum of the image must match. Also a cache file must exist.
    cache_conditions = [CacheConditions.CACHE_FILE_EXISTS,
                        CacheConditions.CHECKSUMS_MATCH]
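    # Add further cache-hit conditions based on the experiment's global
    # settings (rerun_if_failed, rerun, same_machine, same_specs).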
    if global_settings.GetField("rerun_if_failed"):
      cache_conditions.append(CacheConditions.RUN_SUCCEEDED)
    if global_settings.GetField("rerun"):
      cache_conditions.append(CacheConditions.FALSE)
    if global_settings.GetField("same_machine"):
      cache_conditions.append(CacheConditions.SAME_MACHINE_MATCH)
    if global_settings.GetField("same_specs"):
      cache_conditions.append(CacheConditions.MACHINES_MATCH)

    # Construct benchmarks.
    benchmarks = []
    all_benchmark_settings = experiment_file.GetSettings("benchmark")
    for benchmark_settings in all_benchmark_settings:
      benchmark_name = benchmark_settings.name
      test_name = benchmark_settings.GetField("test_name")
      if not test_name:
        test_name = benchmark_name
      test_args = benchmark_settings.GetField("test_args")
      iterations = benchmark_settings.GetField("iterations")
      outlier_range = benchmark_settings.GetField("outlier_range")
      perf_args = benchmark_settings.GetField("perf_args")
      rm_chroot_tmp = benchmark_settings.GetField("rm_chroot_tmp")
      key_results_only = benchmark_settings.GetField("key_results_only")
      suite = benchmark_settings.GetField("suite")
      use_test_that = benchmark_settings.GetField("use_test_that")
      show_all_results = benchmark_settings.GetField("show_all_results")

      if suite == 'telemetry_Crosperf':
        if test_name == 'all_perfv2':
          self._AppendBenchmarkSet(benchmarks, telemetry_perfv2_tests,
                                   test_args, iterations, outlier_range,
                                   key_results_only, rm_chroot_tmp,
                                   perf_args, suite, use_test_that,
                                   show_all_results)
        elif test_name == 'all_pagecyclers':
          self._AppendBenchmarkSet(benchmarks, telemetry_pagecycler_tests,
                                   test_args, iterations, outlier_range,
                                   key_results_only, rm_chroot_tmp,
                                   perf_args, suite, use_test_that,
                                   show_all_results)
        else:
          benchmark = Benchmark(test_name, test_name, test_args,
                                iterations, outlier_range,
                                key_results_only, rm_chroot_tmp,
                                perf_args, suite, use_test_that,
                                show_all_results)
          benchmarks.append(benchmark)
      else:
        # Add the single benchmark.
        benchmark = Benchmark(benchmark_name, test_name, test_args,
                              iterations, outlier_range,
                              key_results_only, rm_chroot_tmp,
                              perf_args, suite, use_test_that,
                              show_all_results)
        benchmarks.append(benchmark)

    # Construct labels.
    labels = []
    all_label_settings = experiment_file.GetSettings("label")
    all_remote = list(remote)
    for label_settings in all_label_settings:
      label_name = label_settings.name
      image = label_settings.GetField("chromeos_image")
      chromeos_root = label_settings.GetField("chromeos_root")
      board = label_settings.GetField("board")
      my_remote = label_settings.GetField("remote")
      new_remote = []
      for i in my_remote:
        c = re.sub('["\']', '', i)
        new_remote.append(c)
      my_remote = new_remote

      image_md5sum = label_settings.GetField("md5sum")
      cache_dir = label_settings.GetField("cache_dir")
      chrome_src = label_settings.GetField("chrome_src")

      # TODO(yunlian): We should consolidate code in machine_manager.py
      # to determine whether we are running from within Google or not.
      if ("corp.google.com" in socket.gethostname() and
          (not my_remote
           or (my_remote == remote
               and global_settings.GetField("board") != board))):
        my_remote = self.GetDefaultRemotes(board)
      if global_settings.GetField("same_machine") and len(my_remote) > 1:
        raise Exception("Only one remote is allowed when same_machine "
                        "is turned on")
      all_remote += my_remote
      image_args = label_settings.GetField("image_args")
      if test_flag.GetTestMode():
        label = MockLabel(label_name, image, chromeos_root, board, my_remote,
                          image_args, image_md5sum, cache_dir, chrome_src)
      else:
        label = Label(label_name, image, chromeos_root, board, my_remote,
                      image_args, image_md5sum, cache_dir, chrome_src)
      labels.append(label)

    email = global_settings.GetField("email")
    all_remote = list(set(all_remote))
    experiment = Experiment(experiment_name, all_remote,
                            working_directory, chromeos_root,
                            cache_conditions, labels, benchmarks,
                            experiment_file.Canonicalize(),
                            email, acquire_timeout, log_dir, share_users,
                            results_dir)

    return experiment

  def GetDefaultRemotes(self, board):
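    """Return the default list of remote machines for |board|.

    The mapping is read from the "default_remotes" file that lives next to
    this module; each line is expected to hold a board name, a colon, and a
    space-separated list of remotes.  Raises an Exception if the board has
    no entry or the file cannot be read.
    """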
    default_remotes_file = os.path.join(os.path.dirname(__file__),
                                        "default_remotes")
    try:
      with open(default_remotes_file) as f:
        for line in f:
          key, v = line.split(":")
          if key.strip() == board:
            remotes = v.strip().split(" ")
            if remotes:
              return remotes
            else:
              raise Exception("There is no remote for {0}".format(board))
    except IOError:
      raise Exception("IOError while reading file {0}"
                      .format(default_remotes_file))
    else:
      raise Exception("There is no remote for {0}".format(board))