telemetry_AFDOGenerate.py revision d085cf6e4352c2cc62625ac3d161b1234b56e869
# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""
Test to generate the AFDO profile for a set of ChromeOS benchmarks.

This will run a pre-determined set of benchmarks on the DUT under
the monitoring of the Linux "perf" tool. The resulting perf.data
file will then be copied to Google Storage (GS) where it can be
used by the AFDO optimized build.

Given that the telemetry benchmarks are quite unstable on ChromeOS at
this point, this test also supports a mode where the benchmarks are
executed outside of the telemetry framework. It is not the same as
executing the benchmarks under telemetry because no telemetry
measurement is taken, but for the purposes of profiling Chrome it
should be pretty close.

Example invocation:
/usr/bin/test_that --debug --board=lumpy <DUT IP>
  --args="ignore_failures=True local=True gs_test_location=True"
  telemetry_AFDOGenerate
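
Another example (a sketch; it assumes the same DUT and board as above),
which runs the full Telemetry benchmark list instead of the default
minimal mode:
/usr/bin/test_that --debug --board=lumpy <DUT IP>
  --args="minimal_telemetry=False local=True gs_test_location=True"
  telemetry_AFDOGenerate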
24"""
25
26import bz2
27import logging
28import os
29
from autotest_lib.client.common_lib import error
from autotest_lib.server import autotest
from autotest_lib.server import profilers
from autotest_lib.server import test
from autotest_lib.server import utils
from autotest_lib.server.cros import telemetry_runner

# List of benchmarks to run to capture profile information. This is
# based on the "perf_v2" list and other telemetry benchmarks. The goal is
# to have a short list that is as representative as possible and takes
# a short time to execute. At this point the list of benchmarks is in
# flux. Some of the benchmarks here may not be good for our purposes
# (they take too long or are too flaky).
TELEMETRY_AFDO_BENCHMARKS = [
        'dromaeo.domcoreattr',
        'dromaeo.domcorequery',
        'dromaeo.domcoretraverse',
        'kraken',
        'octane',
        'sunspider']
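
# Note: in the default 'minimal_telemetry' mode (see _parse_args below),
# the list above is not used; the client side test
# telemetry_AFDOGenerateClient loads its own set of pages instead.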


# List of boards where this test can be run.
# Currently, this has only been tested on 'sandybridge' boards.
VALID_BOARDS = ['butterfly', 'lumpy', 'parrot', 'stumpy']

class telemetry_AFDOGenerate(test.test):
    """
    Run one or more telemetry benchmarks under the "perf" monitoring
    tool, generate a "perf.data" file and upload it to GS for consumption
    by the AFDO optimized build.
    """
    version = 1


    def run_once(self, host, args):
        """Run a set of telemetry benchmarks.

        @param host: Host machine where test is run
        @param args: A dictionary of the arguments that were passed
                to this test.
        @returns None.
        """
        self._host = host
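        # get_board() returns a label of the form 'board:<name>', for
        # example 'board:lumpy'; the split keeps only the board name.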
        host_board = host.get_board().split(':')[1]
        if host_board not in VALID_BOARDS:
            raise error.TestFail(
                    'This test cannot be run on board %s' % host_board)

        self._parse_args(args)

        if self._minimal_telemetry:
            self._run_tests_minimal_telemetry()
        else:
            self._telemetry_runner = telemetry_runner.TelemetryRunner(
                    self._host, self._local)

            for benchmark in TELEMETRY_AFDO_BENCHMARKS:
                self._run_test(benchmark)


    def after_run_once(self):
        """After the profile information has been collected, compress it
        and upload it to GS.
        """
        PERF_FILE = 'perf.data'
        COMP_PERF_FILE = 'chromeos-chrome-%s-%s.perf.data'
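        # The two '%s' fields are the architecture and the Chrome version,
        # giving a name such as (for example)
        # chromeos-chrome-amd64-37.0.2062.0.perf.data.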
        perf_data = os.path.join(self.profdir, PERF_FILE)
        comp_data = os.path.join(self.profdir, COMP_PERF_FILE % (
                self._arch, self._version))
        compressed = self._compress_file(perf_data, comp_data)
        self._gs_upload(compressed, os.path.basename(compressed))

        # Also create a copy of this file using "LATEST" as the version so
        # it can be found in case the builder is looking for a version
        # number that does not match. It is OK to use a slightly old
        # version of this file for the optimized build.
        latest_data = COMP_PERF_FILE % (self._arch, 'LATEST')
        latest_compressed = self._get_compressed_name(latest_data)
        self._gs_upload(compressed, latest_compressed)


    def _parse_args(self, args):
        """Parses input arguments to this autotest.

        @param args: Options->values dictionary.
        @raises error.TestFail if a bad option is passed.
        """
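
        # 'args' is typically built from the --args flag of test_that and
        # every value arrives as a string, for example:
        #   {'ignore_failures': 'True', 'local': 'True',
        #    'gs_test_location': 'True'}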

        # Set default values for the options.
        # Architecture for which we are collecting AFDO data.
        self._arch = 'amd64'
        # Use an alternate GS location where everyone can write.
        # Set the default depending on whether this is executing in
        # the lab environment or not.
        self._gs_test_location = not utils.host_is_in_lab_zone(
                self._host.hostname)
        # Ignore individual test failures.
        # TODO(llozano): Change default to False when tests are more stable.
        self._ignore_failures = True
        # Use the local copy of Telemetry instead of the dev server copy.
        self._local = False
        # Chrome version to which the AFDO data corresponds.
        self._version, _ = self._host.get_chrome_version()
        # Try to use the minimal support from Telemetry. The Telemetry
        # benchmarks in ChromeOS are too flaky at this point, so for now
        # this defaults to True.
        self._minimal_telemetry = True

        for option_name, value in args.iteritems():
            if option_name == 'arch':
                self._arch = value
            elif option_name == 'gs_test_location':
                self._gs_test_location = (value == 'True')
            elif option_name == 'ignore_failures':
                self._ignore_failures = (value == 'True')
            elif option_name == 'local':
                self._local = (value == 'True')
            elif option_name == 'minimal_telemetry':
                self._minimal_telemetry = (value == 'True')
            elif option_name == 'version':
                self._version = value
            else:
                raise error.TestFail('Unknown option passed: %s' % option_name)


    def _run_test(self, benchmark):
        """Run the benchmark using Telemetry.

        @param benchmark: Name of the benchmark to run.
        @raises error.TestFail if execution of the benchmark failed and
                failures are not being ignored. Also re-raises any exception
                thrown by run_telemetry_benchmark.
        """
        try:
            result = self._telemetry_runner.run_telemetry_benchmark(benchmark)
        except error.TestBaseException as e:
            if not self._ignore_failures:
                raise
            else:
                logging.info('Ignoring exception from benchmark %s: %s',
                             benchmark, e)
                return

        # We don't generate any keyvals for this run. This is not
        # an official run of the benchmark. We are just running it to get
        # a profile from it.

        if result.status == telemetry_runner.SUCCESS_STATUS:
            logging.info('Benchmark %s succeeded', benchmark)
        else:
            if not self._ignore_failures:
                raise error.TestFail('An error occurred while executing '
                                     'benchmark: %s' % benchmark)
            else:
                logging.info('Ignoring failure from benchmark %s', benchmark)


    def _run_tests_minimal_telemetry(self):
        """Run the benchmarks using the minimal support from Telemetry.

        The benchmarks are run using a client side autotest test. That test
        controls Chrome directly using the chrome.Chrome support and asks
        Chrome to display the benchmark pages directly instead of going
        through the "page sets" and "measurements" support from Telemetry.
        In this way we avoid the Telemetry benchmark support, which is not
        yet stable on ChromeOS.
        """
        AFDO_GENERATE_CLIENT_TEST = 'telemetry_AFDOGenerateClient'

        # We don't want the client test to "inherit" the profiler settings
        # of this test. Doing so would end up with two instances of the
        # profiler (perf) running at the same time.
        # Filed a feature request about this. See crbug/342958.

        # Save the current settings for profilers.
        saved_profilers = self.job.profilers
        saved_default_profile_only = self.job.default_profile_only

        # Reset the state of the profilers.
        self.job.default_profile_only = False
        self.job.profilers = profilers.profilers(self.job)
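        # The freshly created profilers object has no profilers registered
        # on it, so the client test will not start a second 'perf' instance.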

        # Execute the client side test.
        client_at = autotest.Autotest(self._host)
        client_at.run_test(AFDO_GENERATE_CLIENT_TEST, args='')

        # Restore the settings for the profilers.
        self.job.default_profile_only = saved_default_profile_only
        self.job.profilers = saved_profilers


    @staticmethod
    def _get_compressed_name(name):
        """Given a file name, return the corresponding bz2-compressed name.

        @param name: Name of the uncompressed file.
        @returns Name of the compressed file.
        """
        return name + '.bz2'

    @staticmethod
    def _compress_file(unc_file, com_file):
        """Compresses the specified file with bz2.

        @param unc_file: Name of the file to compress.
        @param com_file: Base name for the compressed file; '.bz2' is
                appended to it.
        @raises error.TestFail if compression failed.
        @returns Name of the compressed file.
        """
        dest = ''
        with open(unc_file, 'rb') as inp:
            dest = telemetry_AFDOGenerate._get_compressed_name(com_file)
            with bz2.BZ2File(dest, 'w') as out:
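                # Iterating the input yields chunks split on newline bytes;
                # writing each one back out copies the file byte for byte.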
                for data in inp:
                    out.write(data)
        if not dest or not os.path.isfile(dest):
            raise error.TestFail('Could not compress %s' % unc_file)
        return dest


    def _gs_upload(self, local_file, remote_basename):
        """Uploads a file to a specific location in Google Storage.

        @param local_file: Name of the file to upload.
        @param remote_basename: Base name of the remote file.
        @raises error.TestFail if the upload failed.
        @returns nothing.
        """
        GS_DEST = 'gs://chromeos-prebuilt/afdo-job/canonicals/%s'
        GS_TEST_DEST = 'gs://chromeos-throw-away-bucket/afdo-job/canonicals/%s'
        GS_ACL = 'project-private'

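        # The final destination is, for example,
        # gs://chromeos-prebuilt/afdo-job/canonicals/
        #         chromeos-chrome-amd64-37.0.2062.0.perf.data.bz2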
        gs_dest = GS_TEST_DEST if self._gs_test_location else GS_DEST

        if not utils.gs_upload(local_file,
                               gs_dest % remote_basename,
                               GS_ACL, result_dir=self.resultsdir):
            raise error.TestFail('Unable to gs upload %s to %s as %s' %
                                 (local_file, gs_dest, remote_basename))
268