# Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

from datetime import datetime
from django.utils import simplejson
import logging

from model.testfile import TestFile

JSON_RESULTS_FILE = "results.json"
JSON_RESULTS_FILE_SMALL = "results-small.json"
JSON_RESULTS_PREFIX = "ADD_RESULTS("
JSON_RESULTS_SUFFIX = ");"
JSON_RESULTS_VERSION_KEY = "version"
JSON_RESULTS_BUILD_NUMBERS = "buildNumbers"
JSON_RESULTS_TESTS = "tests"
JSON_RESULTS_RESULTS = "results"
JSON_RESULTS_TIMES = "times"
JSON_RESULTS_PASS = "P"
JSON_RESULTS_NO_DATA = "N"
JSON_RESULTS_MIN_TIME = 1
JSON_RESULTS_VERSION = 3
JSON_RESULTS_MAX_BUILDS = 750
JSON_RESULTS_MAX_BUILDS_SMALL = 200
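
# Illustrative sketch of the JSONP-wrapped file format this module reads and
# writes. The builder name, build numbers and test name below are hypothetical;
# the keys and the run-length encoding follow the constants above:
#
#   ADD_RESULTS({
#       "version": 3,
#       "SomeBuilder": {
#           "buildNumbers": ["1002", "1001"],
#           "tests": {
#               "fast/example.html": {
#                   "results": [[1, "P"], [1, "N"]],  # run-length encoded
#                   "times": [[2, 0]]
#               }
#           }
#       }
#   });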


class JsonResults(object):
    @classmethod
    def _strip_prefix_suffix(cls, data):
        """Strip out prefix and suffix of json results string.

        Args:
            data: json file content.

        Returns:
            json string without prefix and suffix.
        """

        assert data.startswith(JSON_RESULTS_PREFIX)
        assert data.endswith(JSON_RESULTS_SUFFIX)

        return data[len(JSON_RESULTS_PREFIX):
                    len(data) - len(JSON_RESULTS_SUFFIX)]

    @classmethod
    def _generate_file_data(cls, json, sort_keys=False):
        """Given a json object, generate file content data by serializing
           it and adding the prefix and suffix.

        Args:
            json: json object to serialize.
            sort_keys: whether to sort keys when serializing.

        Returns:
            json file data.
        """

        data = simplejson.dumps(json, separators=(',', ':'),
            sort_keys=sort_keys)
        return JSON_RESULTS_PREFIX + data + JSON_RESULTS_SUFFIX

    @classmethod
    def _load_json(cls, file_data):
        """Load json file content into a python object.

        Args:
            file_data: json file content.

        Returns:
            json object or
            None on failure.
        """

        json_results_str = cls._strip_prefix_suffix(file_data)
        if not json_results_str:
            logging.warning("No json results data.")
            return None

        try:
            return simplejson.loads(json_results_str)
        except Exception, err:
            logging.debug(json_results_str)
            logging.error("Failed to load json results: %s", str(err))
            return None

    @classmethod
    def _merge_json(cls, aggregated_json, incremental_json, num_runs):
        """Merge incremental json into aggregated json results.

        Args:
            aggregated_json: aggregated json object.
            incremental_json: incremental json object.
            num_runs: number of total runs to include.

        Returns:
            True if merge succeeds or
            False on failure.
        """

        # Merge non-test property data.
        # Test properties are merged in _merge_tests.
        if not cls._merge_non_test_data(aggregated_json, incremental_json, num_runs):
            return False

        # Merge test results and times.
        incremental_tests = incremental_json[JSON_RESULTS_TESTS]
        if incremental_tests:
            aggregated_tests = aggregated_json[JSON_RESULTS_TESTS]
            cls._merge_tests(aggregated_tests, incremental_tests, num_runs)

        return True

    @classmethod
    def _merge_non_test_data(cls, aggregated_json, incremental_json, num_runs):
        """Merge incremental non-test property data into aggregated json results.

        Args:
            aggregated_json: aggregated json object.
            incremental_json: incremental json object.
            num_runs: number of total runs to include.

        Returns:
            True if merge succeeds or
            False on failure.
        """

        incremental_builds = incremental_json[JSON_RESULTS_BUILD_NUMBERS]
        aggregated_builds = aggregated_json[JSON_RESULTS_BUILD_NUMBERS]
        aggregated_build_number = int(aggregated_builds[0])
        # Loop through all incremental builds, starting from the oldest run.
        for index in reversed(range(len(incremental_builds))):
            build_number = int(incremental_builds[index])
            logging.debug("Merging build %s, incremental json index: %d.",
                build_number, index)

            # Bail out if any build number in the incremental json results is
            # older than the most recent build in the aggregated results.
            # FIXME: make this case work.
            if build_number < aggregated_build_number:
                logging.warning(("Build %d in incremental json is older than "
                    "the most recent build in aggregated results: %d"),
                    build_number, aggregated_build_number)
                return False

            # Bail out if the build number is duplicated.
            # FIXME: skip the duplicated build and merge the rest of the results.
            #        Need to be careful when skipping the corresponding value in
            #        _merge_tests because the property data for each test could
            #        be accumulated.
            if build_number == aggregated_build_number:
                logging.warning("Duplicate build %d in incremental json",
                    build_number)
                return False

            # Merge this build into the aggregated results.
            cls._merge_one_build(aggregated_json, incremental_json, index, num_runs)

        return True

    @classmethod
    def _merge_one_build(cls, aggregated_json, incremental_json,
                         incremental_index, num_runs):
        """Merge one build of incremental json into aggregated json results.

        Args:
            aggregated_json: aggregated json object.
            incremental_json: incremental json object.
            incremental_index: index of the incremental json results to merge.
            num_runs: number of total runs to include.
        """

        for key in incremental_json.keys():
            # Merge json results except "tests" properties (results, times, etc.).
            # "tests" properties will be handled separately.
            if key == JSON_RESULTS_TESTS:
                continue

            if key in aggregated_json:
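                # Prepend this run's value (most recent build first) and
                # truncate the aggregated history to num_runs entries.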
                aggregated_json[key].insert(
                    0, incremental_json[key][incremental_index])
                aggregated_json[key] = \
                    aggregated_json[key][:num_runs]
            else:
                aggregated_json[key] = incremental_json[key]

    @classmethod
    def _merge_tests(cls, aggregated_json, incremental_json, num_runs):
        """Merge "tests" properties: results and times.

        Args:
            aggregated_json: aggregated json object.
            incremental_json: incremental json object.
            num_runs: number of total runs to include.
        """

        all_tests = (set(aggregated_json.iterkeys()) |
                     set(incremental_json.iterkeys()))
        for test_name in all_tests:
            if test_name in aggregated_json:
                aggregated_test = aggregated_json[test_name]
                if test_name in incremental_json:
                    incremental_test = incremental_json[test_name]
                    results = incremental_test[JSON_RESULTS_RESULTS]
                    times = incremental_test[JSON_RESULTS_TIMES]
                else:
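                    # The test has no data in this incremental batch: record
                    # it as one run of no-data ("N") with a zero time so the
                    # aggregated history stays aligned.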
                    results = [[1, JSON_RESULTS_NO_DATA]]
                    times = [[1, 0]]

                cls._insert_item_run_length_encoded(
                    results, aggregated_test[JSON_RESULTS_RESULTS], num_runs)
                cls._insert_item_run_length_encoded(
                    times, aggregated_test[JSON_RESULTS_TIMES], num_runs)
                cls._normalize_results_json(test_name, aggregated_json, num_runs)
            else:
                aggregated_json[test_name] = incremental_json[test_name]

    @classmethod
    def _insert_item_run_length_encoded(cls, incremental_item, aggregated_item, num_runs):
        """Inserts the incremental run-length encoded results into the aggregated
           run-length encoded results.

        Args:
            incremental_item: incremental run-length encoded results.
            aggregated_item: aggregated run-length encoded results.
            num_runs: number of total runs to include.
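
        Example (illustrative): merging incremental [[2, "F"]] into aggregated
        [[3, "F"], [1, "P"]] with num_runs=10 gives [[5, "F"], [1, "P"]], while
        an item with a different result type is prepended, e.g. [[1, "P"]]
        merged into [[3, "F"]] gives [[1, "P"], [3, "F"]].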
        """

        for item in incremental_item:
            if aggregated_item and item[1] == aggregated_item[0][1]:
                aggregated_item[0][0] = min(
                    aggregated_item[0][0] + item[0], num_runs)
            else:
                aggregated_item.insert(0, item)

    @classmethod
    def _normalize_results_json(cls, test_name, aggregated_json, num_runs):
        """Prune tests where all runs pass or tests that no longer exist and
        truncate all results to num_runs.

        Args:
          test_name: Name of the test.
          aggregated_json: The JSON object with all the test results for
                           this builder.
          num_runs: number of total runs to include.
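
          For example (illustrative), a test whose aggregated results are all
          passes and whose recorded times are all below JSON_RESULTS_MIN_TIME
          is deleted from aggregated_json entirely.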
        """

        aggregated_test = aggregated_json[test_name]
        aggregated_test[JSON_RESULTS_RESULTS] = \
            cls._remove_items_over_max_number_of_builds(
                aggregated_test[JSON_RESULTS_RESULTS], num_runs)
        aggregated_test[JSON_RESULTS_TIMES] = \
            cls._remove_items_over_max_number_of_builds(
                aggregated_test[JSON_RESULTS_TIMES], num_runs)

        is_all_pass = cls._is_results_all_of_type(
            aggregated_test[JSON_RESULTS_RESULTS], JSON_RESULTS_PASS)
        is_all_no_data = cls._is_results_all_of_type(
            aggregated_test[JSON_RESULTS_RESULTS], JSON_RESULTS_NO_DATA)

        max_time = max(
            [time[1] for time in aggregated_test[JSON_RESULTS_TIMES]])
        # Remove all-pass/no-data results to reduce noise and file size. If a
        # test passes every run but takes >= JSON_RESULTS_MIN_TIME to run,
        # don't throw away the data.
        if (is_all_no_data or
           (is_all_pass and max_time < JSON_RESULTS_MIN_TIME)):
            del aggregated_json[test_name]

    @classmethod
    def _remove_items_over_max_number_of_builds(cls, encoded_list, num_runs):
        """Truncates the run-length encoded list after the item at which the
        cumulative number of builds reaches the max number to track.

        Args:
          encoded_list: run-length encoded results. An array of arrays, e.g.
              [[3,'A'],[1,'Q']] encodes AAAQ.
          num_runs: number of total runs to include.
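
          For example (illustrative), [[3,'A'],[1,'Q']] with num_runs=2 is
          truncated to [[3,'A']]: the item that crosses the limit is kept
          whole and everything after it is dropped.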
        """
        num_builds = 0
        index = 0
        for result in encoded_list:
            num_builds = num_builds + result[0]
            index = index + 1
            if num_builds >= num_runs:
                return encoded_list[:index]

        return encoded_list

    @classmethod
    def _is_results_all_of_type(cls, results, type):
        """Returns whether all the results are of the given type
        (e.g. all passes).
        """

        return len(results) == 1 and results[0][1] == type

    @classmethod
    def _check_json(cls, builder, json):
        """Check whether the given json is valid.

        Args:
            builder: builder name this json is for.
            json: json object to check.

        Returns:
            True if the json is valid or
            False otherwise.
        """

        version = json[JSON_RESULTS_VERSION_KEY]
        if version > JSON_RESULTS_VERSION:
            logging.error("Results JSON version '%s' is not supported.",
                version)
            return False

        if builder not in json:
            logging.error("Builder '%s' is not in json results.", builder)
            return False

        results_for_builder = json[builder]
        if JSON_RESULTS_BUILD_NUMBERS not in results_for_builder:
            logging.error("Missing build number in json results.")
            return False

        return True

    @classmethod
    def merge(cls, builder, aggregated, incremental, num_runs, sort_keys=False):
        """Merge incremental json file data with aggregated json file data.

        Args:
            builder: builder name.
            aggregated: aggregated json file data.
            incremental: incremental json file data.
            num_runs: number of total runs to include.
            sort_keys: whether or not to sort keys when dumping json results.

        Returns:
            Merged json file data if merge succeeds or
            None on failure.
        """

        if not incremental:
            logging.warning("Nothing to merge.")
            return None

        logging.info("Loading incremental json...")
        incremental_json = cls._load_json(incremental)
        if not incremental_json:
            return None

        logging.info("Checking incremental json...")
        if not cls._check_json(builder, incremental_json):
            return None

        logging.info("Loading existing aggregated json...")
        aggregated_json = cls._load_json(aggregated)
        if not aggregated_json:
            return incremental

        logging.info("Checking existing aggregated json...")
        if not cls._check_json(builder, aggregated_json):
            return incremental

        logging.info("Merging json results...")
        try:
            if not cls._merge_json(aggregated_json[builder], incremental_json[builder], num_runs):
                return None
        except Exception, err:
            logging.error("Failed to merge json results: %s", str(err))
            return None

        aggregated_json[JSON_RESULTS_VERSION_KEY] = JSON_RESULTS_VERSION

        return cls._generate_file_data(aggregated_json, sort_keys)

    @classmethod
    def update(cls, master, builder, test_type, incremental):
        """Update datastore json file data by merging it with incremental json
           file data. Writes the large file and a small file; the small file
           just stores fewer runs.

        Args:
            master: master name.
            builder: builder name.
            test_type: type of test results.
            incremental: incremental json file data to merge.

        Returns:
            True if both the large and small files update successfully or
            False on failure.
        """
        small_file_updated = cls.update_file(master, builder, test_type, incremental, JSON_RESULTS_FILE_SMALL, JSON_RESULTS_MAX_BUILDS_SMALL)
        large_file_updated = cls.update_file(master, builder, test_type, incremental, JSON_RESULTS_FILE, JSON_RESULTS_MAX_BUILDS)

        return small_file_updated and large_file_updated

    @classmethod
    def update_file(cls, master, builder, test_type, incremental, filename, num_runs):
        files = TestFile.get_files(master, builder, test_type, filename)
        if files:
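            # Merge the incremental results into the existing aggregated file.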
            file = files[0]
            new_results = cls.merge(builder, file.data, incremental, num_runs)
        else:
            # Use the incremental data if there is no aggregated file to merge.
            file = TestFile()
            file.master = master
            file.builder = builder
            file.test_type = test_type
            file.name = filename
            new_results = incremental
            logging.info("No existing json results, incremental json is saved.")

        if not new_results or not file.save(new_results):
            logging.info(
                "Update failed, master: %s, builder: %s, test_type: %s, name: %s.",
                master, builder, test_type, filename)
            return False

        return True

    @classmethod
    def get_test_list(cls, builder, json_file_data):
        """Get list of test names from aggregated json file data.

        Args:
            builder: builder name.
            json_file_data: json file data that has all test-data and
                            non-test-data.

        Returns:
            json file with test name list only. The json format is the same
            as the one saved in datastore, but all non-test-data and test detail
            results are removed.
        """

        logging.debug("Loading test results json...")
        json = cls._load_json(json_file_data)
        if not json:
            return None

        logging.debug("Checking test results json...")
        if not cls._check_json(builder, json):
            return None

        test_list_json = {}
        tests = json[builder][JSON_RESULTS_TESTS]
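        # Keep only the test names; per-test results and times are dropped.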
        test_list_json[builder] = {
            "tests": dict.fromkeys(tests, {})}

        return cls._generate_file_data(test_list_json)