# Copyright (C) 2013 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

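"""Unit tests for webkitpy.performance_tests.perftest."""
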
import StringIO
import json
import math
import unittest

from webkitpy.common.host_mock import MockHost
from webkitpy.common.system.outputcapture import OutputCapture
from webkitpy.layout_tests.port.driver import DriverOutput
from webkitpy.layout_tests.port.test import TestDriver
from webkitpy.layout_tests.port.test import TestPort
from webkitpy.performance_tests.perftest import ChromiumStylePerfTest
from webkitpy.performance_tests.perftest import PerfTest
from webkitpy.performance_tests.perftest import PerfTestMetric
from webkitpy.performance_tests.perftest import PerfTestFactory
from webkitpy.performance_tests.perftest import SingleProcessPerfTest


class MockPort(TestPort):
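    """A TestPort backed by a MockHost, so these tests never touch the real host system."""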
    def __init__(self, custom_run_test=None):
        super(MockPort, self).__init__(host=MockHost(), custom_run_test=custom_run_test)


class TestPerfTestMetric(unittest.TestCase):
    def test_init_set_missing_unit(self):
        self.assertEqual(PerfTestMetric('Time', iterations=[1, 2, 3, 4, 5]).unit(), 'ms')
        self.assertEqual(PerfTestMetric('Malloc', iterations=[1, 2, 3, 4, 5]).unit(), 'bytes')
        self.assertEqual(PerfTestMetric('JSHeap', iterations=[1, 2, 3, 4, 5]).unit(), 'bytes')

    def test_init_set_time_metric(self):
        self.assertEqual(PerfTestMetric('Time', 'ms').name(), 'Time')
        self.assertEqual(PerfTestMetric('Time', 'fps').name(), 'FrameRate')
        self.assertEqual(PerfTestMetric('Time', 'runs/s').name(), 'Runs')

    def test_has_values(self):
        self.assertFalse(PerfTestMetric('Time').has_values())
        self.assertTrue(PerfTestMetric('Time', iterations=[1]).has_values())

    def test_append(self):
        metric = PerfTestMetric('Time')
        metric2 = PerfTestMetric('Time')
        self.assertFalse(metric.has_values())
        self.assertFalse(metric2.has_values())

        metric.append_group([1])
        self.assertTrue(metric.has_values())
        self.assertFalse(metric2.has_values())
        self.assertEqual(metric.grouped_iteration_values(), [[1]])
        self.assertEqual(metric.flattened_iteration_values(), [1])

        metric.append_group([2])
        self.assertEqual(metric.grouped_iteration_values(), [[1], [2]])
        self.assertEqual(metric.flattened_iteration_values(), [1, 2])

        metric2.append_group([3])
        self.assertTrue(metric2.has_values())
        self.assertEqual(metric.flattened_iteration_values(), [1, 2])
        self.assertEqual(metric2.flattened_iteration_values(), [3])

        metric.append_group([4, 5])
        self.assertEqual(metric.grouped_iteration_values(), [[1], [2], [4, 5]])
        self.assertEqual(metric.flattened_iteration_values(), [1, 2, 4, 5])


class TestPerfTest(unittest.TestCase):
    def _assert_results_are_correct(self, test, output):
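        # Stub run_single() to return the canned driver output, then check that
        # _run_with_driver() parses the expected 'Time' values out of it.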
        test.run_single = lambda driver, path, time_out_ms: output
        self.assertTrue(test._run_with_driver(None, None))
        self.assertEqual(test._metrics.keys(), ['Time'])
        self.assertEqual(test._metrics['Time'].flattened_iteration_values(), [1080, 1120, 1095, 1101, 1104])

    def test_parse_output(self):
        output = DriverOutput("""
Running 20 times
Ignoring warm-up run (1115)

Time:
values 1080, 1120, 1095, 1101, 1104 ms
avg 1100 ms
median 1101 ms
stdev 14.50862 ms
min 1080 ms
max 1120 ms
""", image=None, image_hash=None, audio=None)
        output_capture = OutputCapture()
        output_capture.capture_output()
        try:
            test = PerfTest(MockPort(), 'some-test', '/path/some-dir/some-test')
            self._assert_results_are_correct(test, output)
        finally:
            actual_stdout, actual_stderr, actual_logs = output_capture.restore_output()
        self.assertEqual(actual_stdout, '')
        self.assertEqual(actual_stderr, '')
        self.assertEqual(actual_logs, '')

    def test_parse_output_with_failing_line(self):
        output = DriverOutput("""
Running 20 times
Ignoring warm-up run (1115)

some-unrecognizable-line

Time:
values 1080, 1120, 1095, 1101, 1104 ms
avg 1100 ms
median 1101 ms
stdev 14.50862 ms
min 1080 ms
max 1120 ms
""", image=None, image_hash=None, audio=None)
        output_capture = OutputCapture()
        output_capture.capture_output()
        try:
            test = PerfTest(MockPort(), 'some-test', '/path/some-dir/some-test')
            test.run_single = lambda driver, path, time_out_ms: output
            self.assertFalse(test._run_with_driver(None, None))
        finally:
            actual_stdout, actual_stderr, actual_logs = output_capture.restore_output()
        self.assertEqual(actual_stdout, '')
        self.assertEqual(actual_stderr, '')
        self.assertEqual(actual_logs, 'ERROR: some-unrecognizable-line\n')

    def test_parse_output_with_description(self):
        output = DriverOutput("""
Description: this is a test description.

Running 20 times
Ignoring warm-up run (1115)

Time:
values 1080, 1120, 1095, 1101, 1104 ms
avg 1100 ms
median 1101 ms
stdev 14.50862 ms
min 1080 ms
max 1120 ms""", image=None, image_hash=None, audio=None)
        test = PerfTest(MockPort(), 'some-test', '/path/some-dir/some-test')
        self._assert_results_are_correct(test, output)
        self.assertEqual(test.description(), 'this is a test description.')

    def test_ignored_stderr_lines(self):
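        # _filter_output() should drop known-harmless stderr noise (the unknown
        # option complaint, proxy_service warnings, and the Android font INFO
        # line) while keeping genuine warnings and errors.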
        test = PerfTest(MockPort(), 'some-test', '/path/some-dir/some-test')
        output_with_lines_to_ignore = DriverOutput('', image=None, image_hash=None, audio=None, error="""
Unknown option: --foo-bar
Should not be ignored
[WARNING:proxy_service.cc] bad moon a-rising
[WARNING:chrome.cc] Something went wrong
[INFO:SkFontHost_android.cpp(1158)] Use Test Config File Main /data/local/tmp/drt/android_main_fonts.xml, Fallback /data/local/tmp/drt/android_fallback_fonts.xml, Font Dir /data/local/tmp/drt/fonts/
[ERROR:main.cc] The sky has fallen""")
        test._filter_output(output_with_lines_to_ignore)
        self.assertEqual(output_with_lines_to_ignore.error,
            "Should not be ignored\n"
            "[WARNING:chrome.cc] Something went wrong\n"
            "[ERROR:main.cc] The sky has fallen")

    def test_parse_output_with_subtests(self):
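        # Per-subtest value lines ("name: [...]") should be accepted without
        # being reported as unrecognized output.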
        output = DriverOutput("""
Running 20 times
some test: [1, 2, 3, 4, 5]
other test = else: [6, 7, 8, 9, 10]
Ignoring warm-up run (1115)

Time:
values 1080, 1120, 1095, 1101, 1104 ms
avg 1100 ms
median 1101 ms
stdev 14.50862 ms
min 1080 ms
max 1120 ms
""", image=None, image_hash=None, audio=None)
        output_capture = OutputCapture()
        output_capture.capture_output()
        try:
            test = PerfTest(MockPort(), 'some-test', '/path/some-dir/some-test')
            self._assert_results_are_correct(test, output)
        finally:
            actual_stdout, actual_stderr, actual_logs = output_capture.restore_output()
        self.assertEqual(actual_stdout, '')
        self.assertEqual(actual_stderr, '')
        self.assertEqual(actual_logs, '')


class TestSingleProcessPerfTest(unittest.TestCase):
    def test_use_only_one_process(self):
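        # SingleProcessPerfTest should finish the whole run with exactly one
        # call to run_single(); count the invocations to check.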
        called = [0]

        def run_single(driver, path, time_out_ms):
            called[0] += 1
            return DriverOutput("""
Running 20 times
Ignoring warm-up run (1115)

Time:
values 1080, 1120, 1095, 1101, 1104 ms
avg 1100 ms
median 1101 ms
stdev 14.50862 ms
min 1080 ms
max 1120 ms""", image=None, image_hash=None, audio=None)

        test = SingleProcessPerfTest(MockPort(), 'some-test', '/path/some-dir/some-test')
        test.run_single = run_single
        self.assertTrue(test.run(0))
        self.assertEqual(called[0], 1)


class TestPerfTestFactory(unittest.TestCase):
    def test_regular_test(self):
        test = PerfTestFactory.create_perf_test(MockPort(), 'some-dir/some-test', '/path/some-dir/some-test')
        self.assertEqual(test.__class__, PerfTest)

    def test_inspector_test(self):
        test = PerfTestFactory.create_perf_test(MockPort(), 'inspector/some-test', '/path/inspector/some-test')
        self.assertEqual(test.__class__, ChromiumStylePerfTest)