# benchmark_metrics_experiment_unittest.py revision 45a52fb3586d4fbaf6ffc5f3595ae417b5d148be
1#!/usr/bin/python2
2
3# Copyright 2016 The Chromium OS Authors. All rights reserved.
4# Use of this source code is governed by a BSD-style license that can be
5# found in the LICENSE file.
6"""Unit tests for the benchmark_metrics_experiment module."""
7
8import os
9import tempfile
10import unittest
11
12from benchmark_metrics_experiment import MetricsExperiment
13
14
15class MetricsExperimentTest(unittest.TestCase):
16  """Test class for MetricsExperiment class."""
17
18  def __init__(self, *args, **kwargs):
19    super(MetricsExperimentTest, self).__init__(*args, **kwargs)
20    self._pairwise_inclusive_count_test_file = \
21        'testdata/input/pairwise_inclusive_count_test.csv'
22    self._pairwise_inclusive_count_reference_file = \
23        'testdata/input/pairwise_inclusive_count_reference.csv'
24    self._inclusive_count_test_file = \
25        'testdata/input/inclusive_count_test.csv'
26    self._inclusive_count_reference_file = \
27        'testdata/input/inclusive_count_reference.csv'
28    self._cwp_function_groups_file = \
29        'testdata/input/cwp_function_groups.txt'
30
31  def _CheckFileContents(self, file_name, expected_content_lines):
32    with open(file_name) as input_file:
33      result_content_lines = input_file.readlines()
34      self.assertListEqual(expected_content_lines, result_content_lines)
35
36  def testExperiment(self):
37    group_statistics_file, group_statistics_filename = tempfile.mkstemp()
38
39    os.close(group_statistics_file)
40
41    function_statistics_file, function_statistics_filename = tempfile.mkstemp()
42
43    os.close(function_statistics_file)
44
45
46    expected_group_statistics_lines = \
47        ['group,file_path,function_count,distance_cum,distance_avg,score_cum,'
48         'score_avg\n',
49         'ab,/a/b,2.0,3.01,1.505,8.26344228895,4.13172114448\n',
50         'e,/e,2.0,2.0,1.0,27.5,13.75\n',
51         'cd,/c/d,2.0,2.0,1.0,27.5,13.75']
52    expected_function_statistics_lines = \
53        ['function,file,distance,score\n',
54         'func_i,/c/d/file_i,1.0,17.6\n',
55         'func_j,/e/file_j,1.0,27.5\n',
56         'func_f,/a/b/file_f,1.59,1.4465408805\n',
57         'func_h,/c/d/file_h,1.0,9.9\n',
58         'func_k,/e/file_k,1.0,0.0\n',
59         'func_g,/a/b/file_g,1.42,6.81690140845']
60    metric_experiment = \
61        MetricsExperiment(self._pairwise_inclusive_count_reference_file,
62                          self._pairwise_inclusive_count_test_file,
63                          self._inclusive_count_reference_file,
64                          self._inclusive_count_test_file,
65                          self._cwp_function_groups_file,
66                          group_statistics_filename,
67                          function_statistics_filename)
68
69    metric_experiment.PerformComputation()
70    self._CheckFileContents(group_statistics_filename,
71                            expected_group_statistics_lines)
72    self._CheckFileContents(function_statistics_filename,
73                            expected_function_statistics_lines)
74    os.remove(group_statistics_filename)
75    os.remove(function_statistics_filename)
76
77
if __name__ == '__main__':
  # Discover and run all tests in this module when executed directly.
  unittest.main()
80