#!/usr/bin/python -u
# unittest_suite.py @ revision ab583045d1e3e21da15eeb5152c2f808f4aea8ff

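"""Find and run the autotest unit test suites in parallel.

Walks the source tree for files ending in _unittest.py or _test.py, filters
out the optional and long-running modules defined below, and runs each
remaining module as its own test job via utils.parallel.

Illustrative invocations (see the options defined below):

    unittest_suite.py                   # short suite, scanned from the root
    unittest_suite.py --full            # also include long-running tests
    unittest_suite.py -r client         # only scan under <root>/client
    unittest_suite.py some.dotted.module_unittest  # run only the named module(s)
"""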
import os, sys, unittest, optparse
import common
from autotest_lib.utils import parallel
from autotest_lib.client.common_lib.test_utils import unittest as custom_unittest

parser = optparse.OptionParser()
parser.add_option("-r", action="store", type="string", dest="start",
                  default='',
                  help="root directory to start running unittests")
parser.add_option("--full", action="store_true", dest="full", default=False,
                  help="run the full suite, including long-running tests")
parser.add_option("--debug", action="store_true", dest="debug", default=False,
                  help="run in debug mode (serially, without IO redirection)")
parser.add_option("--skip-tests", dest="skip_tests", default='',
                  help="a space-separated list of tests to skip")

parser.set_defaults(module_list=None)

# The following sets define collections of modules that are optional tests
# and, for various reasons, do not need to run as part of the unittest suite.
# Each entry can be a file name or a path relative to the parent folder of
# the folder containing this file (unittest_suite.py). The sets are used to
# filter out any test file with a matching name or matching full path. If a
# file's name is generic enough to risk colliding with files in other
# folders, list its relative path here instead, e.g.
# 'mirror/trigger_unittest.py' rather than just 'trigger_unittest.py'.

REQUIRES_DJANGO = set((
        'monitor_db_unittest.py',
        'monitor_db_functional_test.py',
        'monitor_db_cleanup_test.py',
        'frontend_unittest.py',
        'csv_encoder_unittest.py',
        'rpc_interface_unittest.py',
        'models_test.py',
        'scheduler_models_unittest.py',
        'rpc_utils_unittest.py',
        'site_rpc_utils_unittest.py',
        'execution_engine_unittest.py',
        'service_proxy_lib_test.py',
        'rdb_integration_tests.py',
        'rdb_unittest.py',
        'rdb_hosts_unittest.py',
        'rdb_cache_unittests.py',
        'scheduler_lib_unittest.py',
        'host_scheduler_unittests.py',
        ))

REQUIRES_MYSQLDB = set((
        'migrate_unittest.py',
        'db_utils_unittest.py',
        ))

REQUIRES_GWT = set((
        'client_compilation_unittest.py',
        ))

REQUIRES_SIMPLEJSON = set((
        'resources_test.py',
        'serviceHandler_unittest.py',
        ))

REQUIRES_AUTH = set((
    'trigger_unittest.py',
    ))

REQUIRES_HTTPLIB2 = set((
        ))

REQUIRES_PROTOBUFS = set((
        'job_serializer_unittest.py',
        ))

REQUIRES_SELENIUM = set((
        'ap_configurator_factory_unittest.py',
        'ap_batch_locker_unittest.py',
    ))

LONG_RUNTIME = set((
    'auth_server_unittest.py',
    'base_barrier_unittest.py',
    'logging_manager_test.py',
    'task_loop_unittest.py',  # crbug.com/254030
    ))


SKIP = set((
    # This particular KVM autotest test is not a unittest
    'guest_test.py',
    'ap_configurator_test.py',
    'chaos_base_test.py',
    'chaos_interop_test.py',
    'atomic_group_unittests.py',
    # crbug.com/251395
    'dev_server_test.py',
    'full_release_test.py',
    'scheduler_lib_unittest.py',
    'webstore_test.py',
    ))

LONG_TESTS = (REQUIRES_MYSQLDB |
              REQUIRES_GWT |
              REQUIRES_HTTPLIB2 |
              REQUIRES_AUTH |
              REQUIRES_PROTOBUFS |
              REQUIRES_SELENIUM |
              LONG_RUNTIME)

ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))

# The set of files in LONG_TESTS, with their full paths.
LONG_TESTS_FULL_PATH = {os.path.join(ROOT, t) for t in LONG_TESTS}


class TestFailure(Exception):
    """Exception type for any test failure."""
    pass


def run_test(mod_names, options):
    """
    @param mod_names: A list of individual parts of the module name to import
            and run as a test suite.
    @param options: optparse options.
    """
    if not options.debug:
        parallel.redirect_io()

    print "Running %s" % '.'.join(mod_names)
    mod = common.setup_modules.import_module(mod_names[-1],
                                             '.'.join(mod_names[:-1]))
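    # Load the module's tests with both the standard unittest loader and
    # autotest's custom test_utils loader, so cases written against either
    # framework are collected and run.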
    for ut_module in [unittest, custom_unittest]:
        test = ut_module.defaultTestLoader.loadTestsFromModule(mod)
        suite = ut_module.TestSuite(test)
        runner = ut_module.TextTestRunner(verbosity=2)
        result = runner.run(suite)
        if result.errors or result.failures:
            msg = '%s had %d failures and %d errors.'
            msg %= '.'.join(mod_names), len(result.failures), len(result.errors)
            raise TestFailure(msg)


def scan_for_modules(start, options):
    """Scan folders and find all test modules that are not excluded, i.e. not
    listed in SKIP, passed via --skip-tests, or (unless --full is given)
    listed in LONG_TESTS.

    @param start: The absolute directory to look for tests under.
    @param options: optparse options.
    @return a list of modules to be executed.
    """
    modules = []

    # Copy SKIP so that command-line additions do not mutate the module-level
    # set.
    skip_tests = set(SKIP)
    if options.skip_tests:
        skip_tests.update(options.skip_tests.split())
    skip_tests_full_path = {os.path.join(ROOT, t) for t in skip_tests}

    for dir_path, sub_dirs, file_names in os.walk(start):
        # Only look in and below subdirectories that are python modules.
        if '__init__.py' not in file_names:
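            # In --full runs, also delete stray .pyc files found in
            # non-package directories, presumably so stale bytecode does not
            # accumulate between runs.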
            if options.full:
                for file_name in file_names:
                    if file_name.endswith('.pyc'):
                        os.unlink(os.path.join(dir_path, file_name))
            # Skip all subdirectories below this one, it is not a module.
            del sub_dirs[:]
            if options.debug:
                print 'Skipping', dir_path
            continue  # Skip this directory.

        # Look for unittest files.
        for file_name in file_names:
            if (file_name.endswith('_unittest.py') or
                file_name.endswith('_test.py')):
                file_path = os.path.join(dir_path, file_name)
                if (not options.full and
                    (file_name in LONG_TESTS or
                     file_path in LONG_TESTS_FULL_PATH)):
                    continue
                if (file_name in skip_tests or
                    file_path in skip_tests_full_path):
                    continue
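                # Convert the file path into dotted-module parts; e.g. (path
                # is illustrative) <ROOT>/client/foo_unittest.py becomes
                # ['autotest_lib', 'client', 'foo_unittest'].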
                path_no_py = os.path.join(dir_path, file_name)[:-len('.py')]
                assert path_no_py.startswith(ROOT)
                names = path_no_py[len(ROOT)+1:].split('/')
                modules.append(['autotest_lib'] + names)
                if options.debug:
                    print 'testing', path_no_py
    return modules


def find_and_run_tests(start, options):
    """
    Find and run Python unittest suites below the given directory.  Only look
    in subdirectories of start that are actual importable Python modules.

    @param start: The absolute directory to look for tests under.
    @param options: optparse options.
    """
    if options.module_list:
        modules = []
        for m in options.module_list:
            modules.append(m.split('.'))
    else:
        modules = scan_for_modules(start, options)

    if options.debug:
        print 'Number of test modules found:', len(modules)

    functions = {}
    for module_names in modules:
        # Create a function that will test a particular module.  The
        # module=module_names default argument is a hack to force python to
        # bind the value now rather than at call time.  We then rename the
        # function to make error reporting nicer.
        run_module = lambda module=module_names: run_test(module, options)
        name = '.'.join(module_names)
        run_module.__name__ = name
        functions[run_module] = set()

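    # Hand the per-module functions to ParallelExecute; under --debug they
    # run one at a time (max_simultaneous_procs=1).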
    try:
        dargs = {}
        if options.debug:
            dargs['max_simultaneous_procs'] = 1
        pe = parallel.ParallelExecute(functions, **dargs)
        pe.run_until_completion()
    except parallel.ParallelError, err:
        return err.errors
    return []


def main():
    """Entry point for unittest_suite.py"""
    options, args = parser.parse_args()
    if args:
        options.module_list = args

    # Strip the arguments off the command line, so that the unit tests do not
    # see them.
    del sys.argv[1:]

    absolute_start = os.path.join(ROOT, options.start)
    errors = find_and_run_tests(absolute_start, options)
    if errors:
        print "%d tests resulted in an error/failure:" % len(errors)
        for error in errors:
            print "\t%s" % error
        print "Rerun", sys.argv[0], "--debug to see the failure details."
        sys.exit(1)
    else:
        print "All passed!"
        sys.exit(0)


if __name__ == "__main__":
    main()