unittest_suite.py revision af04857a63c7d789713aca41d3bc467adbddf9f1
#!/usr/bin/python -u

import os, sys, unittest, optparse
import common
from autotest_lib.utils import parallel
from autotest_lib.client.common_lib.test_utils import unittest as custom_unittest

parser = optparse.OptionParser()
parser.add_option("-r", action="store", type="string", dest="start",
                  default='',
                  help="root directory to start running unittests")
parser.add_option("--full", action="store_true", dest="full", default=False,
                  help="run the full suite, including the long-running tests")
parser.add_option("--debug", action="store_true", dest="debug", default=False,
                  help="run in debug mode")
parser.add_option("--skip-tests", dest="skip_tests", default='',
                  help="A space-separated list of tests to skip")

parser.set_defaults(module_list=None)

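# Typical invocations (illustrative only; the -r path and module name below are
# examples and may not exist in every checkout):
#   ./unittest_suite.py                      # quick run, long tests skipped
#   ./unittest_suite.py --full               # also run the LONG_TESTS modules
#   ./unittest_suite.py --debug -r client    # serial run, output not redirected
#   ./unittest_suite.py --skip-tests "models_test.py frontend_unittest.py"
#   ./unittest_suite.py autotest_lib.client.common_lib.utils_unittest
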
# The following sets define collections of modules that are optional tests and
# do not need to be executed in the unittest suite, for various reasons. Each
# entry can be a file name or a path relative to the parent folder of the
# folder containing this file (unittest_suite.py). The entries are used to
# filter out any test file with a matching name or matching full path. If a
# file's name is too generic and could collide with files in other folders, it
# is recommended to list its relative path here, e.g.
# 'mirror/trigger_unittest.py' instead of just 'trigger_unittest.py'.
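#
# Illustrative matching (mirrors the checks in scan_for_modules() below): a
# bare entry such as 'trigger_unittest.py' is compared against each file name,
# while a relative-path entry such as 'mirror/trigger_unittest.py' is joined
# with ROOT and compared against the file's full path.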

REQUIRES_DJANGO = set((
        'monitor_db_unittest.py',
        'monitor_db_functional_test.py',
        'monitor_db_cleanup_test.py',
        'frontend_unittest.py',
        'csv_encoder_unittest.py',
        'rpc_interface_unittest.py',
        'models_test.py',
        'scheduler_models_unittest.py',
        'metahost_scheduler_unittest.py',
        'site_metahost_scheduler_unittest.py',
        'rpc_utils_unittest.py',
        'site_rpc_utils_unittest.py',
        'execution_engine_unittest.py',
        'service_proxy_lib_test.py',
        ))

REQUIRES_MYSQLDB = set((
        'migrate_unittest.py',
        'db_utils_unittest.py',
        ))

REQUIRES_GWT = set((
        'client_compilation_unittest.py',
        ))

REQUIRES_SIMPLEJSON = set((
        'resources_test.py',
        'serviceHandler_unittest.py',
        ))

REQUIRES_AUTH = set((
    'trigger_unittest.py',
    ))

REQUIRES_HTTPLIB2 = set((
        ))

REQUIRES_PROTOBUFS = set((
        'job_serializer_unittest.py',
        ))

REQUIRES_SELENIUM = set((
        'ap_configurator_factory_unittest.py',
        'ap_batch_locker_unittest.py',
    ))

LONG_RUNTIME = set((
    'auth_server_unittest.py',
    'base_barrier_unittest.py',
    'logging_manager_test.py',
    'task_loop_unittest.py',  # crbug.com/254030
    ))


SKIP = set((
    # This particular KVM autotest test is not a unittest
    'guest_test.py',
    'ap_configurator_test.py',
    'chaos_base_test.py',
    'chaos_interop_test.py',
    'monitor_db_functional_test.py',
    # crbug.com/249377
    'reporting_unittest.py',
    # crbug.com/251395
    'dev_server_test.py',
    ))

LONG_TESTS = (REQUIRES_MYSQLDB |
              REQUIRES_GWT |
              REQUIRES_HTTPLIB2 |
              REQUIRES_AUTH |
              REQUIRES_PROTOBUFS |
              REQUIRES_SELENIUM |
              LONG_RUNTIME)
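
# How these sets are applied (see scan_for_modules() below): entries in
# LONG_TESTS are only run when --full is given, while entries in SKIP (and any
# names passed via --skip-tests) are never run.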

ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))

# The set of files in LONG_TESTS with their full paths.
LONG_TESTS_FULL_PATH = {os.path.join(ROOT, t) for t in LONG_TESTS}

class TestFailure(Exception):
    """Exception type for any test failure."""
    pass

def run_test(mod_names, options):
    """
    @param mod_names: A list of individual parts of the module name to import
            and run as a test suite.
    @param options: optparse options.
    @raises TestFailure: If any test in the module fails or errors.
    """
    if not options.debug:
        parallel.redirect_io()

    print "Running %s" % '.'.join(mod_names)
    mod = common.setup_modules.import_module(mod_names[-1],
                                             '.'.join(mod_names[:-1]))
    for ut_module in [unittest, custom_unittest]:
        test = ut_module.defaultTestLoader.loadTestsFromModule(mod)
        suite = ut_module.TestSuite(test)
        runner = ut_module.TextTestRunner(verbosity=2)
        result = runner.run(suite)
        if result.errors or result.failures:
            msg = '%s had %d failures and %d errors.'
            msg %= '.'.join(mod_names), len(result.failures), len(result.errors)
            raise TestFailure(msg)
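# Illustrative call (hypothetical module path, shown for clarity only):
#   run_test(['autotest_lib', 'client', 'common_lib', 'utils_unittest'], options)
# imports autotest_lib.client.common_lib.utils_unittest and runs every test
# case it defines under both the stdlib and the custom unittest loaders.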


def scan_for_modules(start, options):
    """Scan folders and find all test modules that are not in the skip list.

    The skip list is SKIP (plus anything passed via --skip-tests), and also
    LONG_TESTS unless --full is given.

    @param start: The absolute directory to look for tests under.
    @param options: optparse options.
    @return a list of modules to be executed.
    """
    modules = []

    # Copy SKIP so that --skip-tests does not mutate the module-level set.
    skip_tests = set(SKIP)
    if options.skip_tests:
        skip_tests.update(options.skip_tests.split())
    skip_tests_full_path = {os.path.join(ROOT, t) for t in skip_tests}

    for dir_path, sub_dirs, file_names in os.walk(start):
        # Only look in and below subdirectories that are python modules.
        if '__init__.py' not in file_names:
            if options.full:
                for file_name in file_names:
                    if file_name.endswith('.pyc'):
                        os.unlink(os.path.join(dir_path, file_name))
            # Skip all subdirectories below this one, it is not a module.
            del sub_dirs[:]
            if options.debug:
                print 'Skipping', dir_path
            continue  # Skip this directory.

        # Look for unittest files.
        for file_name in file_names:
            if (file_name.endswith('_unittest.py') or
                file_name.endswith('_test.py')):
                file_path = os.path.join(dir_path, file_name)
                if (not options.full and
                    (file_name in LONG_TESTS or
                     file_path in LONG_TESTS_FULL_PATH)):
                    continue
                if (file_name in skip_tests or
                    file_path in skip_tests_full_path):
                    continue
                # Drop the '.py' extension; rstrip('.py') would also strip any
                # trailing 'p'/'y' characters from the name itself.
                path_no_py = os.path.join(dir_path, file_name)[:-len('.py')]
                assert path_no_py.startswith(ROOT)
                names = path_no_py[len(ROOT)+1:].split('/')
                modules.append(['autotest_lib'] + names)
                if options.debug:
                    print 'testing', path_no_py
    return modules
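
# Illustrative conversion (hypothetical location, shown for clarity only): a
# test file at os.path.join(ROOT, 'scheduler/monitor_db_unittest.py') becomes
# the module name list ['autotest_lib', 'scheduler', 'monitor_db_unittest'],
# which run_test() then imports.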

def find_and_run_tests(start, options):
    """
    Find and run Python unittest suites below the given directory.  Only look
    in subdirectories of start that are actual importable Python modules.

    @param start: The absolute directory to look for tests under.
    @param options: optparse options.
    """
    if options.module_list:
        modules = []
        for m in options.module_list:
            modules.append(m.split('.'))
    else:
        modules = scan_for_modules(start, options)

    if options.debug:
        print 'Number of test modules found:', len(modules)

    functions = {}
    for module_names in modules:
        # Create a function that will test one particular module.  The
        # module=module_names default forces python to bind the current value
        # now rather than at call time (see the note after this function).  We
        # then rename the function to make error reporting nicer.
        run_module = lambda module=module_names: run_test(module, options)
        name = '.'.join(module_names)
        run_module.__name__ = name
        functions[run_module] = set()

    try:
        dargs = {}
        if options.debug:
            dargs['max_simultaneous_procs'] = 1
        pe = parallel.ParallelExecute(functions, **dargs)
        pe.run_until_completion()
    except parallel.ParallelError, err:
        return err.errors
    return []
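
# Why the default argument matters: a plain closure would capture the loop
# variable itself, so every run_module would end up testing whichever module
# came last.  Minimal illustration (not part of the suite):
#   fns = [lambda m=m: m for m in ('a', 'b')]    # fns[0]() == 'a'
#   bad = [lambda: m for m in ('a', 'b')]        # bad[0]() == 'b'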


def main():
    """Entry point for unittest_suite.py"""
    options, args = parser.parse_args()
    if args:
        options.module_list = args

    # Strip the arguments off the command line, so that the unit tests do not
    # see them.
    del sys.argv[1:]

    absolute_start = os.path.join(ROOT, options.start)
    errors = find_and_run_tests(absolute_start, options)
    if errors:
        print "%d tests resulted in an error/failure:" % len(errors)
        for error in errors:
            print "\t%s" % error
        print "Rerun", sys.argv[0], "--debug to see the failure details."
        sys.exit(1)
    else:
        print "All passed!"
        sys.exit(0)


if __name__ == "__main__":
    main()