unittest_suite.py revision 0e960285b022fad77f0b087a2007867363bf6ab9
1#!/usr/bin/python -u
2
3import os, sys, unittest, optparse
4import common
5from autotest_lib.utils import parallel
6from autotest_lib.client.common_lib.test_utils import unittest as custom_unittest
7
# Command-line options.  By default a shortened suite runs; --full adds the
# long-running/optional tests that are normally filtered out.
parser = optparse.OptionParser()
parser.add_option("-r", action="store", type="string", dest="start",
                  default='',
                  help="root directory to start running unittests")
# Fixed: the old help text described --full as running the *shortened*
# version, which is the opposite of what the flag does.
parser.add_option("--full", action="store_true", dest="full", default=False,
                  help="run the full suite, including long-running tests")
parser.add_option("--debug", action="store_true", dest="debug", default=False,
                  help="run in debug mode")
# Fixed: default was the list [] for a string-valued option whose value is
# later .split(); an empty string keeps the type consistent and is equally
# falsy where the code checks it.
parser.add_option("--skip-tests", dest="skip_tests", default='',
                  help="A space separated list of tests to skip")

parser.set_defaults(module_list=None)
20
# Registries of optional test modules that the default suite should not run.
# Each collection below names unittest files excluded for a specific reason
# (missing dependency, long runtime, or not a real unittest).  Entries may be
# bare file names or paths relative to the parent of the directory holding
# unittest_suite.py (ROOT); a relative path disambiguates a generic name that
# could collide across folders, e.g. prefer 'mirror/trigger_unittest.py'
# over a bare 'trigger_unittest.py'.

REQUIRES_DJANGO = {
        'monitor_db_unittest.py',
        'monitor_db_functional_test.py',
        'monitor_db_cleanup_test.py',
        'frontend_unittest.py',
        'csv_encoder_unittest.py',
        'rpc_interface_unittest.py',
        'models_test.py',
        'scheduler_models_unittest.py',
        'metahost_scheduler_unittest.py',
        'site_metahost_scheduler_unittest.py',
        'rpc_utils_unittest.py',
        'site_rpc_utils_unittest.py',
        'execution_engine_unittest.py',
        'service_proxy_lib_test.py',
        'rdb_integration_tests.py',
        'rdb_unittest.py',
        'rdb_hosts_unittest.py',
        'rdb_cache_unittests.py',
        'scheduler_lib_unittest.py',
        }

REQUIRES_MYSQLDB = {
        'migrate_unittest.py',
        'db_utils_unittest.py',
        'scheduler_lib_unittest.py',
        }

REQUIRES_GWT = {
        'client_compilation_unittest.py',
        }

REQUIRES_SIMPLEJSON = {
        'resources_test.py',
        'serviceHandler_unittest.py',
        }

REQUIRES_AUTH = {
        'trigger_unittest.py',
        }

# No empty-set literal exists ({} is a dict), so spell this one out.
REQUIRES_HTTPLIB2 = set()

REQUIRES_PROTOBUFS = {
        'job_serializer_unittest.py',
        }

REQUIRES_SELENIUM = {
        'ap_configurator_factory_unittest.py',
        'ap_batch_locker_unittest.py',
        }

LONG_RUNTIME = {
        'auth_server_unittest.py',
        'base_barrier_unittest.py',
        'logging_manager_test.py',
        'task_loop_unittest.py',  # crbug.com/254030
        }


SKIP = {
        # This particular KVM autotest test is not a unittest
        'guest_test.py',
        'ap_configurator_test.py',
        'chaos_base_test.py',
        'chaos_interop_test.py',
        'monitor_db_functional_test.py',
        'atomic_group_unittests.py',
        # crbug.com/251395
        'dev_server_test.py',
        'full_release_test.py',
        'scheduler_lib_unittest.py',
        }

# Everything skipped by the default (non --full) run.  Note that the Django
# and simplejson sets are deliberately NOT part of this union.
LONG_TESTS = (REQUIRES_MYSQLDB |
              REQUIRES_GWT |
              REQUIRES_HTTPLIB2 |
              REQUIRES_AUTH |
              REQUIRES_PROTOBUFS |
              REQUIRES_SELENIUM |
              LONG_RUNTIME)

# Parent of the directory containing this file; all relative entries above
# are resolved against it.
ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))

# Absolute-path form of LONG_TESTS, for matching full paths during the scan.
LONG_TESTS_FULL_PATH = set(os.path.join(ROOT, name) for name in LONG_TESTS)
117
class TestFailure(Exception):
    """Raised when a test module reports any failures or errors."""
121
122
123def run_test(mod_names, options):
124    """
125    @param mod_names: A list of individual parts of the module name to import
126            and run as a test suite.
127    @param options: optparse options.
128    """
129    if not options.debug:
130        parallel.redirect_io()
131
132    print "Running %s" % '.'.join(mod_names)
133    mod = common.setup_modules.import_module(mod_names[-1],
134                                             '.'.join(mod_names[:-1]))
135    for ut_module in [unittest, custom_unittest]:
136        test = ut_module.defaultTestLoader.loadTestsFromModule(mod)
137        suite = ut_module.TestSuite(test)
138        runner = ut_module.TextTestRunner(verbosity=2)
139        result = runner.run(suite)
140        if result.errors or result.failures:
141            msg = '%s had %d failures and %d errors.'
142            msg %= '.'.join(mod_names), len(result.failures), len(result.errors)
143            raise TestFailure(msg)
144
145
146def scan_for_modules(start, options):
147    """Scan folders and find all test modules that are not included in the
148    blacklist (defined in LONG_TESTS).
149
150    @param start: The absolute directory to look for tests under.
151    @param options: optparse options.
152    @return a list of modules to be executed.
153    """
154    modules = []
155
156    skip_tests = SKIP
157    if options.skip_tests:
158        skip_tests.update(options.skip_tests.split())
159    skip_tests_full_path = {os.path.join(ROOT, t) for t in skip_tests}
160
161    for dir_path, sub_dirs, file_names in os.walk(start):
162        # Only look in and below subdirectories that are python modules.
163        if '__init__.py' not in file_names:
164            if options.full:
165                for file_name in file_names:
166                    if file_name.endswith('.pyc'):
167                        os.unlink(os.path.join(dir_path, file_name))
168            # Skip all subdirectories below this one, it is not a module.
169            del sub_dirs[:]
170            if options.debug:
171                print 'Skipping', dir_path
172            continue  # Skip this directory.
173
174        # Look for unittest files.
175        for file_name in file_names:
176            if (file_name.endswith('_unittest.py') or
177                file_name.endswith('_test.py')):
178                file_path = os.path.join(dir_path, file_name)
179                if (not options.full and
180                    (file_name in LONG_TESTS or
181                     file_path in LONG_TESTS_FULL_PATH)):
182                    continue
183                if (file_name in skip_tests or
184                    file_path in skip_tests_full_path):
185                    continue
186                path_no_py = os.path.join(dir_path, file_name).rstrip('.py')
187                assert path_no_py.startswith(ROOT)
188                names = path_no_py[len(ROOT)+1:].split('/')
189                modules.append(['autotest_lib'] + names)
190                if options.debug:
191                    print 'testing', path_no_py
192    return modules
193
194def find_and_run_tests(start, options):
195    """
196    Find and run Python unittest suites below the given directory.  Only look
197    in subdirectories of start that are actual importable Python modules.
198
199    @param start: The absolute directory to look for tests under.
200    @param options: optparse options.
201    """
202    if options.module_list:
203        modules = []
204        for m in options.module_list:
205            modules.append(m.split('.'))
206    else:
207        modules = scan_for_modules(start, options)
208
209    if options.debug:
210        print 'Number of test modules found:', len(modules)
211
212    functions = {}
213    for module_names in modules:
214        # Create a function that'll test a particular module.  module=module
215        # is a hack to force python to evaluate the params now.  We then
216        # rename the function to make error reporting nicer.
217        run_module = lambda module=module_names: run_test(module, options)
218        name = '.'.join(module_names)
219        run_module.__name__ = name
220        functions[run_module] = set()
221
222    try:
223        dargs = {}
224        if options.debug:
225            dargs['max_simultaneous_procs'] = 1
226        pe = parallel.ParallelExecute(functions, **dargs)
227        pe.run_until_completion()
228    except parallel.ParallelError, err:
229        return err.errors
230    return []
231
232
233def main():
234    """Entry point for unittest_suite.py"""
235    options, args = parser.parse_args()
236    if args:
237        options.module_list = args
238
239    # Strip the arguments off the command line, so that the unit tests do not
240    # see them.
241    del sys.argv[1:]
242
243    absolute_start = os.path.join(ROOT, options.start)
244    errors = find_and_run_tests(absolute_start, options)
245    if errors:
246        print "%d tests resulted in an error/failure:" % len(errors)
247        for error in errors:
248            print "\t%s" % error
249        print "Rerun", sys.argv[0], "--debug to see the failure details."
250        sys.exit(1)
251    else:
252        print "All passed!"
253        sys.exit(0)
254
255
# Run the suite only when invoked as a script, not when imported.
if __name__ == "__main__":
    main()
258