"""Runs a single autotest iteration on a ChromeOS device, with result caching.

Each AutotestRun is a thread that acquires a test machine, reimages it if
necessary, runs the requested autotest, and parses and caches the results.
"""

import datetime
import getpass
import glob
import os
import pickle
import re
import threading
import time

import image_chromeos
import machine_manager_singleton
import table_formatter
from cros_utils import command_executer
from cros_utils import logger

SCRATCH_DIR = '/home/%s/cros_scratch' % getpass.getuser()
PICKLE_FILE = 'pickle.txt'
VERSION = '1'


def ConvertToFilename(text):
  """Sanitizes text so it can be used as a cache directory name."""
  ret = text
  ret = re.sub('/', '__', ret)
  ret = re.sub(' ', '_', ret)
  ret = re.sub('=', '', ret)
  ret = re.sub('"', '', ret)
  return ret


class AutotestRun(threading.Thread):
  """Runs one autotest iteration against one remote machine."""

  def __init__(self,
               autotest,
               chromeos_root='',
               chromeos_image='',
               board='',
               remote='',
               iteration=0,
               image_checksum='',
               exact_remote=False,
               rerun=False,
               rerun_if_failed=False):
    self.autotest = autotest
    self.chromeos_root = chromeos_root
    self.chromeos_image = chromeos_image
    self.board = board
    self.remote = remote
    self.iteration = iteration
    l = logger.GetLogger()
    l.LogFatalIf(not image_checksum, "Checksum shouldn't be None")
    self.image_checksum = image_checksum
    self.results = {}
    threading.Thread.__init__(self)
    self.terminate = False
    self.retval = None
    self.status = 'PENDING'
    self.run_completed = False
    self.exact_remote = exact_remote
    self.rerun = rerun
    self.rerun_if_failed = rerun_if_failed
    self.results_dir = None
    self.full_name = None

  @staticmethod
  def MeanExcludingSlowest(array):
    """Returns the mean of the values that lie within 20% of the overall mean."""
    mean = sum(array) / len(array)
    array2 = []

    for v in array:
      if mean != 0 and abs(v - mean) / mean < 0.2:
        array2.append(v)

    if array2:
      return sum(array2) / len(array2)
    else:
      return mean

  @staticmethod
  def AddComposite(results_dict):
    """Folds keyvals like 'key{1}', 'key{2}' into 'key[c]' and 'key[ce]'."""
    composite_keys = []
    composite_dict = {}
    for key in results_dict:
      mo = re.match(r'(.*){\d+}', key)
      if mo:
        composite_keys.append(mo.group(1))
    for key in results_dict:
      for composite_key in composite_keys:
        if (key.count(composite_key) != 0 and
            table_formatter.IsFloat(results_dict[key])):
          if composite_key not in composite_dict:
            composite_dict[composite_key] = []
          composite_dict[composite_key].append(float(results_dict[key]))
          break

    for composite_key in composite_dict:
      v = composite_dict[composite_key]
      results_dict['%s[c]' % composite_key] = sum(v) / len(v)
      mean_excluding_slowest = AutotestRun.MeanExcludingSlowest(v)
      results_dict['%s[ce]' % composite_key] = mean_excluding_slowest

    return results_dict

  def ParseOutput(self):
    """Parses the keyval table and results directory out of the test output."""
    p = re.compile('^-+.*?^-+', re.DOTALL | re.MULTILINE)
    matches = p.findall(self.out)
    for i in range(len(matches)):
      results = matches[i]
      results_dict = {}
      for line in results.splitlines()[1:-1]:
        mo = re.match(r'(.*\S)\s+\[\s+(PASSED|FAILED)\s+\]', line)
        if mo:
          results_dict[mo.group(1)] = mo.group(2)
          continue
        mo = re.match(r'(.*\S)\s+(.*)', line)
        if mo:
          results_dict[mo.group(1)] = mo.group(2)

      # Add a composite keyval for tests like startup.
      results_dict = AutotestRun.AddComposite(results_dict)

      self.results = results_dict

      # Break after the first table so we don't parse it again: autotest
      # recently added a secondary table that reports errors and screws up
      # the final pretty output.
      break
    mo = re.search(r'Results placed in (\S+)', self.out)
    if mo:
      self.results_dir = mo.group(1)
      self.full_name = os.path.basename(self.results_dir)
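
  # Illustrative only: the dashed keyval blocks that ParseOutput scans for
  # look roughly like the sketch below (the exact autotest output format may
  # differ between versions; the key names here are made up).
  #
  #   ---------------------------------------------------------------
  #   platform_BootPerf                              [  PASSED  ]
  #   seconds_kernel_to_login{1}                     3.81
  #   seconds_kernel_to_login{2}                     3.79
  #   ---------------------------------------------------------------
  #
  # Keys ending in '{N}' are folded by AddComposite into '<key>[c]' (mean)
  # and '<key>[ce]' (mean excluding values more than 20% from the mean).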

  def GetCacheHashBase(self):
    """Returns the string used to derive this run's cache directory name."""
    ret = ('%s %s %s' %
           (self.image_checksum, self.autotest.name, self.iteration))
    if self.autotest.args:
      ret += ' %s' % self.autotest.args
    ret += '-%s' % VERSION
    return ret

  def GetLabel(self):
    ret = '%s %s remote:%s' % (self.chromeos_image, self.autotest.name,
                               self.remote)
    return ret

  def TryToLoadFromCache(self):
    base = self.GetCacheHashBase()
    if self.exact_remote:
      if not self.remote:
        return False
      cache_dir_glob = '%s_%s' % (ConvertToFilename(base), self.remote)
    else:
      cache_dir_glob = '%s*' % ConvertToFilename(base)
    cache_path_glob = os.path.join(SCRATCH_DIR, cache_dir_glob)
    matching_dirs = glob.glob(cache_path_glob)
    if matching_dirs:
      matching_dir = matching_dirs[0]
      cache_file = os.path.join(matching_dir, PICKLE_FILE)
      assert os.path.isfile(cache_file)
      self._logger.LogOutput('Trying to read from cache file: %s' % cache_file)
      return self.ReadFromCache(cache_file)
    self._logger.LogOutput('Cache miss. Will run: %s for: %s' %
                           (self.autotest.name, self.chromeos_image))
    return False

  def ReadFromCache(self, cache_file):
    with open(cache_file, 'rb') as f:
      self.retval = pickle.load(f)
      self.out = pickle.load(f)
      self.err = pickle.load(f)
      self._logger.LogOutput(self.out)
    return True

  def StoreToCache(self):
    base = self.GetCacheHashBase()
    self.cache_dir = os.path.join(SCRATCH_DIR,
                                  '%s_%s' % (ConvertToFilename(base),
                                             self.remote))
    cache_file = os.path.join(self.cache_dir, PICKLE_FILE)
    command = 'mkdir -p %s' % os.path.dirname(cache_file)
    ret = self._ce.RunCommand(command)
    assert ret == 0, "Couldn't create cache dir"
    with open(cache_file, 'wb') as f:
      pickle.dump(self.retval, f)
      pickle.dump(self.out, f)
      pickle.dump(self.err, f)
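
  # Illustrative only: with a hypothetical checksum 'abc123', test 'bvt',
  # iteration 1 and remote '172.0.0.1', GetCacheHashBase yields
  # 'abc123 bvt 1-1' and the cached pickle ends up under something like
  #   /home/<user>/cros_scratch/abc123_bvt_1-1_172.0.0.1/pickle.txt
  # (the exact path depends on SCRATCH_DIR, ConvertToFilename and the remote).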

  def run(self):
    self._logger = logger.Logger(
        os.path.dirname(__file__), '%s.%s' % (os.path.basename(__file__),
                                              self.name), True)
    self._ce = command_executer.GetCommandExecuter(self._logger)
    self.RunCached()

  def RunCached(self):
    """Runs the test, using cached results if available and allowed."""
    self.status = 'WAITING'
    cache_hit = False
    if not self.rerun:
      cache_hit = self.TryToLoadFromCache()
    else:
      self._logger.LogOutput('--rerun passed. Not using cached results.')
    if self.rerun_if_failed and self.retval:
      self._logger.LogOutput('--rerun_if_failed passed and existing test '
                             'failed. Rerunning...')
      cache_hit = False
    if not cache_hit:
      # Get a machine to run on.
      while True:
        if self.terminate:
          return 1
        self.machine = (machine_manager_singleton.MachineManagerSingleton()
                        .AcquireMachine(self.image_checksum))
        if self.machine:
          self._logger.LogOutput('%s: Machine %s acquired at %s' %
                                 (self.name, self.machine.name,
                                  datetime.datetime.now()))
          break
        else:
          sleep_duration = 10
          time.sleep(sleep_duration)
      try:
        self.remote = self.machine.name

        if self.machine.checksum != self.image_checksum:
          self.retval = self.ImageTo(self.machine.name)
          if self.retval:
            return self.retval
          self.machine.checksum = self.image_checksum
          self.machine.image = self.chromeos_image
        self.status = 'RUNNING: %s' % self.autotest.name
        self.retval, self.out, self.err = self.RunTestOn(self.machine.name)
        self.run_completed = True
      finally:
        self._logger.LogOutput('Releasing machine: %s' % self.machine.name)
        machine_manager_singleton.MachineManagerSingleton().ReleaseMachine(
            self.machine)
        self._logger.LogOutput('Released machine: %s' % self.machine.name)

      self.StoreToCache()

    if not self.retval:
      self.status = 'SUCCEEDED'
    else:
      self.status = 'FAILED'

    self.ParseOutput()
    # Copy the results directory to the scratch dir.
    if (not cache_hit and not self.retval and self.autotest.args and
        '--profile' in self.autotest.args):
      results_dir = os.path.join(self.chromeos_root, 'chroot',
                                 self.results_dir.lstrip('/'))
      tarball = os.path.join(
          self.cache_dir, os.path.basename(os.path.dirname(self.results_dir)))
      command = 'cd %s && tar cjf %s.tbz2 .' % (results_dir, tarball)
      self._ce.RunCommand(command)
      perf_data_file = os.path.join(self.results_dir, self.full_name,
                                    'profiling/iteration.1/perf.data')

      # Attempt to build a perf report and keep it with the results.
      command = ('cd %s/src/scripts &&'
                 ' cros_sdk -- /usr/sbin/perf report --symfs=/build/%s'
                 ' -i %s --stdio' % (self.chromeos_root, self.board,
                                     perf_data_file))
      ret, out, err = self._ce.RunCommandWOutput(command)
      with open(os.path.join(self.cache_dir, 'perf.report'), 'wb') as f:
        f.write(out)
    return self.retval

  def ImageTo(self, machine_name):
    """Images the given machine with self.chromeos_image."""
    image_args = [image_chromeos.__file__,
                  '--chromeos_root=%s' % self.chromeos_root,
                  '--image=%s' % self.chromeos_image,
                  '--remote=%s' % machine_name]
    if self.board:
      image_args.append('--board=%s' % self.board)

    ### devserver_port = 8080
    ### mo = re.search("\d+", self.name)
    ### if mo:
    ###   to_add = int(mo.group(0))
    ###   assert to_add < 100, "Too many threads launched!"
    ###   devserver_port += to_add

    ### # I tried --noupdate_stateful, but that still fails when run in
    ### # parallel.
    ### image_args.append("--image_to_live_args=\"--devserver_port=%s"
    ###                   " --noupdate_stateful\"" % devserver_port)
    ### image_args.append("--image_to_live_args=--devserver_port=%s" %
    ###                   devserver_port)

    # Currently we can't image two machines at once, so we have to serialize
    # on this lock.
    self.status = 'WAITING ON IMAGE_LOCK'
    with machine_manager_singleton.MachineManagerSingleton().image_lock:
      self.status = 'IMAGING'
      retval = self._ce.RunCommand(' '.join(['python'] + image_args))
      machine_manager_singleton.MachineManagerSingleton().num_reimages += 1
      if retval:
        self.status = 'ABORTED DUE TO IMAGE FAILURE'
    return retval

  def DoPowerdHack(self):
    """Stops powerd (the power manager) on the remote machine."""
    command = 'sudo initctl stop powerd'
    self._ce.CrosRunCommand(command,
                            machine=self.machine.name,
                            chromeos_root=self.chromeos_root)

  def RunTestOn(self, machine_name):
    """Runs the autotest on machine_name inside the chroot via cros_sdk."""
    command = 'cd %s/src/scripts' % self.chromeos_root
    options = ''
    if self.board:
      options += ' --board=%s' % self.board
    if self.autotest.args:
      options += " --args='%s'" % self.autotest.args
    if 'tegra2' in self.board:
      self.DoPowerdHack()
    command += (' && cros_sdk -- /usr/bin/test_that %s %s %s' %
                (options, machine_name, self.autotest.name))
    return self._ce.RunCommand(command, True)
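
# Illustrative only: a hypothetical caller would create one AutotestRun per
# (image, test, iteration), start it, and join it before reading the parsed
# results. The object passed as 'autotest' just needs 'name' and 'args'
# attributes; the paths, board, remote and checksum below are made-up values.
#
#   run = AutotestRun(my_autotest,
#                     chromeos_root='/path/to/chromeos',
#                     chromeos_image='/path/to/chromiumos_test_image.bin',
#                     board='x86-generic',
#                     remote='172.0.0.1',
#                     iteration=1,
#                     image_checksum='abc123')
#   run.start()
#   run.join()
#   # Afterwards run.status reports the outcome and run.results holds the
#   # parsed keyvals.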