# common.py, revision ab7ca1d2861e70d8ef8673d350b634111414039e
1# Copyright (C) 2008 The Android Open Source Project 2# 3# Licensed under the Apache License, Version 2.0 (the "License"); 4# you may not use this file except in compliance with the License. 5# You may obtain a copy of the License at 6# 7# http://www.apache.org/licenses/LICENSE-2.0 8# 9# Unless required by applicable law or agreed to in writing, software 10# distributed under the License is distributed on an "AS IS" BASIS, 11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12# See the License for the specific language governing permissions and 13# limitations under the License. 14 15import copy 16import errno 17import getopt 18import getpass 19import imp 20import os 21import platform 22import re 23import shlex 24import shutil 25import subprocess 26import sys 27import tempfile 28import threading 29import time 30import zipfile 31 32import blockimgdiff 33 34try: 35 from hashlib import sha1 as sha1 36except ImportError: 37 from sha import sha as sha1 38 39# missing in Python 2.4 and before 40if not hasattr(os, "SEEK_SET"): 41 os.SEEK_SET = 0 42 43class Options(object): pass 44OPTIONS = Options() 45OPTIONS.search_path = "out/host/linux-x86" 46OPTIONS.signapk_path = "framework/signapk.jar" # Relative to search_path 47OPTIONS.extra_signapk_args = [] 48OPTIONS.java_path = "java" # Use the one on the path by default. 49OPTIONS.public_key_suffix = ".x509.pem" 50OPTIONS.private_key_suffix = ".pk8" 51OPTIONS.verbose = False 52OPTIONS.tempfiles = [] 53OPTIONS.device_specific = None 54OPTIONS.extras = {} 55OPTIONS.info_dict = None 56 57 58# Values for "certificate" in apkcerts that mean special things. 
59SPECIAL_CERT_STRINGS = ("PRESIGNED", "EXTERNAL") 60 61 62class ExternalError(RuntimeError): pass 63 64 65def Run(args, **kwargs): 66 """Create and return a subprocess.Popen object, printing the command 67 line on the terminal if -v was specified.""" 68 if OPTIONS.verbose: 69 print " running: ", " ".join(args) 70 return subprocess.Popen(args, **kwargs) 71 72 73def CloseInheritedPipes(): 74 """ Gmake in MAC OS has file descriptor (PIPE) leak. We close those fds 75 before doing other work.""" 76 if platform.system() != "Darwin": 77 return 78 for d in range(3, 1025): 79 try: 80 stat = os.fstat(d) 81 if stat is not None: 82 pipebit = stat[0] & 0x1000 83 if pipebit != 0: 84 os.close(d) 85 except OSError: 86 pass 87 88 89def LoadInfoDict(input): 90 """Read and parse the META/misc_info.txt key/value pairs from the 91 input target files and return a dict.""" 92 93 def read_helper(fn): 94 if isinstance(input, zipfile.ZipFile): 95 return input.read(fn) 96 else: 97 path = os.path.join(input, *fn.split("/")) 98 try: 99 with open(path) as f: 100 return f.read() 101 except IOError, e: 102 if e.errno == errno.ENOENT: 103 raise KeyError(fn) 104 d = {} 105 try: 106 d = LoadDictionaryFromLines(read_helper("META/misc_info.txt").split("\n")) 107 except KeyError: 108 # ok if misc_info.txt doesn't exist 109 pass 110 111 # backwards compatibility: These values used to be in their own 112 # files. Look for them, in case we're processing an old 113 # target_files zip. 
114 115 if "mkyaffs2_extra_flags" not in d: 116 try: 117 d["mkyaffs2_extra_flags"] = read_helper("META/mkyaffs2-extra-flags.txt").strip() 118 except KeyError: 119 # ok if flags don't exist 120 pass 121 122 if "recovery_api_version" not in d: 123 try: 124 d["recovery_api_version"] = read_helper("META/recovery-api-version.txt").strip() 125 except KeyError: 126 raise ValueError("can't find recovery API version in input target-files") 127 128 if "tool_extensions" not in d: 129 try: 130 d["tool_extensions"] = read_helper("META/tool-extensions.txt").strip() 131 except KeyError: 132 # ok if extensions don't exist 133 pass 134 135 if "fstab_version" not in d: 136 d["fstab_version"] = "1" 137 138 try: 139 data = read_helper("META/imagesizes.txt") 140 for line in data.split("\n"): 141 if not line: continue 142 name, value = line.split(" ", 1) 143 if not value: continue 144 if name == "blocksize": 145 d[name] = value 146 else: 147 d[name + "_size"] = value 148 except KeyError: 149 pass 150 151 def makeint(key): 152 if key in d: 153 d[key] = int(d[key], 0) 154 155 makeint("recovery_api_version") 156 makeint("blocksize") 157 makeint("system_size") 158 makeint("vendor_size") 159 makeint("userdata_size") 160 makeint("cache_size") 161 makeint("recovery_size") 162 makeint("boot_size") 163 makeint("fstab_version") 164 165 d["fstab"] = LoadRecoveryFSTab(read_helper, d["fstab_version"]) 166 d["build.prop"] = LoadBuildProp(read_helper) 167 return d 168 169def LoadBuildProp(read_helper): 170 try: 171 data = read_helper("SYSTEM/build.prop") 172 except KeyError: 173 print "Warning: could not find SYSTEM/build.prop in %s" % zip 174 data = "" 175 return LoadDictionaryFromLines(data.split("\n")) 176 177def LoadDictionaryFromLines(lines): 178 d = {} 179 for line in lines: 180 line = line.strip() 181 if not line or line.startswith("#"): continue 182 if "=" in line: 183 name, value = line.split("=", 1) 184 d[name] = value 185 return d 186 187def LoadRecoveryFSTab(read_helper, fstab_version): 
188 class Partition(object): 189 pass 190 191 try: 192 data = read_helper("RECOVERY/RAMDISK/etc/recovery.fstab") 193 except KeyError: 194 print "Warning: could not find RECOVERY/RAMDISK/etc/recovery.fstab" 195 data = "" 196 197 if fstab_version == 1: 198 d = {} 199 for line in data.split("\n"): 200 line = line.strip() 201 if not line or line.startswith("#"): continue 202 pieces = line.split() 203 if not (3 <= len(pieces) <= 4): 204 raise ValueError("malformed recovery.fstab line: \"%s\"" % (line,)) 205 206 p = Partition() 207 p.mount_point = pieces[0] 208 p.fs_type = pieces[1] 209 p.device = pieces[2] 210 p.length = 0 211 options = None 212 if len(pieces) >= 4: 213 if pieces[3].startswith("/"): 214 p.device2 = pieces[3] 215 if len(pieces) >= 5: 216 options = pieces[4] 217 else: 218 p.device2 = None 219 options = pieces[3] 220 else: 221 p.device2 = None 222 223 if options: 224 options = options.split(",") 225 for i in options: 226 if i.startswith("length="): 227 p.length = int(i[7:]) 228 else: 229 print "%s: unknown option \"%s\"" % (p.mount_point, i) 230 231 d[p.mount_point] = p 232 233 elif fstab_version == 2: 234 d = {} 235 for line in data.split("\n"): 236 line = line.strip() 237 if not line or line.startswith("#"): continue 238 pieces = line.split() 239 if len(pieces) != 5: 240 raise ValueError("malformed recovery.fstab line: \"%s\"" % (line,)) 241 242 # Ignore entries that are managed by vold 243 options = pieces[4] 244 if "voldmanaged=" in options: continue 245 246 # It's a good line, parse it 247 p = Partition() 248 p.device = pieces[0] 249 p.mount_point = pieces[1] 250 p.fs_type = pieces[2] 251 p.device2 = None 252 p.length = 0 253 254 options = options.split(",") 255 for i in options: 256 if i.startswith("length="): 257 p.length = int(i[7:]) 258 else: 259 # Ignore all unknown options in the unified fstab 260 continue 261 262 d[p.mount_point] = p 263 264 else: 265 raise ValueError("Unknown fstab_version: \"%d\"" % (fstab_version,)) 266 267 return d 268 269 
270def DumpInfoDict(d): 271 for k, v in sorted(d.items()): 272 print "%-25s = (%s) %s" % (k, type(v).__name__, v) 273 274def BuildBootableImage(sourcedir, fs_config_file, info_dict=None): 275 """Take a kernel, cmdline, and ramdisk directory from the input (in 276 'sourcedir'), and turn them into a boot image. Return the image 277 data, or None if sourcedir does not appear to contains files for 278 building the requested image.""" 279 280 if (not os.access(os.path.join(sourcedir, "RAMDISK"), os.F_OK) or 281 not os.access(os.path.join(sourcedir, "kernel"), os.F_OK)): 282 return None 283 284 if info_dict is None: 285 info_dict = OPTIONS.info_dict 286 287 ramdisk_img = tempfile.NamedTemporaryFile() 288 img = tempfile.NamedTemporaryFile() 289 290 if os.access(fs_config_file, os.F_OK): 291 cmd = ["mkbootfs", "-f", fs_config_file, os.path.join(sourcedir, "RAMDISK")] 292 else: 293 cmd = ["mkbootfs", os.path.join(sourcedir, "RAMDISK")] 294 p1 = Run(cmd, stdout=subprocess.PIPE) 295 p2 = Run(["minigzip"], 296 stdin=p1.stdout, stdout=ramdisk_img.file.fileno()) 297 298 p2.wait() 299 p1.wait() 300 assert p1.returncode == 0, "mkbootfs of %s ramdisk failed" % (targetname,) 301 assert p2.returncode == 0, "minigzip of %s ramdisk failed" % (targetname,) 302 303 # use MKBOOTIMG from environ, or "mkbootimg" if empty or not set 304 mkbootimg = os.getenv('MKBOOTIMG') or "mkbootimg" 305 306 cmd = [mkbootimg, "--kernel", os.path.join(sourcedir, "kernel")] 307 308 fn = os.path.join(sourcedir, "second") 309 if os.access(fn, os.F_OK): 310 cmd.append("--second") 311 cmd.append(fn) 312 313 fn = os.path.join(sourcedir, "cmdline") 314 if os.access(fn, os.F_OK): 315 cmd.append("--cmdline") 316 cmd.append(open(fn).read().rstrip("\n")) 317 318 fn = os.path.join(sourcedir, "base") 319 if os.access(fn, os.F_OK): 320 cmd.append("--base") 321 cmd.append(open(fn).read().rstrip("\n")) 322 323 fn = os.path.join(sourcedir, "pagesize") 324 if os.access(fn, os.F_OK): 325 cmd.append("--pagesize") 326 
cmd.append(open(fn).read().rstrip("\n")) 327 328 args = info_dict.get("mkbootimg_args", None) 329 if args and args.strip(): 330 cmd.extend(shlex.split(args)) 331 332 cmd.extend(["--ramdisk", ramdisk_img.name, 333 "--output", img.name]) 334 335 p = Run(cmd, stdout=subprocess.PIPE) 336 p.communicate() 337 assert p.returncode == 0, "mkbootimg of %s image failed" % ( 338 os.path.basename(sourcedir),) 339 340 if info_dict.get("verity_key", None): 341 path = "/" + os.path.basename(sourcedir).lower() 342 cmd = ["boot_signer", path, img.name, info_dict["verity_key"], img.name] 343 p = Run(cmd, stdout=subprocess.PIPE) 344 p.communicate() 345 assert p.returncode == 0, "boot_signer of %s image failed" % path 346 347 img.seek(os.SEEK_SET, 0) 348 data = img.read() 349 350 ramdisk_img.close() 351 img.close() 352 353 return data 354 355 356def GetBootableImage(name, prebuilt_name, unpack_dir, tree_subdir, 357 info_dict=None): 358 """Return a File object (with name 'name') with the desired bootable 359 image. Look for it in 'unpack_dir'/BOOTABLE_IMAGES under the name 360 'prebuilt_name', otherwise look for it under 'unpack_dir'/IMAGES, 361 otherwise construct it from the source files in 362 'unpack_dir'/'tree_subdir'.""" 363 364 prebuilt_path = os.path.join(unpack_dir, "BOOTABLE_IMAGES", prebuilt_name) 365 if os.path.exists(prebuilt_path): 366 print "using prebuilt %s from BOOTABLE_IMAGES..." % (prebuilt_name,) 367 return File.FromLocalFile(name, prebuilt_path) 368 369 prebuilt_path = os.path.join(unpack_dir, "IMAGES", prebuilt_name) 370 if os.path.exists(prebuilt_path): 371 print "using prebuilt %s from IMAGES..." % (prebuilt_name,) 372 return File.FromLocalFile(name, prebuilt_path) 373 374 print "building image from target_files %s..." 
% (tree_subdir,) 375 fs_config = "META/" + tree_subdir.lower() + "_filesystem_config.txt" 376 data = BuildBootableImage(os.path.join(unpack_dir, tree_subdir), 377 os.path.join(unpack_dir, fs_config), 378 info_dict) 379 if data: 380 return File(name, data) 381 return None 382 383 384def UnzipTemp(filename, pattern=None): 385 """Unzip the given archive into a temporary directory and return the name. 386 387 If filename is of the form "foo.zip+bar.zip", unzip foo.zip into a 388 temp dir, then unzip bar.zip into that_dir/BOOTABLE_IMAGES. 389 390 Returns (tempdir, zipobj) where zipobj is a zipfile.ZipFile (of the 391 main file), open for reading. 392 """ 393 394 tmp = tempfile.mkdtemp(prefix="targetfiles-") 395 OPTIONS.tempfiles.append(tmp) 396 397 def unzip_to_dir(filename, dirname): 398 cmd = ["unzip", "-o", "-q", filename, "-d", dirname] 399 if pattern is not None: 400 cmd.append(pattern) 401 p = Run(cmd, stdout=subprocess.PIPE) 402 p.communicate() 403 if p.returncode != 0: 404 raise ExternalError("failed to unzip input target-files \"%s\"" % 405 (filename,)) 406 407 m = re.match(r"^(.*[.]zip)\+(.*[.]zip)$", filename, re.IGNORECASE) 408 if m: 409 unzip_to_dir(m.group(1), tmp) 410 unzip_to_dir(m.group(2), os.path.join(tmp, "BOOTABLE_IMAGES")) 411 filename = m.group(1) 412 else: 413 unzip_to_dir(filename, tmp) 414 415 return tmp, zipfile.ZipFile(filename, "r") 416 417 418def GetKeyPasswords(keylist): 419 """Given a list of keys, prompt the user to enter passwords for 420 those which require them. Return a {key: password} dict. password 421 will be None if the key has no password.""" 422 423 no_passwords = [] 424 need_passwords = [] 425 key_passwords = {} 426 devnull = open("/dev/null", "w+b") 427 for k in sorted(keylist): 428 # We don't need a password for things that aren't really keys. 
429 if k in SPECIAL_CERT_STRINGS: 430 no_passwords.append(k) 431 continue 432 433 p = Run(["openssl", "pkcs8", "-in", k+OPTIONS.private_key_suffix, 434 "-inform", "DER", "-nocrypt"], 435 stdin=devnull.fileno(), 436 stdout=devnull.fileno(), 437 stderr=subprocess.STDOUT) 438 p.communicate() 439 if p.returncode == 0: 440 # Definitely an unencrypted key. 441 no_passwords.append(k) 442 else: 443 p = Run(["openssl", "pkcs8", "-in", k+OPTIONS.private_key_suffix, 444 "-inform", "DER", "-passin", "pass:"], 445 stdin=devnull.fileno(), 446 stdout=devnull.fileno(), 447 stderr=subprocess.PIPE) 448 stdout, stderr = p.communicate() 449 if p.returncode == 0: 450 # Encrypted key with empty string as password. 451 key_passwords[k] = '' 452 elif stderr.startswith('Error decrypting key'): 453 # Definitely encrypted key. 454 # It would have said "Error reading key" if it didn't parse correctly. 455 need_passwords.append(k) 456 else: 457 # Potentially, a type of key that openssl doesn't understand. 458 # We'll let the routines in signapk.jar handle it. 459 no_passwords.append(k) 460 devnull.close() 461 462 key_passwords.update(PasswordManager().GetPasswords(need_passwords)) 463 key_passwords.update(dict.fromkeys(no_passwords, None)) 464 return key_passwords 465 466 467def SignFile(input_name, output_name, key, password, align=None, 468 whole_file=False): 469 """Sign the input_name zip/jar/apk, producing output_name. Use the 470 given key and password (the latter may be None if the key does not 471 have a password. 472 473 If align is an integer > 1, zipalign is run to align stored files in 474 the output zip on 'align'-byte boundaries. 475 476 If whole_file is true, use the "-w" option to SignApk to embed a 477 signature that covers the whole file in the archive comment of the 478 zip file. 
479 """ 480 481 if align == 0 or align == 1: 482 align = None 483 484 if align: 485 temp = tempfile.NamedTemporaryFile() 486 sign_name = temp.name 487 else: 488 sign_name = output_name 489 490 cmd = [OPTIONS.java_path, "-Xmx2048m", "-jar", 491 os.path.join(OPTIONS.search_path, OPTIONS.signapk_path)] 492 cmd.extend(OPTIONS.extra_signapk_args) 493 if whole_file: 494 cmd.append("-w") 495 cmd.extend([key + OPTIONS.public_key_suffix, 496 key + OPTIONS.private_key_suffix, 497 input_name, sign_name]) 498 499 p = Run(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE) 500 if password is not None: 501 password += "\n" 502 p.communicate(password) 503 if p.returncode != 0: 504 raise ExternalError("signapk.jar failed: return code %s" % (p.returncode,)) 505 506 if align: 507 p = Run(["zipalign", "-f", str(align), sign_name, output_name]) 508 p.communicate() 509 if p.returncode != 0: 510 raise ExternalError("zipalign failed: return code %s" % (p.returncode,)) 511 temp.close() 512 513 514def CheckSize(data, target, info_dict): 515 """Check the data string passed against the max size limit, if 516 any, for the given target. Raise exception if the data is too big. 
517 Print a warning if the data is nearing the maximum size.""" 518 519 if target.endswith(".img"): target = target[:-4] 520 mount_point = "/" + target 521 522 fs_type = None 523 limit = None 524 if info_dict["fstab"]: 525 if mount_point == "/userdata": mount_point = "/data" 526 p = info_dict["fstab"][mount_point] 527 fs_type = p.fs_type 528 device = p.device 529 if "/" in device: 530 device = device[device.rfind("/")+1:] 531 limit = info_dict.get(device + "_size", None) 532 if not fs_type or not limit: return 533 534 if fs_type == "yaffs2": 535 # image size should be increased by 1/64th to account for the 536 # spare area (64 bytes per 2k page) 537 limit = limit / 2048 * (2048+64) 538 size = len(data) 539 pct = float(size) * 100.0 / limit 540 msg = "%s size (%d) is %.2f%% of limit (%d)" % (target, size, pct, limit) 541 if pct >= 99.0: 542 raise ExternalError(msg) 543 elif pct >= 95.0: 544 print 545 print " WARNING: ", msg 546 print 547 elif OPTIONS.verbose: 548 print " ", msg 549 550 551def ReadApkCerts(tf_zip): 552 """Given a target_files ZipFile, parse the META/apkcerts.txt file 553 and return a {package: cert} dict.""" 554 certmap = {} 555 for line in tf_zip.read("META/apkcerts.txt").split("\n"): 556 line = line.strip() 557 if not line: continue 558 m = re.match(r'^name="(.*)"\s+certificate="(.*)"\s+' 559 r'private_key="(.*)"$', line) 560 if m: 561 name, cert, privkey = m.groups() 562 public_key_suffix_len = len(OPTIONS.public_key_suffix) 563 private_key_suffix_len = len(OPTIONS.private_key_suffix) 564 if cert in SPECIAL_CERT_STRINGS and not privkey: 565 certmap[name] = cert 566 elif (cert.endswith(OPTIONS.public_key_suffix) and 567 privkey.endswith(OPTIONS.private_key_suffix) and 568 cert[:-public_key_suffix_len] == privkey[:-private_key_suffix_len]): 569 certmap[name] = cert[:-public_key_suffix_len] 570 else: 571 raise ValueError("failed to parse line from apkcerts.txt:\n" + line) 572 return certmap 573 574 575COMMON_DOCSTRING = """ 576 -p (--path) <dir> 577 
Prepend <dir>/bin to the list of places to search for binaries 578 run by this script, and expect to find jars in <dir>/framework. 579 580 -s (--device_specific) <file> 581 Path to the python module containing device-specific 582 releasetools code. 583 584 -x (--extra) <key=value> 585 Add a key/value pair to the 'extras' dict, which device-specific 586 extension code may look at. 587 588 -v (--verbose) 589 Show command lines being executed. 590 591 -h (--help) 592 Display this usage message and exit. 593""" 594 595def Usage(docstring): 596 print docstring.rstrip("\n") 597 print COMMON_DOCSTRING 598 599 600def ParseOptions(argv, 601 docstring, 602 extra_opts="", extra_long_opts=(), 603 extra_option_handler=None): 604 """Parse the options in argv and return any arguments that aren't 605 flags. docstring is the calling module's docstring, to be displayed 606 for errors and -h. extra_opts and extra_long_opts are for flags 607 defined by the caller, which are processed by passing them to 608 extra_option_handler.""" 609 610 try: 611 opts, args = getopt.getopt( 612 argv, "hvp:s:x:" + extra_opts, 613 ["help", "verbose", "path=", "signapk_path=", "extra_signapk_args=", 614 "java_path=", "public_key_suffix=", "private_key_suffix=", 615 "device_specific=", "extra="] + 616 list(extra_long_opts)) 617 except getopt.GetoptError, err: 618 Usage(docstring) 619 print "**", str(err), "**" 620 sys.exit(2) 621 622 path_specified = False 623 624 for o, a in opts: 625 if o in ("-h", "--help"): 626 Usage(docstring) 627 sys.exit() 628 elif o in ("-v", "--verbose"): 629 OPTIONS.verbose = True 630 elif o in ("-p", "--path"): 631 OPTIONS.search_path = a 632 elif o in ("--signapk_path",): 633 OPTIONS.signapk_path = a 634 elif o in ("--extra_signapk_args",): 635 OPTIONS.extra_signapk_args = shlex.split(a) 636 elif o in ("--java_path",): 637 OPTIONS.java_path = a 638 elif o in ("--public_key_suffix",): 639 OPTIONS.public_key_suffix = a 640 elif o in ("--private_key_suffix",): 641 
OPTIONS.private_key_suffix = a 642 elif o in ("-s", "--device_specific"): 643 OPTIONS.device_specific = a 644 elif o in ("-x", "--extra"): 645 key, value = a.split("=", 1) 646 OPTIONS.extras[key] = value 647 else: 648 if extra_option_handler is None or not extra_option_handler(o, a): 649 assert False, "unknown option \"%s\"" % (o,) 650 651 os.environ["PATH"] = (os.path.join(OPTIONS.search_path, "bin") + 652 os.pathsep + os.environ["PATH"]) 653 654 return args 655 656 657def MakeTempFile(prefix=None, suffix=None): 658 """Make a temp file and add it to the list of things to be deleted 659 when Cleanup() is called. Return the filename.""" 660 fd, fn = tempfile.mkstemp(prefix=prefix, suffix=suffix) 661 os.close(fd) 662 OPTIONS.tempfiles.append(fn) 663 return fn 664 665 666def Cleanup(): 667 for i in OPTIONS.tempfiles: 668 if os.path.isdir(i): 669 shutil.rmtree(i) 670 else: 671 os.remove(i) 672 673 674class PasswordManager(object): 675 def __init__(self): 676 self.editor = os.getenv("EDITOR", None) 677 self.pwfile = os.getenv("ANDROID_PW_FILE", None) 678 679 def GetPasswords(self, items): 680 """Get passwords corresponding to each string in 'items', 681 returning a dict. (The dict may have keys in addition to the 682 values in 'items'.) 683 684 Uses the passwords in $ANDROID_PW_FILE if available, letting the 685 user edit that file to add more needed passwords. If no editor is 686 available, or $ANDROID_PW_FILE isn't define, prompts the user 687 interactively in the ordinary way. 688 """ 689 690 current = self.ReadFile() 691 692 first = True 693 while True: 694 missing = [] 695 for i in items: 696 if i not in current or not current[i]: 697 missing.append(i) 698 # Are all the passwords already in the file? 699 if not missing: return current 700 701 for i in missing: 702 current[i] = "" 703 704 if not first: 705 print "key file %s still missing some passwords." % (self.pwfile,) 706 answer = raw_input("try to edit again? 
[y]> ").strip() 707 if answer and answer[0] not in 'yY': 708 raise RuntimeError("key passwords unavailable") 709 first = False 710 711 current = self.UpdateAndReadFile(current) 712 713 def PromptResult(self, current): 714 """Prompt the user to enter a value (password) for each key in 715 'current' whose value is fales. Returns a new dict with all the 716 values. 717 """ 718 result = {} 719 for k, v in sorted(current.iteritems()): 720 if v: 721 result[k] = v 722 else: 723 while True: 724 result[k] = getpass.getpass("Enter password for %s key> " 725 % (k,)).strip() 726 if result[k]: break 727 return result 728 729 def UpdateAndReadFile(self, current): 730 if not self.editor or not self.pwfile: 731 return self.PromptResult(current) 732 733 f = open(self.pwfile, "w") 734 os.chmod(self.pwfile, 0600) 735 f.write("# Enter key passwords between the [[[ ]]] brackets.\n") 736 f.write("# (Additional spaces are harmless.)\n\n") 737 738 first_line = None 739 sorted = [(not v, k, v) for (k, v) in current.iteritems()] 740 sorted.sort() 741 for i, (_, k, v) in enumerate(sorted): 742 f.write("[[[ %s ]]] %s\n" % (v, k)) 743 if not v and first_line is None: 744 # position cursor on first line with no password. 
745 first_line = i + 4 746 f.close() 747 748 p = Run([self.editor, "+%d" % (first_line,), self.pwfile]) 749 _, _ = p.communicate() 750 751 return self.ReadFile() 752 753 def ReadFile(self): 754 result = {} 755 if self.pwfile is None: return result 756 try: 757 f = open(self.pwfile, "r") 758 for line in f: 759 line = line.strip() 760 if not line or line[0] == '#': continue 761 m = re.match(r"^\[\[\[\s*(.*?)\s*\]\]\]\s*(\S+)$", line) 762 if not m: 763 print "failed to parse password file: ", line 764 else: 765 result[m.group(2)] = m.group(1) 766 f.close() 767 except IOError, e: 768 if e.errno != errno.ENOENT: 769 print "error reading password file: ", str(e) 770 return result 771 772 773def ZipWriteStr(zip, filename, data, perms=0644, compression=None): 774 # use a fixed timestamp so the output is repeatable. 775 zinfo = zipfile.ZipInfo(filename=filename, 776 date_time=(2009, 1, 1, 0, 0, 0)) 777 if compression is None: 778 zinfo.compress_type = zip.compression 779 else: 780 zinfo.compress_type = compression 781 zinfo.external_attr = perms << 16 782 zip.writestr(zinfo, data) 783 784 785class DeviceSpecificParams(object): 786 module = None 787 def __init__(self, **kwargs): 788 """Keyword arguments to the constructor become attributes of this 789 object, which is passed to all functions in the device-specific 790 module.""" 791 for k, v in kwargs.iteritems(): 792 setattr(self, k, v) 793 self.extras = OPTIONS.extras 794 795 if self.module is None: 796 path = OPTIONS.device_specific 797 if not path: return 798 try: 799 if os.path.isdir(path): 800 info = imp.find_module("releasetools", [path]) 801 else: 802 d, f = os.path.split(path) 803 b, x = os.path.splitext(f) 804 if x == ".py": 805 f = b 806 info = imp.find_module(f, [d]) 807 print "loaded device-specific extensions from", path 808 self.module = imp.load_module("device_specific", *info) 809 except ImportError: 810 print "unable to load device-specific module; assuming none" 811 812 def _DoCall(self, function_name, 
*args, **kwargs): 813 """Call the named function in the device-specific module, passing 814 the given args and kwargs. The first argument to the call will be 815 the DeviceSpecific object itself. If there is no module, or the 816 module does not define the function, return the value of the 817 'default' kwarg (which itself defaults to None).""" 818 if self.module is None or not hasattr(self.module, function_name): 819 return kwargs.get("default", None) 820 return getattr(self.module, function_name)(*((self,) + args), **kwargs) 821 822 def FullOTA_Assertions(self): 823 """Called after emitting the block of assertions at the top of a 824 full OTA package. Implementations can add whatever additional 825 assertions they like.""" 826 return self._DoCall("FullOTA_Assertions") 827 828 def FullOTA_InstallBegin(self): 829 """Called at the start of full OTA installation.""" 830 return self._DoCall("FullOTA_InstallBegin") 831 832 def FullOTA_InstallEnd(self): 833 """Called at the end of full OTA installation; typically this is 834 used to install the image for the device's baseband processor.""" 835 return self._DoCall("FullOTA_InstallEnd") 836 837 def IncrementalOTA_Assertions(self): 838 """Called after emitting the block of assertions at the top of an 839 incremental OTA package. 
Implementations can add whatever 840 additional assertions they like.""" 841 return self._DoCall("IncrementalOTA_Assertions") 842 843 def IncrementalOTA_VerifyBegin(self): 844 """Called at the start of the verification phase of incremental 845 OTA installation; additional checks can be placed here to abort 846 the script before any changes are made.""" 847 return self._DoCall("IncrementalOTA_VerifyBegin") 848 849 def IncrementalOTA_VerifyEnd(self): 850 """Called at the end of the verification phase of incremental OTA 851 installation; additional checks can be placed here to abort the 852 script before any changes are made.""" 853 return self._DoCall("IncrementalOTA_VerifyEnd") 854 855 def IncrementalOTA_InstallBegin(self): 856 """Called at the start of incremental OTA installation (after 857 verification is complete).""" 858 return self._DoCall("IncrementalOTA_InstallBegin") 859 860 def IncrementalOTA_InstallEnd(self): 861 """Called at the end of incremental OTA installation; typically 862 this is used to install the image for the device's baseband 863 processor.""" 864 return self._DoCall("IncrementalOTA_InstallEnd") 865 866class File(object): 867 def __init__(self, name, data): 868 self.name = name 869 self.data = data 870 self.size = len(data) 871 self.sha1 = sha1(data).hexdigest() 872 873 @classmethod 874 def FromLocalFile(cls, name, diskname): 875 f = open(diskname, "rb") 876 data = f.read() 877 f.close() 878 return File(name, data) 879 880 def WriteToTemp(self): 881 t = tempfile.NamedTemporaryFile() 882 t.write(self.data) 883 t.flush() 884 return t 885 886 def AddToZip(self, z, compression=None): 887 ZipWriteStr(z, self.name, self.data, compression=compression) 888 889DIFF_PROGRAM_BY_EXT = { 890 ".gz" : "imgdiff", 891 ".zip" : ["imgdiff", "-z"], 892 ".jar" : ["imgdiff", "-z"], 893 ".apk" : ["imgdiff", "-z"], 894 ".img" : "imgdiff", 895 } 896 897class Difference(object): 898 def __init__(self, tf, sf, diff_program=None): 899 self.tf = tf 900 self.sf = sf 901 
self.patch = None 902 self.diff_program = diff_program 903 904 def ComputePatch(self): 905 """Compute the patch (as a string of data) needed to turn sf into 906 tf. Returns the same tuple as GetPatch().""" 907 908 tf = self.tf 909 sf = self.sf 910 911 if self.diff_program: 912 diff_program = self.diff_program 913 else: 914 ext = os.path.splitext(tf.name)[1] 915 diff_program = DIFF_PROGRAM_BY_EXT.get(ext, "bsdiff") 916 917 ttemp = tf.WriteToTemp() 918 stemp = sf.WriteToTemp() 919 920 ext = os.path.splitext(tf.name)[1] 921 922 try: 923 ptemp = tempfile.NamedTemporaryFile() 924 if isinstance(diff_program, list): 925 cmd = copy.copy(diff_program) 926 else: 927 cmd = [diff_program] 928 cmd.append(stemp.name) 929 cmd.append(ttemp.name) 930 cmd.append(ptemp.name) 931 p = Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 932 err = [] 933 def run(): 934 _, e = p.communicate() 935 if e: err.append(e) 936 th = threading.Thread(target=run) 937 th.start() 938 th.join(timeout=300) # 5 mins 939 if th.is_alive(): 940 print "WARNING: diff command timed out" 941 p.terminate() 942 th.join(5) 943 if th.is_alive(): 944 p.kill() 945 th.join() 946 947 if err or p.returncode != 0: 948 print "WARNING: failure running %s:\n%s\n" % ( 949 diff_program, "".join(err)) 950 self.patch = None 951 return None, None, None 952 diff = ptemp.read() 953 finally: 954 ptemp.close() 955 stemp.close() 956 ttemp.close() 957 958 self.patch = diff 959 return self.tf, self.sf, self.patch 960 961 962 def GetPatch(self): 963 """Return a tuple (target_file, source_file, patch_data). 964 patch_data may be None if ComputePatch hasn't been called, or if 965 computing the patch failed.""" 966 return self.tf, self.sf, self.patch 967 968 969def ComputeDifferences(diffs): 970 """Call ComputePatch on all the Difference objects in 'diffs'.""" 971 print len(diffs), "diffs to compute" 972 973 # Do the largest files first, to try and reduce the long-pole effect. 
974 by_size = [(i.tf.size, i) for i in diffs] 975 by_size.sort(reverse=True) 976 by_size = [i[1] for i in by_size] 977 978 lock = threading.Lock() 979 diff_iter = iter(by_size) # accessed under lock 980 981 def worker(): 982 try: 983 lock.acquire() 984 for d in diff_iter: 985 lock.release() 986 start = time.time() 987 d.ComputePatch() 988 dur = time.time() - start 989 lock.acquire() 990 991 tf, sf, patch = d.GetPatch() 992 if sf.name == tf.name: 993 name = tf.name 994 else: 995 name = "%s (%s)" % (tf.name, sf.name) 996 if patch is None: 997 print "patching failed! %s" % (name,) 998 else: 999 print "%8.2f sec %8d / %8d bytes (%6.2f%%) %s" % ( 1000 dur, len(patch), tf.size, 100.0 * len(patch) / tf.size, name) 1001 lock.release() 1002 except Exception, e: 1003 print e 1004 raise 1005 1006 # start worker threads; wait for them all to finish. 1007 threads = [threading.Thread(target=worker) 1008 for i in range(OPTIONS.worker_threads)] 1009 for th in threads: 1010 th.start() 1011 while threads: 1012 threads.pop().join() 1013 1014 1015class BlockDifference: 1016 def __init__(self, partition, tgt, src=None): 1017 self.tgt = tgt 1018 self.src = src 1019 self.partition = partition 1020 1021 b = blockimgdiff.BlockImageDiff(tgt, src, threads=OPTIONS.worker_threads) 1022 tmpdir = tempfile.mkdtemp() 1023 OPTIONS.tempfiles.append(tmpdir) 1024 self.path = os.path.join(tmpdir, partition) 1025 b.Compute(self.path) 1026 1027 _, self.device = GetTypeAndDevice("/" + partition, OPTIONS.info_dict) 1028 1029 def WriteScript(self, script, output_zip, progress=None): 1030 if not self.src: 1031 # write the output unconditionally 1032 if progress: script.ShowProgress(progress, 0) 1033 self._WriteUpdate(script, output_zip) 1034 1035 else: 1036 script.AppendExtra('if range_sha1("%s", "%s") == "%s" then' % 1037 (self.device, self.src.care_map.to_string_raw(), 1038 self.src.TotalSha1())) 1039 script.Print("Patching %s image..." 
% (self.partition,)) 1040 if progress: script.ShowProgress(progress, 0) 1041 self._WriteUpdate(script, output_zip) 1042 script.AppendExtra(('else\n' 1043 ' (range_sha1("%s", "%s") == "%s") ||\n' 1044 ' abort("%s partition has unexpected contents");\n' 1045 'endif;') % 1046 (self.device, self.tgt.care_map.to_string_raw(), 1047 self.tgt.TotalSha1(), self.partition)) 1048 1049 def _WriteUpdate(self, script, output_zip): 1050 partition = self.partition 1051 with open(self.path + ".transfer.list", "rb") as f: 1052 ZipWriteStr(output_zip, partition + ".transfer.list", f.read()) 1053 with open(self.path + ".new.dat", "rb") as f: 1054 ZipWriteStr(output_zip, partition + ".new.dat", f.read()) 1055 with open(self.path + ".patch.dat", "rb") as f: 1056 ZipWriteStr(output_zip, partition + ".patch.dat", f.read(), 1057 compression=zipfile.ZIP_STORED) 1058 1059 call = (('block_image_update("%s", ' 1060 'package_extract_file("%s.transfer.list"), ' 1061 '"%s.new.dat", "%s.patch.dat");\n') % 1062 (self.device, partition, partition, partition)) 1063 script.AppendExtra(script._WordWrap(call)) 1064 1065 1066DataImage = blockimgdiff.DataImage 1067 1068 1069# map recovery.fstab's fs_types to mount/format "partition types" 1070PARTITION_TYPES = { "yaffs2": "MTD", "mtd": "MTD", 1071 "ext4": "EMMC", "emmc": "EMMC", 1072 "f2fs": "EMMC" } 1073 1074def GetTypeAndDevice(mount_point, info): 1075 fstab = info["fstab"] 1076 if fstab: 1077 return PARTITION_TYPES[fstab[mount_point].fs_type], fstab[mount_point].device 1078 else: 1079 return None 1080 1081 1082def ParseCertificate(data): 1083 """Parse a PEM-format certificate.""" 1084 cert = [] 1085 save = False 1086 for line in data.split("\n"): 1087 if "--END CERTIFICATE--" in line: 1088 break 1089 if save: 1090 cert.append(line) 1091 if "--BEGIN CERTIFICATE--" in line: 1092 save = True 1093 cert = "".join(cert).decode('base64') 1094 return cert 1095 1096def MakeRecoveryPatch(input_dir, output_sink, recovery_img, boot_img, 1097 info_dict=None): 1098 
"""Generate a binary patch that creates the recovery image starting 1099 with the boot image. (Most of the space in these images is just the 1100 kernel, which is identical for the two, so the resulting patch 1101 should be efficient.) Add it to the output zip, along with a shell 1102 script that is run from init.rc on first boot to actually do the 1103 patching and install the new recovery image. 1104 1105 recovery_img and boot_img should be File objects for the 1106 corresponding images. info should be the dictionary returned by 1107 common.LoadInfoDict() on the input target_files. 1108 """ 1109 1110 if info_dict is None: 1111 info_dict = OPTIONS.info_dict 1112 1113 diff_program = ["imgdiff"] 1114 path = os.path.join(input_dir, "SYSTEM", "etc", "recovery-resource.dat") 1115 if os.path.exists(path): 1116 diff_program.append("-b") 1117 diff_program.append(path) 1118 bonus_args = "-b /system/etc/recovery-resource.dat" 1119 else: 1120 bonus_args = "" 1121 1122 d = Difference(recovery_img, boot_img, diff_program=diff_program) 1123 _, _, patch = d.ComputePatch() 1124 output_sink("recovery-from-boot.p", patch) 1125 1126 td_pair = GetTypeAndDevice("/boot", info_dict) 1127 if not td_pair: 1128 return 1129 boot_type, boot_device = td_pair 1130 td_pair = GetTypeAndDevice("/recovery", info_dict) 1131 if not td_pair: 1132 return 1133 recovery_type, recovery_device = td_pair 1134 1135 sh = """#!/system/bin/sh 1136if ! 
applypatch -c %(recovery_type)s:%(recovery_device)s:%(recovery_size)d:%(recovery_sha1)s; then 1137 applypatch %(bonus_args)s %(boot_type)s:%(boot_device)s:%(boot_size)d:%(boot_sha1)s %(recovery_type)s:%(recovery_device)s %(recovery_sha1)s %(recovery_size)d %(boot_sha1)s:/system/recovery-from-boot.p && log -t recovery "Installing new recovery image: succeeded" || log -t recovery "Installing new recovery image: failed" 1138else 1139 log -t recovery "Recovery image already installed" 1140fi 1141""" % { 'boot_size': boot_img.size, 1142 'boot_sha1': boot_img.sha1, 1143 'recovery_size': recovery_img.size, 1144 'recovery_sha1': recovery_img.sha1, 1145 'boot_type': boot_type, 1146 'boot_device': boot_device, 1147 'recovery_type': recovery_type, 1148 'recovery_device': recovery_device, 1149 'bonus_args': bonus_args, 1150 } 1151 1152 # The install script location moved from /system/etc to /system/bin 1153 # in the L release. Parse the init.rc file to find out where the 1154 # target-files expects it to be, and put it there. 1155 sh_location = "etc/install-recovery.sh" 1156 try: 1157 with open(os.path.join(input_dir, "BOOT", "RAMDISK", "init.rc")) as f: 1158 for line in f: 1159 m = re.match("^service flash_recovery /system/(\S+)\s*$", line) 1160 if m: 1161 sh_location = m.group(1) 1162 print "putting script in", sh_location 1163 break 1164 except (OSError, IOError), e: 1165 print "failed to read init.rc: %s" % (e,) 1166 1167 output_sink(sh_location, sh) 1168