# common.py revision df06e9682642e1ca4863dec315ea3ad3a22766c9
1# Copyright (C) 2008 The Android Open Source Project 2# 3# Licensed under the Apache License, Version 2.0 (the "License"); 4# you may not use this file except in compliance with the License. 5# You may obtain a copy of the License at 6# 7# http://www.apache.org/licenses/LICENSE-2.0 8# 9# Unless required by applicable law or agreed to in writing, software 10# distributed under the License is distributed on an "AS IS" BASIS, 11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12# See the License for the specific language governing permissions and 13# limitations under the License. 14 15import copy 16import errno 17import getopt 18import getpass 19import imp 20import os 21import platform 22import re 23import shlex 24import shutil 25import subprocess 26import sys 27import tempfile 28import threading 29import time 30import zipfile 31 32import blockimgdiff 33import rangelib 34 35from hashlib import sha1 as sha1 36 37 38class Options(object): 39 def __init__(self): 40 platform_search_path = { 41 "linux2": "out/host/linux-x86", 42 "darwin": "out/host/darwin-x86", 43 } 44 45 self.search_path = platform_search_path.get(sys.platform, None) 46 self.signapk_path = "framework/signapk.jar" # Relative to search_path 47 self.extra_signapk_args = [] 48 self.java_path = "java" # Use the one on the path by default. 49 self.java_args = "-Xmx2048m" # JVM Args 50 self.public_key_suffix = ".x509.pem" 51 self.private_key_suffix = ".pk8" 52 # use otatools built boot_signer by default 53 self.boot_signer_path = "boot_signer" 54 self.verbose = False 55 self.tempfiles = [] 56 self.device_specific = None 57 self.extras = {} 58 self.info_dict = None 59 self.worker_threads = None 60 61 62OPTIONS = Options() 63 64 65# Values for "certificate" in apkcerts that mean special things. 
66SPECIAL_CERT_STRINGS = ("PRESIGNED", "EXTERNAL") 67 68 69class ExternalError(RuntimeError): 70 pass 71 72 73def Run(args, **kwargs): 74 """Create and return a subprocess.Popen object, printing the command 75 line on the terminal if -v was specified.""" 76 if OPTIONS.verbose: 77 print " running: ", " ".join(args) 78 return subprocess.Popen(args, **kwargs) 79 80 81def CloseInheritedPipes(): 82 """ Gmake in MAC OS has file descriptor (PIPE) leak. We close those fds 83 before doing other work.""" 84 if platform.system() != "Darwin": 85 return 86 for d in range(3, 1025): 87 try: 88 stat = os.fstat(d) 89 if stat is not None: 90 pipebit = stat[0] & 0x1000 91 if pipebit != 0: 92 os.close(d) 93 except OSError: 94 pass 95 96 97def LoadInfoDict(input_file): 98 """Read and parse the META/misc_info.txt key/value pairs from the 99 input target files and return a dict.""" 100 101 def read_helper(fn): 102 if isinstance(input_file, zipfile.ZipFile): 103 return input_file.read(fn) 104 else: 105 path = os.path.join(input_file, *fn.split("/")) 106 try: 107 with open(path) as f: 108 return f.read() 109 except IOError as e: 110 if e.errno == errno.ENOENT: 111 raise KeyError(fn) 112 d = {} 113 try: 114 d = LoadDictionaryFromLines(read_helper("META/misc_info.txt").split("\n")) 115 except KeyError: 116 # ok if misc_info.txt doesn't exist 117 pass 118 119 # backwards compatibility: These values used to be in their own 120 # files. Look for them, in case we're processing an old 121 # target_files zip. 
122 123 if "mkyaffs2_extra_flags" not in d: 124 try: 125 d["mkyaffs2_extra_flags"] = read_helper( 126 "META/mkyaffs2-extra-flags.txt").strip() 127 except KeyError: 128 # ok if flags don't exist 129 pass 130 131 if "recovery_api_version" not in d: 132 try: 133 d["recovery_api_version"] = read_helper( 134 "META/recovery-api-version.txt").strip() 135 except KeyError: 136 raise ValueError("can't find recovery API version in input target-files") 137 138 if "tool_extensions" not in d: 139 try: 140 d["tool_extensions"] = read_helper("META/tool-extensions.txt").strip() 141 except KeyError: 142 # ok if extensions don't exist 143 pass 144 145 if "fstab_version" not in d: 146 d["fstab_version"] = "1" 147 148 try: 149 data = read_helper("META/imagesizes.txt") 150 for line in data.split("\n"): 151 if not line: 152 continue 153 name, value = line.split(" ", 1) 154 if not value: 155 continue 156 if name == "blocksize": 157 d[name] = value 158 else: 159 d[name + "_size"] = value 160 except KeyError: 161 pass 162 163 def makeint(key): 164 if key in d: 165 d[key] = int(d[key], 0) 166 167 makeint("recovery_api_version") 168 makeint("blocksize") 169 makeint("system_size") 170 makeint("vendor_size") 171 makeint("userdata_size") 172 makeint("cache_size") 173 makeint("recovery_size") 174 makeint("boot_size") 175 makeint("fstab_version") 176 177 d["fstab"] = LoadRecoveryFSTab(read_helper, d["fstab_version"]) 178 d["build.prop"] = LoadBuildProp(read_helper) 179 return d 180 181def LoadBuildProp(read_helper): 182 try: 183 data = read_helper("SYSTEM/build.prop") 184 except KeyError: 185 print "Warning: could not find SYSTEM/build.prop in %s" % zip 186 data = "" 187 return LoadDictionaryFromLines(data.split("\n")) 188 189def LoadDictionaryFromLines(lines): 190 d = {} 191 for line in lines: 192 line = line.strip() 193 if not line or line.startswith("#"): 194 continue 195 if "=" in line: 196 name, value = line.split("=", 1) 197 d[name] = value 198 return d 199 200def 
LoadRecoveryFSTab(read_helper, fstab_version): 201 class Partition(object): 202 def __init__(self, mount_point, fs_type, device, length, device2, context): 203 self.mount_point = mount_point 204 self.fs_type = fs_type 205 self.device = device 206 self.length = length 207 self.device2 = device2 208 self.context = context 209 210 try: 211 data = read_helper("RECOVERY/RAMDISK/etc/recovery.fstab") 212 except KeyError: 213 print "Warning: could not find RECOVERY/RAMDISK/etc/recovery.fstab" 214 data = "" 215 216 if fstab_version == 1: 217 d = {} 218 for line in data.split("\n"): 219 line = line.strip() 220 if not line or line.startswith("#"): 221 continue 222 pieces = line.split() 223 if not 3 <= len(pieces) <= 4: 224 raise ValueError("malformed recovery.fstab line: \"%s\"" % (line,)) 225 options = None 226 if len(pieces) >= 4: 227 if pieces[3].startswith("/"): 228 device2 = pieces[3] 229 if len(pieces) >= 5: 230 options = pieces[4] 231 else: 232 device2 = None 233 options = pieces[3] 234 else: 235 device2 = None 236 237 mount_point = pieces[0] 238 length = 0 239 if options: 240 options = options.split(",") 241 for i in options: 242 if i.startswith("length="): 243 length = int(i[7:]) 244 else: 245 print "%s: unknown option \"%s\"" % (mount_point, i) 246 247 d[mount_point] = Partition(mount_point=mount_point, fs_type=pieces[1], 248 device=pieces[2], length=length, 249 device2=device2) 250 251 elif fstab_version == 2: 252 d = {} 253 for line in data.split("\n"): 254 line = line.strip() 255 if not line or line.startswith("#"): 256 continue 257 # <src> <mnt_point> <type> <mnt_flags and options> <fs_mgr_flags> 258 pieces = line.split() 259 if len(pieces) != 5: 260 raise ValueError("malformed recovery.fstab line: \"%s\"" % (line,)) 261 262 # Ignore entries that are managed by vold 263 options = pieces[4] 264 if "voldmanaged=" in options: 265 continue 266 267 # It's a good line, parse it 268 length = 0 269 options = options.split(",") 270 for i in options: 271 if 
i.startswith("length="): 272 length = int(i[7:]) 273 else: 274 # Ignore all unknown options in the unified fstab 275 continue 276 277 mount_flags = pieces[3] 278 # Honor the SELinux context if present. 279 context = None 280 for i in mount_flags.split(","): 281 if i.startswith("context="): 282 context = i 283 284 mount_point = pieces[1] 285 d[mount_point] = Partition(mount_point=mount_point, fs_type=pieces[2], 286 device=pieces[0], length=length, 287 device2=None, context=context) 288 289 else: 290 raise ValueError("Unknown fstab_version: \"%d\"" % (fstab_version,)) 291 292 return d 293 294 295def DumpInfoDict(d): 296 for k, v in sorted(d.items()): 297 print "%-25s = (%s) %s" % (k, type(v).__name__, v) 298 299 300def BuildBootableImage(sourcedir, fs_config_file, info_dict=None): 301 """Take a kernel, cmdline, and ramdisk directory from the input (in 302 'sourcedir'), and turn them into a boot image. Return the image 303 data, or None if sourcedir does not appear to contains files for 304 building the requested image.""" 305 306 if (not os.access(os.path.join(sourcedir, "RAMDISK"), os.F_OK) or 307 not os.access(os.path.join(sourcedir, "kernel"), os.F_OK)): 308 return None 309 310 if info_dict is None: 311 info_dict = OPTIONS.info_dict 312 313 ramdisk_img = tempfile.NamedTemporaryFile() 314 img = tempfile.NamedTemporaryFile() 315 316 if os.access(fs_config_file, os.F_OK): 317 cmd = ["mkbootfs", "-f", fs_config_file, os.path.join(sourcedir, "RAMDISK")] 318 else: 319 cmd = ["mkbootfs", os.path.join(sourcedir, "RAMDISK")] 320 p1 = Run(cmd, stdout=subprocess.PIPE) 321 p2 = Run(["minigzip"], 322 stdin=p1.stdout, stdout=ramdisk_img.file.fileno()) 323 324 p2.wait() 325 p1.wait() 326 assert p1.returncode == 0, "mkbootfs of %s ramdisk failed" % (sourcedir,) 327 assert p2.returncode == 0, "minigzip of %s ramdisk failed" % (sourcedir,) 328 329 # use MKBOOTIMG from environ, or "mkbootimg" if empty or not set 330 mkbootimg = os.getenv('MKBOOTIMG') or "mkbootimg" 331 332 cmd = 
[mkbootimg, "--kernel", os.path.join(sourcedir, "kernel")] 333 334 fn = os.path.join(sourcedir, "second") 335 if os.access(fn, os.F_OK): 336 cmd.append("--second") 337 cmd.append(fn) 338 339 fn = os.path.join(sourcedir, "cmdline") 340 if os.access(fn, os.F_OK): 341 cmd.append("--cmdline") 342 cmd.append(open(fn).read().rstrip("\n")) 343 344 fn = os.path.join(sourcedir, "base") 345 if os.access(fn, os.F_OK): 346 cmd.append("--base") 347 cmd.append(open(fn).read().rstrip("\n")) 348 349 fn = os.path.join(sourcedir, "pagesize") 350 if os.access(fn, os.F_OK): 351 cmd.append("--pagesize") 352 cmd.append(open(fn).read().rstrip("\n")) 353 354 args = info_dict.get("mkbootimg_args", None) 355 if args and args.strip(): 356 cmd.extend(shlex.split(args)) 357 358 img_unsigned = None 359 if info_dict.get("vboot", None): 360 img_unsigned = tempfile.NamedTemporaryFile() 361 cmd.extend(["--ramdisk", ramdisk_img.name, 362 "--output", img_unsigned.name]) 363 else: 364 cmd.extend(["--ramdisk", ramdisk_img.name, 365 "--output", img.name]) 366 367 p = Run(cmd, stdout=subprocess.PIPE) 368 p.communicate() 369 assert p.returncode == 0, "mkbootimg of %s image failed" % ( 370 os.path.basename(sourcedir),) 371 372 if info_dict.get("verity_key", None): 373 path = "/" + os.path.basename(sourcedir).lower() 374 cmd = [OPTIONS.boot_signer_path, path, img.name, 375 info_dict["verity_key"] + ".pk8", 376 info_dict["verity_key"] + ".x509.pem", img.name] 377 p = Run(cmd, stdout=subprocess.PIPE) 378 p.communicate() 379 assert p.returncode == 0, "boot_signer of %s image failed" % path 380 381 # Sign the image if vboot is non-empty. 
382 elif info_dict.get("vboot", None): 383 path = "/" + os.path.basename(sourcedir).lower() 384 img_keyblock = tempfile.NamedTemporaryFile() 385 cmd = [info_dict["vboot_signer_cmd"], info_dict["futility"], 386 img_unsigned.name, info_dict["vboot_key"] + ".vbpubk", 387 info_dict["vboot_key"] + ".vbprivk", img_keyblock.name, 388 img.name] 389 p = Run(cmd, stdout=subprocess.PIPE) 390 p.communicate() 391 assert p.returncode == 0, "vboot_signer of %s image failed" % path 392 393 # Clean up the temp files. 394 img_unsigned.close() 395 img_keyblock.close() 396 397 img.seek(os.SEEK_SET, 0) 398 data = img.read() 399 400 ramdisk_img.close() 401 img.close() 402 403 return data 404 405 406def GetBootableImage(name, prebuilt_name, unpack_dir, tree_subdir, 407 info_dict=None): 408 """Return a File object (with name 'name') with the desired bootable 409 image. Look for it in 'unpack_dir'/BOOTABLE_IMAGES under the name 410 'prebuilt_name', otherwise look for it under 'unpack_dir'/IMAGES, 411 otherwise construct it from the source files in 412 'unpack_dir'/'tree_subdir'.""" 413 414 prebuilt_path = os.path.join(unpack_dir, "BOOTABLE_IMAGES", prebuilt_name) 415 if os.path.exists(prebuilt_path): 416 print "using prebuilt %s from BOOTABLE_IMAGES..." % (prebuilt_name,) 417 return File.FromLocalFile(name, prebuilt_path) 418 419 prebuilt_path = os.path.join(unpack_dir, "IMAGES", prebuilt_name) 420 if os.path.exists(prebuilt_path): 421 print "using prebuilt %s from IMAGES..." % (prebuilt_name,) 422 return File.FromLocalFile(name, prebuilt_path) 423 424 print "building image from target_files %s..." 
% (tree_subdir,) 425 fs_config = "META/" + tree_subdir.lower() + "_filesystem_config.txt" 426 data = BuildBootableImage(os.path.join(unpack_dir, tree_subdir), 427 os.path.join(unpack_dir, fs_config), 428 info_dict) 429 if data: 430 return File(name, data) 431 return None 432 433 434def UnzipTemp(filename, pattern=None): 435 """Unzip the given archive into a temporary directory and return the name. 436 437 If filename is of the form "foo.zip+bar.zip", unzip foo.zip into a 438 temp dir, then unzip bar.zip into that_dir/BOOTABLE_IMAGES. 439 440 Returns (tempdir, zipobj) where zipobj is a zipfile.ZipFile (of the 441 main file), open for reading. 442 """ 443 444 tmp = tempfile.mkdtemp(prefix="targetfiles-") 445 OPTIONS.tempfiles.append(tmp) 446 447 def unzip_to_dir(filename, dirname): 448 cmd = ["unzip", "-o", "-q", filename, "-d", dirname] 449 if pattern is not None: 450 cmd.append(pattern) 451 p = Run(cmd, stdout=subprocess.PIPE) 452 p.communicate() 453 if p.returncode != 0: 454 raise ExternalError("failed to unzip input target-files \"%s\"" % 455 (filename,)) 456 457 m = re.match(r"^(.*[.]zip)\+(.*[.]zip)$", filename, re.IGNORECASE) 458 if m: 459 unzip_to_dir(m.group(1), tmp) 460 unzip_to_dir(m.group(2), os.path.join(tmp, "BOOTABLE_IMAGES")) 461 filename = m.group(1) 462 else: 463 unzip_to_dir(filename, tmp) 464 465 return tmp, zipfile.ZipFile(filename, "r") 466 467 468def GetKeyPasswords(keylist): 469 """Given a list of keys, prompt the user to enter passwords for 470 those which require them. Return a {key: password} dict. password 471 will be None if the key has no password.""" 472 473 no_passwords = [] 474 need_passwords = [] 475 key_passwords = {} 476 devnull = open("/dev/null", "w+b") 477 for k in sorted(keylist): 478 # We don't need a password for things that aren't really keys. 
479 if k in SPECIAL_CERT_STRINGS: 480 no_passwords.append(k) 481 continue 482 483 p = Run(["openssl", "pkcs8", "-in", k+OPTIONS.private_key_suffix, 484 "-inform", "DER", "-nocrypt"], 485 stdin=devnull.fileno(), 486 stdout=devnull.fileno(), 487 stderr=subprocess.STDOUT) 488 p.communicate() 489 if p.returncode == 0: 490 # Definitely an unencrypted key. 491 no_passwords.append(k) 492 else: 493 p = Run(["openssl", "pkcs8", "-in", k+OPTIONS.private_key_suffix, 494 "-inform", "DER", "-passin", "pass:"], 495 stdin=devnull.fileno(), 496 stdout=devnull.fileno(), 497 stderr=subprocess.PIPE) 498 _, stderr = p.communicate() 499 if p.returncode == 0: 500 # Encrypted key with empty string as password. 501 key_passwords[k] = '' 502 elif stderr.startswith('Error decrypting key'): 503 # Definitely encrypted key. 504 # It would have said "Error reading key" if it didn't parse correctly. 505 need_passwords.append(k) 506 else: 507 # Potentially, a type of key that openssl doesn't understand. 508 # We'll let the routines in signapk.jar handle it. 509 no_passwords.append(k) 510 devnull.close() 511 512 key_passwords.update(PasswordManager().GetPasswords(need_passwords)) 513 key_passwords.update(dict.fromkeys(no_passwords, None)) 514 return key_passwords 515 516 517def SignFile(input_name, output_name, key, password, align=None, 518 whole_file=False): 519 """Sign the input_name zip/jar/apk, producing output_name. Use the 520 given key and password (the latter may be None if the key does not 521 have a password. 522 523 If align is an integer > 1, zipalign is run to align stored files in 524 the output zip on 'align'-byte boundaries. 525 526 If whole_file is true, use the "-w" option to SignApk to embed a 527 signature that covers the whole file in the archive comment of the 528 zip file. 
529 """ 530 531 if align == 0 or align == 1: 532 align = None 533 534 if align: 535 temp = tempfile.NamedTemporaryFile() 536 sign_name = temp.name 537 else: 538 sign_name = output_name 539 540 cmd = [OPTIONS.java_path, OPTIONS.java_args, "-jar", 541 os.path.join(OPTIONS.search_path, OPTIONS.signapk_path)] 542 cmd.extend(OPTIONS.extra_signapk_args) 543 if whole_file: 544 cmd.append("-w") 545 cmd.extend([key + OPTIONS.public_key_suffix, 546 key + OPTIONS.private_key_suffix, 547 input_name, sign_name]) 548 549 p = Run(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE) 550 if password is not None: 551 password += "\n" 552 p.communicate(password) 553 if p.returncode != 0: 554 raise ExternalError("signapk.jar failed: return code %s" % (p.returncode,)) 555 556 if align: 557 p = Run(["zipalign", "-f", "-p", str(align), sign_name, output_name]) 558 p.communicate() 559 if p.returncode != 0: 560 raise ExternalError("zipalign failed: return code %s" % (p.returncode,)) 561 temp.close() 562 563 564def CheckSize(data, target, info_dict): 565 """Check the data string passed against the max size limit, if 566 any, for the given target. Raise exception if the data is too big. 
567 Print a warning if the data is nearing the maximum size.""" 568 569 if target.endswith(".img"): 570 target = target[:-4] 571 mount_point = "/" + target 572 573 fs_type = None 574 limit = None 575 if info_dict["fstab"]: 576 if mount_point == "/userdata": 577 mount_point = "/data" 578 p = info_dict["fstab"][mount_point] 579 fs_type = p.fs_type 580 device = p.device 581 if "/" in device: 582 device = device[device.rfind("/")+1:] 583 limit = info_dict.get(device + "_size", None) 584 if not fs_type or not limit: 585 return 586 587 if fs_type == "yaffs2": 588 # image size should be increased by 1/64th to account for the 589 # spare area (64 bytes per 2k page) 590 limit = limit / 2048 * (2048+64) 591 size = len(data) 592 pct = float(size) * 100.0 / limit 593 msg = "%s size (%d) is %.2f%% of limit (%d)" % (target, size, pct, limit) 594 if pct >= 99.0: 595 raise ExternalError(msg) 596 elif pct >= 95.0: 597 print 598 print " WARNING: ", msg 599 print 600 elif OPTIONS.verbose: 601 print " ", msg 602 603 604def ReadApkCerts(tf_zip): 605 """Given a target_files ZipFile, parse the META/apkcerts.txt file 606 and return a {package: cert} dict.""" 607 certmap = {} 608 for line in tf_zip.read("META/apkcerts.txt").split("\n"): 609 line = line.strip() 610 if not line: 611 continue 612 m = re.match(r'^name="(.*)"\s+certificate="(.*)"\s+' 613 r'private_key="(.*)"$', line) 614 if m: 615 name, cert, privkey = m.groups() 616 public_key_suffix_len = len(OPTIONS.public_key_suffix) 617 private_key_suffix_len = len(OPTIONS.private_key_suffix) 618 if cert in SPECIAL_CERT_STRINGS and not privkey: 619 certmap[name] = cert 620 elif (cert.endswith(OPTIONS.public_key_suffix) and 621 privkey.endswith(OPTIONS.private_key_suffix) and 622 cert[:-public_key_suffix_len] == privkey[:-private_key_suffix_len]): 623 certmap[name] = cert[:-public_key_suffix_len] 624 else: 625 raise ValueError("failed to parse line from apkcerts.txt:\n" + line) 626 return certmap 627 628 629COMMON_DOCSTRING = """ 630 -p 
(--path) <dir> 631 Prepend <dir>/bin to the list of places to search for binaries 632 run by this script, and expect to find jars in <dir>/framework. 633 634 -s (--device_specific) <file> 635 Path to the python module containing device-specific 636 releasetools code. 637 638 -x (--extra) <key=value> 639 Add a key/value pair to the 'extras' dict, which device-specific 640 extension code may look at. 641 642 -v (--verbose) 643 Show command lines being executed. 644 645 -h (--help) 646 Display this usage message and exit. 647""" 648 649def Usage(docstring): 650 print docstring.rstrip("\n") 651 print COMMON_DOCSTRING 652 653 654def ParseOptions(argv, 655 docstring, 656 extra_opts="", extra_long_opts=(), 657 extra_option_handler=None): 658 """Parse the options in argv and return any arguments that aren't 659 flags. docstring is the calling module's docstring, to be displayed 660 for errors and -h. extra_opts and extra_long_opts are for flags 661 defined by the caller, which are processed by passing them to 662 extra_option_handler.""" 663 664 try: 665 opts, args = getopt.getopt( 666 argv, "hvp:s:x:" + extra_opts, 667 ["help", "verbose", "path=", "signapk_path=", "extra_signapk_args=", 668 "java_path=", "java_args=", "public_key_suffix=", 669 "private_key_suffix=", "boot_signer_path=", "device_specific=", 670 "extra="] + 671 list(extra_long_opts)) 672 except getopt.GetoptError as err: 673 Usage(docstring) 674 print "**", str(err), "**" 675 sys.exit(2) 676 677 for o, a in opts: 678 if o in ("-h", "--help"): 679 Usage(docstring) 680 sys.exit() 681 elif o in ("-v", "--verbose"): 682 OPTIONS.verbose = True 683 elif o in ("-p", "--path"): 684 OPTIONS.search_path = a 685 elif o in ("--signapk_path",): 686 OPTIONS.signapk_path = a 687 elif o in ("--extra_signapk_args",): 688 OPTIONS.extra_signapk_args = shlex.split(a) 689 elif o in ("--java_path",): 690 OPTIONS.java_path = a 691 elif o in ("--java_args",): 692 OPTIONS.java_args = a 693 elif o in ("--public_key_suffix",): 694 
OPTIONS.public_key_suffix = a 695 elif o in ("--private_key_suffix",): 696 OPTIONS.private_key_suffix = a 697 elif o in ("--boot_signer_path",): 698 OPTIONS.boot_signer_path = a 699 elif o in ("-s", "--device_specific"): 700 OPTIONS.device_specific = a 701 elif o in ("-x", "--extra"): 702 key, value = a.split("=", 1) 703 OPTIONS.extras[key] = value 704 else: 705 if extra_option_handler is None or not extra_option_handler(o, a): 706 assert False, "unknown option \"%s\"" % (o,) 707 708 if OPTIONS.search_path: 709 os.environ["PATH"] = (os.path.join(OPTIONS.search_path, "bin") + 710 os.pathsep + os.environ["PATH"]) 711 712 return args 713 714 715def MakeTempFile(prefix=None, suffix=None): 716 """Make a temp file and add it to the list of things to be deleted 717 when Cleanup() is called. Return the filename.""" 718 fd, fn = tempfile.mkstemp(prefix=prefix, suffix=suffix) 719 os.close(fd) 720 OPTIONS.tempfiles.append(fn) 721 return fn 722 723 724def Cleanup(): 725 for i in OPTIONS.tempfiles: 726 if os.path.isdir(i): 727 shutil.rmtree(i) 728 else: 729 os.remove(i) 730 731 732class PasswordManager(object): 733 def __init__(self): 734 self.editor = os.getenv("EDITOR", None) 735 self.pwfile = os.getenv("ANDROID_PW_FILE", None) 736 737 def GetPasswords(self, items): 738 """Get passwords corresponding to each string in 'items', 739 returning a dict. (The dict may have keys in addition to the 740 values in 'items'.) 741 742 Uses the passwords in $ANDROID_PW_FILE if available, letting the 743 user edit that file to add more needed passwords. If no editor is 744 available, or $ANDROID_PW_FILE isn't define, prompts the user 745 interactively in the ordinary way. 746 """ 747 748 current = self.ReadFile() 749 750 first = True 751 while True: 752 missing = [] 753 for i in items: 754 if i not in current or not current[i]: 755 missing.append(i) 756 # Are all the passwords already in the file? 
757 if not missing: 758 return current 759 760 for i in missing: 761 current[i] = "" 762 763 if not first: 764 print "key file %s still missing some passwords." % (self.pwfile,) 765 answer = raw_input("try to edit again? [y]> ").strip() 766 if answer and answer[0] not in 'yY': 767 raise RuntimeError("key passwords unavailable") 768 first = False 769 770 current = self.UpdateAndReadFile(current) 771 772 def PromptResult(self, current): # pylint: disable=no-self-use 773 """Prompt the user to enter a value (password) for each key in 774 'current' whose value is fales. Returns a new dict with all the 775 values. 776 """ 777 result = {} 778 for k, v in sorted(current.iteritems()): 779 if v: 780 result[k] = v 781 else: 782 while True: 783 result[k] = getpass.getpass( 784 "Enter password for %s key> " % k).strip() 785 if result[k]: 786 break 787 return result 788 789 def UpdateAndReadFile(self, current): 790 if not self.editor or not self.pwfile: 791 return self.PromptResult(current) 792 793 f = open(self.pwfile, "w") 794 os.chmod(self.pwfile, 0o600) 795 f.write("# Enter key passwords between the [[[ ]]] brackets.\n") 796 f.write("# (Additional spaces are harmless.)\n\n") 797 798 first_line = None 799 sorted_list = sorted([(not v, k, v) for (k, v) in current.iteritems()]) 800 for i, (_, k, v) in enumerate(sorted_list): 801 f.write("[[[ %s ]]] %s\n" % (v, k)) 802 if not v and first_line is None: 803 # position cursor on first line with no password. 
804 first_line = i + 4 805 f.close() 806 807 p = Run([self.editor, "+%d" % (first_line,), self.pwfile]) 808 _, _ = p.communicate() 809 810 return self.ReadFile() 811 812 def ReadFile(self): 813 result = {} 814 if self.pwfile is None: 815 return result 816 try: 817 f = open(self.pwfile, "r") 818 for line in f: 819 line = line.strip() 820 if not line or line[0] == '#': 821 continue 822 m = re.match(r"^\[\[\[\s*(.*?)\s*\]\]\]\s*(\S+)$", line) 823 if not m: 824 print "failed to parse password file: ", line 825 else: 826 result[m.group(2)] = m.group(1) 827 f.close() 828 except IOError as e: 829 if e.errno != errno.ENOENT: 830 print "error reading password file: ", str(e) 831 return result 832 833 834def ZipWrite(zip_file, filename, arcname=None, perms=0o644, 835 compress_type=None): 836 import datetime 837 838 # http://b/18015246 839 # Python 2.7's zipfile implementation wrongly thinks that zip64 is required 840 # for files larger than 2GiB. We can work around this by adjusting their 841 # limit. Note that `zipfile.writestr()` will not work for strings larger than 842 # 2GiB. The Python interpreter sometimes rejects strings that large (though 843 # it isn't clear to me exactly what circumstances cause this). 844 # `zipfile.write()` must be used directly to work around this. 845 # 846 # This mess can be avoided if we port to python3. 847 saved_zip64_limit = zipfile.ZIP64_LIMIT 848 zipfile.ZIP64_LIMIT = (1 << 32) - 1 849 850 if compress_type is None: 851 compress_type = zip_file.compression 852 if arcname is None: 853 arcname = filename 854 855 saved_stat = os.stat(filename) 856 857 try: 858 # `zipfile.write()` doesn't allow us to pass ZipInfo, so just modify the 859 # file to be zipped and reset it when we're done. 860 os.chmod(filename, perms) 861 862 # Use a fixed timestamp so the output is repeatable. 
863 epoch = datetime.datetime.fromtimestamp(0) 864 timestamp = (datetime.datetime(2009, 1, 1) - epoch).total_seconds() 865 os.utime(filename, (timestamp, timestamp)) 866 867 zip_file.write(filename, arcname=arcname, compress_type=compress_type) 868 finally: 869 os.chmod(filename, saved_stat.st_mode) 870 os.utime(filename, (saved_stat.st_atime, saved_stat.st_mtime)) 871 zipfile.ZIP64_LIMIT = saved_zip64_limit 872 873 874def ZipWriteStr(zip_file, zinfo_or_arcname, data, perms=None, 875 compress_type=None): 876 """Wrap zipfile.writestr() function to work around the zip64 limit. 877 878 Even with the ZIP64_LIMIT workaround, it won't allow writing a string 879 longer than 2GiB. It gives 'OverflowError: size does not fit in an int' 880 when calling crc32(bytes). 881 882 But it still works fine to write a shorter string into a large zip file. 883 We should use ZipWrite() whenever possible, and only use ZipWriteStr() 884 when we know the string won't be too long. 885 """ 886 887 saved_zip64_limit = zipfile.ZIP64_LIMIT 888 zipfile.ZIP64_LIMIT = (1 << 32) - 1 889 890 if not isinstance(zinfo_or_arcname, zipfile.ZipInfo): 891 zinfo = zipfile.ZipInfo(filename=zinfo_or_arcname) 892 zinfo.compress_type = zip_file.compression 893 if perms is None: 894 perms = 0o644 895 else: 896 zinfo = zinfo_or_arcname 897 898 # If compress_type is given, it overrides the value in zinfo. 899 if compress_type is not None: 900 zinfo.compress_type = compress_type 901 902 # If perms is given, it has a priority. 903 if perms is not None: 904 zinfo.external_attr = perms << 16 905 906 # Use a fixed timestamp so the output is repeatable. 907 zinfo.date_time = (2009, 1, 1, 0, 0, 0) 908 909 zip_file.writestr(zinfo, data) 910 zipfile.ZIP64_LIMIT = saved_zip64_limit 911 912 913def ZipClose(zip_file): 914 # http://b/18015246 915 # zipfile also refers to ZIP64_LIMIT during close() when it writes out the 916 # central directory. 
917 saved_zip64_limit = zipfile.ZIP64_LIMIT 918 zipfile.ZIP64_LIMIT = (1 << 32) - 1 919 920 zip_file.close() 921 922 zipfile.ZIP64_LIMIT = saved_zip64_limit 923 924 925class DeviceSpecificParams(object): 926 module = None 927 def __init__(self, **kwargs): 928 """Keyword arguments to the constructor become attributes of this 929 object, which is passed to all functions in the device-specific 930 module.""" 931 for k, v in kwargs.iteritems(): 932 setattr(self, k, v) 933 self.extras = OPTIONS.extras 934 935 if self.module is None: 936 path = OPTIONS.device_specific 937 if not path: 938 return 939 try: 940 if os.path.isdir(path): 941 info = imp.find_module("releasetools", [path]) 942 else: 943 d, f = os.path.split(path) 944 b, x = os.path.splitext(f) 945 if x == ".py": 946 f = b 947 info = imp.find_module(f, [d]) 948 print "loaded device-specific extensions from", path 949 self.module = imp.load_module("device_specific", *info) 950 except ImportError: 951 print "unable to load device-specific module; assuming none" 952 953 def _DoCall(self, function_name, *args, **kwargs): 954 """Call the named function in the device-specific module, passing 955 the given args and kwargs. The first argument to the call will be 956 the DeviceSpecific object itself. If there is no module, or the 957 module does not define the function, return the value of the 958 'default' kwarg (which itself defaults to None).""" 959 if self.module is None or not hasattr(self.module, function_name): 960 return kwargs.get("default", None) 961 return getattr(self.module, function_name)(*((self,) + args), **kwargs) 962 963 def FullOTA_Assertions(self): 964 """Called after emitting the block of assertions at the top of a 965 full OTA package. 
Implementations can add whatever additional 966 assertions they like.""" 967 return self._DoCall("FullOTA_Assertions") 968 969 def FullOTA_InstallBegin(self): 970 """Called at the start of full OTA installation.""" 971 return self._DoCall("FullOTA_InstallBegin") 972 973 def FullOTA_InstallEnd(self): 974 """Called at the end of full OTA installation; typically this is 975 used to install the image for the device's baseband processor.""" 976 return self._DoCall("FullOTA_InstallEnd") 977 978 def IncrementalOTA_Assertions(self): 979 """Called after emitting the block of assertions at the top of an 980 incremental OTA package. Implementations can add whatever 981 additional assertions they like.""" 982 return self._DoCall("IncrementalOTA_Assertions") 983 984 def IncrementalOTA_VerifyBegin(self): 985 """Called at the start of the verification phase of incremental 986 OTA installation; additional checks can be placed here to abort 987 the script before any changes are made.""" 988 return self._DoCall("IncrementalOTA_VerifyBegin") 989 990 def IncrementalOTA_VerifyEnd(self): 991 """Called at the end of the verification phase of incremental OTA 992 installation; additional checks can be placed here to abort the 993 script before any changes are made.""" 994 return self._DoCall("IncrementalOTA_VerifyEnd") 995 996 def IncrementalOTA_InstallBegin(self): 997 """Called at the start of incremental OTA installation (after 998 verification is complete).""" 999 return self._DoCall("IncrementalOTA_InstallBegin") 1000 1001 def IncrementalOTA_InstallEnd(self): 1002 """Called at the end of incremental OTA installation; typically 1003 this is used to install the image for the device's baseband 1004 processor.""" 1005 return self._DoCall("IncrementalOTA_InstallEnd") 1006 1007class File(object): 1008 def __init__(self, name, data): 1009 self.name = name 1010 self.data = data 1011 self.size = len(data) 1012 self.sha1 = sha1(data).hexdigest() 1013 1014 @classmethod 1015 def FromLocalFile(cls, 
name, diskname): 1016 f = open(diskname, "rb") 1017 data = f.read() 1018 f.close() 1019 return File(name, data) 1020 1021 def WriteToTemp(self): 1022 t = tempfile.NamedTemporaryFile() 1023 t.write(self.data) 1024 t.flush() 1025 return t 1026 1027 def AddToZip(self, z, compression=None): 1028 ZipWriteStr(z, self.name, self.data, compress_type=compression) 1029 1030DIFF_PROGRAM_BY_EXT = { 1031 ".gz" : "imgdiff", 1032 ".zip" : ["imgdiff", "-z"], 1033 ".jar" : ["imgdiff", "-z"], 1034 ".apk" : ["imgdiff", "-z"], 1035 ".img" : "imgdiff", 1036 } 1037 1038class Difference(object): 1039 def __init__(self, tf, sf, diff_program=None): 1040 self.tf = tf 1041 self.sf = sf 1042 self.patch = None 1043 self.diff_program = diff_program 1044 1045 def ComputePatch(self): 1046 """Compute the patch (as a string of data) needed to turn sf into 1047 tf. Returns the same tuple as GetPatch().""" 1048 1049 tf = self.tf 1050 sf = self.sf 1051 1052 if self.diff_program: 1053 diff_program = self.diff_program 1054 else: 1055 ext = os.path.splitext(tf.name)[1] 1056 diff_program = DIFF_PROGRAM_BY_EXT.get(ext, "bsdiff") 1057 1058 ttemp = tf.WriteToTemp() 1059 stemp = sf.WriteToTemp() 1060 1061 ext = os.path.splitext(tf.name)[1] 1062 1063 try: 1064 ptemp = tempfile.NamedTemporaryFile() 1065 if isinstance(diff_program, list): 1066 cmd = copy.copy(diff_program) 1067 else: 1068 cmd = [diff_program] 1069 cmd.append(stemp.name) 1070 cmd.append(ttemp.name) 1071 cmd.append(ptemp.name) 1072 p = Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 1073 err = [] 1074 def run(): 1075 _, e = p.communicate() 1076 if e: 1077 err.append(e) 1078 th = threading.Thread(target=run) 1079 th.start() 1080 th.join(timeout=300) # 5 mins 1081 if th.is_alive(): 1082 print "WARNING: diff command timed out" 1083 p.terminate() 1084 th.join(5) 1085 if th.is_alive(): 1086 p.kill() 1087 th.join() 1088 1089 if err or p.returncode != 0: 1090 print "WARNING: failure running %s:\n%s\n" % ( 1091 diff_program, "".join(err)) 1092 
self.patch = None 1093 return None, None, None 1094 diff = ptemp.read() 1095 finally: 1096 ptemp.close() 1097 stemp.close() 1098 ttemp.close() 1099 1100 self.patch = diff 1101 return self.tf, self.sf, self.patch 1102 1103 1104 def GetPatch(self): 1105 """Return a tuple (target_file, source_file, patch_data). 1106 patch_data may be None if ComputePatch hasn't been called, or if 1107 computing the patch failed.""" 1108 return self.tf, self.sf, self.patch 1109 1110 1111def ComputeDifferences(diffs): 1112 """Call ComputePatch on all the Difference objects in 'diffs'.""" 1113 print len(diffs), "diffs to compute" 1114 1115 # Do the largest files first, to try and reduce the long-pole effect. 1116 by_size = [(i.tf.size, i) for i in diffs] 1117 by_size.sort(reverse=True) 1118 by_size = [i[1] for i in by_size] 1119 1120 lock = threading.Lock() 1121 diff_iter = iter(by_size) # accessed under lock 1122 1123 def worker(): 1124 try: 1125 lock.acquire() 1126 for d in diff_iter: 1127 lock.release() 1128 start = time.time() 1129 d.ComputePatch() 1130 dur = time.time() - start 1131 lock.acquire() 1132 1133 tf, sf, patch = d.GetPatch() 1134 if sf.name == tf.name: 1135 name = tf.name 1136 else: 1137 name = "%s (%s)" % (tf.name, sf.name) 1138 if patch is None: 1139 print "patching failed! %s" % (name,) 1140 else: 1141 print "%8.2f sec %8d / %8d bytes (%6.2f%%) %s" % ( 1142 dur, len(patch), tf.size, 100.0 * len(patch) / tf.size, name) 1143 lock.release() 1144 except Exception as e: 1145 print e 1146 raise 1147 1148 # start worker threads; wait for them all to finish. 
  threads = [threading.Thread(target=worker)
             for i in range(OPTIONS.worker_threads)]
  for th in threads:
    th.start()
  while threads:
    threads.pop().join()


class BlockDifference(object):
  """Computes a block-based diff for one partition (via blockimgdiff)
  and emits the edify script fragments that verify and apply it."""

  def __init__(self, partition, tgt, src=None, check_first_block=False,
               version=None):
    # tgt/src are sparse-image objects; src=None means a full
    # (unconditional) update rather than an incremental one.
    self.tgt = tgt
    self.src = src
    self.partition = partition
    self.check_first_block = check_first_block

    # Due to http://b/20939131, check_first_block is disabled temporarily.
    assert not self.check_first_block

    # Transfer-list version: explicit argument wins; otherwise use the
    # highest version the target build advertises, defaulting to 1.
    if version is None:
      version = 1
      if OPTIONS.info_dict:
        version = max(
            int(i) for i in
            OPTIONS.info_dict.get("blockimgdiff_versions", "1").split(","))
    self.version = version

    # Compute the transfer list / new / patch data files into a temp
    # dir that OPTIONS.tempfiles tracks for later cleanup.
    b = blockimgdiff.BlockImageDiff(tgt, src, threads=OPTIONS.worker_threads,
                                    version=self.version)
    tmpdir = tempfile.mkdtemp()
    OPTIONS.tempfiles.append(tmpdir)
    self.path = os.path.join(tmpdir, partition)
    b.Compute(self.path)

    # Block device path for this partition, from the target fstab.
    _, self.device = GetTypeAndDevice("/" + partition, OPTIONS.info_dict)

  def WriteScript(self, script, output_zip, progress=None):
    """Emit the install-phase script: progress, update, post-verify."""
    if not self.src:
      # write the output unconditionally
      script.Print("Patching %s image unconditionally..." % (self.partition,))
    else:
      script.Print("Patching %s image after verification." % (self.partition,))

    if progress:
      script.ShowProgress(progress, 0)
    self._WriteUpdate(script, output_zip)
    self._WritePostInstallVerifyScript(script)

  def WriteVerifyScript(self, script):
    """Emit the pre-install verification script for an incremental
    update (a no-op message for full updates)."""
    partition = self.partition
    if not self.src:
      script.Print("Image %s will be patched unconditionally."
                   % (partition,))
    else:
      # Pre-install check ignores clobbered blocks (they are rewritten
      # regardless), hence the subtraction from the care map.
      ranges = self.src.care_map.subtract(self.src.clobbered_blocks)
      ranges_str = ranges.to_string_raw()
      if self.version >= 3:
        # v3+: also accept a partially-applied update that
        # block_image_verify can resume.
        script.AppendExtra(('if (range_sha1("%s", "%s") == "%s" || '
                            'block_image_verify("%s", '
                            'package_extract_file("%s.transfer.list"), '
                            '"%s.new.dat", "%s.patch.dat")) then') % (
                            self.device, ranges_str, self.src.TotalSha1(),
                            self.device, partition, partition, partition))
      else:
        script.AppendExtra('if range_sha1("%s", "%s") == "%s" then' % (
                           self.device, ranges_str, self.src.TotalSha1()))
      script.Print('Verified %s image...' % (partition,))
      script.AppendExtra('else')

      # When generating incrementals for the system and vendor partitions,
      # explicitly check the first block (which contains the superblock) of
      # the partition to see if it's what we expect. If this check fails,
      # give an explicit log message about the partition having been
      # remounted R/W (the most likely explanation) and the need to flash to
      # get OTAs working again.
      if self.check_first_block:
        self._CheckFirstBlock(script)

      # Abort the OTA update. Note that the incremental OTA cannot be applied
      # even if it may match the checksum of the target partition.
      # a) If version < 3, operations like move and erase will make changes
      #    unconditionally and damage the partition.
      # b) If version >= 3, it won't even reach here.
      script.AppendExtra(('abort("%s partition has unexpected contents");\n'
                          'endif;') % (partition,))

  def _WritePostInstallVerifyScript(self, script):
    """Emit a script that re-hashes the partition after the update and
    aborts if it does not match the expected target image."""
    partition = self.partition
    script.Print('Verifying the updated %s image...' % (partition,))
    # Unlike pre-install verification, clobbered_blocks should not be ignored.
1238 ranges = self.tgt.care_map 1239 ranges_str = ranges.to_string_raw() 1240 script.AppendExtra('if range_sha1("%s", "%s") == "%s" then' % ( 1241 self.device, ranges_str, 1242 self.tgt.TotalSha1(include_clobbered_blocks=True))) 1243 script.Print('Verified the updated %s image.' % (partition,)) 1244 script.AppendExtra( 1245 'else\n' 1246 ' abort("%s partition has unexpected contents after OTA update");\n' 1247 'endif;' % (partition,)) 1248 1249 def _WriteUpdate(self, script, output_zip): 1250 ZipWrite(output_zip, 1251 '{}.transfer.list'.format(self.path), 1252 '{}.transfer.list'.format(self.partition)) 1253 ZipWrite(output_zip, 1254 '{}.new.dat'.format(self.path), 1255 '{}.new.dat'.format(self.partition)) 1256 ZipWrite(output_zip, 1257 '{}.patch.dat'.format(self.path), 1258 '{}.patch.dat'.format(self.partition), 1259 compress_type=zipfile.ZIP_STORED) 1260 1261 call = ('block_image_update("{device}", ' 1262 'package_extract_file("{partition}.transfer.list"), ' 1263 '"{partition}.new.dat", "{partition}.patch.dat");\n'.format( 1264 device=self.device, partition=self.partition)) 1265 script.AppendExtra(script.WordWrap(call)) 1266 1267 def _HashBlocks(self, source, ranges): # pylint: disable=no-self-use 1268 data = source.ReadRangeSet(ranges) 1269 ctx = sha1() 1270 1271 for p in data: 1272 ctx.update(p) 1273 1274 return ctx.hexdigest() 1275 1276 # TODO(tbao): Due to http://b/20939131, block 0 may be changed without 1277 # remounting R/W. Will change the checking to a finer-grained way to 1278 # mask off those bits. 
1279 def _CheckFirstBlock(self, script): 1280 r = rangelib.RangeSet((0, 1)) 1281 srchash = self._HashBlocks(self.src, r) 1282 1283 script.AppendExtra(('(range_sha1("%s", "%s") == "%s") || ' 1284 'abort("%s has been remounted R/W; ' 1285 'reflash device to reenable OTA updates");') 1286 % (self.device, r.to_string_raw(), srchash, 1287 self.device)) 1288 1289DataImage = blockimgdiff.DataImage 1290 1291 1292# map recovery.fstab's fs_types to mount/format "partition types" 1293PARTITION_TYPES = { 1294 "yaffs2": "MTD", 1295 "mtd": "MTD", 1296 "ext4": "EMMC", 1297 "emmc": "EMMC", 1298 "f2fs": "EMMC", 1299 "squashfs": "EMMC" 1300} 1301 1302def GetTypeAndDevice(mount_point, info): 1303 fstab = info["fstab"] 1304 if fstab: 1305 return (PARTITION_TYPES[fstab[mount_point].fs_type], 1306 fstab[mount_point].device) 1307 else: 1308 raise KeyError 1309 1310 1311def ParseCertificate(data): 1312 """Parse a PEM-format certificate.""" 1313 cert = [] 1314 save = False 1315 for line in data.split("\n"): 1316 if "--END CERTIFICATE--" in line: 1317 break 1318 if save: 1319 cert.append(line) 1320 if "--BEGIN CERTIFICATE--" in line: 1321 save = True 1322 cert = "".join(cert).decode('base64') 1323 return cert 1324 1325def MakeRecoveryPatch(input_dir, output_sink, recovery_img, boot_img, 1326 info_dict=None): 1327 """Generate a binary patch that creates the recovery image starting 1328 with the boot image. (Most of the space in these images is just the 1329 kernel, which is identical for the two, so the resulting patch 1330 should be efficient.) Add it to the output zip, along with a shell 1331 script that is run from init.rc on first boot to actually do the 1332 patching and install the new recovery image. 1333 1334 recovery_img and boot_img should be File objects for the 1335 corresponding images. info should be the dictionary returned by 1336 common.LoadInfoDict() on the input target_files. 
1337 """ 1338 1339 if info_dict is None: 1340 info_dict = OPTIONS.info_dict 1341 1342 diff_program = ["imgdiff"] 1343 path = os.path.join(input_dir, "SYSTEM", "etc", "recovery-resource.dat") 1344 if os.path.exists(path): 1345 diff_program.append("-b") 1346 diff_program.append(path) 1347 bonus_args = "-b /system/etc/recovery-resource.dat" 1348 else: 1349 bonus_args = "" 1350 1351 d = Difference(recovery_img, boot_img, diff_program=diff_program) 1352 _, _, patch = d.ComputePatch() 1353 output_sink("recovery-from-boot.p", patch) 1354 1355 try: 1356 boot_type, boot_device = GetTypeAndDevice("/boot", info_dict) 1357 recovery_type, recovery_device = GetTypeAndDevice("/recovery", info_dict) 1358 except KeyError: 1359 return 1360 1361 sh = """#!/system/bin/sh 1362if ! applypatch -c %(recovery_type)s:%(recovery_device)s:%(recovery_size)d:%(recovery_sha1)s; then 1363 applypatch %(bonus_args)s %(boot_type)s:%(boot_device)s:%(boot_size)d:%(boot_sha1)s %(recovery_type)s:%(recovery_device)s %(recovery_sha1)s %(recovery_size)d %(boot_sha1)s:/system/recovery-from-boot.p && log -t recovery "Installing new recovery image: succeeded" || log -t recovery "Installing new recovery image: failed" 1364else 1365 log -t recovery "Recovery image already installed" 1366fi 1367""" % {'boot_size': boot_img.size, 1368 'boot_sha1': boot_img.sha1, 1369 'recovery_size': recovery_img.size, 1370 'recovery_sha1': recovery_img.sha1, 1371 'boot_type': boot_type, 1372 'boot_device': boot_device, 1373 'recovery_type': recovery_type, 1374 'recovery_device': recovery_device, 1375 'bonus_args': bonus_args} 1376 1377 # The install script location moved from /system/etc to /system/bin 1378 # in the L release. Parse the init.rc file to find out where the 1379 # target-files expects it to be, and put it there. 
1380 sh_location = "etc/install-recovery.sh" 1381 try: 1382 with open(os.path.join(input_dir, "BOOT", "RAMDISK", "init.rc")) as f: 1383 for line in f: 1384 m = re.match(r"^service flash_recovery /system/(\S+)\s*$", line) 1385 if m: 1386 sh_location = m.group(1) 1387 print "putting script in", sh_location 1388 break 1389 except (OSError, IOError) as e: 1390 print "failed to read init.rc: %s" % (e,) 1391 1392 output_sink(sh_location, sh) 1393