common.py revision b11d2c5dd687aac12ee187a07f1d392193fe4d9c
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import copy
import errno
import getopt
import getpass
import imp
import os
import platform
import re
import shlex
import shutil
import subprocess
import sys
import tempfile
import threading
import time
import zipfile

import blockimgdiff
import rangelib

from hashlib import sha1 as sha1


class Options(object):
  def __init__(self):
    platform_search_path = {
        "linux2": "out/host/linux-x86",
        "darwin": "out/host/darwin-x86",
    }

    self.search_path = platform_search_path.get(sys.platform, None)
    self.signapk_path = "framework/signapk.jar"  # Relative to search_path
    self.extra_signapk_args = []
    self.java_path = "java"  # Use the one on the path by default.
    self.java_args = "-Xmx2048m" # JVM Args
    self.public_key_suffix = ".x509.pem"
    self.private_key_suffix = ".pk8"
    # use otatools built boot_signer by default
    self.boot_signer_path = "boot_signer"
    self.boot_signer_args = []
    self.verity_signer_path = None
    self.verity_signer_args = []
    self.verbose = False
    self.tempfiles = []
    self.device_specific = None
    self.extras = {}
    self.info_dict = None
    self.source_info_dict = None
    self.target_info_dict = None
    self.worker_threads = None
    # Stash size cannot exceed cache_size * threshold.
    self.cache_size = None
    self.stash_threshold = 0.8


OPTIONS = Options()


# Values for "certificate" in apkcerts that mean special things.
SPECIAL_CERT_STRINGS = ("PRESIGNED", "EXTERNAL")


class ExternalError(RuntimeError):
  pass


def Run(args, **kwargs):
  """Create and return a subprocess.Popen object, printing the command
  line on the terminal if -v was specified."""
  if OPTIONS.verbose:
    print "  running: ", " ".join(args)
  return subprocess.Popen(args, **kwargs)

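# A minimal usage sketch for Run(), assuming a hypothetical command and file
# name: any extra keyword arguments are forwarded unchanged to
# subprocess.Popen, so output capture works the usual way.
#
#   p = Run(["unzip", "-l", "target.zip"], stdout=subprocess.PIPE)
#   stdout, _ = p.communicate()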

def CloseInheritedPipes():
90  """ Gmake in MAC OS has file descriptor (PIPE) leak. We close those fds
91  before doing other work."""
  if platform.system() != "Darwin":
    return
  for d in range(3, 1025):
    try:
      stat = os.fstat(d)
      if stat is not None:
        pipebit = stat[0] & 0x1000
        if pipebit != 0:
          os.close(d)
    except OSError:
      pass


def LoadInfoDict(input_file):
  """Read and parse the META/misc_info.txt key/value pairs from the
  input target files and return a dict."""

  def read_helper(fn):
    if isinstance(input_file, zipfile.ZipFile):
      return input_file.read(fn)
    else:
      path = os.path.join(input_file, *fn.split("/"))
      try:
        with open(path) as f:
          return f.read()
      except IOError as e:
        if e.errno == errno.ENOENT:
          raise KeyError(fn)
  d = {}
  try:
    d = LoadDictionaryFromLines(read_helper("META/misc_info.txt").split("\n"))
  except KeyError:
    # ok if misc_info.txt doesn't exist
    pass

  # backwards compatibility: These values used to be in their own
  # files.  Look for them, in case we're processing an old
  # target_files zip.

  if "mkyaffs2_extra_flags" not in d:
    try:
      d["mkyaffs2_extra_flags"] = read_helper(
          "META/mkyaffs2-extra-flags.txt").strip()
    except KeyError:
      # ok if flags don't exist
      pass

  if "recovery_api_version" not in d:
    try:
      d["recovery_api_version"] = read_helper(
          "META/recovery-api-version.txt").strip()
    except KeyError:
      raise ValueError("can't find recovery API version in input target-files")

  if "tool_extensions" not in d:
    try:
      d["tool_extensions"] = read_helper("META/tool-extensions.txt").strip()
    except KeyError:
      # ok if extensions don't exist
      pass

  if "fstab_version" not in d:
    d["fstab_version"] = "1"

  try:
    data = read_helper("META/imagesizes.txt")
    for line in data.split("\n"):
      if not line:
        continue
      name, value = line.split(" ", 1)
      if not value:
        continue
      if name == "blocksize":
        d[name] = value
      else:
        d[name + "_size"] = value
  except KeyError:
    pass

  def makeint(key):
    if key in d:
      d[key] = int(d[key], 0)

  makeint("recovery_api_version")
  makeint("blocksize")
  makeint("system_size")
  makeint("vendor_size")
  makeint("userdata_size")
  makeint("cache_size")
  makeint("recovery_size")
  makeint("boot_size")
  makeint("fstab_version")

  d["fstab"] = LoadRecoveryFSTab(read_helper, d["fstab_version"],
                                 d.get("system_root_image", False))
  d["build.prop"] = LoadBuildProp(read_helper)
  return d

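# A minimal sketch of how LoadInfoDict() is typically consumed (the input may
# be either an unzipped target-files directory or an open zipfile.ZipFile);
# the variable names here are hypothetical.
#
#   OPTIONS.info_dict = LoadInfoDict(input_dir_or_zip)
#   api_version = OPTIONS.info_dict["recovery_api_version"]  # already an int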

def LoadBuildProp(read_helper):
  try:
    data = read_helper("SYSTEM/build.prop")
  except KeyError:
194    print "Warning: could not find SYSTEM/build.prop in %s" % zip
    data = ""
  return LoadDictionaryFromLines(data.split("\n"))

def LoadDictionaryFromLines(lines):
  d = {}
  for line in lines:
    line = line.strip()
    if not line or line.startswith("#"):
      continue
    if "=" in line:
      name, value = line.split("=", 1)
      d[name] = value
  return d

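# A small worked example for LoadDictionaryFromLines() (hypothetical input):
# blank lines and "#" comments are skipped, and only the first "=" splits.
#
#   LoadDictionaryFromLines(["ro.build.id=XYZ", "# comment", "a=b=c"])
#   => {"ro.build.id": "XYZ", "a": "b=c"}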

def LoadRecoveryFSTab(read_helper, fstab_version, system_root_image=False):
  class Partition(object):
    def __init__(self, mount_point, fs_type, device, length, device2, context):
      self.mount_point = mount_point
      self.fs_type = fs_type
      self.device = device
      self.length = length
      self.device2 = device2
      self.context = context

  try:
    data = read_helper("RECOVERY/RAMDISK/etc/recovery.fstab")
  except KeyError:
    print "Warning: could not find RECOVERY/RAMDISK/etc/recovery.fstab"
    data = ""

  if fstab_version == 1:
    d = {}
    for line in data.split("\n"):
      line = line.strip()
      if not line or line.startswith("#"):
        continue
      pieces = line.split()
      if not 3 <= len(pieces) <= 4:
        raise ValueError("malformed recovery.fstab line: \"%s\"" % (line,))
      options = None
      if len(pieces) >= 4:
        if pieces[3].startswith("/"):
          device2 = pieces[3]
          if len(pieces) >= 5:
            options = pieces[4]
        else:
          device2 = None
          options = pieces[3]
      else:
        device2 = None

      mount_point = pieces[0]
      length = 0
      if options:
        options = options.split(",")
        for i in options:
          if i.startswith("length="):
            length = int(i[7:])
          else:
            print "%s: unknown option \"%s\"" % (mount_point, i)

      d[mount_point] = Partition(mount_point=mount_point, fs_type=pieces[1],
                                 device=pieces[2], length=length,
                                 device2=device2, context=None)

  elif fstab_version == 2:
    d = {}
    for line in data.split("\n"):
      line = line.strip()
      if not line or line.startswith("#"):
        continue
      # <src> <mnt_point> <type> <mnt_flags and options> <fs_mgr_flags>
      pieces = line.split()
      if len(pieces) != 5:
        raise ValueError("malformed recovery.fstab line: \"%s\"" % (line,))

      # Ignore entries that are managed by vold
      options = pieces[4]
      if "voldmanaged=" in options:
        continue

      # It's a good line, parse it
      length = 0
      options = options.split(",")
      for i in options:
        if i.startswith("length="):
          length = int(i[7:])
        else:
          # Ignore all unknown options in the unified fstab
          continue

      mount_flags = pieces[3]
      # Honor the SELinux context if present.
      context = None
      for i in mount_flags.split(","):
        if i.startswith("context="):
          context = i

      mount_point = pieces[1]
      d[mount_point] = Partition(mount_point=mount_point, fs_type=pieces[2],
                                 device=pieces[0], length=length,
                                 device2=None, context=context)

  else:
    raise ValueError("Unknown fstab_version: \"%d\"" % (fstab_version,))

  # / is used for the system mount point when the root directory is included in
  # system. Other areas assume system is always at "/system" so point /system
  # at /.
  if system_root_image:
    assert not d.has_key("/system") and d.has_key("/")
    d["/system"] = d["/"]
  return d

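# A hypothetical fstab_version 2 line and how the loop above parses it:
#
#   /dev/block/platform/soc.0/by-name/system /system ext4 ro,barrier=1 wait
#
# yields Partition(mount_point="/system", fs_type="ext4",
# device="/dev/block/platform/soc.0/by-name/system", length=0, device2=None,
# context=None).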

def DumpInfoDict(d):
  for k, v in sorted(d.items()):
    print "%-25s = (%s) %s" % (k, type(v).__name__, v)


def _BuildBootableImage(sourcedir, fs_config_file, info_dict=None,
                        has_ramdisk=False):
  """Build a bootable image from the specified sourcedir.

  Take a kernel, cmdline, and optionally a ramdisk directory from the input (in
  'sourcedir'), and turn them into a boot image.  Return the image data, or
  None if sourcedir does not appear to contain the files needed to build the
  requested image."""

  def make_ramdisk():
    ramdisk_img = tempfile.NamedTemporaryFile()

    if os.access(fs_config_file, os.F_OK):
      cmd = ["mkbootfs", "-f", fs_config_file,
             os.path.join(sourcedir, "RAMDISK")]
    else:
      cmd = ["mkbootfs", os.path.join(sourcedir, "RAMDISK")]
    p1 = Run(cmd, stdout=subprocess.PIPE)
    p2 = Run(["minigzip"], stdin=p1.stdout, stdout=ramdisk_img.file.fileno())

    p2.wait()
    p1.wait()
    assert p1.returncode == 0, "mkbootfs of %s ramdisk failed" % (sourcedir,)
    assert p2.returncode == 0, "minigzip of %s ramdisk failed" % (sourcedir,)

    return ramdisk_img

  if not os.access(os.path.join(sourcedir, "kernel"), os.F_OK):
    return None

  if has_ramdisk and not os.access(os.path.join(sourcedir, "RAMDISK"), os.F_OK):
    return None

  if info_dict is None:
    info_dict = OPTIONS.info_dict

  img = tempfile.NamedTemporaryFile()

  if has_ramdisk:
    ramdisk_img = make_ramdisk()

  # use MKBOOTIMG from environ, or "mkbootimg" if empty or not set
  mkbootimg = os.getenv('MKBOOTIMG') or "mkbootimg"

  cmd = [mkbootimg, "--kernel", os.path.join(sourcedir, "kernel")]

  fn = os.path.join(sourcedir, "second")
  if os.access(fn, os.F_OK):
    cmd.append("--second")
    cmd.append(fn)

  fn = os.path.join(sourcedir, "cmdline")
  if os.access(fn, os.F_OK):
    cmd.append("--cmdline")
    cmd.append(open(fn).read().rstrip("\n"))

  fn = os.path.join(sourcedir, "base")
  if os.access(fn, os.F_OK):
    cmd.append("--base")
    cmd.append(open(fn).read().rstrip("\n"))

  fn = os.path.join(sourcedir, "pagesize")
  if os.access(fn, os.F_OK):
    cmd.append("--pagesize")
    cmd.append(open(fn).read().rstrip("\n"))

  args = info_dict.get("mkbootimg_args", None)
  if args and args.strip():
    cmd.extend(shlex.split(args))

  if has_ramdisk:
    cmd.extend(["--ramdisk", ramdisk_img.name])

  img_unsigned = None
  if info_dict.get("vboot", None):
    img_unsigned = tempfile.NamedTemporaryFile()
    cmd.extend(["--output", img_unsigned.name])
  else:
    cmd.extend(["--output", img.name])

  p = Run(cmd, stdout=subprocess.PIPE)
  p.communicate()
  assert p.returncode == 0, "mkbootimg of %s image failed" % (
      os.path.basename(sourcedir),)

  if (info_dict.get("boot_signer", None) == "true" and
      info_dict.get("verity_key", None)):
    path = "/" + os.path.basename(sourcedir).lower()
    cmd = [OPTIONS.boot_signer_path]
    cmd.extend(OPTIONS.boot_signer_args)
    cmd.extend([path, img.name,
                info_dict["verity_key"] + ".pk8",
                info_dict["verity_key"] + ".x509.pem", img.name])
    p = Run(cmd, stdout=subprocess.PIPE)
    p.communicate()
    assert p.returncode == 0, "boot_signer of %s image failed" % path

  # Sign the image if vboot is non-empty.
  elif info_dict.get("vboot", None):
    path = "/" + os.path.basename(sourcedir).lower()
    img_keyblock = tempfile.NamedTemporaryFile()
    cmd = [info_dict["vboot_signer_cmd"], info_dict["futility"],
           img_unsigned.name, info_dict["vboot_key"] + ".vbpubk",
           info_dict["vboot_key"] + ".vbprivk",
           info_dict["vboot_subkey"] + ".vbprivk",
           img_keyblock.name,
           img.name]
    p = Run(cmd, stdout=subprocess.PIPE)
    p.communicate()
    assert p.returncode == 0, "vboot_signer of %s image failed" % path

    # Clean up the temp files.
    img_unsigned.close()
    img_keyblock.close()

  img.seek(0, os.SEEK_SET)
  data = img.read()

  if has_ramdisk:
    ramdisk_img.close()
  img.close()

  return data


def GetBootableImage(name, prebuilt_name, unpack_dir, tree_subdir,
                     info_dict=None):
  """Return a File object with the desired bootable image.

  Look for it in 'unpack_dir'/BOOTABLE_IMAGES under the name 'prebuilt_name',
  otherwise look for it under 'unpack_dir'/IMAGES, otherwise construct it from
  the source files in 'unpack_dir'/'tree_subdir'."""

  prebuilt_path = os.path.join(unpack_dir, "BOOTABLE_IMAGES", prebuilt_name)
  if os.path.exists(prebuilt_path):
    print "using prebuilt %s from BOOTABLE_IMAGES..." % (prebuilt_name,)
    return File.FromLocalFile(name, prebuilt_path)

  prebuilt_path = os.path.join(unpack_dir, "IMAGES", prebuilt_name)
  if os.path.exists(prebuilt_path):
    print "using prebuilt %s from IMAGES..." % (prebuilt_name,)
    return File.FromLocalFile(name, prebuilt_path)

  print "building image from target_files %s..." % (tree_subdir,)

  if info_dict is None:
    info_dict = OPTIONS.info_dict

  # With system_root_image == "true", we don't pack ramdisk into the boot image.
  has_ramdisk = (info_dict.get("system_root_image", None) != "true" or
                 prebuilt_name != "boot.img")

  fs_config = "META/" + tree_subdir.lower() + "_filesystem_config.txt"
  data = _BuildBootableImage(os.path.join(unpack_dir, tree_subdir),
                             os.path.join(unpack_dir, fs_config),
                             info_dict, has_ramdisk)
  if data:
    return File(name, data)
  return None


def UnzipTemp(filename, pattern=None):
477  """Unzip the given archive into a temporary directory and return the name.
478
479  If filename is of the form "foo.zip+bar.zip", unzip foo.zip into a
480  temp dir, then unzip bar.zip into that_dir/BOOTABLE_IMAGES.
481
482  Returns (tempdir, zipobj) where zipobj is a zipfile.ZipFile (of the
483  main file), open for reading.
484  """

  tmp = tempfile.mkdtemp(prefix="targetfiles-")
  OPTIONS.tempfiles.append(tmp)

  def unzip_to_dir(filename, dirname):
    cmd = ["unzip", "-o", "-q", filename, "-d", dirname]
    if pattern is not None:
      cmd.append(pattern)
    p = Run(cmd, stdout=subprocess.PIPE)
    p.communicate()
    if p.returncode != 0:
      raise ExternalError("failed to unzip input target-files \"%s\"" %
                          (filename,))

  m = re.match(r"^(.*[.]zip)\+(.*[.]zip)$", filename, re.IGNORECASE)
  if m:
    unzip_to_dir(m.group(1), tmp)
    unzip_to_dir(m.group(2), os.path.join(tmp, "BOOTABLE_IMAGES"))
    filename = m.group(1)
  else:
    unzip_to_dir(filename, tmp)

  return tmp, zipfile.ZipFile(filename, "r")


def GetKeyPasswords(keylist):
  """Given a list of keys, prompt the user to enter passwords for
  those which require them.  Return a {key: password} dict.  password
  will be None if the key has no password."""

  no_passwords = []
  need_passwords = []
  key_passwords = {}
  devnull = open("/dev/null", "w+b")
  for k in sorted(keylist):
    # We don't need a password for things that aren't really keys.
    if k in SPECIAL_CERT_STRINGS:
      no_passwords.append(k)
      continue

    p = Run(["openssl", "pkcs8", "-in", k+OPTIONS.private_key_suffix,
             "-inform", "DER", "-nocrypt"],
            stdin=devnull.fileno(),
            stdout=devnull.fileno(),
            stderr=subprocess.STDOUT)
    p.communicate()
    if p.returncode == 0:
      # Definitely an unencrypted key.
      no_passwords.append(k)
    else:
      p = Run(["openssl", "pkcs8", "-in", k+OPTIONS.private_key_suffix,
               "-inform", "DER", "-passin", "pass:"],
              stdin=devnull.fileno(),
              stdout=devnull.fileno(),
              stderr=subprocess.PIPE)
      _, stderr = p.communicate()
      if p.returncode == 0:
        # Encrypted key with empty string as password.
        key_passwords[k] = ''
      elif stderr.startswith('Error decrypting key'):
        # Definitely encrypted key.
        # It would have said "Error reading key" if it didn't parse correctly.
        need_passwords.append(k)
      else:
        # Potentially, a type of key that openssl doesn't understand.
        # We'll let the routines in signapk.jar handle it.
        no_passwords.append(k)
  devnull.close()

  key_passwords.update(PasswordManager().GetPasswords(need_passwords))
  key_passwords.update(dict.fromkeys(no_passwords, None))
  return key_passwords


def SignFile(input_name, output_name, key, password, align=None,
             whole_file=False):
  """Sign the input_name zip/jar/apk, producing output_name.  Use the
  given key and password (the latter may be None if the key does not
  have a password.)

  If align is an integer > 1, zipalign is run to align stored files in
  the output zip on 'align'-byte boundaries.

  If whole_file is true, use the "-w" option to SignApk to embed a
  signature that covers the whole file in the archive comment of the
  zip file.
  """

  if align == 0 or align == 1:
    align = None

  if align:
    temp = tempfile.NamedTemporaryFile()
    sign_name = temp.name
  else:
    sign_name = output_name

  cmd = [OPTIONS.java_path, OPTIONS.java_args, "-jar",
         os.path.join(OPTIONS.search_path, OPTIONS.signapk_path)]
  cmd.extend(OPTIONS.extra_signapk_args)
  if whole_file:
    cmd.append("-w")
  cmd.extend([key + OPTIONS.public_key_suffix,
              key + OPTIONS.private_key_suffix,
              input_name, sign_name])

  p = Run(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
  if password is not None:
    password += "\n"
  p.communicate(password)
  if p.returncode != 0:
    raise ExternalError("signapk.jar failed: return code %s" % (p.returncode,))

  if align:
    p = Run(["zipalign", "-f", "-p", str(align), sign_name, output_name])
    p.communicate()
    if p.returncode != 0:
      raise ExternalError("zipalign failed: return code %s" % (p.returncode,))
    temp.close()

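# A minimal sketch of signing with SignFile(), assuming hypothetical file
# names; the password normally comes from GetKeyPasswords() and may be None
# for keys without one.
#
#   passwords = GetKeyPasswords([key])
#   SignFile("unsigned.zip", "signed.zip", key, passwords[key], whole_file=True)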

def CheckSize(data, target, info_dict):
  """Check the data string passed against the max size limit, if
  any, for the given target.  Raise exception if the data is too big.
  Print a warning if the data is nearing the maximum size."""

  if target.endswith(".img"):
    target = target[:-4]
  mount_point = "/" + target

  fs_type = None
  limit = None
  if info_dict["fstab"]:
    if mount_point == "/userdata":
      mount_point = "/data"
    p = info_dict["fstab"][mount_point]
    fs_type = p.fs_type
    device = p.device
    if "/" in device:
      device = device[device.rfind("/")+1:]
    limit = info_dict.get(device + "_size", None)
  if not fs_type or not limit:
    return

  if fs_type == "yaffs2":
    # image size should be increased by 1/64th to account for the
    # spare area (64 bytes per 2k page)
    limit = limit / 2048 * (2048+64)
  size = len(data)
  pct = float(size) * 100.0 / limit
  msg = "%s size (%d) is %.2f%% of limit (%d)" % (target, size, pct, limit)
  if pct >= 99.0:
    raise ExternalError(msg)
  elif pct >= 95.0:
    print
    print "  WARNING: ", msg
    print
  elif OPTIONS.verbose:
    print "  ", msg


def ReadApkCerts(tf_zip):
  """Given a target_files ZipFile, parse the META/apkcerts.txt file
  and return a {package: cert} dict."""
  certmap = {}
  for line in tf_zip.read("META/apkcerts.txt").split("\n"):
    line = line.strip()
    if not line:
      continue
    m = re.match(r'^name="(.*)"\s+certificate="(.*)"\s+'
                 r'private_key="(.*)"$', line)
    if m:
      name, cert, privkey = m.groups()
      public_key_suffix_len = len(OPTIONS.public_key_suffix)
      private_key_suffix_len = len(OPTIONS.private_key_suffix)
      if cert in SPECIAL_CERT_STRINGS and not privkey:
        certmap[name] = cert
      elif (cert.endswith(OPTIONS.public_key_suffix) and
            privkey.endswith(OPTIONS.private_key_suffix) and
            cert[:-public_key_suffix_len] == privkey[:-private_key_suffix_len]):
        certmap[name] = cert[:-public_key_suffix_len]
      else:
        raise ValueError("failed to parse line from apkcerts.txt:\n" + line)
  return certmap


COMMON_DOCSTRING = """
  -p  (--path)  <dir>
      Prepend <dir>/bin to the list of places to search for binaries
      run by this script, and expect to find jars in <dir>/framework.

  -s  (--device_specific) <file>
      Path to the python module containing device-specific
      releasetools code.

  -x  (--extra)  <key=value>
      Add a key/value pair to the 'extras' dict, which device-specific
      extension code may look at.

  -v  (--verbose)
      Show command lines being executed.

  -h  (--help)
      Display this usage message and exit.
"""

def Usage(docstring):
  print docstring.rstrip("\n")
  print COMMON_DOCSTRING


def ParseOptions(argv,
                 docstring,
                 extra_opts="", extra_long_opts=(),
                 extra_option_handler=None):
  """Parse the options in argv and return any arguments that aren't
  flags.  docstring is the calling module's docstring, to be displayed
  for errors and -h.  extra_opts and extra_long_opts are for flags
  defined by the caller, which are processed by passing them to
  extra_option_handler."""

  try:
    opts, args = getopt.getopt(
        argv, "hvp:s:x:" + extra_opts,
        ["help", "verbose", "path=", "signapk_path=", "extra_signapk_args=",
         "java_path=", "java_args=", "public_key_suffix=",
         "private_key_suffix=", "boot_signer_path=", "boot_signer_args=",
         "verity_signer_path=", "verity_signer_args=", "device_specific=",
         "extra="] +
        list(extra_long_opts))
  except getopt.GetoptError as err:
    Usage(docstring)
    print "**", str(err), "**"
    sys.exit(2)

  for o, a in opts:
    if o in ("-h", "--help"):
      Usage(docstring)
      sys.exit()
    elif o in ("-v", "--verbose"):
      OPTIONS.verbose = True
    elif o in ("-p", "--path"):
      OPTIONS.search_path = a
    elif o in ("--signapk_path",):
      OPTIONS.signapk_path = a
    elif o in ("--extra_signapk_args",):
      OPTIONS.extra_signapk_args = shlex.split(a)
    elif o in ("--java_path",):
      OPTIONS.java_path = a
    elif o in ("--java_args",):
      OPTIONS.java_args = a
    elif o in ("--public_key_suffix",):
      OPTIONS.public_key_suffix = a
    elif o in ("--private_key_suffix",):
      OPTIONS.private_key_suffix = a
    elif o in ("--boot_signer_path",):
      OPTIONS.boot_signer_path = a
    elif o in ("--boot_signer_args",):
      OPTIONS.boot_signer_args = shlex.split(a)
    elif o in ("--verity_signer_path",):
      OPTIONS.verity_signer_path = a
    elif o in ("--verity_signer_args",):
      OPTIONS.verity_signer_args = shlex.split(a)
    elif o in ("-s", "--device_specific"):
      OPTIONS.device_specific = a
    elif o in ("-x", "--extra"):
      key, value = a.split("=", 1)
      OPTIONS.extras[key] = value
    else:
      if extra_option_handler is None or not extra_option_handler(o, a):
        assert False, "unknown option \"%s\"" % (o,)

  if OPTIONS.search_path:
    os.environ["PATH"] = (os.path.join(OPTIONS.search_path, "bin") +
                          os.pathsep + os.environ["PATH"])

  return args

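# A sketch of how a caller can hook its own flags into ParseOptions() via
# extra_option_handler; the flag and handler below are hypothetical.
#
#   def option_handler(o, a):
#     if o in ("-b", "--board_config"):
#       OPTIONS.extras["board_config"] = a
#       return True
#     return False
#
#   args = ParseOptions(sys.argv[1:], __doc__, extra_opts="b:",
#                       extra_long_opts=["board_config="],
#                       extra_option_handler=option_handler)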

def MakeTempFile(prefix=None, suffix=None):
  """Make a temp file and add it to the list of things to be deleted
  when Cleanup() is called.  Return the filename."""
  fd, fn = tempfile.mkstemp(prefix=prefix, suffix=suffix)
  os.close(fd)
  OPTIONS.tempfiles.append(fn)
  return fn


def Cleanup():
  for i in OPTIONS.tempfiles:
    if os.path.isdir(i):
      shutil.rmtree(i)
    else:
      os.remove(i)


class PasswordManager(object):
  def __init__(self):
    self.editor = os.getenv("EDITOR", None)
    self.pwfile = os.getenv("ANDROID_PW_FILE", None)

  def GetPasswords(self, items):
    """Get passwords corresponding to each string in 'items',
    returning a dict.  (The dict may have keys in addition to the
    values in 'items'.)

    Uses the passwords in $ANDROID_PW_FILE if available, letting the
    user edit that file to add more needed passwords.  If no editor is
    available, or $ANDROID_PW_FILE isn't defined, prompts the user
    interactively in the ordinary way.
    """

    current = self.ReadFile()

    first = True
    while True:
      missing = []
      for i in items:
        if i not in current or not current[i]:
          missing.append(i)
      # Are all the passwords already in the file?
      if not missing:
        return current

      for i in missing:
        current[i] = ""

      if not first:
        print "key file %s still missing some passwords." % (self.pwfile,)
        answer = raw_input("try to edit again? [y]> ").strip()
        if answer and answer[0] not in 'yY':
          raise RuntimeError("key passwords unavailable")
      first = False

      current = self.UpdateAndReadFile(current)

  def PromptResult(self, current): # pylint: disable=no-self-use
    """Prompt the user to enter a value (password) for each key in
    'current' whose value is false.  Returns a new dict with all the
    values.
    """
    result = {}
    for k, v in sorted(current.iteritems()):
      if v:
        result[k] = v
      else:
        while True:
          result[k] = getpass.getpass(
              "Enter password for %s key> " % k).strip()
          if result[k]:
            break
    return result

  def UpdateAndReadFile(self, current):
    if not self.editor or not self.pwfile:
      return self.PromptResult(current)

    f = open(self.pwfile, "w")
    os.chmod(self.pwfile, 0o600)
    f.write("# Enter key passwords between the [[[ ]]] brackets.\n")
    f.write("# (Additional spaces are harmless.)\n\n")

    first_line = None
    sorted_list = sorted([(not v, k, v) for (k, v) in current.iteritems()])
    for i, (_, k, v) in enumerate(sorted_list):
      f.write("[[[  %s  ]]] %s\n" % (v, k))
      if not v and first_line is None:
        # position cursor on first line with no password.
        first_line = i + 4
    f.close()

    p = Run([self.editor, "+%d" % (first_line,), self.pwfile])
    _, _ = p.communicate()

    return self.ReadFile()

  def ReadFile(self):
    result = {}
    if self.pwfile is None:
      return result
    try:
      f = open(self.pwfile, "r")
      for line in f:
        line = line.strip()
        if not line or line[0] == '#':
          continue
        m = re.match(r"^\[\[\[\s*(.*?)\s*\]\]\]\s*(\S+)$", line)
        if not m:
          print "failed to parse password file: ", line
        else:
          result[m.group(2)] = m.group(1)
      f.close()
    except IOError as e:
      if e.errno != errno.ENOENT:
        print "error reading password file: ", str(e)
    return result


def ZipWrite(zip_file, filename, arcname=None, perms=0o644,
             compress_type=None):
  import datetime

  # http://b/18015246
  # Python 2.7's zipfile implementation wrongly thinks that zip64 is required
  # for files larger than 2GiB. We can work around this by adjusting their
  # limit. Note that `zipfile.writestr()` will not work for strings larger than
  # 2GiB. The Python interpreter sometimes rejects strings that large (though
  # it isn't clear to me exactly what circumstances cause this).
  # `zipfile.write()` must be used directly to work around this.
  #
  # This mess can be avoided if we port to python3.
  saved_zip64_limit = zipfile.ZIP64_LIMIT
  zipfile.ZIP64_LIMIT = (1 << 32) - 1

  if compress_type is None:
    compress_type = zip_file.compression
  if arcname is None:
    arcname = filename

  saved_stat = os.stat(filename)

  try:
    # `zipfile.write()` doesn't allow us to pass ZipInfo, so just modify the
    # file to be zipped and reset it when we're done.
    os.chmod(filename, perms)

    # Use a fixed timestamp so the output is repeatable.
    epoch = datetime.datetime.fromtimestamp(0)
    timestamp = (datetime.datetime(2009, 1, 1) - epoch).total_seconds()
    os.utime(filename, (timestamp, timestamp))

    zip_file.write(filename, arcname=arcname, compress_type=compress_type)
  finally:
    os.chmod(filename, saved_stat.st_mode)
    os.utime(filename, (saved_stat.st_atime, saved_stat.st_mtime))
    zipfile.ZIP64_LIMIT = saved_zip64_limit

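# A minimal ZipWrite() sketch (hypothetical paths): the file lands in the
# archive under arcname, with 0o644 permissions and a fixed 2009-01-01
# timestamp so repeated builds produce identical zips.
#
#   ZipWrite(output_zip, "/tmp/build.prop", arcname="SYSTEM/build.prop")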

def ZipWriteStr(zip_file, zinfo_or_arcname, data, perms=None,
                compress_type=None):
  """Wrap zipfile.writestr() function to work around the zip64 limit.

  Even with the ZIP64_LIMIT workaround, it won't allow writing a string
  longer than 2GiB. It gives 'OverflowError: size does not fit in an int'
  when calling crc32(bytes).

  But it still works fine to write a shorter string into a large zip file.
  We should use ZipWrite() whenever possible, and only use ZipWriteStr()
  when we know the string won't be too long.
  """

  saved_zip64_limit = zipfile.ZIP64_LIMIT
  zipfile.ZIP64_LIMIT = (1 << 32) - 1

  if not isinstance(zinfo_or_arcname, zipfile.ZipInfo):
    zinfo = zipfile.ZipInfo(filename=zinfo_or_arcname)
    zinfo.compress_type = zip_file.compression
    if perms is None:
      perms = 0o644
  else:
    zinfo = zinfo_or_arcname

  # If compress_type is given, it overrides the value in zinfo.
  if compress_type is not None:
    zinfo.compress_type = compress_type

  # If perms is given, it has a priority.
  if perms is not None:
    zinfo.external_attr = perms << 16

  # Use a fixed timestamp so the output is repeatable.
  zinfo.date_time = (2009, 1, 1, 0, 0, 0)

  zip_file.writestr(zinfo, data)
  zipfile.ZIP64_LIMIT = saved_zip64_limit


def ZipClose(zip_file):
  # http://b/18015246
  # zipfile also refers to ZIP64_LIMIT during close() when it writes out the
  # central directory.
  saved_zip64_limit = zipfile.ZIP64_LIMIT
  zipfile.ZIP64_LIMIT = (1 << 32) - 1

  zip_file.close()

  zipfile.ZIP64_LIMIT = saved_zip64_limit


class DeviceSpecificParams(object):
  module = None
  def __init__(self, **kwargs):
    """Keyword arguments to the constructor become attributes of this
    object, which is passed to all functions in the device-specific
    module."""
    for k, v in kwargs.iteritems():
      setattr(self, k, v)
    self.extras = OPTIONS.extras

    if self.module is None:
      path = OPTIONS.device_specific
      if not path:
        return
      try:
        if os.path.isdir(path):
          info = imp.find_module("releasetools", [path])
        else:
          d, f = os.path.split(path)
          b, x = os.path.splitext(f)
          if x == ".py":
            f = b
          info = imp.find_module(f, [d])
        print "loaded device-specific extensions from", path
        self.module = imp.load_module("device_specific", *info)
      except ImportError:
        print "unable to load device-specific module; assuming none"

  def _DoCall(self, function_name, *args, **kwargs):
    """Call the named function in the device-specific module, passing
    the given args and kwargs.  The first argument to the call will be
    the DeviceSpecific object itself.  If there is no module, or the
    module does not define the function, return the value of the
    'default' kwarg (which itself defaults to None)."""
    if self.module is None or not hasattr(self.module, function_name):
      return kwargs.get("default", None)
    return getattr(self.module, function_name)(*((self,) + args), **kwargs)

  def FullOTA_Assertions(self):
    """Called after emitting the block of assertions at the top of a
    full OTA package.  Implementations can add whatever additional
    assertions they like."""
    return self._DoCall("FullOTA_Assertions")

  def FullOTA_InstallBegin(self):
    """Called at the start of full OTA installation."""
    return self._DoCall("FullOTA_InstallBegin")

  def FullOTA_InstallEnd(self):
    """Called at the end of full OTA installation; typically this is
    used to install the image for the device's baseband processor."""
    return self._DoCall("FullOTA_InstallEnd")

  def IncrementalOTA_Assertions(self):
    """Called after emitting the block of assertions at the top of an
    incremental OTA package.  Implementations can add whatever
    additional assertions they like."""
    return self._DoCall("IncrementalOTA_Assertions")

  def IncrementalOTA_VerifyBegin(self):
    """Called at the start of the verification phase of incremental
    OTA installation; additional checks can be placed here to abort
    the script before any changes are made."""
    return self._DoCall("IncrementalOTA_VerifyBegin")

  def IncrementalOTA_VerifyEnd(self):
    """Called at the end of the verification phase of incremental OTA
    installation; additional checks can be placed here to abort the
    script before any changes are made."""
    return self._DoCall("IncrementalOTA_VerifyEnd")

  def IncrementalOTA_InstallBegin(self):
    """Called at the start of incremental OTA installation (after
    verification is complete)."""
    return self._DoCall("IncrementalOTA_InstallBegin")

  def IncrementalOTA_InstallEnd(self):
    """Called at the end of incremental OTA installation; typically
    this is used to install the image for the device's baseband
    processor."""
    return self._DoCall("IncrementalOTA_InstallEnd")

class File(object):
  def __init__(self, name, data):
    self.name = name
    self.data = data
    self.size = len(data)
    self.sha1 = sha1(data).hexdigest()

  @classmethod
  def FromLocalFile(cls, name, diskname):
    f = open(diskname, "rb")
    data = f.read()
    f.close()
    return File(name, data)

  def WriteToTemp(self):
    t = tempfile.NamedTemporaryFile()
    t.write(self.data)
    t.flush()
    return t

  def AddToZip(self, z, compression=None):
    ZipWriteStr(z, self.name, self.data, compress_type=compression)

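# A small sketch of the File helper (hypothetical paths): it keeps the raw
# bytes plus the size/sha1 bookkeeping that the OTA code relies on.
#
#   boot = File.FromLocalFile("boot.img", "/tmp/boot.img")
#   print boot.size, boot.sha1
#   boot.AddToZip(output_zip)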

DIFF_PROGRAM_BY_EXT = {
    ".gz" : "imgdiff",
    ".zip" : ["imgdiff", "-z"],
    ".jar" : ["imgdiff", "-z"],
    ".apk" : ["imgdiff", "-z"],
    ".img" : "imgdiff",
    }

class Difference(object):
  def __init__(self, tf, sf, diff_program=None):
    self.tf = tf
    self.sf = sf
    self.patch = None
    self.diff_program = diff_program

  def ComputePatch(self):
    """Compute the patch (as a string of data) needed to turn sf into
    tf.  Returns the same tuple as GetPatch()."""

    tf = self.tf
    sf = self.sf

    if self.diff_program:
      diff_program = self.diff_program
    else:
      ext = os.path.splitext(tf.name)[1]
      diff_program = DIFF_PROGRAM_BY_EXT.get(ext, "bsdiff")

    ttemp = tf.WriteToTemp()
    stemp = sf.WriteToTemp()

    ext = os.path.splitext(tf.name)[1]

    try:
      ptemp = tempfile.NamedTemporaryFile()
      if isinstance(diff_program, list):
        cmd = copy.copy(diff_program)
      else:
        cmd = [diff_program]
      cmd.append(stemp.name)
      cmd.append(ttemp.name)
      cmd.append(ptemp.name)
      p = Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
      err = []
      def run():
        _, e = p.communicate()
        if e:
          err.append(e)
      th = threading.Thread(target=run)
      th.start()
      th.join(timeout=300)   # 5 mins
      if th.is_alive():
        print "WARNING: diff command timed out"
        p.terminate()
        th.join(5)
        if th.is_alive():
          p.kill()
          th.join()

      if err or p.returncode != 0:
        print "WARNING: failure running %s:\n%s\n" % (
            diff_program, "".join(err))
        self.patch = None
        return None, None, None
      diff = ptemp.read()
    finally:
      ptemp.close()
      stemp.close()
      ttemp.close()

    self.patch = diff
    return self.tf, self.sf, self.patch


  def GetPatch(self):
    """Return a tuple (target_file, source_file, patch_data).
    patch_data may be None if ComputePatch hasn't been called, or if
    computing the patch failed."""
    return self.tf, self.sf, self.patch


def ComputeDifferences(diffs):
  """Call ComputePatch on all the Difference objects in 'diffs'."""
  print len(diffs), "diffs to compute"

  # Do the largest files first, to try and reduce the long-pole effect.
  by_size = [(i.tf.size, i) for i in diffs]
  by_size.sort(reverse=True)
  by_size = [i[1] for i in by_size]

  lock = threading.Lock()
  diff_iter = iter(by_size)   # accessed under lock

  def worker():
    try:
      lock.acquire()
      for d in diff_iter:
        lock.release()
        start = time.time()
        d.ComputePatch()
        dur = time.time() - start
        lock.acquire()

        tf, sf, patch = d.GetPatch()
        if sf.name == tf.name:
          name = tf.name
        else:
          name = "%s (%s)" % (tf.name, sf.name)
        if patch is None:
          print "patching failed!                                  %s" % (name,)
        else:
          print "%8.2f sec %8d / %8d bytes (%6.2f%%) %s" % (
              dur, len(patch), tf.size, 100.0 * len(patch) / tf.size, name)
      lock.release()
    except Exception as e:
      print e
      raise

  # start worker threads; wait for them all to finish.
  threads = [threading.Thread(target=worker)
             for i in range(OPTIONS.worker_threads)]
  for th in threads:
    th.start()
  while threads:
    threads.pop().join()


class BlockDifference(object):
  def __init__(self, partition, tgt, src=None, check_first_block=False,
               version=None):
    self.tgt = tgt
    self.src = src
    self.partition = partition
    self.check_first_block = check_first_block

    # Due to http://b/20939131, check_first_block is disabled temporarily.
    assert not self.check_first_block

    if version is None:
      version = 1
      if OPTIONS.info_dict:
        version = max(
            int(i) for i in
            OPTIONS.info_dict.get("blockimgdiff_versions", "1").split(","))
    self.version = version

    b = blockimgdiff.BlockImageDiff(tgt, src, threads=OPTIONS.worker_threads,
                                    version=self.version)
    tmpdir = tempfile.mkdtemp()
    OPTIONS.tempfiles.append(tmpdir)
    self.path = os.path.join(tmpdir, partition)
    b.Compute(self.path)

    if src is None:
      _, self.device = GetTypeAndDevice("/" + partition, OPTIONS.info_dict)
    else:
      _, self.device = GetTypeAndDevice("/" + partition,
                                        OPTIONS.source_info_dict)

  def WriteScript(self, script, output_zip, progress=None):
    if not self.src:
      # write the output unconditionally
      script.Print("Patching %s image unconditionally..." % (self.partition,))
    else:
      script.Print("Patching %s image after verification." % (self.partition,))

    if progress:
      script.ShowProgress(progress, 0)
    self._WriteUpdate(script, output_zip)
    self._WritePostInstallVerifyScript(script)

  def WriteVerifyScript(self, script):
    partition = self.partition
    if not self.src:
      script.Print("Image %s will be patched unconditionally." % (partition,))
    else:
      ranges = self.src.care_map.subtract(self.src.clobbered_blocks)
      ranges_str = ranges.to_string_raw()
      if self.version >= 3:
        script.AppendExtra(('if (range_sha1("%s", "%s") == "%s" || '
                            'block_image_verify("%s", '
                            'package_extract_file("%s.transfer.list"), '
                            '"%s.new.dat", "%s.patch.dat")) then') % (
                            self.device, ranges_str, self.src.TotalSha1(),
                            self.device, partition, partition, partition))
      else:
        script.AppendExtra('if range_sha1("%s", "%s") == "%s" then' % (
                           self.device, ranges_str, self.src.TotalSha1()))
      script.Print('Verified %s image...' % (partition,))
      script.AppendExtra('else')

      # When generating incrementals for the system and vendor partitions,
      # explicitly check the first block (which contains the superblock) of
      # the partition to see if it's what we expect. If this check fails,
      # give an explicit log message about the partition having been
      # remounted R/W (the most likely explanation) and the need to flash to
      # get OTAs working again.
      if self.check_first_block:
        self._CheckFirstBlock(script)

      # Abort the OTA update. Note that the incremental OTA cannot be applied
      # even if it may match the checksum of the target partition.
      # a) If version < 3, operations like move and erase will make changes
      #    unconditionally and damage the partition.
      # b) If version >= 3, it won't even reach here.
      script.AppendExtra(('abort("%s partition has unexpected contents");\n'
                          'endif;') % (partition,))

  def _WritePostInstallVerifyScript(self, script):
    partition = self.partition
    script.Print('Verifying the updated %s image...' % (partition,))
    # Unlike pre-install verification, clobbered_blocks should not be ignored.
    ranges = self.tgt.care_map
    ranges_str = ranges.to_string_raw()
    script.AppendExtra('if range_sha1("%s", "%s") == "%s" then' % (
                       self.device, ranges_str,
                       self.tgt.TotalSha1(include_clobbered_blocks=True)))

    # Bug: 20881595
    # Verify that extended blocks are really zeroed out.
    if self.tgt.extended:
      ranges_str = self.tgt.extended.to_string_raw()
      script.AppendExtra('if range_sha1("%s", "%s") == "%s" then' % (
                         self.device, ranges_str,
                         self._HashZeroBlocks(self.tgt.extended.size())))
      script.Print('Verified the updated %s image.' % (partition,))
      script.AppendExtra(
          'else\n'
          '  abort("%s partition has unexpected non-zero contents after OTA '
          'update");\n'
          'endif;' % (partition,))
    else:
      script.Print('Verified the updated %s image.' % (partition,))

    script.AppendExtra(
        'else\n'
        '  abort("%s partition has unexpected contents after OTA update");\n'
        'endif;' % (partition,))

  def _WriteUpdate(self, script, output_zip):
    ZipWrite(output_zip,
             '{}.transfer.list'.format(self.path),
             '{}.transfer.list'.format(self.partition))
    ZipWrite(output_zip,
             '{}.new.dat'.format(self.path),
             '{}.new.dat'.format(self.partition))
    ZipWrite(output_zip,
             '{}.patch.dat'.format(self.path),
             '{}.patch.dat'.format(self.partition),
             compress_type=zipfile.ZIP_STORED)

    call = ('block_image_update("{device}", '
            'package_extract_file("{partition}.transfer.list"), '
            '"{partition}.new.dat", "{partition}.patch.dat");\n'.format(
                device=self.device, partition=self.partition))
    script.AppendExtra(script.WordWrap(call))

  def _HashBlocks(self, source, ranges): # pylint: disable=no-self-use
    data = source.ReadRangeSet(ranges)
    ctx = sha1()

    for p in data:
      ctx.update(p)

    return ctx.hexdigest()

  def _HashZeroBlocks(self, num_blocks): # pylint: disable=no-self-use
    """Return the hash value for all zero blocks."""
    zero_block = '\x00' * 4096
    ctx = sha1()
    for _ in range(num_blocks):
      ctx.update(zero_block)

    return ctx.hexdigest()

  # TODO(tbao): Due to http://b/20939131, block 0 may be changed without
  # remounting R/W. Will change the checking to a finer-grained way to
  # mask off those bits.
  def _CheckFirstBlock(self, script):
    r = rangelib.RangeSet((0, 1))
    srchash = self._HashBlocks(self.src, r)

    script.AppendExtra(('(range_sha1("%s", "%s") == "%s") || '
                        'abort("%s has been remounted R/W; '
                        'reflash device to reenable OTA updates");')
                       % (self.device, r.to_string_raw(), srchash,
                          self.device))

DataImage = blockimgdiff.DataImage


# map recovery.fstab's fs_types to mount/format "partition types"
PARTITION_TYPES = {
    "yaffs2": "MTD",
    "mtd": "MTD",
    "ext4": "EMMC",
    "emmc": "EMMC",
    "f2fs": "EMMC",
    "squashfs": "EMMC"
}

def GetTypeAndDevice(mount_point, info):
  fstab = info["fstab"]
  if fstab:
    return (PARTITION_TYPES[fstab[mount_point].fs_type],
            fstab[mount_point].device)
  else:
    raise KeyError

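# GetTypeAndDevice() is how updater commands learn how to address a
# partition; a hypothetical lookup (this mirrors the /recovery lookup in
# MakeRecoveryPatch below):
#
#   part_type, device = GetTypeAndDevice("/recovery", OPTIONS.info_dict)
#   # e.g. ("EMMC", "/dev/block/platform/soc.0/by-name/recovery")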

def ParseCertificate(data):
  """Parse a PEM-format certificate."""
  cert = []
  save = False
  for line in data.split("\n"):
    if "--END CERTIFICATE--" in line:
      break
    if save:
      cert.append(line)
    if "--BEGIN CERTIFICATE--" in line:
      save = True
  cert = "".join(cert).decode('base64')
  return cert

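# A minimal sketch for ParseCertificate() (hypothetical path): it returns the
# DER bytes decoded from the base64 body between the BEGIN/END markers.
#
#   der = ParseCertificate(open("platform.x509.pem").read())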

def MakeRecoveryPatch(input_dir, output_sink, recovery_img, boot_img,
                      info_dict=None):
  """Generate a binary patch that creates the recovery image starting
  with the boot image.  (Most of the space in these images is just the
  kernel, which is identical for the two, so the resulting patch
  should be efficient.)  Add it to the output zip, along with a shell
  script that is run from init.rc on first boot to actually do the
  patching and install the new recovery image.

  recovery_img and boot_img should be File objects for the
  corresponding images.  info_dict should be the dictionary returned by
  common.LoadInfoDict() on the input target_files.
  """

  if info_dict is None:
    info_dict = OPTIONS.info_dict

  full_recovery_image = info_dict.get("full_recovery_image", None) == "true"
  system_root_image = info_dict.get("system_root_image", None) == "true"

  if full_recovery_image:
    output_sink("etc/recovery.img", recovery_img.data)

  else:
    diff_program = ["imgdiff"]
    path = os.path.join(input_dir, "SYSTEM", "etc", "recovery-resource.dat")
    if os.path.exists(path):
      diff_program.append("-b")
      diff_program.append(path)
      bonus_args = "-b /system/etc/recovery-resource.dat"
    else:
      bonus_args = ""

    d = Difference(recovery_img, boot_img, diff_program=diff_program)
    _, _, patch = d.ComputePatch()
    output_sink("recovery-from-boot.p", patch)

  try:
    # The following GetTypeAndDevice()s need to use the path in the target
    # info_dict instead of source_info_dict.
    boot_type, boot_device = GetTypeAndDevice("/boot", info_dict)
    recovery_type, recovery_device = GetTypeAndDevice("/recovery", info_dict)
  except KeyError:
    return

  if full_recovery_image:
    sh = """#!/system/bin/sh
if ! applypatch -c %(type)s:%(device)s:%(size)d:%(sha1)s; then
  applypatch /system/etc/recovery.img %(type)s:%(device)s %(sha1)s %(size)d && log -t recovery "Installing new recovery image: succeeded" || log -t recovery "Installing new recovery image: failed"
else
  log -t recovery "Recovery image already installed"
fi
""" % {'type': recovery_type,
       'device': recovery_device,
       'sha1': recovery_img.sha1,
       'size': recovery_img.size}
  else:
    sh = """#!/system/bin/sh
if ! applypatch -c %(recovery_type)s:%(recovery_device)s:%(recovery_size)d:%(recovery_sha1)s; then
  applypatch %(bonus_args)s %(boot_type)s:%(boot_device)s:%(boot_size)d:%(boot_sha1)s %(recovery_type)s:%(recovery_device)s %(recovery_sha1)s %(recovery_size)d %(boot_sha1)s:/system/recovery-from-boot.p && log -t recovery "Installing new recovery image: succeeded" || log -t recovery "Installing new recovery image: failed"
else
  log -t recovery "Recovery image already installed"
fi
""" % {'boot_size': boot_img.size,
       'boot_sha1': boot_img.sha1,
       'recovery_size': recovery_img.size,
       'recovery_sha1': recovery_img.sha1,
       'boot_type': boot_type,
       'boot_device': boot_device,
       'recovery_type': recovery_type,
       'recovery_device': recovery_device,
       'bonus_args': bonus_args}

  # The install script location moved from /system/etc to /system/bin
  # in the L release.  Parse init.*.rc files to find out where the
  # target-files expects it to be, and put it there.
  sh_location = "etc/install-recovery.sh"
  found = False
  if system_root_image:
    init_rc_dir = os.path.join(input_dir, "ROOT")
  else:
    init_rc_dir = os.path.join(input_dir, "BOOT", "RAMDISK")
  init_rc_files = os.listdir(init_rc_dir)
  for init_rc_file in init_rc_files:
    if (not init_rc_file.startswith('init.') or
        not init_rc_file.endswith('.rc')):
      continue
    with open(os.path.join(init_rc_dir, init_rc_file)) as f:
      for line in f:
        m = re.match(r"^service flash_recovery /system/(\S+)\s*$", line)
        if m:
          sh_location = m.group(1)
          found = True
          break
    if found:
      break
  print "putting script in", sh_location

  output_sink(sh_location, sh)
1502