common.py revision 2a410587e810babe0871f1ef2a6f20d06f3269d8
1# Copyright (C) 2008 The Android Open Source Project
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7#      http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14
15import copy
16import errno
17import getopt
18import getpass
19import imp
20import os
21import platform
22import re
23import shlex
24import shutil
25import subprocess
26import sys
27import tempfile
28import threading
29import time
30import zipfile
31
32import blockimgdiff
33import rangelib
34
35from hashlib import sha1 as sha1
36
37
38class Options(object):
39  def __init__(self):
40    platform_search_path = {
41        "linux2": "out/host/linux-x86",
42        "darwin": "out/host/darwin-x86",
43    }
44
45    self.search_path = platform_search_path.get(sys.platform, None)
46    self.signapk_path = "framework/signapk.jar"  # Relative to search_path
47    self.extra_signapk_args = []
48    self.java_path = "java"  # Use the one on the path by default.
49    self.java_args = "-Xmx2048m" # JVM Args
50    self.public_key_suffix = ".x509.pem"
51    self.private_key_suffix = ".pk8"
52    # use otatools built boot_signer by default
53    self.boot_signer_path = "boot_signer"
54    self.verbose = False
55    self.tempfiles = []
56    self.device_specific = None
57    self.extras = {}
58    self.info_dict = None
59    self.worker_threads = None
60
61
62OPTIONS = Options()
63
64
65# Values for "certificate" in apkcerts that mean special things.
66SPECIAL_CERT_STRINGS = ("PRESIGNED", "EXTERNAL")
67
68
69class ExternalError(RuntimeError):
70  pass
71
72
73def Run(args, **kwargs):
74  """Create and return a subprocess.Popen object, printing the command
75  line on the terminal if -v was specified."""
76  if OPTIONS.verbose:
77    print "  running: ", " ".join(args)
78  return subprocess.Popen(args, **kwargs)
79
80
81def CloseInheritedPipes():
82  """ Gmake in MAC OS has file descriptor (PIPE) leak. We close those fds
83  before doing other work."""
84  if platform.system() != "Darwin":
85    return
86  for d in range(3, 1025):
87    try:
88      stat = os.fstat(d)
89      if stat is not None:
90        pipebit = stat[0] & 0x1000
91        if pipebit != 0:
92          os.close(d)
93    except OSError:
94      pass
95
96
97def LoadInfoDict(input_file, input_dir=None):
98  """Read and parse the META/misc_info.txt key/value pairs from the
99  input target files and return a dict."""
100
101  def read_helper(fn):
102    if isinstance(input_file, zipfile.ZipFile):
103      return input_file.read(fn)
104    else:
105      path = os.path.join(input_file, *fn.split("/"))
106      try:
107        with open(path) as f:
108          return f.read()
109      except IOError as e:
110        if e.errno == errno.ENOENT:
111          raise KeyError(fn)
112  d = {}
113  try:
114    d = LoadDictionaryFromLines(read_helper("META/misc_info.txt").split("\n"))
115  except KeyError:
116    # ok if misc_info.txt doesn't exist
117    pass
118
119  # backwards compatibility: These values used to be in their own
120  # files.  Look for them, in case we're processing an old
121  # target_files zip.
122
123  if "mkyaffs2_extra_flags" not in d:
124    try:
125      d["mkyaffs2_extra_flags"] = read_helper(
126          "META/mkyaffs2-extra-flags.txt").strip()
127    except KeyError:
128      # ok if flags don't exist
129      pass
130
131  if "recovery_api_version" not in d:
132    try:
133      d["recovery_api_version"] = read_helper(
134          "META/recovery-api-version.txt").strip()
135    except KeyError:
136      raise ValueError("can't find recovery API version in input target-files")
137
138  if "tool_extensions" not in d:
139    try:
140      d["tool_extensions"] = read_helper("META/tool-extensions.txt").strip()
141    except KeyError:
142      # ok if extensions don't exist
143      pass
144
145  if "fstab_version" not in d:
146    d["fstab_version"] = "1"
147
148  # During building, we use the "file_contexts" in the out/ directory tree.
149  # It is no longer available when (re)generating from target_files zip. So
150  # when generating from target_files zip, we look for a copy under META/
151  # first; if that's not available, we search under BOOT/RAMDISK/. Note that we may need
152  # a different file_contexts to build images than the one running on device,
153  # such as when enabling system_root_image. In that case, we must have the
154  # one for building copied to META/.
155  if input_dir is not None:
156    fc_config = os.path.join(input_dir, "META", "file_contexts")
157    if not os.path.exists(fc_config):
158      fc_config = os.path.join(input_dir, "BOOT", "RAMDISK", "file_contexts")
159      if not os.path.exists(fc_config):
160        fc_config = None
161
162    if fc_config:
163      d["selinux_fc"] = fc_config
164
165  try:
166    data = read_helper("META/imagesizes.txt")
167    for line in data.split("\n"):
168      if not line:
169        continue
170      name, value = line.split(" ", 1)
171      if not value:
172        continue
173      if name == "blocksize":
174        d[name] = value
175      else:
176        d[name + "_size"] = value
177  except KeyError:
178    pass
179
180  def makeint(key):
181    if key in d:
182      d[key] = int(d[key], 0)
183
184  makeint("recovery_api_version")
185  makeint("blocksize")
186  makeint("system_size")
187  makeint("vendor_size")
188  makeint("userdata_size")
189  makeint("cache_size")
190  makeint("recovery_size")
191  makeint("boot_size")
192  makeint("fstab_version")
193
194  d["fstab"] = LoadRecoveryFSTab(read_helper, d["fstab_version"])
195  d["build.prop"] = LoadBuildProp(read_helper)
196  return d
197
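# Example (illustrative sketch): loading the info dict from an already
# unpacked target-files directory; the path below is only an example.
#
#   OPTIONS.info_dict = LoadInfoDict("/tmp/targetfiles-unpacked")
#   DumpInfoDict(OPTIONS.info_dict)
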
198def LoadBuildProp(read_helper):
199  try:
200    data = read_helper("SYSTEM/build.prop")
201  except KeyError:
202    print "Warning: could not find SYSTEM/build.prop in %s" % zip
203    data = ""
204  return LoadDictionaryFromLines(data.split("\n"))
205
206def LoadDictionaryFromLines(lines):
207  d = {}
208  for line in lines:
209    line = line.strip()
210    if not line or line.startswith("#"):
211      continue
212    if "=" in line:
213      name, value = line.split("=", 1)
214      d[name] = value
215  return d
216
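# Example (illustrative sketch): LoadDictionaryFromLines keeps only
# "key=value" pairs and skips blank lines and "#" comments. The keys and
# values below are made up for illustration.
#
#   LoadDictionaryFromLines([
#       "# build properties",
#       "recovery_api_version=3",
#       "blocksize=4096",
#   ])
#   # -> {"recovery_api_version": "3", "blocksize": "4096"}
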
217def LoadRecoveryFSTab(read_helper, fstab_version):
218  class Partition(object):
219    def __init__(self, mount_point, fs_type, device, length, device2, context):
220      self.mount_point = mount_point
221      self.fs_type = fs_type
222      self.device = device
223      self.length = length
224      self.device2 = device2
225      self.context = context
226
227  try:
228    data = read_helper("RECOVERY/RAMDISK/etc/recovery.fstab")
229  except KeyError:
230    print "Warning: could not find RECOVERY/RAMDISK/etc/recovery.fstab"
231    data = ""
232
233  if fstab_version == 1:
234    d = {}
235    for line in data.split("\n"):
236      line = line.strip()
237      if not line or line.startswith("#"):
238        continue
239      pieces = line.split()
240      if not 3 <= len(pieces) <= 4:
241        raise ValueError("malformed recovery.fstab line: \"%s\"" % (line,))
242      options = None
243      if len(pieces) >= 4:
244        if pieces[3].startswith("/"):
245          device2 = pieces[3]
246          if len(pieces) >= 5:
247            options = pieces[4]
248        else:
249          device2 = None
250          options = pieces[3]
251      else:
252        device2 = None
253
254      mount_point = pieces[0]
255      length = 0
256      if options:
257        options = options.split(",")
258        for i in options:
259          if i.startswith("length="):
260            length = int(i[7:])
261          else:
262            print "%s: unknown option \"%s\"" % (mount_point, i)
263
264      d[mount_point] = Partition(mount_point=mount_point, fs_type=pieces[1],
265                                 device=pieces[2], length=length,
266                                 device2=device2, context=None)
267
268  elif fstab_version == 2:
269    d = {}
270    for line in data.split("\n"):
271      line = line.strip()
272      if not line or line.startswith("#"):
273        continue
274      # <src> <mnt_point> <type> <mnt_flags and options> <fs_mgr_flags>
275      pieces = line.split()
276      if len(pieces) != 5:
277        raise ValueError("malformed recovery.fstab line: \"%s\"" % (line,))
278
279      # Ignore entries that are managed by vold
280      options = pieces[4]
281      if "voldmanaged=" in options:
282        continue
283
284      # It's a good line, parse it
285      length = 0
286      options = options.split(",")
287      for i in options:
288        if i.startswith("length="):
289          length = int(i[7:])
290        else:
291          # Ignore all unknown options in the unified fstab
292          continue
293
294      mount_flags = pieces[3]
295      # Honor the SELinux context if present.
296      context = None
297      for i in mount_flags.split(","):
298        if i.startswith("context="):
299          context = i
300
301      mount_point = pieces[1]
302      d[mount_point] = Partition(mount_point=mount_point, fs_type=pieces[2],
303                                 device=pieces[0], length=length,
304                                 device2=None, context=context)
305
306  else:
307    raise ValueError("Unknown fstab_version: \"%d\"" % (fstab_version,))
308
309  return d
310
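# Example (illustrative sketch): a typical fstab_version 2 line and the
# Partition fields LoadRecoveryFSTab records for it. The device path and
# mount point are assumptions, not taken from any particular device.
#
#   /dev/block/platform/msm_sdcc.1/by-name/system  /system  ext4  ro,barrier=1  wait
#
#   -> d["/system"].fs_type == "ext4"
#      d["/system"].device  == "/dev/block/platform/msm_sdcc.1/by-name/system"
#      d["/system"].length  == 0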
311
312def DumpInfoDict(d):
313  for k, v in sorted(d.items()):
314    print "%-25s = (%s) %s" % (k, type(v).__name__, v)
315
316
317def BuildBootableImage(sourcedir, fs_config_file, info_dict=None):
318  """Take a kernel, cmdline, and ramdisk directory from the input (in
319  'sourcedir'), and turn them into a boot image.  Return the image
320  data, or None if sourcedir does not appear to contain files for
321  building the requested image."""
322
323  if (not os.access(os.path.join(sourcedir, "RAMDISK"), os.F_OK) or
324      not os.access(os.path.join(sourcedir, "kernel"), os.F_OK)):
325    return None
326
327  if info_dict is None:
328    info_dict = OPTIONS.info_dict
329
330  ramdisk_img = tempfile.NamedTemporaryFile()
331  img = tempfile.NamedTemporaryFile()
332
333  if os.access(fs_config_file, os.F_OK):
334    cmd = ["mkbootfs", "-f", fs_config_file, os.path.join(sourcedir, "RAMDISK")]
335  else:
336    cmd = ["mkbootfs", os.path.join(sourcedir, "RAMDISK")]
337  p1 = Run(cmd, stdout=subprocess.PIPE)
338  p2 = Run(["minigzip"],
339           stdin=p1.stdout, stdout=ramdisk_img.file.fileno())
340
341  p2.wait()
342  p1.wait()
343  assert p1.returncode == 0, "mkbootfs of %s ramdisk failed" % (sourcedir,)
344  assert p2.returncode == 0, "minigzip of %s ramdisk failed" % (sourcedir,)
345
346  # use MKBOOTIMG from environ, or "mkbootimg" if empty or not set
347  mkbootimg = os.getenv('MKBOOTIMG') or "mkbootimg"
348
349  cmd = [mkbootimg, "--kernel", os.path.join(sourcedir, "kernel")]
350
351  fn = os.path.join(sourcedir, "second")
352  if os.access(fn, os.F_OK):
353    cmd.append("--second")
354    cmd.append(fn)
355
356  fn = os.path.join(sourcedir, "cmdline")
357  if os.access(fn, os.F_OK):
358    cmd.append("--cmdline")
359    cmd.append(open(fn).read().rstrip("\n"))
360
361  fn = os.path.join(sourcedir, "base")
362  if os.access(fn, os.F_OK):
363    cmd.append("--base")
364    cmd.append(open(fn).read().rstrip("\n"))
365
366  fn = os.path.join(sourcedir, "pagesize")
367  if os.access(fn, os.F_OK):
368    cmd.append("--pagesize")
369    cmd.append(open(fn).read().rstrip("\n"))
370
371  args = info_dict.get("mkbootimg_args", None)
372  if args and args.strip():
373    cmd.extend(shlex.split(args))
374
375  img_unsigned = None
376  if info_dict.get("vboot", None):
377    img_unsigned = tempfile.NamedTemporaryFile()
378    cmd.extend(["--ramdisk", ramdisk_img.name,
379                "--output", img_unsigned.name])
380  else:
381    cmd.extend(["--ramdisk", ramdisk_img.name,
382                "--output", img.name])
383
384  p = Run(cmd, stdout=subprocess.PIPE)
385  p.communicate()
386  assert p.returncode == 0, "mkbootimg of %s image failed" % (
387      os.path.basename(sourcedir),)
388
389  if info_dict.get("verity_key", None):
390    path = "/" + os.path.basename(sourcedir).lower()
391    cmd = [OPTIONS.boot_signer_path, path, img.name,
392           info_dict["verity_key"] + ".pk8",
393           info_dict["verity_key"] + ".x509.pem", img.name]
394    p = Run(cmd, stdout=subprocess.PIPE)
395    p.communicate()
396    assert p.returncode == 0, "boot_signer of %s image failed" % path
397
398  # Sign the image if vboot is non-empty.
399  elif info_dict.get("vboot", None):
400    path = "/" + os.path.basename(sourcedir).lower()
401    img_keyblock = tempfile.NamedTemporaryFile()
402    cmd = [info_dict["vboot_signer_cmd"], info_dict["futility"],
403           img_unsigned.name, info_dict["vboot_key"] + ".vbpubk",
404           info_dict["vboot_key"] + ".vbprivk", img_keyblock.name,
405           img.name]
406    p = Run(cmd, stdout=subprocess.PIPE)
407    p.communicate()
408    assert p.returncode == 0, "vboot_signer of %s image failed" % path
409
410    # Clean up the temp files.
411    img_unsigned.close()
412    img_keyblock.close()
413
414  img.seek(0, os.SEEK_SET)
415  data = img.read()
416
417  ramdisk_img.close()
418  img.close()
419
420  return data
421
422
423def GetBootableImage(name, prebuilt_name, unpack_dir, tree_subdir,
424                     info_dict=None):
425  """Return a File object (with name 'name') with the desired bootable
426  image.  Look for it in 'unpack_dir'/BOOTABLE_IMAGES under the name
427  'prebuilt_name', otherwise look for it under 'unpack_dir'/IMAGES,
428  otherwise construct it from the source files in
429  'unpack_dir'/'tree_subdir'."""
430
431  prebuilt_path = os.path.join(unpack_dir, "BOOTABLE_IMAGES", prebuilt_name)
432  if os.path.exists(prebuilt_path):
433    print "using prebuilt %s from BOOTABLE_IMAGES..." % (prebuilt_name,)
434    return File.FromLocalFile(name, prebuilt_path)
435
436  prebuilt_path = os.path.join(unpack_dir, "IMAGES", prebuilt_name)
437  if os.path.exists(prebuilt_path):
438    print "using prebuilt %s from IMAGES..." % (prebuilt_name,)
439    return File.FromLocalFile(name, prebuilt_path)
440
441  print "building image from target_files %s..." % (tree_subdir,)
442  fs_config = "META/" + tree_subdir.lower() + "_filesystem_config.txt"
443  data = BuildBootableImage(os.path.join(unpack_dir, tree_subdir),
444                            os.path.join(unpack_dir, fs_config),
445                            info_dict)
446  if data:
447    return File(name, data)
448  return None
449
450
451def UnzipTemp(filename, pattern=None):
452  """Unzip the given archive into a temporary directory and return the name.
453
454  If filename is of the form "foo.zip+bar.zip", unzip foo.zip into a
455  temp dir, then unzip bar.zip into that_dir/BOOTABLE_IMAGES.
456
457  Returns (tempdir, zipobj) where zipobj is a zipfile.ZipFile (of the
458  main file), open for reading.
459  """
460
461  tmp = tempfile.mkdtemp(prefix="targetfiles-")
462  OPTIONS.tempfiles.append(tmp)
463
464  def unzip_to_dir(filename, dirname):
465    cmd = ["unzip", "-o", "-q", filename, "-d", dirname]
466    if pattern is not None:
467      cmd.append(pattern)
468    p = Run(cmd, stdout=subprocess.PIPE)
469    p.communicate()
470    if p.returncode != 0:
471      raise ExternalError("failed to unzip input target-files \"%s\"" %
472                          (filename,))
473
474  m = re.match(r"^(.*[.]zip)\+(.*[.]zip)$", filename, re.IGNORECASE)
475  if m:
476    unzip_to_dir(m.group(1), tmp)
477    unzip_to_dir(m.group(2), os.path.join(tmp, "BOOTABLE_IMAGES"))
478    filename = m.group(1)
479  else:
480    unzip_to_dir(filename, tmp)
481
482  return tmp, zipfile.ZipFile(filename, "r")
483
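# Example (illustrative sketch): unpacking a target-files zip and loading its
# info dict. The file name and pattern are examples only.
#
#   tmp_dir, input_zip = UnzipTemp("target_files.zip", pattern="META/*")
#   info = LoadInfoDict(input_zip, tmp_dir)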
484
485def GetKeyPasswords(keylist):
486  """Given a list of keys, prompt the user to enter passwords for
487  those which require them.  Return a {key: password} dict.  password
488  will be None if the key has no password."""
489
490  no_passwords = []
491  need_passwords = []
492  key_passwords = {}
493  devnull = open("/dev/null", "w+b")
494  for k in sorted(keylist):
495    # We don't need a password for things that aren't really keys.
496    if k in SPECIAL_CERT_STRINGS:
497      no_passwords.append(k)
498      continue
499
500    p = Run(["openssl", "pkcs8", "-in", k+OPTIONS.private_key_suffix,
501             "-inform", "DER", "-nocrypt"],
502            stdin=devnull.fileno(),
503            stdout=devnull.fileno(),
504            stderr=subprocess.STDOUT)
505    p.communicate()
506    if p.returncode == 0:
507      # Definitely an unencrypted key.
508      no_passwords.append(k)
509    else:
510      p = Run(["openssl", "pkcs8", "-in", k+OPTIONS.private_key_suffix,
511               "-inform", "DER", "-passin", "pass:"],
512              stdin=devnull.fileno(),
513              stdout=devnull.fileno(),
514              stderr=subprocess.PIPE)
515      _, stderr = p.communicate()
516      if p.returncode == 0:
517        # Encrypted key with empty string as password.
518        key_passwords[k] = ''
519      elif stderr.startswith('Error decrypting key'):
520        # Definitely encrypted key.
521        # It would have said "Error reading key" if it didn't parse correctly.
522        need_passwords.append(k)
523      else:
524        # Potentially, a type of key that openssl doesn't understand.
525        # We'll let the routines in signapk.jar handle it.
526        no_passwords.append(k)
527  devnull.close()
528
529  key_passwords.update(PasswordManager().GetPasswords(need_passwords))
530  key_passwords.update(dict.fromkeys(no_passwords, None))
531  return key_passwords
532
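# Example (illustrative sketch): the key name below is only an example. Keys
# are probed with openssl as described above; special cert strings and
# unencrypted keys map to None.
#
#   passwords = GetKeyPasswords(
#       ["build/target/product/security/testkey", "PRESIGNED"])
#   # passwords["PRESIGNED"] is None; encrypted keys map to the password
#   # collected via PasswordManager.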
533
534def SignFile(input_name, output_name, key, password, align=None,
535             whole_file=False):
536  """Sign the input_name zip/jar/apk, producing output_name.  Use the
537  given key and password (the latter may be None if the key does not
538  have a password).
539
540  If align is an integer > 1, zipalign is run to align stored files in
541  the output zip on 'align'-byte boundaries.
542
543  If whole_file is true, use the "-w" option to SignApk to embed a
544  signature that covers the whole file in the archive comment of the
545  zip file.
546  """
547
548  if align == 0 or align == 1:
549    align = None
550
551  if align:
552    temp = tempfile.NamedTemporaryFile()
553    sign_name = temp.name
554  else:
555    sign_name = output_name
556
557  cmd = [OPTIONS.java_path, OPTIONS.java_args, "-jar",
558         os.path.join(OPTIONS.search_path, OPTIONS.signapk_path)]
559  cmd.extend(OPTIONS.extra_signapk_args)
560  if whole_file:
561    cmd.append("-w")
562  cmd.extend([key + OPTIONS.public_key_suffix,
563              key + OPTIONS.private_key_suffix,
564              input_name, sign_name])
565
566  p = Run(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
567  if password is not None:
568    password += "\n"
569  p.communicate(password)
570  if p.returncode != 0:
571    raise ExternalError("signapk.jar failed: return code %s" % (p.returncode,))
572
573  if align:
574    p = Run(["zipalign", "-f", "-p", str(align), sign_name, output_name])
575    p.communicate()
576    if p.returncode != 0:
577      raise ExternalError("zipalign failed: return code %s" % (p.returncode,))
578    temp.close()
579
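# Example (illustrative sketch): file and key names are examples only. A
# password of None means the key is unencrypted.
#
#   SignFile("unsigned.apk", "signed.apk",
#            "build/target/product/security/testkey", None, align=4)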
580
581def CheckSize(data, target, info_dict):
582  """Check the data string passed against the max size limit, if
583  any, for the given target.  Raise exception if the data is too big.
584  Print a warning if the data is nearing the maximum size."""
585
586  if target.endswith(".img"):
587    target = target[:-4]
588  mount_point = "/" + target
589
590  fs_type = None
591  limit = None
592  if info_dict["fstab"]:
593    if mount_point == "/userdata":
594      mount_point = "/data"
595    p = info_dict["fstab"][mount_point]
596    fs_type = p.fs_type
597    device = p.device
598    if "/" in device:
599      device = device[device.rfind("/")+1:]
600    limit = info_dict.get(device + "_size", None)
601  if not fs_type or not limit:
602    return
603
604  if fs_type == "yaffs2":
605    # image size should be increased by 1/64th to account for the
606    # spare area (64 bytes per 2k page)
607    limit = limit / 2048 * (2048+64)
608  size = len(data)
609  pct = float(size) * 100.0 / limit
610  msg = "%s size (%d) is %.2f%% of limit (%d)" % (target, size, pct, limit)
611  if pct >= 99.0:
612    raise ExternalError(msg)
613  elif pct >= 95.0:
614    print
615    print "  WARNING: ", msg
616    print
617  elif OPTIONS.verbose:
618    print "  ", msg
619
620
621def ReadApkCerts(tf_zip):
622  """Given a target_files ZipFile, parse the META/apkcerts.txt file
623  and return a {package: cert} dict."""
624  certmap = {}
625  for line in tf_zip.read("META/apkcerts.txt").split("\n"):
626    line = line.strip()
627    if not line:
628      continue
629    m = re.match(r'^name="(.*)"\s+certificate="(.*)"\s+'
630                 r'private_key="(.*)"$', line)
631    if m:
632      name, cert, privkey = m.groups()
633      public_key_suffix_len = len(OPTIONS.public_key_suffix)
634      private_key_suffix_len = len(OPTIONS.private_key_suffix)
635      if cert in SPECIAL_CERT_STRINGS and not privkey:
636        certmap[name] = cert
637      elif (cert.endswith(OPTIONS.public_key_suffix) and
638            privkey.endswith(OPTIONS.private_key_suffix) and
639            cert[:-public_key_suffix_len] == privkey[:-private_key_suffix_len]):
640        certmap[name] = cert[:-public_key_suffix_len]
641      else:
642        raise ValueError("failed to parse line from apkcerts.txt:\n" + line)
643  return certmap
644
645
646COMMON_DOCSTRING = """
647  -p  (--path)  <dir>
648      Prepend <dir>/bin to the list of places to search for binaries
649      run by this script, and expect to find jars in <dir>/framework.
650
651  -s  (--device_specific) <file>
652      Path to the python module containing device-specific
653      releasetools code.
654
655  -x  (--extra)  <key=value>
656      Add a key/value pair to the 'extras' dict, which device-specific
657      extension code may look at.
658
659  -v  (--verbose)
660      Show command lines being executed.
661
662  -h  (--help)
663      Display this usage message and exit.
664"""
665
666def Usage(docstring):
667  print docstring.rstrip("\n")
668  print COMMON_DOCSTRING
669
670
671def ParseOptions(argv,
672                 docstring,
673                 extra_opts="", extra_long_opts=(),
674                 extra_option_handler=None):
675  """Parse the options in argv and return any arguments that aren't
676  flags.  docstring is the calling module's docstring, to be displayed
677  for errors and -h.  extra_opts and extra_long_opts are for flags
678  defined by the caller, which are processed by passing them to
679  extra_option_handler."""
680
681  try:
682    opts, args = getopt.getopt(
683        argv, "hvp:s:x:" + extra_opts,
684        ["help", "verbose", "path=", "signapk_path=", "extra_signapk_args=",
685         "java_path=", "java_args=", "public_key_suffix=",
686         "private_key_suffix=", "boot_signer_path=", "device_specific=",
687         "extra="] +
688        list(extra_long_opts))
689  except getopt.GetoptError as err:
690    Usage(docstring)
691    print "**", str(err), "**"
692    sys.exit(2)
693
694  for o, a in opts:
695    if o in ("-h", "--help"):
696      Usage(docstring)
697      sys.exit()
698    elif o in ("-v", "--verbose"):
699      OPTIONS.verbose = True
700    elif o in ("-p", "--path"):
701      OPTIONS.search_path = a
702    elif o in ("--signapk_path",):
703      OPTIONS.signapk_path = a
704    elif o in ("--extra_signapk_args",):
705      OPTIONS.extra_signapk_args = shlex.split(a)
706    elif o in ("--java_path",):
707      OPTIONS.java_path = a
708    elif o in ("--java_args",):
709      OPTIONS.java_args = a
710    elif o in ("--public_key_suffix",):
711      OPTIONS.public_key_suffix = a
712    elif o in ("--private_key_suffix",):
713      OPTIONS.private_key_suffix = a
714    elif o in ("--boot_signer_path",):
715      OPTIONS.boot_signer_path = a
716    elif o in ("-s", "--device_specific"):
717      OPTIONS.device_specific = a
718    elif o in ("-x", "--extra"):
719      key, value = a.split("=", 1)
720      OPTIONS.extras[key] = value
721    else:
722      if extra_option_handler is None or not extra_option_handler(o, a):
723        assert False, "unknown option \"%s\"" % (o,)
724
725  if OPTIONS.search_path:
726    os.environ["PATH"] = (os.path.join(OPTIONS.search_path, "bin") +
727                          os.pathsep + os.environ["PATH"])
728
729  return args
730
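# Example (illustrative sketch): how a calling script might add its own flag
# on top of the common ones. The "-b/--board_config" flag and its handler are
# assumptions, not options defined by this module.
#
#   def option_handler(o, _):
#     if o in ("-b", "--board_config"):
#       return True   # handled (and ignored) by the caller
#     return False
#
#   args = ParseOptions(sys.argv[1:], __doc__,
#                       extra_opts="b:",
#                       extra_long_opts=["board_config="],
#                       extra_option_handler=option_handler)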
731
732def MakeTempFile(prefix=None, suffix=None):
733  """Make a temp file and add it to the list of things to be deleted
734  when Cleanup() is called.  Return the filename."""
735  fd, fn = tempfile.mkstemp(prefix=prefix, suffix=suffix)
736  os.close(fd)
737  OPTIONS.tempfiles.append(fn)
738  return fn
739
740
741def Cleanup():
742  for i in OPTIONS.tempfiles:
743    if os.path.isdir(i):
744      shutil.rmtree(i)
745    else:
746      os.remove(i)
747
748
749class PasswordManager(object):
750  def __init__(self):
751    self.editor = os.getenv("EDITOR", None)
752    self.pwfile = os.getenv("ANDROID_PW_FILE", None)
753
754  def GetPasswords(self, items):
755    """Get passwords corresponding to each string in 'items',
756    returning a dict.  (The dict may have keys in addition to the
757    values in 'items'.)
758
759    Uses the passwords in $ANDROID_PW_FILE if available, letting the
760    user edit that file to add more needed passwords.  If no editor is
761    available, or $ANDROID_PW_FILE isn't defined, prompts the user
762    interactively in the ordinary way.
763    """
764
765    current = self.ReadFile()
766
767    first = True
768    while True:
769      missing = []
770      for i in items:
771        if i not in current or not current[i]:
772          missing.append(i)
773      # Are all the passwords already in the file?
774      if not missing:
775        return current
776
777      for i in missing:
778        current[i] = ""
779
780      if not first:
781        print "key file %s still missing some passwords." % (self.pwfile,)
782        answer = raw_input("try to edit again? [y]> ").strip()
783        if answer and answer[0] not in 'yY':
784          raise RuntimeError("key passwords unavailable")
785      first = False
786
787      current = self.UpdateAndReadFile(current)
788
789  def PromptResult(self, current): # pylint: disable=no-self-use
790    """Prompt the user to enter a value (password) for each key in
791    'current' whose value is false.  Returns a new dict with all the
792    values.
793    """
794    result = {}
795    for k, v in sorted(current.iteritems()):
796      if v:
797        result[k] = v
798      else:
799        while True:
800          result[k] = getpass.getpass(
801              "Enter password for %s key> " % k).strip()
802          if result[k]:
803            break
804    return result
805
806  def UpdateAndReadFile(self, current):
807    if not self.editor or not self.pwfile:
808      return self.PromptResult(current)
809
810    f = open(self.pwfile, "w")
811    os.chmod(self.pwfile, 0o600)
812    f.write("# Enter key passwords between the [[[ ]]] brackets.\n")
813    f.write("# (Additional spaces are harmless.)\n\n")
814
815    first_line = None
816    sorted_list = sorted([(not v, k, v) for (k, v) in current.iteritems()])
817    for i, (_, k, v) in enumerate(sorted_list):
818      f.write("[[[  %s  ]]] %s\n" % (v, k))
819      if not v and first_line is None:
820        # position cursor on first line with no password.
821        first_line = i + 4
822    f.close()
823
824    p = Run([self.editor, "+%d" % (first_line,), self.pwfile])
825    _, _ = p.communicate()
826
827    return self.ReadFile()
828
829  def ReadFile(self):
830    result = {}
831    if self.pwfile is None:
832      return result
833    try:
834      f = open(self.pwfile, "r")
835      for line in f:
836        line = line.strip()
837        if not line or line[0] == '#':
838          continue
839        m = re.match(r"^\[\[\[\s*(.*?)\s*\]\]\]\s*(\S+)$", line)
840        if not m:
841          print "failed to parse password file: ", line
842        else:
843          result[m.group(2)] = m.group(1)
844      f.close()
845    except IOError as e:
846      if e.errno != errno.ENOENT:
847        print "error reading password file: ", str(e)
848    return result
849
850
851def ZipWrite(zip_file, filename, arcname=None, perms=0o644,
852             compress_type=None):
853  import datetime
854
855  # http://b/18015246
856  # Python 2.7's zipfile implementation wrongly thinks that zip64 is required
857  # for files larger than 2GiB. We can work around this by adjusting their
858  # limit. Note that `zipfile.writestr()` will not work for strings larger than
859  # 2GiB. The Python interpreter sometimes rejects strings that large (though
860  # it isn't clear to me exactly what circumstances cause this).
861  # `zipfile.write()` must be used directly to work around this.
862  #
863  # This mess can be avoided if we port to python3.
864  saved_zip64_limit = zipfile.ZIP64_LIMIT
865  zipfile.ZIP64_LIMIT = (1 << 32) - 1
866
867  if compress_type is None:
868    compress_type = zip_file.compression
869  if arcname is None:
870    arcname = filename
871
872  saved_stat = os.stat(filename)
873
874  try:
875    # `zipfile.write()` doesn't allow us to pass ZipInfo, so just modify the
876    # file to be zipped and reset it when we're done.
877    os.chmod(filename, perms)
878
879    # Use a fixed timestamp so the output is repeatable.
880    epoch = datetime.datetime.fromtimestamp(0)
881    timestamp = (datetime.datetime(2009, 1, 1) - epoch).total_seconds()
882    os.utime(filename, (timestamp, timestamp))
883
884    zip_file.write(filename, arcname=arcname, compress_type=compress_type)
885  finally:
886    os.chmod(filename, saved_stat.st_mode)
887    os.utime(filename, (saved_stat.st_atime, saved_stat.st_mtime))
888    zipfile.ZIP64_LIMIT = saved_zip64_limit
889
890
891def ZipWriteStr(zip_file, zinfo_or_arcname, data, perms=None,
892                compress_type=None):
893  """Wrap zipfile.writestr() function to work around the zip64 limit.
894
895  Even with the ZIP64_LIMIT workaround, it won't allow writing a string
896  longer than 2GiB. It gives 'OverflowError: size does not fit in an int'
897  when calling crc32(bytes).
898
899  But it still works fine to write a shorter string into a large zip file.
900  We should use ZipWrite() whenever possible, and only use ZipWriteStr()
901  when we know the string won't be too long.
902  """
903
904  saved_zip64_limit = zipfile.ZIP64_LIMIT
905  zipfile.ZIP64_LIMIT = (1 << 32) - 1
906
907  if not isinstance(zinfo_or_arcname, zipfile.ZipInfo):
908    zinfo = zipfile.ZipInfo(filename=zinfo_or_arcname)
909    zinfo.compress_type = zip_file.compression
910    if perms is None:
911      perms = 0o100644
912  else:
913    zinfo = zinfo_or_arcname
914
915  # If compress_type is given, it overrides the value in zinfo.
916  if compress_type is not None:
917    zinfo.compress_type = compress_type
918
919  # If perms is given, it has a priority.
920  if perms is not None:
921    # If perms doesn't set the file type, mark it as a regular file.
922    if perms & 0o770000 == 0:
923      perms |= 0o100000
924    zinfo.external_attr = perms << 16
925
926  # Use a fixed timestamp so the output is repeatable.
927  zinfo.date_time = (2009, 1, 1, 0, 0, 0)
928
929  zip_file.writestr(zinfo, data)
930  zipfile.ZIP64_LIMIT = saved_zip64_limit
931
932
933def ZipClose(zip_file):
934  # http://b/18015246
935  # zipfile also refers to ZIP64_LIMIT during close() when it writes out the
936  # central directory.
937  saved_zip64_limit = zipfile.ZIP64_LIMIT
938  zipfile.ZIP64_LIMIT = (1 << 32) - 1
939
940  zip_file.close()
941
942  zipfile.ZIP64_LIMIT = saved_zip64_limit
943
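# Example (illustrative sketch): using the three helpers together so the
# output zip is repeatable. The file names are examples only.
#
#   out_zip = zipfile.ZipFile("update.zip", "w",
#                             compression=zipfile.ZIP_DEFLATED)
#   ZipWrite(out_zip, "/tmp/system.img", arcname="system.img")
#   ZipWriteStr(out_zip, "META-INF/com/android/metadata",
#               "post-build=example\n")
#   ZipClose(out_zip)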
944
945class DeviceSpecificParams(object):
946  module = None
947  def __init__(self, **kwargs):
948    """Keyword arguments to the constructor become attributes of this
949    object, which is passed to all functions in the device-specific
950    module."""
951    for k, v in kwargs.iteritems():
952      setattr(self, k, v)
953    self.extras = OPTIONS.extras
954
955    if self.module is None:
956      path = OPTIONS.device_specific
957      if not path:
958        return
959      try:
960        if os.path.isdir(path):
961          info = imp.find_module("releasetools", [path])
962        else:
963          d, f = os.path.split(path)
964          b, x = os.path.splitext(f)
965          if x == ".py":
966            f = b
967          info = imp.find_module(f, [d])
968        print "loaded device-specific extensions from", path
969        self.module = imp.load_module("device_specific", *info)
970      except ImportError:
971        print "unable to load device-specific module; assuming none"
972
973  def _DoCall(self, function_name, *args, **kwargs):
974    """Call the named function in the device-specific module, passing
975    the given args and kwargs.  The first argument to the call will be
976    the DeviceSpecific object itself.  If there is no module, or the
977    module does not define the function, return the value of the
978    'default' kwarg (which itself defaults to None)."""
979    if self.module is None or not hasattr(self.module, function_name):
980      return kwargs.get("default", None)
981    return getattr(self.module, function_name)(*((self,) + args), **kwargs)
982
983  def FullOTA_Assertions(self):
984    """Called after emitting the block of assertions at the top of a
985    full OTA package.  Implementations can add whatever additional
986    assertions they like."""
987    return self._DoCall("FullOTA_Assertions")
988
989  def FullOTA_InstallBegin(self):
990    """Called at the start of full OTA installation."""
991    return self._DoCall("FullOTA_InstallBegin")
992
993  def FullOTA_InstallEnd(self):
994    """Called at the end of full OTA installation; typically this is
995    used to install the image for the device's baseband processor."""
996    return self._DoCall("FullOTA_InstallEnd")
997
998  def IncrementalOTA_Assertions(self):
999    """Called after emitting the block of assertions at the top of an
1000    incremental OTA package.  Implementations can add whatever
1001    additional assertions they like."""
1002    return self._DoCall("IncrementalOTA_Assertions")
1003
1004  def IncrementalOTA_VerifyBegin(self):
1005    """Called at the start of the verification phase of incremental
1006    OTA installation; additional checks can be placed here to abort
1007    the script before any changes are made."""
1008    return self._DoCall("IncrementalOTA_VerifyBegin")
1009
1010  def IncrementalOTA_VerifyEnd(self):
1011    """Called at the end of the verification phase of incremental OTA
1012    installation; additional checks can be placed here to abort the
1013    script before any changes are made."""
1014    return self._DoCall("IncrementalOTA_VerifyEnd")
1015
1016  def IncrementalOTA_InstallBegin(self):
1017    """Called at the start of incremental OTA installation (after
1018    verification is complete)."""
1019    return self._DoCall("IncrementalOTA_InstallBegin")
1020
1021  def IncrementalOTA_InstallEnd(self):
1022    """Called at the end of incremental OTA installation; typically
1023    this is used to install the image for the device's baseband
1024    processor."""
1025    return self._DoCall("IncrementalOTA_InstallEnd")
1026
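# Example (illustrative sketch): a device-specific releasetools module that
# these hooks call into. The hook body, and the attributes it reads (script,
# input_zip), are assumptions about how the OTA scripts populate
# DeviceSpecificParams.
#
#   # releasetools.py in the device directory
#   def FullOTA_InstallEnd(info):
#     # 'info' is the DeviceSpecificParams object.
#     info.script.Print("Installing vendor baseband...")
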
1027class File(object):
1028  def __init__(self, name, data):
1029    self.name = name
1030    self.data = data
1031    self.size = len(data)
1032    self.sha1 = sha1(data).hexdigest()
1033
1034  @classmethod
1035  def FromLocalFile(cls, name, diskname):
1036    f = open(diskname, "rb")
1037    data = f.read()
1038    f.close()
1039    return File(name, data)
1040
1041  def WriteToTemp(self):
1042    t = tempfile.NamedTemporaryFile()
1043    t.write(self.data)
1044    t.flush()
1045    return t
1046
1047  def AddToZip(self, z, compression=None):
1048    ZipWriteStr(z, self.name, self.data, compress_type=compression)
1049
1050DIFF_PROGRAM_BY_EXT = {
1051    ".gz" : "imgdiff",
1052    ".zip" : ["imgdiff", "-z"],
1053    ".jar" : ["imgdiff", "-z"],
1054    ".apk" : ["imgdiff", "-z"],
1055    ".img" : "imgdiff",
1056    }
1057
1058class Difference(object):
1059  def __init__(self, tf, sf, diff_program=None):
1060    self.tf = tf
1061    self.sf = sf
1062    self.patch = None
1063    self.diff_program = diff_program
1064
1065  def ComputePatch(self):
1066    """Compute the patch (as a string of data) needed to turn sf into
1067    tf.  Returns the same tuple as GetPatch()."""
1068
1069    tf = self.tf
1070    sf = self.sf
1071
1072    if self.diff_program:
1073      diff_program = self.diff_program
1074    else:
1075      ext = os.path.splitext(tf.name)[1]
1076      diff_program = DIFF_PROGRAM_BY_EXT.get(ext, "bsdiff")
1077
1078    ttemp = tf.WriteToTemp()
1079    stemp = sf.WriteToTemp()
1080
1081    ext = os.path.splitext(tf.name)[1]
1082
1083    try:
1084      ptemp = tempfile.NamedTemporaryFile()
1085      if isinstance(diff_program, list):
1086        cmd = copy.copy(diff_program)
1087      else:
1088        cmd = [diff_program]
1089      cmd.append(stemp.name)
1090      cmd.append(ttemp.name)
1091      cmd.append(ptemp.name)
1092      p = Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
1093      err = []
1094      def run():
1095        _, e = p.communicate()
1096        if e:
1097          err.append(e)
1098      th = threading.Thread(target=run)
1099      th.start()
1100      th.join(timeout=300)   # 5 mins
1101      if th.is_alive():
1102        print "WARNING: diff command timed out"
1103        p.terminate()
1104        th.join(5)
1105        if th.is_alive():
1106          p.kill()
1107          th.join()
1108
1109      if err or p.returncode != 0:
1110        print "WARNING: failure running %s:\n%s\n" % (
1111            diff_program, "".join(err))
1112        self.patch = None
1113        return None, None, None
1114      diff = ptemp.read()
1115    finally:
1116      ptemp.close()
1117      stemp.close()
1118      ttemp.close()
1119
1120    self.patch = diff
1121    return self.tf, self.sf, self.patch
1122
1123
1124  def GetPatch(self):
1125    """Return a tuple (target_file, source_file, patch_data).
1126    patch_data may be None if ComputePatch hasn't been called, or if
1127    computing the patch failed."""
1128    return self.tf, self.sf, self.patch
1129
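# Example (illustrative sketch): computing a patch between two File objects,
# which would normally come from the source and target builds.
#
#   d = Difference(target_file_obj, source_file_obj)
#   tf, sf, patch = d.ComputePatch()   # patch is None if diffing failed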
1130
1131def ComputeDifferences(diffs):
1132  """Call ComputePatch on all the Difference objects in 'diffs'."""
1133  print len(diffs), "diffs to compute"
1134
1135  # Do the largest files first, to try and reduce the long-pole effect.
1136  by_size = [(i.tf.size, i) for i in diffs]
1137  by_size.sort(reverse=True)
1138  by_size = [i[1] for i in by_size]
1139
1140  lock = threading.Lock()
1141  diff_iter = iter(by_size)   # accessed under lock
1142
1143  def worker():
1144    try:
1145      lock.acquire()
1146      for d in diff_iter:
1147        lock.release()
1148        start = time.time()
1149        d.ComputePatch()
1150        dur = time.time() - start
1151        lock.acquire()
1152
1153        tf, sf, patch = d.GetPatch()
1154        if sf.name == tf.name:
1155          name = tf.name
1156        else:
1157          name = "%s (%s)" % (tf.name, sf.name)
1158        if patch is None:
1159          print "patching failed!                                  %s" % (name,)
1160        else:
1161          print "%8.2f sec %8d / %8d bytes (%6.2f%%) %s" % (
1162              dur, len(patch), tf.size, 100.0 * len(patch) / tf.size, name)
1163      lock.release()
1164    except Exception as e:
1165      print e
1166      raise
1167
1168  # start worker threads; wait for them all to finish.
1169  threads = [threading.Thread(target=worker)
1170             for i in range(OPTIONS.worker_threads)]
1171  for th in threads:
1172    th.start()
1173  while threads:
1174    threads.pop().join()
1175
1176
1177class BlockDifference(object):
1178  def __init__(self, partition, tgt, src=None, check_first_block=False,
1179               version=None):
1180    self.tgt = tgt
1181    self.src = src
1182    self.partition = partition
1183    self.check_first_block = check_first_block
1184
1185    # Due to http://b/20939131, check_first_block is disabled temporarily.
1186    assert not self.check_first_block
1187
1188    if version is None:
1189      version = 1
1190      if OPTIONS.info_dict:
1191        version = max(
1192            int(i) for i in
1193            OPTIONS.info_dict.get("blockimgdiff_versions", "1").split(","))
1194    self.version = version
1195
1196    b = blockimgdiff.BlockImageDiff(tgt, src, threads=OPTIONS.worker_threads,
1197                                    version=self.version)
1198    tmpdir = tempfile.mkdtemp()
1199    OPTIONS.tempfiles.append(tmpdir)
1200    self.path = os.path.join(tmpdir, partition)
1201    b.Compute(self.path)
1202
1203    _, self.device = GetTypeAndDevice("/" + partition, OPTIONS.info_dict)
1204
1205  def WriteScript(self, script, output_zip, progress=None):
1206    if not self.src:
1207      # write the output unconditionally
1208      script.Print("Patching %s image unconditionally..." % (self.partition,))
1209    else:
1210      script.Print("Patching %s image after verification." % (self.partition,))
1211
1212    if progress:
1213      script.ShowProgress(progress, 0)
1214    self._WriteUpdate(script, output_zip)
1215    self._WritePostInstallVerifyScript(script)
1216
1217  def WriteVerifyScript(self, script):
1218    partition = self.partition
1219    if not self.src:
1220      script.Print("Image %s will be patched unconditionally." % (partition,))
1221    else:
1222      ranges = self.src.care_map.subtract(self.src.clobbered_blocks)
1223      ranges_str = ranges.to_string_raw()
1224      if self.version >= 3:
1225        script.AppendExtra(('if (range_sha1("%s", "%s") == "%s" || '
1226                            'block_image_verify("%s", '
1227                            'package_extract_file("%s.transfer.list"), '
1228                            '"%s.new.dat", "%s.patch.dat")) then') % (
1229                            self.device, ranges_str, self.src.TotalSha1(),
1230                            self.device, partition, partition, partition))
1231      else:
1232        script.AppendExtra('if range_sha1("%s", "%s") == "%s" then' % (
1233                           self.device, ranges_str, self.src.TotalSha1()))
1234      script.Print('Verified %s image...' % (partition,))
1235      script.AppendExtra('else')
1236
1237      # When generating incrementals for the system and vendor partitions,
1238      # explicitly check the first block (which contains the superblock) of
1239      # the partition to see if it's what we expect. If this check fails,
1240      # give an explicit log message about the partition having been
1241      # remounted R/W (the most likely explanation) and the need to flash to
1242      # get OTAs working again.
1243      if self.check_first_block:
1244        self._CheckFirstBlock(script)
1245
1246      # Abort the OTA update. Note that the incremental OTA cannot be applied
1247      # even if it may match the checksum of the target partition.
1248      # a) If version < 3, operations like move and erase will make changes
1249      #    unconditionally and damage the partition.
1250      # b) If version >= 3, it won't even reach here.
1251      script.AppendExtra(('abort("%s partition has unexpected contents");\n'
1252                          'endif;') % (partition,))
1253
1254  def _WritePostInstallVerifyScript(self, script):
1255    partition = self.partition
1256    script.Print('Verifying the updated %s image...' % (partition,))
1257    # Unlike pre-install verification, clobbered_blocks should not be ignored.
1258    ranges = self.tgt.care_map
1259    ranges_str = ranges.to_string_raw()
1260    script.AppendExtra('if range_sha1("%s", "%s") == "%s" then' % (
1261                       self.device, ranges_str,
1262                       self.tgt.TotalSha1(include_clobbered_blocks=True)))
1263
1264    # Bug: 20881595
1265    # Verify that extended blocks are really zeroed out.
1266    if self.tgt.extended:
1267      ranges_str = self.tgt.extended.to_string_raw()
1268      script.AppendExtra('if range_sha1("%s", "%s") == "%s" then' % (
1269                         self.device, ranges_str,
1270                         self._HashZeroBlocks(self.tgt.extended.size())))
1271      script.Print('Verified the updated %s image.' % (partition,))
1272      script.AppendExtra(
1273          'else\n'
1274          '  abort("%s partition has unexpected non-zero contents after OTA '
1275          'update");\n'
1276          'endif;' % (partition,))
1277    else:
1278      script.Print('Verified the updated %s image.' % (partition,))
1279
1280    script.AppendExtra(
1281        'else\n'
1282        '  abort("%s partition has unexpected contents after OTA update");\n'
1283        'endif;' % (partition,))
1284
1285  def _WriteUpdate(self, script, output_zip):
1286    ZipWrite(output_zip,
1287             '{}.transfer.list'.format(self.path),
1288             '{}.transfer.list'.format(self.partition))
1289    ZipWrite(output_zip,
1290             '{}.new.dat'.format(self.path),
1291             '{}.new.dat'.format(self.partition))
1292    ZipWrite(output_zip,
1293             '{}.patch.dat'.format(self.path),
1294             '{}.patch.dat'.format(self.partition),
1295             compress_type=zipfile.ZIP_STORED)
1296
1297    call = ('block_image_update("{device}", '
1298            'package_extract_file("{partition}.transfer.list"), '
1299            '"{partition}.new.dat", "{partition}.patch.dat");\n'.format(
1300                device=self.device, partition=self.partition))
1301    script.AppendExtra(script.WordWrap(call))
1302
1303  def _HashBlocks(self, source, ranges): # pylint: disable=no-self-use
1304    data = source.ReadRangeSet(ranges)
1305    ctx = sha1()
1306
1307    for p in data:
1308      ctx.update(p)
1309
1310    return ctx.hexdigest()
1311
1312  def _HashZeroBlocks(self, num_blocks): # pylint: disable=no-self-use
1313    """Return the hash value for all zero blocks."""
1314    zero_block = '\x00' * 4096
1315    ctx = sha1()
1316    for _ in range(num_blocks):
1317      ctx.update(zero_block)
1318
1319    return ctx.hexdigest()
1320
1321  # TODO(tbao): Due to http://b/20939131, block 0 may be changed without
1322  # remounting R/W. Will change the checking to a finer-grained way to
1323  # mask off those bits.
1324  def _CheckFirstBlock(self, script):
1325    r = rangelib.RangeSet((0, 1))
1326    srchash = self._HashBlocks(self.src, r)
1327
1328    script.AppendExtra(('(range_sha1("%s", "%s") == "%s") || '
1329                        'abort("%s has been remounted R/W; '
1330                        'reflash device to reenable OTA updates");')
1331                       % (self.device, r.to_string_raw(), srchash,
1332                          self.device))
1333
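# Example (illustrative sketch): the image objects and the script/output_zip
# arguments are assumptions; they would normally be the sparse image and
# edify script objects created by the OTA generation scripts.
#
#   block_diff = BlockDifference("system", tgt=system_tgt, src=system_src,
#                                version=3)
#   block_diff.WriteVerifyScript(script)
#   block_diff.WriteScript(script, output_zip, progress=0.8)
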
1334DataImage = blockimgdiff.DataImage
1335
1336
1337# map recovery.fstab's fs_types to mount/format "partition types"
1338PARTITION_TYPES = {
1339    "yaffs2": "MTD",
1340    "mtd": "MTD",
1341    "ext4": "EMMC",
1342    "emmc": "EMMC",
1343    "f2fs": "EMMC",
1344    "squashfs": "EMMC"
1345}
1346
1347def GetTypeAndDevice(mount_point, info):
1348  fstab = info["fstab"]
1349  if fstab:
1350    return (PARTITION_TYPES[fstab[mount_point].fs_type],
1351            fstab[mount_point].device)
1352  else:
1353    raise KeyError
1354
1355
1356def ParseCertificate(data):
1357  """Parse a PEM-format certificate."""
1358  cert = []
1359  save = False
1360  for line in data.split("\n"):
1361    if "--END CERTIFICATE--" in line:
1362      break
1363    if save:
1364      cert.append(line)
1365    if "--BEGIN CERTIFICATE--" in line:
1366      save = True
1367  cert = "".join(cert).decode('base64')
1368  return cert
1369
1370def MakeRecoveryPatch(input_dir, output_sink, recovery_img, boot_img,
1371                      info_dict=None):
1372  """Generate a binary patch that creates the recovery image starting
1373  with the boot image.  (Most of the space in these images is just the
1374  kernel, which is identical for the two, so the resulting patch
1375  should be efficient.)  Add it to the output zip, along with a shell
1376  script that is run from init.rc on first boot to actually do the
1377  patching and install the new recovery image.
1378
1379  recovery_img and boot_img should be File objects for the
1380  corresponding images.  info_dict should be the dictionary returned by
1381  common.LoadInfoDict() on the input target_files.
1382  """
1383
1384  if info_dict is None:
1385    info_dict = OPTIONS.info_dict
1386
1387  diff_program = ["imgdiff"]
1388  path = os.path.join(input_dir, "SYSTEM", "etc", "recovery-resource.dat")
1389  if os.path.exists(path):
1390    diff_program.append("-b")
1391    diff_program.append(path)
1392    bonus_args = "-b /system/etc/recovery-resource.dat"
1393  else:
1394    bonus_args = ""
1395
1396  d = Difference(recovery_img, boot_img, diff_program=diff_program)
1397  _, _, patch = d.ComputePatch()
1398  output_sink("recovery-from-boot.p", patch)
1399
1400  try:
1401    boot_type, boot_device = GetTypeAndDevice("/boot", info_dict)
1402    recovery_type, recovery_device = GetTypeAndDevice("/recovery", info_dict)
1403  except KeyError:
1404    return
1405
1406  sh = """#!/system/bin/sh
1407if ! applypatch -c %(recovery_type)s:%(recovery_device)s:%(recovery_size)d:%(recovery_sha1)s; then
1408  applypatch %(bonus_args)s %(boot_type)s:%(boot_device)s:%(boot_size)d:%(boot_sha1)s %(recovery_type)s:%(recovery_device)s %(recovery_sha1)s %(recovery_size)d %(boot_sha1)s:/system/recovery-from-boot.p && log -t recovery "Installing new recovery image: succeeded" || log -t recovery "Installing new recovery image: failed"
1409else
1410  log -t recovery "Recovery image already installed"
1411fi
1412""" % {'boot_size': boot_img.size,
1413       'boot_sha1': boot_img.sha1,
1414       'recovery_size': recovery_img.size,
1415       'recovery_sha1': recovery_img.sha1,
1416       'boot_type': boot_type,
1417       'boot_device': boot_device,
1418       'recovery_type': recovery_type,
1419       'recovery_device': recovery_device,
1420       'bonus_args': bonus_args}
1421
1422  # The install script location moved from /system/etc to /system/bin
1423  # in the L release.  Parse init.*.rc files to find out where the
1424  # target-files expects it to be, and put it there.
1425  sh_location = "etc/install-recovery.sh"
1426  found = False
1427  init_rc_dir = os.path.join(input_dir, "BOOT", "RAMDISK")
1428  init_rc_files = os.listdir(init_rc_dir)
1429  for init_rc_file in init_rc_files:
1430    if (not init_rc_file.startswith('init.') or
1431        not init_rc_file.endswith('.rc')):
1432      continue
1433
1434    with open(os.path.join(init_rc_dir, init_rc_file)) as f:
1435      for line in f:
1436        m = re.match(r"^service flash_recovery /system/(\S+)\s*$", line)
1437        if m:
1438          sh_location = m.group(1)
1439          found = True
1440          break
1441
1442    if found:
1443      break
1444
1445  print "putting script in", sh_location
1446
1447  output_sink(sh_location, sh)
1448