# common.py revision e09359abc0f8c29fbb16d35ab7375d80fde6f931
1# Copyright (C) 2008 The Android Open Source Project
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7#      http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14
15import copy
16import errno
17import getopt
18import getpass
19import imp
20import os
21import platform
22import re
23import shlex
24import shutil
25import subprocess
26import sys
27import tempfile
28import threading
29import time
30import zipfile
31
32import blockimgdiff
33import rangelib
34
35from hashlib import sha1 as sha1
36
37
class Options(object):
  """Mutable bag of global settings shared by the releasetools scripts.

  A single module-level instance (OPTIONS, created below) is mutated in
  place by command-line parsing.
  """

  def __init__(self):
    # Host tools live under out/host/<platform> in an Android build tree.
    host_out_dirs = {
        "linux2": "out/host/linux-x86",
        "darwin": "out/host/darwin-x86",
    }
    self.search_path = host_out_dirs.get(sys.platform, None)

    # Signing configuration.
    self.signapk_path = "framework/signapk.jar"  # Relative to search_path
    self.extra_signapk_args = []
    self.java_path = "java"  # Use the one on the path by default.
    self.java_args = "-Xmx2048m" # JVM Args
    self.public_key_suffix = ".x509.pem"
    self.private_key_suffix = ".pk8"
    # use otatools built boot_signer by default
    self.boot_signer_path = "boot_signer"
    self.boot_signer_args = []
    self.verity_signer_path = None
    self.verity_signer_args = []

    # Miscellaneous runtime state.
    self.verbose = False
    self.tempfiles = []
    self.device_specific = None
    self.extras = {}
    self.info_dict = None
    self.source_info_dict = None
    self.target_info_dict = None
    self.worker_threads = None
65
66
# Global options singleton shared by all releasetools scripts.
OPTIONS = Options()


# Values for "certificate" in apkcerts that mean special things.
SPECIAL_CERT_STRINGS = ("PRESIGNED", "EXTERNAL")
72
73
class ExternalError(RuntimeError):
  """Raised when an external command (e.g. unzip, signapk, zipalign) fails."""
  pass
76
77
def Run(args, **kwargs):
  """Create and return a subprocess.Popen object, printing the command
  line on the terminal if -v was specified.

  'args' is the argv list; any extra kwargs are forwarded to Popen.
  """
  if OPTIONS.verbose:
    print "  running: ", " ".join(args)
  return subprocess.Popen(args, **kwargs)
84
85
def CloseInheritedPipes():
  """Close leaked pipe file descriptors inherited from gmake on Mac OS.

  Gmake on Mac OS leaks pipe fds into child processes; close them before
  doing other work.  No-op on any platform other than Darwin.
  """
  if platform.system() != "Darwin":
    return
  for fd in range(3, 1025):
    try:
      st = os.fstat(fd)
      # S_IFIFO bit (0x1000) set means this descriptor is a pipe.
      if st is not None and (st[0] & 0x1000):
        os.close(fd)
    except OSError:
      # fd not open (or close failed) -- nothing to do.
      pass
100
101
def LoadInfoDict(input_file):
  """Read and parse the META/misc_info.txt key/value pairs from the
  input target files and return a dict.

  'input_file' may be either an open zipfile.ZipFile of a target-files
  zip or the path of an unpacked target-files directory.  Raises
  ValueError if no recovery API version can be determined.
  """

  def read_helper(fn):
    # Return the contents of archive member / file 'fn'.  Raises KeyError
    # when it doesn't exist, mirroring ZipFile.read()'s behavior so both
    # input kinds look the same to the callers below.
    if isinstance(input_file, zipfile.ZipFile):
      return input_file.read(fn)
    else:
      path = os.path.join(input_file, *fn.split("/"))
      try:
        with open(path) as f:
          return f.read()
      except IOError as e:
        if e.errno == errno.ENOENT:
          raise KeyError(fn)
  d = {}
  try:
    d = LoadDictionaryFromLines(read_helper("META/misc_info.txt").split("\n"))
  except KeyError:
    # ok if misc_info.txt doesn't exist
    pass

  # backwards compatibility: These values used to be in their own
  # files.  Look for them, in case we're processing an old
  # target_files zip.

  if "mkyaffs2_extra_flags" not in d:
    try:
      d["mkyaffs2_extra_flags"] = read_helper(
          "META/mkyaffs2-extra-flags.txt").strip()
    except KeyError:
      # ok if flags don't exist
      pass

  if "recovery_api_version" not in d:
    try:
      d["recovery_api_version"] = read_helper(
          "META/recovery-api-version.txt").strip()
    except KeyError:
      raise ValueError("can't find recovery API version in input target-files")

  if "tool_extensions" not in d:
    try:
      d["tool_extensions"] = read_helper("META/tool-extensions.txt").strip()
    except KeyError:
      # ok if extensions don't exist
      pass

  if "fstab_version" not in d:
    d["fstab_version"] = "1"

  try:
    # Old-style "name value" pairs: "blocksize" keeps its name; every
    # other entry is stored as "<name>_size".
    data = read_helper("META/imagesizes.txt")
    for line in data.split("\n"):
      if not line:
        continue
      name, value = line.split(" ", 1)
      if not value:
        continue
      if name == "blocksize":
        d[name] = value
      else:
        d[name + "_size"] = value
  except KeyError:
    pass

  def makeint(key):
    # Convert d[key] to int in place; base 0 honors 0x/octal prefixes.
    if key in d:
      d[key] = int(d[key], 0)

  makeint("recovery_api_version")
  makeint("blocksize")
  makeint("system_size")
  makeint("vendor_size")
  makeint("userdata_size")
  makeint("cache_size")
  makeint("recovery_size")
  makeint("boot_size")
  makeint("fstab_version")

  d["fstab"] = LoadRecoveryFSTab(read_helper, d["fstab_version"])
  d["build.prop"] = LoadBuildProp(read_helper)
  return d
185
186def LoadBuildProp(read_helper):
187  try:
188    data = read_helper("SYSTEM/build.prop")
189  except KeyError:
190    print "Warning: could not find SYSTEM/build.prop in %s" % zip
191    data = ""
192  return LoadDictionaryFromLines(data.split("\n"))
193
def LoadDictionaryFromLines(lines):
  """Parse an iterable of "name=value" lines into a dict.

  Blank lines, "#" comments, and lines without "=" are skipped.  The
  value keeps everything after the first "=" (including later "=").
  """
  result = {}
  for raw in lines:
    entry = raw.strip()
    if not entry or entry.startswith("#") or "=" not in entry:
      continue
    key, _, value = entry.partition("=")
    result[key] = value
  return result
204
205def LoadRecoveryFSTab(read_helper, fstab_version):
206  class Partition(object):
207    def __init__(self, mount_point, fs_type, device, length, device2, context):
208      self.mount_point = mount_point
209      self.fs_type = fs_type
210      self.device = device
211      self.length = length
212      self.device2 = device2
213      self.context = context
214
215  try:
216    data = read_helper("RECOVERY/RAMDISK/etc/recovery.fstab")
217  except KeyError:
218    print "Warning: could not find RECOVERY/RAMDISK/etc/recovery.fstab"
219    data = ""
220
221  if fstab_version == 1:
222    d = {}
223    for line in data.split("\n"):
224      line = line.strip()
225      if not line or line.startswith("#"):
226        continue
227      pieces = line.split()
228      if not 3 <= len(pieces) <= 4:
229        raise ValueError("malformed recovery.fstab line: \"%s\"" % (line,))
230      options = None
231      if len(pieces) >= 4:
232        if pieces[3].startswith("/"):
233          device2 = pieces[3]
234          if len(pieces) >= 5:
235            options = pieces[4]
236        else:
237          device2 = None
238          options = pieces[3]
239      else:
240        device2 = None
241
242      mount_point = pieces[0]
243      length = 0
244      if options:
245        options = options.split(",")
246        for i in options:
247          if i.startswith("length="):
248            length = int(i[7:])
249          else:
250            print "%s: unknown option \"%s\"" % (mount_point, i)
251
252      d[mount_point] = Partition(mount_point=mount_point, fs_type=pieces[1],
253                                 device=pieces[2], length=length,
254                                 device2=device2)
255
256  elif fstab_version == 2:
257    d = {}
258    for line in data.split("\n"):
259      line = line.strip()
260      if not line or line.startswith("#"):
261        continue
262      # <src> <mnt_point> <type> <mnt_flags and options> <fs_mgr_flags>
263      pieces = line.split()
264      if len(pieces) != 5:
265        raise ValueError("malformed recovery.fstab line: \"%s\"" % (line,))
266
267      # Ignore entries that are managed by vold
268      options = pieces[4]
269      if "voldmanaged=" in options:
270        continue
271
272      # It's a good line, parse it
273      length = 0
274      options = options.split(",")
275      for i in options:
276        if i.startswith("length="):
277          length = int(i[7:])
278        else:
279          # Ignore all unknown options in the unified fstab
280          continue
281
282      mount_flags = pieces[3]
283      # Honor the SELinux context if present.
284      context = None
285      for i in mount_flags.split(","):
286        if i.startswith("context="):
287          context = i
288
289      mount_point = pieces[1]
290      d[mount_point] = Partition(mount_point=mount_point, fs_type=pieces[2],
291                                 device=pieces[0], length=length,
292                                 device2=None, context=context)
293
294  else:
295    raise ValueError("Unknown fstab_version: \"%d\"" % (fstab_version,))
296
297  return d
298
299
300def DumpInfoDict(d):
301  for k, v in sorted(d.items()):
302    print "%-25s = (%s) %s" % (k, type(v).__name__, v)
303
304
def BuildBootableImage(sourcedir, fs_config_file, info_dict=None):
  """Take a kernel, cmdline, and ramdisk directory from the input (in
  'sourcedir'), and turn them into a boot image.  Return the image
  data, or None if sourcedir does not appear to contain files for
  building the requested image.

  Optional files under 'sourcedir' ("second", "cmdline", "base",
  "pagesize") are forwarded to mkbootimg when present.  Depending on
  info_dict, the result may additionally be signed with boot_signer or
  the vboot signer.  'info_dict' defaults to OPTIONS.info_dict.
  """

  if (not os.access(os.path.join(sourcedir, "RAMDISK"), os.F_OK) or
      not os.access(os.path.join(sourcedir, "kernel"), os.F_OK)):
    return None

  if info_dict is None:
    info_dict = OPTIONS.info_dict

  ramdisk_img = tempfile.NamedTemporaryFile()
  img = tempfile.NamedTemporaryFile()

  # Build the compressed ramdisk: mkbootfs | minigzip.
  if os.access(fs_config_file, os.F_OK):
    cmd = ["mkbootfs", "-f", fs_config_file, os.path.join(sourcedir, "RAMDISK")]
  else:
    cmd = ["mkbootfs", os.path.join(sourcedir, "RAMDISK")]
  p1 = Run(cmd, stdout=subprocess.PIPE)
  p2 = Run(["minigzip"],
           stdin=p1.stdout, stdout=ramdisk_img.file.fileno())

  p2.wait()
  p1.wait()
  assert p1.returncode == 0, "mkbootfs of %s ramdisk failed" % (sourcedir,)
  assert p2.returncode == 0, "minigzip of %s ramdisk failed" % (sourcedir,)

  # use MKBOOTIMG from environ, or "mkbootimg" if empty or not set
  mkbootimg = os.getenv('MKBOOTIMG') or "mkbootimg"

  cmd = [mkbootimg, "--kernel", os.path.join(sourcedir, "kernel")]

  fn = os.path.join(sourcedir, "second")
  if os.access(fn, os.F_OK):
    cmd.append("--second")
    cmd.append(fn)

  fn = os.path.join(sourcedir, "cmdline")
  if os.access(fn, os.F_OK):
    cmd.append("--cmdline")
    cmd.append(open(fn).read().rstrip("\n"))

  fn = os.path.join(sourcedir, "base")
  if os.access(fn, os.F_OK):
    cmd.append("--base")
    cmd.append(open(fn).read().rstrip("\n"))

  fn = os.path.join(sourcedir, "pagesize")
  if os.access(fn, os.F_OK):
    cmd.append("--pagesize")
    cmd.append(open(fn).read().rstrip("\n"))

  args = info_dict.get("mkbootimg_args", None)
  if args and args.strip():
    cmd.extend(shlex.split(args))

  # With vboot enabled, mkbootimg writes an intermediate unsigned image
  # and the signer produces the final one; otherwise write directly.
  img_unsigned = None
  if info_dict.get("vboot", None):
    img_unsigned = tempfile.NamedTemporaryFile()
    cmd.extend(["--ramdisk", ramdisk_img.name,
                "--output", img_unsigned.name])
  else:
    cmd.extend(["--ramdisk", ramdisk_img.name,
                "--output", img.name])

  p = Run(cmd, stdout=subprocess.PIPE)
  p.communicate()
  assert p.returncode == 0, "mkbootimg of %s image failed" % (
      os.path.basename(sourcedir),)

  if (info_dict.get("boot_signer", None) == "true" and
      info_dict.get("verity_key", None)):
    path = "/" + os.path.basename(sourcedir).lower()
    cmd = [OPTIONS.boot_signer_path]
    cmd.extend(OPTIONS.boot_signer_args)
    cmd.extend([path, img.name,
                info_dict["verity_key"] + ".pk8",
                info_dict["verity_key"] + ".x509.pem", img.name])
    p = Run(cmd, stdout=subprocess.PIPE)
    p.communicate()
    assert p.returncode == 0, "boot_signer of %s image failed" % path

  # Sign the image if vboot is non-empty.
  elif info_dict.get("vboot", None):
    path = "/" + os.path.basename(sourcedir).lower()
    img_keyblock = tempfile.NamedTemporaryFile()
    cmd = [info_dict["vboot_signer_cmd"], info_dict["futility"],
           img_unsigned.name, info_dict["vboot_key"] + ".vbpubk",
           info_dict["vboot_key"] + ".vbprivk", img_keyblock.name,
           img.name]
    p = Run(cmd, stdout=subprocess.PIPE)
    p.communicate()
    assert p.returncode == 0, "vboot_signer of %s image failed" % path

    # Clean up the temp files.
    img_unsigned.close()
    img_keyblock.close()

  # Bug fix: this used to be img.seek(os.SEEK_SET, 0) -- arguments swapped.
  # It only worked because os.SEEK_SET happens to equal 0.  Rewind to the
  # start of the image before reading it back.
  img.seek(0, os.SEEK_SET)
  data = img.read()

  ramdisk_img.close()
  img.close()

  return data
412
413
414def GetBootableImage(name, prebuilt_name, unpack_dir, tree_subdir,
415                     info_dict=None):
416  """Return a File object (with name 'name') with the desired bootable
417  image.  Look for it in 'unpack_dir'/BOOTABLE_IMAGES under the name
418  'prebuilt_name', otherwise look for it under 'unpack_dir'/IMAGES,
419  otherwise construct it from the source files in
420  'unpack_dir'/'tree_subdir'."""
421
422  prebuilt_path = os.path.join(unpack_dir, "BOOTABLE_IMAGES", prebuilt_name)
423  if os.path.exists(prebuilt_path):
424    print "using prebuilt %s from BOOTABLE_IMAGES..." % (prebuilt_name,)
425    return File.FromLocalFile(name, prebuilt_path)
426
427  prebuilt_path = os.path.join(unpack_dir, "IMAGES", prebuilt_name)
428  if os.path.exists(prebuilt_path):
429    print "using prebuilt %s from IMAGES..." % (prebuilt_name,)
430    return File.FromLocalFile(name, prebuilt_path)
431
432  print "building image from target_files %s..." % (tree_subdir,)
433  fs_config = "META/" + tree_subdir.lower() + "_filesystem_config.txt"
434  data = BuildBootableImage(os.path.join(unpack_dir, tree_subdir),
435                            os.path.join(unpack_dir, fs_config),
436                            info_dict)
437  if data:
438    return File(name, data)
439  return None
440
441
def UnzipTemp(filename, pattern=None):
  """Unzip the given archive into a temporary directory and return the name.

  If filename is of the form "foo.zip+bar.zip", unzip foo.zip into a
  temp dir, then unzip bar.zip into that_dir/BOOTABLE_IMAGES.

  Returns (tempdir, zipobj) where zipobj is a zipfile.ZipFile (of the
  main file), open for reading.  The temp dir is registered for
  Cleanup().
  """

  tmp = tempfile.mkdtemp(prefix="targetfiles-")
  OPTIONS.tempfiles.append(tmp)

  def _extract(zip_path, dest_dir):
    # Shell out to unzip; raise ExternalError on any failure.
    cmd = ["unzip", "-o", "-q", zip_path, "-d", dest_dir]
    if pattern is not None:
      cmd.append(pattern)
    proc = Run(cmd, stdout=subprocess.PIPE)
    proc.communicate()
    if proc.returncode != 0:
      raise ExternalError("failed to unzip input target-files \"%s\"" %
                          (zip_path,))

  m = re.match(r"^(.*[.]zip)\+(.*[.]zip)$", filename, re.IGNORECASE)
  if m:
    _extract(m.group(1), tmp)
    _extract(m.group(2), os.path.join(tmp, "BOOTABLE_IMAGES"))
    filename = m.group(1)
  else:
    _extract(filename, tmp)

  return tmp, zipfile.ZipFile(filename, "r")
474
475
def GetKeyPasswords(keylist):
  """Given a list of keys, prompt the user to enter passwords for
  those which require them.  Return a {key: password} dict.  password
  will be None if the key has no password.

  Each key is probed with openssl to classify it as unencrypted,
  encrypted with an empty password, encrypted (needs prompting), or
  unparseable (left for signapk.jar to handle).
  """

  no_passwords = []
  need_passwords = []
  key_passwords = {}
  devnull = open("/dev/null", "w+b")
  for k in sorted(keylist):
    # We don't need a password for things that aren't really keys.
    if k in SPECIAL_CERT_STRINGS:
      no_passwords.append(k)
      continue

    # Probe 1: try to read the key as an unencrypted PKCS#8 blob.
    p = Run(["openssl", "pkcs8", "-in", k+OPTIONS.private_key_suffix,
             "-inform", "DER", "-nocrypt"],
            stdin=devnull.fileno(),
            stdout=devnull.fileno(),
            stderr=subprocess.STDOUT)
    p.communicate()
    if p.returncode == 0:
      # Definitely an unencrypted key.
      no_passwords.append(k)
    else:
      # Probe 2: retry with an empty password to tell "encrypted" apart
      # from "unparseable", based on openssl's stderr text.
      p = Run(["openssl", "pkcs8", "-in", k+OPTIONS.private_key_suffix,
               "-inform", "DER", "-passin", "pass:"],
              stdin=devnull.fileno(),
              stdout=devnull.fileno(),
              stderr=subprocess.PIPE)
      _, stderr = p.communicate()
      if p.returncode == 0:
        # Encrypted key with empty string as password.
        key_passwords[k] = ''
      elif stderr.startswith('Error decrypting key'):
        # Definitely encrypted key.
        # It would have said "Error reading key" if it didn't parse correctly.
        need_passwords.append(k)
      else:
        # Potentially, a type of key that openssl doesn't understand.
        # We'll let the routines in signapk.jar handle it.
        no_passwords.append(k)
  devnull.close()

  # Prompt (or consult $ANDROID_PW_FILE) only for keys that need it.
  key_passwords.update(PasswordManager().GetPasswords(need_passwords))
  key_passwords.update(dict.fromkeys(no_passwords, None))
  return key_passwords
523
524
def SignFile(input_name, output_name, key, password, align=None,
             whole_file=False):
  """Sign the input_name zip/jar/apk, producing output_name.  Use the
  given key and password (the latter may be None if the key does not
  have a password.

  If align is an integer > 1, zipalign is run to align stored files in
  the output zip on 'align'-byte boundaries.

  If whole_file is true, use the "-w" option to SignApk to embed a
  signature that covers the whole file in the archive comment of the
  zip file.

  Raises ExternalError if signapk.jar or zipalign exits non-zero.
  """

  # Alignment of 0 or 1 is a no-op; normalize to "skip zipalign".
  if align == 0 or align == 1:
    align = None

  if align:
    # Sign into a temp file first, then zipalign it into output_name.
    temp = tempfile.NamedTemporaryFile()
    sign_name = temp.name
  else:
    sign_name = output_name

  cmd = [OPTIONS.java_path, OPTIONS.java_args, "-jar",
         os.path.join(OPTIONS.search_path, OPTIONS.signapk_path)]
  cmd.extend(OPTIONS.extra_signapk_args)
  if whole_file:
    cmd.append("-w")
  cmd.extend([key + OPTIONS.public_key_suffix,
              key + OPTIONS.private_key_suffix,
              input_name, sign_name])

  # signapk.jar reads the key password (newline-terminated) from stdin.
  p = Run(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
  if password is not None:
    password += "\n"
  p.communicate(password)
  if p.returncode != 0:
    raise ExternalError("signapk.jar failed: return code %s" % (p.returncode,))

  if align:
    p = Run(["zipalign", "-f", "-p", str(align), sign_name, output_name])
    p.communicate()
    if p.returncode != 0:
      raise ExternalError("zipalign failed: return code %s" % (p.returncode,))
    temp.close()
570
571
def CheckSize(data, target, info_dict):
  """Check the data string passed against the max size limit, if
  any, for the given target.  Raise exception if the data is too big.
  Print a warning if the data is nearing the maximum size.

  'target' is an image name such as "system.img"; its size limit is
  looked up in info_dict as "<device>_size", where <device> comes from
  the fstab entry for the image's mount point.
  """

  if target.endswith(".img"):
    target = target[:-4]
  mount_point = "/" + target

  fs_type = None
  limit = None
  if info_dict["fstab"]:
    if mount_point == "/userdata":
      # The userdata image is mounted at /data.
      mount_point = "/data"
    p = info_dict["fstab"][mount_point]
    fs_type = p.fs_type
    device = p.device
    if "/" in device:
      # Reduce a device path like /dev/block/mmcblk0p1 to its basename.
      device = device[device.rfind("/")+1:]
    limit = info_dict.get(device + "_size", None)
  # No fstab entry or no configured limit: nothing to check.
  if not fs_type or not limit:
    return

  if fs_type == "yaffs2":
    # image size should be increased by 1/64th to account for the
    # spare area (64 bytes per 2k page)
    # NOTE(review): relies on Python 2 integer floor division.
    limit = limit / 2048 * (2048+64)
  size = len(data)
  pct = float(size) * 100.0 / limit
  msg = "%s size (%d) is %.2f%% of limit (%d)" % (target, size, pct, limit)
  if pct >= 99.0:
    raise ExternalError(msg)
  elif pct >= 95.0:
    print
    print "  WARNING: ", msg
    print
  elif OPTIONS.verbose:
    print "  ", msg
610
611
def ReadApkCerts(tf_zip):
  """Given a target_files ZipFile, parse the META/apkcerts.txt file
  and return a {package: cert} dict.

  Raises ValueError when a matched line has inconsistent cert/key
  suffixes; lines that don't match the expected format are skipped.
  """
  certmap = {}
  line_re = re.compile(r'^name="(.*)"\s+certificate="(.*)"\s+'
                       r'private_key="(.*)"$')
  pub_suffix_len = len(OPTIONS.public_key_suffix)
  priv_suffix_len = len(OPTIONS.private_key_suffix)
  for raw in tf_zip.read("META/apkcerts.txt").split("\n"):
    entry = raw.strip()
    if not entry:
      continue
    m = line_re.match(entry)
    if not m:
      continue
    name, cert, privkey = m.groups()
    if cert in SPECIAL_CERT_STRINGS and not privkey:
      # "PRESIGNED"/"EXTERNAL" markers are stored verbatim.
      certmap[name] = cert
    elif (cert.endswith(OPTIONS.public_key_suffix) and
          privkey.endswith(OPTIONS.private_key_suffix) and
          cert[:-pub_suffix_len] == privkey[:-priv_suffix_len]):
      # Store the shared basename of the cert/key pair.
      certmap[name] = cert[:-pub_suffix_len]
    else:
      raise ValueError("failed to parse line from apkcerts.txt:\n" + entry)
  return certmap
635
636
# Help text for the options shared by every releasetools script; Usage()
# prints it after the calling module's own docstring.
COMMON_DOCSTRING = """
  -p  (--path)  <dir>
      Prepend <dir>/bin to the list of places to search for binaries
      run by this script, and expect to find jars in <dir>/framework.

  -s  (--device_specific) <file>
      Path to the python module containing device-specific
      releasetools code.

  -x  (--extra)  <key=value>
      Add a key/value pair to the 'extras' dict, which device-specific
      extension code may look at.

  -v  (--verbose)
      Show command lines being executed.

  -h  (--help)
      Display this usage message and exit.
"""
656
def Usage(docstring):
  """Print the calling module's docstring followed by the common options."""
  print docstring.rstrip("\n")
  print COMMON_DOCSTRING
660
661
def ParseOptions(argv,
                 docstring,
                 extra_opts="", extra_long_opts=(),
                 extra_option_handler=None):
  """Parse the options in argv and return any arguments that aren't
  flags.  docstring is the calling module's docstring, to be displayed
  for errors and -h.  extra_opts and extra_long_opts are for flags
  defined by the caller, which are processed by passing them to
  extra_option_handler.

  Recognized options mutate the global OPTIONS object.  Exits via
  sys.exit() on parse errors (status 2) and for -h/--help.
  """

  try:
    opts, args = getopt.getopt(
        argv, "hvp:s:x:" + extra_opts,
        ["help", "verbose", "path=", "signapk_path=", "extra_signapk_args=",
         "java_path=", "java_args=", "public_key_suffix=",
         "private_key_suffix=", "boot_signer_path=", "boot_signer_args=",
         "verity_signer_path=", "verity_signer_args=", "device_specific=",
         "extra="] +
        list(extra_long_opts))
  except getopt.GetoptError as err:
    Usage(docstring)
    print "**", str(err), "**"
    sys.exit(2)

  for o, a in opts:
    if o in ("-h", "--help"):
      Usage(docstring)
      sys.exit()
    elif o in ("-v", "--verbose"):
      OPTIONS.verbose = True
    elif o in ("-p", "--path"):
      OPTIONS.search_path = a
    elif o in ("--signapk_path",):
      OPTIONS.signapk_path = a
    elif o in ("--extra_signapk_args",):
      OPTIONS.extra_signapk_args = shlex.split(a)
    elif o in ("--java_path",):
      OPTIONS.java_path = a
    elif o in ("--java_args",):
      OPTIONS.java_args = a
    elif o in ("--public_key_suffix",):
      OPTIONS.public_key_suffix = a
    elif o in ("--private_key_suffix",):
      OPTIONS.private_key_suffix = a
    elif o in ("--boot_signer_path",):
      OPTIONS.boot_signer_path = a
    elif o in ("--boot_signer_args",):
      OPTIONS.boot_signer_args = shlex.split(a)
    elif o in ("--verity_signer_path",):
      OPTIONS.verity_signer_path = a
    elif o in ("--verity_signer_args",):
      OPTIONS.verity_signer_args = shlex.split(a)
    elif o in ("-s", "--device_specific"):
      OPTIONS.device_specific = a
    elif o in ("-x", "--extra"):
      key, value = a.split("=", 1)
      OPTIONS.extras[key] = value
    else:
      # Unrecognized flags fall through to the caller's handler.
      if extra_option_handler is None or not extra_option_handler(o, a):
        assert False, "unknown option \"%s\"" % (o,)

  if OPTIONS.search_path:
    # Make <search_path>/bin take precedence when locating helper binaries.
    os.environ["PATH"] = (os.path.join(OPTIONS.search_path, "bin") +
                          os.pathsep + os.environ["PATH"])

  return args
728
729
def MakeTempFile(prefix=None, suffix=None):
  """Create a temp file, register it for deletion by Cleanup(), and
  return its filename."""
  fd, path = tempfile.mkstemp(prefix=prefix, suffix=suffix)
  # mkstemp leaves the descriptor open; we only want the name.
  os.close(fd)
  OPTIONS.tempfiles.append(path)
  return path
737
738
def Cleanup():
  """Delete every temp file/dir registered in OPTIONS.tempfiles.

  Fix: the list is now emptied afterwards, so calling Cleanup() a
  second time is a harmless no-op instead of crashing on the paths
  that were already deleted.
  """
  for path in OPTIONS.tempfiles:
    if os.path.isdir(path):
      shutil.rmtree(path)
    else:
      os.remove(path)
  del OPTIONS.tempfiles[:]
745
746
class PasswordManager(object):
  """Collects key passwords, preferring an editable $ANDROID_PW_FILE
  over interactive getpass prompting."""

  def __init__(self):
    # Both settings come from the environment; either may be absent.
    self.editor = os.getenv("EDITOR", None)
    self.pwfile = os.getenv("ANDROID_PW_FILE", None)

  def GetPasswords(self, items):
    """Get passwords corresponding to each string in 'items',
    returning a dict.  (The dict may have keys in addition to the
    values in 'items'.)

    Uses the passwords in $ANDROID_PW_FILE if available, letting the
    user edit that file to add more needed passwords.  If no editor is
    available, or $ANDROID_PW_FILE isn't defined, prompts the user
    interactively in the ordinary way.
    """

    current = self.ReadFile()

    first = True
    while True:
      missing = []
      for i in items:
        if i not in current or not current[i]:
          missing.append(i)
      # Are all the passwords already in the file?
      if not missing:
        return current

      # Seed empty entries so they appear in the editor template.
      for i in missing:
        current[i] = ""

      if not first:
        print "key file %s still missing some passwords." % (self.pwfile,)
        answer = raw_input("try to edit again? [y]> ").strip()
        if answer and answer[0] not in 'yY':
          raise RuntimeError("key passwords unavailable")
      first = False

      current = self.UpdateAndReadFile(current)

  def PromptResult(self, current): # pylint: disable=no-self-use
    """Prompt the user to enter a value (password) for each key in
    'current' whose value is false.  Returns a new dict with all the
    values.
    """
    result = {}
    for k, v in sorted(current.iteritems()):
      if v:
        result[k] = v
      else:
        # Re-prompt until a non-empty password is entered.
        while True:
          result[k] = getpass.getpass(
              "Enter password for %s key> " % k).strip()
          if result[k]:
            break
    return result

  def UpdateAndReadFile(self, current):
    """Write 'current' to the password file, let the user edit it in
    $EDITOR, and return the re-read contents.  Falls back to
    interactive prompting when no editor/password file is configured."""
    if not self.editor or not self.pwfile:
      return self.PromptResult(current)

    f = open(self.pwfile, "w")
    os.chmod(self.pwfile, 0o600)  # passwords inside: owner-only access
    f.write("# Enter key passwords between the [[[ ]]] brackets.\n")
    f.write("# (Additional spaces are harmless.)\n\n")

    first_line = None
    # Sorting on (not v, ...) lists the passwordless entries first.
    sorted_list = sorted([(not v, k, v) for (k, v) in current.iteritems()])
    for i, (_, k, v) in enumerate(sorted_list):
      f.write("[[[  %s  ]]] %s\n" % (v, k))
      if not v and first_line is None:
        # position cursor on first line with no password.
        first_line = i + 4
    f.close()

    p = Run([self.editor, "+%d" % (first_line,), self.pwfile])
    _, _ = p.communicate()

    return self.ReadFile()

  def ReadFile(self):
    """Parse self.pwfile into a {key: password} dict; returns an empty
    dict when the variable is unset or the file doesn't exist."""
    result = {}
    if self.pwfile is None:
      return result
    try:
      f = open(self.pwfile, "r")
      for line in f:
        line = line.strip()
        if not line or line[0] == '#':
          continue
        m = re.match(r"^\[\[\[\s*(.*?)\s*\]\]\]\s*(\S+)$", line)
        if not m:
          print "failed to parse password file: ", line
        else:
          result[m.group(2)] = m.group(1)
      f.close()
    except IOError as e:
      if e.errno != errno.ENOENT:
        print "error reading password file: ", str(e)
    return result
847
848
def ZipWrite(zip_file, filename, arcname=None, perms=0o644,
             compress_type=None):
  """Add the on-disk file 'filename' to ZipFile 'zip_file' as 'arcname'.

  The entry gets fixed permissions ('perms') and a fixed timestamp
  (2009-01-01) so repeated builds produce identical archives.

  http://b/18015246
  Python 2.7's zipfile implementation wrongly thinks that zip64 is
  required for files larger than 2GiB, so ZIP64_LIMIT is temporarily
  raised.  zipfile.writestr() cannot handle strings that large (the
  interpreter sometimes rejects them), which is why zipfile.write() is
  used directly here.  This mess can be avoided if we port to python3.
  """
  import datetime

  saved_limit = zipfile.ZIP64_LIMIT
  zipfile.ZIP64_LIMIT = (1 << 32) - 1

  if compress_type is None:
    compress_type = zip_file.compression
  if arcname is None:
    arcname = filename

  original_stat = os.stat(filename)

  try:
    # zipfile.write() offers no ZipInfo hook, so temporarily rewrite the
    # file's own mode and mtime, then restore them afterwards.
    os.chmod(filename, perms)

    fixed_mtime = (datetime.datetime(2009, 1, 1) -
                   datetime.datetime.fromtimestamp(0)).total_seconds()
    os.utime(filename, (fixed_mtime, fixed_mtime))

    zip_file.write(filename, arcname=arcname, compress_type=compress_type)
  finally:
    os.chmod(filename, original_stat.st_mode)
    os.utime(filename, (original_stat.st_atime, original_stat.st_mtime))
    zipfile.ZIP64_LIMIT = saved_limit
887
888
def ZipWriteStr(zip_file, zinfo_or_arcname, data, perms=None,
                compress_type=None):
  """Wrap zipfile.writestr() function to work around the zip64 limit.

  Even with the ZIP64_LIMIT workaround, it won't allow writing a string
  longer than 2GiB. It gives 'OverflowError: size does not fit in an int'
  when calling crc32(bytes).

  But it still works fine to write a shorter string into a large zip file.
  We should use ZipWrite() whenever possible, and only use ZipWriteStr()
  when we know the string won't be too long.

  Fix: ZIP64_LIMIT is now restored in a finally block; previously an
  exception from writestr() left the module-global limit modified.
  """

  saved_zip64_limit = zipfile.ZIP64_LIMIT
  zipfile.ZIP64_LIMIT = (1 << 32) - 1

  try:
    if not isinstance(zinfo_or_arcname, zipfile.ZipInfo):
      zinfo = zipfile.ZipInfo(filename=zinfo_or_arcname)
      zinfo.compress_type = zip_file.compression
      # Plain archive names default to world-readable permissions.
      if perms is None:
        perms = 0o644
    else:
      zinfo = zinfo_or_arcname

    # If compress_type is given, it overrides the value in zinfo.
    if compress_type is not None:
      zinfo.compress_type = compress_type

    # If perms is given, it has a priority.
    if perms is not None:
      zinfo.external_attr = perms << 16

    # Use a fixed timestamp so the output is repeatable.
    zinfo.date_time = (2009, 1, 1, 0, 0, 0)

    zip_file.writestr(zinfo, data)
  finally:
    zipfile.ZIP64_LIMIT = saved_zip64_limit
926
927
def ZipClose(zip_file):
  """Close 'zip_file' with the zip64 limit raised.

  http://b/18015246
  zipfile also refers to ZIP64_LIMIT during close() when it writes out
  the central directory.

  Fix: the limit is restored in a finally block so an exception from
  close() no longer leaves the module-global value modified.
  """
  saved_zip64_limit = zipfile.ZIP64_LIMIT
  zipfile.ZIP64_LIMIT = (1 << 32) - 1

  try:
    zip_file.close()
  finally:
    zipfile.ZIP64_LIMIT = saved_zip64_limit
938
939
class DeviceSpecificParams(object):
  """Loads and dispatches to an optional device-specific extension module.

  The module is located via OPTIONS.device_specific (a directory containing
  a "releasetools" module, or a path to a .py file).  Hook methods below
  forward to same-named functions in that module, passing this object as
  the first argument; each is a no-op returning None when no module is
  loaded or the module doesn't define the hook.
  """
  # Shared across instances: once one instance loads the module, later
  # instances reuse it.
  module = None
  def __init__(self, **kwargs):
    """Keyword arguments to the constructor become attributes of this
    object, which is passed to all functions in the device-specific
    module."""
    for k, v in kwargs.iteritems():
      setattr(self, k, v)
    self.extras = OPTIONS.extras

    if self.module is None:
      path = OPTIONS.device_specific
      if not path:
        # No device-specific module configured; all hooks become no-ops.
        return
      try:
        if os.path.isdir(path):
          # A directory: look for a "releasetools" module inside it.
          info = imp.find_module("releasetools", [path])
        else:
          # A file path: strip a trailing ".py" and import by module name.
          d, f = os.path.split(path)
          b, x = os.path.splitext(f)
          if x == ".py":
            f = b
          info = imp.find_module(f, [d])
        print "loaded device-specific extensions from", path
        self.module = imp.load_module("device_specific", *info)
      except ImportError:
        # Best-effort: a missing module is not fatal.
        print "unable to load device-specific module; assuming none"

  def _DoCall(self, function_name, *args, **kwargs):
    """Call the named function in the device-specific module, passing
    the given args and kwargs.  The first argument to the call will be
    the DeviceSpecific object itself.  If there is no module, or the
    module does not define the function, return the value of the
    'default' kwarg (which itself defaults to None)."""
    if self.module is None or not hasattr(self.module, function_name):
      return kwargs.get("default", None)
    return getattr(self.module, function_name)(*((self,) + args), **kwargs)

  def FullOTA_Assertions(self):
    """Called after emitting the block of assertions at the top of a
    full OTA package.  Implementations can add whatever additional
    assertions they like."""
    return self._DoCall("FullOTA_Assertions")

  def FullOTA_InstallBegin(self):
    """Called at the start of full OTA installation."""
    return self._DoCall("FullOTA_InstallBegin")

  def FullOTA_InstallEnd(self):
    """Called at the end of full OTA installation; typically this is
    used to install the image for the device's baseband processor."""
    return self._DoCall("FullOTA_InstallEnd")

  def IncrementalOTA_Assertions(self):
    """Called after emitting the block of assertions at the top of an
    incremental OTA package.  Implementations can add whatever
    additional assertions they like."""
    return self._DoCall("IncrementalOTA_Assertions")

  def IncrementalOTA_VerifyBegin(self):
    """Called at the start of the verification phase of incremental
    OTA installation; additional checks can be placed here to abort
    the script before any changes are made."""
    return self._DoCall("IncrementalOTA_VerifyBegin")

  def IncrementalOTA_VerifyEnd(self):
    """Called at the end of the verification phase of incremental OTA
    installation; additional checks can be placed here to abort the
    script before any changes are made."""
    return self._DoCall("IncrementalOTA_VerifyEnd")

  def IncrementalOTA_InstallBegin(self):
    """Called at the start of incremental OTA installation (after
    verification is complete)."""
    return self._DoCall("IncrementalOTA_InstallBegin")

  def IncrementalOTA_InstallEnd(self):
    """Called at the end of incremental OTA installation; typically
    this is used to install the image for the device's baseband
    processor."""
    return self._DoCall("IncrementalOTA_InstallEnd")
1021
class File(object):
  """An in-memory file: an archive name plus contents, with cached size
  and SHA-1 digest."""

  def __init__(self, name, data):
    self.name = name
    self.data = data
    self.size = len(data)
    self.sha1 = sha1(data).hexdigest()

  @classmethod
  def FromLocalFile(cls, name, diskname):
    """Build a File named 'name' from the contents of 'diskname' on disk."""
    # Use a context manager so the handle is closed even if read() raises
    # (the previous open/read/close sequence leaked it on error).
    with open(diskname, "rb") as f:
      data = f.read()
    return File(name, data)

  def WriteToTemp(self):
    """Dump the contents into a NamedTemporaryFile and return it.

    The caller owns the returned file object and must close() it; the
    backing file is deleted when closed.
    """
    t = tempfile.NamedTemporaryFile()
    t.write(self.data)
    t.flush()
    return t

  def AddToZip(self, z, compression=None):
    """Store this file in the open output zip 'z' via ZipWriteStr()."""
    ZipWriteStr(z, self.name, self.data, compress_type=compression)
1044
# Maps a target file's extension to the external diff tool used to compute
# its patch.  Archive/image formats go through imgdiff ("-z" for zip-style
# containers); extensions not listed here fall back to plain bsdiff (see
# Difference.ComputePatch below).
DIFF_PROGRAM_BY_EXT = {
    ".gz" : "imgdiff",
    ".zip" : ["imgdiff", "-z"],
    ".jar" : ["imgdiff", "-z"],
    ".apk" : ["imgdiff", "-z"],
    ".img" : "imgdiff",
    }
1052
1053class Difference(object):
1054  def __init__(self, tf, sf, diff_program=None):
1055    self.tf = tf
1056    self.sf = sf
1057    self.patch = None
1058    self.diff_program = diff_program
1059
1060  def ComputePatch(self):
1061    """Compute the patch (as a string of data) needed to turn sf into
1062    tf.  Returns the same tuple as GetPatch()."""
1063
1064    tf = self.tf
1065    sf = self.sf
1066
1067    if self.diff_program:
1068      diff_program = self.diff_program
1069    else:
1070      ext = os.path.splitext(tf.name)[1]
1071      diff_program = DIFF_PROGRAM_BY_EXT.get(ext, "bsdiff")
1072
1073    ttemp = tf.WriteToTemp()
1074    stemp = sf.WriteToTemp()
1075
1076    ext = os.path.splitext(tf.name)[1]
1077
1078    try:
1079      ptemp = tempfile.NamedTemporaryFile()
1080      if isinstance(diff_program, list):
1081        cmd = copy.copy(diff_program)
1082      else:
1083        cmd = [diff_program]
1084      cmd.append(stemp.name)
1085      cmd.append(ttemp.name)
1086      cmd.append(ptemp.name)
1087      p = Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
1088      err = []
1089      def run():
1090        _, e = p.communicate()
1091        if e:
1092          err.append(e)
1093      th = threading.Thread(target=run)
1094      th.start()
1095      th.join(timeout=300)   # 5 mins
1096      if th.is_alive():
1097        print "WARNING: diff command timed out"
1098        p.terminate()
1099        th.join(5)
1100        if th.is_alive():
1101          p.kill()
1102          th.join()
1103
1104      if err or p.returncode != 0:
1105        print "WARNING: failure running %s:\n%s\n" % (
1106            diff_program, "".join(err))
1107        self.patch = None
1108        return None, None, None
1109      diff = ptemp.read()
1110    finally:
1111      ptemp.close()
1112      stemp.close()
1113      ttemp.close()
1114
1115    self.patch = diff
1116    return self.tf, self.sf, self.patch
1117
1118
1119  def GetPatch(self):
1120    """Return a tuple (target_file, source_file, patch_data).
1121    patch_data may be None if ComputePatch hasn't been called, or if
1122    computing the patch failed."""
1123    return self.tf, self.sf, self.patch
1124
1125
def ComputeDifferences(diffs):
  """Call ComputePatch on all the Difference objects in 'diffs'."""
  print len(diffs), "diffs to compute"

  # Do the largest files first, to try and reduce the long-pole effect.
  by_size = [(i.tf.size, i) for i in diffs]
  by_size.sort(reverse=True)
  by_size = [i[1] for i in by_size]

  lock = threading.Lock()
  diff_iter = iter(by_size)   # accessed under lock

  def worker():
    # Invariant: the lock is held whenever diff_iter is advanced (at the
    # top of each for-loop iteration) and while results are printed; it is
    # released only around the expensive ComputePatch() call so workers can
    # diff in parallel.
    try:
      lock.acquire()
      for d in diff_iter:
        lock.release()
        start = time.time()
        d.ComputePatch()
        dur = time.time() - start
        lock.acquire()

        tf, sf, patch = d.GetPatch()
        if sf.name == tf.name:
          name = tf.name
        else:
          name = "%s (%s)" % (tf.name, sf.name)
        if patch is None:
          print "patching failed!                                  %s" % (name,)
        else:
          print "%8.2f sec %8d / %8d bytes (%6.2f%%) %s" % (
              dur, len(patch), tf.size, 100.0 * len(patch) / tf.size, name)
      # The loop exits with the lock held (acquired either before the loop
      # or at the bottom of the last iteration).
      lock.release()
    except Exception as e:
      print e
      raise

  # start worker threads; wait for them all to finish.
  threads = [threading.Thread(target=worker)
             for i in range(OPTIONS.worker_threads)]
  for th in threads:
    th.start()
  while threads:
    threads.pop().join()
1170
1171
class BlockDifference(object):
  """Emits edify script fragments for a block-based (incremental or full)
  update of one partition, using blockimgdiff to compute the transfer list
  and patch data.
  """

  def __init__(self, partition, tgt, src=None, check_first_block=False,
               version=None):
    # tgt/src are image objects (see blockimgdiff); src=None means a full
    # (unconditional) update of the partition.
    self.tgt = tgt
    self.src = src
    self.partition = partition
    self.check_first_block = check_first_block

    # Due to http://b/20939131, check_first_block is disabled temporarily.
    assert not self.check_first_block

    if version is None:
      # Default to the highest blockimgdiff version the target declares
      # support for (info dict "blockimgdiff_versions", comma-separated).
      version = 1
      if OPTIONS.info_dict:
        version = max(
            int(i) for i in
            OPTIONS.info_dict.get("blockimgdiff_versions", "1").split(","))
    self.version = version

    # Compute the transfer list / new.dat / patch.dat files into a temp
    # dir; the dir is recorded in OPTIONS.tempfiles for later cleanup.
    b = blockimgdiff.BlockImageDiff(tgt, src, threads=OPTIONS.worker_threads,
                                    version=self.version)
    tmpdir = tempfile.mkdtemp()
    OPTIONS.tempfiles.append(tmpdir)
    self.path = os.path.join(tmpdir, partition)
    b.Compute(self.path)

    # For incremental updates, the device path must come from the source
    # build's fstab.
    if src is None:
      _, self.device = GetTypeAndDevice("/" + partition, OPTIONS.info_dict)
    else:
      _, self.device = GetTypeAndDevice("/" + partition,
                                        OPTIONS.source_info_dict)

  def WriteScript(self, script, output_zip, progress=None):
    """Add this partition's files to output_zip and emit the update (and
    post-install verification) commands into 'script'."""
    if not self.src:
      # write the output unconditionally
      script.Print("Patching %s image unconditionally..." % (self.partition,))
    else:
      script.Print("Patching %s image after verification." % (self.partition,))

    if progress:
      script.ShowProgress(progress, 0)
    self._WriteUpdate(script, output_zip)
    self._WritePostInstallVerifyScript(script)

  def WriteVerifyScript(self, script):
    """Emit the pre-install verification commands into 'script'; aborts the
    OTA if the source partition doesn't match the expected checksum."""
    partition = self.partition
    if not self.src:
      script.Print("Image %s will be patched unconditionally." % (partition,))
    else:
      # Verify over the care map minus clobbered blocks (clobbered blocks
      # are expected to differ from the build-time image).
      ranges = self.src.care_map.subtract(self.src.clobbered_blocks)
      ranges_str = ranges.to_string_raw()
      if self.version >= 3:
        # v3+ updates are resumable: also accept a partition that
        # block_image_verify says can still be brought to the target state.
        script.AppendExtra(('if (range_sha1("%s", "%s") == "%s" || '
                            'block_image_verify("%s", '
                            'package_extract_file("%s.transfer.list"), '
                            '"%s.new.dat", "%s.patch.dat")) then') % (
                            self.device, ranges_str, self.src.TotalSha1(),
                            self.device, partition, partition, partition))
      else:
        script.AppendExtra('if range_sha1("%s", "%s") == "%s" then' % (
                           self.device, ranges_str, self.src.TotalSha1()))
      script.Print('Verified %s image...' % (partition,))
      script.AppendExtra('else')

      # When generating incrementals for the system and vendor partitions,
      # explicitly check the first block (which contains the superblock) of
      # the partition to see if it's what we expect. If this check fails,
      # give an explicit log message about the partition having been
      # remounted R/W (the most likely explanation) and the need to flash to
      # get OTAs working again.
      if self.check_first_block:
        self._CheckFirstBlock(script)

      # Abort the OTA update. Note that the incremental OTA cannot be applied
      # even if it may match the checksum of the target partition.
      # a) If version < 3, operations like move and erase will make changes
      #    unconditionally and damage the partition.
      # b) If version >= 3, it won't even reach here.
      script.AppendExtra(('abort("%s partition has unexpected contents");\n'
                          'endif;') % (partition,))

  def _WritePostInstallVerifyScript(self, script):
    """Emit commands that re-hash the updated partition (and any zero-fill
    extended blocks) and abort if the result is wrong."""
    partition = self.partition
    script.Print('Verifying the updated %s image...' % (partition,))
    # Unlike pre-install verification, clobbered_blocks should not be ignored.
    ranges = self.tgt.care_map
    ranges_str = ranges.to_string_raw()
    script.AppendExtra('if range_sha1("%s", "%s") == "%s" then' % (
                       self.device, ranges_str,
                       self.tgt.TotalSha1(include_clobbered_blocks=True)))

    # Bug: 20881595
    # Verify that extended blocks are really zeroed out.
    if self.tgt.extended:
      ranges_str = self.tgt.extended.to_string_raw()
      script.AppendExtra('if range_sha1("%s", "%s") == "%s" then' % (
                         self.device, ranges_str,
                         self._HashZeroBlocks(self.tgt.extended.size())))
      script.Print('Verified the updated %s image.' % (partition,))
      script.AppendExtra(
          'else\n'
          '  abort("%s partition has unexpected non-zero contents after OTA '
          'update");\n'
          'endif;' % (partition,))
    else:
      script.Print('Verified the updated %s image.' % (partition,))

    script.AppendExtra(
        'else\n'
        '  abort("%s partition has unexpected contents after OTA update");\n'
        'endif;' % (partition,))

  def _WriteUpdate(self, script, output_zip):
    """Add the transfer list / new.dat / patch.dat to output_zip and emit
    the block_image_update call into 'script'."""
    ZipWrite(output_zip,
             '{}.transfer.list'.format(self.path),
             '{}.transfer.list'.format(self.partition))
    ZipWrite(output_zip,
             '{}.new.dat'.format(self.path),
             '{}.new.dat'.format(self.partition))
    # patch.dat is already compressed data; store it uncompressed.
    ZipWrite(output_zip,
             '{}.patch.dat'.format(self.path),
             '{}.patch.dat'.format(self.partition),
             compress_type=zipfile.ZIP_STORED)

    call = ('block_image_update("{device}", '
            'package_extract_file("{partition}.transfer.list"), '
            '"{partition}.new.dat", "{partition}.patch.dat");\n'.format(
                device=self.device, partition=self.partition))
    script.AppendExtra(script.WordWrap(call))

  def _HashBlocks(self, source, ranges): # pylint: disable=no-self-use
    """Return the hex SHA-1 over the given block ranges read from 'source'."""
    data = source.ReadRangeSet(ranges)
    ctx = sha1()

    for p in data:
      ctx.update(p)

    return ctx.hexdigest()

  def _HashZeroBlocks(self, num_blocks): # pylint: disable=no-self-use
    """Return the hash value for all zero blocks."""
    # 4096 matches the block size used throughout the block-based OTA
    # tooling.
    zero_block = '\x00' * 4096
    ctx = sha1()
    for _ in range(num_blocks):
      ctx.update(zero_block)

    return ctx.hexdigest()

  # TODO(tbao): Due to http://b/20939131, block 0 may be changed without
  # remounting R/W. Will change the checking to a finer-grained way to
  # mask off those bits.
  def _CheckFirstBlock(self, script):
    """Emit a check that block 0 (superblock) of the source partition still
    matches the source build; abort with a remount warning otherwise."""
    r = rangelib.RangeSet((0, 1))
    srchash = self._HashBlocks(self.src, r)

    script.AppendExtra(('(range_sha1("%s", "%s") == "%s") || '
                        'abort("%s has been remounted R/W; '
                        'reflash device to reenable OTA updates");')
                       % (self.device, r.to_string_raw(), srchash,
                          self.device))
1332
# Re-export so callers can say common.DataImage.
DataImage = blockimgdiff.DataImage


# map recovery.fstab's fs_types to mount/format "partition types"
PARTITION_TYPES = {
    "yaffs2": "MTD",
    "mtd": "MTD",
    "ext4": "EMMC",
    "emmc": "EMMC",
    "f2fs": "EMMC",
    "squashfs": "EMMC"
}
1345
def GetTypeAndDevice(mount_point, info):
  """Look up 'mount_point' in the fstab carried by the info dict.

  Returns a (partition_type, device) tuple, where partition_type is one of
  the PARTITION_TYPES values.  Raises KeyError when the info dict has no
  fstab, or when the mount point (or its fs_type) is unknown.
  """
  fstab = info["fstab"]
  if not fstab:
    raise KeyError
  entry = fstab[mount_point]
  return (PARTITION_TYPES[entry.fs_type], entry.device)
1353
1354
def ParseCertificate(data):
  """Parse a PEM-format certificate.

  Scans 'data' for the first BEGIN/END CERTIFICATE marker pair and returns
  the base64-decoded (DER) bytes between them.
  """
  # Imported locally to keep this function self-contained.
  import base64

  cert = []
  save = False
  for line in data.split("\n"):
    if "--END CERTIFICATE--" in line:
      break
    if save:
      cert.append(line)
    if "--BEGIN CERTIFICATE--" in line:
      save = True
  # base64.b64decode is equivalent to the Python-2-only str.decode('base64')
  # codec used previously, and also works on Python 3.
  return base64.b64decode("".join(cert))
1368
def MakeRecoveryPatch(input_dir, output_sink, recovery_img, boot_img,
                      info_dict=None):
  """Generate a binary patch that creates the recovery image starting
  with the boot image.  (Most of the space in these images is just the
  kernel, which is identical for the two, so the resulting patch
  should be efficient.)  Add it to the output zip, along with a shell
  script that is run from init.rc on first boot to actually do the
  patching and install the new recovery image.

  recovery_img and boot_img should be File objects for the
  corresponding images.  info should be the dictionary returned by
  common.LoadInfoDict() on the input target_files.
  """

  if info_dict is None:
    info_dict = OPTIONS.info_dict

  # If the target-files provides recovery-resource.dat, pass it to imgdiff
  # as a bonus file ("-b") and have applypatch do the same on device.
  diff_program = ["imgdiff"]
  path = os.path.join(input_dir, "SYSTEM", "etc", "recovery-resource.dat")
  if os.path.exists(path):
    diff_program.append("-b")
    diff_program.append(path)
    bonus_args = "-b /system/etc/recovery-resource.dat"
  else:
    bonus_args = ""

  d = Difference(recovery_img, boot_img, diff_program=diff_program)
  _, _, patch = d.ComputePatch()
  output_sink("recovery-from-boot.p", patch)

  try:
    # The following GetTypeAndDevice()s need to use the path in the target
    # info_dict instead of source_info_dict.
    boot_type, boot_device = GetTypeAndDevice("/boot", info_dict)
    recovery_type, recovery_device = GetTypeAndDevice("/recovery", info_dict)
  except KeyError:
    # No fstab info for boot/recovery; skip emitting the install script.
    return

  # First-boot script: if the installed recovery doesn't match the expected
  # checksum, patch it from the boot partition using the emitted patch.
  sh = """#!/system/bin/sh
if ! applypatch -c %(recovery_type)s:%(recovery_device)s:%(recovery_size)d:%(recovery_sha1)s; then
  applypatch %(bonus_args)s %(boot_type)s:%(boot_device)s:%(boot_size)d:%(boot_sha1)s %(recovery_type)s:%(recovery_device)s %(recovery_sha1)s %(recovery_size)d %(boot_sha1)s:/system/recovery-from-boot.p && log -t recovery "Installing new recovery image: succeeded" || log -t recovery "Installing new recovery image: failed"
else
  log -t recovery "Recovery image already installed"
fi
""" % {'boot_size': boot_img.size,
       'boot_sha1': boot_img.sha1,
       'recovery_size': recovery_img.size,
       'recovery_sha1': recovery_img.sha1,
       'boot_type': boot_type,
       'boot_device': boot_device,
       'recovery_type': recovery_type,
       'recovery_device': recovery_device,
       'bonus_args': bonus_args}

  # The install script location moved from /system/etc to /system/bin
  # in the L release.  Parse the init.rc file to find out where the
  # target-files expects it to be, and put it there.
  sh_location = "etc/install-recovery.sh"
  try:
    with open(os.path.join(input_dir, "BOOT", "RAMDISK", "init.rc")) as f:
      for line in f:
        m = re.match(r"^service flash_recovery /system/(\S+)\s*$", line)
        if m:
          sh_location = m.group(1)
          print "putting script in", sh_location
          break
  except (OSError, IOError) as e:
    # Best-effort: fall back to the default location if init.rc is missing
    # or unreadable.
    print "failed to read init.rc: %s" % (e,)

  output_sink(sh_location, sh)
1439