common.py revision 2c15d9eefe977db63d7561bde326f24fab31653d
1# Copyright (C) 2008 The Android Open Source Project
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7#      http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14
15import copy
16import errno
17import getopt
18import getpass
19import imp
20import os
21import platform
22import re
23import shlex
24import shutil
25import subprocess
26import sys
27import tempfile
28import threading
29import time
30import zipfile
31
32import blockimgdiff
33import rangelib
34
35from hashlib import sha1 as sha1
36
37
38class Options(object):
39  def __init__(self):
40    platform_search_path = {
41        "linux2": "out/host/linux-x86",
42        "darwin": "out/host/darwin-x86",
43    }
44
45    self.search_path = platform_search_path.get(sys.platform, None)
46    self.signapk_path = "framework/signapk.jar"  # Relative to search_path
47    self.extra_signapk_args = []
48    self.java_path = "java"  # Use the one on the path by default.
49    self.java_args = "-Xmx2048m"  # JVM args
50    self.public_key_suffix = ".x509.pem"
51    self.private_key_suffix = ".pk8"
52    # use otatools built boot_signer by default
53    self.boot_signer_path = "boot_signer"
54    self.verbose = False
55    self.tempfiles = []
56    self.device_specific = None
57    self.extras = {}
58    self.info_dict = None
59    self.worker_threads = None
60
61
62OPTIONS = Options()
63
64
65# Values for "certificate" in apkcerts that mean special things.
66SPECIAL_CERT_STRINGS = ("PRESIGNED", "EXTERNAL")
67
68
69class ExternalError(RuntimeError):
70  pass
71
72
73def Run(args, **kwargs):
74  """Create and return a subprocess.Popen object, printing the command
75  line on the terminal if -v was specified."""
76  if OPTIONS.verbose:
77    print "  running: ", " ".join(args)
78  return subprocess.Popen(args, **kwargs)
79
80
81def CloseInheritedPipes():
82  """ Gmake in MAC OS has file descriptor (PIPE) leak. We close those fds
83  before doing other work."""
84  if platform.system() != "Darwin":
85    return
86  for d in range(3, 1025):
87    try:
88      stat = os.fstat(d)
89      if stat is not None:
90        pipebit = stat[0] & 0x1000
91        if pipebit != 0:
92          os.close(d)
93    except OSError:
94      pass
95
96
97def LoadInfoDict(input_file, input_dir=None):
98  """Read and parse the META/misc_info.txt key/value pairs from the
99  input target files and return a dict."""
100
101  def read_helper(fn):
102    if isinstance(input_file, zipfile.ZipFile):
103      return input_file.read(fn)
104    else:
105      path = os.path.join(input_file, *fn.split("/"))
106      try:
107        with open(path) as f:
108          return f.read()
109      except IOError as e:
110        if e.errno == errno.ENOENT:
111          raise KeyError(fn)
112  d = {}
113  try:
114    d = LoadDictionaryFromLines(read_helper("META/misc_info.txt").split("\n"))
115  except KeyError:
116    # ok if misc_info.txt doesn't exist
117    pass
118
119  # backwards compatibility: These values used to be in their own
120  # files.  Look for them, in case we're processing an old
121  # target_files zip.
122
123  if "mkyaffs2_extra_flags" not in d:
124    try:
125      d["mkyaffs2_extra_flags"] = read_helper(
126          "META/mkyaffs2-extra-flags.txt").strip()
127    except KeyError:
128      # ok if flags don't exist
129      pass
130
131  if "recovery_api_version" not in d:
132    try:
133      d["recovery_api_version"] = read_helper(
134          "META/recovery-api-version.txt").strip()
135    except KeyError:
136      raise ValueError("can't find recovery API version in input target-files")
137
138  if "tool_extensions" not in d:
139    try:
140      d["tool_extensions"] = read_helper("META/tool-extensions.txt").strip()
141    except KeyError:
142      # ok if extensions don't exist
143      pass
144
145  if "fstab_version" not in d:
146    d["fstab_version"] = "1"
147
148  # During building, we use the "file_contexts" in the out/ directory tree.
149  # It is no longer available when (re)generating from target_files zip. So
150  # when generating from target_files zip, we look for a copy under META/
151  # first; if not available, search under BOOT/RAMDISK/. Note that we may need
152  # a different file_contexts to build images than the one running on device,
153  # such as when enabling system_root_image. In that case, we must have the
154  # one for building copied to META/.
155  if input_dir is not None:
156    fc_config = os.path.join(input_dir, "META", "file_contexts")
157    if not os.path.exists(fc_config):
158      fc_config = os.path.join(input_dir, "BOOT", "RAMDISK", "file_contexts")
159      if not os.path.exists(fc_config):
160        fc_config = None
161
162    if fc_config:
163      d["selinux_fc"] = fc_config
164
165  try:
166    data = read_helper("META/imagesizes.txt")
167    for line in data.split("\n"):
168      if not line:
169        continue
170      name, value = line.split(" ", 1)
171      if not value:
172        continue
173      if name == "blocksize":
174        d[name] = value
175      else:
176        d[name + "_size"] = value
177  except KeyError:
178    pass
179
180  def makeint(key):
181    if key in d:
182      d[key] = int(d[key], 0)
183
184  makeint("recovery_api_version")
185  makeint("blocksize")
186  makeint("system_size")
187  makeint("vendor_size")
188  makeint("userdata_size")
189  makeint("cache_size")
190  makeint("recovery_size")
191  makeint("boot_size")
192  makeint("fstab_version")
193
194  d["fstab"] = LoadRecoveryFSTab(read_helper, d["fstab_version"])
195  d["build.prop"] = LoadBuildProp(read_helper)
196  return d
197
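# A minimal usage sketch for LoadInfoDict(): it builds a throwaway extracted
# target-files tree containing only META/misc_info.txt (the key names below
# match the ones parsed above; the values are made up). Missing optional files
# just produce warnings or fall back to defaults.
def _example_load_info_dict():
  tmp = tempfile.mkdtemp(prefix="example-targetfiles-")
  OPTIONS.tempfiles.append(tmp)
  os.makedirs(os.path.join(tmp, "META"))
  with open(os.path.join(tmp, "META", "misc_info.txt"), "w") as f:
    f.write("recovery_api_version=3\nfstab_version=2\nblocksize=4096\n")
  info = LoadInfoDict(tmp)
  assert info["recovery_api_version"] == 3   # makeint() converted it
  assert info["blocksize"] == 4096
  assert info["fstab"] == {}                 # no recovery.fstab provided
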
198def LoadBuildProp(read_helper):
199  try:
200    data = read_helper("SYSTEM/build.prop")
201  except KeyError:
202    print "Warning: could not find SYSTEM/build.prop in %s" % zip
203    data = ""
204  return LoadDictionaryFromLines(data.split("\n"))
205
206def LoadDictionaryFromLines(lines):
207  d = {}
208  for line in lines:
209    line = line.strip()
210    if not line or line.startswith("#"):
211      continue
212    if "=" in line:
213      name, value = line.split("=", 1)
214      d[name] = value
215  return d
216
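# A short sketch of the key=value format LoadDictionaryFromLines() accepts
# (the same format used by misc_info.txt and build.prop); the entries are
# hypothetical.
def _example_load_dictionary_from_lines():
  lines = [
      "# comments and blank lines are ignored",
      "",
      "ro.build.id=EXAMPLE",
      "ro.build.version.sdk=22",
  ]
  d = LoadDictionaryFromLines(lines)
  assert d == {"ro.build.id": "EXAMPLE", "ro.build.version.sdk": "22"}
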
217def LoadRecoveryFSTab(read_helper, fstab_version):
218  class Partition(object):
219    def __init__(self, mount_point, fs_type, device, length, device2, context):
220      self.mount_point = mount_point
221      self.fs_type = fs_type
222      self.device = device
223      self.length = length
224      self.device2 = device2
225      self.context = context
226
227  try:
228    data = read_helper("RECOVERY/RAMDISK/etc/recovery.fstab")
229  except KeyError:
230    print "Warning: could not find RECOVERY/RAMDISK/etc/recovery.fstab"
231    data = ""
232
233  if fstab_version == 1:
234    d = {}
235    for line in data.split("\n"):
236      line = line.strip()
237      if not line or line.startswith("#"):
238        continue
239      pieces = line.split()
240      if not 3 <= len(pieces) <= 4:
241        raise ValueError("malformed recovery.fstab line: \"%s\"" % (line,))
242      options = None
243      if len(pieces) >= 4:
244        if pieces[3].startswith("/"):
245          device2 = pieces[3]
246          if len(pieces) >= 5:
247            options = pieces[4]
248        else:
249          device2 = None
250          options = pieces[3]
251      else:
252        device2 = None
253
254      mount_point = pieces[0]
255      length = 0
256      if options:
257        options = options.split(",")
258        for i in options:
259          if i.startswith("length="):
260            length = int(i[7:])
261          else:
262            print "%s: unknown option \"%s\"" % (mount_point, i)
263
264      d[mount_point] = Partition(mount_point=mount_point, fs_type=pieces[1],
265                                 device=pieces[2], length=length,
266                                 device2=device2, context=None)
267
268  elif fstab_version == 2:
269    d = {}
270    for line in data.split("\n"):
271      line = line.strip()
272      if not line or line.startswith("#"):
273        continue
274      # <src> <mnt_point> <type> <mnt_flags and options> <fs_mgr_flags>
275      pieces = line.split()
276      if len(pieces) != 5:
277        raise ValueError("malformed recovery.fstab line: \"%s\"" % (line,))
278
279      # Ignore entries that are managed by vold
280      options = pieces[4]
281      if "voldmanaged=" in options:
282        continue
283
284      # It's a good line, parse it
285      length = 0
286      options = options.split(",")
287      for i in options:
288        if i.startswith("length="):
289          length = int(i[7:])
290        else:
291          # Ignore all unknown options in the unified fstab
292          continue
293
294      mount_flags = pieces[3]
295      # Honor the SELinux context if present.
296      context = None
297      for i in mount_flags.split(","):
298        if i.startswith("context="):
299          context = i
300
301      mount_point = pieces[1]
302      d[mount_point] = Partition(mount_point=mount_point, fs_type=pieces[2],
303                                 device=pieces[0], length=length,
304                                 device2=None, context=context)
305
306  else:
307    raise ValueError("Unknown fstab_version: \"%d\"" % (fstab_version,))
308
309  return d
310
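# A hedged sketch of parsing a single fstab v2 line: the read_helper stand-in
# mimics the one built by LoadInfoDict(), and the device path is hypothetical.
def _example_load_recovery_fstab_v2():
  sample = ("# device        mount point  type  flags  fs_mgr_flags\n"
            "/dev/block/bootdevice/by-name/system /system ext4 ro wait\n")
  def read_helper(fn):
    assert fn == "RECOVERY/RAMDISK/etc/recovery.fstab"
    return sample
  fstab = LoadRecoveryFSTab(read_helper, 2)
  p = fstab["/system"]
  assert p.fs_type == "ext4"
  assert p.device == "/dev/block/bootdevice/by-name/system"
  assert p.length == 0 and p.context is None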
311
312def DumpInfoDict(d):
313  for k, v in sorted(d.items()):
314    print "%-25s = (%s) %s" % (k, type(v).__name__, v)
315
316
317def BuildBootableImage(sourcedir, fs_config_file, info_dict=None):
318  """Take a kernel, cmdline, and ramdisk directory from the input (in
319  'sourcedir'), and turn them into a boot image.  Return the image
320  data, or None if sourcedir does not appear to contain files for
321  building the requested image."""
322
323  if (not os.access(os.path.join(sourcedir, "RAMDISK"), os.F_OK) or
324      not os.access(os.path.join(sourcedir, "kernel"), os.F_OK)):
325    return None
326
327  if info_dict is None:
328    info_dict = OPTIONS.info_dict
329
330  ramdisk_img = tempfile.NamedTemporaryFile()
331  img = tempfile.NamedTemporaryFile()
332
333  if os.access(fs_config_file, os.F_OK):
334    cmd = ["mkbootfs", "-f", fs_config_file, os.path.join(sourcedir, "RAMDISK")]
335  else:
336    cmd = ["mkbootfs", os.path.join(sourcedir, "RAMDISK")]
337  p1 = Run(cmd, stdout=subprocess.PIPE)
338  p2 = Run(["minigzip"],
339           stdin=p1.stdout, stdout=ramdisk_img.file.fileno())
340
341  p2.wait()
342  p1.wait()
343  assert p1.returncode == 0, "mkbootfs of %s ramdisk failed" % (sourcedir,)
344  assert p2.returncode == 0, "minigzip of %s ramdisk failed" % (sourcedir,)
345
346  # use MKBOOTIMG from environ, or "mkbootimg" if empty or not set
347  mkbootimg = os.getenv('MKBOOTIMG') or "mkbootimg"
348
349  cmd = [mkbootimg, "--kernel", os.path.join(sourcedir, "kernel")]
350
351  fn = os.path.join(sourcedir, "second")
352  if os.access(fn, os.F_OK):
353    cmd.append("--second")
354    cmd.append(fn)
355
356  fn = os.path.join(sourcedir, "cmdline")
357  if os.access(fn, os.F_OK):
358    cmd.append("--cmdline")
359    cmd.append(open(fn).read().rstrip("\n"))
360
361  fn = os.path.join(sourcedir, "base")
362  if os.access(fn, os.F_OK):
363    cmd.append("--base")
364    cmd.append(open(fn).read().rstrip("\n"))
365
366  fn = os.path.join(sourcedir, "pagesize")
367  if os.access(fn, os.F_OK):
368    cmd.append("--pagesize")
369    cmd.append(open(fn).read().rstrip("\n"))
370
371  args = info_dict.get("mkbootimg_args", None)
372  if args and args.strip():
373    cmd.extend(shlex.split(args))
374
375  img_unsigned = None
376  if info_dict.get("vboot", None):
377    img_unsigned = tempfile.NamedTemporaryFile()
378    cmd.extend(["--ramdisk", ramdisk_img.name,
379                "--output", img_unsigned.name])
380  else:
381    cmd.extend(["--ramdisk", ramdisk_img.name,
382                "--output", img.name])
383
384  p = Run(cmd, stdout=subprocess.PIPE)
385  p.communicate()
386  assert p.returncode == 0, "mkbootimg of %s image failed" % (
387      os.path.basename(sourcedir),)
388
389  if info_dict.get("verity_key", None):
390    path = "/" + os.path.basename(sourcedir).lower()
391    cmd = [OPTIONS.boot_signer_path, path, img.name,
392           info_dict["verity_key"] + ".pk8",
393           info_dict["verity_key"] + ".x509.pem", img.name]
394    p = Run(cmd, stdout=subprocess.PIPE)
395    p.communicate()
396    assert p.returncode == 0, "boot_signer of %s image failed" % path
397
398  # Sign the image if vboot is non-empty.
399  elif info_dict.get("vboot", None):
400    path = "/" + os.path.basename(sourcedir).lower()
401    img_keyblock = tempfile.NamedTemporaryFile()
402    cmd = [info_dict["vboot_signer_cmd"], info_dict["futility"],
403           img_unsigned.name, info_dict["vboot_key"] + ".vbpubk",
404           info_dict["vboot_key"] + ".vbprivk", img_keyblock.name,
405           img.name]
406    p = Run(cmd, stdout=subprocess.PIPE)
407    p.communicate()
408    assert p.returncode == 0, "vboot_signer of %s image failed" % path
409
410    # Clean up the temp files.
411    img_unsigned.close()
412    img_keyblock.close()
413
414  img.seek(0, os.SEEK_SET)
415  data = img.read()
416
417  ramdisk_img.close()
418  img.close()
419
420  return data
421
422
423def GetBootableImage(name, prebuilt_name, unpack_dir, tree_subdir,
424                     info_dict=None):
425  """Return a File object (with name 'name') with the desired bootable
426  image.  Look for it in 'unpack_dir'/BOOTABLE_IMAGES under the name
427  'prebuilt_name', otherwise look for it under 'unpack_dir'/IMAGES,
428  otherwise construct it from the source files in
429  'unpack_dir'/'tree_subdir'."""
430
431  prebuilt_path = os.path.join(unpack_dir, "BOOTABLE_IMAGES", prebuilt_name)
432  if os.path.exists(prebuilt_path):
433    print "using prebuilt %s from BOOTABLE_IMAGES..." % (prebuilt_name,)
434    return File.FromLocalFile(name, prebuilt_path)
435
436  prebuilt_path = os.path.join(unpack_dir, "IMAGES", prebuilt_name)
437  if os.path.exists(prebuilt_path):
438    print "using prebuilt %s from IMAGES..." % (prebuilt_name,)
439    return File.FromLocalFile(name, prebuilt_path)
440
441  print "building image from target_files %s..." % (tree_subdir,)
442  fs_config = "META/" + tree_subdir.lower() + "_filesystem_config.txt"
443  data = BuildBootableImage(os.path.join(unpack_dir, tree_subdir),
444                            os.path.join(unpack_dir, fs_config),
445                            info_dict)
446  if data:
447    return File(name, data)
448  return None
449
450
451def UnzipTemp(filename, pattern=None):
452  """Unzip the given archive into a temporary directory and return the name.
453
454  If filename is of the form "foo.zip+bar.zip", unzip foo.zip into a
455  temp dir, then unzip bar.zip into that_dir/BOOTABLE_IMAGES.
456
457  Returns (tempdir, zipobj) where zipobj is a zipfile.ZipFile (of the
458  main file), open for reading.
459  """
460
461  tmp = tempfile.mkdtemp(prefix="targetfiles-")
462  OPTIONS.tempfiles.append(tmp)
463
464  def unzip_to_dir(filename, dirname):
465    cmd = ["unzip", "-o", "-q", filename, "-d", dirname]
466    if pattern is not None:
467      cmd.append(pattern)
468    p = Run(cmd, stdout=subprocess.PIPE)
469    p.communicate()
470    if p.returncode != 0:
471      raise ExternalError("failed to unzip input target-files \"%s\"" %
472                          (filename,))
473
474  m = re.match(r"^(.*[.]zip)\+(.*[.]zip)$", filename, re.IGNORECASE)
475  if m:
476    unzip_to_dir(m.group(1), tmp)
477    unzip_to_dir(m.group(2), os.path.join(tmp, "BOOTABLE_IMAGES"))
478    filename = m.group(1)
479  else:
480    unzip_to_dir(filename, tmp)
481
482  return tmp, zipfile.ZipFile(filename, "r")
483
484
485def GetKeyPasswords(keylist):
486  """Given a list of keys, prompt the user to enter passwords for
487  those which require them.  Return a {key: password} dict.  password
488  will be None if the key has no password."""
489
490  no_passwords = []
491  need_passwords = []
492  key_passwords = {}
493  devnull = open("/dev/null", "w+b")
494  for k in sorted(keylist):
495    # We don't need a password for things that aren't really keys.
496    if k in SPECIAL_CERT_STRINGS:
497      no_passwords.append(k)
498      continue
499
500    p = Run(["openssl", "pkcs8", "-in", k+OPTIONS.private_key_suffix,
501             "-inform", "DER", "-nocrypt"],
502            stdin=devnull.fileno(),
503            stdout=devnull.fileno(),
504            stderr=subprocess.STDOUT)
505    p.communicate()
506    if p.returncode == 0:
507      # Definitely an unencrypted key.
508      no_passwords.append(k)
509    else:
510      p = Run(["openssl", "pkcs8", "-in", k+OPTIONS.private_key_suffix,
511               "-inform", "DER", "-passin", "pass:"],
512              stdin=devnull.fileno(),
513              stdout=devnull.fileno(),
514              stderr=subprocess.PIPE)
515      _, stderr = p.communicate()
516      if p.returncode == 0:
517        # Encrypted key with empty string as password.
518        key_passwords[k] = ''
519      elif stderr.startswith('Error decrypting key'):
520        # Definitely encrypted key.
521        # It would have said "Error reading key" if it didn't parse correctly.
522        need_passwords.append(k)
523      else:
524        # Potentially, a type of key that openssl doesn't understand.
525        # We'll let the routines in signapk.jar handle it.
526        no_passwords.append(k)
527  devnull.close()
528
529  key_passwords.update(PasswordManager().GetPasswords(need_passwords))
530  key_passwords.update(dict.fromkeys(no_passwords, None))
531  return key_passwords
532
533
534def SignFile(input_name, output_name, key, password, align=None,
535             whole_file=False):
536  """Sign the input_name zip/jar/apk, producing output_name.  Use the
537  given key and password (the latter may be None if the key does not
538  have a password).
539
540  If align is an integer > 1, zipalign is run to align stored files in
541  the output zip on 'align'-byte boundaries.
542
543  If whole_file is true, use the "-w" option to SignApk to embed a
544  signature that covers the whole file in the archive comment of the
545  zip file.
546  """
547
548  if align == 0 or align == 1:
549    align = None
550
551  if align:
552    temp = tempfile.NamedTemporaryFile()
553    sign_name = temp.name
554  else:
555    sign_name = output_name
556
557  cmd = [OPTIONS.java_path, OPTIONS.java_args, "-jar",
558         os.path.join(OPTIONS.search_path, OPTIONS.signapk_path)]
559  cmd.extend(OPTIONS.extra_signapk_args)
560  if whole_file:
561    cmd.append("-w")
562  cmd.extend([key + OPTIONS.public_key_suffix,
563              key + OPTIONS.private_key_suffix,
564              input_name, sign_name])
565
566  p = Run(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
567  if password is not None:
568    password += "\n"
569  p.communicate(password)
570  if p.returncode != 0:
571    raise ExternalError("signapk.jar failed: return code %s" % (p.returncode,))
572
573  if align:
574    p = Run(["zipalign", "-f", "-p", str(align), sign_name, output_name])
575    p.communicate()
576    if p.returncode != 0:
577      raise ExternalError("zipalign failed: return code %s" % (p.returncode,))
578    temp.close()
579
580
581def CheckSize(data, target, info_dict):
582  """Check the data string passed against the max size limit, if
583  any, for the given target.  Raise exception if the data is too big.
584  Print a warning if the data is nearing the maximum size."""
585
586  if target.endswith(".img"):
587    target = target[:-4]
588  mount_point = "/" + target
589
590  fs_type = None
591  limit = None
592  if info_dict["fstab"]:
593    if mount_point == "/userdata":
594      mount_point = "/data"
595    p = info_dict["fstab"][mount_point]
596    fs_type = p.fs_type
597    device = p.device
598    if "/" in device:
599      device = device[device.rfind("/")+1:]
600    limit = info_dict.get(device + "_size", None)
601  if not fs_type or not limit:
602    return
603
604  if fs_type == "yaffs2":
605    # image size should be increased by 1/64th to account for the
606    # spare area (64 bytes per 2k page)
607    limit = limit / 2048 * (2048+64)
608  size = len(data)
609  pct = float(size) * 100.0 / limit
610  msg = "%s size (%d) is %.2f%% of limit (%d)" % (target, size, pct, limit)
611  if pct >= 99.0:
612    raise ExternalError(msg)
613  elif pct >= 95.0:
614    print
615    print "  WARNING: ", msg
616    print
617  elif OPTIONS.verbose:
618    print "  ", msg
619
620
621def ReadApkCerts(tf_zip):
622  """Given a target_files ZipFile, parse the META/apkcerts.txt file
623  and return a {package: cert} dict."""
624  certmap = {}
625  for line in tf_zip.read("META/apkcerts.txt").split("\n"):
626    line = line.strip()
627    if not line:
628      continue
629    m = re.match(r'^name="(.*)"\s+certificate="(.*)"\s+'
630                 r'private_key="(.*)"$', line)
631    if m:
632      name, cert, privkey = m.groups()
633      public_key_suffix_len = len(OPTIONS.public_key_suffix)
634      private_key_suffix_len = len(OPTIONS.private_key_suffix)
635      if cert in SPECIAL_CERT_STRINGS and not privkey:
636        certmap[name] = cert
637      elif (cert.endswith(OPTIONS.public_key_suffix) and
638            privkey.endswith(OPTIONS.private_key_suffix) and
639            cert[:-public_key_suffix_len] == privkey[:-private_key_suffix_len]):
640        certmap[name] = cert[:-public_key_suffix_len]
641      else:
642        raise ValueError("failed to parse line from apkcerts.txt:\n" + line)
643  return certmap
644
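# A minimal sketch of the apkcerts.txt format: a stand-in object provides the
# ZipFile.read() interface, and the package/key names are made up. With the
# default key suffixes, the certificate and private key collapse to one base
# name in the returned map.
def _example_read_apkcerts():
  class _FakeTargetFilesZip(object):
    def read(self, fn):
      assert fn == "META/apkcerts.txt"
      return ('name="Example.apk" '
              'certificate="device/sample/security/releasekey.x509.pem" '
              'private_key="device/sample/security/releasekey.pk8"\n')
  certmap = ReadApkCerts(_FakeTargetFilesZip())
  assert certmap == {"Example.apk": "device/sample/security/releasekey"}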
645
646COMMON_DOCSTRING = """
647  -p  (--path)  <dir>
648      Prepend <dir>/bin to the list of places to search for binaries
649      run by this script, and expect to find jars in <dir>/framework.
650
651  -s  (--device_specific) <file>
652      Path to the python module containing device-specific
653      releasetools code.
654
655  -x  (--extra)  <key=value>
656      Add a key/value pair to the 'extras' dict, which device-specific
657      extension code may look at.
658
659  -v  (--verbose)
660      Show command lines being executed.
661
662  -h  (--help)
663      Display this usage message and exit.
664"""
665
666def Usage(docstring):
667  print docstring.rstrip("\n")
668  print COMMON_DOCSTRING
669
670
671def ParseOptions(argv,
672                 docstring,
673                 extra_opts="", extra_long_opts=(),
674                 extra_option_handler=None):
675  """Parse the options in argv and return any arguments that aren't
676  flags.  docstring is the calling module's docstring, to be displayed
677  for errors and -h.  extra_opts and extra_long_opts are for flags
678  defined by the caller, which are processed by passing them to
679  extra_option_handler."""
680
681  try:
682    opts, args = getopt.getopt(
683        argv, "hvp:s:x:" + extra_opts,
684        ["help", "verbose", "path=", "signapk_path=", "extra_signapk_args=",
685         "java_path=", "java_args=", "public_key_suffix=",
686         "private_key_suffix=", "boot_signer_path=", "device_specific=",
687         "extra="] +
688        list(extra_long_opts))
689  except getopt.GetoptError as err:
690    Usage(docstring)
691    print "**", str(err), "**"
692    sys.exit(2)
693
694  for o, a in opts:
695    if o in ("-h", "--help"):
696      Usage(docstring)
697      sys.exit()
698    elif o in ("-v", "--verbose"):
699      OPTIONS.verbose = True
700    elif o in ("-p", "--path"):
701      OPTIONS.search_path = a
702    elif o in ("--signapk_path",):
703      OPTIONS.signapk_path = a
704    elif o in ("--extra_signapk_args",):
705      OPTIONS.extra_signapk_args = shlex.split(a)
706    elif o in ("--java_path",):
707      OPTIONS.java_path = a
708    elif o in ("--java_args",):
709      OPTIONS.java_args = a
710    elif o in ("--public_key_suffix",):
711      OPTIONS.public_key_suffix = a
712    elif o in ("--private_key_suffix",):
713      OPTIONS.private_key_suffix = a
714    elif o in ("--boot_signer_path",):
715      OPTIONS.boot_signer_path = a
716    elif o in ("-s", "--device_specific"):
717      OPTIONS.device_specific = a
718    elif o in ("-x", "--extra"):
719      key, value = a.split("=", 1)
720      OPTIONS.extras[key] = value
721    else:
722      if extra_option_handler is None or not extra_option_handler(o, a):
723        assert False, "unknown option \"%s\"" % (o,)
724
725  if OPTIONS.search_path:
726    os.environ["PATH"] = (os.path.join(OPTIONS.search_path, "bin") +
727                          os.pathsep + os.environ["PATH"])
728
729  return args
730
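# A rough sketch of how a caller typically drives ParseOptions(); the
# --example_flag option and the usage string are hypothetical.
def _example_parse_options(argv):
  def option_handler(o, a):
    if o == "--example_flag":
      OPTIONS.extras["example_flag"] = a
      return True
    return False
  return ParseOptions(argv, "usage: example_tool [flags] input_zip",
                      extra_long_opts=["example_flag="],
                      extra_option_handler=option_handler)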
731
732def MakeTempFile(prefix=None, suffix=None):
733  """Make a temp file and add it to the list of things to be deleted
734  when Cleanup() is called.  Return the filename."""
735  fd, fn = tempfile.mkstemp(prefix=prefix, suffix=suffix)
736  os.close(fd)
737  OPTIONS.tempfiles.append(fn)
738  return fn
739
740
741def Cleanup():
742  for i in OPTIONS.tempfiles:
743    if os.path.isdir(i):
744      shutil.rmtree(i)
745    else:
746      os.remove(i)
747
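# A small sketch of the temp-file lifecycle: everything registered in
# OPTIONS.tempfiles (by MakeTempFile(), UnzipTemp(), etc.) is removed by a
# single Cleanup() call, so Cleanup() belongs at the very end of a run.
def _example_tempfile_lifecycle():
  path = MakeTempFile(prefix="example-", suffix=".txt")
  with open(path, "w") as f:
    f.write("scratch data\n")
  Cleanup()
  assert not os.path.exists(path)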
748
749class PasswordManager(object):
750  def __init__(self):
751    self.editor = os.getenv("EDITOR", None)
752    self.pwfile = os.getenv("ANDROID_PW_FILE", None)
753
754  def GetPasswords(self, items):
755    """Get passwords corresponding to each string in 'items',
756    returning a dict.  (The dict may have keys in addition to the
757    values in 'items'.)
758
759    Uses the passwords in $ANDROID_PW_FILE if available, letting the
760    user edit that file to add more needed passwords.  If no editor is
761    available, or $ANDROID_PW_FILE isn't defined, prompts the user
762    interactively in the ordinary way.
763    """
764
765    current = self.ReadFile()
766
767    first = True
768    while True:
769      missing = []
770      for i in items:
771        if i not in current or not current[i]:
772          missing.append(i)
773      # Are all the passwords already in the file?
774      if not missing:
775        return current
776
777      for i in missing:
778        current[i] = ""
779
780      if not first:
781        print "key file %s still missing some passwords." % (self.pwfile,)
782        answer = raw_input("try to edit again? [y]> ").strip()
783        if answer and answer[0] not in 'yY':
784          raise RuntimeError("key passwords unavailable")
785      first = False
786
787      current = self.UpdateAndReadFile(current)
788
789  def PromptResult(self, current): # pylint: disable=no-self-use
790    """Prompt the user to enter a value (password) for each key in
791    'current' whose value is false.  Returns a new dict with all the
792    values.
793    """
794    result = {}
795    for k, v in sorted(current.iteritems()):
796      if v:
797        result[k] = v
798      else:
799        while True:
800          result[k] = getpass.getpass(
801              "Enter password for %s key> " % k).strip()
802          if result[k]:
803            break
804    return result
805
806  def UpdateAndReadFile(self, current):
807    if not self.editor or not self.pwfile:
808      return self.PromptResult(current)
809
810    f = open(self.pwfile, "w")
811    os.chmod(self.pwfile, 0o600)
812    f.write("# Enter key passwords between the [[[ ]]] brackets.\n")
813    f.write("# (Additional spaces are harmless.)\n\n")
814
815    first_line = None
816    sorted_list = sorted([(not v, k, v) for (k, v) in current.iteritems()])
817    for i, (_, k, v) in enumerate(sorted_list):
818      f.write("[[[  %s  ]]] %s\n" % (v, k))
819      if not v and first_line is None:
820        # position cursor on first line with no password.
821        first_line = i + 4
822    f.close()
823
824    p = Run([self.editor, "+%d" % (first_line,), self.pwfile])
825    _, _ = p.communicate()
826
827    return self.ReadFile()
828
829  def ReadFile(self):
830    result = {}
831    if self.pwfile is None:
832      return result
833    try:
834      f = open(self.pwfile, "r")
835      for line in f:
836        line = line.strip()
837        if not line or line[0] == '#':
838          continue
839        m = re.match(r"^\[\[\[\s*(.*?)\s*\]\]\]\s*(\S+)$", line)
840        if not m:
841          print "failed to parse password file: ", line
842        else:
843          result[m.group(2)] = m.group(1)
844      f.close()
845    except IOError as e:
846      if e.errno != errno.ENOENT:
847        print "error reading password file: ", str(e)
848    return result
849
850
851def ZipWrite(zip_file, filename, arcname=None, perms=0o644,
852             compress_type=None):
853  import datetime
854
855  # http://b/18015246
856  # Python 2.7's zipfile implementation wrongly thinks that zip64 is required
857  # for files larger than 2GiB. We can work around this by adjusting their
858  # limit. Note that `zipfile.writestr()` will not work for strings larger than
859  # 2GiB. The Python interpreter sometimes rejects strings that large (though
860  # it isn't clear to me exactly what circumstances cause this).
861  # `zipfile.write()` must be used directly to work around this.
862  #
863  # This mess can be avoided if we port to python3.
864  saved_zip64_limit = zipfile.ZIP64_LIMIT
865  zipfile.ZIP64_LIMIT = (1 << 32) - 1
866
867  if compress_type is None:
868    compress_type = zip_file.compression
869  if arcname is None:
870    arcname = filename
871
872  saved_stat = os.stat(filename)
873
874  try:
875    # `zipfile.write()` doesn't allow us to pass ZipInfo, so just modify the
876    # file to be zipped and reset it when we're done.
877    os.chmod(filename, perms)
878
879    # Use a fixed timestamp so the output is repeatable.
880    epoch = datetime.datetime.fromtimestamp(0)
881    timestamp = (datetime.datetime(2009, 1, 1) - epoch).total_seconds()
882    os.utime(filename, (timestamp, timestamp))
883
884    zip_file.write(filename, arcname=arcname, compress_type=compress_type)
885  finally:
886    os.chmod(filename, saved_stat.st_mode)
887    os.utime(filename, (saved_stat.st_atime, saved_stat.st_mtime))
888    zipfile.ZIP64_LIMIT = saved_zip64_limit
889
890
891def ZipWriteStr(zip_file, zinfo_or_arcname, data, perms=None,
892                compress_type=None):
893  """Wrap zipfile.writestr() function to work around the zip64 limit.
894
895  Even with the ZIP64_LIMIT workaround, it won't allow writing a string
896  longer than 2GiB. It gives 'OverflowError: size does not fit in an int'
897  when calling crc32(bytes).
898
899  But it still works fine to write a shorter string into a large zip file.
900  We should use ZipWrite() whenever possible, and only use ZipWriteStr()
901  when we know the string won't be too long.
902  """
903
904  saved_zip64_limit = zipfile.ZIP64_LIMIT
905  zipfile.ZIP64_LIMIT = (1 << 32) - 1
906
907  if not isinstance(zinfo_or_arcname, zipfile.ZipInfo):
908    zinfo = zipfile.ZipInfo(filename=zinfo_or_arcname)
909    zinfo.compress_type = zip_file.compression
910    if perms is None:
911      perms = 0o644
912  else:
913    zinfo = zinfo_or_arcname
914
915  # If compress_type is given, it overrides the value in zinfo.
916  if compress_type is not None:
917    zinfo.compress_type = compress_type
918
919  # If perms is given, it takes priority.
920  if perms is not None:
921    zinfo.external_attr = perms << 16
922
923  # Use a fixed timestamp so the output is repeatable.
924  zinfo.date_time = (2009, 1, 1, 0, 0, 0)
925
926  zip_file.writestr(zinfo, data)
927  zipfile.ZIP64_LIMIT = saved_zip64_limit
928
929
930def ZipClose(zip_file):
931  # http://b/18015246
932  # zipfile also refers to ZIP64_LIMIT during close() when it writes out the
933  # central directory.
934  saved_zip64_limit = zipfile.ZIP64_LIMIT
935  zipfile.ZIP64_LIMIT = (1 << 32) - 1
936
937  zip_file.close()
938
939  zipfile.ZIP64_LIMIT = saved_zip64_limit
940
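# A brief sketch of the zip helpers above; the archive contents are made up.
# Both ZipWrite() and ZipWriteStr() pin entry timestamps to 2009-01-01 so the
# resulting archive is byte-for-byte reproducible.
def _example_zip_helpers():
  payload = MakeTempFile(suffix=".txt")
  with open(payload, "w") as f:
    f.write("example payload\n")
  zip_path = MakeTempFile(suffix=".zip")
  zip_file = zipfile.ZipFile(zip_path, "w", compression=zipfile.ZIP_DEFLATED)
  ZipWrite(zip_file, payload, arcname="example.txt")
  ZipWriteStr(zip_file, "META-INF/com/android/metadata", "ota-type=BLOCK\n")
  ZipClose(zip_file)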
941
942class DeviceSpecificParams(object):
943  module = None
944  def __init__(self, **kwargs):
945    """Keyword arguments to the constructor become attributes of this
946    object, which is passed to all functions in the device-specific
947    module."""
948    for k, v in kwargs.iteritems():
949      setattr(self, k, v)
950    self.extras = OPTIONS.extras
951
952    if self.module is None:
953      path = OPTIONS.device_specific
954      if not path:
955        return
956      try:
957        if os.path.isdir(path):
958          info = imp.find_module("releasetools", [path])
959        else:
960          d, f = os.path.split(path)
961          b, x = os.path.splitext(f)
962          if x == ".py":
963            f = b
964          info = imp.find_module(f, [d])
965        print "loaded device-specific extensions from", path
966        self.module = imp.load_module("device_specific", *info)
967      except ImportError:
968        print "unable to load device-specific module; assuming none"
969
970  def _DoCall(self, function_name, *args, **kwargs):
971    """Call the named function in the device-specific module, passing
972    the given args and kwargs.  The first argument to the call will be
973    the DeviceSpecific object itself.  If there is no module, or the
974    module does not define the function, return the value of the
975    'default' kwarg (which itself defaults to None)."""
976    if self.module is None or not hasattr(self.module, function_name):
977      return kwargs.get("default", None)
978    return getattr(self.module, function_name)(*((self,) + args), **kwargs)
979
980  def FullOTA_Assertions(self):
981    """Called after emitting the block of assertions at the top of a
982    full OTA package.  Implementations can add whatever additional
983    assertions they like."""
984    return self._DoCall("FullOTA_Assertions")
985
986  def FullOTA_InstallBegin(self):
987    """Called at the start of full OTA installation."""
988    return self._DoCall("FullOTA_InstallBegin")
989
990  def FullOTA_InstallEnd(self):
991    """Called at the end of full OTA installation; typically this is
992    used to install the image for the device's baseband processor."""
993    return self._DoCall("FullOTA_InstallEnd")
994
995  def IncrementalOTA_Assertions(self):
996    """Called after emitting the block of assertions at the top of an
997    incremental OTA package.  Implementations can add whatever
998    additional assertions they like."""
999    return self._DoCall("IncrementalOTA_Assertions")
1000
1001  def IncrementalOTA_VerifyBegin(self):
1002    """Called at the start of the verification phase of incremental
1003    OTA installation; additional checks can be placed here to abort
1004    the script before any changes are made."""
1005    return self._DoCall("IncrementalOTA_VerifyBegin")
1006
1007  def IncrementalOTA_VerifyEnd(self):
1008    """Called at the end of the verification phase of incremental OTA
1009    installation; additional checks can be placed here to abort the
1010    script before any changes are made."""
1011    return self._DoCall("IncrementalOTA_VerifyEnd")
1012
1013  def IncrementalOTA_InstallBegin(self):
1014    """Called at the start of incremental OTA installation (after
1015    verification is complete)."""
1016    return self._DoCall("IncrementalOTA_InstallBegin")
1017
1018  def IncrementalOTA_InstallEnd(self):
1019    """Called at the end of incremental OTA installation; typically
1020    this is used to install the image for the device's baseband
1021    processor."""
1022    return self._DoCall("IncrementalOTA_InstallEnd")
1023
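# A short sketch of the hook dispatch: with OPTIONS.device_specific unset, no
# module is loaded and every hook falls back to _DoCall()'s 'default' kwarg
# (None unless a hook passes something else).
def _example_device_specific_hooks():
  ds = DeviceSpecificParams(input_zip=None)
  assert ds.module is None
  assert ds.FullOTA_Assertions() is None
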
1024class File(object):
1025  def __init__(self, name, data):
1026    self.name = name
1027    self.data = data
1028    self.size = len(data)
1029    self.sha1 = sha1(data).hexdigest()
1030
1031  @classmethod
1032  def FromLocalFile(cls, name, diskname):
1033    f = open(diskname, "rb")
1034    data = f.read()
1035    f.close()
1036    return File(name, data)
1037
1038  def WriteToTemp(self):
1039    t = tempfile.NamedTemporaryFile()
1040    t.write(self.data)
1041    t.flush()
1042    return t
1043
1044  def AddToZip(self, z, compression=None):
1045    ZipWriteStr(z, self.name, self.data, compress_type=compression)
1046
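# A tiny sketch of the in-memory File wrapper; the name and payload are
# arbitrary.
def _example_file_wrapper():
  f = File("example.txt", "hello world\n")
  assert f.size == len("hello world\n")
  assert f.sha1 == sha1("hello world\n").hexdigest()
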
1047DIFF_PROGRAM_BY_EXT = {
1048    ".gz" : "imgdiff",
1049    ".zip" : ["imgdiff", "-z"],
1050    ".jar" : ["imgdiff", "-z"],
1051    ".apk" : ["imgdiff", "-z"],
1052    ".img" : "imgdiff",
1053    }
1054
1055class Difference(object):
1056  def __init__(self, tf, sf, diff_program=None):
1057    self.tf = tf
1058    self.sf = sf
1059    self.patch = None
1060    self.diff_program = diff_program
1061
1062  def ComputePatch(self):
1063    """Compute the patch (as a string of data) needed to turn sf into
1064    tf.  Returns the same tuple as GetPatch()."""
1065
1066    tf = self.tf
1067    sf = self.sf
1068
1069    if self.diff_program:
1070      diff_program = self.diff_program
1071    else:
1072      ext = os.path.splitext(tf.name)[1]
1073      diff_program = DIFF_PROGRAM_BY_EXT.get(ext, "bsdiff")
1074
1075    ttemp = tf.WriteToTemp()
1076    stemp = sf.WriteToTemp()
1077
1078    ext = os.path.splitext(tf.name)[1]
1079
1080    try:
1081      ptemp = tempfile.NamedTemporaryFile()
1082      if isinstance(diff_program, list):
1083        cmd = copy.copy(diff_program)
1084      else:
1085        cmd = [diff_program]
1086      cmd.append(stemp.name)
1087      cmd.append(ttemp.name)
1088      cmd.append(ptemp.name)
1089      p = Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
1090      err = []
1091      def run():
1092        _, e = p.communicate()
1093        if e:
1094          err.append(e)
1095      th = threading.Thread(target=run)
1096      th.start()
1097      th.join(timeout=300)   # 5 mins
1098      if th.is_alive():
1099        print "WARNING: diff command timed out"
1100        p.terminate()
1101        th.join(5)
1102        if th.is_alive():
1103          p.kill()
1104          th.join()
1105
1106      if err or p.returncode != 0:
1107        print "WARNING: failure running %s:\n%s\n" % (
1108            diff_program, "".join(err))
1109        self.patch = None
1110        return None, None, None
1111      diff = ptemp.read()
1112    finally:
1113      ptemp.close()
1114      stemp.close()
1115      ttemp.close()
1116
1117    self.patch = diff
1118    return self.tf, self.sf, self.patch
1119
1120
1121  def GetPatch(self):
1122    """Return a tuple (target_file, source_file, patch_data).
1123    patch_data may be None if ComputePatch hasn't been called, or if
1124    computing the patch failed."""
1125    return self.tf, self.sf, self.patch
1126
1127
1128def ComputeDifferences(diffs):
1129  """Call ComputePatch on all the Difference objects in 'diffs'."""
1130  print len(diffs), "diffs to compute"
1131
1132  # Do the largest files first, to try and reduce the long-pole effect.
1133  by_size = [(i.tf.size, i) for i in diffs]
1134  by_size.sort(reverse=True)
1135  by_size = [i[1] for i in by_size]
1136
1137  lock = threading.Lock()
1138  diff_iter = iter(by_size)   # accessed under lock
1139
1140  def worker():
1141    try:
1142      lock.acquire()
1143      for d in diff_iter:
1144        lock.release()
1145        start = time.time()
1146        d.ComputePatch()
1147        dur = time.time() - start
1148        lock.acquire()
1149
1150        tf, sf, patch = d.GetPatch()
1151        if sf.name == tf.name:
1152          name = tf.name
1153        else:
1154          name = "%s (%s)" % (tf.name, sf.name)
1155        if patch is None:
1156          print "patching failed!                                  %s" % (name,)
1157        else:
1158          print "%8.2f sec %8d / %8d bytes (%6.2f%%) %s" % (
1159              dur, len(patch), tf.size, 100.0 * len(patch) / tf.size, name)
1160      lock.release()
1161    except Exception as e:
1162      print e
1163      raise
1164
1165  # start worker threads; wait for them all to finish.
1166  threads = [threading.Thread(target=worker)
1167             for i in range(OPTIONS.worker_threads)]
1168  for th in threads:
1169    th.start()
1170  while threads:
1171    threads.pop().join()
1172
1173
1174class BlockDifference(object):
1175  def __init__(self, partition, tgt, src=None, check_first_block=False,
1176               version=None):
1177    self.tgt = tgt
1178    self.src = src
1179    self.partition = partition
1180    self.check_first_block = check_first_block
1181
1182    # Due to http://b/20939131, check_first_block is disabled temporarily.
1183    assert not self.check_first_block
1184
1185    if version is None:
1186      version = 1
1187      if OPTIONS.info_dict:
1188        version = max(
1189            int(i) for i in
1190            OPTIONS.info_dict.get("blockimgdiff_versions", "1").split(","))
1191    self.version = version
1192
1193    b = blockimgdiff.BlockImageDiff(tgt, src, threads=OPTIONS.worker_threads,
1194                                    version=self.version)
1195    tmpdir = tempfile.mkdtemp()
1196    OPTIONS.tempfiles.append(tmpdir)
1197    self.path = os.path.join(tmpdir, partition)
1198    b.Compute(self.path)
1199
1200    _, self.device = GetTypeAndDevice("/" + partition, OPTIONS.info_dict)
1201
1202  def WriteScript(self, script, output_zip, progress=None):
1203    if not self.src:
1204      # write the output unconditionally
1205      script.Print("Patching %s image unconditionally..." % (self.partition,))
1206    else:
1207      script.Print("Patching %s image after verification." % (self.partition,))
1208
1209    if progress:
1210      script.ShowProgress(progress, 0)
1211    self._WriteUpdate(script, output_zip)
1212    self._WritePostInstallVerifyScript(script)
1213
1214  def WriteVerifyScript(self, script):
1215    partition = self.partition
1216    if not self.src:
1217      script.Print("Image %s will be patched unconditionally." % (partition,))
1218    else:
1219      ranges = self.src.care_map.subtract(self.src.clobbered_blocks)
1220      ranges_str = ranges.to_string_raw()
1221      if self.version >= 3:
1222        script.AppendExtra(('if (range_sha1("%s", "%s") == "%s" || '
1223                            'block_image_verify("%s", '
1224                            'package_extract_file("%s.transfer.list"), '
1225                            '"%s.new.dat", "%s.patch.dat")) then') % (
1226                            self.device, ranges_str, self.src.TotalSha1(),
1227                            self.device, partition, partition, partition))
1228      else:
1229        script.AppendExtra('if range_sha1("%s", "%s") == "%s" then' % (
1230                           self.device, ranges_str, self.src.TotalSha1()))
1231      script.Print('Verified %s image...' % (partition,))
1232      script.AppendExtra('else')
1233
1234      # When generating incrementals for the system and vendor partitions,
1235      # explicitly check the first block (which contains the superblock) of
1236      # the partition to see if it's what we expect. If this check fails,
1237      # give an explicit log message about the partition having been
1238      # remounted R/W (the most likely explanation) and the need to flash to
1239      # get OTAs working again.
1240      if self.check_first_block:
1241        self._CheckFirstBlock(script)
1242
1243      # Abort the OTA update. Note that the incremental OTA cannot be applied
1244      # even if it may match the checksum of the target partition.
1245      # a) If version < 3, operations like move and erase will make changes
1246      #    unconditionally and damage the partition.
1247      # b) If version >= 3, it won't even reach here.
1248      script.AppendExtra(('abort("%s partition has unexpected contents");\n'
1249                          'endif;') % (partition,))
1250
1251  def _WritePostInstallVerifyScript(self, script):
1252    partition = self.partition
1253    script.Print('Verifying the updated %s image...' % (partition,))
1254    # Unlike pre-install verification, clobbered_blocks should not be ignored.
1255    ranges = self.tgt.care_map
1256    ranges_str = ranges.to_string_raw()
1257    script.AppendExtra('if range_sha1("%s", "%s") == "%s" then' % (
1258                       self.device, ranges_str,
1259                       self.tgt.TotalSha1(include_clobbered_blocks=True)))
1260
1261    # Bug: 20881595
1262    # Verify that extended blocks are really zeroed out.
1263    if self.tgt.extended:
1264      ranges_str = self.tgt.extended.to_string_raw()
1265      script.AppendExtra('if range_sha1("%s", "%s") == "%s" then' % (
1266                         self.device, ranges_str,
1267                         self._HashZeroBlocks(self.tgt.extended.size())))
1268      script.Print('Verified the updated %s image.' % (partition,))
1269      script.AppendExtra(
1270          'else\n'
1271          '  abort("%s partition has unexpected non-zero contents after OTA '
1272          'update");\n'
1273          'endif;' % (partition,))
1274    else:
1275      script.Print('Verified the updated %s image.' % (partition,))
1276
1277    script.AppendExtra(
1278        'else\n'
1279        '  abort("%s partition has unexpected contents after OTA update");\n'
1280        'endif;' % (partition,))
1281
1282  def _WriteUpdate(self, script, output_zip):
1283    ZipWrite(output_zip,
1284             '{}.transfer.list'.format(self.path),
1285             '{}.transfer.list'.format(self.partition))
1286    ZipWrite(output_zip,
1287             '{}.new.dat'.format(self.path),
1288             '{}.new.dat'.format(self.partition))
1289    ZipWrite(output_zip,
1290             '{}.patch.dat'.format(self.path),
1291             '{}.patch.dat'.format(self.partition),
1292             compress_type=zipfile.ZIP_STORED)
1293
1294    call = ('block_image_update("{device}", '
1295            'package_extract_file("{partition}.transfer.list"), '
1296            '"{partition}.new.dat", "{partition}.patch.dat");\n'.format(
1297                device=self.device, partition=self.partition))
1298    script.AppendExtra(script.WordWrap(call))
1299
1300  def _HashBlocks(self, source, ranges): # pylint: disable=no-self-use
1301    data = source.ReadRangeSet(ranges)
1302    ctx = sha1()
1303
1304    for p in data:
1305      ctx.update(p)
1306
1307    return ctx.hexdigest()
1308
1309  def _HashZeroBlocks(self, num_blocks): # pylint: disable=no-self-use
1310    """Return the hash value for all zero blocks."""
1311    zero_block = '\x00' * 4096
1312    ctx = sha1()
1313    for _ in range(num_blocks):
1314      ctx.update(zero_block)
1315
1316    return ctx.hexdigest()
1317
1318  # TODO(tbao): Due to http://b/20939131, block 0 may be changed without
1319  # remounting R/W. Will change the checking to a finer-grained way to
1320  # mask off those bits.
1321  def _CheckFirstBlock(self, script):
1322    r = rangelib.RangeSet((0, 1))
1323    srchash = self._HashBlocks(self.src, r)
1324
1325    script.AppendExtra(('(range_sha1("%s", "%s") == "%s") || '
1326                        'abort("%s has been remounted R/W; '
1327                        'reflash device to reenable OTA updates");')
1328                       % (self.device, r.to_string_raw(), srchash,
1329                          self.device))
1330
1331DataImage = blockimgdiff.DataImage
1332
1333
1334# map recovery.fstab's fs_types to mount/format "partition types"
1335PARTITION_TYPES = {
1336    "yaffs2": "MTD",
1337    "mtd": "MTD",
1338    "ext4": "EMMC",
1339    "emmc": "EMMC",
1340    "f2fs": "EMMC",
1341    "squashfs": "EMMC"
1342}
1343
1344def GetTypeAndDevice(mount_point, info):
1345  fstab = info["fstab"]
1346  if fstab:
1347    return (PARTITION_TYPES[fstab[mount_point].fs_type],
1348            fstab[mount_point].device)
1349  else:
1350    raise KeyError
1351
1352
1353def ParseCertificate(data):
1354  """Parse a PEM-format certificate."""
1355  cert = []
1356  save = False
1357  for line in data.split("\n"):
1358    if "--END CERTIFICATE--" in line:
1359      break
1360    if save:
1361      cert.append(line)
1362    if "--BEGIN CERTIFICATE--" in line:
1363      save = True
1364  cert = "".join(cert).decode('base64')
1365  return cert
1366
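# A minimal sketch of ParseCertificate(): it strips the PEM armor and
# base64-decodes the body. The payload below is a stand-in, not a real
# certificate.
def _example_parse_certificate():
  pem = ("-----BEGIN CERTIFICATE-----\n"
         "aGVsbG8=\n"
         "-----END CERTIFICATE-----\n")
  assert ParseCertificate(pem) == "hello"
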
1367def MakeRecoveryPatch(input_dir, output_sink, recovery_img, boot_img,
1368                      info_dict=None):
1369  """Generate a binary patch that creates the recovery image starting
1370  with the boot image.  (Most of the space in these images is just the
1371  kernel, which is identical for the two, so the resulting patch
1372  should be efficient.)  Add it to the output zip, along with a shell
1373  script that is run from init.rc on first boot to actually do the
1374  patching and install the new recovery image.
1375
1376  recovery_img and boot_img should be File objects for the
1377  corresponding images.  info should be the dictionary returned by
1378  common.LoadInfoDict() on the input target_files.
1379  """
1380
1381  if info_dict is None:
1382    info_dict = OPTIONS.info_dict
1383
1384  diff_program = ["imgdiff"]
1385  path = os.path.join(input_dir, "SYSTEM", "etc", "recovery-resource.dat")
1386  if os.path.exists(path):
1387    diff_program.append("-b")
1388    diff_program.append(path)
1389    bonus_args = "-b /system/etc/recovery-resource.dat"
1390  else:
1391    bonus_args = ""
1392
1393  d = Difference(recovery_img, boot_img, diff_program=diff_program)
1394  _, _, patch = d.ComputePatch()
1395  output_sink("recovery-from-boot.p", patch)
1396
1397  try:
1398    boot_type, boot_device = GetTypeAndDevice("/boot", info_dict)
1399    recovery_type, recovery_device = GetTypeAndDevice("/recovery", info_dict)
1400  except KeyError:
1401    return
1402
1403  sh = """#!/system/bin/sh
1404if ! applypatch -c %(recovery_type)s:%(recovery_device)s:%(recovery_size)d:%(recovery_sha1)s; then
1405  applypatch %(bonus_args)s %(boot_type)s:%(boot_device)s:%(boot_size)d:%(boot_sha1)s %(recovery_type)s:%(recovery_device)s %(recovery_sha1)s %(recovery_size)d %(boot_sha1)s:/system/recovery-from-boot.p && log -t recovery "Installing new recovery image: succeeded" || log -t recovery "Installing new recovery image: failed"
1406else
1407  log -t recovery "Recovery image already installed"
1408fi
1409""" % {'boot_size': boot_img.size,
1410       'boot_sha1': boot_img.sha1,
1411       'recovery_size': recovery_img.size,
1412       'recovery_sha1': recovery_img.sha1,
1413       'boot_type': boot_type,
1414       'boot_device': boot_device,
1415       'recovery_type': recovery_type,
1416       'recovery_device': recovery_device,
1417       'bonus_args': bonus_args}
1418
1419  # The install script location moved from /system/etc to /system/bin
1420  # in the L release.  Parse init.*.rc files to find out where the
1421  # target-files expects it to be, and put it there.
1422  sh_location = "etc/install-recovery.sh"
1423  found = False
1424  init_rc_dir = os.path.join(input_dir, "BOOT", "RAMDISK")
1425  init_rc_files = os.listdir(init_rc_dir)
1426  for init_rc_file in init_rc_files:
1427    if (not init_rc_file.startswith('init.') or
1428        not init_rc_file.endswith('.rc')):
1429      continue
1430
1431    with open(os.path.join(init_rc_dir, init_rc_file)) as f:
1432      for line in f:
1433        m = re.match(r"^service flash_recovery /system/(\S+)\s*$", line)
1434        if m:
1435          sh_location = m.group(1)
1436          found = True
1437          break
1438
1439    if found:
1440      break
1441
1442  print "putting script in", sh_location
1443
1444  output_sink(sh_location, sh)
1445