common.py revision d3a803e6680e86f7b4960d51ab0b620728b793ca
1# Copyright (C) 2008 The Android Open Source Project
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7#      http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14
15import copy
16import errno
17import getopt
18import getpass
19import imp
20import os
21import platform
22import re
23import shlex
24import shutil
25import subprocess
26import sys
27import tempfile
28import threading
29import time
30import zipfile
31
32import blockimgdiff
33import rangelib
34
35from hashlib import sha1 as sha1
36
37
38class Options(object):
39  def __init__(self):
40    platform_search_path = {
41        "linux2": "out/host/linux-x86",
42        "darwin": "out/host/darwin-x86",
43    }
44
45    self.search_path = platform_search_path.get(sys.platform, None)
46    self.signapk_path = "framework/signapk.jar"  # Relative to search_path
47    self.extra_signapk_args = []
48    self.java_path = "java"  # Use the one on the path by default.
49    self.java_args = "-Xmx2048m" # JVM Args
50    self.public_key_suffix = ".x509.pem"
51    self.private_key_suffix = ".pk8"
52    # use otatools built boot_signer by default
53    self.boot_signer_path = "boot_signer"
54    self.verbose = False
55    self.tempfiles = []
56    self.device_specific = None
57    self.extras = {}
58    self.info_dict = None
59    self.worker_threads = None
60
61
62OPTIONS = Options()
63
64
65# Values for "certificate" in apkcerts that mean special things.
66SPECIAL_CERT_STRINGS = ("PRESIGNED", "EXTERNAL")
67
68
69class ExternalError(RuntimeError):
70  pass
71
72
73def Run(args, **kwargs):
74  """Create and return a subprocess.Popen object, printing the command
75  line on the terminal if -v was specified."""
76  if OPTIONS.verbose:
77    print "  running: ", " ".join(args)
78  return subprocess.Popen(args, **kwargs)
79
80
81def CloseInheritedPipes():
82  """ Gmake in MAC OS has file descriptor (PIPE) leak. We close those fds
83  before doing other work."""
84  if platform.system() != "Darwin":
85    return
86  for d in range(3, 1025):
87    try:
88      stat = os.fstat(d)
89      if stat is not None:
90        pipebit = stat[0] & 0x1000
91        if pipebit != 0:
92          os.close(d)
93    except OSError:
94      pass
95
96
97def LoadInfoDict(input_file, input_dir=None):
98  """Read and parse the META/misc_info.txt key/value pairs from the
99  input target files and return a dict."""
100
101  def read_helper(fn):
102    if isinstance(input_file, zipfile.ZipFile):
103      return input_file.read(fn)
104    else:
105      path = os.path.join(input_file, *fn.split("/"))
106      try:
107        with open(path) as f:
108          return f.read()
109      except IOError as e:
110        if e.errno == errno.ENOENT:
111          raise KeyError(fn)
112  d = {}
113  try:
114    d = LoadDictionaryFromLines(read_helper("META/misc_info.txt").split("\n"))
115  except KeyError:
116    # ok if misc_info.txt doesn't exist
117    pass
118
119  # backwards compatibility: These values used to be in their own
120  # files.  Look for them, in case we're processing an old
121  # target_files zip.
122
123  if "mkyaffs2_extra_flags" not in d:
124    try:
125      d["mkyaffs2_extra_flags"] = read_helper(
126          "META/mkyaffs2-extra-flags.txt").strip()
127    except KeyError:
128      # ok if flags don't exist
129      pass
130
131  if "recovery_api_version" not in d:
132    try:
133      d["recovery_api_version"] = read_helper(
134          "META/recovery-api-version.txt").strip()
135    except KeyError:
136      raise ValueError("can't find recovery API version in input target-files")
137
138  if "tool_extensions" not in d:
139    try:
140      d["tool_extensions"] = read_helper("META/tool-extensions.txt").strip()
141    except KeyError:
142      # ok if extensions don't exist
143      pass
144
145  if "fstab_version" not in d:
146    d["fstab_version"] = "1"
147
148  # A few properties are stored as links to the files in the out/ directory.
149  # It works fine with the build system. However, they are no longer available
150  # when (re)generating from target_files zip. If input_dir is not None, we
151  # are doing repacking. Redirect those properties to the actual files in the
152  # unzipped directory.
153  if input_dir is not None:
154    # We carry a copy of file_contexts.bin under META/. If not available,
155    # search BOOT/RAMDISK/. Note that sometimes we may need a different file
156    # to build images than the one running on device, such as when enabling
157    # system_root_image. In that case, we must have the one for image
158    # generation copied to META/.
159    fc_config = os.path.join(input_dir, "META", "file_contexts.bin")
160    if d.get("system_root_image") == "true":
161      assert os.path.exists(fc_config)
162    if not os.path.exists(fc_config):
163      fc_config = os.path.join(input_dir, "BOOT", "RAMDISK", "file_contexts.bin")
164      if not os.path.exists(fc_config):
165        fc_config = None
166
167    if fc_config:
168      d["selinux_fc"] = fc_config
169
170    # Similarly we need to redirect "ramdisk_dir" and "ramdisk_fs_config".
171    if d.get("system_root_image") == "true":
172      d["ramdisk_dir"] = os.path.join(input_dir, "ROOT")
173      d["ramdisk_fs_config"] = os.path.join(
174          input_dir, "META", "root_filesystem_config.txt")
175
176  try:
177    data = read_helper("META/imagesizes.txt")
178    for line in data.split("\n"):
179      if not line:
180        continue
181      name, value = line.split(" ", 1)
182      if not value:
183        continue
184      if name == "blocksize":
185        d[name] = value
186      else:
187        d[name + "_size"] = value
188  except KeyError:
189    pass
190
191  def makeint(key):
192    if key in d:
193      d[key] = int(d[key], 0)
194
195  makeint("recovery_api_version")
196  makeint("blocksize")
197  makeint("system_size")
198  makeint("vendor_size")
199  makeint("userdata_size")
200  makeint("cache_size")
201  makeint("recovery_size")
202  makeint("boot_size")
203  makeint("fstab_version")
204
205  d["fstab"] = LoadRecoveryFSTab(read_helper, d["fstab_version"], d.get("system_root_image", False))
206  d["build.prop"] = LoadBuildProp(read_helper)
207  return d
208
209def LoadBuildProp(read_helper):
210  try:
211    data = read_helper("SYSTEM/build.prop")
212  except KeyError:
213    print "Warning: could not find SYSTEM/build.prop in %s" % zip
214    data = ""
215  return LoadDictionaryFromLines(data.split("\n"))
216
217def LoadDictionaryFromLines(lines):
218  d = {}
219  for line in lines:
220    line = line.strip()
221    if not line or line.startswith("#"):
222      continue
223    if "=" in line:
224      name, value = line.split("=", 1)
225      d[name] = value
226  return d
227
228def LoadRecoveryFSTab(read_helper, fstab_version, system_root_image=False):
229  class Partition(object):
230    def __init__(self, mount_point, fs_type, device, length, device2, context):
231      self.mount_point = mount_point
232      self.fs_type = fs_type
233      self.device = device
234      self.length = length
235      self.device2 = device2
236      self.context = context
237
238  try:
239    data = read_helper("RECOVERY/RAMDISK/etc/recovery.fstab")
240  except KeyError:
241    print "Warning: could not find RECOVERY/RAMDISK/etc/recovery.fstab"
242    data = ""
243
244  if fstab_version == 1:
245    d = {}
246    for line in data.split("\n"):
247      line = line.strip()
248      if not line or line.startswith("#"):
249        continue
250      pieces = line.split()
251      if not 3 <= len(pieces) <= 4:
252        raise ValueError("malformed recovery.fstab line: \"%s\"" % (line,))
253      options = None
254      if len(pieces) >= 4:
255        if pieces[3].startswith("/"):
256          device2 = pieces[3]
257          if len(pieces) >= 5:
258            options = pieces[4]
259        else:
260          device2 = None
261          options = pieces[3]
262      else:
263        device2 = None
264
265      mount_point = pieces[0]
266      length = 0
267      if options:
268        options = options.split(",")
269        for i in options:
270          if i.startswith("length="):
271            length = int(i[7:])
272          else:
273            print "%s: unknown option \"%s\"" % (mount_point, i)
274
275      d[mount_point] = Partition(mount_point=mount_point, fs_type=pieces[1],
276                                 device=pieces[2], length=length,
277                                 device2=device2)
278
279  elif fstab_version == 2:
280    d = {}
281    for line in data.split("\n"):
282      line = line.strip()
283      if not line or line.startswith("#"):
284        continue
285      # <src> <mnt_point> <type> <mnt_flags and options> <fs_mgr_flags>
286      pieces = line.split()
287      if len(pieces) != 5:
288        raise ValueError("malformed recovery.fstab line: \"%s\"" % (line,))
289
290      # Ignore entries that are managed by vold
291      options = pieces[4]
292      if "voldmanaged=" in options:
293        continue
294
295      # It's a good line, parse it
296      length = 0
297      options = options.split(",")
298      for i in options:
299        if i.startswith("length="):
300          length = int(i[7:])
301        else:
302          # Ignore all unknown options in the unified fstab
303          continue
304
305      mount_flags = pieces[3]
306      # Honor the SELinux context if present.
307      context = None
308      for i in mount_flags.split(","):
309        if i.startswith("context="):
310          context = i
311
312      mount_point = pieces[1]
313      d[mount_point] = Partition(mount_point=mount_point, fs_type=pieces[2],
314                                 device=pieces[0], length=length,
315                                 device2=None, context=context)
316
317  else:
318    raise ValueError("Unknown fstab_version: \"%d\"" % (fstab_version,))
319
320  # / is used for the system mount point when the root directory is included in
321  # system. Other areas assume system is always at "/system" so point /system at /
322  if system_root_image:
323    assert not d.has_key("/system") and d.has_key("/")
324    d["/system"] = d["/"]
325  return d
326
327
328def DumpInfoDict(d):
329  for k, v in sorted(d.items()):
330    print "%-25s = (%s) %s" % (k, type(v).__name__, v)
331
332
333def BuildBootableImage(sourcedir, fs_config_file, info_dict=None):
334  """Take a kernel, cmdline, and ramdisk directory from the input (in
335  'sourcedir'), and turn them into a boot image.  Return the image
336  data, or None if sourcedir does not appear to contains files for
337  building the requested image."""
338
339  if (not os.access(os.path.join(sourcedir, "RAMDISK"), os.F_OK) or
340      not os.access(os.path.join(sourcedir, "kernel"), os.F_OK)):
341    return None
342
343  if info_dict is None:
344    info_dict = OPTIONS.info_dict
345
346  ramdisk_img = tempfile.NamedTemporaryFile()
347  img = tempfile.NamedTemporaryFile()
348
349  if os.access(fs_config_file, os.F_OK):
350    cmd = ["mkbootfs", "-f", fs_config_file, os.path.join(sourcedir, "RAMDISK")]
351  else:
352    cmd = ["mkbootfs", os.path.join(sourcedir, "RAMDISK")]
353  p1 = Run(cmd, stdout=subprocess.PIPE)
354  p2 = Run(["minigzip"],
355           stdin=p1.stdout, stdout=ramdisk_img.file.fileno())
356
357  p2.wait()
358  p1.wait()
359  assert p1.returncode == 0, "mkbootfs of %s ramdisk failed" % (sourcedir,)
360  assert p2.returncode == 0, "minigzip of %s ramdisk failed" % (sourcedir,)
361
362  # use MKBOOTIMG from environ, or "mkbootimg" if empty or not set
363  mkbootimg = os.getenv('MKBOOTIMG') or "mkbootimg"
364
365  cmd = [mkbootimg, "--kernel", os.path.join(sourcedir, "kernel")]
366
367  fn = os.path.join(sourcedir, "second")
368  if os.access(fn, os.F_OK):
369    cmd.append("--second")
370    cmd.append(fn)
371
372  fn = os.path.join(sourcedir, "cmdline")
373  if os.access(fn, os.F_OK):
374    cmd.append("--cmdline")
375    cmd.append(open(fn).read().rstrip("\n"))
376
377  fn = os.path.join(sourcedir, "base")
378  if os.access(fn, os.F_OK):
379    cmd.append("--base")
380    cmd.append(open(fn).read().rstrip("\n"))
381
382  fn = os.path.join(sourcedir, "pagesize")
383  if os.access(fn, os.F_OK):
384    cmd.append("--pagesize")
385    cmd.append(open(fn).read().rstrip("\n"))
386
387  args = info_dict.get("mkbootimg_args", None)
388  if args and args.strip():
389    cmd.extend(shlex.split(args))
390
391  img_unsigned = None
392  if info_dict.get("vboot", None):
393    img_unsigned = tempfile.NamedTemporaryFile()
394    cmd.extend(["--ramdisk", ramdisk_img.name,
395                "--output", img_unsigned.name])
396  else:
397    cmd.extend(["--ramdisk", ramdisk_img.name,
398                "--output", img.name])
399
400  p = Run(cmd, stdout=subprocess.PIPE)
401  p.communicate()
402  assert p.returncode == 0, "mkbootimg of %s image failed" % (
403      os.path.basename(sourcedir),)
404
405  if info_dict.get("verity_key", None):
406    path = "/" + os.path.basename(sourcedir).lower()
407    cmd = [OPTIONS.boot_signer_path, path, img.name,
408           info_dict["verity_key"] + ".pk8",
409           info_dict["verity_key"] + ".x509.pem", img.name]
410    p = Run(cmd, stdout=subprocess.PIPE)
411    p.communicate()
412    assert p.returncode == 0, "boot_signer of %s image failed" % path
413
414  # Sign the image if vboot is non-empty.
415  elif info_dict.get("vboot", None):
416    path = "/" + os.path.basename(sourcedir).lower()
417    img_keyblock = tempfile.NamedTemporaryFile()
418    cmd = [info_dict["vboot_signer_cmd"], info_dict["futility"],
419           img_unsigned.name, info_dict["vboot_key"] + ".vbpubk",
420           info_dict["vboot_key"] + ".vbprivk", img_keyblock.name,
421           img.name]
422    p = Run(cmd, stdout=subprocess.PIPE)
423    p.communicate()
424    assert p.returncode == 0, "vboot_signer of %s image failed" % path
425
426    # Clean up the temp files.
427    img_unsigned.close()
428    img_keyblock.close()
429
430  img.seek(os.SEEK_SET, 0)
431  data = img.read()
432
433  ramdisk_img.close()
434  img.close()
435
436  return data
437
438
439def GetBootableImage(name, prebuilt_name, unpack_dir, tree_subdir,
440                     info_dict=None):
441  """Return a File object (with name 'name') with the desired bootable
442  image.  Look for it in 'unpack_dir'/BOOTABLE_IMAGES under the name
443  'prebuilt_name', otherwise look for it under 'unpack_dir'/IMAGES,
444  otherwise construct it from the source files in
445  'unpack_dir'/'tree_subdir'."""
446
447  prebuilt_path = os.path.join(unpack_dir, "BOOTABLE_IMAGES", prebuilt_name)
448  if os.path.exists(prebuilt_path):
449    print "using prebuilt %s from BOOTABLE_IMAGES..." % (prebuilt_name,)
450    return File.FromLocalFile(name, prebuilt_path)
451
452  prebuilt_path = os.path.join(unpack_dir, "IMAGES", prebuilt_name)
453  if os.path.exists(prebuilt_path):
454    print "using prebuilt %s from IMAGES..." % (prebuilt_name,)
455    return File.FromLocalFile(name, prebuilt_path)
456
457  print "building image from target_files %s..." % (tree_subdir,)
458  fs_config = "META/" + tree_subdir.lower() + "_filesystem_config.txt"
459  data = BuildBootableImage(os.path.join(unpack_dir, tree_subdir),
460                            os.path.join(unpack_dir, fs_config),
461                            info_dict)
462  if data:
463    return File(name, data)
464  return None
465
466
467def UnzipTemp(filename, pattern=None):
468  """Unzip the given archive into a temporary directory and return the name.
469
470  If filename is of the form "foo.zip+bar.zip", unzip foo.zip into a
471  temp dir, then unzip bar.zip into that_dir/BOOTABLE_IMAGES.
472
473  Returns (tempdir, zipobj) where zipobj is a zipfile.ZipFile (of the
474  main file), open for reading.
475  """
476
477  tmp = tempfile.mkdtemp(prefix="targetfiles-")
478  OPTIONS.tempfiles.append(tmp)
479
480  def unzip_to_dir(filename, dirname):
481    cmd = ["unzip", "-o", "-q", filename, "-d", dirname]
482    if pattern is not None:
483      cmd.append(pattern)
484    p = Run(cmd, stdout=subprocess.PIPE)
485    p.communicate()
486    if p.returncode != 0:
487      raise ExternalError("failed to unzip input target-files \"%s\"" %
488                          (filename,))
489
490  m = re.match(r"^(.*[.]zip)\+(.*[.]zip)$", filename, re.IGNORECASE)
491  if m:
492    unzip_to_dir(m.group(1), tmp)
493    unzip_to_dir(m.group(2), os.path.join(tmp, "BOOTABLE_IMAGES"))
494    filename = m.group(1)
495  else:
496    unzip_to_dir(filename, tmp)
497
498  return tmp, zipfile.ZipFile(filename, "r")
499
500
501def GetKeyPasswords(keylist):
502  """Given a list of keys, prompt the user to enter passwords for
503  those which require them.  Return a {key: password} dict.  password
504  will be None if the key has no password."""
505
506  no_passwords = []
507  need_passwords = []
508  key_passwords = {}
509  devnull = open("/dev/null", "w+b")
510  for k in sorted(keylist):
511    # We don't need a password for things that aren't really keys.
512    if k in SPECIAL_CERT_STRINGS:
513      no_passwords.append(k)
514      continue
515
516    p = Run(["openssl", "pkcs8", "-in", k+OPTIONS.private_key_suffix,
517             "-inform", "DER", "-nocrypt"],
518            stdin=devnull.fileno(),
519            stdout=devnull.fileno(),
520            stderr=subprocess.STDOUT)
521    p.communicate()
522    if p.returncode == 0:
523      # Definitely an unencrypted key.
524      no_passwords.append(k)
525    else:
526      p = Run(["openssl", "pkcs8", "-in", k+OPTIONS.private_key_suffix,
527               "-inform", "DER", "-passin", "pass:"],
528              stdin=devnull.fileno(),
529              stdout=devnull.fileno(),
530              stderr=subprocess.PIPE)
531      _, stderr = p.communicate()
532      if p.returncode == 0:
533        # Encrypted key with empty string as password.
534        key_passwords[k] = ''
535      elif stderr.startswith('Error decrypting key'):
536        # Definitely encrypted key.
537        # It would have said "Error reading key" if it didn't parse correctly.
538        need_passwords.append(k)
539      else:
540        # Potentially, a type of key that openssl doesn't understand.
541        # We'll let the routines in signapk.jar handle it.
542        no_passwords.append(k)
543  devnull.close()
544
545  key_passwords.update(PasswordManager().GetPasswords(need_passwords))
546  key_passwords.update(dict.fromkeys(no_passwords, None))
547  return key_passwords
548
549
550def SignFile(input_name, output_name, key, password, align=None,
551             whole_file=False):
552  """Sign the input_name zip/jar/apk, producing output_name.  Use the
553  given key and password (the latter may be None if the key does not
554  have a password.
555
556  If align is an integer > 1, zipalign is run to align stored files in
557  the output zip on 'align'-byte boundaries.
558
559  If whole_file is true, use the "-w" option to SignApk to embed a
560  signature that covers the whole file in the archive comment of the
561  zip file.
562  """
563
564  if align == 0 or align == 1:
565    align = None
566
567  if align:
568    temp = tempfile.NamedTemporaryFile()
569    sign_name = temp.name
570  else:
571    sign_name = output_name
572
573  cmd = [OPTIONS.java_path, OPTIONS.java_args, "-jar",
574         os.path.join(OPTIONS.search_path, OPTIONS.signapk_path)]
575  cmd.extend(OPTIONS.extra_signapk_args)
576  if whole_file:
577    cmd.append("-w")
578  cmd.extend([key + OPTIONS.public_key_suffix,
579              key + OPTIONS.private_key_suffix,
580              input_name, sign_name])
581
582  p = Run(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
583  if password is not None:
584    password += "\n"
585  p.communicate(password)
586  if p.returncode != 0:
587    raise ExternalError("signapk.jar failed: return code %s" % (p.returncode,))
588
589  if align:
590    p = Run(["zipalign", "-f", "-p", str(align), sign_name, output_name])
591    p.communicate()
592    if p.returncode != 0:
593      raise ExternalError("zipalign failed: return code %s" % (p.returncode,))
594    temp.close()
595
596
597def CheckSize(data, target, info_dict):
598  """Check the data string passed against the max size limit, if
599  any, for the given target.  Raise exception if the data is too big.
600  Print a warning if the data is nearing the maximum size."""
601
602  if target.endswith(".img"):
603    target = target[:-4]
604  mount_point = "/" + target
605
606  fs_type = None
607  limit = None
608  if info_dict["fstab"]:
609    if mount_point == "/userdata":
610      mount_point = "/data"
611    p = info_dict["fstab"][mount_point]
612    fs_type = p.fs_type
613    device = p.device
614    if "/" in device:
615      device = device[device.rfind("/")+1:]
616    limit = info_dict.get(device + "_size", None)
617  if not fs_type or not limit:
618    return
619
620  if fs_type == "yaffs2":
621    # image size should be increased by 1/64th to account for the
622    # spare area (64 bytes per 2k page)
623    limit = limit / 2048 * (2048+64)
624  size = len(data)
625  pct = float(size) * 100.0 / limit
626  msg = "%s size (%d) is %.2f%% of limit (%d)" % (target, size, pct, limit)
627  if pct >= 99.0:
628    raise ExternalError(msg)
629  elif pct >= 95.0:
630    print
631    print "  WARNING: ", msg
632    print
633  elif OPTIONS.verbose:
634    print "  ", msg
635
636
637def ReadApkCerts(tf_zip):
638  """Given a target_files ZipFile, parse the META/apkcerts.txt file
639  and return a {package: cert} dict."""
640  certmap = {}
641  for line in tf_zip.read("META/apkcerts.txt").split("\n"):
642    line = line.strip()
643    if not line:
644      continue
645    m = re.match(r'^name="(.*)"\s+certificate="(.*)"\s+'
646                 r'private_key="(.*)"$', line)
647    if m:
648      name, cert, privkey = m.groups()
649      public_key_suffix_len = len(OPTIONS.public_key_suffix)
650      private_key_suffix_len = len(OPTIONS.private_key_suffix)
651      if cert in SPECIAL_CERT_STRINGS and not privkey:
652        certmap[name] = cert
653      elif (cert.endswith(OPTIONS.public_key_suffix) and
654            privkey.endswith(OPTIONS.private_key_suffix) and
655            cert[:-public_key_suffix_len] == privkey[:-private_key_suffix_len]):
656        certmap[name] = cert[:-public_key_suffix_len]
657      else:
658        raise ValueError("failed to parse line from apkcerts.txt:\n" + line)
659  return certmap
660
661
662COMMON_DOCSTRING = """
663  -p  (--path)  <dir>
664      Prepend <dir>/bin to the list of places to search for binaries
665      run by this script, and expect to find jars in <dir>/framework.
666
667  -s  (--device_specific) <file>
668      Path to the python module containing device-specific
669      releasetools code.
670
671  -x  (--extra)  <key=value>
672      Add a key/value pair to the 'extras' dict, which device-specific
673      extension code may look at.
674
675  -v  (--verbose)
676      Show command lines being executed.
677
678  -h  (--help)
679      Display this usage message and exit.
680"""
681
682def Usage(docstring):
683  print docstring.rstrip("\n")
684  print COMMON_DOCSTRING
685
686
687def ParseOptions(argv,
688                 docstring,
689                 extra_opts="", extra_long_opts=(),
690                 extra_option_handler=None):
691  """Parse the options in argv and return any arguments that aren't
692  flags.  docstring is the calling module's docstring, to be displayed
693  for errors and -h.  extra_opts and extra_long_opts are for flags
694  defined by the caller, which are processed by passing them to
695  extra_option_handler."""
696
697  try:
698    opts, args = getopt.getopt(
699        argv, "hvp:s:x:" + extra_opts,
700        ["help", "verbose", "path=", "signapk_path=", "extra_signapk_args=",
701         "java_path=", "java_args=", "public_key_suffix=",
702         "private_key_suffix=", "boot_signer_path=", "device_specific=",
703         "extra="] +
704        list(extra_long_opts))
705  except getopt.GetoptError as err:
706    Usage(docstring)
707    print "**", str(err), "**"
708    sys.exit(2)
709
710  for o, a in opts:
711    if o in ("-h", "--help"):
712      Usage(docstring)
713      sys.exit()
714    elif o in ("-v", "--verbose"):
715      OPTIONS.verbose = True
716    elif o in ("-p", "--path"):
717      OPTIONS.search_path = a
718    elif o in ("--signapk_path",):
719      OPTIONS.signapk_path = a
720    elif o in ("--extra_signapk_args",):
721      OPTIONS.extra_signapk_args = shlex.split(a)
722    elif o in ("--java_path",):
723      OPTIONS.java_path = a
724    elif o in ("--java_args",):
725      OPTIONS.java_args = a
726    elif o in ("--public_key_suffix",):
727      OPTIONS.public_key_suffix = a
728    elif o in ("--private_key_suffix",):
729      OPTIONS.private_key_suffix = a
730    elif o in ("--boot_signer_path",):
731      OPTIONS.boot_signer_path = a
732    elif o in ("-s", "--device_specific"):
733      OPTIONS.device_specific = a
734    elif o in ("-x", "--extra"):
735      key, value = a.split("=", 1)
736      OPTIONS.extras[key] = value
737    else:
738      if extra_option_handler is None or not extra_option_handler(o, a):
739        assert False, "unknown option \"%s\"" % (o,)
740
741  if OPTIONS.search_path:
742    os.environ["PATH"] = (os.path.join(OPTIONS.search_path, "bin") +
743                          os.pathsep + os.environ["PATH"])
744
745  return args
746
747
748def MakeTempFile(prefix=None, suffix=None):
749  """Make a temp file and add it to the list of things to be deleted
750  when Cleanup() is called.  Return the filename."""
751  fd, fn = tempfile.mkstemp(prefix=prefix, suffix=suffix)
752  os.close(fd)
753  OPTIONS.tempfiles.append(fn)
754  return fn
755
756
757def Cleanup():
758  for i in OPTIONS.tempfiles:
759    if os.path.isdir(i):
760      shutil.rmtree(i)
761    else:
762      os.remove(i)
763
764
765class PasswordManager(object):
766  def __init__(self):
767    self.editor = os.getenv("EDITOR", None)
768    self.pwfile = os.getenv("ANDROID_PW_FILE", None)
769
770  def GetPasswords(self, items):
771    """Get passwords corresponding to each string in 'items',
772    returning a dict.  (The dict may have keys in addition to the
773    values in 'items'.)
774
775    Uses the passwords in $ANDROID_PW_FILE if available, letting the
776    user edit that file to add more needed passwords.  If no editor is
777    available, or $ANDROID_PW_FILE isn't define, prompts the user
778    interactively in the ordinary way.
779    """
780
781    current = self.ReadFile()
782
783    first = True
784    while True:
785      missing = []
786      for i in items:
787        if i not in current or not current[i]:
788          missing.append(i)
789      # Are all the passwords already in the file?
790      if not missing:
791        return current
792
793      for i in missing:
794        current[i] = ""
795
796      if not first:
797        print "key file %s still missing some passwords." % (self.pwfile,)
798        answer = raw_input("try to edit again? [y]> ").strip()
799        if answer and answer[0] not in 'yY':
800          raise RuntimeError("key passwords unavailable")
801      first = False
802
803      current = self.UpdateAndReadFile(current)
804
805  def PromptResult(self, current): # pylint: disable=no-self-use
806    """Prompt the user to enter a value (password) for each key in
807    'current' whose value is fales.  Returns a new dict with all the
808    values.
809    """
810    result = {}
811    for k, v in sorted(current.iteritems()):
812      if v:
813        result[k] = v
814      else:
815        while True:
816          result[k] = getpass.getpass(
817              "Enter password for %s key> " % k).strip()
818          if result[k]:
819            break
820    return result
821
822  def UpdateAndReadFile(self, current):
823    if not self.editor or not self.pwfile:
824      return self.PromptResult(current)
825
826    f = open(self.pwfile, "w")
827    os.chmod(self.pwfile, 0o600)
828    f.write("# Enter key passwords between the [[[ ]]] brackets.\n")
829    f.write("# (Additional spaces are harmless.)\n\n")
830
831    first_line = None
832    sorted_list = sorted([(not v, k, v) for (k, v) in current.iteritems()])
833    for i, (_, k, v) in enumerate(sorted_list):
834      f.write("[[[  %s  ]]] %s\n" % (v, k))
835      if not v and first_line is None:
836        # position cursor on first line with no password.
837        first_line = i + 4
838    f.close()
839
840    p = Run([self.editor, "+%d" % (first_line,), self.pwfile])
841    _, _ = p.communicate()
842
843    return self.ReadFile()
844
845  def ReadFile(self):
846    result = {}
847    if self.pwfile is None:
848      return result
849    try:
850      f = open(self.pwfile, "r")
851      for line in f:
852        line = line.strip()
853        if not line or line[0] == '#':
854          continue
855        m = re.match(r"^\[\[\[\s*(.*?)\s*\]\]\]\s*(\S+)$", line)
856        if not m:
857          print "failed to parse password file: ", line
858        else:
859          result[m.group(2)] = m.group(1)
860      f.close()
861    except IOError as e:
862      if e.errno != errno.ENOENT:
863        print "error reading password file: ", str(e)
864    return result
865
866
867def ZipWrite(zip_file, filename, arcname=None, perms=0o644,
868             compress_type=None):
869  import datetime
870
871  # http://b/18015246
872  # Python 2.7's zipfile implementation wrongly thinks that zip64 is required
873  # for files larger than 2GiB. We can work around this by adjusting their
874  # limit. Note that `zipfile.writestr()` will not work for strings larger than
875  # 2GiB. The Python interpreter sometimes rejects strings that large (though
876  # it isn't clear to me exactly what circumstances cause this).
877  # `zipfile.write()` must be used directly to work around this.
878  #
879  # This mess can be avoided if we port to python3.
880  saved_zip64_limit = zipfile.ZIP64_LIMIT
881  zipfile.ZIP64_LIMIT = (1 << 32) - 1
882
883  if compress_type is None:
884    compress_type = zip_file.compression
885  if arcname is None:
886    arcname = filename
887
888  saved_stat = os.stat(filename)
889
890  try:
891    # `zipfile.write()` doesn't allow us to pass ZipInfo, so just modify the
892    # file to be zipped and reset it when we're done.
893    os.chmod(filename, perms)
894
895    # Use a fixed timestamp so the output is repeatable.
896    epoch = datetime.datetime.fromtimestamp(0)
897    timestamp = (datetime.datetime(2009, 1, 1) - epoch).total_seconds()
898    os.utime(filename, (timestamp, timestamp))
899
900    zip_file.write(filename, arcname=arcname, compress_type=compress_type)
901  finally:
902    os.chmod(filename, saved_stat.st_mode)
903    os.utime(filename, (saved_stat.st_atime, saved_stat.st_mtime))
904    zipfile.ZIP64_LIMIT = saved_zip64_limit
905
906
907def ZipWriteStr(zip_file, zinfo_or_arcname, data, perms=None,
908                compress_type=None):
909  """Wrap zipfile.writestr() function to work around the zip64 limit.
910
911  Even with the ZIP64_LIMIT workaround, it won't allow writing a string
912  longer than 2GiB. It gives 'OverflowError: size does not fit in an int'
913  when calling crc32(bytes).
914
915  But it still works fine to write a shorter string into a large zip file.
916  We should use ZipWrite() whenever possible, and only use ZipWriteStr()
917  when we know the string won't be too long.
918  """
919
920  saved_zip64_limit = zipfile.ZIP64_LIMIT
921  zipfile.ZIP64_LIMIT = (1 << 32) - 1
922
923  if not isinstance(zinfo_or_arcname, zipfile.ZipInfo):
924    zinfo = zipfile.ZipInfo(filename=zinfo_or_arcname)
925    zinfo.compress_type = zip_file.compression
926    if perms is None:
927      perms = 0o100644
928  else:
929    zinfo = zinfo_or_arcname
930
931  # If compress_type is given, it overrides the value in zinfo.
932  if compress_type is not None:
933    zinfo.compress_type = compress_type
934
935  # If perms is given, it has a priority.
936  if perms is not None:
937    # If perms doesn't set the file type, mark it as a regular file.
938    if perms & 0o770000 == 0:
939      perms |= 0o100000
940    zinfo.external_attr = perms << 16
941
942  # Use a fixed timestamp so the output is repeatable.
943  zinfo.date_time = (2009, 1, 1, 0, 0, 0)
944
945  zip_file.writestr(zinfo, data)
946  zipfile.ZIP64_LIMIT = saved_zip64_limit
947
948
949def ZipClose(zip_file):
950  # http://b/18015246
951  # zipfile also refers to ZIP64_LIMIT during close() when it writes out the
952  # central directory.
953  saved_zip64_limit = zipfile.ZIP64_LIMIT
954  zipfile.ZIP64_LIMIT = (1 << 32) - 1
955
956  zip_file.close()
957
958  zipfile.ZIP64_LIMIT = saved_zip64_limit
959
960
961class DeviceSpecificParams(object):
962  module = None
963  def __init__(self, **kwargs):
964    """Keyword arguments to the constructor become attributes of this
965    object, which is passed to all functions in the device-specific
966    module."""
967    for k, v in kwargs.iteritems():
968      setattr(self, k, v)
969    self.extras = OPTIONS.extras
970
971    if self.module is None:
972      path = OPTIONS.device_specific
973      if not path:
974        return
975      try:
976        if os.path.isdir(path):
977          info = imp.find_module("releasetools", [path])
978        else:
979          d, f = os.path.split(path)
980          b, x = os.path.splitext(f)
981          if x == ".py":
982            f = b
983          info = imp.find_module(f, [d])
984        print "loaded device-specific extensions from", path
985        self.module = imp.load_module("device_specific", *info)
986      except ImportError:
987        print "unable to load device-specific module; assuming none"
988
989  def _DoCall(self, function_name, *args, **kwargs):
990    """Call the named function in the device-specific module, passing
991    the given args and kwargs.  The first argument to the call will be
992    the DeviceSpecific object itself.  If there is no module, or the
993    module does not define the function, return the value of the
994    'default' kwarg (which itself defaults to None)."""
995    if self.module is None or not hasattr(self.module, function_name):
996      return kwargs.get("default", None)
997    return getattr(self.module, function_name)(*((self,) + args), **kwargs)
998
999  def FullOTA_Assertions(self):
1000    """Called after emitting the block of assertions at the top of a
1001    full OTA package.  Implementations can add whatever additional
1002    assertions they like."""
1003    return self._DoCall("FullOTA_Assertions")
1004
1005  def FullOTA_InstallBegin(self):
1006    """Called at the start of full OTA installation."""
1007    return self._DoCall("FullOTA_InstallBegin")
1008
1009  def FullOTA_InstallEnd(self):
1010    """Called at the end of full OTA installation; typically this is
1011    used to install the image for the device's baseband processor."""
1012    return self._DoCall("FullOTA_InstallEnd")
1013
1014  def IncrementalOTA_Assertions(self):
1015    """Called after emitting the block of assertions at the top of an
1016    incremental OTA package.  Implementations can add whatever
1017    additional assertions they like."""
1018    return self._DoCall("IncrementalOTA_Assertions")
1019
1020  def IncrementalOTA_VerifyBegin(self):
1021    """Called at the start of the verification phase of incremental
1022    OTA installation; additional checks can be placed here to abort
1023    the script before any changes are made."""
1024    return self._DoCall("IncrementalOTA_VerifyBegin")
1025
1026  def IncrementalOTA_VerifyEnd(self):
1027    """Called at the end of the verification phase of incremental OTA
1028    installation; additional checks can be placed here to abort the
1029    script before any changes are made."""
1030    return self._DoCall("IncrementalOTA_VerifyEnd")
1031
1032  def IncrementalOTA_InstallBegin(self):
1033    """Called at the start of incremental OTA installation (after
1034    verification is complete)."""
1035    return self._DoCall("IncrementalOTA_InstallBegin")
1036
1037  def IncrementalOTA_InstallEnd(self):
1038    """Called at the end of incremental OTA installation; typically
1039    this is used to install the image for the device's baseband
1040    processor."""
1041    return self._DoCall("IncrementalOTA_InstallEnd")
1042
1043class File(object):
1044  def __init__(self, name, data):
1045    self.name = name
1046    self.data = data
1047    self.size = len(data)
1048    self.sha1 = sha1(data).hexdigest()
1049
1050  @classmethod
1051  def FromLocalFile(cls, name, diskname):
1052    f = open(diskname, "rb")
1053    data = f.read()
1054    f.close()
1055    return File(name, data)
1056
1057  def WriteToTemp(self):
1058    t = tempfile.NamedTemporaryFile()
1059    t.write(self.data)
1060    t.flush()
1061    return t
1062
1063  def AddToZip(self, z, compression=None):
1064    ZipWriteStr(z, self.name, self.data, compress_type=compression)
1065
1066DIFF_PROGRAM_BY_EXT = {
1067    ".gz" : "imgdiff",
1068    ".zip" : ["imgdiff", "-z"],
1069    ".jar" : ["imgdiff", "-z"],
1070    ".apk" : ["imgdiff", "-z"],
1071    ".img" : "imgdiff",
1072    }
1073
1074class Difference(object):
1075  def __init__(self, tf, sf, diff_program=None):
1076    self.tf = tf
1077    self.sf = sf
1078    self.patch = None
1079    self.diff_program = diff_program
1080
1081  def ComputePatch(self):
1082    """Compute the patch (as a string of data) needed to turn sf into
1083    tf.  Returns the same tuple as GetPatch()."""
1084
1085    tf = self.tf
1086    sf = self.sf
1087
1088    if self.diff_program:
1089      diff_program = self.diff_program
1090    else:
1091      ext = os.path.splitext(tf.name)[1]
1092      diff_program = DIFF_PROGRAM_BY_EXT.get(ext, "bsdiff")
1093
1094    ttemp = tf.WriteToTemp()
1095    stemp = sf.WriteToTemp()
1096
1097    ext = os.path.splitext(tf.name)[1]
1098
1099    try:
1100      ptemp = tempfile.NamedTemporaryFile()
1101      if isinstance(diff_program, list):
1102        cmd = copy.copy(diff_program)
1103      else:
1104        cmd = [diff_program]
1105      cmd.append(stemp.name)
1106      cmd.append(ttemp.name)
1107      cmd.append(ptemp.name)
1108      p = Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
1109      err = []
1110      def run():
1111        _, e = p.communicate()
1112        if e:
1113          err.append(e)
1114      th = threading.Thread(target=run)
1115      th.start()
1116      th.join(timeout=300)   # 5 mins
1117      if th.is_alive():
1118        print "WARNING: diff command timed out"
1119        p.terminate()
1120        th.join(5)
1121        if th.is_alive():
1122          p.kill()
1123          th.join()
1124
1125      if err or p.returncode != 0:
1126        print "WARNING: failure running %s:\n%s\n" % (
1127            diff_program, "".join(err))
1128        self.patch = None
1129        return None, None, None
1130      diff = ptemp.read()
1131    finally:
1132      ptemp.close()
1133      stemp.close()
1134      ttemp.close()
1135
1136    self.patch = diff
1137    return self.tf, self.sf, self.patch
1138
1139
1140  def GetPatch(self):
1141    """Return a tuple (target_file, source_file, patch_data).
1142    patch_data may be None if ComputePatch hasn't been called, or if
1143    computing the patch failed."""
1144    return self.tf, self.sf, self.patch
1145
1146
1147def ComputeDifferences(diffs):
1148  """Call ComputePatch on all the Difference objects in 'diffs'."""
1149  print len(diffs), "diffs to compute"
1150
1151  # Do the largest files first, to try and reduce the long-pole effect.
1152  by_size = [(i.tf.size, i) for i in diffs]
1153  by_size.sort(reverse=True)
1154  by_size = [i[1] for i in by_size]
1155
1156  lock = threading.Lock()
1157  diff_iter = iter(by_size)   # accessed under lock
1158
1159  def worker():
1160    try:
1161      lock.acquire()
1162      for d in diff_iter:
1163        lock.release()
1164        start = time.time()
1165        d.ComputePatch()
1166        dur = time.time() - start
1167        lock.acquire()
1168
1169        tf, sf, patch = d.GetPatch()
1170        if sf.name == tf.name:
1171          name = tf.name
1172        else:
1173          name = "%s (%s)" % (tf.name, sf.name)
1174        if patch is None:
1175          print "patching failed!                                  %s" % (name,)
1176        else:
1177          print "%8.2f sec %8d / %8d bytes (%6.2f%%) %s" % (
1178              dur, len(patch), tf.size, 100.0 * len(patch) / tf.size, name)
1179      lock.release()
1180    except Exception as e:
1181      print e
1182      raise
1183
1184  # start worker threads; wait for them all to finish.
1185  threads = [threading.Thread(target=worker)
1186             for i in range(OPTIONS.worker_threads)]
1187  for th in threads:
1188    th.start()
1189  while threads:
1190    threads.pop().join()
1191
1192
1193class BlockDifference(object):
1194  def __init__(self, partition, tgt, src=None, check_first_block=False,
1195               version=None):
1196    self.tgt = tgt
1197    self.src = src
1198    self.partition = partition
1199    self.check_first_block = check_first_block
1200
1201    # Due to http://b/20939131, check_first_block is disabled temporarily.
1202    assert not self.check_first_block
1203
1204    if version is None:
1205      version = 1
1206      if OPTIONS.info_dict:
1207        version = max(
1208            int(i) for i in
1209            OPTIONS.info_dict.get("blockimgdiff_versions", "1").split(","))
1210    self.version = version
1211
1212    b = blockimgdiff.BlockImageDiff(tgt, src, threads=OPTIONS.worker_threads,
1213                                    version=self.version)
1214    tmpdir = tempfile.mkdtemp()
1215    OPTIONS.tempfiles.append(tmpdir)
1216    self.path = os.path.join(tmpdir, partition)
1217    b.Compute(self.path)
1218
1219    _, self.device = GetTypeAndDevice("/" + partition, OPTIONS.info_dict)
1220
1221  def WriteScript(self, script, output_zip, progress=None):
1222    if not self.src:
1223      # write the output unconditionally
1224      script.Print("Patching %s image unconditionally..." % (self.partition,))
1225    else:
1226      script.Print("Patching %s image after verification." % (self.partition,))
1227
1228    if progress:
1229      script.ShowProgress(progress, 0)
1230    self._WriteUpdate(script, output_zip)
1231    self._WritePostInstallVerifyScript(script)
1232
1233  def WriteVerifyScript(self, script):
1234    partition = self.partition
1235    if not self.src:
1236      script.Print("Image %s will be patched unconditionally." % (partition,))
1237    else:
1238      ranges = self.src.care_map.subtract(self.src.clobbered_blocks)
1239      ranges_str = ranges.to_string_raw()
1240      if self.version >= 3:
1241        script.AppendExtra(('if (range_sha1("%s", "%s") == "%s" || '
1242                            'block_image_verify("%s", '
1243                            'package_extract_file("%s.transfer.list"), '
1244                            '"%s.new.dat", "%s.patch.dat")) then') % (
1245                            self.device, ranges_str, self.src.TotalSha1(),
1246                            self.device, partition, partition, partition))
1247      else:
1248        script.AppendExtra('if range_sha1("%s", "%s") == "%s" then' % (
1249                           self.device, ranges_str, self.src.TotalSha1()))
1250      script.Print('Verified %s image...' % (partition,))
1251      script.AppendExtra('else')
1252
1253      # When generating incrementals for the system and vendor partitions,
1254      # explicitly check the first block (which contains the superblock) of
1255      # the partition to see if it's what we expect. If this check fails,
1256      # give an explicit log message about the partition having been
1257      # remounted R/W (the most likely explanation) and the need to flash to
1258      # get OTAs working again.
1259      if self.check_first_block:
1260        self._CheckFirstBlock(script)
1261
1262      # Abort the OTA update. Note that the incremental OTA cannot be applied
1263      # even if it may match the checksum of the target partition.
1264      # a) If version < 3, operations like move and erase will make changes
1265      #    unconditionally and damage the partition.
1266      # b) If version >= 3, it won't even reach here.
1267      script.AppendExtra(('abort("%s partition has unexpected contents");\n'
1268                          'endif;') % (partition,))
1269
1270  def _WritePostInstallVerifyScript(self, script):
1271    partition = self.partition
1272    script.Print('Verifying the updated %s image...' % (partition,))
1273    # Unlike pre-install verification, clobbered_blocks should not be ignored.
1274    ranges = self.tgt.care_map
1275    ranges_str = ranges.to_string_raw()
1276    script.AppendExtra('if range_sha1("%s", "%s") == "%s" then' % (
1277                       self.device, ranges_str,
1278                       self.tgt.TotalSha1(include_clobbered_blocks=True)))
1279
1280    # Bug: 20881595
1281    # Verify that extended blocks are really zeroed out.
1282    if self.tgt.extended:
1283      ranges_str = self.tgt.extended.to_string_raw()
1284      script.AppendExtra('if range_sha1("%s", "%s") == "%s" then' % (
1285                         self.device, ranges_str,
1286                         self._HashZeroBlocks(self.tgt.extended.size())))
1287      script.Print('Verified the updated %s image.' % (partition,))
1288      script.AppendExtra(
1289          'else\n'
1290          '  abort("%s partition has unexpected non-zero contents after OTA '
1291          'update");\n'
1292          'endif;' % (partition,))
1293    else:
1294      script.Print('Verified the updated %s image.' % (partition,))
1295
1296    script.AppendExtra(
1297        'else\n'
1298        '  abort("%s partition has unexpected contents after OTA update");\n'
1299        'endif;' % (partition,))
1300
1301  def _WriteUpdate(self, script, output_zip):
1302    ZipWrite(output_zip,
1303             '{}.transfer.list'.format(self.path),
1304             '{}.transfer.list'.format(self.partition))
1305    ZipWrite(output_zip,
1306             '{}.new.dat'.format(self.path),
1307             '{}.new.dat'.format(self.partition))
1308    ZipWrite(output_zip,
1309             '{}.patch.dat'.format(self.path),
1310             '{}.patch.dat'.format(self.partition),
1311             compress_type=zipfile.ZIP_STORED)
1312
1313    call = ('block_image_update("{device}", '
1314            'package_extract_file("{partition}.transfer.list"), '
1315            '"{partition}.new.dat", "{partition}.patch.dat");\n'.format(
1316                device=self.device, partition=self.partition))
1317    script.AppendExtra(script.WordWrap(call))
1318
1319  def _HashBlocks(self, source, ranges): # pylint: disable=no-self-use
1320    data = source.ReadRangeSet(ranges)
1321    ctx = sha1()
1322
1323    for p in data:
1324      ctx.update(p)
1325
1326    return ctx.hexdigest()
1327
1328  def _HashZeroBlocks(self, num_blocks): # pylint: disable=no-self-use
1329    """Return the hash value for all zero blocks."""
1330    zero_block = '\x00' * 4096
1331    ctx = sha1()
1332    for _ in range(num_blocks):
1333      ctx.update(zero_block)
1334
1335    return ctx.hexdigest()
1336
1337  # TODO(tbao): Due to http://b/20939131, block 0 may be changed without
1338  # remounting R/W. Will change the checking to a finer-grained way to
1339  # mask off those bits.
1340  def _CheckFirstBlock(self, script):
1341    r = rangelib.RangeSet((0, 1))
1342    srchash = self._HashBlocks(self.src, r)
1343
1344    script.AppendExtra(('(range_sha1("%s", "%s") == "%s") || '
1345                        'abort("%s has been remounted R/W; '
1346                        'reflash device to reenable OTA updates");')
1347                       % (self.device, r.to_string_raw(), srchash,
1348                          self.device))
1349
1350DataImage = blockimgdiff.DataImage
1351
1352
1353# map recovery.fstab's fs_types to mount/format "partition types"
1354PARTITION_TYPES = {
1355    "yaffs2": "MTD",
1356    "mtd": "MTD",
1357    "ext4": "EMMC",
1358    "emmc": "EMMC",
1359    "f2fs": "EMMC",
1360    "squashfs": "EMMC"
1361}
1362
1363def GetTypeAndDevice(mount_point, info):
1364  fstab = info["fstab"]
1365  if fstab:
1366    return (PARTITION_TYPES[fstab[mount_point].fs_type],
1367            fstab[mount_point].device)
1368  else:
1369    raise KeyError
1370
1371
1372def ParseCertificate(data):
1373  """Parse a PEM-format certificate."""
1374  cert = []
1375  save = False
1376  for line in data.split("\n"):
1377    if "--END CERTIFICATE--" in line:
1378      break
1379    if save:
1380      cert.append(line)
1381    if "--BEGIN CERTIFICATE--" in line:
1382      save = True
1383  cert = "".join(cert).decode('base64')
1384  return cert
1385
1386def MakeRecoveryPatch(input_dir, output_sink, recovery_img, boot_img,
1387                      info_dict=None):
1388  """Generate a binary patch that creates the recovery image starting
1389  with the boot image.  (Most of the space in these images is just the
1390  kernel, which is identical for the two, so the resulting patch
1391  should be efficient.)  Add it to the output zip, along with a shell
1392  script that is run from init.rc on first boot to actually do the
1393  patching and install the new recovery image.
1394
1395  recovery_img and boot_img should be File objects for the
1396  corresponding images.  info should be the dictionary returned by
1397  common.LoadInfoDict() on the input target_files.
1398  """
1399
1400  if info_dict is None:
1401    info_dict = OPTIONS.info_dict
1402
1403  full_recovery_image = info_dict.get("full_recovery_image", None) == "true"
1404
1405  if full_recovery_image:
1406    output_sink("etc/recovery.img", recovery_img.data)
1407
1408  else:
1409    diff_program = ["imgdiff"]
1410    path = os.path.join(input_dir, "SYSTEM", "etc", "recovery-resource.dat")
1411    if os.path.exists(path):
1412      diff_program.append("-b")
1413      diff_program.append(path)
1414      bonus_args = "-b /system/etc/recovery-resource.dat"
1415    else:
1416      bonus_args = ""
1417
1418    d = Difference(recovery_img, boot_img, diff_program=diff_program)
1419    _, _, patch = d.ComputePatch()
1420    output_sink("recovery-from-boot.p", patch)
1421
1422  try:
1423    boot_type, boot_device = GetTypeAndDevice("/boot", info_dict)
1424    recovery_type, recovery_device = GetTypeAndDevice("/recovery", info_dict)
1425  except KeyError:
1426    return
1427
1428  if full_recovery_image:
1429    sh = """#!/system/bin/sh
1430if ! applypatch -c %(type)s:%(device)s:%(size)d:%(sha1)s; then
1431  applypatch /system/etc/recovery.img %(type)s:%(device)s %(sha1)s %(size)d && log -t recovery "Installing new recovery image: succeeded" || log -t recovery "Installing new recovery image: failed"
1432else
1433  log -t recovery "Recovery image already installed"
1434fi
1435""" % {'type': recovery_type,
1436       'device': recovery_device,
1437       'sha1': recovery_img.sha1,
1438       'size': recovery_img.size}
1439  else:
1440    sh = """#!/system/bin/sh
1441if ! applypatch -c %(recovery_type)s:%(recovery_device)s:%(recovery_size)d:%(recovery_sha1)s; then
1442  applypatch %(bonus_args)s %(boot_type)s:%(boot_device)s:%(boot_size)d:%(boot_sha1)s %(recovery_type)s:%(recovery_device)s %(recovery_sha1)s %(recovery_size)d %(boot_sha1)s:/system/recovery-from-boot.p && log -t recovery "Installing new recovery image: succeeded" || log -t recovery "Installing new recovery image: failed"
1443else
1444  log -t recovery "Recovery image already installed"
1445fi
1446""" % {'boot_size': boot_img.size,
1447       'boot_sha1': boot_img.sha1,
1448       'recovery_size': recovery_img.size,
1449       'recovery_sha1': recovery_img.sha1,
1450       'boot_type': boot_type,
1451       'boot_device': boot_device,
1452       'recovery_type': recovery_type,
1453       'recovery_device': recovery_device,
1454       'bonus_args': bonus_args}
1455
1456  # The install script location moved from /system/etc to /system/bin
1457  # in the L release.  Parse init.*.rc files to find out where the
1458  # target-files expects it to be, and put it there.
1459  sh_location = "etc/install-recovery.sh"
1460  found = False
1461  init_rc_dir = os.path.join(input_dir, "BOOT", "RAMDISK")
1462  init_rc_files = os.listdir(init_rc_dir)
1463  for init_rc_file in init_rc_files:
1464    if (not init_rc_file.startswith('init.') or
1465        not init_rc_file.endswith('.rc')):
1466      continue
1467
1468    with open(os.path.join(init_rc_dir, init_rc_file)) as f:
1469      for line in f:
1470        m = re.match(r"^service flash_recovery /system/(\S+)\s*$", line)
1471        if m:
1472          sh_location = m.group(1)
1473          found = True
1474          break
1475
1476    if found:
1477      break
1478
1479  print "putting script in", sh_location
1480
1481  output_sink(sh_location, sh)
1482