common.py revision 903186f938a447e459720f6825382470ebb77e37
1# Copyright (C) 2008 The Android Open Source Project
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7#      http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14
15import copy
16import errno
17import getopt
18import getpass
19import imp
20import os
21import platform
22import re
23import shlex
24import shutil
25import subprocess
26import sys
27import tempfile
28import threading
29import time
30import zipfile
31
32import blockimgdiff
33import rangelib
34
35from hashlib import sha1
36
37
38class Options(object):
39  def __init__(self):
40    platform_search_path = {
41        "linux2": "out/host/linux-x86",
42        "darwin": "out/host/darwin-x86",
43    }
44
45    self.search_path = platform_search_path.get(sys.platform, None)
46    self.signapk_path = "framework/signapk.jar"  # Relative to search_path
47    self.extra_signapk_args = []
48    self.java_path = "java"  # Use the one on the path by default.
49    self.java_args = "-Xmx2048m" # JVM Args
50    self.public_key_suffix = ".x509.pem"
51    self.private_key_suffix = ".pk8"
52    # Use the otatools-built boot_signer by default.
53    self.boot_signer_path = "boot_signer"
54    self.verbose = False
55    self.tempfiles = []
56    self.device_specific = None
57    self.extras = {}
58    self.info_dict = None
59    self.worker_threads = None
60
61
62OPTIONS = Options()
63
64
65# Values for "certificate" in apkcerts that mean special things.
66SPECIAL_CERT_STRINGS = ("PRESIGNED", "EXTERNAL")
67
68
69class ExternalError(RuntimeError):
70  pass
71
72
73def Run(args, **kwargs):
74  """Create and return a subprocess.Popen object, printing the command
75  line on the terminal if -v was specified."""
76  if OPTIONS.verbose:
77    print "  running: ", " ".join(args)
78  return subprocess.Popen(args, **kwargs)
79
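# Illustrative usage of Run() (editor's example, not part of the original
# module): it simply wraps subprocess.Popen, so the usual Popen kwargs apply.
#
#   p = Run(["ls", "-l"], stdout=subprocess.PIPE)
#   out, _ = p.communicate()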
80
81def CloseInheritedPipes():
82  """ Gmake in MAC OS has file descriptor (PIPE) leak. We close those fds
83  before doing other work."""
84  if platform.system() != "Darwin":
85    return
86  for d in range(3, 1025):
87    try:
88      stat = os.fstat(d)
89      if stat is not None:
90        pipebit = stat[0] & 0x1000
91        if pipebit != 0:
92          os.close(d)
93    except OSError:
94      pass
95
96
97def LoadInfoDict(input_file):
98  """Read and parse the META/misc_info.txt key/value pairs from the
99  input target files and return a dict."""
100
101  def read_helper(fn):
102    if isinstance(input_file, zipfile.ZipFile):
103      return input_file.read(fn)
104    else:
105      path = os.path.join(input_file, *fn.split("/"))
106      try:
107        with open(path) as f:
108          return f.read()
109      except IOError as e:
110        if e.errno == errno.ENOENT:
111          raise KeyError(fn)
112  d = {}
113  try:
114    d = LoadDictionaryFromLines(read_helper("META/misc_info.txt").split("\n"))
115  except KeyError:
116    # ok if misc_info.txt doesn't exist
117    pass
118
119  # backwards compatibility: These values used to be in their own
120  # files.  Look for them, in case we're processing an old
121  # target_files zip.
122
123  if "mkyaffs2_extra_flags" not in d:
124    try:
125      d["mkyaffs2_extra_flags"] = read_helper(
126          "META/mkyaffs2-extra-flags.txt").strip()
127    except KeyError:
128      # ok if flags don't exist
129      pass
130
131  if "recovery_api_version" not in d:
132    try:
133      d["recovery_api_version"] = read_helper(
134          "META/recovery-api-version.txt").strip()
135    except KeyError:
136      raise ValueError("can't find recovery API version in input target-files")
137
138  if "tool_extensions" not in d:
139    try:
140      d["tool_extensions"] = read_helper("META/tool-extensions.txt").strip()
141    except KeyError:
142      # ok if extensions don't exist
143      pass
144
145  if "fstab_version" not in d:
146    d["fstab_version"] = "1"
147
148  try:
149    data = read_helper("META/imagesizes.txt")
150    for line in data.split("\n"):
151      if not line:
152        continue
153      name, value = line.split(" ", 1)
154      if not value:
155        continue
156      if name == "blocksize":
157        d[name] = value
158      else:
159        d[name + "_size"] = value
160  except KeyError:
161    pass
162
163  def makeint(key):
164    if key in d:
165      d[key] = int(d[key], 0)
166
167  makeint("recovery_api_version")
168  makeint("blocksize")
169  makeint("system_size")
170  makeint("vendor_size")
171  makeint("userdata_size")
172  makeint("cache_size")
173  makeint("recovery_size")
174  makeint("boot_size")
175  makeint("fstab_version")
176
177  d["fstab"] = LoadRecoveryFSTab(read_helper, d["fstab_version"])
178  d["build.prop"] = LoadBuildProp(read_helper)
179  return d
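# Illustrative usage (editor's example, not from the original module): callers
# typically stash the result in OPTIONS.info_dict, passing either an open
# target-files ZipFile or an unpacked directory.
#
#   input_zip = zipfile.ZipFile("target_files.zip", "r")  # hypothetical path
#   OPTIONS.info_dict = LoadInfoDict(input_zip)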
180
181def LoadBuildProp(read_helper):
182  try:
183    data = read_helper("SYSTEM/build.prop")
184  except KeyError:
185    print "Warning: could not find SYSTEM/build.prop in %s" % zip
186    data = ""
187  return LoadDictionaryFromLines(data.split("\n"))
188
189def LoadDictionaryFromLines(lines):
190  d = {}
191  for line in lines:
192    line = line.strip()
193    if not line or line.startswith("#"):
194      continue
195    if "=" in line:
196      name, value = line.split("=", 1)
197      d[name] = value
198  return d
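# Example of the parsing above (editor's note): blank lines and "#" comments
# are skipped, and only the first "=" splits the key from the value.
#
#   LoadDictionaryFromLines(["ro.build.id=LMY47D", "# comment", ""])
#   => {"ro.build.id": "LMY47D"}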
199
200def LoadRecoveryFSTab(read_helper, fstab_version):
201  class Partition(object):
202    def __init__(self, mount_point, fs_type, device, length, device2):
203      self.mount_point = mount_point
204      self.fs_type = fs_type
205      self.device = device
206      self.length = length
207      self.device2 = device2
208
209  try:
210    data = read_helper("RECOVERY/RAMDISK/etc/recovery.fstab")
211  except KeyError:
212    print "Warning: could not find RECOVERY/RAMDISK/etc/recovery.fstab"
213    data = ""
214
215  if fstab_version == 1:
216    d = {}
217    for line in data.split("\n"):
218      line = line.strip()
219      if not line or line.startswith("#"):
220        continue
221      pieces = line.split()
222      if not 3 <= len(pieces) <= 4:
223        raise ValueError("malformed recovery.fstab line: \"%s\"" % (line,))
224      options = None
225      if len(pieces) >= 4:
226        if pieces[3].startswith("/"):
227          device2 = pieces[3]
228          if len(pieces) >= 5:
229            options = pieces[4]
230        else:
231          device2 = None
232          options = pieces[3]
233      else:
234        device2 = None
235
236      mount_point = pieces[0]
237      length = 0
238      if options:
239        options = options.split(",")
240        for i in options:
241          if i.startswith("length="):
242            length = int(i[7:])
243          else:
244            print "%s: unknown option \"%s\"" % (mount_point, i)
245
246      d[mount_point] = Partition(mount_point=mount_point, fs_type=pieces[1],
247                                 device=pieces[2], length=length,
248                                 device2=device2)
249
250  elif fstab_version == 2:
251    d = {}
252    for line in data.split("\n"):
253      line = line.strip()
254      if not line or line.startswith("#"):
255        continue
256      pieces = line.split()
257      if len(pieces) != 5:
258        raise ValueError("malformed recovery.fstab line: \"%s\"" % (line,))
259
260      # Ignore entries that are managed by vold
261      options = pieces[4]
262      if "voldmanaged=" in options:
263        continue
264
265      # It's a good line, parse it
266      length = 0
267      options = options.split(",")
268      for i in options:
269        if i.startswith("length="):
270          length = int(i[7:])
271        else:
272          # Ignore all unknown options in the unified fstab
273          continue
274
275      mount_point = pieces[1]
276      d[mount_point] = Partition(mount_point=mount_point, fs_type=pieces[2],
277                                 device=pieces[0], length=length, device2=None)
278
279  else:
280    raise ValueError("Unknown fstab_version: \"%d\"" % (fstab_version,))
281
282  return d
283
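# Illustrative fstab v2 entry handled above (editor's example, not from any
# real device); fields are <device> <mount_point> <fs_type> <mnt_flags>
# <fs_mgr_flags>, and voldmanaged entries are skipped:
#
#   /dev/block/platform/msm_sdcc.1/by-name/system  /system  ext4  ro  wait
#
# yields d["/system"] with fs_type "ext4" and the by-name path as device.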
284
285def DumpInfoDict(d):
286  for k, v in sorted(d.items()):
287    print "%-25s = (%s) %s" % (k, type(v).__name__, v)
288
289
290def BuildBootableImage(sourcedir, fs_config_file, info_dict=None):
291  """Take a kernel, cmdline, and ramdisk directory from the input (in
292  'sourcedir'), and turn them into a boot image.  Return the image
293  data, or None if sourcedir does not appear to contain files for
294  building the requested image."""
295
296  if (not os.access(os.path.join(sourcedir, "RAMDISK"), os.F_OK) or
297      not os.access(os.path.join(sourcedir, "kernel"), os.F_OK)):
298    return None
299
300  if info_dict is None:
301    info_dict = OPTIONS.info_dict
302
303  ramdisk_img = tempfile.NamedTemporaryFile()
304  img = tempfile.NamedTemporaryFile()
305
306  if os.access(fs_config_file, os.F_OK):
307    cmd = ["mkbootfs", "-f", fs_config_file, os.path.join(sourcedir, "RAMDISK")]
308  else:
309    cmd = ["mkbootfs", os.path.join(sourcedir, "RAMDISK")]
310  p1 = Run(cmd, stdout=subprocess.PIPE)
311  p2 = Run(["minigzip"],
312           stdin=p1.stdout, stdout=ramdisk_img.file.fileno())
313
314  p2.wait()
315  p1.wait()
316  assert p1.returncode == 0, "mkbootfs of %s ramdisk failed" % (sourcedir,)
317  assert p2.returncode == 0, "minigzip of %s ramdisk failed" % (sourcedir,)
318
319  # use MKBOOTIMG from environ, or "mkbootimg" if empty or not set
320  mkbootimg = os.getenv('MKBOOTIMG') or "mkbootimg"
321
322  cmd = [mkbootimg, "--kernel", os.path.join(sourcedir, "kernel")]
323
324  fn = os.path.join(sourcedir, "second")
325  if os.access(fn, os.F_OK):
326    cmd.append("--second")
327    cmd.append(fn)
328
329  fn = os.path.join(sourcedir, "cmdline")
330  if os.access(fn, os.F_OK):
331    cmd.append("--cmdline")
332    cmd.append(open(fn).read().rstrip("\n"))
333
334  fn = os.path.join(sourcedir, "base")
335  if os.access(fn, os.F_OK):
336    cmd.append("--base")
337    cmd.append(open(fn).read().rstrip("\n"))
338
339  fn = os.path.join(sourcedir, "pagesize")
340  if os.access(fn, os.F_OK):
341    cmd.append("--pagesize")
342    cmd.append(open(fn).read().rstrip("\n"))
343
344  args = info_dict.get("mkbootimg_args", None)
345  if args and args.strip():
346    cmd.extend(shlex.split(args))
347
348  img_unsigned = None
349  if info_dict.get("vboot", None):
350    img_unsigned = tempfile.NamedTemporaryFile()
351    cmd.extend(["--ramdisk", ramdisk_img.name,
352                "--output", img_unsigned.name])
353  else:
354    cmd.extend(["--ramdisk", ramdisk_img.name,
355                "--output", img.name])
356
357  p = Run(cmd, stdout=subprocess.PIPE)
358  p.communicate()
359  assert p.returncode == 0, "mkbootimg of %s image failed" % (
360      os.path.basename(sourcedir),)
361
362  if (info_dict.get("boot_signer", None) == "true" and
363      info_dict.get("verity_key", None)):
364    path = "/" + os.path.basename(sourcedir).lower()
365    cmd = [OPTIONS.boot_signer_path, path, img.name,
366           info_dict["verity_key"] + ".pk8",
367           info_dict["verity_key"] + ".x509.pem", img.name]
368    p = Run(cmd, stdout=subprocess.PIPE)
369    p.communicate()
370    assert p.returncode == 0, "boot_signer of %s image failed" % path
371
372  # Sign the image if vboot is non-empty.
373  elif info_dict.get("vboot", None):
374    path = "/" + os.path.basename(sourcedir).lower()
375    img_keyblock = tempfile.NamedTemporaryFile()
376    cmd = [info_dict["vboot_signer_cmd"], info_dict["futility"],
377           img_unsigned.name, info_dict["vboot_key"] + ".vbpubk",
378           info_dict["vboot_key"] + ".vbprivk", img_keyblock.name,
379           img.name]
380    p = Run(cmd, stdout=subprocess.PIPE)
381    p.communicate()
382    assert p.returncode == 0, "vboot_signer of %s image failed" % path
383
384    # Clean up the temp files.
385    img_unsigned.close()
386    img_keyblock.close()
387
388  img.seek(0, os.SEEK_SET)
389  data = img.read()
390
391  ramdisk_img.close()
392  img.close()
393
394  return data
395
396
397def GetBootableImage(name, prebuilt_name, unpack_dir, tree_subdir,
398                     info_dict=None):
399  """Return a File object (with name 'name') with the desired bootable
400  image.  Look for it in 'unpack_dir'/BOOTABLE_IMAGES under the name
401  'prebuilt_name', otherwise look for it under 'unpack_dir'/IMAGES,
402  otherwise construct it from the source files in
403  'unpack_dir'/'tree_subdir'."""
404
405  prebuilt_path = os.path.join(unpack_dir, "BOOTABLE_IMAGES", prebuilt_name)
406  if os.path.exists(prebuilt_path):
407    print "using prebuilt %s from BOOTABLE_IMAGES..." % (prebuilt_name,)
408    return File.FromLocalFile(name, prebuilt_path)
409
410  prebuilt_path = os.path.join(unpack_dir, "IMAGES", prebuilt_name)
411  if os.path.exists(prebuilt_path):
412    print "using prebuilt %s from IMAGES..." % (prebuilt_name,)
413    return File.FromLocalFile(name, prebuilt_path)
414
415  print "building image from target_files %s..." % (tree_subdir,)
416  fs_config = "META/" + tree_subdir.lower() + "_filesystem_config.txt"
417  data = BuildBootableImage(os.path.join(unpack_dir, tree_subdir),
418                            os.path.join(unpack_dir, fs_config),
419                            info_dict)
420  if data:
421    return File(name, data)
422  return None
423
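# Illustrative calls (editor's example; this mirrors how the OTA scripts use
# the helper, with unpack_dir being the unzipped target-files directory):
#
#   boot_img = GetBootableImage("boot.img", "boot.img", unpack_dir, "BOOT")
#   recovery_img = GetBootableImage("recovery.img", "recovery.img",
#                                   unpack_dir, "RECOVERY")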
424
425def UnzipTemp(filename, pattern=None):
426  """Unzip the given archive into a temporary directory and return the name.
427
428  If filename is of the form "foo.zip+bar.zip", unzip foo.zip into a
429  temp dir, then unzip bar.zip into that_dir/BOOTABLE_IMAGES.
430
431  Returns (tempdir, zipobj) where zipobj is a zipfile.ZipFile (of the
432  main file), open for reading.
433  """
434
435  tmp = tempfile.mkdtemp(prefix="targetfiles-")
436  OPTIONS.tempfiles.append(tmp)
437
438  def unzip_to_dir(filename, dirname):
439    cmd = ["unzip", "-o", "-q", filename, "-d", dirname]
440    if pattern is not None:
441      cmd.append(pattern)
442    p = Run(cmd, stdout=subprocess.PIPE)
443    p.communicate()
444    if p.returncode != 0:
445      raise ExternalError("failed to unzip input target-files \"%s\"" %
446                          (filename,))
447
448  m = re.match(r"^(.*[.]zip)\+(.*[.]zip)$", filename, re.IGNORECASE)
449  if m:
450    unzip_to_dir(m.group(1), tmp)
451    unzip_to_dir(m.group(2), os.path.join(tmp, "BOOTABLE_IMAGES"))
452    filename = m.group(1)
453  else:
454    unzip_to_dir(filename, tmp)
455
456  return tmp, zipfile.ZipFile(filename, "r")
457
458
459def GetKeyPasswords(keylist):
460  """Given a list of keys, prompt the user to enter passwords for
461  those which require them.  Return a {key: password} dict.  password
462  will be None if the key has no password."""
463
464  no_passwords = []
465  need_passwords = []
466  key_passwords = {}
467  devnull = open("/dev/null", "w+b")
468  for k in sorted(keylist):
469    # We don't need a password for things that aren't really keys.
470    if k in SPECIAL_CERT_STRINGS:
471      no_passwords.append(k)
472      continue
473
474    p = Run(["openssl", "pkcs8", "-in", k+OPTIONS.private_key_suffix,
475             "-inform", "DER", "-nocrypt"],
476            stdin=devnull.fileno(),
477            stdout=devnull.fileno(),
478            stderr=subprocess.STDOUT)
479    p.communicate()
480    if p.returncode == 0:
481      # Definitely an unencrypted key.
482      no_passwords.append(k)
483    else:
484      p = Run(["openssl", "pkcs8", "-in", k+OPTIONS.private_key_suffix,
485               "-inform", "DER", "-passin", "pass:"],
486              stdin=devnull.fileno(),
487              stdout=devnull.fileno(),
488              stderr=subprocess.PIPE)
489      _, stderr = p.communicate()
490      if p.returncode == 0:
491        # Encrypted key with empty string as password.
492        key_passwords[k] = ''
493      elif stderr.startswith('Error decrypting key'):
494        # Definitely encrypted key.
495        # It would have said "Error reading key" if it didn't parse correctly.
496        need_passwords.append(k)
497      else:
498        # Potentially, a type of key that openssl doesn't understand.
499        # We'll let the routines in signapk.jar handle it.
500        no_passwords.append(k)
501  devnull.close()
502
503  key_passwords.update(PasswordManager().GetPasswords(need_passwords))
504  key_passwords.update(dict.fromkeys(no_passwords, None))
505  return key_passwords
506
507
508def SignFile(input_name, output_name, key, password, align=None,
509             whole_file=False):
510  """Sign the input_name zip/jar/apk, producing output_name.  Use the
511  given key and password (the latter may be None if the key does not
512  have a password).
513
514  If align is an integer > 1, zipalign is run to align stored files in
515  the output zip on 'align'-byte boundaries.
516
517  If whole_file is true, use the "-w" option to SignApk to embed a
518  signature that covers the whole file in the archive comment of the
519  zip file.
520  """
521
522  if align == 0 or align == 1:
523    align = None
524
525  if align:
526    temp = tempfile.NamedTemporaryFile()
527    sign_name = temp.name
528  else:
529    sign_name = output_name
530
531  cmd = [OPTIONS.java_path, OPTIONS.java_args, "-jar",
532         os.path.join(OPTIONS.search_path, OPTIONS.signapk_path)]
533  cmd.extend(OPTIONS.extra_signapk_args)
534  if whole_file:
535    cmd.append("-w")
536  cmd.extend([key + OPTIONS.public_key_suffix,
537              key + OPTIONS.private_key_suffix,
538              input_name, sign_name])
539
540  p = Run(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
541  if password is not None:
542    password += "\n"
543  p.communicate(password)
544  if p.returncode != 0:
545    raise ExternalError("signapk.jar failed: return code %s" % (p.returncode,))
546
547  if align:
548    p = Run(["zipalign", "-f", "-p", str(align), sign_name, output_name])
549    p.communicate()
550    if p.returncode != 0:
551      raise ExternalError("zipalign failed: return code %s" % (p.returncode,))
552    temp.close()
553
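# Illustrative call (editor's example; the key path is hypothetical and the
# suffixes in OPTIONS are appended automatically):
#
#   SignFile("unsigned.apk", "signed.apk",
#            "build/target/product/security/testkey", password=None)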
554
555def CheckSize(data, target, info_dict):
556  """Check the data string passed against the max size limit, if
557  any, for the given target.  Raise an exception if the data is too big.
558  Print a warning if the data is nearing the maximum size."""
559
560  if target.endswith(".img"):
561    target = target[:-4]
562  mount_point = "/" + target
563
564  fs_type = None
565  limit = None
566  if info_dict["fstab"]:
567    if mount_point == "/userdata":
568      mount_point = "/data"
569    p = info_dict["fstab"][mount_point]
570    fs_type = p.fs_type
571    device = p.device
572    if "/" in device:
573      device = device[device.rfind("/")+1:]
574    limit = info_dict.get(device + "_size", None)
575  if not fs_type or not limit:
576    return
577
578  if fs_type == "yaffs2":
579    # The limit is increased by 1/32 (64 spare bytes per 2048-byte page)
580    # to account for the yaffs2 spare area.
581    limit = limit / 2048 * (2048+64)
582  size = len(data)
583  pct = float(size) * 100.0 / limit
584  msg = "%s size (%d) is %.2f%% of limit (%d)" % (target, size, pct, limit)
585  if pct >= 99.0:
586    raise ExternalError(msg)
587  elif pct >= 95.0:
588    print
589    print "  WARNING: ", msg
590    print
591  elif OPTIONS.verbose:
592    print "  ", msg
593
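# Worked example of the yaffs2 adjustment above (editor's note): with a raw
# limit of 64 MiB, limit = 67108864 / 2048 * (2048 + 64) = 69206016 bytes,
# i.e. the limit grows by 1/32 (64 spare bytes per 2048-byte page).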
594
595def ReadApkCerts(tf_zip):
596  """Given a target_files ZipFile, parse the META/apkcerts.txt file
597  and return a {package: cert} dict."""
598  certmap = {}
599  for line in tf_zip.read("META/apkcerts.txt").split("\n"):
600    line = line.strip()
601    if not line:
602      continue
603    m = re.match(r'^name="(.*)"\s+certificate="(.*)"\s+'
604                 r'private_key="(.*)"$', line)
605    if m:
606      name, cert, privkey = m.groups()
607      public_key_suffix_len = len(OPTIONS.public_key_suffix)
608      private_key_suffix_len = len(OPTIONS.private_key_suffix)
609      if cert in SPECIAL_CERT_STRINGS and not privkey:
610        certmap[name] = cert
611      elif (cert.endswith(OPTIONS.public_key_suffix) and
612            privkey.endswith(OPTIONS.private_key_suffix) and
613            cert[:-public_key_suffix_len] == privkey[:-private_key_suffix_len]):
614        certmap[name] = cert[:-public_key_suffix_len]
615      else:
616        raise ValueError("failed to parse line from apkcerts.txt:\n" + line)
617  return certmap
618
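# Illustrative apkcerts.txt entry matched by the regex above (editor's
# example):
#
#   name="Browser.apk" certificate="build/target/product/security/platform.x509.pem" private_key="build/target/product/security/platform.pk8"
#
# maps "Browser.apk" to "build/target/product/security/platform".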
619
620COMMON_DOCSTRING = """
621  -p  (--path)  <dir>
622      Prepend <dir>/bin to the list of places to search for binaries
623      run by this script, and expect to find jars in <dir>/framework.
624
625  -s  (--device_specific) <file>
626      Path to the python module containing device-specific
627      releasetools code.
628
629  -x  (--extra)  <key=value>
630      Add a key/value pair to the 'extras' dict, which device-specific
631      extension code may look at.
632
633  -v  (--verbose)
634      Show command lines being executed.
635
636  -h  (--help)
637      Display this usage message and exit.
638"""
639
640def Usage(docstring):
641  print docstring.rstrip("\n")
642  print COMMON_DOCSTRING
643
644
645def ParseOptions(argv,
646                 docstring,
647                 extra_opts="", extra_long_opts=(),
648                 extra_option_handler=None):
649  """Parse the options in argv and return any arguments that aren't
650  flags.  docstring is the calling module's docstring, to be displayed
651  for errors and -h.  extra_opts and extra_long_opts are for flags
652  defined by the caller, which are processed by passing them to
653  extra_option_handler."""
654
655  try:
656    opts, args = getopt.getopt(
657        argv, "hvp:s:x:" + extra_opts,
658        ["help", "verbose", "path=", "signapk_path=", "extra_signapk_args=",
659         "java_path=", "java_args=", "public_key_suffix=",
660         "private_key_suffix=", "boot_signer_path=", "device_specific=",
661         "extra="] +
662        list(extra_long_opts))
663  except getopt.GetoptError as err:
664    Usage(docstring)
665    print "**", str(err), "**"
666    sys.exit(2)
667
668  for o, a in opts:
669    if o in ("-h", "--help"):
670      Usage(docstring)
671      sys.exit()
672    elif o in ("-v", "--verbose"):
673      OPTIONS.verbose = True
674    elif o in ("-p", "--path"):
675      OPTIONS.search_path = a
676    elif o in ("--signapk_path",):
677      OPTIONS.signapk_path = a
678    elif o in ("--extra_signapk_args",):
679      OPTIONS.extra_signapk_args = shlex.split(a)
680    elif o in ("--java_path",):
681      OPTIONS.java_path = a
682    elif o in ("--java_args",):
683      OPTIONS.java_args = a
684    elif o in ("--public_key_suffix",):
685      OPTIONS.public_key_suffix = a
686    elif o in ("--private_key_suffix",):
687      OPTIONS.private_key_suffix = a
688    elif o in ("--boot_signer_path",):
689      OPTIONS.boot_signer_path = a
690    elif o in ("-s", "--device_specific"):
691      OPTIONS.device_specific = a
692    elif o in ("-x", "--extra"):
693      key, value = a.split("=", 1)
694      OPTIONS.extras[key] = value
695    else:
696      if extra_option_handler is None or not extra_option_handler(o, a):
697        assert False, "unknown option \"%s\"" % (o,)
698
699  if OPTIONS.search_path:
700    os.environ["PATH"] = (os.path.join(OPTIONS.search_path, "bin") +
701                          os.pathsep + os.environ["PATH"])
702
703  return args
704
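# Illustrative call (editor's example; the extra option names are made up).
# extra_option_handler must return a true value for options it accepts.
#
#   def option_handler(o, a):
#     if o in ("-b", "--board_config"):
#       OPTIONS.extras["board_config"] = a
#     else:
#       return False
#     return True
#
#   args = ParseOptions(sys.argv[1:], __doc__, extra_opts="b:",
#                       extra_long_opts=["board_config="],
#                       extra_option_handler=option_handler)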
705
706def MakeTempFile(prefix=None, suffix=None):
707  """Make a temp file and add it to the list of things to be deleted
708  when Cleanup() is called.  Return the filename."""
709  fd, fn = tempfile.mkstemp(prefix=prefix, suffix=suffix)
710  os.close(fd)
711  OPTIONS.tempfiles.append(fn)
712  return fn
713
714
715def Cleanup():
716  for i in OPTIONS.tempfiles:
717    if os.path.isdir(i):
718      shutil.rmtree(i)
719    else:
720      os.remove(i)
721
722
723class PasswordManager(object):
724  def __init__(self):
725    self.editor = os.getenv("EDITOR", None)
726    self.pwfile = os.getenv("ANDROID_PW_FILE", None)
727
728  def GetPasswords(self, items):
729    """Get passwords corresponding to each string in 'items',
730    returning a dict.  (The dict may have keys in addition to the
731    values in 'items'.)
732
733    Uses the passwords in $ANDROID_PW_FILE if available, letting the
734    user edit that file to add more needed passwords.  If no editor is
735    available, or $ANDROID_PW_FILE isn't defined, prompts the user
736    interactively in the ordinary way.
737    """
738
739    current = self.ReadFile()
740
741    first = True
742    while True:
743      missing = []
744      for i in items:
745        if i not in current or not current[i]:
746          missing.append(i)
747      # Are all the passwords already in the file?
748      if not missing:
749        return current
750
751      for i in missing:
752        current[i] = ""
753
754      if not first:
755        print "key file %s still missing some passwords." % (self.pwfile,)
756        answer = raw_input("try to edit again? [y]> ").strip()
757        if answer and answer[0] not in 'yY':
758          raise RuntimeError("key passwords unavailable")
759      first = False
760
761      current = self.UpdateAndReadFile(current)
762
763  def PromptResult(self, current): # pylint: disable=no-self-use
764    """Prompt the user to enter a value (password) for each key in
765    'current' whose value is false.  Returns a new dict with all the
766    values.
767    """
768    result = {}
769    for k, v in sorted(current.iteritems()):
770      if v:
771        result[k] = v
772      else:
773        while True:
774          result[k] = getpass.getpass(
775              "Enter password for %s key> " % k).strip()
776          if result[k]:
777            break
778    return result
779
780  def UpdateAndReadFile(self, current):
781    if not self.editor or not self.pwfile:
782      return self.PromptResult(current)
783
784    f = open(self.pwfile, "w")
785    os.chmod(self.pwfile, 0o600)
786    f.write("# Enter key passwords between the [[[ ]]] brackets.\n")
787    f.write("# (Additional spaces are harmless.)\n\n")
788
789    first_line = None
790    sorted_list = sorted([(not v, k, v) for (k, v) in current.iteritems()])
791    for i, (_, k, v) in enumerate(sorted_list):
792      f.write("[[[  %s  ]]] %s\n" % (v, k))
793      if not v and first_line is None:
794        # position cursor on first line with no password.
795        first_line = i + 4
796    f.close()
797
798    p = Run([self.editor, "+%d" % (first_line,), self.pwfile])
799    _, _ = p.communicate()
800
801    return self.ReadFile()
802
803  def ReadFile(self):
804    result = {}
805    if self.pwfile is None:
806      return result
807    try:
808      f = open(self.pwfile, "r")
809      for line in f:
810        line = line.strip()
811        if not line or line[0] == '#':
812          continue
813        m = re.match(r"^\[\[\[\s*(.*?)\s*\]\]\]\s*(\S+)$", line)
814        if not m:
815          print "failed to parse password file: ", line
816        else:
817          result[m.group(2)] = m.group(1)
818      f.close()
819    except IOError as e:
820      if e.errno != errno.ENOENT:
821        print "error reading password file: ", str(e)
822    return result
823
824
825def ZipWrite(zip_file, filename, arcname=None, perms=0o644,
826             compress_type=None):
827  import datetime
828
829  # http://b/18015246
830  # Python 2.7's zipfile implementation wrongly thinks that zip64 is required
831  # for files larger than 2GiB. We can work around this by adjusting their
832  # limit. Note that `zipfile.writestr()` will not work for strings larger than
833  # 2GiB. The Python interpreter sometimes rejects strings that large (though
834  # it isn't clear to me exactly what circumstances cause this).
835  # `zipfile.write()` must be used directly to work around this.
836  #
837  # This mess can be avoided if we port to python3.
838  saved_zip64_limit = zipfile.ZIP64_LIMIT
839  zipfile.ZIP64_LIMIT = (1 << 32) - 1
840
841  if compress_type is None:
842    compress_type = zip_file.compression
843  if arcname is None:
844    arcname = filename
845
846  saved_stat = os.stat(filename)
847
848  try:
849    # `zipfile.write()` doesn't allow us to pass ZipInfo, so just modify the
850    # file to be zipped and reset it when we're done.
851    os.chmod(filename, perms)
852
853    # Use a fixed timestamp so the output is repeatable.
854    epoch = datetime.datetime.fromtimestamp(0)
855    timestamp = (datetime.datetime(2009, 1, 1) - epoch).total_seconds()
856    os.utime(filename, (timestamp, timestamp))
857
858    zip_file.write(filename, arcname=arcname, compress_type=compress_type)
859  finally:
860    os.chmod(filename, saved_stat.st_mode)
861    os.utime(filename, (saved_stat.st_atime, saved_stat.st_mtime))
862    zipfile.ZIP64_LIMIT = saved_zip64_limit
863
864
865def ZipWriteStr(zip_file, zinfo_or_arcname, data, perms=None,
866                compress_type=None):
867  """Wrap zipfile.writestr() function to work around the zip64 limit.
868
869  Even with the ZIP64_LIMIT workaround, it won't allow writing a string
870  longer than 2GiB. It gives 'OverflowError: size does not fit in an int'
871  when calling crc32(bytes).
872
873  But it still works fine to write a shorter string into a large zip file.
874  We should use ZipWrite() whenever possible, and only use ZipWriteStr()
875  when we know the string won't be too long.
876  """
877
878  saved_zip64_limit = zipfile.ZIP64_LIMIT
879  zipfile.ZIP64_LIMIT = (1 << 32) - 1
880
881  if not isinstance(zinfo_or_arcname, zipfile.ZipInfo):
882    zinfo = zipfile.ZipInfo(filename=zinfo_or_arcname)
883    zinfo.compress_type = zip_file.compression
884    if perms is None:
885      perms = 0o644
886  else:
887    zinfo = zinfo_or_arcname
888
889  # If compress_type is given, it overrides the value in zinfo.
890  if compress_type is not None:
891    zinfo.compress_type = compress_type
892
893  # If perms is given, it has a priority.
894  if perms is not None:
895    zinfo.external_attr = perms << 16
896
897  # Use a fixed timestamp so the output is repeatable.
898  zinfo.date_time = (2009, 1, 1, 0, 0, 0)
899
900  zip_file.writestr(zinfo, data)
901  zipfile.ZIP64_LIMIT = saved_zip64_limit
902
903
904def ZipClose(zip_file):
905  # http://b/18015246
906  # zipfile also refers to ZIP64_LIMIT during close() when it writes out the
907  # central directory.
908  saved_zip64_limit = zipfile.ZIP64_LIMIT
909  zipfile.ZIP64_LIMIT = (1 << 32) - 1
910
911  zip_file.close()
912
913  zipfile.ZIP64_LIMIT = saved_zip64_limit
914
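# Illustrative use of the three zip helpers above (editor's example; the file
# names are hypothetical):
#
#   z = zipfile.ZipFile("update.zip", "w", zipfile.ZIP_DEFLATED)
#   ZipWrite(z, "/tmp/boot.img", arcname="boot.img",
#            compress_type=zipfile.ZIP_STORED)
#   ZipWriteStr(z, "META-INF/com/android/metadata", "ota-type=BLOCK\n")
#   ZipClose(z)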
915
916class DeviceSpecificParams(object):
917  module = None
918  def __init__(self, **kwargs):
919    """Keyword arguments to the constructor become attributes of this
920    object, which is passed to all functions in the device-specific
921    module."""
922    for k, v in kwargs.iteritems():
923      setattr(self, k, v)
924    self.extras = OPTIONS.extras
925
926    if self.module is None:
927      path = OPTIONS.device_specific
928      if not path:
929        return
930      try:
931        if os.path.isdir(path):
932          info = imp.find_module("releasetools", [path])
933        else:
934          d, f = os.path.split(path)
935          b, x = os.path.splitext(f)
936          if x == ".py":
937            f = b
938          info = imp.find_module(f, [d])
939        print "loaded device-specific extensions from", path
940        self.module = imp.load_module("device_specific", *info)
941      except ImportError:
942        print "unable to load device-specific module; assuming none"
943
944  def _DoCall(self, function_name, *args, **kwargs):
945    """Call the named function in the device-specific module, passing
946    the given args and kwargs.  The first argument to the call will be
947    the DeviceSpecific object itself.  If there is no module, or the
948    module does not define the function, return the value of the
949    'default' kwarg (which itself defaults to None)."""
950    if self.module is None or not hasattr(self.module, function_name):
951      return kwargs.get("default", None)
952    return getattr(self.module, function_name)(*((self,) + args), **kwargs)
953
954  def FullOTA_Assertions(self):
955    """Called after emitting the block of assertions at the top of a
956    full OTA package.  Implementations can add whatever additional
957    assertions they like."""
958    return self._DoCall("FullOTA_Assertions")
959
960  def FullOTA_InstallBegin(self):
961    """Called at the start of full OTA installation."""
962    return self._DoCall("FullOTA_InstallBegin")
963
964  def FullOTA_InstallEnd(self):
965    """Called at the end of full OTA installation; typically this is
966    used to install the image for the device's baseband processor."""
967    return self._DoCall("FullOTA_InstallEnd")
968
969  def IncrementalOTA_Assertions(self):
970    """Called after emitting the block of assertions at the top of an
971    incremental OTA package.  Implementations can add whatever
972    additional assertions they like."""
973    return self._DoCall("IncrementalOTA_Assertions")
974
975  def IncrementalOTA_VerifyBegin(self):
976    """Called at the start of the verification phase of incremental
977    OTA installation; additional checks can be placed here to abort
978    the script before any changes are made."""
979    return self._DoCall("IncrementalOTA_VerifyBegin")
980
981  def IncrementalOTA_VerifyEnd(self):
982    """Called at the end of the verification phase of incremental OTA
983    installation; additional checks can be placed here to abort the
984    script before any changes are made."""
985    return self._DoCall("IncrementalOTA_VerifyEnd")
986
987  def IncrementalOTA_InstallBegin(self):
988    """Called at the start of incremental OTA installation (after
989    verification is complete)."""
990    return self._DoCall("IncrementalOTA_InstallBegin")
991
992  def IncrementalOTA_InstallEnd(self):
993    """Called at the end of incremental OTA installation; typically
994    this is used to install the image for the device's baseband
995    processor."""
996    return self._DoCall("IncrementalOTA_InstallEnd")
997
998class File(object):
999  def __init__(self, name, data):
1000    self.name = name
1001    self.data = data
1002    self.size = len(data)
1003    self.sha1 = sha1(data).hexdigest()
1004
1005  @classmethod
1006  def FromLocalFile(cls, name, diskname):
1007    f = open(diskname, "rb")
1008    data = f.read()
1009    f.close()
1010    return File(name, data)
1011
1012  def WriteToTemp(self):
1013    t = tempfile.NamedTemporaryFile()
1014    t.write(self.data)
1015    t.flush()
1016    return t
1017
1018  def AddToZip(self, z, compression=None):
1019    ZipWriteStr(z, self.name, self.data, compress_type=compression)
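# Illustrative usage of File (editor's example; the path is hypothetical):
#
#   f = File.FromLocalFile("boot.img", "/tmp/boot.img")
#   print f.size, f.sha1
#   t = f.WriteToTemp()   # NamedTemporaryFile holding f.data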
1020
1021DIFF_PROGRAM_BY_EXT = {
1022    ".gz" : "imgdiff",
1023    ".zip" : ["imgdiff", "-z"],
1024    ".jar" : ["imgdiff", "-z"],
1025    ".apk" : ["imgdiff", "-z"],
1026    ".img" : "imgdiff",
1027    }
1028
1029class Difference(object):
1030  def __init__(self, tf, sf, diff_program=None):
1031    self.tf = tf
1032    self.sf = sf
1033    self.patch = None
1034    self.diff_program = diff_program
1035
1036  def ComputePatch(self):
1037    """Compute the patch (as a string of data) needed to turn sf into
1038    tf.  Returns the same tuple as GetPatch()."""
1039
1040    tf = self.tf
1041    sf = self.sf
1042
1043    if self.diff_program:
1044      diff_program = self.diff_program
1045    else:
1046      ext = os.path.splitext(tf.name)[1]
1047      diff_program = DIFF_PROGRAM_BY_EXT.get(ext, "bsdiff")
1048
1049    ttemp = tf.WriteToTemp()
1050    stemp = sf.WriteToTemp()
1051
1052    ext = os.path.splitext(tf.name)[1]
1053
1054    try:
1055      ptemp = tempfile.NamedTemporaryFile()
1056      if isinstance(diff_program, list):
1057        cmd = copy.copy(diff_program)
1058      else:
1059        cmd = [diff_program]
1060      cmd.append(stemp.name)
1061      cmd.append(ttemp.name)
1062      cmd.append(ptemp.name)
1063      p = Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
1064      err = []
1065      def run():
1066        _, e = p.communicate()
1067        if e:
1068          err.append(e)
1069      th = threading.Thread(target=run)
1070      th.start()
1071      th.join(timeout=300)   # 5 mins
1072      if th.is_alive():
1073        print "WARNING: diff command timed out"
1074        p.terminate()
1075        th.join(5)
1076        if th.is_alive():
1077          p.kill()
1078          th.join()
1079
1080      if err or p.returncode != 0:
1081        print "WARNING: failure running %s:\n%s\n" % (
1082            diff_program, "".join(err))
1083        self.patch = None
1084        return None, None, None
1085      diff = ptemp.read()
1086    finally:
1087      ptemp.close()
1088      stemp.close()
1089      ttemp.close()
1090
1091    self.patch = diff
1092    return self.tf, self.sf, self.patch
1093
1094
1095  def GetPatch(self):
1096    """Return a tuple (target_file, source_file, patch_data).
1097    patch_data may be None if ComputePatch hasn't been called, or if
1098    computing the patch failed."""
1099    return self.tf, self.sf, self.patch
1100
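# Illustrative usage of Difference (editor's example): compute an imgdiff or
# bsdiff patch that turns a source File into a target File.
#
#   d = Difference(target_file, source_file)
#   tf, sf, patch = d.ComputePatch()   # patch is None if diffing failed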
1101
1102def ComputeDifferences(diffs):
1103  """Call ComputePatch on all the Difference objects in 'diffs'."""
1104  print len(diffs), "diffs to compute"
1105
1106  # Do the largest files first, to try and reduce the long-pole effect.
1107  by_size = [(i.tf.size, i) for i in diffs]
1108  by_size.sort(reverse=True)
1109  by_size = [i[1] for i in by_size]
1110
1111  lock = threading.Lock()
1112  diff_iter = iter(by_size)   # accessed under lock
1113
1114  def worker():
1115    try:
1116      lock.acquire()
1117      for d in diff_iter:
1118        lock.release()
1119        start = time.time()
1120        d.ComputePatch()
1121        dur = time.time() - start
1122        lock.acquire()
1123
1124        tf, sf, patch = d.GetPatch()
1125        if sf.name == tf.name:
1126          name = tf.name
1127        else:
1128          name = "%s (%s)" % (tf.name, sf.name)
1129        if patch is None:
1130          print "patching failed!                                  %s" % (name,)
1131        else:
1132          print "%8.2f sec %8d / %8d bytes (%6.2f%%) %s" % (
1133              dur, len(patch), tf.size, 100.0 * len(patch) / tf.size, name)
1134      lock.release()
1135    except Exception as e:
1136      print e
1137      raise
1138
1139  # start worker threads; wait for them all to finish.
1140  threads = [threading.Thread(target=worker)
1141             for i in range(OPTIONS.worker_threads)]
1142  for th in threads:
1143    th.start()
1144  while threads:
1145    threads.pop().join()
1146
1147
1148class BlockDifference(object):
1149  def __init__(self, partition, tgt, src=None, check_first_block=False,
1150               version=None):
1151    self.tgt = tgt
1152    self.src = src
1153    self.partition = partition
1154    self.check_first_block = check_first_block
1155
1156    # Due to http://b/20939131, check_first_block is disabled temporarily.
1157    assert not self.check_first_block
1158
1159    if version is None:
1160      version = 1
1161      if OPTIONS.info_dict:
1162        version = max(
1163            int(i) for i in
1164            OPTIONS.info_dict.get("blockimgdiff_versions", "1").split(","))
1165    self.version = version
1166
1167    b = blockimgdiff.BlockImageDiff(tgt, src, threads=OPTIONS.worker_threads,
1168                                    version=self.version)
1169    tmpdir = tempfile.mkdtemp()
1170    OPTIONS.tempfiles.append(tmpdir)
1171    self.path = os.path.join(tmpdir, partition)
1172    b.Compute(self.path)
1173
1174    _, self.device = GetTypeAndDevice("/" + partition, OPTIONS.info_dict)
1175
1176  def WriteScript(self, script, output_zip, progress=None):
1177    if not self.src:
1178      # write the output unconditionally
1179      script.Print("Patching %s image unconditionally..." % (self.partition,))
1180    else:
1181      script.Print("Patching %s image after verification." % (self.partition,))
1182
1183    if progress:
1184      script.ShowProgress(progress, 0)
1185    self._WriteUpdate(script, output_zip)
1186
1187  def WriteVerifyScript(self, script):
1188    partition = self.partition
1189    if not self.src:
1190      script.Print("Image %s will be patched unconditionally." % (partition,))
1191    else:
1192      ranges = self.src.care_map.subtract(self.src.clobbered_blocks)
1193      ranges_str = ranges.to_string_raw()
1194      if self.version >= 3:
1195        script.AppendExtra(('if (range_sha1("%s", "%s") == "%s" || '
1196                            'block_image_verify("%s", '
1197                            'package_extract_file("%s.transfer.list"), '
1198                            '"%s.new.dat", "%s.patch.dat")) then') % (
1199                            self.device, ranges_str, self.src.TotalSha1(),
1200                            self.device, partition, partition, partition))
1201      else:
1202        script.AppendExtra('if range_sha1("%s", "%s") == "%s" then' % (
1203                           self.device, ranges_str, self.src.TotalSha1()))
1204      script.Print('Verified %s image...' % (partition,))
1205      script.AppendExtra('else')
1206
1207      # When generating incrementals for the system and vendor partitions,
1208      # explicitly check the first block (which contains the superblock) of
1209      # the partition to see if it's what we expect. If this check fails,
1210      # give an explicit log message about the partition having been
1211      # remounted R/W (the most likely explanation) and the need to flash to
1212      # get OTAs working again.
1213      if self.check_first_block:
1214        self._CheckFirstBlock(script)
1215
1216      # Abort the OTA update. Note that the incremental OTA cannot be applied
1217      # even if it may match the checksum of the target partition.
1218      # a) If version < 3, operations like move and erase will make changes
1219      #    unconditionally and damage the partition.
1220      # b) If version >= 3, it won't even reach here.
1221      script.AppendExtra(('abort("%s partition has unexpected contents");\n'
1222                          'endif;') % (partition,))
1223
1224  def _WriteUpdate(self, script, output_zip):
1225    ZipWrite(output_zip,
1226             '{}.transfer.list'.format(self.path),
1227             '{}.transfer.list'.format(self.partition))
1228    ZipWrite(output_zip,
1229             '{}.new.dat'.format(self.path),
1230             '{}.new.dat'.format(self.partition))
1231    ZipWrite(output_zip,
1232             '{}.patch.dat'.format(self.path),
1233             '{}.patch.dat'.format(self.partition),
1234             compress_type=zipfile.ZIP_STORED)
1235
1236    call = ('block_image_update("{device}", '
1237            'package_extract_file("{partition}.transfer.list"), '
1238            '"{partition}.new.dat", "{partition}.patch.dat");\n'.format(
1239                device=self.device, partition=self.partition))
1240    script.AppendExtra(script.WordWrap(call))
1241
1242  def _HashBlocks(self, source, ranges): # pylint: disable=no-self-use
1243    data = source.ReadRangeSet(ranges)
1244    ctx = sha1()
1245
1246    for p in data:
1247      ctx.update(p)
1248
1249    return ctx.hexdigest()
1250
1251  # TODO(tbao): Due to http://b/20939131, block 0 may be changed without
1252  # remounting R/W. Will change the checking to a finer-grained way to
1253  # mask off those bits.
1254  def _CheckFirstBlock(self, script):
1255    r = rangelib.RangeSet((0, 1))
1256    srchash = self._HashBlocks(self.src, r)
1257
1258    script.AppendExtra(('(range_sha1("%s", "%s") == "%s") || '
1259                        'abort("%s has been remounted R/W; '
1260                        'reflash device to reenable OTA updates");')
1261                       % (self.device, r.to_string_raw(), srchash,
1262                          self.device))
1263
1264DataImage = blockimgdiff.DataImage
1265
1266
1267# map recovery.fstab's fs_types to mount/format "partition types"
1268PARTITION_TYPES = {
1269    "yaffs2": "MTD",
1270    "mtd": "MTD",
1271    "ext4": "EMMC",
1272    "emmc": "EMMC",
1273    "f2fs": "EMMC",
1274    "squashfs": "EMMC"
1275}
1276
1277def GetTypeAndDevice(mount_point, info):
1278  fstab = info["fstab"]
1279  if fstab:
1280    return (PARTITION_TYPES[fstab[mount_point].fs_type],
1281            fstab[mount_point].device)
1282  else:
1283    raise KeyError
1284
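# Illustrative result (editor's example; the block device path is made up):
#
#   GetTypeAndDevice("/system", OPTIONS.info_dict)
#   => ("EMMC", "/dev/block/platform/msm_sdcc.1/by-name/system")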
1285
1286def ParseCertificate(data):
1287  """Parse a PEM-format certificate."""
1288  cert = []
1289  save = False
1290  for line in data.split("\n"):
1291    if "--END CERTIFICATE--" in line:
1292      break
1293    if save:
1294      cert.append(line)
1295    if "--BEGIN CERTIFICATE--" in line:
1296      save = True
1297  cert = "".join(cert).decode('base64')
1298  return cert
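# Illustrative usage (editor's example; the certificate path is hypothetical).
# The return value is the raw DER-encoded certificate body.
#
#   with open("build/target/product/security/testkey.x509.pem") as f:
#     der = ParseCertificate(f.read())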
1299
1300def MakeRecoveryPatch(input_dir, output_sink, recovery_img, boot_img,
1301                      info_dict=None):
1302  """Generate a binary patch that creates the recovery image starting
1303  with the boot image.  (Most of the space in these images is just the
1304  kernel, which is identical for the two, so the resulting patch
1305  should be efficient.)  Add it to the output zip, along with a shell
1306  script that is run from init.rc on first boot to actually do the
1307  patching and install the new recovery image.
1308
1309  recovery_img and boot_img should be File objects for the
1310  corresponding images.  info should be the dictionary returned by
1311  common.LoadInfoDict() on the input target_files.
1312  """
1313
1314  if info_dict is None:
1315    info_dict = OPTIONS.info_dict
1316
1317  diff_program = ["imgdiff"]
1318  path = os.path.join(input_dir, "SYSTEM", "etc", "recovery-resource.dat")
1319  if os.path.exists(path):
1320    diff_program.append("-b")
1321    diff_program.append(path)
1322    bonus_args = "-b /system/etc/recovery-resource.dat"
1323  else:
1324    bonus_args = ""
1325
1326  d = Difference(recovery_img, boot_img, diff_program=diff_program)
1327  _, _, patch = d.ComputePatch()
1328  output_sink("recovery-from-boot.p", patch)
1329
1330  try:
1331    boot_type, boot_device = GetTypeAndDevice("/boot", info_dict)
1332    recovery_type, recovery_device = GetTypeAndDevice("/recovery", info_dict)
1333  except KeyError:
1334    return
1335
1336  sh = """#!/system/bin/sh
1337if ! applypatch -c %(recovery_type)s:%(recovery_device)s:%(recovery_size)d:%(recovery_sha1)s; then
1338  applypatch %(bonus_args)s %(boot_type)s:%(boot_device)s:%(boot_size)d:%(boot_sha1)s %(recovery_type)s:%(recovery_device)s %(recovery_sha1)s %(recovery_size)d %(boot_sha1)s:/system/recovery-from-boot.p && log -t recovery "Installing new recovery image: succeeded" || log -t recovery "Installing new recovery image: failed"
1339else
1340  log -t recovery "Recovery image already installed"
1341fi
1342""" % {'boot_size': boot_img.size,
1343       'boot_sha1': boot_img.sha1,
1344       'recovery_size': recovery_img.size,
1345       'recovery_sha1': recovery_img.sha1,
1346       'boot_type': boot_type,
1347       'boot_device': boot_device,
1348       'recovery_type': recovery_type,
1349       'recovery_device': recovery_device,
1350       'bonus_args': bonus_args}
1351
1352  # The install script location moved from /system/etc to /system/bin
1353  # in the L release.  Parse the init.rc file to find out where the
1354  # target-files expects it to be, and put it there.
1355  sh_location = "etc/install-recovery.sh"
1356  try:
1357    with open(os.path.join(input_dir, "BOOT", "RAMDISK", "init.rc")) as f:
1358      for line in f:
1359        m = re.match(r"^service flash_recovery /system/(\S+)\s*$", line)
1360        if m:
1361          sh_location = m.group(1)
1362          print "putting script in", sh_location
1363          break
1364  except (OSError, IOError) as e:
1365    print "failed to read init.rc: %s" % (e,)
1366
1367  output_sink(sh_location, sh)
1368