common.py revision ff7778166bd13a90c89fa333591ee2037f587a11
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import copy
import errno
import getopt
import getpass
import imp
import os
import platform
import re
import shlex
import shutil
import subprocess
import sys
import tempfile
import threading
import time
import zipfile

import blockimgdiff
import rangelib

from hashlib import sha1 as sha1


class Options(object):
  def __init__(self):
    platform_search_path = {
        "linux2": "out/host/linux-x86",
        "darwin": "out/host/darwin-x86",
    }

    self.search_path = platform_search_path.get(sys.platform, None)
    self.signapk_path = "framework/signapk.jar"  # Relative to search_path
    self.extra_signapk_args = []
    self.java_path = "java"  # Use the one on the path by default.
    self.java_args = "-Xmx2048m" # JVM Args
    self.public_key_suffix = ".x509.pem"
    self.private_key_suffix = ".pk8"
    self.verbose = False
    self.tempfiles = []
    self.device_specific = None
    self.extras = {}
    self.info_dict = None
    self.worker_threads = None


OPTIONS = Options()


# Values for "certificate" in apkcerts that mean special things.
SPECIAL_CERT_STRINGS = ("PRESIGNED", "EXTERNAL")


class ExternalError(RuntimeError):
  pass


def Run(args, **kwargs):
  """Create and return a subprocess.Popen object, printing the command
  line on the terminal if -v was specified."""
  if OPTIONS.verbose:
    print "  running: ", " ".join(args)
  return subprocess.Popen(args, **kwargs)


def CloseInheritedPipes():
  """Gmake on Mac OS leaks file descriptors (pipes). Close those fds
  before doing other work."""
  if platform.system() != "Darwin":
    return
  for d in range(3, 1025):
    try:
      stat = os.fstat(d)
      if stat is not None:
        pipebit = stat[0] & 0x1000
        if pipebit != 0:
          os.close(d)
    except OSError:
      pass


def LoadInfoDict(input_file):
  """Read and parse the META/misc_info.txt key/value pairs from the
  input target files and return a dict."""

  def read_helper(fn):
    if isinstance(input_file, zipfile.ZipFile):
      return input_file.read(fn)
    else:
      path = os.path.join(input_file, *fn.split("/"))
      try:
        with open(path) as f:
          return f.read()
      except IOError as e:
        if e.errno == errno.ENOENT:
          raise KeyError(fn)
  d = {}
  try:
    d = LoadDictionaryFromLines(read_helper("META/misc_info.txt").split("\n"))
  except KeyError:
    # ok if misc_info.txt doesn't exist
    pass

  # backwards compatibility: These values used to be in their own
  # files.  Look for them, in case we're processing an old
  # target_files zip.

  if "mkyaffs2_extra_flags" not in d:
    try:
      d["mkyaffs2_extra_flags"] = read_helper(
          "META/mkyaffs2-extra-flags.txt").strip()
    except KeyError:
      # ok if flags don't exist
      pass

  if "recovery_api_version" not in d:
    try:
      d["recovery_api_version"] = read_helper(
          "META/recovery-api-version.txt").strip()
    except KeyError:
      raise ValueError("can't find recovery API version in input target-files")

  if "tool_extensions" not in d:
    try:
      d["tool_extensions"] = read_helper("META/tool-extensions.txt").strip()
    except KeyError:
      # ok if extensions don't exist
      pass

  if "fstab_version" not in d:
    d["fstab_version"] = "1"

  try:
    data = read_helper("META/imagesizes.txt")
    for line in data.split("\n"):
      if not line:
        continue
      name, value = line.split(" ", 1)
      if not value:
        continue
      if name == "blocksize":
        d[name] = value
      else:
        d[name + "_size"] = value
  except KeyError:
    pass

  def makeint(key):
    if key in d:
      d[key] = int(d[key], 0)

  makeint("recovery_api_version")
  makeint("blocksize")
  makeint("system_size")
  makeint("vendor_size")
  makeint("userdata_size")
  makeint("cache_size")
  makeint("recovery_size")
  makeint("boot_size")
  makeint("fstab_version")

  d["fstab"] = LoadRecoveryFSTab(read_helper, d["fstab_version"])
  d["build.prop"] = LoadBuildProp(read_helper)
  return d

def LoadBuildProp(read_helper):
  try:
    data = read_helper("SYSTEM/build.prop")
  except KeyError:
    print "Warning: could not find SYSTEM/build.prop"
    data = ""
  return LoadDictionaryFromLines(data.split("\n"))

def LoadDictionaryFromLines(lines):
  d = {}
  for line in lines:
    line = line.strip()
    if not line or line.startswith("#"):
      continue
    if "=" in line:
      name, value = line.split("=", 1)
      d[name] = value
  return d

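# Illustrative note (not part of the original source): LoadDictionaryFromLines()
# above parses simple "key=value" text such as misc_info.txt or build.prop,
# skipping blank lines and "#" comments and splitting on the first "=" only,
# e.g.
#
#   LoadDictionaryFromLines(["ro.build.id=XYZ123", "# comment", "a=b=c"])
#   # -> {"ro.build.id": "XYZ123", "a": "b=c"}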
def LoadRecoveryFSTab(read_helper, fstab_version):
  class Partition(object):
    def __init__(self, mount_point, fs_type, device, length, device2):
      self.mount_point = mount_point
      self.fs_type = fs_type
      self.device = device
      self.length = length
      self.device2 = device2

  try:
    data = read_helper("RECOVERY/RAMDISK/etc/recovery.fstab")
  except KeyError:
    print "Warning: could not find RECOVERY/RAMDISK/etc/recovery.fstab"
    data = ""

  if fstab_version == 1:
    d = {}
    for line in data.split("\n"):
      line = line.strip()
      if not line or line.startswith("#"):
        continue
      pieces = line.split()
      if not 3 <= len(pieces) <= 4:
        raise ValueError("malformed recovery.fstab line: \"%s\"" % (line,))
      options = None
      if len(pieces) >= 4:
        if pieces[3].startswith("/"):
          device2 = pieces[3]
          if len(pieces) >= 5:
            options = pieces[4]
        else:
          device2 = None
          options = pieces[3]
      else:
        device2 = None

      mount_point = pieces[0]
      length = 0
      if options:
        options = options.split(",")
        for i in options:
          if i.startswith("length="):
            length = int(i[7:])
          else:
            print "%s: unknown option \"%s\"" % (mount_point, i)

      d[mount_point] = Partition(mount_point=mount_point, fs_type=pieces[1],
                                 device=pieces[2], length=length,
                                 device2=device2)

  elif fstab_version == 2:
    d = {}
    for line in data.split("\n"):
      line = line.strip()
      if not line or line.startswith("#"):
        continue
      pieces = line.split()
      if len(pieces) != 5:
        raise ValueError("malformed recovery.fstab line: \"%s\"" % (line,))

      # Ignore entries that are managed by vold
      options = pieces[4]
      if "voldmanaged=" in options:
        continue

      # It's a good line, parse it
      length = 0
      options = options.split(",")
      for i in options:
        if i.startswith("length="):
          length = int(i[7:])
        else:
          # Ignore all unknown options in the unified fstab
          continue

      mount_point = pieces[1]
      d[mount_point] = Partition(mount_point=mount_point, fs_type=pieces[2],
                                 device=pieces[0], length=length, device2=None)

  else:
    raise ValueError("Unknown fstab_version: \"%d\"" % (fstab_version,))

  return d

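# Illustrative note (the device path below is hypothetical): a v2 (unified)
# recovery.fstab line with five fields, e.g.
#
#   /dev/block/platform/soc.0/by-name/system  /system  ext4  ro  wait
#
# is parsed by LoadRecoveryFSTab() above into d["/system"] with fs_type "ext4",
# device "/dev/block/platform/soc.0/by-name/system" and length 0; entries whose
# fs_mgr flags contain "voldmanaged=" are skipped entirely.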

def DumpInfoDict(d):
  for k, v in sorted(d.items()):
    print "%-25s = (%s) %s" % (k, type(v).__name__, v)


def BuildBootableImage(sourcedir, fs_config_file, info_dict=None):
  """Take a kernel, cmdline, and ramdisk directory from the input (in
  'sourcedir'), and turn them into a boot image.  Return the image
  data, or None if sourcedir does not appear to contain files for
  building the requested image."""

  if (not os.access(os.path.join(sourcedir, "RAMDISK"), os.F_OK) or
      not os.access(os.path.join(sourcedir, "kernel"), os.F_OK)):
    return None

  if info_dict is None:
    info_dict = OPTIONS.info_dict

  ramdisk_img = tempfile.NamedTemporaryFile()
  img = tempfile.NamedTemporaryFile()

  if os.access(fs_config_file, os.F_OK):
    cmd = ["mkbootfs", "-f", fs_config_file, os.path.join(sourcedir, "RAMDISK")]
  else:
    cmd = ["mkbootfs", os.path.join(sourcedir, "RAMDISK")]
  p1 = Run(cmd, stdout=subprocess.PIPE)
  p2 = Run(["minigzip"],
           stdin=p1.stdout, stdout=ramdisk_img.file.fileno())

  p2.wait()
  p1.wait()
  assert p1.returncode == 0, "mkbootfs of %s ramdisk failed" % (sourcedir,)
  assert p2.returncode == 0, "minigzip of %s ramdisk failed" % (sourcedir,)

  # use MKBOOTIMG from environ, or "mkbootimg" if empty or not set
  mkbootimg = os.getenv('MKBOOTIMG') or "mkbootimg"

  cmd = [mkbootimg, "--kernel", os.path.join(sourcedir, "kernel")]

  fn = os.path.join(sourcedir, "second")
  if os.access(fn, os.F_OK):
    cmd.append("--second")
    cmd.append(fn)

  fn = os.path.join(sourcedir, "cmdline")
  if os.access(fn, os.F_OK):
    cmd.append("--cmdline")
    cmd.append(open(fn).read().rstrip("\n"))

  fn = os.path.join(sourcedir, "base")
  if os.access(fn, os.F_OK):
    cmd.append("--base")
    cmd.append(open(fn).read().rstrip("\n"))

  fn = os.path.join(sourcedir, "pagesize")
  if os.access(fn, os.F_OK):
    cmd.append("--pagesize")
    cmd.append(open(fn).read().rstrip("\n"))

  args = info_dict.get("mkbootimg_args", None)
  if args and args.strip():
    cmd.extend(shlex.split(args))

  img_unsigned = None
  if info_dict.get("vboot", None):
    img_unsigned = tempfile.NamedTemporaryFile()
    cmd.extend(["--ramdisk", ramdisk_img.name,
                "--output", img_unsigned.name])
  else:
    cmd.extend(["--ramdisk", ramdisk_img.name,
                "--output", img.name])

  p = Run(cmd, stdout=subprocess.PIPE)
  p.communicate()
  assert p.returncode == 0, "mkbootimg of %s image failed" % (
      os.path.basename(sourcedir),)

  if info_dict.get("verity_key", None):
    path = "/" + os.path.basename(sourcedir).lower()
    cmd = ["boot_signer", path, img.name, info_dict["verity_key"] + ".pk8",
           info_dict["verity_key"] + ".x509.pem", img.name]
    p = Run(cmd, stdout=subprocess.PIPE)
    p.communicate()
    assert p.returncode == 0, "boot_signer of %s image failed" % path

  # Sign the image if vboot is non-empty.
  elif info_dict.get("vboot", None):
    path = "/" + os.path.basename(sourcedir).lower()
    img_keyblock = tempfile.NamedTemporaryFile()
    cmd = [info_dict["vboot_signer_cmd"], info_dict["futility"],
           img_unsigned.name, info_dict["vboot_key"] + ".vbpubk",
           info_dict["vboot_key"] + ".vbprivk", img_keyblock.name,
           img.name]
    p = Run(cmd, stdout=subprocess.PIPE)
    p.communicate()
    assert p.returncode == 0, "vboot_signer of %s image failed" % path

    # Clean up the temp files.
    img_unsigned.close()
    img_keyblock.close()

  # Rewind to the start of the image before reading it back.
  img.seek(0, os.SEEK_SET)
  data = img.read()

  ramdisk_img.close()
  img.close()

  return data


def GetBootableImage(name, prebuilt_name, unpack_dir, tree_subdir,
                     info_dict=None):
  """Return a File object (with name 'name') with the desired bootable
  image.  Look for it in 'unpack_dir'/BOOTABLE_IMAGES under the name
  'prebuilt_name', otherwise look for it under 'unpack_dir'/IMAGES,
  otherwise construct it from the source files in
  'unpack_dir'/'tree_subdir'."""

  prebuilt_path = os.path.join(unpack_dir, "BOOTABLE_IMAGES", prebuilt_name)
  if os.path.exists(prebuilt_path):
    print "using prebuilt %s from BOOTABLE_IMAGES..." % (prebuilt_name,)
    return File.FromLocalFile(name, prebuilt_path)

  prebuilt_path = os.path.join(unpack_dir, "IMAGES", prebuilt_name)
  if os.path.exists(prebuilt_path):
    print "using prebuilt %s from IMAGES..." % (prebuilt_name,)
    return File.FromLocalFile(name, prebuilt_path)

  print "building image from target_files %s..." % (tree_subdir,)
  fs_config = "META/" + tree_subdir.lower() + "_filesystem_config.txt"
  data = BuildBootableImage(os.path.join(unpack_dir, tree_subdir),
                            os.path.join(unpack_dir, fs_config),
                            info_dict)
  if data:
    return File(name, data)
  return None


def UnzipTemp(filename, pattern=None):
  """Unzip the given archive into a temporary directory and return the name.

  If filename is of the form "foo.zip+bar.zip", unzip foo.zip into a
  temp dir, then unzip bar.zip into that_dir/BOOTABLE_IMAGES.

  Returns (tempdir, zipobj) where zipobj is a zipfile.ZipFile (of the
  main file), open for reading.
  """

  tmp = tempfile.mkdtemp(prefix="targetfiles-")
  OPTIONS.tempfiles.append(tmp)

  def unzip_to_dir(filename, dirname):
    cmd = ["unzip", "-o", "-q", filename, "-d", dirname]
    if pattern is not None:
      cmd.append(pattern)
    p = Run(cmd, stdout=subprocess.PIPE)
    p.communicate()
    if p.returncode != 0:
      raise ExternalError("failed to unzip input target-files \"%s\"" %
                          (filename,))

  m = re.match(r"^(.*[.]zip)\+(.*[.]zip)$", filename, re.IGNORECASE)
  if m:
    unzip_to_dir(m.group(1), tmp)
    unzip_to_dir(m.group(2), os.path.join(tmp, "BOOTABLE_IMAGES"))
    filename = m.group(1)
  else:
    unzip_to_dir(filename, tmp)

  return tmp, zipfile.ZipFile(filename, "r")

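# Hedged usage sketch for UnzipTemp() above (filenames are hypothetical):
#
#   tmp, input_zip = UnzipTemp("target_files.zip")
#   info = LoadInfoDict(input_zip)
#
# For "old.zip+new.zip", old.zip is unzipped into the temp dir and new.zip into
# its BOOTABLE_IMAGES/ subdirectory; the returned ZipFile is opened on old.zip.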

def GetKeyPasswords(keylist):
  """Given a list of keys, prompt the user to enter passwords for
  those which require them.  Return a {key: password} dict.  password
  will be None if the key has no password."""

  no_passwords = []
  need_passwords = []
  key_passwords = {}
  devnull = open("/dev/null", "w+b")
  for k in sorted(keylist):
    # We don't need a password for things that aren't really keys.
    if k in SPECIAL_CERT_STRINGS:
      no_passwords.append(k)
      continue

    p = Run(["openssl", "pkcs8", "-in", k+OPTIONS.private_key_suffix,
             "-inform", "DER", "-nocrypt"],
            stdin=devnull.fileno(),
            stdout=devnull.fileno(),
            stderr=subprocess.STDOUT)
    p.communicate()
    if p.returncode == 0:
      # Definitely an unencrypted key.
      no_passwords.append(k)
    else:
      p = Run(["openssl", "pkcs8", "-in", k+OPTIONS.private_key_suffix,
               "-inform", "DER", "-passin", "pass:"],
              stdin=devnull.fileno(),
              stdout=devnull.fileno(),
              stderr=subprocess.PIPE)
      _, stderr = p.communicate()
      if p.returncode == 0:
        # Encrypted key with empty string as password.
        key_passwords[k] = ''
      elif stderr.startswith('Error decrypting key'):
        # Definitely encrypted key.
        # It would have said "Error reading key" if it didn't parse correctly.
        need_passwords.append(k)
      else:
        # Potentially, a type of key that openssl doesn't understand.
        # We'll let the routines in signapk.jar handle it.
        no_passwords.append(k)
  devnull.close()

  key_passwords.update(PasswordManager().GetPasswords(need_passwords))
  key_passwords.update(dict.fromkeys(no_passwords, None))
  return key_passwords


def SignFile(input_name, output_name, key, password, align=None,
             whole_file=False):
  """Sign the input_name zip/jar/apk, producing output_name.  Use the
  given key and password (the latter may be None if the key does not
  have a password).

  If align is an integer > 1, zipalign is run to align stored files in
  the output zip on 'align'-byte boundaries.

  If whole_file is true, use the "-w" option to SignApk to embed a
  signature that covers the whole file in the archive comment of the
  zip file.
  """

  if align == 0 or align == 1:
    align = None

  if align:
    temp = tempfile.NamedTemporaryFile()
    sign_name = temp.name
  else:
    sign_name = output_name

  cmd = [OPTIONS.java_path, OPTIONS.java_args, "-jar",
         os.path.join(OPTIONS.search_path, OPTIONS.signapk_path)]
  cmd.extend(OPTIONS.extra_signapk_args)
  if whole_file:
    cmd.append("-w")
  cmd.extend([key + OPTIONS.public_key_suffix,
              key + OPTIONS.private_key_suffix,
              input_name, sign_name])

  p = Run(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
  if password is not None:
    password += "\n"
  p.communicate(password)
  if p.returncode != 0:
    raise ExternalError("signapk.jar failed: return code %s" % (p.returncode,))

  if align:
    p = Run(["zipalign", "-f", str(align), sign_name, output_name])
    p.communicate()
    if p.returncode != 0:
      raise ExternalError("zipalign failed: return code %s" % (p.returncode,))
    temp.close()

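# Hedged usage sketch for SignFile() above (the key path is an example, not a
# value used by this module):
#
#   SignFile("unsigned.zip", "signed.zip",
#            "build/target/product/security/testkey", password=None,
#            whole_file=True)
#
# runs signapk.jar with testkey.x509.pem / testkey.pk8; whole_file=True adds
# "-w" so the signature covers the entire archive.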

def CheckSize(data, target, info_dict):
  """Check the data string passed against the max size limit, if
  any, for the given target.  Raise exception if the data is too big.
  Print a warning if the data is nearing the maximum size."""

  if target.endswith(".img"):
    target = target[:-4]
  mount_point = "/" + target

  fs_type = None
  limit = None
  if info_dict["fstab"]:
    if mount_point == "/userdata":
      mount_point = "/data"
    p = info_dict["fstab"][mount_point]
    fs_type = p.fs_type
    device = p.device
    if "/" in device:
      device = device[device.rfind("/")+1:]
    limit = info_dict.get(device + "_size", None)
  if not fs_type or not limit:
    return

  if fs_type == "yaffs2":
    # image size should be increased by 1/32 to account for the
    # spare area (64 bytes per 2k page)
    limit = limit / 2048 * (2048+64)
  size = len(data)
  pct = float(size) * 100.0 / limit
  msg = "%s size (%d) is %.2f%% of limit (%d)" % (target, size, pct, limit)
  if pct >= 99.0:
    raise ExternalError(msg)
  elif pct >= 95.0:
    print
    print "  WARNING: ", msg
    print
  elif OPTIONS.verbose:
    print "  ", msg


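# Worked example of the yaffs2 adjustment in CheckSize() above: with a raw
# limit of 67108864 bytes, the effective limit becomes
# 67108864 / 2048 * (2048 + 64) = 69206016 bytes.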
def ReadApkCerts(tf_zip):
  """Given a target_files ZipFile, parse the META/apkcerts.txt file
  and return a {package: cert} dict."""
  certmap = {}
  for line in tf_zip.read("META/apkcerts.txt").split("\n"):
    line = line.strip()
    if not line:
      continue
    m = re.match(r'^name="(.*)"\s+certificate="(.*)"\s+'
                 r'private_key="(.*)"$', line)
    if m:
      name, cert, privkey = m.groups()
      public_key_suffix_len = len(OPTIONS.public_key_suffix)
      private_key_suffix_len = len(OPTIONS.private_key_suffix)
      if cert in SPECIAL_CERT_STRINGS and not privkey:
        certmap[name] = cert
      elif (cert.endswith(OPTIONS.public_key_suffix) and
            privkey.endswith(OPTIONS.private_key_suffix) and
            cert[:-public_key_suffix_len] == privkey[:-private_key_suffix_len]):
        certmap[name] = cert[:-public_key_suffix_len]
      else:
        raise ValueError("failed to parse line from apkcerts.txt:\n" + line)
  return certmap


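# Illustrative apkcerts.txt line for ReadApkCerts() above (the values are
# hypothetical):
#
#   name="Foo.apk" certificate="certs/app.x509.pem" private_key="certs/app.pk8"
#
# maps to certmap["Foo.apk"] = "certs/app"; PRESIGNED/EXTERNAL entries with an
# empty private_key are stored verbatim.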
COMMON_DOCSTRING = """
  -p  (--path)  <dir>
      Prepend <dir>/bin to the list of places to search for binaries
      run by this script, and expect to find jars in <dir>/framework.

  -s  (--device_specific) <file>
      Path to the python module containing device-specific
      releasetools code.

  -x  (--extra)  <key=value>
      Add a key/value pair to the 'extras' dict, which device-specific
      extension code may look at.

  -v  (--verbose)
      Show command lines being executed.

  -h  (--help)
      Display this usage message and exit.
"""

def Usage(docstring):
  print docstring.rstrip("\n")
  print COMMON_DOCSTRING


def ParseOptions(argv,
                 docstring,
                 extra_opts="", extra_long_opts=(),
                 extra_option_handler=None):
  """Parse the options in argv and return any arguments that aren't
  flags.  docstring is the calling module's docstring, to be displayed
  for errors and -h.  extra_opts and extra_long_opts are for flags
  defined by the caller, which are processed by passing them to
  extra_option_handler."""

  try:
    opts, args = getopt.getopt(
        argv, "hvp:s:x:" + extra_opts,
        ["help", "verbose", "path=", "signapk_path=", "extra_signapk_args=",
         "java_path=", "java_args=", "public_key_suffix=",
         "private_key_suffix=", "device_specific=", "extra="] +
        list(extra_long_opts))
  except getopt.GetoptError as err:
    Usage(docstring)
    print "**", str(err), "**"
    sys.exit(2)

  for o, a in opts:
    if o in ("-h", "--help"):
      Usage(docstring)
      sys.exit()
    elif o in ("-v", "--verbose"):
      OPTIONS.verbose = True
    elif o in ("-p", "--path"):
      OPTIONS.search_path = a
    elif o in ("--signapk_path",):
      OPTIONS.signapk_path = a
    elif o in ("--extra_signapk_args",):
      OPTIONS.extra_signapk_args = shlex.split(a)
    elif o in ("--java_path",):
      OPTIONS.java_path = a
    elif o in ("--java_args",):
      OPTIONS.java_args = a
    elif o in ("--public_key_suffix",):
      OPTIONS.public_key_suffix = a
    elif o in ("--private_key_suffix",):
      OPTIONS.private_key_suffix = a
    elif o in ("-s", "--device_specific"):
      OPTIONS.device_specific = a
    elif o in ("-x", "--extra"):
      key, value = a.split("=", 1)
      OPTIONS.extras[key] = value
    else:
      if extra_option_handler is None or not extra_option_handler(o, a):
        assert False, "unknown option \"%s\"" % (o,)

  if OPTIONS.search_path:
    os.environ["PATH"] = (os.path.join(OPTIONS.search_path, "bin") +
                          os.pathsep + os.environ["PATH"])

  return args

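# Hedged usage sketch for ParseOptions() above: a calling module typically does
# something like
#
#   args = common.ParseOptions(sys.argv[1:], __doc__,
#                              extra_opts="k:",
#                              extra_long_opts=["package_key="],
#                              extra_option_handler=option_handler)
#
# where option_handler(o, a) returns True for options it consumes; the
# "package_key" flag here is only an example, not something this module defines.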

def MakeTempFile(prefix=None, suffix=None):
  """Make a temp file and add it to the list of things to be deleted
  when Cleanup() is called.  Return the filename."""
  fd, fn = tempfile.mkstemp(prefix=prefix, suffix=suffix)
  os.close(fd)
  OPTIONS.tempfiles.append(fn)
  return fn


def Cleanup():
  for i in OPTIONS.tempfiles:
    if os.path.isdir(i):
      shutil.rmtree(i)
    else:
      os.remove(i)


class PasswordManager(object):
  def __init__(self):
    self.editor = os.getenv("EDITOR", None)
    self.pwfile = os.getenv("ANDROID_PW_FILE", None)

  def GetPasswords(self, items):
    """Get passwords corresponding to each string in 'items',
    returning a dict.  (The dict may have keys in addition to the
    values in 'items'.)

    Uses the passwords in $ANDROID_PW_FILE if available, letting the
    user edit that file to add more needed passwords.  If no editor is
    available, or $ANDROID_PW_FILE isn't defined, prompts the user
    interactively in the ordinary way.
    """

    current = self.ReadFile()

    first = True
    while True:
      missing = []
      for i in items:
        if i not in current or not current[i]:
          missing.append(i)
      # Are all the passwords already in the file?
      if not missing:
        return current

      for i in missing:
        current[i] = ""

      if not first:
        print "key file %s still missing some passwords." % (self.pwfile,)
        answer = raw_input("try to edit again? [y]> ").strip()
        if answer and answer[0] not in 'yY':
          raise RuntimeError("key passwords unavailable")
      first = False

      current = self.UpdateAndReadFile(current)

  def PromptResult(self, current): # pylint: disable=no-self-use
    """Prompt the user to enter a value (password) for each key in
    'current' whose value is false.  Returns a new dict with all the
    values.
    """
    result = {}
    for k, v in sorted(current.iteritems()):
      if v:
        result[k] = v
      else:
        while True:
          result[k] = getpass.getpass(
              "Enter password for %s key> " % k).strip()
          if result[k]:
            break
    return result

  def UpdateAndReadFile(self, current):
    if not self.editor or not self.pwfile:
      return self.PromptResult(current)

    f = open(self.pwfile, "w")
    os.chmod(self.pwfile, 0o600)
    f.write("# Enter key passwords between the [[[ ]]] brackets.\n")
    f.write("# (Additional spaces are harmless.)\n\n")

    first_line = None
    sorted_list = sorted([(not v, k, v) for (k, v) in current.iteritems()])
    for i, (_, k, v) in enumerate(sorted_list):
      f.write("[[[  %s  ]]] %s\n" % (v, k))
      if not v and first_line is None:
        # position cursor on first line with no password.
        first_line = i + 4
    f.close()

    p = Run([self.editor, "+%d" % (first_line,), self.pwfile])
    _, _ = p.communicate()

    return self.ReadFile()

  def ReadFile(self):
    result = {}
    if self.pwfile is None:
      return result
    try:
      f = open(self.pwfile, "r")
      for line in f:
        line = line.strip()
        if not line or line[0] == '#':
          continue
        m = re.match(r"^\[\[\[\s*(.*?)\s*\]\]\]\s*(\S+)$", line)
        if not m:
          print "failed to parse password file: ", line
        else:
          result[m.group(2)] = m.group(1)
      f.close()
    except IOError as e:
      if e.errno != errno.ENOENT:
        print "error reading password file: ", str(e)
    return result


def ZipWrite(zip_file, filename, arcname=None, perms=0o644,
             compress_type=None):
  import datetime

  # http://b/18015246
  # Python 2.7's zipfile implementation wrongly thinks that zip64 is required
  # for files larger than 2GiB. We can work around this by adjusting their
  # limit. Note that `zipfile.writestr()` will not work for strings larger than
  # 2GiB. The Python interpreter sometimes rejects strings that large (though
  # it isn't clear to me exactly what circumstances cause this).
  # `zipfile.write()` must be used directly to work around this.
  #
  # This mess can be avoided if we port to python3.
  saved_zip64_limit = zipfile.ZIP64_LIMIT
  zipfile.ZIP64_LIMIT = (1 << 32) - 1

  if compress_type is None:
    compress_type = zip_file.compression
  if arcname is None:
    arcname = filename

  saved_stat = os.stat(filename)

  try:
    # `zipfile.write()` doesn't allow us to pass ZipInfo, so just modify the
    # file to be zipped and reset it when we're done.
    os.chmod(filename, perms)

    # Use a fixed timestamp so the output is repeatable.
    epoch = datetime.datetime.fromtimestamp(0)
    timestamp = (datetime.datetime(2009, 1, 1) - epoch).total_seconds()
    os.utime(filename, (timestamp, timestamp))

    zip_file.write(filename, arcname=arcname, compress_type=compress_type)
  finally:
    os.chmod(filename, saved_stat.st_mode)
    os.utime(filename, (saved_stat.st_atime, saved_stat.st_mtime))
    zipfile.ZIP64_LIMIT = saved_zip64_limit


def ZipWriteStr(zip_file, zinfo_or_arcname, data, perms=0o644,
                compress_type=None):
  """Wrap zipfile.writestr() function to work around the zip64 limit.

  Even with the ZIP64_LIMIT workaround, it won't allow writing a string
  longer than 2GiB. It gives 'OverflowError: size does not fit in an int'
  when calling crc32(bytes).

  But it still works fine to write a shorter string into a large zip file.
  We should use ZipWrite() whenever possible, and only use ZipWriteStr()
  when we know the string won't be too long.
  """

  saved_zip64_limit = zipfile.ZIP64_LIMIT
  zipfile.ZIP64_LIMIT = (1 << 32) - 1

  if not isinstance(zinfo_or_arcname, zipfile.ZipInfo):
    zinfo = zipfile.ZipInfo(filename=zinfo_or_arcname)
    zinfo.compress_type = zip_file.compression
  else:
    zinfo = zinfo_or_arcname

  # If compress_type is given, it overrides the value in zinfo.
  if compress_type is not None:
    zinfo.compress_type = compress_type

  zinfo.external_attr = perms << 16

  # Use a fixed timestamp so the output is repeatable.
  zinfo.date_time = (2009, 1, 1, 0, 0, 0)

  zip_file.writestr(zinfo, data)
  zipfile.ZIP64_LIMIT = saved_zip64_limit

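# Hedged usage sketch: both helpers above honor the ZIP64_LIMIT workaround.
# ZipWrite() streams a file from disk (preferred for large payloads), while
# ZipWriteStr() writes an in-memory string, e.g. (names are hypothetical):
#
#   ZipWrite(output_zip, "/tmp/system.new.dat", "system.new.dat")
#   ZipWriteStr(output_zip, "META-INF/com/android/metadata", metadata_str)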

def ZipClose(zip_file):
  # http://b/18015246
  # zipfile also refers to ZIP64_LIMIT during close() when it writes out the
  # central directory.
  saved_zip64_limit = zipfile.ZIP64_LIMIT
  zipfile.ZIP64_LIMIT = (1 << 32) - 1

  zip_file.close()

  zipfile.ZIP64_LIMIT = saved_zip64_limit


class DeviceSpecificParams(object):
  module = None
  def __init__(self, **kwargs):
    """Keyword arguments to the constructor become attributes of this
    object, which is passed to all functions in the device-specific
    module."""
    for k, v in kwargs.iteritems():
      setattr(self, k, v)
    self.extras = OPTIONS.extras

    if self.module is None:
      path = OPTIONS.device_specific
      if not path:
        return
      try:
        if os.path.isdir(path):
          info = imp.find_module("releasetools", [path])
        else:
          d, f = os.path.split(path)
          b, x = os.path.splitext(f)
          if x == ".py":
            f = b
          info = imp.find_module(f, [d])
        print "loaded device-specific extensions from", path
        self.module = imp.load_module("device_specific", *info)
      except ImportError:
        print "unable to load device-specific module; assuming none"

  def _DoCall(self, function_name, *args, **kwargs):
    """Call the named function in the device-specific module, passing
    the given args and kwargs.  The first argument to the call will be
    the DeviceSpecific object itself.  If there is no module, or the
    module does not define the function, return the value of the
    'default' kwarg (which itself defaults to None)."""
    if self.module is None or not hasattr(self.module, function_name):
      return kwargs.get("default", None)
    return getattr(self.module, function_name)(*((self,) + args), **kwargs)

  def FullOTA_Assertions(self):
    """Called after emitting the block of assertions at the top of a
    full OTA package.  Implementations can add whatever additional
    assertions they like."""
    return self._DoCall("FullOTA_Assertions")

  def FullOTA_InstallBegin(self):
    """Called at the start of full OTA installation."""
    return self._DoCall("FullOTA_InstallBegin")

  def FullOTA_InstallEnd(self):
    """Called at the end of full OTA installation; typically this is
    used to install the image for the device's baseband processor."""
    return self._DoCall("FullOTA_InstallEnd")

  def IncrementalOTA_Assertions(self):
    """Called after emitting the block of assertions at the top of an
    incremental OTA package.  Implementations can add whatever
    additional assertions they like."""
    return self._DoCall("IncrementalOTA_Assertions")

  def IncrementalOTA_VerifyBegin(self):
    """Called at the start of the verification phase of incremental
    OTA installation; additional checks can be placed here to abort
    the script before any changes are made."""
    return self._DoCall("IncrementalOTA_VerifyBegin")

  def IncrementalOTA_VerifyEnd(self):
    """Called at the end of the verification phase of incremental OTA
    installation; additional checks can be placed here to abort the
    script before any changes are made."""
    return self._DoCall("IncrementalOTA_VerifyEnd")

  def IncrementalOTA_InstallBegin(self):
    """Called at the start of incremental OTA installation (after
    verification is complete)."""
    return self._DoCall("IncrementalOTA_InstallBegin")

  def IncrementalOTA_InstallEnd(self):
    """Called at the end of incremental OTA installation; typically
    this is used to install the image for the device's baseband
    processor."""
    return self._DoCall("IncrementalOTA_InstallEnd")

class File(object):
  def __init__(self, name, data):
    self.name = name
    self.data = data
    self.size = len(data)
    self.sha1 = sha1(data).hexdigest()

  @classmethod
  def FromLocalFile(cls, name, diskname):
    f = open(diskname, "rb")
    data = f.read()
    f.close()
    return File(name, data)

  def WriteToTemp(self):
    t = tempfile.NamedTemporaryFile()
    t.write(self.data)
    t.flush()
    return t

  def AddToZip(self, z, compression=None):
    ZipWriteStr(z, self.name, self.data, compress_type=compression)

DIFF_PROGRAM_BY_EXT = {
    ".gz" : "imgdiff",
    ".zip" : ["imgdiff", "-z"],
    ".jar" : ["imgdiff", "-z"],
    ".apk" : ["imgdiff", "-z"],
    ".img" : "imgdiff",
    }

class Difference(object):
  def __init__(self, tf, sf, diff_program=None):
    self.tf = tf
    self.sf = sf
    self.patch = None
    self.diff_program = diff_program

  def ComputePatch(self):
    """Compute the patch (as a string of data) needed to turn sf into
    tf.  Returns the same tuple as GetPatch()."""

    tf = self.tf
    sf = self.sf

    if self.diff_program:
      diff_program = self.diff_program
    else:
      ext = os.path.splitext(tf.name)[1]
      diff_program = DIFF_PROGRAM_BY_EXT.get(ext, "bsdiff")

    ttemp = tf.WriteToTemp()
    stemp = sf.WriteToTemp()

    ext = os.path.splitext(tf.name)[1]

    try:
      ptemp = tempfile.NamedTemporaryFile()
      if isinstance(diff_program, list):
        cmd = copy.copy(diff_program)
      else:
        cmd = [diff_program]
      cmd.append(stemp.name)
      cmd.append(ttemp.name)
      cmd.append(ptemp.name)
      p = Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
      err = []
      def run():
        _, e = p.communicate()
        if e:
          err.append(e)
      th = threading.Thread(target=run)
      th.start()
      th.join(timeout=300)   # 5 mins
      if th.is_alive():
        print "WARNING: diff command timed out"
        p.terminate()
        th.join(5)
        if th.is_alive():
          p.kill()
          th.join()

      if err or p.returncode != 0:
        print "WARNING: failure running %s:\n%s\n" % (
            diff_program, "".join(err))
        self.patch = None
        return None, None, None
      diff = ptemp.read()
    finally:
      ptemp.close()
      stemp.close()
      ttemp.close()

    self.patch = diff
    return self.tf, self.sf, self.patch


  def GetPatch(self):
    """Return a tuple (target_file, source_file, patch_data).
    patch_data may be None if ComputePatch hasn't been called, or if
    computing the patch failed."""
    return self.tf, self.sf, self.patch

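# Hedged usage sketch for the Difference class above (the File objects are
# hypothetical):
#
#   d = Difference(target_file, source_file)
#   tf, sf, patch = d.ComputePatch()   # patch is None if diffing failed
#
# The diff tool is picked from DIFF_PROGRAM_BY_EXT by the target's extension,
# falling back to bsdiff.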

def ComputeDifferences(diffs):
  """Call ComputePatch on all the Difference objects in 'diffs'."""
  print len(diffs), "diffs to compute"

  # Do the largest files first, to try and reduce the long-pole effect.
  by_size = [(i.tf.size, i) for i in diffs]
  by_size.sort(reverse=True)
  by_size = [i[1] for i in by_size]

  lock = threading.Lock()
  diff_iter = iter(by_size)   # accessed under lock

  def worker():
    try:
      lock.acquire()
      for d in diff_iter:
        lock.release()
        start = time.time()
        d.ComputePatch()
        dur = time.time() - start
        lock.acquire()

        tf, sf, patch = d.GetPatch()
        if sf.name == tf.name:
          name = tf.name
        else:
          name = "%s (%s)" % (tf.name, sf.name)
        if patch is None:
          print "patching failed!                                  %s" % (name,)
        else:
          print "%8.2f sec %8d / %8d bytes (%6.2f%%) %s" % (
              dur, len(patch), tf.size, 100.0 * len(patch) / tf.size, name)
      lock.release()
    except Exception as e:
      print e
      raise

  # start worker threads; wait for them all to finish.
  threads = [threading.Thread(target=worker)
             for i in range(OPTIONS.worker_threads)]
  for th in threads:
    th.start()
  while threads:
    threads.pop().join()


class BlockDifference(object):
  def __init__(self, partition, tgt, src=None, check_first_block=False,
               version=None):
    self.tgt = tgt
    self.src = src
    self.partition = partition
    self.check_first_block = check_first_block

    # Due to http://b/20939131, check_first_block is disabled temporarily.
    assert not self.check_first_block

    if version is None:
      version = 1
      if OPTIONS.info_dict:
        version = max(
            int(i) for i in
            OPTIONS.info_dict.get("blockimgdiff_versions", "1").split(","))
    self.version = version

    b = blockimgdiff.BlockImageDiff(tgt, src, threads=OPTIONS.worker_threads,
                                    version=self.version)
    tmpdir = tempfile.mkdtemp()
    OPTIONS.tempfiles.append(tmpdir)
    self.path = os.path.join(tmpdir, partition)
    b.Compute(self.path)

    _, self.device = GetTypeAndDevice("/" + partition, OPTIONS.info_dict)

  def WriteScript(self, script, output_zip, progress=None):
    if not self.src:
      # write the output unconditionally
      script.Print("Patching %s image unconditionally..." % (self.partition,))
    else:
      script.Print("Patching %s image after verification." % (self.partition,))

    if progress:
      script.ShowProgress(progress, 0)
    self._WriteUpdate(script, output_zip)

  def WriteVerifyScript(self, script):
    partition = self.partition
    if not self.src:
      script.Print("Image %s will be patched unconditionally." % (partition,))
    else:
      ranges = self.src.care_map.subtract(self.src.clobbered_blocks)
      ranges_str = ranges.to_string_raw()
      if self.version >= 3:
        script.AppendExtra(('if (range_sha1("%s", "%s") == "%s" || '
                            'block_image_verify("%s", '
                            'package_extract_file("%s.transfer.list"), '
                            '"%s.new.dat", "%s.patch.dat")) then') % (
                            self.device, ranges_str, self.src.TotalSha1(),
                            self.device, partition, partition, partition))
      else:
        script.AppendExtra('if range_sha1("%s", "%s") == "%s" then' % (
                           self.device, ranges_str, self.src.TotalSha1()))
      script.Print('Verified %s image...' % (partition,))
      script.AppendExtra('else')

      # When generating incrementals for the system and vendor partitions,
      # explicitly check the first block (which contains the superblock) of
      # the partition to see if it's what we expect. If this check fails,
      # give an explicit log message about the partition having been
      # remounted R/W (the most likely explanation) and the need to flash to
      # get OTAs working again.
      if self.check_first_block:
        self._CheckFirstBlock(script)

      # Abort the OTA update. Note that the incremental OTA cannot be applied
      # even if it may match the checksum of the target partition.
      # a) If version < 3, operations like move and erase will make changes
      #    unconditionally and damage the partition.
      # b) If version >= 3, it won't even reach here.
      script.AppendExtra(('abort("%s partition has unexpected contents");\n'
                          'endif;') % (partition,))

  def _WriteUpdate(self, script, output_zip):
    ZipWrite(output_zip,
             '{}.transfer.list'.format(self.path),
             '{}.transfer.list'.format(self.partition))
    ZipWrite(output_zip,
             '{}.new.dat'.format(self.path),
             '{}.new.dat'.format(self.partition))
    ZipWrite(output_zip,
             '{}.patch.dat'.format(self.path),
             '{}.patch.dat'.format(self.partition),
             compress_type=zipfile.ZIP_STORED)

    call = ('block_image_update("{device}", '
            'package_extract_file("{partition}.transfer.list"), '
            '"{partition}.new.dat", "{partition}.patch.dat");\n'.format(
                device=self.device, partition=self.partition))
    script.AppendExtra(script.WordWrap(call))

  def _HashBlocks(self, source, ranges): # pylint: disable=no-self-use
    data = source.ReadRangeSet(ranges)
    ctx = sha1()

    for p in data:
      ctx.update(p)

    return ctx.hexdigest()

  # TODO(tbao): Due to http://b/20939131, block 0 may be changed without
  # remounting R/W. Will change the checking to a finer-grained way to
  # mask off those bits.
  def _CheckFirstBlock(self, script):
    r = rangelib.RangeSet((0, 1))
    srchash = self._HashBlocks(self.src, r)

    script.AppendExtra(('(range_sha1("%s", "%s") == "%s") || '
                        'abort("%s has been remounted R/W; '
                        'reflash device to reenable OTA updates");')
                       % (self.device, r.to_string_raw(), srchash,
                          self.device))

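# Hedged sketch of the edify call that BlockDifference._WriteUpdate() above
# emits (device and partition names are illustrative):
#
#   block_image_update("/dev/block/bootdevice/by-name/system",
#       package_extract_file("system.transfer.list"),
#       "system.new.dat", "system.patch.dat");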
DataImage = blockimgdiff.DataImage


# map recovery.fstab's fs_types to mount/format "partition types"
PARTITION_TYPES = {
    "yaffs2": "MTD",
    "mtd": "MTD",
    "ext4": "EMMC",
    "emmc": "EMMC",
    "f2fs": "EMMC",
    "squashfs": "EMMC"
}

def GetTypeAndDevice(mount_point, info):
  fstab = info["fstab"]
  if fstab:
    return (PARTITION_TYPES[fstab[mount_point].fs_type],
            fstab[mount_point].device)
  else:
    raise KeyError

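# Hedged usage sketch for GetTypeAndDevice() above: with a parsed fstab in the
# info dict,
#
#   typ, device = GetTypeAndDevice("/boot", OPTIONS.info_dict)
#
# might yield ("EMMC", "/dev/block/.../boot") when the fstab lists /boot with
# fs_type "emmc"; a missing fstab or mount point raises KeyError.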

def ParseCertificate(data):
  """Parse a PEM-format certificate."""
  cert = []
  save = False
  for line in data.split("\n"):
    if "--END CERTIFICATE--" in line:
      break
    if save:
      cert.append(line)
    if "--BEGIN CERTIFICATE--" in line:
      save = True
  cert = "".join(cert).decode('base64')
  return cert

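# Hedged usage sketch for ParseCertificate() above: it expects PEM text and
# returns the raw DER bytes between the BEGIN/END CERTIFICATE markers, e.g.
#
#   der = ParseCertificate(open("testkey.x509.pem").read())
#
# (the filename is illustrative; any PEM certificate works).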
def MakeRecoveryPatch(input_dir, output_sink, recovery_img, boot_img,
                      info_dict=None):
  """Generate a binary patch that creates the recovery image starting
  with the boot image.  (Most of the space in these images is just the
  kernel, which is identical for the two, so the resulting patch
  should be efficient.)  Add it to the output zip, along with a shell
  script that is run from init.rc on first boot to actually do the
  patching and install the new recovery image.

  recovery_img and boot_img should be File objects for the
  corresponding images.  info_dict should be the dictionary returned by
  common.LoadInfoDict() on the input target_files.
  """

  if info_dict is None:
    info_dict = OPTIONS.info_dict

  diff_program = ["imgdiff"]
  path = os.path.join(input_dir, "SYSTEM", "etc", "recovery-resource.dat")
  if os.path.exists(path):
    diff_program.append("-b")
    diff_program.append(path)
    bonus_args = "-b /system/etc/recovery-resource.dat"
  else:
    bonus_args = ""

  d = Difference(recovery_img, boot_img, diff_program=diff_program)
  _, _, patch = d.ComputePatch()
  output_sink("recovery-from-boot.p", patch)

  try:
    boot_type, boot_device = GetTypeAndDevice("/boot", info_dict)
    recovery_type, recovery_device = GetTypeAndDevice("/recovery", info_dict)
  except KeyError:
    return

  sh = """#!/system/bin/sh
if ! applypatch -c %(recovery_type)s:%(recovery_device)s:%(recovery_size)d:%(recovery_sha1)s; then
  applypatch %(bonus_args)s %(boot_type)s:%(boot_device)s:%(boot_size)d:%(boot_sha1)s %(recovery_type)s:%(recovery_device)s %(recovery_sha1)s %(recovery_size)d %(boot_sha1)s:/system/recovery-from-boot.p && log -t recovery "Installing new recovery image: succeeded" || log -t recovery "Installing new recovery image: failed"
else
  log -t recovery "Recovery image already installed"
fi
""" % {'boot_size': boot_img.size,
       'boot_sha1': boot_img.sha1,
       'recovery_size': recovery_img.size,
       'recovery_sha1': recovery_img.sha1,
       'boot_type': boot_type,
       'boot_device': boot_device,
       'recovery_type': recovery_type,
       'recovery_device': recovery_device,
       'bonus_args': bonus_args}

  # The install script location moved from /system/etc to /system/bin
  # in the L release.  Parse the init.rc file to find out where the
  # target-files expects it to be, and put it there.
  sh_location = "etc/install-recovery.sh"
  try:
    with open(os.path.join(input_dir, "BOOT", "RAMDISK", "init.rc")) as f:
      for line in f:
        m = re.match(r"^service flash_recovery /system/(\S+)\s*$", line)
        if m:
          sh_location = m.group(1)
          print "putting script in", sh_location
          break
  except (OSError, IOError) as e:
    print "failed to read init.rc: %s" % (e,)

  output_sink(sh_location, sh)