common.py revision ebb19aa01a53e07594fed9104d5d6a3040b5a60a
1# Copyright (C) 2008 The Android Open Source Project
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7#      http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14
15import copy
16import errno
17import getopt
18import getpass
19import imp
20import os
21import platform
22import re
23import shlex
24import shutil
25import subprocess
26import sys
27import tempfile
28import threading
29import time
30import zipfile
31
32import blockimgdiff
33import rangelib
34
35try:
36  from hashlib import sha1 as sha1
37except ImportError:
38  from sha import sha as sha1
39
40
41class Options(object):
42  def __init__(self):
43    platform_search_path = {
44        "linux2": "out/host/linux-x86",
45        "darwin": "out/host/darwin-x86",
46    }
47
48    self.search_path = platform_search_path.get(sys.platform, None)
49    self.signapk_path = "framework/signapk.jar"  # Relative to search_path
50    self.extra_signapk_args = []
51    self.java_path = "java"  # Use the one on the path by default.
52    self.java_args = "-Xmx2048m" # JVM Args
53    self.public_key_suffix = ".x509.pem"
54    self.private_key_suffix = ".pk8"
55    self.verbose = False
56    self.tempfiles = []
57    self.device_specific = None
58    self.extras = {}
59    self.info_dict = None
60    self.worker_threads = None
61
62
63OPTIONS = Options()
64
65
66# Values for "certificate" in apkcerts that mean special things.
67SPECIAL_CERT_STRINGS = ("PRESIGNED", "EXTERNAL")
68
69
70class ExternalError(RuntimeError):
71  pass
72
73
74def Run(args, **kwargs):
75  """Create and return a subprocess.Popen object, printing the command
76  line on the terminal if -v was specified."""
77  if OPTIONS.verbose:
78    print "  running: ", " ".join(args)
79  return subprocess.Popen(args, **kwargs)
80
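# Illustrative Run() usage (hedged sketch; the command shown is hypothetical).
# Run() is a thin wrapper around subprocess.Popen, so callers use the normal
# Popen API on the returned object:
#
#   p = Run(["unzip", "-l", "target_files.zip"], stdout=subprocess.PIPE)
#   stdout, _ = p.communicate()
#   assert p.returncode == 0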
81
82def CloseInheritedPipes():
83  """Gmake on Mac OS leaks file descriptors (pipes).  Close those fds
84  before doing other work."""
85  if platform.system() != "Darwin":
86    return
87  for d in range(3, 1025):
88    try:
89      stat = os.fstat(d)
90      if stat is not None:
91        pipebit = stat[0] & 0x1000
92        if pipebit != 0:
93          os.close(d)
94    except OSError:
95      pass
96
97
98def LoadInfoDict(input_file):
99  """Read and parse the META/misc_info.txt key/value pairs from the
100  input target files and return a dict."""
101
102  def read_helper(fn):
103    if isinstance(input_file, zipfile.ZipFile):
104      return input_file.read(fn)
105    else:
106      path = os.path.join(input_file, *fn.split("/"))
107      try:
108        with open(path) as f:
109          return f.read()
110      except IOError as e:
111        if e.errno == errno.ENOENT:
112          raise KeyError(fn)
113  d = {}
114  try:
115    d = LoadDictionaryFromLines(read_helper("META/misc_info.txt").split("\n"))
116  except KeyError:
117    # ok if misc_info.txt doesn't exist
118    pass
119
120  # backwards compatibility: These values used to be in their own
121  # files.  Look for them, in case we're processing an old
122  # target_files zip.
123
124  if "mkyaffs2_extra_flags" not in d:
125    try:
126      d["mkyaffs2_extra_flags"] = read_helper(
127          "META/mkyaffs2-extra-flags.txt").strip()
128    except KeyError:
129      # ok if flags don't exist
130      pass
131
132  if "recovery_api_version" not in d:
133    try:
134      d["recovery_api_version"] = read_helper(
135          "META/recovery-api-version.txt").strip()
136    except KeyError:
137      raise ValueError("can't find recovery API version in input target-files")
138
139  if "tool_extensions" not in d:
140    try:
141      d["tool_extensions"] = read_helper("META/tool-extensions.txt").strip()
142    except KeyError:
143      # ok if extensions don't exist
144      pass
145
146  if "fstab_version" not in d:
147    d["fstab_version"] = "1"
148
149  try:
150    data = read_helper("META/imagesizes.txt")
151    for line in data.split("\n"):
152      if not line:
153        continue
154      name, value = line.split(" ", 1)
155      if not value:
156        continue
157      if name == "blocksize":
158        d[name] = value
159      else:
160        d[name + "_size"] = value
161  except KeyError:
162    pass
163
164  def makeint(key):
165    if key in d:
166      d[key] = int(d[key], 0)
167
168  makeint("recovery_api_version")
169  makeint("blocksize")
170  makeint("system_size")
171  makeint("vendor_size")
172  makeint("userdata_size")
173  makeint("cache_size")
174  makeint("recovery_size")
175  makeint("boot_size")
176  makeint("fstab_version")
177
178  d["fstab"] = LoadRecoveryFSTab(read_helper, d["fstab_version"])
179  d["build.prop"] = LoadBuildProp(read_helper)
180  return d
181
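# Illustrative LoadInfoDict() usage (hedged sketch; the paths are
# hypothetical).  The input may be either an unpacked target-files directory
# or an open zipfile.ZipFile:
#
#   OPTIONS.info_dict = LoadInfoDict("/tmp/targetfiles-unpacked")
#   # or: OPTIONS.info_dict = LoadInfoDict(zipfile.ZipFile("target_files.zip"))
#   DumpInfoDict(OPTIONS.info_dict)
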
182def LoadBuildProp(read_helper):
183  try:
184    data = read_helper("SYSTEM/build.prop")
185  except KeyError:
186    print "Warning: could not find SYSTEM/build.prop in input target-files"
187    data = ""
188  return LoadDictionaryFromLines(data.split("\n"))
189
190def LoadDictionaryFromLines(lines):
191  d = {}
192  for line in lines:
193    line = line.strip()
194    if not line or line.startswith("#"):
195      continue
196    if "=" in line:
197      name, value = line.split("=", 1)
198      d[name] = value
199  return d
200
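# Illustrative LoadDictionaryFromLines() behavior (hedged sketch; property
# values are hypothetical).  The parser accepts simple "key=value" lines and
# skips blanks and "#" comments:
#
#   d = LoadDictionaryFromLines([
#       "# build properties",
#       "ro.build.id=XYZ12",
#       "ro.build.type=userdebug",
#   ])
#   # d == {"ro.build.id": "XYZ12", "ro.build.type": "userdebug"}
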
201def LoadRecoveryFSTab(read_helper, fstab_version):
202  class Partition(object):
203    def __init__(self, mount_point, fs_type, device, length, device2):
204      self.mount_point = mount_point
205      self.fs_type = fs_type
206      self.device = device
207      self.length = length
208      self.device2 = device2
209
210  try:
211    data = read_helper("RECOVERY/RAMDISK/etc/recovery.fstab")
212  except KeyError:
213    print "Warning: could not find RECOVERY/RAMDISK/etc/recovery.fstab"
214    data = ""
215
216  if fstab_version == 1:
217    d = {}
218    for line in data.split("\n"):
219      line = line.strip()
220      if not line or line.startswith("#"):
221        continue
222      pieces = line.split()
223      if not 3 <= len(pieces) <= 4:
224        raise ValueError("malformed recovery.fstab line: \"%s\"" % (line,))
225      options = None
226      if len(pieces) >= 4:
227        if pieces[3].startswith("/"):
228          device2 = pieces[3]
229          if len(pieces) >= 5:
230            options = pieces[4]
231        else:
232          device2 = None
233          options = pieces[3]
234      else:
235        device2 = None
236
237      mount_point = pieces[0]
238      length = 0
239      if options:
240        options = options.split(",")
241        for i in options:
242          if i.startswith("length="):
243            length = int(i[7:])
244          else:
245            print "%s: unknown option \"%s\"" % (mount_point, i)
246
247      d[mount_point] = Partition(mount_point=mount_point, fs_type=pieces[1],
248                                 device=pieces[2], length=length,
249                                 device2=device2)
250
251  elif fstab_version == 2:
252    d = {}
253    for line in data.split("\n"):
254      line = line.strip()
255      if not line or line.startswith("#"):
256        continue
257      pieces = line.split()
258      if len(pieces) != 5:
259        raise ValueError("malformed recovery.fstab line: \"%s\"" % (line,))
260
261      # Ignore entries that are managed by vold
262      options = pieces[4]
263      if "voldmanaged=" in options:
264        continue
265
266      # It's a good line, parse it
267      length = 0
268      options = options.split(",")
269      for i in options:
270        if i.startswith("length="):
271          length = int(i[7:])
272        else:
273          # Ignore all unknown options in the unified fstab
274          continue
275
276      mount_point = pieces[1]
277      d[mount_point] = Partition(mount_point=mount_point, fs_type=pieces[2],
278                                 device=pieces[0], length=length, device2=None)
279
280  else:
281    raise ValueError("Unknown fstab_version: \"%d\"" % (fstab_version,))
282
283  return d
284
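# Illustrative LoadRecoveryFSTab() input (hedged sketch; the device path is
# hypothetical).  A fstab_version 2 line such as
#
#   /dev/block/platform/msm_sdcc.1/by-name/system  /system  ext4  ro  wait
#
# produces d["/system"] with fs_type "ext4", device set to the block device,
# and length 0 (since there is no "length=" option).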
285
286def DumpInfoDict(d):
287  for k, v in sorted(d.items()):
288    print "%-25s = (%s) %s" % (k, type(v).__name__, v)
289
290
291def BuildBootableImage(sourcedir, fs_config_file, info_dict=None):
292  """Take a kernel, cmdline, and ramdisk directory from the input (in
293  'sourcedir'), and turn them into a boot image.  Return the image
294  data, or None if sourcedir does not appear to contain files for
295  building the requested image."""
296
297  if (not os.access(os.path.join(sourcedir, "RAMDISK"), os.F_OK) or
298      not os.access(os.path.join(sourcedir, "kernel"), os.F_OK)):
299    return None
300
301  if info_dict is None:
302    info_dict = OPTIONS.info_dict
303
304  ramdisk_img = tempfile.NamedTemporaryFile()
305  img = tempfile.NamedTemporaryFile()
306
307  if os.access(fs_config_file, os.F_OK):
308    cmd = ["mkbootfs", "-f", fs_config_file, os.path.join(sourcedir, "RAMDISK")]
309  else:
310    cmd = ["mkbootfs", os.path.join(sourcedir, "RAMDISK")]
311  p1 = Run(cmd, stdout=subprocess.PIPE)
312  p2 = Run(["minigzip"],
313           stdin=p1.stdout, stdout=ramdisk_img.file.fileno())
314
315  p2.wait()
316  p1.wait()
317  assert p1.returncode == 0, "mkbootfs of %s ramdisk failed" % (sourcedir,)
318  assert p2.returncode == 0, "minigzip of %s ramdisk failed" % (sourcedir,)
319
320  # use MKBOOTIMG from environ, or "mkbootimg" if empty or not set
321  mkbootimg = os.getenv('MKBOOTIMG') or "mkbootimg"
322
323  cmd = [mkbootimg, "--kernel", os.path.join(sourcedir, "kernel")]
324
325  fn = os.path.join(sourcedir, "second")
326  if os.access(fn, os.F_OK):
327    cmd.append("--second")
328    cmd.append(fn)
329
330  fn = os.path.join(sourcedir, "cmdline")
331  if os.access(fn, os.F_OK):
332    cmd.append("--cmdline")
333    cmd.append(open(fn).read().rstrip("\n"))
334
335  fn = os.path.join(sourcedir, "base")
336  if os.access(fn, os.F_OK):
337    cmd.append("--base")
338    cmd.append(open(fn).read().rstrip("\n"))
339
340  fn = os.path.join(sourcedir, "pagesize")
341  if os.access(fn, os.F_OK):
342    cmd.append("--pagesize")
343    cmd.append(open(fn).read().rstrip("\n"))
344
345  args = info_dict.get("mkbootimg_args", None)
346  if args and args.strip():
347    cmd.extend(shlex.split(args))
348
349  cmd.extend(["--ramdisk", ramdisk_img.name,
350              "--output", img.name])
351
352  p = Run(cmd, stdout=subprocess.PIPE)
353  p.communicate()
354  assert p.returncode == 0, "mkbootimg of %s image failed" % (
355      os.path.basename(sourcedir),)
356
357  if info_dict.get("verity_key", None):
358    path = "/" + os.path.basename(sourcedir).lower()
359    cmd = ["boot_signer", path, img.name, info_dict["verity_key"] + ".pk8",
360           info_dict["verity_key"] + ".x509.pem", img.name]
361    p = Run(cmd, stdout=subprocess.PIPE)
362    p.communicate()
363    assert p.returncode == 0, "boot_signer of %s image failed" % path
364
365  img.seek(0, os.SEEK_SET)
366  data = img.read()
367
368  ramdisk_img.close()
369  img.close()
370
371  return data
372
373
374def GetBootableImage(name, prebuilt_name, unpack_dir, tree_subdir,
375                     info_dict=None):
376  """Return a File object (with name 'name') with the desired bootable
377  image.  Look for it in 'unpack_dir'/BOOTABLE_IMAGES under the name
378  'prebuilt_name', otherwise look for it under 'unpack_dir'/IMAGES,
379  otherwise construct it from the source files in
380  'unpack_dir'/'tree_subdir'."""
381
382  prebuilt_path = os.path.join(unpack_dir, "BOOTABLE_IMAGES", prebuilt_name)
383  if os.path.exists(prebuilt_path):
384    print "using prebuilt %s from BOOTABLE_IMAGES..." % (prebuilt_name,)
385    return File.FromLocalFile(name, prebuilt_path)
386
387  prebuilt_path = os.path.join(unpack_dir, "IMAGES", prebuilt_name)
388  if os.path.exists(prebuilt_path):
389    print "using prebuilt %s from IMAGES..." % (prebuilt_name,)
390    return File.FromLocalFile(name, prebuilt_path)
391
392  print "building image from target_files %s..." % (tree_subdir,)
393  fs_config = "META/" + tree_subdir.lower() + "_filesystem_config.txt"
394  data = BuildBootableImage(os.path.join(unpack_dir, tree_subdir),
395                            os.path.join(unpack_dir, fs_config),
396                            info_dict)
397  if data:
398    return File(name, data)
399  return None
400
401
402def UnzipTemp(filename, pattern=None):
403  """Unzip the given archive into a temporary directory.
404
405  If filename is of the form "foo.zip+bar.zip", unzip foo.zip into a
406  temp dir, then unzip bar.zip into that_dir/BOOTABLE_IMAGES.
407
408  Returns (tempdir, zipobj) where zipobj is a zipfile.ZipFile (of the
409  main file), open for reading.
410  """
411
412  tmp = tempfile.mkdtemp(prefix="targetfiles-")
413  OPTIONS.tempfiles.append(tmp)
414
415  def unzip_to_dir(filename, dirname):
416    cmd = ["unzip", "-o", "-q", filename, "-d", dirname]
417    if pattern is not None:
418      cmd.append(pattern)
419    p = Run(cmd, stdout=subprocess.PIPE)
420    p.communicate()
421    if p.returncode != 0:
422      raise ExternalError("failed to unzip input target-files \"%s\"" %
423                          (filename,))
424
425  m = re.match(r"^(.*[.]zip)\+(.*[.]zip)$", filename, re.IGNORECASE)
426  if m:
427    unzip_to_dir(m.group(1), tmp)
428    unzip_to_dir(m.group(2), os.path.join(tmp, "BOOTABLE_IMAGES"))
429    filename = m.group(1)
430  else:
431    unzip_to_dir(filename, tmp)
432
433  return tmp, zipfile.ZipFile(filename, "r")
434
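# Illustrative UnzipTemp() usage (hedged sketch; the filenames are
# hypothetical).  The "foo.zip+bar.zip" form is used when bootable images
# live in a separate archive:
#
#   tmpdir, input_zip = UnzipTemp("target_files.zip+bootable_images.zip")
#   OPTIONS.info_dict = LoadInfoDict(input_zip)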
435
436def GetKeyPasswords(keylist):
437  """Given a list of keys, prompt the user to enter passwords for
438  those which require them.  Return a {key: password} dict.  password
439  will be None if the key has no password."""
440
441  no_passwords = []
442  need_passwords = []
443  key_passwords = {}
444  devnull = open("/dev/null", "w+b")
445  for k in sorted(keylist):
446    # We don't need a password for things that aren't really keys.
447    if k in SPECIAL_CERT_STRINGS:
448      no_passwords.append(k)
449      continue
450
451    p = Run(["openssl", "pkcs8", "-in", k+OPTIONS.private_key_suffix,
452             "-inform", "DER", "-nocrypt"],
453            stdin=devnull.fileno(),
454            stdout=devnull.fileno(),
455            stderr=subprocess.STDOUT)
456    p.communicate()
457    if p.returncode == 0:
458      # Definitely an unencrypted key.
459      no_passwords.append(k)
460    else:
461      p = Run(["openssl", "pkcs8", "-in", k+OPTIONS.private_key_suffix,
462               "-inform", "DER", "-passin", "pass:"],
463              stdin=devnull.fileno(),
464              stdout=devnull.fileno(),
465              stderr=subprocess.PIPE)
466      _, stderr = p.communicate()
467      if p.returncode == 0:
468        # Encrypted key with empty string as password.
469        key_passwords[k] = ''
470      elif stderr.startswith('Error decrypting key'):
471        # Definitely encrypted key.
472        # It would have said "Error reading key" if it didn't parse correctly.
473        need_passwords.append(k)
474      else:
475        # Potentially, a type of key that openssl doesn't understand.
476        # We'll let the routines in signapk.jar handle it.
477        no_passwords.append(k)
478  devnull.close()
479
480  key_passwords.update(PasswordManager().GetPasswords(need_passwords))
481  key_passwords.update(dict.fromkeys(no_passwords, None))
482  return key_passwords
483
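# Illustrative GetKeyPasswords() usage (hedged sketch; the key names are
# hypothetical).  Keys are named by their path minus the suffixes, so
# "release" below refers to release.pk8 / release.x509.pem:
#
#   passwords = GetKeyPasswords(["build/target/product/security/testkey",
#                                "vendor/example/keys/release"])
#   # passwords[k] is None for keys that have no password.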
484
485def SignFile(input_name, output_name, key, password, align=None,
486             whole_file=False):
487  """Sign the input_name zip/jar/apk, producing output_name.  Use the
488  given key and password (the latter may be None if the key does not
489  have a password).
490
491  If align is an integer > 1, zipalign is run to align stored files in
492  the output zip on 'align'-byte boundaries.
493
494  If whole_file is true, use the "-w" option to SignApk to embed a
495  signature that covers the whole file in the archive comment of the
496  zip file.
497  """
498
499  if align == 0 or align == 1:
500    align = None
501
502  if align:
503    temp = tempfile.NamedTemporaryFile()
504    sign_name = temp.name
505  else:
506    sign_name = output_name
507
508  cmd = [OPTIONS.java_path, OPTIONS.java_args, "-jar",
509         os.path.join(OPTIONS.search_path, OPTIONS.signapk_path)]
510  cmd.extend(OPTIONS.extra_signapk_args)
511  if whole_file:
512    cmd.append("-w")
513  cmd.extend([key + OPTIONS.public_key_suffix,
514              key + OPTIONS.private_key_suffix,
515              input_name, sign_name])
516
517  p = Run(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
518  if password is not None:
519    password += "\n"
520  p.communicate(password)
521  if p.returncode != 0:
522    raise ExternalError("signapk.jar failed: return code %s" % (p.returncode,))
523
524  if align:
525    p = Run(["zipalign", "-f", str(align), sign_name, output_name])
526    p.communicate()
527    if p.returncode != 0:
528      raise ExternalError("zipalign failed: return code %s" % (p.returncode,))
529    temp.close()
530
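# Illustrative SignFile() usage (hedged sketch; the filenames and key path
# are hypothetical).  Re-sign an APK and align stored entries on 4-byte
# boundaries:
#
#   key = "vendor/example/keys/platform"
#   passwords = GetKeyPasswords([key])
#   SignFile("Settings.apk", "Settings.signed.apk", key, passwords[key],
#            align=4)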
531
532def CheckSize(data, target, info_dict):
533  """Check the data string passed against the max size limit, if
534  any, for the given target.  Raise an exception if the data is too big.
535  Print a warning if the data is nearing the maximum size."""
536
537  if target.endswith(".img"):
538    target = target[:-4]
539  mount_point = "/" + target
540
541  fs_type = None
542  limit = None
543  if info_dict["fstab"]:
544    if mount_point == "/userdata":
545      mount_point = "/data"
546    p = info_dict["fstab"][mount_point]
547    fs_type = p.fs_type
548    device = p.device
549    if "/" in device:
550      device = device[device.rfind("/")+1:]
551    limit = info_dict.get(device + "_size", None)
552  if not fs_type or not limit:
553    return
554
555  if fs_type == "yaffs2":
556    # image size should be increased by 1/64th to account for the
557    # spare area (64 bytes per 2k page)
558    limit = limit / 2048 * (2048+64)
559  size = len(data)
560  pct = float(size) * 100.0 / limit
561  msg = "%s size (%d) is %.2f%% of limit (%d)" % (target, size, pct, limit)
562  if pct >= 99.0:
563    raise ExternalError(msg)
564  elif pct >= 95.0:
565    print
566    print "  WARNING: ", msg
567    print
568  elif OPTIONS.verbose:
569    print "  ", msg
570
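# Worked CheckSize() example (hedged sketch with hypothetical numbers): for a
# yaffs2 partition with a raw limit of 209715200 bytes (200 MiB), the
# effective limit becomes 209715200 / 2048 * (2048 + 64) = 216268800 bytes.
# An image of 205520896 bytes (196 MiB) is then about 95.03% of the limit,
# which prints the warning but does not raise.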
571
572def ReadApkCerts(tf_zip):
573  """Given a target_files ZipFile, parse the META/apkcerts.txt file
574  and return a {package: cert} dict."""
575  certmap = {}
576  for line in tf_zip.read("META/apkcerts.txt").split("\n"):
577    line = line.strip()
578    if not line:
579      continue
580    m = re.match(r'^name="(.*)"\s+certificate="(.*)"\s+'
581                 r'private_key="(.*)"$', line)
582    if m:
583      name, cert, privkey = m.groups()
584      public_key_suffix_len = len(OPTIONS.public_key_suffix)
585      private_key_suffix_len = len(OPTIONS.private_key_suffix)
586      if cert in SPECIAL_CERT_STRINGS and not privkey:
587        certmap[name] = cert
588      elif (cert.endswith(OPTIONS.public_key_suffix) and
589            privkey.endswith(OPTIONS.private_key_suffix) and
590            cert[:-public_key_suffix_len] == privkey[:-private_key_suffix_len]):
591        certmap[name] = cert[:-public_key_suffix_len]
592      else:
593        raise ValueError("failed to parse line from apkcerts.txt:\n" + line)
594  return certmap
595
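# Illustrative ReadApkCerts() input (hedged sketch; the key path is one
# plausible example).  A typical META/apkcerts.txt line looks like
#
#   name="Settings.apk" certificate="build/target/product/security/platform.x509.pem" private_key="build/target/product/security/platform.pk8"
#
# and maps to certmap["Settings.apk"] ==
# "build/target/product/security/platform" (the shared prefix with both
# suffixes stripped).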
596
597COMMON_DOCSTRING = """
598  -p  (--path)  <dir>
599      Prepend <dir>/bin to the list of places to search for binaries
600      run by this script, and expect to find jars in <dir>/framework.
601
602  -s  (--device_specific) <file>
603      Path to the python module containing device-specific
604      releasetools code.
605
606  -x  (--extra)  <key=value>
607      Add a key/value pair to the 'extras' dict, which device-specific
608      extension code may look at.
609
610  -v  (--verbose)
611      Show command lines being executed.
612
613  -h  (--help)
614      Display this usage message and exit.
615"""
616
617def Usage(docstring):
618  print docstring.rstrip("\n")
619  print COMMON_DOCSTRING
620
621
622def ParseOptions(argv,
623                 docstring,
624                 extra_opts="", extra_long_opts=(),
625                 extra_option_handler=None):
626  """Parse the options in argv and return any arguments that aren't
627  flags.  docstring is the calling module's docstring, to be displayed
628  for errors and -h.  extra_opts and extra_long_opts are for flags
629  defined by the caller, which are processed by passing them to
630  extra_option_handler."""
631
632  try:
633    opts, args = getopt.getopt(
634        argv, "hvp:s:x:" + extra_opts,
635        ["help", "verbose", "path=", "signapk_path=", "extra_signapk_args=",
636         "java_path=", "java_args=", "public_key_suffix=",
637         "private_key_suffix=", "device_specific=", "extra="] +
638        list(extra_long_opts))
639  except getopt.GetoptError as err:
640    Usage(docstring)
641    print "**", str(err), "**"
642    sys.exit(2)
643
644  for o, a in opts:
645    if o in ("-h", "--help"):
646      Usage(docstring)
647      sys.exit()
648    elif o in ("-v", "--verbose"):
649      OPTIONS.verbose = True
650    elif o in ("-p", "--path"):
651      OPTIONS.search_path = a
652    elif o in ("--signapk_path",):
653      OPTIONS.signapk_path = a
654    elif o in ("--extra_signapk_args",):
655      OPTIONS.extra_signapk_args = shlex.split(a)
656    elif o in ("--java_path",):
657      OPTIONS.java_path = a
658    elif o in ("--java_args",):
659      OPTIONS.java_args = a
660    elif o in ("--public_key_suffix",):
661      OPTIONS.public_key_suffix = a
662    elif o in ("--private_key_suffix",):
663      OPTIONS.private_key_suffix = a
664    elif o in ("-s", "--device_specific"):
665      OPTIONS.device_specific = a
666    elif o in ("-x", "--extra"):
667      key, value = a.split("=", 1)
668      OPTIONS.extras[key] = value
669    else:
670      if extra_option_handler is None or not extra_option_handler(o, a):
671        assert False, "unknown option \"%s\"" % (o,)
672
673  if OPTIONS.search_path:
674    os.environ["PATH"] = (os.path.join(OPTIONS.search_path, "bin") +
675                          os.pathsep + os.environ["PATH"])
676
677  return args
678
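# Illustrative ParseOptions() usage (hedged sketch; the extra option name is
# hypothetical).  A calling script supplies its own flags through
# extra_long_opts and an extra_option_handler:
#
#   def option_handler(o, a):
#     if o == "--board_config":
#       return True      # accept and ignore
#     return False
#
#   args = ParseOptions(sys.argv[1:], __doc__,
#                       extra_long_opts=["board_config="],
#                       extra_option_handler=option_handler)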
679
680def MakeTempFile(prefix=None, suffix=None):
681  """Make a temp file and add it to the list of things to be deleted
682  when Cleanup() is called.  Return the filename."""
683  fd, fn = tempfile.mkstemp(prefix=prefix, suffix=suffix)
684  os.close(fd)
685  OPTIONS.tempfiles.append(fn)
686  return fn
687
688
689def Cleanup():
690  for i in OPTIONS.tempfiles:
691    if os.path.isdir(i):
692      shutil.rmtree(i)
693    else:
694      os.remove(i)
695
696
697class PasswordManager(object):
698  def __init__(self):
699    self.editor = os.getenv("EDITOR", None)
700    self.pwfile = os.getenv("ANDROID_PW_FILE", None)
701
702  def GetPasswords(self, items):
703    """Get passwords corresponding to each string in 'items',
704    returning a dict.  (The dict may have keys in addition to the
705    values in 'items'.)
706
707    Uses the passwords in $ANDROID_PW_FILE if available, letting the
708    user edit that file to add more needed passwords.  If no editor is
709    available, or $ANDROID_PW_FILE isn't defined, prompts the user
710    interactively in the ordinary way.
711    """
712
713    current = self.ReadFile()
714
715    first = True
716    while True:
717      missing = []
718      for i in items:
719        if i not in current or not current[i]:
720          missing.append(i)
721      # Are all the passwords already in the file?
722      if not missing:
723        return current
724
725      for i in missing:
726        current[i] = ""
727
728      if not first:
729        print "key file %s still missing some passwords." % (self.pwfile,)
730        answer = raw_input("try to edit again? [y]> ").strip()
731        if answer and answer[0] not in 'yY':
732          raise RuntimeError("key passwords unavailable")
733      first = False
734
735      current = self.UpdateAndReadFile(current)
736
737  def PromptResult(self, current): # pylint: disable=no-self-use
738    """Prompt the user to enter a value (password) for each key in
739    'current' whose value is false.  Returns a new dict with all the
740    values.
741    """
742    result = {}
743    for k, v in sorted(current.iteritems()):
744      if v:
745        result[k] = v
746      else:
747        while True:
748          result[k] = getpass.getpass(
749              "Enter password for %s key> " % k).strip()
750          if result[k]:
751            break
752    return result
753
754  def UpdateAndReadFile(self, current):
755    if not self.editor or not self.pwfile:
756      return self.PromptResult(current)
757
758    f = open(self.pwfile, "w")
759    os.chmod(self.pwfile, 0o600)
760    f.write("# Enter key passwords between the [[[ ]]] brackets.\n")
761    f.write("# (Additional spaces are harmless.)\n\n")
762
763    first_line = None
764    sorted_list = sorted([(not v, k, v) for (k, v) in current.iteritems()])
765    for i, (_, k, v) in enumerate(sorted_list):
766      f.write("[[[  %s  ]]] %s\n" % (v, k))
767      if not v and first_line is None:
768        # position cursor on first line with no password.
769        first_line = i + 4
770    f.close()
771
772    p = Run([self.editor, "+%d" % (first_line,), self.pwfile])
773    _, _ = p.communicate()
774
775    return self.ReadFile()
776
777  def ReadFile(self):
778    result = {}
779    if self.pwfile is None:
780      return result
781    try:
782      f = open(self.pwfile, "r")
783      for line in f:
784        line = line.strip()
785        if not line or line[0] == '#':
786          continue
787        m = re.match(r"^\[\[\[\s*(.*?)\s*\]\]\]\s*(\S+)$", line)
788        if not m:
789          print "failed to parse password file: ", line
790        else:
791          result[m.group(2)] = m.group(1)
792      f.close()
793    except IOError as e:
794      if e.errno != errno.ENOENT:
795        print "error reading password file: ", str(e)
796    return result
797
798
799def ZipWrite(zip_file, filename, arcname=None, perms=0o644,
800             compress_type=None):
801  import datetime
802
803  # http://b/18015246
804  # Python 2.7's zipfile implementation wrongly thinks that zip64 is required
805  # for files larger than 2GiB. We can work around this by adjusting their
806  # limit. Note that `zipfile.writestr()` will not work for strings larger than
807  # 2GiB. The Python interpreter sometimes rejects strings that large (though
808  # it isn't clear to me exactly what circumstances cause this).
809  # `zipfile.write()` must be used directly to work around this.
810  #
811  # This mess can be avoided if we port to python3.
812  saved_zip64_limit = zipfile.ZIP64_LIMIT
813  zipfile.ZIP64_LIMIT = (1 << 32) - 1
814
815  if compress_type is None:
816    compress_type = zip_file.compression
817  if arcname is None:
818    arcname = filename
819
820  saved_stat = os.stat(filename)
821
822  try:
823    # `zipfile.write()` doesn't allow us to pass ZipInfo, so just modify the
824    # file to be zipped and reset it when we're done.
825    os.chmod(filename, perms)
826
827    # Use a fixed timestamp so the output is repeatable.
828    epoch = datetime.datetime.fromtimestamp(0)
829    timestamp = (datetime.datetime(2009, 1, 1) - epoch).total_seconds()
830    os.utime(filename, (timestamp, timestamp))
831
832    zip_file.write(filename, arcname=arcname, compress_type=compress_type)
833  finally:
834    os.chmod(filename, saved_stat.st_mode)
835    os.utime(filename, (saved_stat.st_atime, saved_stat.st_mtime))
836    zipfile.ZIP64_LIMIT = saved_zip64_limit
837
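# Illustrative ZipWrite() usage (hedged sketch; the filenames are
# hypothetical).  Add a file under a different archive name, stored without
# compression:
#
#   out_zip = zipfile.ZipFile("update.zip", "w", zipfile.ZIP_DEFLATED)
#   ZipWrite(out_zip, "/tmp/system.new.dat", arcname="system.new.dat",
#            compress_type=zipfile.ZIP_STORED)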
838
839def ZipWriteStr(zip_file, filename, data, perms=0o644, compression=None):
840  # use a fixed timestamp so the output is repeatable.
841  zinfo = zipfile.ZipInfo(filename=filename,
842                          date_time=(2009, 1, 1, 0, 0, 0))
843  if compression is None:
844    zinfo.compress_type = zip_file.compression
845  else:
846    zinfo.compress_type = compression
847  zinfo.external_attr = perms << 16
848  zip_file.writestr(zinfo, data)
849
850
851class DeviceSpecificParams(object):
852  module = None
853  def __init__(self, **kwargs):
854    """Keyword arguments to the constructor become attributes of this
855    object, which is passed to all functions in the device-specific
856    module."""
857    for k, v in kwargs.iteritems():
858      setattr(self, k, v)
859    self.extras = OPTIONS.extras
860
861    if self.module is None:
862      path = OPTIONS.device_specific
863      if not path:
864        return
865      try:
866        if os.path.isdir(path):
867          info = imp.find_module("releasetools", [path])
868        else:
869          d, f = os.path.split(path)
870          b, x = os.path.splitext(f)
871          if x == ".py":
872            f = b
873          info = imp.find_module(f, [d])
874        print "loaded device-specific extensions from", path
875        self.module = imp.load_module("device_specific", *info)
876      except ImportError:
877        print "unable to load device-specific module; assuming none"
878
879  def _DoCall(self, function_name, *args, **kwargs):
880    """Call the named function in the device-specific module, passing
881    the given args and kwargs.  The first argument to the call will be
882    the DeviceSpecific object itself.  If there is no module, or the
883    module does not define the function, return the value of the
884    'default' kwarg (which itself defaults to None)."""
885    if self.module is None or not hasattr(self.module, function_name):
886      return kwargs.get("default", None)
887    return getattr(self.module, function_name)(*((self,) + args), **kwargs)
888
889  def FullOTA_Assertions(self):
890    """Called after emitting the block of assertions at the top of a
891    full OTA package.  Implementations can add whatever additional
892    assertions they like."""
893    return self._DoCall("FullOTA_Assertions")
894
895  def FullOTA_InstallBegin(self):
896    """Called at the start of full OTA installation."""
897    return self._DoCall("FullOTA_InstallBegin")
898
899  def FullOTA_InstallEnd(self):
900    """Called at the end of full OTA installation; typically this is
901    used to install the image for the device's baseband processor."""
902    return self._DoCall("FullOTA_InstallEnd")
903
904  def IncrementalOTA_Assertions(self):
905    """Called after emitting the block of assertions at the top of an
906    incremental OTA package.  Implementations can add whatever
907    additional assertions they like."""
908    return self._DoCall("IncrementalOTA_Assertions")
909
910  def IncrementalOTA_VerifyBegin(self):
911    """Called at the start of the verification phase of incremental
912    OTA installation; additional checks can be placed here to abort
913    the script before any changes are made."""
914    return self._DoCall("IncrementalOTA_VerifyBegin")
915
916  def IncrementalOTA_VerifyEnd(self):
917    """Called at the end of the verification phase of incremental OTA
918    installation; additional checks can be placed here to abort the
919    script before any changes are made."""
920    return self._DoCall("IncrementalOTA_VerifyEnd")
921
922  def IncrementalOTA_InstallBegin(self):
923    """Called at the start of incremental OTA installation (after
924    verification is complete)."""
925    return self._DoCall("IncrementalOTA_InstallBegin")
926
927  def IncrementalOTA_InstallEnd(self):
928    """Called at the end of incremental OTA installation; typically
929    this is used to install the image for the device's baseband
930    processor."""
931    return self._DoCall("IncrementalOTA_InstallEnd")
932
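# Illustrative device-specific extension (hedged sketch).  The module pointed
# to by -s/--device_specific defines plain functions that receive the
# DeviceSpecificParams object as their first argument, e.g.:
#
#   def FullOTA_InstallEnd(info):
#     info.script.Print("Flashing hypothetical baseband image...")
#     # info.script, info.input_zip, etc. are whatever keyword arguments
#     # the calling script passed to the DeviceSpecificParams constructor.
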
933class File(object):
934  def __init__(self, name, data):
935    self.name = name
936    self.data = data
937    self.size = len(data)
938    self.sha1 = sha1(data).hexdigest()
939
940  @classmethod
941  def FromLocalFile(cls, name, diskname):
942    f = open(diskname, "rb")
943    data = f.read()
944    f.close()
945    return File(name, data)
946
947  def WriteToTemp(self):
948    t = tempfile.NamedTemporaryFile()
949    t.write(self.data)
950    t.flush()
951    return t
952
953  def AddToZip(self, z, compression=None):
954    ZipWriteStr(z, self.name, self.data, compression=compression)
955
956DIFF_PROGRAM_BY_EXT = {
957    ".gz" : "imgdiff",
958    ".zip" : ["imgdiff", "-z"],
959    ".jar" : ["imgdiff", "-z"],
960    ".apk" : ["imgdiff", "-z"],
961    ".img" : "imgdiff",
962    }
963
964class Difference(object):
965  def __init__(self, tf, sf, diff_program=None):
966    self.tf = tf
967    self.sf = sf
968    self.patch = None
969    self.diff_program = diff_program
970
971  def ComputePatch(self):
972    """Compute the patch (as a string of data) needed to turn sf into
973    tf.  Returns the same tuple as GetPatch()."""
974
975    tf = self.tf
976    sf = self.sf
977
978    if self.diff_program:
979      diff_program = self.diff_program
980    else:
981      ext = os.path.splitext(tf.name)[1]
982      diff_program = DIFF_PROGRAM_BY_EXT.get(ext, "bsdiff")
983
984    ttemp = tf.WriteToTemp()
985    stemp = sf.WriteToTemp()
986
987    ext = os.path.splitext(tf.name)[1]
988
989    try:
990      ptemp = tempfile.NamedTemporaryFile()
991      if isinstance(diff_program, list):
992        cmd = copy.copy(diff_program)
993      else:
994        cmd = [diff_program]
995      cmd.append(stemp.name)
996      cmd.append(ttemp.name)
997      cmd.append(ptemp.name)
998      p = Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
999      err = []
1000      def run():
1001        _, e = p.communicate()
1002        if e:
1003          err.append(e)
1004      th = threading.Thread(target=run)
1005      th.start()
1006      th.join(timeout=300)   # 5 mins
1007      if th.is_alive():
1008        print "WARNING: diff command timed out"
1009        p.terminate()
1010        th.join(5)
1011        if th.is_alive():
1012          p.kill()
1013          th.join()
1014
1015      if err or p.returncode != 0:
1016        print "WARNING: failure running %s:\n%s\n" % (
1017            diff_program, "".join(err))
1018        self.patch = None
1019        return None, None, None
1020      diff = ptemp.read()
1021    finally:
1022      ptemp.close()
1023      stemp.close()
1024      ttemp.close()
1025
1026    self.patch = diff
1027    return self.tf, self.sf, self.patch
1028
1029
1030  def GetPatch(self):
1031    """Return a tuple (target_file, source_file, patch_data).
1032    patch_data may be None if ComputePatch hasn't been called, or if
1033    computing the patch failed."""
1034    return self.tf, self.sf, self.patch
1035
1036
1037def ComputeDifferences(diffs):
1038  """Call ComputePatch on all the Difference objects in 'diffs'."""
1039  print len(diffs), "diffs to compute"
1040
1041  # Do the largest files first, to try and reduce the long-pole effect.
1042  by_size = [(i.tf.size, i) for i in diffs]
1043  by_size.sort(reverse=True)
1044  by_size = [i[1] for i in by_size]
1045
1046  lock = threading.Lock()
1047  diff_iter = iter(by_size)   # accessed under lock
1048
1049  def worker():
1050    try:
1051      lock.acquire()
1052      for d in diff_iter:
1053        lock.release()
1054        start = time.time()
1055        d.ComputePatch()
1056        dur = time.time() - start
1057        lock.acquire()
1058
1059        tf, sf, patch = d.GetPatch()
1060        if sf.name == tf.name:
1061          name = tf.name
1062        else:
1063          name = "%s (%s)" % (tf.name, sf.name)
1064        if patch is None:
1065          print "patching failed!                                  %s" % (name,)
1066        else:
1067          print "%8.2f sec %8d / %8d bytes (%6.2f%%) %s" % (
1068              dur, len(patch), tf.size, 100.0 * len(patch) / tf.size, name)
1069      lock.release()
1070    except Exception as e:
1071      print e
1072      raise
1073
1074  # start worker threads; wait for them all to finish.
1075  threads = [threading.Thread(target=worker)
1076             for i in range(OPTIONS.worker_threads)]
1077  for th in threads:
1078    th.start()
1079  while threads:
1080    threads.pop().join()
1081
1082
1083class BlockDifference(object):
1084  def __init__(self, partition, tgt, src=None, check_first_block=False,
1085               version=None):
1086    self.tgt = tgt
1087    self.src = src
1088    self.partition = partition
1089    self.check_first_block = check_first_block
1090
1091    if version is None:
1092      version = 1
1093      if OPTIONS.info_dict:
1094        version = max(
1095            int(i) for i in
1096            OPTIONS.info_dict.get("blockimgdiff_versions", "1").split(","))
1097    self.version = version
1098
1099    b = blockimgdiff.BlockImageDiff(tgt, src, threads=OPTIONS.worker_threads,
1100                                    version=self.version)
1101    tmpdir = tempfile.mkdtemp()
1102    OPTIONS.tempfiles.append(tmpdir)
1103    self.path = os.path.join(tmpdir, partition)
1104    b.Compute(self.path)
1105
1106    _, self.device = GetTypeAndDevice("/" + partition, OPTIONS.info_dict)
1107
1108  def WriteScript(self, script, output_zip, progress=None):
1109    if not self.src:
1110      # write the output unconditionally
1111      script.Print("Patching %s image unconditionally..." % (self.partition,))
1112    else:
1113      script.Print("Patching %s image after verification." % (self.partition,))
1114
1115    if progress:
1116      script.ShowProgress(progress, 0)
1117    self._WriteUpdate(script, output_zip)
1118
1119  def WriteVerifyScript(self, script):
1120    partition = self.partition
1121    if not self.src:
1122      script.Print("Image %s will be patched unconditionally." % (partition,))
1123    else:
1124      if self.version >= 3:
1125        script.AppendExtra(('if block_image_verify("%s", '
1126                            'package_extract_file("%s.transfer.list"), '
1127                            '"%s.new.dat", "%s.patch.dat") then') %
1128                           (self.device, partition, partition, partition))
1129      else:
1130        script.AppendExtra('if range_sha1("%s", "%s") == "%s" then' % (
1131            self.device, self.src.care_map.to_string_raw(),
1132            self.src.TotalSha1()))
1133      script.Print('Verified %s image...' % (partition,))
1134      script.AppendExtra('else')
1135
1136      # When generating incrementals for the system and vendor partitions,
1137      # explicitly check the first block (which contains the superblock) of
1138      # the partition to see if it's what we expect. If this check fails,
1139      # give an explicit log message about the partition having been
1140      # remounted R/W (the most likely explanation) and the need to flash to
1141      # get OTAs working again.
1142      if self.check_first_block:
1143        self._CheckFirstBlock(script)
1144
1145      # Abort the OTA update. Note that the incremental OTA cannot be applied
1146      # even if it may match the checksum of the target partition.
1147      # a) If version < 3, operations like move and erase will make changes
1148      #    unconditionally and damage the partition.
1149      # b) If version >= 3, it won't even reach here.
1150      script.AppendExtra(('abort("%s partition has unexpected contents");\n'
1151                          'endif;') % (partition,))
1152
1153  def _WriteUpdate(self, script, output_zip):
1154    ZipWrite(output_zip,
1155             '{}.transfer.list'.format(self.path),
1156             '{}.transfer.list'.format(self.partition))
1157    ZipWrite(output_zip,
1158             '{}.new.dat'.format(self.path),
1159             '{}.new.dat'.format(self.partition))
1160    ZipWrite(output_zip,
1161             '{}.patch.dat'.format(self.path),
1162             '{}.patch.dat'.format(self.partition),
1163             compress_type=zipfile.ZIP_STORED)
1164
1165    call = ('block_image_update("{device}", '
1166            'package_extract_file("{partition}.transfer.list"), '
1167            '"{partition}.new.dat", "{partition}.patch.dat");\n'.format(
1168                device=self.device, partition=self.partition))
1169    script.AppendExtra(script.WordWrap(call))
1170
1171  def _HashBlocks(self, source, ranges): # pylint: disable=no-self-use
1172    data = source.ReadRangeSet(ranges)
1173    ctx = sha1()
1174
1175    for p in data:
1176      ctx.update(p)
1177
1178    return ctx.hexdigest()
1179
1180  def _CheckFirstBlock(self, script):
1181    r = rangelib.RangeSet((0, 1))
1182    srchash = self._HashBlocks(self.src, r)
1183
1184    script.AppendExtra(('(range_sha1("%s", "%s") == "%s") || '
1185                        'abort("%s has been remounted R/W; '
1186                        'reflash device to reenable OTA updates");')
1187                       % (self.device, r.to_string_raw(), srchash,
1188                          self.device))
1189
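# Illustrative BlockDifference usage (hedged sketch; system_tgt/system_src
# are hypothetical image objects for the new and old builds).  An incremental
# OTA script might emit a verified block-based system update like this:
#
#   system_diff = BlockDifference("system", tgt=system_tgt, src=system_src,
#                                 check_first_block=True)
#   system_diff.WriteVerifyScript(script)
#   system_diff.WriteScript(script, output_zip, progress=0.8)
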
1190DataImage = blockimgdiff.DataImage
1191
1192
1193# map recovery.fstab's fs_types to mount/format "partition types"
1194PARTITION_TYPES = {
1195    "yaffs2": "MTD",
1196    "mtd": "MTD",
1197    "ext4": "EMMC",
1198    "emmc": "EMMC",
1199    "f2fs": "EMMC"
1200}
1201
1202def GetTypeAndDevice(mount_point, info):
1203  fstab = info["fstab"]
1204  if fstab:
1205    return (PARTITION_TYPES[fstab[mount_point].fs_type],
1206            fstab[mount_point].device)
1207  else:
1208    raise KeyError
1209
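# Illustrative GetTypeAndDevice() result (hedged sketch, reusing the
# hypothetical fstab entry shown above for LoadRecoveryFSTab):
#
#   GetTypeAndDevice("/system", OPTIONS.info_dict)
#   # -> ("EMMC", "/dev/block/platform/msm_sdcc.1/by-name/system")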
1210
1211def ParseCertificate(data):
1212  """Parse a PEM-format certificate."""
1213  cert = []
1214  save = False
1215  for line in data.split("\n"):
1216    if "--END CERTIFICATE--" in line:
1217      break
1218    if save:
1219      cert.append(line)
1220    if "--BEGIN CERTIFICATE--" in line:
1221      save = True
1222  cert = "".join(cert).decode('base64')
1223  return cert
1224
1225def MakeRecoveryPatch(input_dir, output_sink, recovery_img, boot_img,
1226                      info_dict=None):
1227  """Generate a binary patch that creates the recovery image starting
1228  with the boot image.  (Most of the space in these images is just the
1229  kernel, which is identical for the two, so the resulting patch
1230  should be efficient.)  Add it to the output zip, along with a shell
1231  script that is run from init.rc on first boot to actually do the
1232  patching and install the new recovery image.
1233
1234  recovery_img and boot_img should be File objects for the
1235  corresponding images.  info_dict should be the dictionary returned by
1236  common.LoadInfoDict() on the input target_files.
1237  """
1238
1239  if info_dict is None:
1240    info_dict = OPTIONS.info_dict
1241
1242  diff_program = ["imgdiff"]
1243  path = os.path.join(input_dir, "SYSTEM", "etc", "recovery-resource.dat")
1244  if os.path.exists(path):
1245    diff_program.append("-b")
1246    diff_program.append(path)
1247    bonus_args = "-b /system/etc/recovery-resource.dat"
1248  else:
1249    bonus_args = ""
1250
1251  d = Difference(recovery_img, boot_img, diff_program=diff_program)
1252  _, _, patch = d.ComputePatch()
1253  output_sink("recovery-from-boot.p", patch)
1254
1255  try:
1256    boot_type, boot_device = GetTypeAndDevice("/boot", info_dict)
1257    recovery_type, recovery_device = GetTypeAndDevice("/recovery", info_dict)
1258  except KeyError:
1259    return
1260
1261  sh = """#!/system/bin/sh
1262if ! applypatch -c %(recovery_type)s:%(recovery_device)s:%(recovery_size)d:%(recovery_sha1)s; then
1263  applypatch %(bonus_args)s %(boot_type)s:%(boot_device)s:%(boot_size)d:%(boot_sha1)s %(recovery_type)s:%(recovery_device)s %(recovery_sha1)s %(recovery_size)d %(boot_sha1)s:/system/recovery-from-boot.p && log -t recovery "Installing new recovery image: succeeded" || log -t recovery "Installing new recovery image: failed"
1264else
1265  log -t recovery "Recovery image already installed"
1266fi
1267""" % {'boot_size': boot_img.size,
1268       'boot_sha1': boot_img.sha1,
1269       'recovery_size': recovery_img.size,
1270       'recovery_sha1': recovery_img.sha1,
1271       'boot_type': boot_type,
1272       'boot_device': boot_device,
1273       'recovery_type': recovery_type,
1274       'recovery_device': recovery_device,
1275       'bonus_args': bonus_args}
1276
1277  # The install script location moved from /system/etc to /system/bin
1278  # in the L release.  Parse the init.rc file to find out where the
1279  # target-files expects it to be, and put it there.
1280  sh_location = "etc/install-recovery.sh"
1281  try:
1282    with open(os.path.join(input_dir, "BOOT", "RAMDISK", "init.rc")) as f:
1283      for line in f:
1284        m = re.match(r"^service flash_recovery /system/(\S+)\s*$", line)
1285        if m:
1286          sh_location = m.group(1)
1287          print "putting script in", sh_location
1288          break
1289  except (OSError, IOError) as e:
1290    print "failed to read init.rc: %s" % (e,)
1291
1292  output_sink(sh_location, sh)
1293