common.py revision 8e0178d41b9eeb6754eda07292d78762e3169140
1# Copyright (C) 2008 The Android Open Source Project
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7#      http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14
15import copy
16import errno
17import getopt
18import getpass
19import imp
20import os
21import platform
22import re
23import shlex
24import shutil
25import subprocess
26import sys
27import tempfile
28import threading
29import time
30import zipfile
31
32import blockimgdiff
33from rangelib import RangeSet
34
35try:
36  from hashlib import sha1 as sha1
37except ImportError:
38  from sha import sha as sha1
39
40# missing in Python 2.4 and before
41if not hasattr(os, "SEEK_SET"):
42  os.SEEK_SET = 0
43
44class Options(object): pass
45OPTIONS = Options()
46
47DEFAULT_SEARCH_PATH_BY_PLATFORM = {
48    "linux2": "out/host/linux-x86",
49    "darwin": "out/host/darwin-x86",
50    }
51OPTIONS.search_path = DEFAULT_SEARCH_PATH_BY_PLATFORM.get(sys.platform, None)
52
53OPTIONS.signapk_path = "framework/signapk.jar"  # Relative to search_path
54OPTIONS.extra_signapk_args = []
55OPTIONS.java_path = "java"  # Use the one on the path by default.
56OPTIONS.java_args = "-Xmx2048m" # JVM Args
57OPTIONS.public_key_suffix = ".x509.pem"
58OPTIONS.private_key_suffix = ".pk8"
59OPTIONS.verbose = False
60OPTIONS.tempfiles = []
61OPTIONS.device_specific = None
62OPTIONS.extras = {}
63OPTIONS.info_dict = None
64
65
66# Values for "certificate" in apkcerts that mean special things.
67SPECIAL_CERT_STRINGS = ("PRESIGNED", "EXTERNAL")
68
69
70class ExternalError(RuntimeError): pass
71
72
73def Run(args, **kwargs):
74  """Create and return a subprocess.Popen object, printing the command
75  line on the terminal if -v was specified."""
76  if OPTIONS.verbose:
77    print "  running: ", " ".join(args)
78  return subprocess.Popen(args, **kwargs)
79
80
81def CloseInheritedPipes():
82  """ Gmake in MAC OS has file descriptor (PIPE) leak. We close those fds
83  before doing other work."""
84  if platform.system() != "Darwin":
85    return
86  for d in range(3, 1025):
87    try:
88      stat = os.fstat(d)
89      if stat is not None:
90        pipebit = stat[0] & 0x1000
91        if pipebit != 0:
92          os.close(d)
93    except OSError:
94      pass
95
96
97def LoadInfoDict(input):
98  """Read and parse the META/misc_info.txt key/value pairs from the
99  input target files and return a dict."""
100
101  def read_helper(fn):
102    if isinstance(input, zipfile.ZipFile):
103      return input.read(fn)
104    else:
105      path = os.path.join(input, *fn.split("/"))
106      try:
107        with open(path) as f:
108          return f.read()
109      except IOError, e:
110        if e.errno == errno.ENOENT: raise KeyError(fn)
111        raise
112  d = {}
113  try:
114    d = LoadDictionaryFromLines(read_helper("META/misc_info.txt").split("\n"))
115  except KeyError:
116    # ok if misc_info.txt doesn't exist
117    pass
118
119  # backwards compatibility: These values used to be in their own
120  # files.  Look for them, in case we're processing an old
121  # target_files zip.
122
123  if "mkyaffs2_extra_flags" not in d:
124    try:
125      d["mkyaffs2_extra_flags"] = read_helper("META/mkyaffs2-extra-flags.txt").strip()
126    except KeyError:
127      # ok if flags don't exist
128      pass
129
130  if "recovery_api_version" not in d:
131    try:
132      d["recovery_api_version"] = read_helper("META/recovery-api-version.txt").strip()
133    except KeyError:
134      raise ValueError("can't find recovery API version in input target-files")
135
136  if "tool_extensions" not in d:
137    try:
138      d["tool_extensions"] = read_helper("META/tool-extensions.txt").strip()
139    except KeyError:
140      # ok if extensions don't exist
141      pass
142
143  if "fstab_version" not in d:
144    d["fstab_version"] = "1"
145
146  try:
147    data = read_helper("META/imagesizes.txt")
148    for line in data.split("\n"):
149      if not line: continue
150      name, value = line.split(" ", 1)
151      if not value: continue
152      if name == "blocksize":
153        d[name] = value
154      else:
155        d[name + "_size"] = value
156  except KeyError:
157    pass
158
159  def makeint(key):
160    if key in d:
161      d[key] = int(d[key], 0)
162
163  makeint("recovery_api_version")
164  makeint("blocksize")
165  makeint("system_size")
166  makeint("vendor_size")
167  makeint("userdata_size")
168  makeint("cache_size")
169  makeint("recovery_size")
170  makeint("boot_size")
171  makeint("fstab_version")
172
173  d["fstab"] = LoadRecoveryFSTab(read_helper, d["fstab_version"])
174  d["build.prop"] = LoadBuildProp(read_helper)
175  return d
176
177def LoadBuildProp(read_helper):
178  try:
179    data = read_helper("SYSTEM/build.prop")
180  except KeyError:
181    print "Warning: could not find SYSTEM/build.prop in %s" % zip
182    data = ""
183  return LoadDictionaryFromLines(data.split("\n"))
184
185def LoadDictionaryFromLines(lines):
186  d = {}
187  for line in lines:
188    line = line.strip()
189    if not line or line.startswith("#"): continue
190    if "=" in line:
191      name, value = line.split("=", 1)
192      d[name] = value
193  return d
194
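# Illustrative sketch (not part of the original file) of the
# LoadDictionaryFromLines() contract: blank lines and "#" comments are
# skipped, and only the first "=" separates key from value.  The property
# names below are hypothetical.
def _example_load_dictionary_from_lines():
  lines = [
      "# begin build properties",
      "ro.build.id=EXAMPLE.123",
      "ro.product.name=hypothetical_device",
      "",
  ]
  d = LoadDictionaryFromLines(lines)
  assert d["ro.build.id"] == "EXAMPLE.123"
  return d
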
195def LoadRecoveryFSTab(read_helper, fstab_version):
196  class Partition(object):
197    pass
198
199  try:
200    data = read_helper("RECOVERY/RAMDISK/etc/recovery.fstab")
201  except KeyError:
202    print "Warning: could not find RECOVERY/RAMDISK/etc/recovery.fstab"
203    data = ""
204
205  if fstab_version == 1:
206    d = {}
207    for line in data.split("\n"):
208      line = line.strip()
209      if not line or line.startswith("#"): continue
210      pieces = line.split()
211      if not (3 <= len(pieces) <= 4):
212        raise ValueError("malformed recovery.fstab line: \"%s\"" % (line,))
213
214      p = Partition()
215      p.mount_point = pieces[0]
216      p.fs_type = pieces[1]
217      p.device = pieces[2]
218      p.length = 0
219      options = None
220      if len(pieces) >= 4:
221        if pieces[3].startswith("/"):
222          p.device2 = pieces[3]
223          if len(pieces) >= 5:
224            options = pieces[4]
225        else:
226          p.device2 = None
227          options = pieces[3]
228      else:
229        p.device2 = None
230
231      if options:
232        options = options.split(",")
233        for i in options:
234          if i.startswith("length="):
235            p.length = int(i[7:])
236          else:
237            print "%s: unknown option \"%s\"" % (p.mount_point, i)
238
239      d[p.mount_point] = p
240
241  elif fstab_version == 2:
242    d = {}
243    for line in data.split("\n"):
244      line = line.strip()
245      if not line or line.startswith("#"): continue
246      pieces = line.split()
247      if len(pieces) != 5:
248        raise ValueError("malformed recovery.fstab line: \"%s\"" % (line,))
249
250      # Ignore entries that are managed by vold
251      options = pieces[4]
252      if "voldmanaged=" in options: continue
253
254      # It's a good line, parse it
255      p = Partition()
256      p.device = pieces[0]
257      p.mount_point = pieces[1]
258      p.fs_type = pieces[2]
259      p.device2 = None
260      p.length = 0
261
262      options = options.split(",")
263      for i in options:
264        if i.startswith("length="):
265          p.length = int(i[7:])
266        else:
267          # Ignore all unknown options in the unified fstab
268          continue
269
270      d[p.mount_point] = p
271
272  else:
273    raise ValueError("Unknown fstab_version: \"%d\"" % (fstab_version,))
274
275  return d
276
277
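# A hedged sketch of the read_helper contract used by LoadRecoveryFSTab():
# a callable that takes a zip-style path and either returns the file's
# contents or raises KeyError.  The fstab lines below are hypothetical
# fstab_version == 2 content.
def _example_load_recovery_fstab():
  fake_fstab = "\n".join([
      "# <src>            <mnt_point>  <type>  <mnt_flags>  <fs_mgr_flags>",
      "/dev/block/system   /system      ext4    ro           wait",
      "/dev/block/sdcard   /sdcard      vfat    defaults     voldmanaged=sdcard:auto",
  ])

  def read_helper(fn):
    if fn == "RECOVERY/RAMDISK/etc/recovery.fstab":
      return fake_fstab
    raise KeyError(fn)

  d = LoadRecoveryFSTab(read_helper, 2)
  # The vold-managed /sdcard entry is skipped; /system is parsed.
  assert "/system" in d and "/sdcard" not in d
  return d

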
278def DumpInfoDict(d):
279  for k, v in sorted(d.items()):
280    print "%-25s = (%s) %s" % (k, type(v).__name__, v)
281
282def BuildBootableImage(sourcedir, fs_config_file, info_dict=None):
283  """Take a kernel, cmdline, and ramdisk directory from the input (in
284  'sourcedir'), and turn them into a boot image.  Return the image
285  data, or None if sourcedir does not appear to contain files for
286  building the requested image."""
287
288  if (not os.access(os.path.join(sourcedir, "RAMDISK"), os.F_OK) or
289      not os.access(os.path.join(sourcedir, "kernel"), os.F_OK)):
290    return None
291
292  if info_dict is None:
293    info_dict = OPTIONS.info_dict
294
295  ramdisk_img = tempfile.NamedTemporaryFile()
296  img = tempfile.NamedTemporaryFile()
297
298  if os.access(fs_config_file, os.F_OK):
299    cmd = ["mkbootfs", "-f", fs_config_file, os.path.join(sourcedir, "RAMDISK")]
300  else:
301    cmd = ["mkbootfs", os.path.join(sourcedir, "RAMDISK")]
302  p1 = Run(cmd, stdout=subprocess.PIPE)
303  p2 = Run(["minigzip"],
304           stdin=p1.stdout, stdout=ramdisk_img.file.fileno())
305
306  p2.wait()
307  p1.wait()
308  assert p1.returncode == 0, "mkbootfs of %s ramdisk failed" % (sourcedir,)
309  assert p2.returncode == 0, "minigzip of %s ramdisk failed" % (sourcedir,)
310
311  # use MKBOOTIMG from environ, or "mkbootimg" if empty or not set
312  mkbootimg = os.getenv('MKBOOTIMG') or "mkbootimg"
313
314  cmd = [mkbootimg, "--kernel", os.path.join(sourcedir, "kernel")]
315
316  fn = os.path.join(sourcedir, "second")
317  if os.access(fn, os.F_OK):
318    cmd.append("--second")
319    cmd.append(fn)
320
321  fn = os.path.join(sourcedir, "cmdline")
322  if os.access(fn, os.F_OK):
323    cmd.append("--cmdline")
324    cmd.append(open(fn).read().rstrip("\n"))
325
326  fn = os.path.join(sourcedir, "base")
327  if os.access(fn, os.F_OK):
328    cmd.append("--base")
329    cmd.append(open(fn).read().rstrip("\n"))
330
331  fn = os.path.join(sourcedir, "pagesize")
332  if os.access(fn, os.F_OK):
333    cmd.append("--pagesize")
334    cmd.append(open(fn).read().rstrip("\n"))
335
336  args = info_dict.get("mkbootimg_args", None)
337  if args and args.strip():
338    cmd.extend(shlex.split(args))
339
340  cmd.extend(["--ramdisk", ramdisk_img.name,
341              "--output", img.name])
342
343  p = Run(cmd, stdout=subprocess.PIPE)
344  p.communicate()
345  assert p.returncode == 0, "mkbootimg of %s image failed" % (
346      os.path.basename(sourcedir),)
347
348  if info_dict.get("verity_key", None):
349    path = "/" + os.path.basename(sourcedir).lower()
350    cmd = ["boot_signer", path, img.name, info_dict["verity_key"] + ".pk8", info_dict["verity_key"] + ".x509.pem", img.name]
351    p = Run(cmd, stdout=subprocess.PIPE)
352    p.communicate()
353    assert p.returncode == 0, "boot_signer of %s image failed" % path
354
355  img.seek(0, os.SEEK_SET)
356  data = img.read()
357
358  ramdisk_img.close()
359  img.close()
360
361  return data
362
363
364def GetBootableImage(name, prebuilt_name, unpack_dir, tree_subdir,
365                     info_dict=None):
366  """Return a File object (with name 'name') with the desired bootable
367  image.  Look for it in 'unpack_dir'/BOOTABLE_IMAGES under the name
368  'prebuilt_name', otherwise look for it under 'unpack_dir'/IMAGES,
369  otherwise construct it from the source files in
370  'unpack_dir'/'tree_subdir'."""
371
372  prebuilt_path = os.path.join(unpack_dir, "BOOTABLE_IMAGES", prebuilt_name)
373  if os.path.exists(prebuilt_path):
374    print "using prebuilt %s from BOOTABLE_IMAGES..." % (prebuilt_name,)
375    return File.FromLocalFile(name, prebuilt_path)
376
377  prebuilt_path = os.path.join(unpack_dir, "IMAGES", prebuilt_name)
378  if os.path.exists(prebuilt_path):
379    print "using prebuilt %s from IMAGES..." % (prebuilt_name,)
380    return File.FromLocalFile(name, prebuilt_path)
381
382  print "building image from target_files %s..." % (tree_subdir,)
383  fs_config = "META/" + tree_subdir.lower() + "_filesystem_config.txt"
384  data = BuildBootableImage(os.path.join(unpack_dir, tree_subdir),
385                            os.path.join(unpack_dir, fs_config),
386                            info_dict)
387  if data:
388    return File(name, data)
389  return None
390
391
392def UnzipTemp(filename, pattern=None):
393  """Unzip the given archive into a temporary directory and return the name.
394
395  If filename is of the form "foo.zip+bar.zip", unzip foo.zip into a
396  temp dir, then unzip bar.zip into that_dir/BOOTABLE_IMAGES.
397
398  Returns (tempdir, zipobj) where zipobj is a zipfile.ZipFile (of the
399  main file), open for reading.
400  """
401
402  tmp = tempfile.mkdtemp(prefix="targetfiles-")
403  OPTIONS.tempfiles.append(tmp)
404
405  def unzip_to_dir(filename, dirname):
406    cmd = ["unzip", "-o", "-q", filename, "-d", dirname]
407    if pattern is not None:
408      cmd.append(pattern)
409    p = Run(cmd, stdout=subprocess.PIPE)
410    p.communicate()
411    if p.returncode != 0:
412      raise ExternalError("failed to unzip input target-files \"%s\"" %
413                          (filename,))
414
415  m = re.match(r"^(.*[.]zip)\+(.*[.]zip)$", filename, re.IGNORECASE)
416  if m:
417    unzip_to_dir(m.group(1), tmp)
418    unzip_to_dir(m.group(2), os.path.join(tmp, "BOOTABLE_IMAGES"))
419    filename = m.group(1)
420  else:
421    unzip_to_dir(filename, tmp)
422
423  return tmp, zipfile.ZipFile(filename, "r")
424
425
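# A hedged usage sketch for the helpers above: unpack a target-files zip,
# load its info dict, and fetch boot.img.  The archive name is hypothetical;
# Cleanup() is expected to remove the temp directory later.
def _example_get_boot_image(target_files="target_files.zip"):
  unpack_dir, input_zip = UnzipTemp(target_files)
  OPTIONS.info_dict = LoadInfoDict(input_zip)
  # Returns a File object, or None if neither a prebuilt nor BOOT/ sources
  # are present.
  return GetBootableImage("boot.img", "boot.img", unpack_dir, "BOOT")

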
426def GetKeyPasswords(keylist):
427  """Given a list of keys, prompt the user to enter passwords for
428  those which require them.  Return a {key: password} dict.  password
429  will be None if the key has no password."""
430
431  no_passwords = []
432  need_passwords = []
433  key_passwords = {}
434  devnull = open("/dev/null", "w+b")
435  for k in sorted(keylist):
436    # We don't need a password for things that aren't really keys.
437    if k in SPECIAL_CERT_STRINGS:
438      no_passwords.append(k)
439      continue
440
441    p = Run(["openssl", "pkcs8", "-in", k+OPTIONS.private_key_suffix,
442             "-inform", "DER", "-nocrypt"],
443            stdin=devnull.fileno(),
444            stdout=devnull.fileno(),
445            stderr=subprocess.STDOUT)
446    p.communicate()
447    if p.returncode == 0:
448      # Definitely an unencrypted key.
449      no_passwords.append(k)
450    else:
451      p = Run(["openssl", "pkcs8", "-in", k+OPTIONS.private_key_suffix,
452               "-inform", "DER", "-passin", "pass:"],
453              stdin=devnull.fileno(),
454              stdout=devnull.fileno(),
455              stderr=subprocess.PIPE)
456      stdout, stderr = p.communicate()
457      if p.returncode == 0:
458        # Encrypted key with empty string as password.
459        key_passwords[k] = ''
460      elif stderr.startswith('Error decrypting key'):
461        # Definitely encrypted key.
462        # It would have said "Error reading key" if it didn't parse correctly.
463        need_passwords.append(k)
464      else:
465        # Potentially, a type of key that openssl doesn't understand.
466        # We'll let the routines in signapk.jar handle it.
467        no_passwords.append(k)
468  devnull.close()
469
470  key_passwords.update(PasswordManager().GetPasswords(need_passwords))
471  key_passwords.update(dict.fromkeys(no_passwords, None))
472  return key_passwords
473
474
475def SignFile(input_name, output_name, key, password, align=None,
476             whole_file=False):
477  """Sign the input_name zip/jar/apk, producing output_name.  Use the
478  given key and password (the latter may be None if the key does not
479  have a password).
480
481  If align is an integer > 1, zipalign is run to align stored files in
482  the output zip on 'align'-byte boundaries.
483
484  If whole_file is true, use the "-w" option to SignApk to embed a
485  signature that covers the whole file in the archive comment of the
486  zip file.
487  """
488
489  if align == 0 or align == 1:
490    align = None
491
492  if align:
493    temp = tempfile.NamedTemporaryFile()
494    sign_name = temp.name
495  else:
496    sign_name = output_name
497
498  cmd = [OPTIONS.java_path, OPTIONS.java_args, "-jar",
499         os.path.join(OPTIONS.search_path, OPTIONS.signapk_path)]
500  cmd.extend(OPTIONS.extra_signapk_args)
501  if whole_file:
502    cmd.append("-w")
503  cmd.extend([key + OPTIONS.public_key_suffix,
504              key + OPTIONS.private_key_suffix,
505              input_name, sign_name])
506
507  p = Run(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
508  if password is not None:
509    password += "\n"
510  p.communicate(password)
511  if p.returncode != 0:
512    raise ExternalError("signapk.jar failed: return code %s" % (p.returncode,))
513
514  if align:
515    p = Run(["zipalign", "-f", str(align), sign_name, output_name])
516    p.communicate()
517    if p.returncode != 0:
518      raise ExternalError("zipalign failed: return code %s" % (p.returncode,))
519    temp.close()
520
521
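# Hypothetical signing sketch: look up the password for one key (None for
# unencrypted keys) and re-sign a package, zipaligned to 4 bytes.  The key
# basename and file names are placeholders, and OPTIONS.search_path must
# point at a host tools directory containing framework/signapk.jar.
def _example_sign_package():
  key = "build/target/product/security/testkey"
  passwords = GetKeyPasswords([key])   # may prompt interactively
  SignFile("unsigned.zip", "signed.zip", key, passwords[key],
           align=4, whole_file=True)

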
522def CheckSize(data, target, info_dict):
523  """Check the data string passed against the max size limit, if
524  any, for the given target.  Raise exception if the data is too big.
525  Print a warning if the data is nearing the maximum size."""
526
527  if target.endswith(".img"): target = target[:-4]
528  mount_point = "/" + target
529
530  fs_type = None
531  limit = None
532  if info_dict["fstab"]:
533    if mount_point == "/userdata": mount_point = "/data"
534    p = info_dict["fstab"][mount_point]
535    fs_type = p.fs_type
536    device = p.device
537    if "/" in device:
538      device = device[device.rfind("/")+1:]
539    limit = info_dict.get(device + "_size", None)
540  if not fs_type or not limit: return
541
542  if fs_type == "yaffs2":
543    # image size should be increased by 1/64th to account for the
544    # spare area (64 bytes per 2k page)
545    limit = limit / 2048 * (2048+64)
546  size = len(data)
547  pct = float(size) * 100.0 / limit
548  msg = "%s size (%d) is %.2f%% of limit (%d)" % (target, size, pct, limit)
549  if pct >= 99.0:
550    raise ExternalError(msg)
551  elif pct >= 95.0:
552    print
553    print "  WARNING: ", msg
554    print
555  elif OPTIONS.verbose:
556    print "  ", msg
557
558
559def ReadApkCerts(tf_zip):
560  """Given a target_files ZipFile, parse the META/apkcerts.txt file
561  and return a {package: cert} dict."""
562  certmap = {}
563  for line in tf_zip.read("META/apkcerts.txt").split("\n"):
564    line = line.strip()
565    if not line: continue
566    m = re.match(r'^name="(.*)"\s+certificate="(.*)"\s+'
567                 r'private_key="(.*)"$', line)
568    if m:
569      name, cert, privkey = m.groups()
570      public_key_suffix_len = len(OPTIONS.public_key_suffix)
571      private_key_suffix_len = len(OPTIONS.private_key_suffix)
572      if cert in SPECIAL_CERT_STRINGS and not privkey:
573        certmap[name] = cert
574      elif (cert.endswith(OPTIONS.public_key_suffix) and
575            privkey.endswith(OPTIONS.private_key_suffix) and
576            cert[:-public_key_suffix_len] == privkey[:-private_key_suffix_len]):
577        certmap[name] = cert[:-public_key_suffix_len]
578      else:
579        raise ValueError("failed to parse line from apkcerts.txt:\n" + line)
580  return certmap
581
582
583COMMON_DOCSTRING = """
584  -p  (--path)  <dir>
585      Prepend <dir>/bin to the list of places to search for binaries
586      run by this script, and expect to find jars in <dir>/framework.
587
588  -s  (--device_specific) <file>
589      Path to the python module containing device-specific
590      releasetools code.
591
592  -x  (--extra)  <key=value>
593      Add a key/value pair to the 'extras' dict, which device-specific
594      extension code may look at.
595
596  -v  (--verbose)
597      Show command lines being executed.
598
599  -h  (--help)
600      Display this usage message and exit.
601"""
602
603def Usage(docstring):
604  print docstring.rstrip("\n")
605  print COMMON_DOCSTRING
606
607
608def ParseOptions(argv,
609                 docstring,
610                 extra_opts="", extra_long_opts=(),
611                 extra_option_handler=None):
612  """Parse the options in argv and return any arguments that aren't
613  flags.  docstring is the calling module's docstring, to be displayed
614  for errors and -h.  extra_opts and extra_long_opts are for flags
615  defined by the caller, which are processed by passing them to
616  extra_option_handler."""
617
618  try:
619    opts, args = getopt.getopt(
620        argv, "hvp:s:x:" + extra_opts,
621        ["help", "verbose", "path=", "signapk_path=", "extra_signapk_args=",
622         "java_path=", "java_args=", "public_key_suffix=",
623         "private_key_suffix=", "device_specific=", "extra="] +
624        list(extra_long_opts))
625  except getopt.GetoptError, err:
626    Usage(docstring)
627    print "**", str(err), "**"
628    sys.exit(2)
629
630  path_specified = False
631
632  for o, a in opts:
633    if o in ("-h", "--help"):
634      Usage(docstring)
635      sys.exit()
636    elif o in ("-v", "--verbose"):
637      OPTIONS.verbose = True
638    elif o in ("-p", "--path"):
639      OPTIONS.search_path = a
640    elif o in ("--signapk_path",):
641      OPTIONS.signapk_path = a
642    elif o in ("--extra_signapk_args",):
643      OPTIONS.extra_signapk_args = shlex.split(a)
644    elif o in ("--java_path",):
645      OPTIONS.java_path = a
646    elif o in ("--java_args",):
647      OPTIONS.java_args = a
648    elif o in ("--public_key_suffix",):
649      OPTIONS.public_key_suffix = a
650    elif o in ("--private_key_suffix",):
651      OPTIONS.private_key_suffix = a
652    elif o in ("-s", "--device_specific"):
653      OPTIONS.device_specific = a
654    elif o in ("-x", "--extra"):
655      key, value = a.split("=", 1)
656      OPTIONS.extras[key] = value
657    else:
658      if extra_option_handler is None or not extra_option_handler(o, a):
659        assert False, "unknown option \"%s\"" % (o,)
660
661  if OPTIONS.search_path:
662    os.environ["PATH"] = (os.path.join(OPTIONS.search_path, "bin") +
663                          os.pathsep + os.environ["PATH"])
664
665  return args
666
667
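# Sketch of how a calling script typically wires up ParseOptions(), including
# the extra_option_handler contract (return True if the option was consumed).
# The --board_config flag is purely illustrative.
def _example_parse_options(argv):
  def option_handler(o, a):
    if o in ("-b", "--board_config"):
      OPTIONS.extras["board_config"] = a
      return True
    return False

  return ParseOptions(argv, __doc__,
                      extra_opts="b:",
                      extra_long_opts=["board_config="],
                      extra_option_handler=option_handler)

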
668def MakeTempFile(prefix=None, suffix=None):
669  """Make a temp file and add it to the list of things to be deleted
670  when Cleanup() is called.  Return the filename."""
671  fd, fn = tempfile.mkstemp(prefix=prefix, suffix=suffix)
672  os.close(fd)
673  OPTIONS.tempfiles.append(fn)
674  return fn
675
676
677def Cleanup():
678  for i in OPTIONS.tempfiles:
679    if os.path.isdir(i):
680      shutil.rmtree(i)
681    else:
682      os.remove(i)
683
684
685class PasswordManager(object):
686  def __init__(self):
687    self.editor = os.getenv("EDITOR", None)
688    self.pwfile = os.getenv("ANDROID_PW_FILE", None)
689
690  def GetPasswords(self, items):
691    """Get passwords corresponding to each string in 'items',
692    returning a dict.  (The dict may have keys in addition to the
693    values in 'items'.)
694
695    Uses the passwords in $ANDROID_PW_FILE if available, letting the
696    user edit that file to add more needed passwords.  If no editor is
697  available, or $ANDROID_PW_FILE isn't defined, prompts the user
698    interactively in the ordinary way.
699    """
700
701    current = self.ReadFile()
702
703    first = True
704    while True:
705      missing = []
706      for i in items:
707        if i not in current or not current[i]:
708          missing.append(i)
709      # Are all the passwords already in the file?
710      if not missing: return current
711
712      for i in missing:
713        current[i] = ""
714
715      if not first:
716        print "key file %s still missing some passwords." % (self.pwfile,)
717        answer = raw_input("try to edit again? [y]> ").strip()
718        if answer and answer[0] not in 'yY':
719          raise RuntimeError("key passwords unavailable")
720      first = False
721
722      current = self.UpdateAndReadFile(current)
723
724  def PromptResult(self, current):
725    """Prompt the user to enter a value (password) for each key in
726  'current' whose value is false.  Returns a new dict with all the
727    values.
728    """
729    result = {}
730    for k, v in sorted(current.iteritems()):
731      if v:
732        result[k] = v
733      else:
734        while True:
735          result[k] = getpass.getpass("Enter password for %s key> "
736                                      % (k,)).strip()
737          if result[k]: break
738    return result
739
740  def UpdateAndReadFile(self, current):
741    if not self.editor or not self.pwfile:
742      return self.PromptResult(current)
743
744    f = open(self.pwfile, "w")
745    os.chmod(self.pwfile, 0600)
746    f.write("# Enter key passwords between the [[[ ]]] brackets.\n")
747    f.write("# (Additional spaces are harmless.)\n\n")
748
749    first_line = None
750    entries = [(not v, k, v) for (k, v) in current.iteritems()]
751    entries.sort()
752    for i, (_, k, v) in enumerate(entries):
753      f.write("[[[  %s  ]]] %s\n" % (v, k))
754      if not v and first_line is None:
755        # position cursor on first line with no password.
756        first_line = i + 4
757    f.close()
758
759    p = Run([self.editor, "+%d" % (first_line,), self.pwfile])
760    _, _ = p.communicate()
761
762    return self.ReadFile()
763
764  def ReadFile(self):
765    result = {}
766    if self.pwfile is None: return result
767    try:
768      f = open(self.pwfile, "r")
769      for line in f:
770        line = line.strip()
771        if not line or line[0] == '#': continue
772        m = re.match(r"^\[\[\[\s*(.*?)\s*\]\]\]\s*(\S+)$", line)
773        if not m:
774          print "failed to parse password file: ", line
775        else:
776          result[m.group(2)] = m.group(1)
777      f.close()
778    except IOError, e:
779      if e.errno != errno.ENOENT:
780        print "error reading password file: ", str(e)
781    return result
782
783
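# Illustrative sketch of the $ANDROID_PW_FILE format that ReadFile() parses:
# one "[[[  password  ]]] keyname" entry per non-comment line.  The directory,
# key name and password below are made up.
def _example_read_password_file(tmpdir):
  pwfile = os.path.join(tmpdir, "pw.txt")
  with open(pwfile, "w") as f:
    f.write("# comment lines are ignored\n")
    f.write("[[[  secret  ]]] build/target/product/security/testkey\n")
  os.environ["ANDROID_PW_FILE"] = pwfile
  pw = PasswordManager().GetPasswords(
      ["build/target/product/security/testkey"])
  return pw   # {'build/target/product/security/testkey': 'secret'}

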
784def ZipWrite(zip_file, filename, arcname=None, perms=0o644,
785             compress_type=None):
786  import datetime
787
788  # http://b/18015246
789  # Python 2.7's zipfile implementation wrongly thinks that zip64 is required
790  # for files larger than 2GiB. We can work around this by adjusting their
791  # limit. Note that `zipfile.writestr()` will not work for strings larger than
792  # 2GiB. The Python interpreter sometimes rejects strings that large (though
793  # it isn't clear to me exactly what circumstances cause this).
794  # `zipfile.write()` must be used directly to work around this.
795  #
796  # This mess can be avoided if we port to python3.
797  saved_zip64_limit = zipfile.ZIP64_LIMIT
798  zipfile.ZIP64_LIMIT = (1 << 32) - 1
799
800  if compress_type is None:
801    compress_type = zip_file.compression
802  if arcname is None:
803    arcname = filename
804
805  saved_stat = os.stat(filename)
806
807  try:
808    # `zipfile.write()` doesn't allow us to pass ZipInfo, so just modify the
809    # file to be zipped and reset it when we're done.
810    os.chmod(filename, perms)
811
812    # Use a fixed timestamp so the output is repeatable.
813    epoch = datetime.datetime.fromtimestamp(0)
814    timestamp = (datetime.datetime(2009, 1, 1) - epoch).total_seconds()
815    os.utime(filename, (timestamp, timestamp))
816
817    zip_file.write(filename, arcname=arcname, compress_type=compress_type)
818  finally:
819    os.chmod(filename, saved_stat.st_mode)
820    os.utime(filename, (saved_stat.st_atime, saved_stat.st_mtime))
821    zipfile.ZIP64_LIMIT = saved_zip64_limit
822
823
824def ZipWriteStr(zip_file, filename, data, perms=0644, compression=None):
825  # use a fixed timestamp so the output is repeatable.
826  zinfo = zipfile.ZipInfo(filename=filename,
827                          date_time=(2009, 1, 1, 0, 0, 0))
828  if compression is None:
829    zinfo.compress_type = zip_file.compression
830  else:
831    zinfo.compress_type = compression
832  zinfo.external_attr = perms << 16
833  zip_file.writestr(zinfo, data)
834
835
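# Sketch of building a repeatable archive with the two helpers above: every
# entry gets the fixed 2009-01-01 timestamp and explicit permissions, so the
# same inputs produce byte-identical zips.  File names here are hypothetical.
def _example_write_deterministic_zip(some_file):
  zf = zipfile.ZipFile("example.zip", "w", zipfile.ZIP_DEFLATED)
  try:
    ZipWrite(zf, some_file, arcname="META/example.bin", perms=0o755)
    ZipWriteStr(zf, "META/example.txt", "hello\n", perms=0o644)
  finally:
    zf.close()

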
836class DeviceSpecificParams(object):
837  module = None
838  def __init__(self, **kwargs):
839    """Keyword arguments to the constructor become attributes of this
840    object, which is passed to all functions in the device-specific
841    module."""
842    for k, v in kwargs.iteritems():
843      setattr(self, k, v)
844    self.extras = OPTIONS.extras
845
846    if self.module is None:
847      path = OPTIONS.device_specific
848      if not path: return
849      try:
850        if os.path.isdir(path):
851          info = imp.find_module("releasetools", [path])
852        else:
853          d, f = os.path.split(path)
854          b, x = os.path.splitext(f)
855          if x == ".py":
856            f = b
857          info = imp.find_module(f, [d])
858        print "loaded device-specific extensions from", path
859        self.module = imp.load_module("device_specific", *info)
860      except ImportError:
861        print "unable to load device-specific module; assuming none"
862
863  def _DoCall(self, function_name, *args, **kwargs):
864    """Call the named function in the device-specific module, passing
865    the given args and kwargs.  The first argument to the call will be
866    the DeviceSpecific object itself.  If there is no module, or the
867    module does not define the function, return the value of the
868    'default' kwarg (which itself defaults to None)."""
869    if self.module is None or not hasattr(self.module, function_name):
870      return kwargs.get("default", None)
871    return getattr(self.module, function_name)(*((self,) + args), **kwargs)
872
873  def FullOTA_Assertions(self):
874    """Called after emitting the block of assertions at the top of a
875    full OTA package.  Implementations can add whatever additional
876    assertions they like."""
877    return self._DoCall("FullOTA_Assertions")
878
879  def FullOTA_InstallBegin(self):
880    """Called at the start of full OTA installation."""
881    return self._DoCall("FullOTA_InstallBegin")
882
883  def FullOTA_InstallEnd(self):
884    """Called at the end of full OTA installation; typically this is
885    used to install the image for the device's baseband processor."""
886    return self._DoCall("FullOTA_InstallEnd")
887
888  def IncrementalOTA_Assertions(self):
889    """Called after emitting the block of assertions at the top of an
890    incremental OTA package.  Implementations can add whatever
891    additional assertions they like."""
892    return self._DoCall("IncrementalOTA_Assertions")
893
894  def IncrementalOTA_VerifyBegin(self):
895    """Called at the start of the verification phase of incremental
896    OTA installation; additional checks can be placed here to abort
897    the script before any changes are made."""
898    return self._DoCall("IncrementalOTA_VerifyBegin")
899
900  def IncrementalOTA_VerifyEnd(self):
901    """Called at the end of the verification phase of incremental OTA
902    installation; additional checks can be placed here to abort the
903    script before any changes are made."""
904    return self._DoCall("IncrementalOTA_VerifyEnd")
905
906  def IncrementalOTA_InstallBegin(self):
907    """Called at the start of incremental OTA installation (after
908    verification is complete)."""
909    return self._DoCall("IncrementalOTA_InstallBegin")
910
911  def IncrementalOTA_InstallEnd(self):
912    """Called at the end of incremental OTA installation; typically
913    this is used to install the image for the device's baseband
914    processor."""
915    return self._DoCall("IncrementalOTA_InstallEnd")
916
917class File(object):
918  def __init__(self, name, data):
919    self.name = name
920    self.data = data
921    self.size = len(data)
922    self.sha1 = sha1(data).hexdigest()
923
924  @classmethod
925  def FromLocalFile(cls, name, diskname):
926    f = open(diskname, "rb")
927    data = f.read()
928    f.close()
929    return File(name, data)
930
931  def WriteToTemp(self):
932    t = tempfile.NamedTemporaryFile()
933    t.write(self.data)
934    t.flush()
935    return t
936
937  def AddToZip(self, z, compression=None):
938    ZipWriteStr(z, self.name, self.data, compression=compression)
939
940DIFF_PROGRAM_BY_EXT = {
941    ".gz" : "imgdiff",
942    ".zip" : ["imgdiff", "-z"],
943    ".jar" : ["imgdiff", "-z"],
944    ".apk" : ["imgdiff", "-z"],
945    ".img" : "imgdiff",
946    }
947
948class Difference(object):
949  def __init__(self, tf, sf, diff_program=None):
950    self.tf = tf
951    self.sf = sf
952    self.patch = None
953    self.diff_program = diff_program
954
955  def ComputePatch(self):
956    """Compute the patch (as a string of data) needed to turn sf into
957    tf.  Returns the same tuple as GetPatch()."""
958
959    tf = self.tf
960    sf = self.sf
961
962    if self.diff_program:
963      diff_program = self.diff_program
964    else:
965      ext = os.path.splitext(tf.name)[1]
966      diff_program = DIFF_PROGRAM_BY_EXT.get(ext, "bsdiff")
967
968    ttemp = tf.WriteToTemp()
969    stemp = sf.WriteToTemp()
970
971    ext = os.path.splitext(tf.name)[1]
972
973    try:
974      ptemp = tempfile.NamedTemporaryFile()
975      if isinstance(diff_program, list):
976        cmd = copy.copy(diff_program)
977      else:
978        cmd = [diff_program]
979      cmd.append(stemp.name)
980      cmd.append(ttemp.name)
981      cmd.append(ptemp.name)
982      p = Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
983      err = []
984      def run():
985        _, e = p.communicate()
986        if e: err.append(e)
987      th = threading.Thread(target=run)
988      th.start()
989      th.join(timeout=300)   # 5 mins
990      if th.is_alive():
991        print "WARNING: diff command timed out"
992        p.terminate()
993        th.join(5)
994        if th.is_alive():
995          p.kill()
996          th.join()
997
998      if err or p.returncode != 0:
999        print "WARNING: failure running %s:\n%s\n" % (
1000            diff_program, "".join(err))
1001        self.patch = None
1002        return None, None, None
1003      diff = ptemp.read()
1004    finally:
1005      ptemp.close()
1006      stemp.close()
1007      ttemp.close()
1008
1009    self.patch = diff
1010    return self.tf, self.sf, self.patch
1011
1012
1013  def GetPatch(self):
1014    """Return a tuple (target_file, source_file, patch_data).
1015    patch_data may be None if ComputePatch hasn't been called, or if
1016    computing the patch failed."""
1017    return self.tf, self.sf, self.patch
1018
1019
1020def ComputeDifferences(diffs):
1021  """Call ComputePatch on all the Difference objects in 'diffs'."""
1022  print len(diffs), "diffs to compute"
1023
1024  # Do the largest files first, to try and reduce the long-pole effect.
1025  by_size = [(i.tf.size, i) for i in diffs]
1026  by_size.sort(reverse=True)
1027  by_size = [i[1] for i in by_size]
1028
1029  lock = threading.Lock()
1030  diff_iter = iter(by_size)   # accessed under lock
1031
1032  def worker():
1033    try:
1034      lock.acquire()
1035      for d in diff_iter:
1036        lock.release()
1037        start = time.time()
1038        d.ComputePatch()
1039        dur = time.time() - start
1040        lock.acquire()
1041
1042        tf, sf, patch = d.GetPatch()
1043        if sf.name == tf.name:
1044          name = tf.name
1045        else:
1046          name = "%s (%s)" % (tf.name, sf.name)
1047        if patch is None:
1048          print "patching failed!                                  %s" % (name,)
1049        else:
1050          print "%8.2f sec %8d / %8d bytes (%6.2f%%) %s" % (
1051              dur, len(patch), tf.size, 100.0 * len(patch) / tf.size, name)
1052      lock.release()
1053    except Exception, e:
1054      print e
1055      raise
1056
1057  # start worker threads; wait for them all to finish.
1058  threads = [threading.Thread(target=worker)
1059             for i in range(OPTIONS.worker_threads)]
1060  for th in threads:
1061    th.start()
1062  while threads:
1063    threads.pop().join()
1064
1065
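# A hedged sketch of the per-file diff flow above.  OPTIONS.worker_threads is
# assumed to be set by the calling script (e.g. ota_from_target_files); the
# file names and payloads here are placeholders.
def _example_compute_differences():
  OPTIONS.worker_threads = getattr(OPTIONS, "worker_threads", 1)
  tf = File("app/Example.apk", "new contents")   # hypothetical target file
  sf = File("app/Example.apk", "old contents")   # hypothetical source file
  d = Difference(tf, sf)   # .apk maps to "imgdiff -z"; the default is bsdiff
  ComputeDifferences([d])
  _, _, patch = d.GetPatch()   # patch is None if the diff program failed
  return patch

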
1066class BlockDifference(object):
1067  def __init__(self, partition, tgt, src=None, check_first_block=False, version=None):
1068    self.tgt = tgt
1069    self.src = src
1070    self.partition = partition
1071    self.check_first_block = check_first_block
1072
1073    if version is None:
1074      version = 1
1075      if OPTIONS.info_dict:
1076        version = max(
1077            int(i) for i in
1078            OPTIONS.info_dict.get("blockimgdiff_versions", "1").split(","))
1079    self.version = version
1080
1081    b = blockimgdiff.BlockImageDiff(tgt, src, threads=OPTIONS.worker_threads,
1082                                    version=self.version)
1083    tmpdir = tempfile.mkdtemp()
1084    OPTIONS.tempfiles.append(tmpdir)
1085    self.path = os.path.join(tmpdir, partition)
1086    b.Compute(self.path)
1087
1088    _, self.device = GetTypeAndDevice("/" + partition, OPTIONS.info_dict)
1089
1090  def WriteScript(self, script, output_zip, progress=None):
1091    if not self.src:
1092      # write the output unconditionally
1093      script.Print("Patching %s image unconditionally..." % (self.partition,))
1094    else:
1095      script.Print("Patching %s image after verification." % (self.partition,))
1096
1097    if progress: script.ShowProgress(progress, 0)
1098    self._WriteUpdate(script, output_zip)
1099
1100  def WriteVerifyScript(self, script):
1101    partition = self.partition
1102    if not self.src:
1103      script.Print("Image %s will be patched unconditionally." % (partition,))
1104    else:
1105      if self.version >= 3:
1106        script.AppendExtra(('if block_image_verify("%s", '
1107                            'package_extract_file("%s.transfer.list"), '
1108                            '"%s.new.dat", "%s.patch.dat") then') %
1109                           (self.device, partition, partition, partition))
1110      else:
1111        script.AppendExtra('if range_sha1("%s", "%s") == "%s" then' %
1112                            (self.device, self.src.care_map.to_string_raw(),
1113                            self.src.TotalSha1()))
1114      script.Print('Verified %s image...' % (partition,))
1115      script.AppendExtra('else')
1116
1117      # When generating incrementals for the system and vendor partitions,
1118      # explicitly check the first block (which contains the superblock) of
1119      # the partition to see if it's what we expect. If this check fails,
1120      # give an explicit log message about the partition having been
1121      # remounted R/W (the most likely explanation) and the need to flash to
1122      # get OTAs working again.
1123      if self.check_first_block:
1124        self._CheckFirstBlock(script)
1125
1126      # Abort the OTA update. Note that the incremental OTA cannot be applied
1127      # even if it may match the checksum of the target partition.
1128      # a) If version < 3, operations like move and erase will make changes
1129      #    unconditionally and damage the partition.
1130      # b) If version >= 3, it won't even reach here.
1131      script.AppendExtra(('abort("%s partition has unexpected contents");\n'
1132                          'endif;') % (partition,))
1133
1134  def _WriteUpdate(self, script, output_zip):
1135    ZipWrite(output_zip,
1136             '{}.transfer.list'.format(self.path),
1137             '{}.transfer.list'.format(self.partition))
1138    ZipWrite(output_zip,
1139             '{}.new.dat'.format(self.path),
1140             '{}.new.dat'.format(self.partition))
1141    ZipWrite(output_zip,
1142             '{}.patch.dat'.format(self.path),
1143             '{}.patch.dat'.format(self.partition),
1144             compress_type=zipfile.ZIP_STORED)
1145
1146    call = ('block_image_update("{device}", '
1147            'package_extract_file("{partition}.transfer.list"), '
1148            '"{partition}.new.dat", "{partition}.patch.dat");\n'.format(
1149                device=self.device, partition=self.partition))
1150    script.AppendExtra(script._WordWrap(call))
1151
1152  def _HashBlocks(self, source, ranges):
1153    data = source.ReadRangeSet(ranges)
1154    ctx = sha1()
1155
1156    for p in data:
1157      ctx.update(p)
1158
1159    return ctx.hexdigest()
1160
1161  def _CheckFirstBlock(self, script):
1162    r = RangeSet((0, 1))
1163    srchash = self._HashBlocks(self.src, r)
1164
1165    script.AppendExtra(('(range_sha1("%s", "%s") == "%s") || '
1166                        'abort("%s has been remounted R/W; '
1167                        'reflash device to reenable OTA updates");')
1168                       % (self.device, r.to_string_raw(), srchash,
1169                          self.device))
1170
1171DataImage = blockimgdiff.DataImage
1172
1173
1174# map recovery.fstab's fs_types to mount/format "partition types"
1175PARTITION_TYPES = { "yaffs2": "MTD", "mtd": "MTD",
1176                    "ext4": "EMMC", "emmc": "EMMC",
1177                    "f2fs": "EMMC" }
1178
1179def GetTypeAndDevice(mount_point, info):
1180  fstab = info["fstab"]
1181  if fstab:
1182    return PARTITION_TYPES[fstab[mount_point].fs_type], fstab[mount_point].device
1183  else:
1184    return None
1185
1186
1187def ParseCertificate(data):
1188  """Parse a PEM-format certificate."""
1189  cert = []
1190  save = False
1191  for line in data.split("\n"):
1192    if "--END CERTIFICATE--" in line:
1193      break
1194    if save:
1195      cert.append(line)
1196    if "--BEGIN CERTIFICATE--" in line:
1197      save = True
1198  cert = "".join(cert).decode('base64')
1199  return cert
1200
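# Small sketch of ParseCertificate(): it strips the PEM armor lines and
# base64-decodes whatever sits between them.  The payload below is a
# placeholder, not a real certificate.
def _example_parse_certificate():
  pem = ("-----BEGIN CERTIFICATE-----\n"
         "aGVsbG8gd29ybGQ=\n"   # placeholder base64, decodes to "hello world"
         "-----END CERTIFICATE-----\n")
  der = ParseCertificate(pem)
  assert der == "hello world"
  return der
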
1201def MakeRecoveryPatch(input_dir, output_sink, recovery_img, boot_img,
1202                      info_dict=None):
1203  """Generate a binary patch that creates the recovery image starting
1204  with the boot image.  (Most of the space in these images is just the
1205  kernel, which is identical for the two, so the resulting patch
1206  should be efficient.)  Add it to the output zip, along with a shell
1207  script that is run from init.rc on first boot to actually do the
1208  patching and install the new recovery image.
1209
1210  recovery_img and boot_img should be File objects for the
1211  corresponding images.  info_dict should be the dictionary returned by
1212  common.LoadInfoDict() on the input target_files.
1213  """
1214
1215  if info_dict is None:
1216    info_dict = OPTIONS.info_dict
1217
1218  diff_program = ["imgdiff"]
1219  path = os.path.join(input_dir, "SYSTEM", "etc", "recovery-resource.dat")
1220  if os.path.exists(path):
1221    diff_program.append("-b")
1222    diff_program.append(path)
1223    bonus_args = "-b /system/etc/recovery-resource.dat"
1224  else:
1225    bonus_args = ""
1226
1227  d = Difference(recovery_img, boot_img, diff_program=diff_program)
1228  _, _, patch = d.ComputePatch()
1229  output_sink("recovery-from-boot.p", patch)
1230
1231  td_pair = GetTypeAndDevice("/boot", info_dict)
1232  if not td_pair:
1233    return
1234  boot_type, boot_device = td_pair
1235  td_pair = GetTypeAndDevice("/recovery", info_dict)
1236  if not td_pair:
1237    return
1238  recovery_type, recovery_device = td_pair
1239
1240  sh = """#!/system/bin/sh
1241if ! applypatch -c %(recovery_type)s:%(recovery_device)s:%(recovery_size)d:%(recovery_sha1)s; then
1242  applypatch %(bonus_args)s %(boot_type)s:%(boot_device)s:%(boot_size)d:%(boot_sha1)s %(recovery_type)s:%(recovery_device)s %(recovery_sha1)s %(recovery_size)d %(boot_sha1)s:/system/recovery-from-boot.p && log -t recovery "Installing new recovery image: succeeded" || log -t recovery "Installing new recovery image: failed"
1243else
1244  log -t recovery "Recovery image already installed"
1245fi
1246""" % { 'boot_size': boot_img.size,
1247        'boot_sha1': boot_img.sha1,
1248        'recovery_size': recovery_img.size,
1249        'recovery_sha1': recovery_img.sha1,
1250        'boot_type': boot_type,
1251        'boot_device': boot_device,
1252        'recovery_type': recovery_type,
1253        'recovery_device': recovery_device,
1254        'bonus_args': bonus_args,
1255        }
1256
1257  # The install script location moved from /system/etc to /system/bin
1258  # in the L release.  Parse the init.rc file to find out where the
1259  # target-files expects it to be, and put it there.
1260  sh_location = "etc/install-recovery.sh"
1261  try:
1262    with open(os.path.join(input_dir, "BOOT", "RAMDISK", "init.rc")) as f:
1263      for line in f:
1264        m = re.match(r"^service flash_recovery /system/(\S+)\s*$", line)
1265        if m:
1266          sh_location = m.group(1)
1267          print "putting script in", sh_location
1268          break
1269  except (OSError, IOError), e:
1270    print "failed to read init.rc: %s" % (e,)
1271
1272  output_sink(sh_location, sh)
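

# Sketch of the output_sink contract expected by MakeRecoveryPatch(): a
# callable taking a path relative to the system partition and the data to
# store there.  Here it writes into the (assumed existing) SYSTEM/ tree of an
# unpacked target-files directory.
def _example_make_recovery_patch(unpack_dir, recovery_img, boot_img):
  def output_sink(fn, data):
    with open(os.path.join(unpack_dir, "SYSTEM", *fn.split("/")), "wb") as f:
      f.write(data)

  MakeRecoveryPatch(unpack_dir, output_sink, recovery_img, boot_img)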
1273