common.py revision dd67a295cc91ad636b81b705b5bedda945035568
1# Copyright (C) 2008 The Android Open Source Project
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7#      http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14
15import copy
16import errno
17import getopt
18import getpass
19import imp
20import os
21import platform
22import re
23import shlex
24import shutil
25import subprocess
26import sys
27import tempfile
28import threading
29import time
30import zipfile
31
32import blockimgdiff
33from rangelib import RangeSet
34
35try:
36  from hashlib import sha1 as sha1
37except ImportError:
38  from sha import sha as sha1
39
40# missing in Python 2.4 and before
41if not hasattr(os, "SEEK_SET"):
42  os.SEEK_SET = 0
43
44class Options(object): pass
45OPTIONS = Options()
46
47DEFAULT_SEARCH_PATH_BY_PLATFORM = {
48    "linux2": "out/host/linux-x86",
49    "darwin": "out/host/darwin-x86",
50    }
51OPTIONS.search_path = DEFAULT_SEARCH_PATH_BY_PLATFORM.get(sys.platform, None)
52
53OPTIONS.signapk_path = "framework/signapk.jar"  # Relative to search_path
54OPTIONS.extra_signapk_args = []
55OPTIONS.java_path = "java"  # Use the one on the path by default.
56OPTIONS.java_args = "-Xmx2048m" # JVM Args
57OPTIONS.public_key_suffix = ".x509.pem"
58OPTIONS.private_key_suffix = ".pk8"
59OPTIONS.verbose = False
60OPTIONS.tempfiles = []
61OPTIONS.device_specific = None
62OPTIONS.extras = {}
63OPTIONS.info_dict = None
64
65
66# Values for "certificate" in apkcerts that mean special things.
67SPECIAL_CERT_STRINGS = ("PRESIGNED", "EXTERNAL")
68
69
70class ExternalError(RuntimeError): pass
71
72
73def Run(args, **kwargs):
74  """Create and return a subprocess.Popen object, printing the command
75  line on the terminal if -v was specified."""
76  if OPTIONS.verbose:
77    print "  running: ", " ".join(args)
78  return subprocess.Popen(args, **kwargs)
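# Illustrative use of Run() (kept as a comment so importing this module has no
# side effects); "example.zip" is a placeholder and unzip is assumed to be on
# the PATH:
#
#   p = Run(["unzip", "-l", "example.zip"], stdout=subprocess.PIPE)
#   stdout, _ = p.communicate()
#   if p.returncode != 0:
#     raise ExternalError("unzip failed with code %d" % (p.returncode,))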
79
80
81def CloseInheritedPipes():
82  """Gmake on Mac OS leaks file descriptors (pipes). Close those fds
83  before doing other work."""
84  if platform.system() != "Darwin":
85    return
86  for d in range(3, 1025):
87    try:
88      stat = os.fstat(d)
89      if stat is not None:
90        pipebit = stat[0] & 0x1000
91        if pipebit != 0:
92          os.close(d)
93    except OSError:
94      pass
95
96
97def LoadInfoDict(input):
98  """Read and parse the META/misc_info.txt key/value pairs from the
99  input target files and return a dict."""
100
101  def read_helper(fn):
102    if isinstance(input, zipfile.ZipFile):
103      return input.read(fn)
104    else:
105      path = os.path.join(input, *fn.split("/"))
106      try:
107        with open(path) as f:
108          return f.read()
109      except IOError, e:
110        if e.errno == errno.ENOENT:
111          raise KeyError(fn)
112  d = {}
113  try:
114    d = LoadDictionaryFromLines(read_helper("META/misc_info.txt").split("\n"))
115  except KeyError:
116    # ok if misc_info.txt doesn't exist
117    pass
118
119  # backwards compatibility: These values used to be in their own
120  # files.  Look for them, in case we're processing an old
121  # target_files zip.
122
123  if "mkyaffs2_extra_flags" not in d:
124    try:
125      d["mkyaffs2_extra_flags"] = read_helper("META/mkyaffs2-extra-flags.txt").strip()
126    except KeyError:
127      # ok if flags don't exist
128      pass
129
130  if "recovery_api_version" not in d:
131    try:
132      d["recovery_api_version"] = read_helper("META/recovery-api-version.txt").strip()
133    except KeyError:
134      raise ValueError("can't find recovery API version in input target-files")
135
136  if "tool_extensions" not in d:
137    try:
138      d["tool_extensions"] = read_helper("META/tool-extensions.txt").strip()
139    except KeyError:
140      # ok if extensions don't exist
141      pass
142
143  if "fstab_version" not in d:
144    d["fstab_version"] = "1"
145
146  try:
147    data = read_helper("META/imagesizes.txt")
148    for line in data.split("\n"):
149      if not line: continue
150      name, value = line.split(" ", 1)
151      if not value: continue
152      if name == "blocksize":
153        d[name] = value
154      else:
155        d[name + "_size"] = value
156  except KeyError:
157    pass
158
159  def makeint(key):
160    if key in d:
161      d[key] = int(d[key], 0)
162
163  makeint("recovery_api_version")
164  makeint("blocksize")
165  makeint("system_size")
166  makeint("vendor_size")
167  makeint("userdata_size")
168  makeint("cache_size")
169  makeint("recovery_size")
170  makeint("boot_size")
171  makeint("fstab_version")
172
173  d["fstab"] = LoadRecoveryFSTab(read_helper, d["fstab_version"])
174  d["build.prop"] = LoadBuildProp(read_helper)
175  return d
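# Sketch of how LoadInfoDict() is typically consumed (illustrative only; the
# target_files path is a placeholder):
#
#   input_zip = zipfile.ZipFile("target_files.zip", "r")
#   OPTIONS.info_dict = LoadInfoDict(input_zip)
#   if OPTIONS.verbose:
#     DumpInfoDict(OPTIONS.info_dict)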
176
177def LoadBuildProp(read_helper):
178  try:
179    data = read_helper("SYSTEM/build.prop")
180  except KeyError:
181    print "Warning: could not find SYSTEM/build.prop in input target-files"
182    data = ""
183  return LoadDictionaryFromLines(data.split("\n"))
184
185def LoadDictionaryFromLines(lines):
186  d = {}
187  for line in lines:
188    line = line.strip()
189    if not line or line.startswith("#"): continue
190    if "=" in line:
191      name, value = line.split("=", 1)
192      d[name] = value
193  return d
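# Example of the line format LoadDictionaryFromLines() understands (values are
# illustrative build.prop entries):
#
#   lines = ["# comment", "ro.build.id=LRX21M", "ro.product.device=hammerhead"]
#   d = LoadDictionaryFromLines(lines)
#   # d == {"ro.build.id": "LRX21M", "ro.product.device": "hammerhead"}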
194
195def LoadRecoveryFSTab(read_helper, fstab_version):
196  class Partition(object):
197    pass
198
199  try:
200    data = read_helper("RECOVERY/RAMDISK/etc/recovery.fstab")
201  except KeyError:
202    print "Warning: could not find RECOVERY/RAMDISK/etc/recovery.fstab"
203    data = ""
204
205  if fstab_version == 1:
206    d = {}
207    for line in data.split("\n"):
208      line = line.strip()
209      if not line or line.startswith("#"): continue
210      pieces = line.split()
211      if not (3 <= len(pieces) <= 5):
212        raise ValueError("malformed recovery.fstab line: \"%s\"" % (line,))
213
214      p = Partition()
215      p.mount_point = pieces[0]
216      p.fs_type = pieces[1]
217      p.device = pieces[2]
218      p.length = 0
219      options = None
220      if len(pieces) >= 4:
221        if pieces[3].startswith("/"):
222          p.device2 = pieces[3]
223          if len(pieces) >= 5:
224            options = pieces[4]
225        else:
226          p.device2 = None
227          options = pieces[3]
228      else:
229        p.device2 = None
230
231      if options:
232        options = options.split(",")
233        for i in options:
234          if i.startswith("length="):
235            p.length = int(i[7:])
236          else:
237            print "%s: unknown option \"%s\"" % (p.mount_point, i)
238
239      d[p.mount_point] = p
240
241  elif fstab_version == 2:
242    d = {}
243    for line in data.split("\n"):
244      line = line.strip()
245      if not line or line.startswith("#"): continue
246      pieces = line.split()
247      if len(pieces) != 5:
248        raise ValueError("malformed recovery.fstab line: \"%s\"" % (line,))
249
250      # Ignore entries that are managed by vold
251      options = pieces[4]
252      if "voldmanaged=" in options: continue
253
254      # It's a good line, parse it
255      p = Partition()
256      p.device = pieces[0]
257      p.mount_point = pieces[1]
258      p.fs_type = pieces[2]
259      p.device2 = None
260      p.length = 0
261
262      options = options.split(",")
263      for i in options:
264        if i.startswith("length="):
265          p.length = int(i[7:])
266        else:
267          # Ignore all unknown options in the unified fstab
268          continue
269
270      d[p.mount_point] = p
271
272  else:
273    raise ValueError("Unknown fstab_version: \"%d\"" % (fstab_version,))
274
275  return d
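# Illustrative fstab_version 2 entry (device path is a placeholder) and what
# the parser above makes of it:
#
#   /dev/block/bootdevice/by-name/system  /system  ext4  ro  wait
#
# yields a Partition with device="/dev/block/bootdevice/by-name/system",
# mount_point="/system", fs_type="ext4", device2=None, length=0; entries whose
# fs_mgr flags contain "voldmanaged=" are skipped.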
276
277
278def DumpInfoDict(d):
279  for k, v in sorted(d.items()):
280    print "%-25s = (%s) %s" % (k, type(v).__name__, v)
281
282def BuildBootableImage(sourcedir, fs_config_file, info_dict=None):
283  """Take a kernel, cmdline, and ramdisk directory from the input (in
284  'sourcedir'), and turn them into a boot image.  Return the image
285  data, or None if sourcedir does not appear to contain the files for
286  building the requested image."""
287
288  if (not os.access(os.path.join(sourcedir, "RAMDISK"), os.F_OK) or
289      not os.access(os.path.join(sourcedir, "kernel"), os.F_OK)):
290    return None
291
292  if info_dict is None:
293    info_dict = OPTIONS.info_dict
294
295  ramdisk_img = tempfile.NamedTemporaryFile()
296  img = tempfile.NamedTemporaryFile()
297
298  if os.access(fs_config_file, os.F_OK):
299    cmd = ["mkbootfs", "-f", fs_config_file, os.path.join(sourcedir, "RAMDISK")]
300  else:
301    cmd = ["mkbootfs", os.path.join(sourcedir, "RAMDISK")]
302  p1 = Run(cmd, stdout=subprocess.PIPE)
303  p2 = Run(["minigzip"],
304           stdin=p1.stdout, stdout=ramdisk_img.file.fileno())
305
306  p2.wait()
307  p1.wait()
308  assert p1.returncode == 0, "mkbootfs of %s ramdisk failed" % (os.path.basename(sourcedir),)
309  assert p2.returncode == 0, "minigzip of %s ramdisk failed" % (os.path.basename(sourcedir),)
310
311  # use MKBOOTIMG from environ, or "mkbootimg" if empty or not set
312  mkbootimg = os.getenv('MKBOOTIMG') or "mkbootimg"
313
314  cmd = [mkbootimg, "--kernel", os.path.join(sourcedir, "kernel")]
315
316  fn = os.path.join(sourcedir, "second")
317  if os.access(fn, os.F_OK):
318    cmd.append("--second")
319    cmd.append(fn)
320
321  fn = os.path.join(sourcedir, "cmdline")
322  if os.access(fn, os.F_OK):
323    cmd.append("--cmdline")
324    cmd.append(open(fn).read().rstrip("\n"))
325
326  fn = os.path.join(sourcedir, "base")
327  if os.access(fn, os.F_OK):
328    cmd.append("--base")
329    cmd.append(open(fn).read().rstrip("\n"))
330
331  fn = os.path.join(sourcedir, "pagesize")
332  if os.access(fn, os.F_OK):
333    cmd.append("--pagesize")
334    cmd.append(open(fn).read().rstrip("\n"))
335
336  args = info_dict.get("mkbootimg_args", None)
337  if args and args.strip():
338    cmd.extend(shlex.split(args))
339
340  cmd.extend(["--ramdisk", ramdisk_img.name,
341              "--output", img.name])
342
343  p = Run(cmd, stdout=subprocess.PIPE)
344  p.communicate()
345  assert p.returncode == 0, "mkbootimg of %s image failed" % (
346      os.path.basename(sourcedir),)
347
348  if info_dict.get("verity_key", None):
349    path = "/" + os.path.basename(sourcedir).lower()
350    cmd = ["boot_signer", path, img.name, info_dict["verity_key"] + ".pk8", info_dict["verity_key"] + ".x509.pem", img.name]
351    p = Run(cmd, stdout=subprocess.PIPE)
352    p.communicate()
353    assert p.returncode == 0, "boot_signer of %s image failed" % path
354
355  img.seek(0, os.SEEK_SET)
356  data = img.read()
357
358  ramdisk_img.close()
359  img.close()
360
361  return data
362
363
364def GetBootableImage(name, prebuilt_name, unpack_dir, tree_subdir,
365                     info_dict=None):
366  """Return a File object (with name 'name') with the desired bootable
367  image.  Look for it in 'unpack_dir'/BOOTABLE_IMAGES under the name
368  'prebuilt_name', otherwise look for it under 'unpack_dir'/IMAGES,
369  otherwise construct it from the source files in
370  'unpack_dir'/'tree_subdir'."""
371
372  prebuilt_path = os.path.join(unpack_dir, "BOOTABLE_IMAGES", prebuilt_name)
373  if os.path.exists(prebuilt_path):
374    print "using prebuilt %s from BOOTABLE_IMAGES..." % (prebuilt_name,)
375    return File.FromLocalFile(name, prebuilt_path)
376
377  prebuilt_path = os.path.join(unpack_dir, "IMAGES", prebuilt_name)
378  if os.path.exists(prebuilt_path):
379    print "using prebuilt %s from IMAGES..." % (prebuilt_name,)
380    return File.FromLocalFile(name, prebuilt_path)
381
382  print "building image from target_files %s..." % (tree_subdir,)
383  fs_config = "META/" + tree_subdir.lower() + "_filesystem_config.txt"
384  data = BuildBootableImage(os.path.join(unpack_dir, tree_subdir),
385                            os.path.join(unpack_dir, fs_config),
386                            info_dict)
387  if data:
388    return File(name, data)
389  return None
390
391
392def UnzipTemp(filename, pattern=None):
393  """Unzip the given archive into a temporary directory.
394
395  If filename is of the form "foo.zip+bar.zip", unzip foo.zip into a
396  temp dir, then unzip bar.zip into that_dir/BOOTABLE_IMAGES.
397
398  Returns (tempdir, zipobj) where zipobj is a zipfile.ZipFile (of the
399  main file), open for reading.
400  """
401
402  tmp = tempfile.mkdtemp(prefix="targetfiles-")
403  OPTIONS.tempfiles.append(tmp)
404
405  def unzip_to_dir(filename, dirname):
406    cmd = ["unzip", "-o", "-q", filename, "-d", dirname]
407    if pattern is not None:
408      cmd.append(pattern)
409    p = Run(cmd, stdout=subprocess.PIPE)
410    p.communicate()
411    if p.returncode != 0:
412      raise ExternalError("failed to unzip input target-files \"%s\"" %
413                          (filename,))
414
415  m = re.match(r"^(.*[.]zip)\+(.*[.]zip)$", filename, re.IGNORECASE)
416  if m:
417    unzip_to_dir(m.group(1), tmp)
418    unzip_to_dir(m.group(2), os.path.join(tmp, "BOOTABLE_IMAGES"))
419    filename = m.group(1)
420  else:
421    unzip_to_dir(filename, tmp)
422
423  return tmp, zipfile.ZipFile(filename, "r")
424
425
426def GetKeyPasswords(keylist):
427  """Given a list of keys, prompt the user to enter passwords for
428  those which require them.  Return a {key: password} dict.  password
429  will be None if the key has no password."""
430
431  no_passwords = []
432  need_passwords = []
433  key_passwords = {}
434  devnull = open("/dev/null", "w+b")
435  for k in sorted(keylist):
436    # We don't need a password for things that aren't really keys.
437    if k in SPECIAL_CERT_STRINGS:
438      no_passwords.append(k)
439      continue
440
441    p = Run(["openssl", "pkcs8", "-in", k+OPTIONS.private_key_suffix,
442             "-inform", "DER", "-nocrypt"],
443            stdin=devnull.fileno(),
444            stdout=devnull.fileno(),
445            stderr=subprocess.STDOUT)
446    p.communicate()
447    if p.returncode == 0:
448      # Definitely an unencrypted key.
449      no_passwords.append(k)
450    else:
451      p = Run(["openssl", "pkcs8", "-in", k+OPTIONS.private_key_suffix,
452               "-inform", "DER", "-passin", "pass:"],
453              stdin=devnull.fileno(),
454              stdout=devnull.fileno(),
455              stderr=subprocess.PIPE)
456      stdout, stderr = p.communicate()
457      if p.returncode == 0:
458        # Encrypted key with empty string as password.
459        key_passwords[k] = ''
460      elif stderr.startswith('Error decrypting key'):
461        # Definitely encrypted key.
462        # It would have said "Error reading key" if it didn't parse correctly.
463        need_passwords.append(k)
464      else:
465        # Potentially, a type of key that openssl doesn't understand.
466        # We'll let the routines in signapk.jar handle it.
467        no_passwords.append(k)
468  devnull.close()
469
470  key_passwords.update(PasswordManager().GetPasswords(need_passwords))
471  key_passwords.update(dict.fromkeys(no_passwords, None))
472  return key_passwords
473
474
475def SignFile(input_name, output_name, key, password, align=None,
476             whole_file=False):
477  """Sign the input_name zip/jar/apk, producing output_name.  Use the
478  given key and password (the latter may be None if the key does not
479  have a password).
480
481  If align is an integer > 1, zipalign is run to align stored files in
482  the output zip on 'align'-byte boundaries.
483
484  If whole_file is true, use the "-w" option to SignApk to embed a
485  signature that covers the whole file in the archive comment of the
486  zip file.
487  """
488
489  if align == 0 or align == 1:
490    align = None
491
492  if align:
493    temp = tempfile.NamedTemporaryFile()
494    sign_name = temp.name
495  else:
496    sign_name = output_name
497
498  cmd = [OPTIONS.java_path, OPTIONS.java_args, "-jar",
499         os.path.join(OPTIONS.search_path, OPTIONS.signapk_path)]
500  cmd.extend(OPTIONS.extra_signapk_args)
501  if whole_file:
502    cmd.append("-w")
503  cmd.extend([key + OPTIONS.public_key_suffix,
504              key + OPTIONS.private_key_suffix,
505              input_name, sign_name])
506
507  p = Run(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
508  if password is not None:
509    password += "\n"
510  p.communicate(password)
511  if p.returncode != 0:
512    raise ExternalError("signapk.jar failed: return code %s" % (p.returncode,))
513
514  if align:
515    p = Run(["zipalign", "-f", str(align), sign_name, output_name])
516    p.communicate()
517    if p.returncode != 0:
518      raise ExternalError("zipalign failed: return code %s" % (p.returncode,))
519    temp.close()
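# Hedged usage sketch for SignFile() (paths and the key basename are
# placeholders; assumes OPTIONS.search_path points at a host tools directory
# containing framework/signapk.jar):
#
#   key = "build/target/product/security/testkey"
#   passwords = GetKeyPasswords([key])
#   SignFile("unsigned.apk", "signed.apk", key, passwords[key], align=4)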
520
521
522def CheckSize(data, target, info_dict):
523  """Check the data string passed against the max size limit, if
524  any, for the given target.  Raise exception if the data is too big.
525  Print a warning if the data is nearing the maximum size."""
526
527  if target.endswith(".img"): target = target[:-4]
528  mount_point = "/" + target
529
530  fs_type = None
531  limit = None
532  if info_dict["fstab"]:
533    if mount_point == "/userdata": mount_point = "/data"
534    p = info_dict["fstab"][mount_point]
535    fs_type = p.fs_type
536    device = p.device
537    if "/" in device:
538      device = device[device.rfind("/")+1:]
539    limit = info_dict.get(device + "_size", None)
540  if not fs_type or not limit: return
541
542  if fs_type == "yaffs2":
543    # image size should be increased by 1/64th to account for the
544    # spare area (64 bytes per 2k page)
545    limit = limit / 2048 * (2048+64)
546  size = len(data)
547  pct = float(size) * 100.0 / limit
548  msg = "%s size (%d) is %.2f%% of limit (%d)" % (target, size, pct, limit)
549  if pct >= 99.0:
550    raise ExternalError(msg)
551  elif pct >= 95.0:
552    print
553    print "  WARNING: ", msg
554    print
555  elif OPTIONS.verbose:
556    print "  ", msg
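# Worked example of the yaffs2 adjustment above (numbers are illustrative): a
# raw limit of 134217728 bytes (128 MiB) becomes
# 134217728 / 2048 * (2048 + 64) = 138412032 bytes once the 64-byte spare area
# per 2048-byte page is counted.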
557
558
559def ReadApkCerts(tf_zip):
560  """Given a target_files ZipFile, parse the META/apkcerts.txt file
561  and return a {package: cert} dict."""
562  certmap = {}
563  for line in tf_zip.read("META/apkcerts.txt").split("\n"):
564    line = line.strip()
565    if not line: continue
566    m = re.match(r'^name="(.*)"\s+certificate="(.*)"\s+'
567                 r'private_key="(.*)"$', line)
568    if m:
569      name, cert, privkey = m.groups()
570      public_key_suffix_len = len(OPTIONS.public_key_suffix)
571      private_key_suffix_len = len(OPTIONS.private_key_suffix)
572      if cert in SPECIAL_CERT_STRINGS and not privkey:
573        certmap[name] = cert
574      elif (cert.endswith(OPTIONS.public_key_suffix) and
575            privkey.endswith(OPTIONS.private_key_suffix) and
576            cert[:-public_key_suffix_len] == privkey[:-private_key_suffix_len]):
577        certmap[name] = cert[:-public_key_suffix_len]
578      else:
579        raise ValueError("failed to parse line from apkcerts.txt:\n" + line)
580  return certmap
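# Example apkcerts.txt line and the resulting mapping (names and paths are
# placeholders):
#
#   name="Example.apk" certificate="certs/platform.x509.pem" private_key="certs/platform.pk8"
#
# produces certmap["Example.apk"] == "certs/platform", i.e. the shared
# basename with the public/private key suffixes stripped.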
581
582
583COMMON_DOCSTRING = """
584  -p  (--path)  <dir>
585      Prepend <dir>/bin to the list of places to search for binaries
586      run by this script, and expect to find jars in <dir>/framework.
587
588  -s  (--device_specific) <file>
589      Path to the python module containing device-specific
590      releasetools code.
591
592  -x  (--extra)  <key=value>
593      Add a key/value pair to the 'extras' dict, which device-specific
594      extension code may look at.
595
596  -v  (--verbose)
597      Show command lines being executed.
598
599  -h  (--help)
600      Display this usage message and exit.
601"""
602
603def Usage(docstring):
604  print docstring.rstrip("\n")
605  print COMMON_DOCSTRING
606
607
608def ParseOptions(argv,
609                 docstring,
610                 extra_opts="", extra_long_opts=(),
611                 extra_option_handler=None):
612  """Parse the options in argv and return any arguments that aren't
613  flags.  docstring is the calling module's docstring, to be displayed
614  for errors and -h.  extra_opts and extra_long_opts are for flags
615  defined by the caller, which are processed by passing them to
616  extra_option_handler."""
617
618  try:
619    opts, args = getopt.getopt(
620        argv, "hvp:s:x:" + extra_opts,
621        ["help", "verbose", "path=", "signapk_path=", "extra_signapk_args=",
622         "java_path=", "java_args=", "public_key_suffix=",
623         "private_key_suffix=", "device_specific=", "extra="] +
624        list(extra_long_opts))
625  except getopt.GetoptError, err:
626    Usage(docstring)
627    print "**", str(err), "**"
628    sys.exit(2)
629
630  path_specified = False
631
632  for o, a in opts:
633    if o in ("-h", "--help"):
634      Usage(docstring)
635      sys.exit()
636    elif o in ("-v", "--verbose"):
637      OPTIONS.verbose = True
638    elif o in ("-p", "--path"):
639      OPTIONS.search_path = a
640    elif o in ("--signapk_path",):
641      OPTIONS.signapk_path = a
642    elif o in ("--extra_signapk_args",):
643      OPTIONS.extra_signapk_args = shlex.split(a)
644    elif o in ("--java_path",):
645      OPTIONS.java_path = a
646    elif o in ("--java_args",):
647      OPTIONS.java_args = a
648    elif o in ("--public_key_suffix",):
649      OPTIONS.public_key_suffix = a
650    elif o in ("--private_key_suffix",):
651      OPTIONS.private_key_suffix = a
652    elif o in ("-s", "--device_specific"):
653      OPTIONS.device_specific = a
654    elif o in ("-x", "--extra"):
655      key, value = a.split("=", 1)
656      OPTIONS.extras[key] = value
657    else:
658      if extra_option_handler is None or not extra_option_handler(o, a):
659        assert False, "unknown option \"%s\"" % (o,)
660
661  if OPTIONS.search_path:
662    os.environ["PATH"] = (os.path.join(OPTIONS.search_path, "bin") +
663                          os.pathsep + os.environ["PATH"])
664
665  return args
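# Minimal sketch of a caller using ParseOptions() with its own flag (the --foo
# option and handler are hypothetical):
#
#   def option_handler(o, a):
#     if o in ("--foo",):
#       OPTIONS.extras["foo"] = a
#       return True
#     return False
#
#   args = ParseOptions(sys.argv[1:], __doc__,
#                       extra_long_opts=["foo="],
#                       extra_option_handler=option_handler)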
666
667
668def MakeTempFile(prefix=None, suffix=None):
669  """Make a temp file and add it to the list of things to be deleted
670  when Cleanup() is called.  Return the filename."""
671  fd, fn = tempfile.mkstemp(prefix=prefix, suffix=suffix)
672  os.close(fd)
673  OPTIONS.tempfiles.append(fn)
674  return fn
675
676
677def Cleanup():
678  for i in OPTIONS.tempfiles:
679    if os.path.isdir(i):
680      shutil.rmtree(i)
681    else:
682      os.remove(i)
683
684
685class PasswordManager(object):
686  def __init__(self):
687    self.editor = os.getenv("EDITOR", None)
688    self.pwfile = os.getenv("ANDROID_PW_FILE", None)
689
690  def GetPasswords(self, items):
691    """Get passwords corresponding to each string in 'items',
692    returning a dict.  (The dict may have keys in addition to the
693    values in 'items'.)
694
695    Uses the passwords in $ANDROID_PW_FILE if available, letting the
696    user edit that file to add more needed passwords.  If no editor is
697    available, or $ANDROID_PW_FILE isn't defined, prompts the user
698    interactively in the ordinary way.
699    """
700
701    current = self.ReadFile()
702
703    first = True
704    while True:
705      missing = []
706      for i in items:
707        if i not in current or not current[i]:
708          missing.append(i)
709      # Are all the passwords already in the file?
710      if not missing: return current
711
712      for i in missing:
713        current[i] = ""
714
715      if not first:
716        print "key file %s still missing some passwords." % (self.pwfile,)
717        answer = raw_input("try to edit again? [y]> ").strip()
718        if answer and answer[0] not in 'yY':
719          raise RuntimeError("key passwords unavailable")
720      first = False
721
722      current = self.UpdateAndReadFile(current)
723
724  def PromptResult(self, current):
725    """Prompt the user to enter a value (password) for each key in
726    'current' whose value is false.  Returns a new dict with all the
727    values.
728    """
729    result = {}
730    for k, v in sorted(current.iteritems()):
731      if v:
732        result[k] = v
733      else:
734        while True:
735          result[k] = getpass.getpass("Enter password for %s key> "
736                                      % (k,)).strip()
737          if result[k]: break
738    return result
739
740  def UpdateAndReadFile(self, current):
741    if not self.editor or not self.pwfile:
742      return self.PromptResult(current)
743
744    f = open(self.pwfile, "w")
745    os.chmod(self.pwfile, 0600)
746    f.write("# Enter key passwords between the [[[ ]]] brackets.\n")
747    f.write("# (Additional spaces are harmless.)\n\n")
748
749    first_line = None
750    sorted_list = [(not v, k, v) for (k, v) in current.iteritems()]
751    sorted_list.sort()
752    for i, (_, k, v) in enumerate(sorted_list):
753      f.write("[[[  %s  ]]] %s\n" % (v, k))
754      if not v and first_line is None:
755        # position cursor on first line with no password.
756        first_line = i + 4
757    f.close()
758
759    p = Run([self.editor, "+%d" % (first_line,), self.pwfile])
760    _, _ = p.communicate()
761
762    return self.ReadFile()
763
764  def ReadFile(self):
765    result = {}
766    if self.pwfile is None: return result
767    try:
768      f = open(self.pwfile, "r")
769      for line in f:
770        line = line.strip()
771        if not line or line[0] == '#': continue
772        m = re.match(r"^\[\[\[\s*(.*?)\s*\]\]\]\s*(\S+)$", line)
773        if not m:
774          print "failed to parse password file: ", line
775        else:
776          result[m.group(2)] = m.group(1)
777      f.close()
778    except IOError, e:
779      if e.errno != errno.ENOENT:
780        print "error reading password file: ", str(e)
781    return result
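# The $ANDROID_PW_FILE format that ReadFile()/UpdateAndReadFile() expect, with
# an illustrative key name and password:
#
#   [[[  swordfish  ]]] build/target/product/security/releasekey
#
# maps the key "build/target/product/security/releasekey" to the password
# "swordfish".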
782
783
784def ZipWriteStr(zip, filename, data, perms=0644, compression=None):
785  # use a fixed timestamp so the output is repeatable.
786  zinfo = zipfile.ZipInfo(filename=filename,
787                          date_time=(2009, 1, 1, 0, 0, 0))
788  if compression is None:
789    zinfo.compress_type = zip.compression
790  else:
791    zinfo.compress_type = compression
792  zinfo.external_attr = perms << 16
793  zip.writestr(zinfo, data)
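# Illustrative use of ZipWriteStr() (output path is a placeholder); the fixed
# 2009-01-01 timestamp above keeps repeated builds byte-identical:
#
#   z = zipfile.ZipFile("ota.zip", "w", zipfile.ZIP_DEFLATED)
#   ZipWriteStr(z, "META-INF/com/android/metadata", "post-build=example\n")
#   z.close()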
794
795
796class DeviceSpecificParams(object):
797  module = None
798  def __init__(self, **kwargs):
799    """Keyword arguments to the constructor become attributes of this
800    object, which is passed to all functions in the device-specific
801    module."""
802    for k, v in kwargs.iteritems():
803      setattr(self, k, v)
804    self.extras = OPTIONS.extras
805
806    if self.module is None:
807      path = OPTIONS.device_specific
808      if not path: return
809      try:
810        if os.path.isdir(path):
811          info = imp.find_module("releasetools", [path])
812        else:
813          d, f = os.path.split(path)
814          b, x = os.path.splitext(f)
815          if x == ".py":
816            f = b
817          info = imp.find_module(f, [d])
818        print "loaded device-specific extensions from", path
819        self.module = imp.load_module("device_specific", *info)
820      except ImportError:
821        print "unable to load device-specific module; assuming none"
822
823  def _DoCall(self, function_name, *args, **kwargs):
824    """Call the named function in the device-specific module, passing
825    the given args and kwargs.  The first argument to the call will be
826    the DeviceSpecific object itself.  If there is no module, or the
827    module does not define the function, return the value of the
828    'default' kwarg (which itself defaults to None)."""
829    if self.module is None or not hasattr(self.module, function_name):
830      return kwargs.get("default", None)
831    return getattr(self.module, function_name)(*((self,) + args), **kwargs)
832
833  def FullOTA_Assertions(self):
834    """Called after emitting the block of assertions at the top of a
835    full OTA package.  Implementations can add whatever additional
836    assertions they like."""
837    return self._DoCall("FullOTA_Assertions")
838
839  def FullOTA_InstallBegin(self):
840    """Called at the start of full OTA installation."""
841    return self._DoCall("FullOTA_InstallBegin")
842
843  def FullOTA_InstallEnd(self):
844    """Called at the end of full OTA installation; typically this is
845    used to install the image for the device's baseband processor."""
846    return self._DoCall("FullOTA_InstallEnd")
847
848  def IncrementalOTA_Assertions(self):
849    """Called after emitting the block of assertions at the top of an
850    incremental OTA package.  Implementations can add whatever
851    additional assertions they like."""
852    return self._DoCall("IncrementalOTA_Assertions")
853
854  def IncrementalOTA_VerifyBegin(self):
855    """Called at the start of the verification phase of incremental
856    OTA installation; additional checks can be placed here to abort
857    the script before any changes are made."""
858    return self._DoCall("IncrementalOTA_VerifyBegin")
859
860  def IncrementalOTA_VerifyEnd(self):
861    """Called at the end of the verification phase of incremental OTA
862    installation; additional checks can be placed here to abort the
863    script before any changes are made."""
864    return self._DoCall("IncrementalOTA_VerifyEnd")
865
866  def IncrementalOTA_InstallBegin(self):
867    """Called at the start of incremental OTA installation (after
868    verification is complete)."""
869    return self._DoCall("IncrementalOTA_InstallBegin")
870
871  def IncrementalOTA_InstallEnd(self):
872    """Called at the end of incremental OTA installation; typically
873    this is used to install the image for the device's baseband
874    processor."""
875    return self._DoCall("IncrementalOTA_InstallEnd")
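# Sketch of a device-specific releasetools module that these hooks would load
# (file name and contents are illustrative):
#
#   # device/<vendor>/<device>/releasetools.py
#   def FullOTA_InstallEnd(info):
#     # 'info' is the DeviceSpecificParams instance; attributes such as
#     # info.script are whatever keyword arguments the calling script passed
#     # to the constructor above.
#     info.script.Print("flashing vendor-specific images...")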
876
877class File(object):
878  def __init__(self, name, data):
879    self.name = name
880    self.data = data
881    self.size = len(data)
882    self.sha1 = sha1(data).hexdigest()
883
884  @classmethod
885  def FromLocalFile(cls, name, diskname):
886    f = open(diskname, "rb")
887    data = f.read()
888    f.close()
889    return File(name, data)
890
891  def WriteToTemp(self):
892    t = tempfile.NamedTemporaryFile()
893    t.write(self.data)
894    t.flush()
895    return t
896
897  def AddToZip(self, z, compression=None):
898    ZipWriteStr(z, self.name, self.data, compression=compression)
899
900DIFF_PROGRAM_BY_EXT = {
901    ".gz" : "imgdiff",
902    ".zip" : ["imgdiff", "-z"],
903    ".jar" : ["imgdiff", "-z"],
904    ".apk" : ["imgdiff", "-z"],
905    ".img" : "imgdiff",
906    }
907
908class Difference(object):
909  def __init__(self, tf, sf, diff_program=None):
910    self.tf = tf
911    self.sf = sf
912    self.patch = None
913    self.diff_program = diff_program
914
915  def ComputePatch(self):
916    """Compute the patch (as a string of data) needed to turn sf into
917    tf.  Returns the same tuple as GetPatch()."""
918
919    tf = self.tf
920    sf = self.sf
921
922    if self.diff_program:
923      diff_program = self.diff_program
924    else:
925      ext = os.path.splitext(tf.name)[1]
926      diff_program = DIFF_PROGRAM_BY_EXT.get(ext, "bsdiff")
927
928    ttemp = tf.WriteToTemp()
929    stemp = sf.WriteToTemp()
930
931    ext = os.path.splitext(tf.name)[1]
932
933    try:
934      ptemp = tempfile.NamedTemporaryFile()
935      if isinstance(diff_program, list):
936        cmd = copy.copy(diff_program)
937      else:
938        cmd = [diff_program]
939      cmd.append(stemp.name)
940      cmd.append(ttemp.name)
941      cmd.append(ptemp.name)
942      p = Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
943      err = []
944      def run():
945        _, e = p.communicate()
946        if e: err.append(e)
947      th = threading.Thread(target=run)
948      th.start()
949      th.join(timeout=300)   # 5 mins
950      if th.is_alive():
951        print "WARNING: diff command timed out"
952        p.terminate()
953        th.join(5)
954        if th.is_alive():
955          p.kill()
956          th.join()
957
958      if err or p.returncode != 0:
959        print "WARNING: failure running %s:\n%s\n" % (
960            diff_program, "".join(err))
961        self.patch = None
962        return None, None, None
963      diff = ptemp.read()
964    finally:
965      ptemp.close()
966      stemp.close()
967      ttemp.close()
968
969    self.patch = diff
970    return self.tf, self.sf, self.patch
971
972
973  def GetPatch(self):
974    """Return a tuple (target_file, source_file, patch_data).
975    patch_data may be None if ComputePatch hasn't been called, or if
976    computing the patch failed."""
977    return self.tf, self.sf, self.patch
978
979
980def ComputeDifferences(diffs):
981  """Call ComputePatch on all the Difference objects in 'diffs'."""
982  print len(diffs), "diffs to compute"
983
984  # Do the largest files first, to try and reduce the long-pole effect.
985  by_size = [(i.tf.size, i) for i in diffs]
986  by_size.sort(reverse=True)
987  by_size = [i[1] for i in by_size]
988
989  lock = threading.Lock()
990  diff_iter = iter(by_size)   # accessed under lock
991
992  def worker():
993    try:
994      lock.acquire()
995      for d in diff_iter:
996        lock.release()
997        start = time.time()
998        d.ComputePatch()
999        dur = time.time() - start
1000        lock.acquire()
1001
1002        tf, sf, patch = d.GetPatch()
1003        if sf.name == tf.name:
1004          name = tf.name
1005        else:
1006          name = "%s (%s)" % (tf.name, sf.name)
1007        if patch is None:
1008          print "patching failed!                                  %s" % (name,)
1009        else:
1010          print "%8.2f sec %8d / %8d bytes (%6.2f%%) %s" % (
1011              dur, len(patch), tf.size, 100.0 * len(patch) / tf.size, name)
1012      lock.release()
1013    except Exception, e:
1014      print e
1015      raise
1016
1017  # start worker threads; wait for them all to finish.
1018  threads = [threading.Thread(target=worker)
1019             for i in range(OPTIONS.worker_threads)]
1020  for th in threads:
1021    th.start()
1022  while threads:
1023    threads.pop().join()
1024
1025
1026class BlockDifference(object):
1027  def __init__(self, partition, tgt, src=None, check_first_block=False):
1028    self.tgt = tgt
1029    self.src = src
1030    self.partition = partition
1031    self.check_first_block = check_first_block
1032
1033    version = 1
1034    if OPTIONS.info_dict:
1035      version = max(
1036          int(i) for i in
1037          OPTIONS.info_dict.get("blockimgdiff_versions", "1").split(","))
1038
1039    b = blockimgdiff.BlockImageDiff(tgt, src, threads=OPTIONS.worker_threads,
1040                                    version=version)
1041    tmpdir = tempfile.mkdtemp()
1042    OPTIONS.tempfiles.append(tmpdir)
1043    self.path = os.path.join(tmpdir, partition)
1044    b.Compute(self.path)
1045
1046    _, self.device = GetTypeAndDevice("/" + partition, OPTIONS.info_dict)
1047
1048  def WriteScript(self, script, output_zip, progress=None):
1049    if not self.src:
1050      # write the output unconditionally
1051      script.Print("Patching %s image unconditionally..." % (self.partition,))
1052    else:
1053      script.Print("Patching %s image after verification." % (self.partition,))
1054
1055    if progress: script.ShowProgress(progress, 0)
1056    self._WriteUpdate(script, output_zip)
1057
1058  def WriteVerifyScript(self, script):
1059    partition = self.partition
1060    if not self.src:
1061      script.Print("Image %s will be patched unconditionally." % (partition,))
1062    else:
1063      script.AppendExtra(('if block_image_verify("%s", '
1064                          'package_extract_file("%s.transfer.list"), '
1065                          '"%s.new.dat", "%s.patch.dat") then') %
1066                         (self.device, partition, partition, partition))
1067      script.Print("Verified %s image..." % (partition,))
1068      script.AppendExtra('else')
1069
1070      if self.check_first_block:
1071        self._CheckFirstBlock(script)
1072
1073      script.AppendExtra(('(range_sha1("%s", "%s") == "%s") ||\n'
1074                          '  abort("%s partition has unexpected contents");\n'
1075                          'endif;') %
1076                         (self.device, self.tgt.care_map.to_string_raw(),
1077                          self.tgt.TotalSha1(), self.partition))
1078
1079  def _WriteUpdate(self, script, output_zip):
1080    partition = self.partition
1081    with open(self.path + ".transfer.list", "rb") as f:
1082      ZipWriteStr(output_zip, partition + ".transfer.list", f.read())
1083    with open(self.path + ".new.dat", "rb") as f:
1084      ZipWriteStr(output_zip, partition + ".new.dat", f.read())
1085    with open(self.path + ".patch.dat", "rb") as f:
1086      ZipWriteStr(output_zip, partition + ".patch.dat", f.read(),
1087                         compression=zipfile.ZIP_STORED)
1088
1089    call = (('block_image_update("%s", '
1090             'package_extract_file("%s.transfer.list"), '
1091             '"%s.new.dat", "%s.patch.dat");\n') %
1092            (self.device, partition, partition, partition))
1093    script.AppendExtra(script._WordWrap(call))
1094
1095  def _HashBlocks(self, source, ranges):
1096    data = source.ReadRangeSet(ranges)
1097    ctx = sha1()
1098
1099    for p in data:
1100      ctx.update(p)
1101
1102    return ctx.hexdigest()
1103
1104  def _CheckFirstBlock(self, script):
1105    r = RangeSet((0, 1))
1106    srchash = self._HashBlocks(self.src, r)
1107    tgthash = self._HashBlocks(self.tgt, r)
1108
1109    script.AppendExtra(('(range_sha1("%s", "%s") == "%s") || '
1110                        '(range_sha1("%s", "%s") == "%s") || '
1111                        'abort("%s has been remounted R/W; '
1112                        'reflash device to reenable OTA updates");')
1113                       % (self.device, r.to_string_raw(), srchash,
1114                          self.device, r.to_string_raw(), tgthash,
1115                          self.device))
1116
1117DataImage = blockimgdiff.DataImage
1118
1119
1120# map recovery.fstab's fs_types to mount/format "partition types"
1121PARTITION_TYPES = { "yaffs2": "MTD", "mtd": "MTD",
1122                    "ext4": "EMMC", "emmc": "EMMC",
1123                    "f2fs": "EMMC" }
1124
1125def GetTypeAndDevice(mount_point, info):
1126  fstab = info["fstab"]
1127  if fstab:
1128    return PARTITION_TYPES[fstab[mount_point].fs_type], fstab[mount_point].device
1129  else:
1130    return None
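# Example (illustrative fstab entry): if info["fstab"]["/boot"] has fs_type
# "emmc" and device "/dev/block/bootdevice/by-name/boot", then
# GetTypeAndDevice("/boot", info) returns
# ("EMMC", "/dev/block/bootdevice/by-name/boot").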
1131
1132
1133def ParseCertificate(data):
1134  """Parse a PEM-format certificate."""
1135  cert = []
1136  save = False
1137  for line in data.split("\n"):
1138    if "--END CERTIFICATE--" in line:
1139      break
1140    if save:
1141      cert.append(line)
1142    if "--BEGIN CERTIFICATE--" in line:
1143      save = True
1144  cert = "".join(cert).decode('base64')
1145  return cert
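# Minimal sketch of ParseCertificate() usage (certificate path is a
# placeholder): it strips the BEGIN/END markers and base64-decodes the body,
# returning DER bytes suitable for signapk or OpenSSL tools.
#
#   der = ParseCertificate(open("platform.x509.pem").read())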
1146
1147def MakeRecoveryPatch(input_dir, output_sink, recovery_img, boot_img,
1148                      info_dict=None):
1149  """Generate a binary patch that creates the recovery image starting
1150  with the boot image.  (Most of the space in these images is just the
1151  kernel, which is identical for the two, so the resulting patch
1152  should be efficient.)  Add it to the output zip, along with a shell
1153  script that is run from init.rc on first boot to actually do the
1154  patching and install the new recovery image.
1155
1156  recovery_img and boot_img should be File objects for the
1157  corresponding images.  info_dict should be the dictionary returned by
1158  common.LoadInfoDict() on the input target_files.
1159  """
1160
1161  if info_dict is None:
1162    info_dict = OPTIONS.info_dict
1163
1164  diff_program = ["imgdiff"]
1165  path = os.path.join(input_dir, "SYSTEM", "etc", "recovery-resource.dat")
1166  if os.path.exists(path):
1167    diff_program.append("-b")
1168    diff_program.append(path)
1169    bonus_args = "-b /system/etc/recovery-resource.dat"
1170  else:
1171    bonus_args = ""
1172
1173  d = Difference(recovery_img, boot_img, diff_program=diff_program)
1174  _, _, patch = d.ComputePatch()
1175  output_sink("recovery-from-boot.p", patch)
1176
1177  td_pair = GetTypeAndDevice("/boot", info_dict)
1178  if not td_pair:
1179    return
1180  boot_type, boot_device = td_pair
1181  td_pair = GetTypeAndDevice("/recovery", info_dict)
1182  if not td_pair:
1183    return
1184  recovery_type, recovery_device = td_pair
1185
1186  sh = """#!/system/bin/sh
1187if ! applypatch -c %(recovery_type)s:%(recovery_device)s:%(recovery_size)d:%(recovery_sha1)s; then
1188  applypatch %(bonus_args)s %(boot_type)s:%(boot_device)s:%(boot_size)d:%(boot_sha1)s %(recovery_type)s:%(recovery_device)s %(recovery_sha1)s %(recovery_size)d %(boot_sha1)s:/system/recovery-from-boot.p && log -t recovery "Installing new recovery image: succeeded" || log -t recovery "Installing new recovery image: failed"
1189else
1190  log -t recovery "Recovery image already installed"
1191fi
1192""" % { 'boot_size': boot_img.size,
1193        'boot_sha1': boot_img.sha1,
1194        'recovery_size': recovery_img.size,
1195        'recovery_sha1': recovery_img.sha1,
1196        'boot_type': boot_type,
1197        'boot_device': boot_device,
1198        'recovery_type': recovery_type,
1199        'recovery_device': recovery_device,
1200        'bonus_args': bonus_args,
1201        }
1202
1203  # The install script location moved from /system/etc to /system/bin
1204  # in the L release.  Parse the init.rc file to find out where the
1205  # target-files expects it to be, and put it there.
1206  sh_location = "etc/install-recovery.sh"
1207  try:
1208    with open(os.path.join(input_dir, "BOOT", "RAMDISK", "init.rc")) as f:
1209      for line in f:
1210        m = re.match(r"^service flash_recovery /system/(\S+)\s*$", line)
1211        if m:
1212          sh_location = m.group(1)
1213          print "putting script in", sh_location
1214          break
1215  except (OSError, IOError), e:
1216    print "failed to read init.rc: %s" % (e,)
1217
1218  output_sink(sh_location, sh)
1219