common.py revision f3282b4a7fda46dfb546f2822e0f2081b4ced7ff
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import copy
import errno
import getopt
import getpass
import imp
import os
import platform
import re
import shlex
import shutil
import subprocess
import sys
import tempfile
import threading
import time
import zipfile

import blockimgdiff
import rangelib

from hashlib import sha1 as sha1


class Options(object):
  def __init__(self):
    platform_search_path = {
        "linux2": "out/host/linux-x86",
        "darwin": "out/host/darwin-x86",
    }

    self.search_path = platform_search_path.get(sys.platform, None)
    self.signapk_path = "framework/signapk.jar"  # Relative to search_path
    self.extra_signapk_args = []
    self.java_path = "java"  # Use the one on the path by default.
    self.java_args = "-Xmx2048m" # JVM Args
    self.public_key_suffix = ".x509.pem"
    self.private_key_suffix = ".pk8"
    # use otatools built boot_signer by default
    self.boot_signer_path = "boot_signer"
    self.verbose = False
    self.tempfiles = []
    self.device_specific = None
    self.extras = {}
    self.info_dict = None
    self.worker_threads = None


OPTIONS = Options()


# Values for "certificate" in apkcerts that mean special things.
SPECIAL_CERT_STRINGS = ("PRESIGNED", "EXTERNAL")


class ExternalError(RuntimeError):
  pass


def Run(args, **kwargs):
  """Create and return a subprocess.Popen object, printing the command
  line on the terminal if -v was specified."""
  if OPTIONS.verbose:
    print "  running: ", " ".join(args)
  return subprocess.Popen(args, **kwargs)


def CloseInheritedPipes():
  """gmake on Mac OS leaks file descriptors (pipes).  Close those fds
  before doing other work."""
  if platform.system() != "Darwin":
    return
  for d in range(3, 1025):
    try:
      stat = os.fstat(d)
      if stat is not None:
        pipebit = stat[0] & 0x1000
        if pipebit != 0:
          os.close(d)
    except OSError:
      pass


def LoadInfoDict(input_file):
  """Read and parse the META/misc_info.txt key/value pairs from the
  input target files and return a dict."""

  def read_helper(fn):
    if isinstance(input_file, zipfile.ZipFile):
      return input_file.read(fn)
    else:
      path = os.path.join(input_file, *fn.split("/"))
      try:
        with open(path) as f:
          return f.read()
      except IOError as e:
        if e.errno == errno.ENOENT:
          raise KeyError(fn)
  d = {}
  try:
    d = LoadDictionaryFromLines(read_helper("META/misc_info.txt").split("\n"))
  except KeyError:
    # ok if misc_info.txt doesn't exist
    pass

  # backwards compatibility: These values used to be in their own
  # files.  Look for them, in case we're processing an old
  # target_files zip.

  if "mkyaffs2_extra_flags" not in d:
    try:
      d["mkyaffs2_extra_flags"] = read_helper(
          "META/mkyaffs2-extra-flags.txt").strip()
    except KeyError:
      # ok if flags don't exist
      pass

  if "recovery_api_version" not in d:
    try:
      d["recovery_api_version"] = read_helper(
          "META/recovery-api-version.txt").strip()
    except KeyError:
      raise ValueError("can't find recovery API version in input target-files")

  if "tool_extensions" not in d:
    try:
      d["tool_extensions"] = read_helper("META/tool-extensions.txt").strip()
    except KeyError:
      # ok if extensions don't exist
      pass

  if "fstab_version" not in d:
    d["fstab_version"] = "1"

  try:
    data = read_helper("META/imagesizes.txt")
    for line in data.split("\n"):
      if not line:
        continue
      name, value = line.split(" ", 1)
      if not value:
        continue
      if name == "blocksize":
        d[name] = value
      else:
        d[name + "_size"] = value
  except KeyError:
    pass

  def makeint(key):
    if key in d:
      d[key] = int(d[key], 0)

  makeint("recovery_api_version")
  makeint("blocksize")
  makeint("system_size")
  makeint("vendor_size")
  makeint("userdata_size")
  makeint("cache_size")
  makeint("recovery_size")
  makeint("boot_size")
  makeint("fstab_version")

  d["fstab"] = LoadRecoveryFSTab(read_helper, d["fstab_version"])
  d["build.prop"] = LoadBuildProp(read_helper)
  return d

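# A minimal usage sketch (the zip filename is hypothetical): the info dict can
# be loaded either from an unpacked target-files directory or from an open
# target-files zip.
#
#   input_zip = zipfile.ZipFile("target_files.zip", "r")
#   info = LoadInfoDict(input_zip)
#   print info["recovery_api_version"], info["fstab"]["/system"].device
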
def LoadBuildProp(read_helper):
  try:
    data = read_helper("SYSTEM/build.prop")
  except KeyError:
    print "Warning: could not find SYSTEM/build.prop in input target-files"
    data = ""
  return LoadDictionaryFromLines(data.split("\n"))

def LoadDictionaryFromLines(lines):
  d = {}
  for line in lines:
    line = line.strip()
    if not line or line.startswith("#"):
      continue
    if "=" in line:
      name, value = line.split("=", 1)
      d[name] = value
  return d

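# For example, feeding the build.prop / misc_info.txt style lines below
# through LoadDictionaryFromLines yields {"ro.build.id": "LMY47D",
# "recovery_api_version": "3"}; comments and blank lines are skipped and all
# values stay strings.
#
#   LoadDictionaryFromLines([
#       "# a comment",
#       "ro.build.id=LMY47D",
#       "recovery_api_version=3",
#   ])
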
def LoadRecoveryFSTab(read_helper, fstab_version):
  class Partition(object):
    def __init__(self, mount_point, fs_type, device, length, device2):
      self.mount_point = mount_point
      self.fs_type = fs_type
      self.device = device
      self.length = length
      self.device2 = device2

  try:
    data = read_helper("RECOVERY/RAMDISK/etc/recovery.fstab")
  except KeyError:
    print "Warning: could not find RECOVERY/RAMDISK/etc/recovery.fstab"
    data = ""

  if fstab_version == 1:
    d = {}
    for line in data.split("\n"):
      line = line.strip()
      if not line or line.startswith("#"):
        continue
      pieces = line.split()
      if not 3 <= len(pieces) <= 4:
        raise ValueError("malformed recovery.fstab line: \"%s\"" % (line,))
      options = None
      if len(pieces) >= 4:
        if pieces[3].startswith("/"):
          device2 = pieces[3]
          if len(pieces) >= 5:
            options = pieces[4]
        else:
          device2 = None
          options = pieces[3]
      else:
        device2 = None

      mount_point = pieces[0]
      length = 0
      if options:
        options = options.split(",")
        for i in options:
          if i.startswith("length="):
            length = int(i[7:])
          else:
            print "%s: unknown option \"%s\"" % (mount_point, i)

      d[mount_point] = Partition(mount_point=mount_point, fs_type=pieces[1],
                                 device=pieces[2], length=length,
                                 device2=device2)

  elif fstab_version == 2:
    d = {}
    for line in data.split("\n"):
      line = line.strip()
      if not line or line.startswith("#"):
        continue
      pieces = line.split()
      if len(pieces) != 5:
        raise ValueError("malformed recovery.fstab line: \"%s\"" % (line,))

      # Ignore entries that are managed by vold
      options = pieces[4]
      if "voldmanaged=" in options:
        continue

      # It's a good line, parse it
      length = 0
      options = options.split(",")
      for i in options:
        if i.startswith("length="):
          length = int(i[7:])
        else:
          # Ignore all unknown options in the unified fstab
          continue

      mount_point = pieces[1]
      d[mount_point] = Partition(mount_point=mount_point, fs_type=pieces[2],
                                 device=pieces[0], length=length, device2=None)

  else:
    raise ValueError("Unknown fstab_version: \"%d\"" % (fstab_version,))

  return d


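# For reference, a version-2 recovery.fstab line has the five fields
# <device> <mount_point> <fs_type> <mnt_flags> <fs_mgr_flags>; the sample
# below uses a made-up device path purely for illustration:
#
#   /dev/block/platform/msm_sdcc.1/by-name/system  /system  ext4  ro  wait
#
# It would be parsed into d["/system"] with fs_type "ext4" and length 0.
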
def DumpInfoDict(d):
  for k, v in sorted(d.items()):
    print "%-25s = (%s) %s" % (k, type(v).__name__, v)


def BuildBootableImage(sourcedir, fs_config_file, info_dict=None):
  """Take a kernel, cmdline, and ramdisk directory from the input (in
  'sourcedir'), and turn them into a boot image.  Return the image
  data, or None if sourcedir does not appear to contain the files for
  building the requested image."""

  if (not os.access(os.path.join(sourcedir, "RAMDISK"), os.F_OK) or
      not os.access(os.path.join(sourcedir, "kernel"), os.F_OK)):
    return None

  if info_dict is None:
    info_dict = OPTIONS.info_dict

  ramdisk_img = tempfile.NamedTemporaryFile()
  img = tempfile.NamedTemporaryFile()

  if os.access(fs_config_file, os.F_OK):
    cmd = ["mkbootfs", "-f", fs_config_file, os.path.join(sourcedir, "RAMDISK")]
  else:
    cmd = ["mkbootfs", os.path.join(sourcedir, "RAMDISK")]
  p1 = Run(cmd, stdout=subprocess.PIPE)
  p2 = Run(["minigzip"],
           stdin=p1.stdout, stdout=ramdisk_img.file.fileno())

  p2.wait()
  p1.wait()
  assert p1.returncode == 0, "mkbootfs of %s ramdisk failed" % (sourcedir,)
  assert p2.returncode == 0, "minigzip of %s ramdisk failed" % (sourcedir,)

  # use MKBOOTIMG from environ, or "mkbootimg" if empty or not set
  mkbootimg = os.getenv('MKBOOTIMG') or "mkbootimg"

  cmd = [mkbootimg, "--kernel", os.path.join(sourcedir, "kernel")]

  fn = os.path.join(sourcedir, "second")
  if os.access(fn, os.F_OK):
    cmd.append("--second")
    cmd.append(fn)

  fn = os.path.join(sourcedir, "cmdline")
  if os.access(fn, os.F_OK):
    cmd.append("--cmdline")
    cmd.append(open(fn).read().rstrip("\n"))

  fn = os.path.join(sourcedir, "base")
  if os.access(fn, os.F_OK):
    cmd.append("--base")
    cmd.append(open(fn).read().rstrip("\n"))

  fn = os.path.join(sourcedir, "pagesize")
  if os.access(fn, os.F_OK):
    cmd.append("--pagesize")
    cmd.append(open(fn).read().rstrip("\n"))

  args = info_dict.get("mkbootimg_args", None)
  if args and args.strip():
    cmd.extend(shlex.split(args))

  img_unsigned = None
  if info_dict.get("vboot", None):
    img_unsigned = tempfile.NamedTemporaryFile()
    cmd.extend(["--ramdisk", ramdisk_img.name,
                "--output", img_unsigned.name])
  else:
    cmd.extend(["--ramdisk", ramdisk_img.name,
                "--output", img.name])

  p = Run(cmd, stdout=subprocess.PIPE)
  p.communicate()
  assert p.returncode == 0, "mkbootimg of %s image failed" % (
      os.path.basename(sourcedir),)

  if (info_dict.get("boot_signer", None) == "true" and
      info_dict.get("verity_key", None)):
    path = "/" + os.path.basename(sourcedir).lower()
    cmd = [OPTIONS.boot_signer_path, path, img.name,
           info_dict["verity_key"] + ".pk8",
           info_dict["verity_key"] + ".x509.pem", img.name]
    p = Run(cmd, stdout=subprocess.PIPE)
    p.communicate()
    assert p.returncode == 0, "boot_signer of %s image failed" % path

  # Sign the image if vboot is non-empty.
  elif info_dict.get("vboot", None):
    path = "/" + os.path.basename(sourcedir).lower()
    img_keyblock = tempfile.NamedTemporaryFile()
    cmd = [info_dict["vboot_signer_cmd"], info_dict["futility"],
           img_unsigned.name, info_dict["vboot_key"] + ".vbpubk",
           info_dict["vboot_key"] + ".vbprivk", img_keyblock.name,
           img.name]
    p = Run(cmd, stdout=subprocess.PIPE)
    p.communicate()
    assert p.returncode == 0, "vboot_signer of %s image failed" % path

    # Clean up the temp files.
    img_unsigned.close()
    img_keyblock.close()

  img.seek(0, os.SEEK_SET)
  data = img.read()

  ramdisk_img.close()
  img.close()

  return data


def GetBootableImage(name, prebuilt_name, unpack_dir, tree_subdir,
                     info_dict=None):
  """Return a File object (with name 'name') with the desired bootable
  image.  Look for it in 'unpack_dir'/BOOTABLE_IMAGES under the name
  'prebuilt_name', otherwise look for it under 'unpack_dir'/IMAGES,
  otherwise construct it from the source files in
  'unpack_dir'/'tree_subdir'."""

  prebuilt_path = os.path.join(unpack_dir, "BOOTABLE_IMAGES", prebuilt_name)
  if os.path.exists(prebuilt_path):
    print "using prebuilt %s from BOOTABLE_IMAGES..." % (prebuilt_name,)
    return File.FromLocalFile(name, prebuilt_path)

  prebuilt_path = os.path.join(unpack_dir, "IMAGES", prebuilt_name)
  if os.path.exists(prebuilt_path):
    print "using prebuilt %s from IMAGES..." % (prebuilt_name,)
    return File.FromLocalFile(name, prebuilt_path)

  print "building image from target_files %s..." % (tree_subdir,)
  fs_config = "META/" + tree_subdir.lower() + "_filesystem_config.txt"
  data = BuildBootableImage(os.path.join(unpack_dir, tree_subdir),
                            os.path.join(unpack_dir, fs_config),
                            info_dict)
  if data:
    return File(name, data)
  return None


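# Typical call (a sketch; the unpacked directory name is hypothetical):
#
#   boot_img = GetBootableImage("boot.img", "boot.img",
#                               "/tmp/targetfiles-XXX", "BOOT")
#
# which prefers a prebuilt from BOOTABLE_IMAGES/ or IMAGES/ and otherwise
# builds the image from the BOOT/ tree via BuildBootableImage().
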
def UnzipTemp(filename, pattern=None):
  """Unzip the given archive into a temporary directory.

  If filename is of the form "foo.zip+bar.zip", unzip foo.zip into a
  temp dir, then unzip bar.zip into that_dir/BOOTABLE_IMAGES.

  Returns (tempdir, zipobj) where zipobj is a zipfile.ZipFile (of the
  main file), open for reading.
  """

  tmp = tempfile.mkdtemp(prefix="targetfiles-")
  OPTIONS.tempfiles.append(tmp)

  def unzip_to_dir(filename, dirname):
    cmd = ["unzip", "-o", "-q", filename, "-d", dirname]
    if pattern is not None:
      cmd.append(pattern)
    p = Run(cmd, stdout=subprocess.PIPE)
    p.communicate()
    if p.returncode != 0:
      raise ExternalError("failed to unzip input target-files \"%s\"" %
                          (filename,))

  m = re.match(r"^(.*[.]zip)\+(.*[.]zip)$", filename, re.IGNORECASE)
  if m:
    unzip_to_dir(m.group(1), tmp)
    unzip_to_dir(m.group(2), os.path.join(tmp, "BOOTABLE_IMAGES"))
    filename = m.group(1)
  else:
    unzip_to_dir(filename, tmp)

  return tmp, zipfile.ZipFile(filename, "r")


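# A minimal sketch (the filename is assumed):
#
#   tmp_dir, input_zip = UnzipTemp("target_files.zip", pattern="META/*")
#
# tmp_dir is registered in OPTIONS.tempfiles, so a later Cleanup() removes it.
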
def GetKeyPasswords(keylist):
  """Given a list of keys, prompt the user to enter passwords for
  those which require them.  Return a {key: password} dict.  password
  will be None if the key has no password."""

  no_passwords = []
  need_passwords = []
  key_passwords = {}
  devnull = open("/dev/null", "w+b")
  for k in sorted(keylist):
    # We don't need a password for things that aren't really keys.
    if k in SPECIAL_CERT_STRINGS:
      no_passwords.append(k)
      continue

    p = Run(["openssl", "pkcs8", "-in", k+OPTIONS.private_key_suffix,
             "-inform", "DER", "-nocrypt"],
            stdin=devnull.fileno(),
            stdout=devnull.fileno(),
            stderr=subprocess.STDOUT)
    p.communicate()
    if p.returncode == 0:
      # Definitely an unencrypted key.
      no_passwords.append(k)
    else:
      p = Run(["openssl", "pkcs8", "-in", k+OPTIONS.private_key_suffix,
               "-inform", "DER", "-passin", "pass:"],
              stdin=devnull.fileno(),
              stdout=devnull.fileno(),
              stderr=subprocess.PIPE)
      _, stderr = p.communicate()
      if p.returncode == 0:
        # Encrypted key with empty string as password.
        key_passwords[k] = ''
      elif stderr.startswith('Error decrypting key'):
        # Definitely encrypted key.
        # It would have said "Error reading key" if it didn't parse correctly.
        need_passwords.append(k)
      else:
        # Potentially, a type of key that openssl doesn't understand.
        # We'll let the routines in signapk.jar handle it.
        no_passwords.append(k)
  devnull.close()

  key_passwords.update(PasswordManager().GetPasswords(need_passwords))
  key_passwords.update(dict.fromkeys(no_passwords, None))
  return key_passwords


def SignFile(input_name, output_name, key, password, align=None,
             whole_file=False):
  """Sign the input_name zip/jar/apk, producing output_name.  Use the
  given key and password (the latter may be None if the key does not
  have a password.)

  If align is an integer > 1, zipalign is run to align stored files in
  the output zip on 'align'-byte boundaries.

  If whole_file is true, use the "-w" option to SignApk to embed a
  signature that covers the whole file in the archive comment of the
  zip file.
  """

  if align == 0 or align == 1:
    align = None

  if align:
    temp = tempfile.NamedTemporaryFile()
    sign_name = temp.name
  else:
    sign_name = output_name

  cmd = [OPTIONS.java_path, OPTIONS.java_args, "-jar",
         os.path.join(OPTIONS.search_path, OPTIONS.signapk_path)]
  cmd.extend(OPTIONS.extra_signapk_args)
  if whole_file:
    cmd.append("-w")
  cmd.extend([key + OPTIONS.public_key_suffix,
              key + OPTIONS.private_key_suffix,
              input_name, sign_name])

  p = Run(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
  if password is not None:
    password += "\n"
  p.communicate(password)
  if p.returncode != 0:
    raise ExternalError("signapk.jar failed: return code %s" % (p.returncode,))

  if align:
    p = Run(["zipalign", "-f", str(align), sign_name, output_name])
    p.communicate()
    if p.returncode != 0:
      raise ExternalError("zipalign failed: return code %s" % (p.returncode,))
    temp.close()


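# Sketch of a typical call (key path and file names are placeholders):
#
#   key = "build/target/product/security/testkey"
#   passwords = GetKeyPasswords([key])
#   SignFile("unsigned.apk", "signed.apk", key, passwords[key], align=4)
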
def CheckSize(data, target, info_dict):
  """Check the data string passed against the max size limit, if
  any, for the given target.  Raise exception if the data is too big.
  Print a warning if the data is nearing the maximum size."""

  if target.endswith(".img"):
    target = target[:-4]
  mount_point = "/" + target

  fs_type = None
  limit = None
  if info_dict["fstab"]:
    if mount_point == "/userdata":
      mount_point = "/data"
    p = info_dict["fstab"][mount_point]
    fs_type = p.fs_type
    device = p.device
    if "/" in device:
      device = device[device.rfind("/")+1:]
    limit = info_dict.get(device + "_size", None)
  if not fs_type or not limit:
    return

  if fs_type == "yaffs2":
    # image size should be increased by 1/32 to account for the
    # spare area (64 bytes per 2k page)
    limit = limit / 2048 * (2048+64)
  size = len(data)
  pct = float(size) * 100.0 / limit
  msg = "%s size (%d) is %.2f%% of limit (%d)" % (target, size, pct, limit)
  if pct >= 99.0:
    raise ExternalError(msg)
  elif pct >= 95.0:
    print
    print "  WARNING: ", msg
    print
  elif OPTIONS.verbose:
    print "  ", msg


def ReadApkCerts(tf_zip):
  """Given a target_files ZipFile, parse the META/apkcerts.txt file
  and return a {package: cert} dict."""
  certmap = {}
  for line in tf_zip.read("META/apkcerts.txt").split("\n"):
    line = line.strip()
    if not line:
      continue
    m = re.match(r'^name="(.*)"\s+certificate="(.*)"\s+'
                 r'private_key="(.*)"$', line)
    if m:
      name, cert, privkey = m.groups()
      public_key_suffix_len = len(OPTIONS.public_key_suffix)
      private_key_suffix_len = len(OPTIONS.private_key_suffix)
      if cert in SPECIAL_CERT_STRINGS and not privkey:
        certmap[name] = cert
      elif (cert.endswith(OPTIONS.public_key_suffix) and
            privkey.endswith(OPTIONS.private_key_suffix) and
            cert[:-public_key_suffix_len] == privkey[:-private_key_suffix_len]):
        certmap[name] = cert[:-public_key_suffix_len]
      else:
        raise ValueError("failed to parse line from apkcerts.txt:\n" + line)
  return certmap


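# An apkcerts.txt line matched by the regex above looks roughly like the
# made-up entry below; the stored value is the certificate path minus its
# ".x509.pem" suffix (here, "build/certs/platform"):
#
#   name="Example.apk" certificate="build/certs/platform.x509.pem" private_key="build/certs/platform.pk8"
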
COMMON_DOCSTRING = """
  -p  (--path)  <dir>
      Prepend <dir>/bin to the list of places to search for binaries
      run by this script, and expect to find jars in <dir>/framework.

  -s  (--device_specific) <file>
      Path to the python module containing device-specific
      releasetools code.

  -x  (--extra)  <key=value>
      Add a key/value pair to the 'extras' dict, which device-specific
      extension code may look at.

  -v  (--verbose)
      Show command lines being executed.

  -h  (--help)
      Display this usage message and exit.
"""

def Usage(docstring):
  print docstring.rstrip("\n")
  print COMMON_DOCSTRING


def ParseOptions(argv,
                 docstring,
                 extra_opts="", extra_long_opts=(),
                 extra_option_handler=None):
  """Parse the options in argv and return any arguments that aren't
  flags.  docstring is the calling module's docstring, to be displayed
  for errors and -h.  extra_opts and extra_long_opts are for flags
  defined by the caller, which are processed by passing them to
  extra_option_handler."""

  try:
    opts, args = getopt.getopt(
        argv, "hvp:s:x:" + extra_opts,
        ["help", "verbose", "path=", "signapk_path=", "extra_signapk_args=",
         "java_path=", "java_args=", "public_key_suffix=",
         "private_key_suffix=", "boot_signer_path=", "device_specific=",
         "extra="] +
        list(extra_long_opts))
  except getopt.GetoptError as err:
    Usage(docstring)
    print "**", str(err), "**"
    sys.exit(2)

  for o, a in opts:
    if o in ("-h", "--help"):
      Usage(docstring)
      sys.exit()
    elif o in ("-v", "--verbose"):
      OPTIONS.verbose = True
    elif o in ("-p", "--path"):
      OPTIONS.search_path = a
    elif o in ("--signapk_path",):
      OPTIONS.signapk_path = a
    elif o in ("--extra_signapk_args",):
      OPTIONS.extra_signapk_args = shlex.split(a)
    elif o in ("--java_path",):
      OPTIONS.java_path = a
    elif o in ("--java_args",):
      OPTIONS.java_args = a
    elif o in ("--public_key_suffix",):
      OPTIONS.public_key_suffix = a
    elif o in ("--private_key_suffix",):
      OPTIONS.private_key_suffix = a
    elif o in ("--boot_signer_path",):
      OPTIONS.boot_signer_path = a
    elif o in ("-s", "--device_specific"):
      OPTIONS.device_specific = a
    elif o in ("-x", "--extra"):
      key, value = a.split("=", 1)
      OPTIONS.extras[key] = value
    else:
      if extra_option_handler is None or not extra_option_handler(o, a):
        assert False, "unknown option \"%s\"" % (o,)

  if OPTIONS.search_path:
    os.environ["PATH"] = (os.path.join(OPTIONS.search_path, "bin") +
                          os.pathsep + os.environ["PATH"])

  return args


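# A calling script typically wires this up as sketched below; the extra flag
# "--board_config" is purely illustrative:
#
#   def option_handler(o, a):
#     if o == "--board_config":
#       OPTIONS.extras["board_config"] = a
#       return True
#     return False
#
#   args = ParseOptions(sys.argv[1:], __doc__,
#                       extra_long_opts=["board_config="],
#                       extra_option_handler=option_handler)
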
def MakeTempFile(prefix=None, suffix=None):
  """Make a temp file and add it to the list of things to be deleted
  when Cleanup() is called.  Return the filename."""
  fd, fn = tempfile.mkstemp(prefix=prefix, suffix=suffix)
  os.close(fd)
  OPTIONS.tempfiles.append(fn)
  return fn


def Cleanup():
  for i in OPTIONS.tempfiles:
    if os.path.isdir(i):
      shutil.rmtree(i)
    else:
      os.remove(i)


class PasswordManager(object):
  def __init__(self):
    self.editor = os.getenv("EDITOR", None)
    self.pwfile = os.getenv("ANDROID_PW_FILE", None)

  def GetPasswords(self, items):
    """Get passwords corresponding to each string in 'items',
    returning a dict.  (The dict may have keys in addition to the
    values in 'items'.)

    Uses the passwords in $ANDROID_PW_FILE if available, letting the
    user edit that file to add more needed passwords.  If no editor is
    available, or $ANDROID_PW_FILE isn't defined, prompts the user
    interactively in the ordinary way.
    """

    current = self.ReadFile()

    first = True
    while True:
      missing = []
      for i in items:
        if i not in current or not current[i]:
          missing.append(i)
      # Are all the passwords already in the file?
      if not missing:
        return current

      for i in missing:
        current[i] = ""

      if not first:
        print "key file %s still missing some passwords." % (self.pwfile,)
        answer = raw_input("try to edit again? [y]> ").strip()
        if answer and answer[0] not in 'yY':
          raise RuntimeError("key passwords unavailable")
      first = False

      current = self.UpdateAndReadFile(current)

  def PromptResult(self, current): # pylint: disable=no-self-use
    """Prompt the user to enter a value (password) for each key in
    'current' whose value is false.  Returns a new dict with all the
    values.
    """
    result = {}
    for k, v in sorted(current.iteritems()):
      if v:
        result[k] = v
      else:
        while True:
          result[k] = getpass.getpass(
              "Enter password for %s key> " % k).strip()
          if result[k]:
            break
    return result

  def UpdateAndReadFile(self, current):
    if not self.editor or not self.pwfile:
      return self.PromptResult(current)

    f = open(self.pwfile, "w")
    os.chmod(self.pwfile, 0o600)
    f.write("# Enter key passwords between the [[[ ]]] brackets.\n")
    f.write("# (Additional spaces are harmless.)\n\n")

    first_line = None
    sorted_list = sorted([(not v, k, v) for (k, v) in current.iteritems()])
    for i, (_, k, v) in enumerate(sorted_list):
      f.write("[[[  %s  ]]] %s\n" % (v, k))
      if not v and first_line is None:
        # position cursor on first line with no password.
        first_line = i + 4
    f.close()

    p = Run([self.editor, "+%d" % (first_line,), self.pwfile])
    _, _ = p.communicate()

    return self.ReadFile()

  def ReadFile(self):
    result = {}
    if self.pwfile is None:
      return result
    try:
      f = open(self.pwfile, "r")
      for line in f:
        line = line.strip()
        if not line or line[0] == '#':
          continue
        m = re.match(r"^\[\[\[\s*(.*?)\s*\]\]\]\s*(\S+)$", line)
        if not m:
          print "failed to parse password file: ", line
        else:
          result[m.group(2)] = m.group(1)
      f.close()
    except IOError as e:
      if e.errno != errno.ENOENT:
        print "error reading password file: ", str(e)
    return result


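# The password file written and parsed above uses one "[[[ password ]]] key"
# entry per line, e.g. (key names and password are placeholders):
#
#   # Enter key passwords between the [[[ ]]] brackets.
#   [[[  hunter2  ]]] build/certs/releasekey
#   [[[          ]]] build/certs/platform
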
def ZipWrite(zip_file, filename, arcname=None, perms=0o644,
             compress_type=None):
  import datetime

  # http://b/18015246
  # Python 2.7's zipfile implementation wrongly thinks that zip64 is required
  # for files larger than 2GiB. We can work around this by adjusting their
  # limit. Note that `zipfile.writestr()` will not work for strings larger than
  # 2GiB. The Python interpreter sometimes rejects strings that large (though
  # it isn't clear to me exactly what circumstances cause this).
  # `zipfile.write()` must be used directly to work around this.
  #
  # This mess can be avoided if we port to python3.
  saved_zip64_limit = zipfile.ZIP64_LIMIT
  zipfile.ZIP64_LIMIT = (1 << 32) - 1

  if compress_type is None:
    compress_type = zip_file.compression
  if arcname is None:
    arcname = filename

  saved_stat = os.stat(filename)

  try:
    # `zipfile.write()` doesn't allow us to pass ZipInfo, so just modify the
    # file to be zipped and reset it when we're done.
    os.chmod(filename, perms)

    # Use a fixed timestamp so the output is repeatable.
    epoch = datetime.datetime.fromtimestamp(0)
    timestamp = (datetime.datetime(2009, 1, 1) - epoch).total_seconds()
    os.utime(filename, (timestamp, timestamp))

    zip_file.write(filename, arcname=arcname, compress_type=compress_type)
  finally:
    os.chmod(filename, saved_stat.st_mode)
    os.utime(filename, (saved_stat.st_atime, saved_stat.st_mtime))
    zipfile.ZIP64_LIMIT = saved_zip64_limit


def ZipWriteStr(zip_file, zinfo_or_arcname, data, perms=0o644,
                compress_type=None):
  """Wrap zipfile.writestr() function to work around the zip64 limit.

  Even with the ZIP64_LIMIT workaround, it won't allow writing a string
  longer than 2GiB. It gives 'OverflowError: size does not fit in an int'
  when calling crc32(bytes).

  But it still works fine to write a shorter string into a large zip file.
  We should use ZipWrite() whenever possible, and only use ZipWriteStr()
  when we know the string won't be too long.
  """

  saved_zip64_limit = zipfile.ZIP64_LIMIT
  zipfile.ZIP64_LIMIT = (1 << 32) - 1

  if not isinstance(zinfo_or_arcname, zipfile.ZipInfo):
    zinfo = zipfile.ZipInfo(filename=zinfo_or_arcname)
    zinfo.compress_type = zip_file.compression
  else:
    zinfo = zinfo_or_arcname

  # If compress_type is given, it overrides the value in zinfo.
  if compress_type is not None:
    zinfo.compress_type = compress_type
  # Set the permission bits via the external attributes, and use a fixed
  # timestamp so the output is repeatable.
  zinfo.external_attr = perms << 16
  zinfo.date_time = (2009, 1, 1, 0, 0, 0)

  zip_file.writestr(zinfo, data)
  zipfile.ZIP64_LIMIT = saved_zip64_limit


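# A minimal sketch of using these wrappers (output and entry names are
# illustrative): bump ZIP64_LIMIT around each write, then let ZipClose()
# do the same while the central directory is written.
#
#   out_zip = zipfile.ZipFile("ota.zip", "w", compression=zipfile.ZIP_DEFLATED)
#   ZipWrite(out_zip, "/tmp/system.new.dat", arcname="system.new.dat")
#   ZipWriteStr(out_zip, "META-INF/com/android/metadata", "ota-property=value\n")
#   ZipClose(out_zip)
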
def ZipClose(zip_file):
  # http://b/18015246
  # zipfile also refers to ZIP64_LIMIT during close() when it writes out the
  # central directory.
  saved_zip64_limit = zipfile.ZIP64_LIMIT
  zipfile.ZIP64_LIMIT = (1 << 32) - 1

  zip_file.close()

  zipfile.ZIP64_LIMIT = saved_zip64_limit


class DeviceSpecificParams(object):
  module = None
  def __init__(self, **kwargs):
    """Keyword arguments to the constructor become attributes of this
    object, which is passed to all functions in the device-specific
    module."""
    for k, v in kwargs.iteritems():
      setattr(self, k, v)
    self.extras = OPTIONS.extras

    if self.module is None:
      path = OPTIONS.device_specific
      if not path:
        return
      try:
        if os.path.isdir(path):
          info = imp.find_module("releasetools", [path])
        else:
          d, f = os.path.split(path)
          b, x = os.path.splitext(f)
          if x == ".py":
            f = b
          info = imp.find_module(f, [d])
        print "loaded device-specific extensions from", path
        self.module = imp.load_module("device_specific", *info)
      except ImportError:
        print "unable to load device-specific module; assuming none"

  def _DoCall(self, function_name, *args, **kwargs):
    """Call the named function in the device-specific module, passing
    the given args and kwargs.  The first argument to the call will be
    the DeviceSpecific object itself.  If there is no module, or the
    module does not define the function, return the value of the
    'default' kwarg (which itself defaults to None)."""
    if self.module is None or not hasattr(self.module, function_name):
      return kwargs.get("default", None)
    return getattr(self.module, function_name)(*((self,) + args), **kwargs)

  def FullOTA_Assertions(self):
    """Called after emitting the block of assertions at the top of a
    full OTA package.  Implementations can add whatever additional
    assertions they like."""
    return self._DoCall("FullOTA_Assertions")

  def FullOTA_InstallBegin(self):
    """Called at the start of full OTA installation."""
    return self._DoCall("FullOTA_InstallBegin")

  def FullOTA_InstallEnd(self):
    """Called at the end of full OTA installation; typically this is
    used to install the image for the device's baseband processor."""
    return self._DoCall("FullOTA_InstallEnd")

  def IncrementalOTA_Assertions(self):
    """Called after emitting the block of assertions at the top of an
    incremental OTA package.  Implementations can add whatever
    additional assertions they like."""
    return self._DoCall("IncrementalOTA_Assertions")

  def IncrementalOTA_VerifyBegin(self):
    """Called at the start of the verification phase of incremental
    OTA installation; additional checks can be placed here to abort
    the script before any changes are made."""
    return self._DoCall("IncrementalOTA_VerifyBegin")

  def IncrementalOTA_VerifyEnd(self):
    """Called at the end of the verification phase of incremental OTA
    installation; additional checks can be placed here to abort the
    script before any changes are made."""
    return self._DoCall("IncrementalOTA_VerifyEnd")

  def IncrementalOTA_InstallBegin(self):
    """Called at the start of incremental OTA installation (after
    verification is complete)."""
    return self._DoCall("IncrementalOTA_InstallBegin")

  def IncrementalOTA_InstallEnd(self):
    """Called at the end of incremental OTA installation; typically
    this is used to install the image for the device's baseband
    processor."""
    return self._DoCall("IncrementalOTA_InstallEnd")

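# A device-specific releasetools.py module (loaded via -s/--device_specific)
# simply defines free functions with the hook names above; a minimal sketch:
#
#   def FullOTA_InstallEnd(info):
#     # 'info' is the DeviceSpecificParams instance; which attributes it has
#     # (e.g. info.script, info.input_zip) depends on what the caller passed.
#     info.script.Print("Flashing hypothetical baseband image...")
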
class File(object):
  def __init__(self, name, data):
    self.name = name
    self.data = data
    self.size = len(data)
    self.sha1 = sha1(data).hexdigest()

  @classmethod
  def FromLocalFile(cls, name, diskname):
    f = open(diskname, "rb")
    data = f.read()
    f.close()
    return File(name, data)

  def WriteToTemp(self):
    t = tempfile.NamedTemporaryFile()
    t.write(self.data)
    t.flush()
    return t

  def AddToZip(self, z, compression=None):
    ZipWriteStr(z, self.name, self.data, compress_type=compression)

DIFF_PROGRAM_BY_EXT = {
    ".gz" : "imgdiff",
    ".zip" : ["imgdiff", "-z"],
    ".jar" : ["imgdiff", "-z"],
    ".apk" : ["imgdiff", "-z"],
    ".img" : "imgdiff",
    }

class Difference(object):
  def __init__(self, tf, sf, diff_program=None):
    self.tf = tf
    self.sf = sf
    self.patch = None
    self.diff_program = diff_program

  def ComputePatch(self):
    """Compute the patch (as a string of data) needed to turn sf into
    tf.  Returns the same tuple as GetPatch()."""

    tf = self.tf
    sf = self.sf

    if self.diff_program:
      diff_program = self.diff_program
    else:
      ext = os.path.splitext(tf.name)[1]
      diff_program = DIFF_PROGRAM_BY_EXT.get(ext, "bsdiff")

    ttemp = tf.WriteToTemp()
    stemp = sf.WriteToTemp()

    ext = os.path.splitext(tf.name)[1]

    try:
      ptemp = tempfile.NamedTemporaryFile()
      if isinstance(diff_program, list):
        cmd = copy.copy(diff_program)
      else:
        cmd = [diff_program]
      cmd.append(stemp.name)
      cmd.append(ttemp.name)
      cmd.append(ptemp.name)
      p = Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
      err = []
      def run():
        _, e = p.communicate()
        if e:
          err.append(e)
      th = threading.Thread(target=run)
      th.start()
      th.join(timeout=300)   # 5 mins
      if th.is_alive():
        print "WARNING: diff command timed out"
        p.terminate()
        th.join(5)
        if th.is_alive():
          p.kill()
          th.join()

      if err or p.returncode != 0:
        print "WARNING: failure running %s:\n%s\n" % (
            diff_program, "".join(err))
        self.patch = None
        return None, None, None
      diff = ptemp.read()
    finally:
      ptemp.close()
      stemp.close()
      ttemp.close()

    self.patch = diff
    return self.tf, self.sf, self.patch


  def GetPatch(self):
    """Return a tuple (target_file, source_file, patch_data).
    patch_data may be None if ComputePatch hasn't been called, or if
    computing the patch failed."""
    return self.tf, self.sf, self.patch


def ComputeDifferences(diffs):
  """Call ComputePatch on all the Difference objects in 'diffs'."""
  print len(diffs), "diffs to compute"

  # Do the largest files first, to try and reduce the long-pole effect.
  by_size = [(i.tf.size, i) for i in diffs]
  by_size.sort(reverse=True)
  by_size = [i[1] for i in by_size]

  lock = threading.Lock()
  diff_iter = iter(by_size)   # accessed under lock

  def worker():
    try:
      lock.acquire()
      for d in diff_iter:
        lock.release()
        start = time.time()
        d.ComputePatch()
        dur = time.time() - start
        lock.acquire()

        tf, sf, patch = d.GetPatch()
        if sf.name == tf.name:
          name = tf.name
        else:
          name = "%s (%s)" % (tf.name, sf.name)
        if patch is None:
          print "patching failed!                                  %s" % (name,)
        else:
          print "%8.2f sec %8d / %8d bytes (%6.2f%%) %s" % (
              dur, len(patch), tf.size, 100.0 * len(patch) / tf.size, name)
      lock.release()
    except Exception as e:
      print e
      raise

  # start worker threads; wait for them all to finish.
  threads = [threading.Thread(target=worker)
             for i in range(OPTIONS.worker_threads)]
  for th in threads:
    th.start()
  while threads:
    threads.pop().join()


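# Usage sketch (the File objects are assumed to exist already): build one
# Difference per (target, source) pair, compute them on OPTIONS.worker_threads
# threads, then collect the patches.
#
#   diffs = [Difference(target_file_obj, source_file_obj)]
#   OPTIONS.worker_threads = OPTIONS.worker_threads or 4
#   ComputeDifferences(diffs)
#   tf, sf, patch_data = diffs[0].GetPatch()
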
class BlockDifference(object):
  def __init__(self, partition, tgt, src=None, check_first_block=False,
               version=None):
    self.tgt = tgt
    self.src = src
    self.partition = partition
    self.check_first_block = check_first_block

    if version is None:
      version = 1
      if OPTIONS.info_dict:
        version = max(
            int(i) for i in
            OPTIONS.info_dict.get("blockimgdiff_versions", "1").split(","))
    self.version = version

    b = blockimgdiff.BlockImageDiff(tgt, src, threads=OPTIONS.worker_threads,
                                    version=self.version)
    tmpdir = tempfile.mkdtemp()
    OPTIONS.tempfiles.append(tmpdir)
    self.path = os.path.join(tmpdir, partition)
    b.Compute(self.path)

    _, self.device = GetTypeAndDevice("/" + partition, OPTIONS.info_dict)

  def WriteScript(self, script, output_zip, progress=None):
    if not self.src:
      # write the output unconditionally
      script.Print("Patching %s image unconditionally..." % (self.partition,))
    else:
      script.Print("Patching %s image after verification." % (self.partition,))

    if progress:
      script.ShowProgress(progress, 0)
    self._WriteUpdate(script, output_zip)

  def WriteVerifyScript(self, script):
    partition = self.partition
    if not self.src:
      script.Print("Image %s will be patched unconditionally." % (partition,))
    else:
      if self.version >= 3:
        script.AppendExtra(('if (range_sha1("%s", "%s") == "%s" || '
                            'block_image_verify("%s", '
                            'package_extract_file("%s.transfer.list"), '
                            '"%s.new.dat", "%s.patch.dat")) then') % (
                            self.device, self.src.care_map.to_string_raw(),
                            self.src.TotalSha1(),
                            self.device, partition, partition, partition))
      else:
        script.AppendExtra('if range_sha1("%s", "%s") == "%s" then' % (
            self.device, self.src.care_map.to_string_raw(),
            self.src.TotalSha1()))
      script.Print('Verified %s image...' % (partition,))
      script.AppendExtra('else')

      # When generating incrementals for the system and vendor partitions,
      # explicitly check the first block (which contains the superblock) of
      # the partition to see if it's what we expect. If this check fails,
      # give an explicit log message about the partition having been
      # remounted R/W (the most likely explanation) and the need to flash to
      # get OTAs working again.
      if self.check_first_block:
        self._CheckFirstBlock(script)

      # Abort the OTA update. Note that the incremental OTA cannot be applied
      # even if it may match the checksum of the target partition.
      # a) If version < 3, operations like move and erase will make changes
      #    unconditionally and damage the partition.
      # b) If version >= 3, it won't even reach here.
      script.AppendExtra(('abort("%s partition has unexpected contents");\n'
                          'endif;') % (partition,))

  def _WriteUpdate(self, script, output_zip):
    ZipWrite(output_zip,
             '{}.transfer.list'.format(self.path),
             '{}.transfer.list'.format(self.partition))
    ZipWrite(output_zip,
             '{}.new.dat'.format(self.path),
             '{}.new.dat'.format(self.partition))
    ZipWrite(output_zip,
             '{}.patch.dat'.format(self.path),
             '{}.patch.dat'.format(self.partition),
             compress_type=zipfile.ZIP_STORED)

    call = ('block_image_update("{device}", '
            'package_extract_file("{partition}.transfer.list"), '
            '"{partition}.new.dat", "{partition}.patch.dat");\n'.format(
                device=self.device, partition=self.partition))
    script.AppendExtra(script.WordWrap(call))

  def _HashBlocks(self, source, ranges): # pylint: disable=no-self-use
    data = source.ReadRangeSet(ranges)
    ctx = sha1()

    for p in data:
      ctx.update(p)

    return ctx.hexdigest()

  def _CheckFirstBlock(self, script):
    r = rangelib.RangeSet((0, 1))
    srchash = self._HashBlocks(self.src, r)

    script.AppendExtra(('(range_sha1("%s", "%s") == "%s") || '
                        'abort("%s has been remounted R/W; '
                        'reflash device to reenable OTA updates");')
                       % (self.device, r.to_string_raw(), srchash,
                          self.device))

DataImage = blockimgdiff.DataImage


# map recovery.fstab's fs_types to mount/format "partition types"
PARTITION_TYPES = {
    "yaffs2": "MTD",
    "mtd": "MTD",
    "ext4": "EMMC",
    "emmc": "EMMC",
    "f2fs": "EMMC",
    "squashfs": "EMMC"
}

def GetTypeAndDevice(mount_point, info):
  fstab = info["fstab"]
  if fstab:
    return (PARTITION_TYPES[fstab[mount_point].fs_type],
            fstab[mount_point].device)
  else:
    raise KeyError


def ParseCertificate(data):
  """Parse a PEM-format certificate."""
  cert = []
  save = False
  for line in data.split("\n"):
    if "--END CERTIFICATE--" in line:
      break
    if save:
      cert.append(line)
    if "--BEGIN CERTIFICATE--" in line:
      save = True
  cert = "".join(cert).decode('base64')
  return cert

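# For example (with a placeholder .x509.pem path), this returns the raw DER
# bytes found between the BEGIN/END CERTIFICATE markers:
#
#   der = ParseCertificate(open("build/certs/platform.x509.pem").read())
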
def MakeRecoveryPatch(input_dir, output_sink, recovery_img, boot_img,
                      info_dict=None):
  """Generate a binary patch that creates the recovery image starting
  with the boot image.  (Most of the space in these images is just the
  kernel, which is identical for the two, so the resulting patch
  should be efficient.)  Add it to the output zip, along with a shell
  script that is run from init.rc on first boot to actually do the
  patching and install the new recovery image.

  recovery_img and boot_img should be File objects for the
  corresponding images.  info_dict should be the dictionary returned by
  common.LoadInfoDict() on the input target_files.
  """

  if info_dict is None:
    info_dict = OPTIONS.info_dict

  diff_program = ["imgdiff"]
  path = os.path.join(input_dir, "SYSTEM", "etc", "recovery-resource.dat")
  if os.path.exists(path):
    diff_program.append("-b")
    diff_program.append(path)
    bonus_args = "-b /system/etc/recovery-resource.dat"
  else:
    bonus_args = ""

  d = Difference(recovery_img, boot_img, diff_program=diff_program)
  _, _, patch = d.ComputePatch()
  output_sink("recovery-from-boot.p", patch)

  try:
    boot_type, boot_device = GetTypeAndDevice("/boot", info_dict)
    recovery_type, recovery_device = GetTypeAndDevice("/recovery", info_dict)
  except KeyError:
    return

  sh = """#!/system/bin/sh
if ! applypatch -c %(recovery_type)s:%(recovery_device)s:%(recovery_size)d:%(recovery_sha1)s; then
  applypatch %(bonus_args)s %(boot_type)s:%(boot_device)s:%(boot_size)d:%(boot_sha1)s %(recovery_type)s:%(recovery_device)s %(recovery_sha1)s %(recovery_size)d %(boot_sha1)s:/system/recovery-from-boot.p && log -t recovery "Installing new recovery image: succeeded" || log -t recovery "Installing new recovery image: failed"
else
  log -t recovery "Recovery image already installed"
fi
""" % {'boot_size': boot_img.size,
       'boot_sha1': boot_img.sha1,
       'recovery_size': recovery_img.size,
       'recovery_sha1': recovery_img.sha1,
       'boot_type': boot_type,
       'boot_device': boot_device,
       'recovery_type': recovery_type,
       'recovery_device': recovery_device,
       'bonus_args': bonus_args}

  # The install script location moved from /system/etc to /system/bin
  # in the L release.  Parse the init.rc file to find out where the
  # target-files expects it to be, and put it there.
  sh_location = "etc/install-recovery.sh"
  try:
    with open(os.path.join(input_dir, "BOOT", "RAMDISK", "init.rc")) as f:
      for line in f:
        m = re.match(r"^service flash_recovery /system/(\S+)\s*$", line)
        if m:
          sh_location = m.group(1)
          print "putting script in", sh_location
          break
  except (OSError, IOError) as e:
    print "failed to read init.rc: %s" % (e,)

  output_sink(sh_location, sh)
1357