# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Bootstrap Chrome Telemetry by downloading all its files from SVN servers.

Requires a DEPS file to specify which directories on which SVN servers
are required to run Telemetry. The format of that DEPS file is a subset of the
normal DEPS file format[1]; currently only the "deps" and "deps_includes"
dictionaries are supported and nothing else.
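
A minimal example of the expected contents (the server URL and paths below are
hypothetical):

  deps = {
    "src/tools/telemetry":
        "https://svn.example.org/trunk/src/tools/telemetry",
  }

  deps_includes = {
    "src/tools/telemetry/bootstrap_deps":
        "https://svn.example.org/trunk/src/tools/telemetry/bootstrap_deps",
  }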

Fetches all files in the specified directories using WebDAV (SVN is WebDAV
under the hood).

[1] http://dev.chromium.org/developers/how-tos/depottools#TOC-DEPS-file
"""

import imp
import logging
import os
import urllib
import urlparse

# Placeholder for the dynamically downloaded davclient module.
davclient = None


# Link to file containing the 'davclient' WebDAV client library.
_DAVCLIENT_URL = ('https://src.chromium.org/chrome/trunk/src/tools/'
                  'telemetry/third_party/davclient/davclient.py')


def _DownloadAndImportDAVClientModule():
  """Downloads and dynamically imports the davclient helper library."""
  global davclient
  davclient_src = urllib.urlopen(_DAVCLIENT_URL).read()
  # Create an empty module and execute the downloaded source in its namespace.
  davclient = imp.new_module('davclient')
  exec davclient_src in davclient.__dict__


class DAVClientWrapper(object):
  """Knows how to retrieve subdirectories and files from WebDAV/SVN servers."""

  def __init__(self, root_url):
    """Initializes the client for the given SVN/WebDAV server.

    Args:
      root_url: string URL of the SVN/WebDAV server.
    """
    self.root_url = root_url
    self.client = davclient.DAVClient(root_url)

  @staticmethod
  def __norm_path_keys(dict_with_path_keys):
    """Returns a dictionary with os.path.normpath called on every key."""
    return dict((os.path.normpath(k), v) for (k, v) in
                dict_with_path_keys.items())

  def GetDirList(self, path):
    """Returns string names of all files and subdirs of path on the server."""
    props = self.__norm_path_keys(self.client.propfind(path, depth=1))
    # Remove the queried path itself so only its children remain.
    del props[os.path.normpath(path)]
    return [os.path.basename(p) for p in props.keys()]

  def IsFile(self, path):
    """Returns True if the path is a file on the server, False if directory."""
    props = self.__norm_path_keys(self.client.propfind(path, depth=1))
    return props[os.path.normpath(path)]['resourcetype'] is None
  def Traverse(self, src_path, dst_path):
    """Walks the directory hierarchy at src_path, downloading all files.

    Recursively walks src_path and saves all files and subfolders into
    dst_path.

    Args:
      src_path: string path on SVN server to save (absolute path on server).
      dst_path: string local path (relative or absolute) to save to.
    """
    if self.IsFile(src_path):
      if not os.path.exists(os.path.dirname(dst_path)):
        logging.info('Creating %s', os.path.dirname(dst_path))
        os.makedirs(os.path.dirname(dst_path))
      if os.path.isfile(dst_path):
        logging.info('Skipping %s', dst_path)
      else:
        logging.info('Saving %s to %s', self.root_url + src_path, dst_path)
        urllib.urlretrieve(self.root_url + src_path, dst_path)
    else:
      for subdir in self.GetDirList(src_path):
        self.Traverse(os.path.join(src_path, subdir),
                      os.path.join(dst_path, subdir))

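# A minimal usage sketch for DAVClientWrapper (the server URL and paths are
# hypothetical); DownloadDeps below drives the class in the same way:
#
#   client = DAVClientWrapper('https://svn.example.org')
#   client.Traverse('/trunk/src/tools/telemetry', '/tmp/telemetry')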

def ListAllDepsPaths(deps_file):
  """Recursively returns a list of all paths indicated in this deps file.

  Note that this discards information about where path dependencies come from,
  so this is only useful in the context of a Chromium source checkout that has
  already fetched all dependencies.

  Args:
    deps_file: File containing deps information to be evaluated, in the
               format given in the header of this file.
  Returns:
    A list of string paths starting under src that are required by the
    given deps file, and all of its sub-dependencies. This amounts to
    the keys of the 'deps' dictionary.
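
  Example (the deps file path is hypothetical):
    ListAllDepsPaths('tools/telemetry/bootstrap_deps')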
  """
  deps = {}
  deps_includes = {}

  chrome_root = os.path.dirname(__file__)
  while os.path.basename(chrome_root) != 'src':
    chrome_root = os.path.abspath(os.path.join(chrome_root, os.pardir))

  # Executing the deps file populates the 'deps' and 'deps_includes' dicts.
  exec open(deps_file).read()

  deps_paths = deps.keys()

  for path in deps_includes.keys():
    # Include paths are relative to the parent of src/, so localize them.
    path = os.path.join(chrome_root, os.pardir, path)
    deps_paths += ListAllDepsPaths(path)

  return deps_paths


def DownloadDeps(destination_dir, url):
  """Downloads all the dependencies listed at url into destination_dir.

  Opens and reads url, assuming the contents are in the simple DEPS-like file
  format specified in the header of this file, then downloads all
  files/directories listed to destination_dir.

  Args:
    destination_dir: String path to directory to download files into.
    url: URL containing deps information to be evaluated.
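
  Example (the destination directory and URL are hypothetical):
    DownloadDeps('third_party/telemetry_deps',
                 'https://svn.example.org/trunk/src/tools/telemetry/'
                 'bootstrap_deps')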
  """
  logging.warning('Downloading deps from %s...', url)
  # TODO(wiltzius): Add a parameter for which revision to pull.
  _DownloadAndImportDAVClientModule()

  deps = {}
  deps_includes = {}

  # Executing the downloaded contents populates 'deps' and 'deps_includes'.
  exec urllib.urlopen(url).read()

  for dst_path, src_path in deps.iteritems():
    full_dst_path = os.path.join(destination_dir, dst_path)
    parsed_url = urlparse.urlparse(src_path)
    root_url = parsed_url.scheme + '://' + parsed_url.netloc

    dav_client = DAVClientWrapper(root_url)
    dav_client.Traverse(parsed_url.path, full_dst_path)

  for include_url in deps_includes.values():
    DownloadDeps(destination_dir, include_url)