1# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
2# Use of this source code is governed by a BSD-style license that can be
3# found in the LICENSE file.
4
5import errno
6import logging
7import os
8import shutil
9import subprocess
10import urllib2
11
12from autotest_lib.client.common_lib import global_config
13
def rm_dir_if_exists(dir_to_remove):
    """
    Recursively delete a directory, tolerating its absence.

    @param dir_to_remove: path, directory to be removed.

    """
    try:
        shutil.rmtree(dir_to_remove)
    except OSError as err:
        if err.errno == errno.ENOENT:
            return  # Directory is already gone -- nothing to do.
        raise
26
27
def rm_dirs_if_exist(dirs_to_remove):
    """
    Recursively delete several directories, tolerating their absence.

    @param dirs_to_remove: list of directory paths to be removed.

    """
    for directory in dirs_to_remove:
        rm_dir_if_exists(directory)
37
38
def ensure_file_exists(filepath):
    """
    Verify that the given path names an existing regular file.

    @param filepath: path, path to check.

    @raises IOError if the path given does not point to a valid file.

    """
    if os.path.isfile(filepath):
        return
    raise IOError('File %s does not exist.' % filepath)
51
52
def ensure_all_files_exist(filepaths):
    """
    Verify that every given path names an existing regular file.

    @param filepaths: List of paths to check.

    @raises IOError if any path does not point to an existing file.

    """
    for path in filepaths:
        ensure_file_exists(path)
64
65
def ensure_dir_exists(dirpath):
    """
    Verify that the given path names an existing directory.

    @param dirpath: path, dir to check.

    @raises IOError if path does not point to an existing directory.

    """
    if os.path.isdir(dirpath):
        return
    raise IOError('Directory %s does not exist.' % dirpath)
78
79
def ensure_all_dirs_exist(dirpaths):
    """
    Verify that every given path names an existing directory.

    @param dirpaths: list of directory paths to check.

    @raises IOError if any path does not point to an existing directory.

    """
    for path in dirpaths:
        ensure_dir_exists(path)
91
92
def make_leaf_dir(dirpath):
    """
    Create a directory, building missing parent directories as needed.

    @param dirpath: path, directory to create.

    @raises whatever exception raised other than "path already exist".

    """
    try:
        os.makedirs(dirpath)
    except OSError as err:
        if err.errno == errno.EEXIST:
            return  # Already there -- treat as success.
        raise
107
108
def make_leaf_dirs(dirpaths):
    """
    Create several directories, building missing parents for each as
    needed.

    @param dirpaths: list of directory paths to create.

    @raises whatever exception raised other than "path already exists".
    """
    for path in dirpaths:
        make_leaf_dir(path)
120
121
def download_file(remote_path, local_path):
    """
    Download file from a remote resource.

    @param remote_path: path, complete path to the remote file.
    @param local_path: path, complete path to save downloaded file.

    @raises: urllib2.HTTPError or urllib2.URLError exception. Both with added
            debug information

    """
    client_config = global_config.global_config.get_section_values('CLIENT')

    # Collect any configured "*_proxy" settings (e.g. http_proxy) into a
    # scheme -> url mapping usable by urllib2.ProxyHandler.
    proxies = {}
    for name, value in client_config.items('CLIENT'):
        if value and name.endswith('_proxy'):
            proxies[name[:-6]] = value

    if proxies:
        proxy_handler = urllib2.ProxyHandler(proxies)
        opener = urllib2.build_opener(proxy_handler)
        urllib2.install_opener(opener)

    # Unlike urllib.urlopen urllib2.urlopen will immediately throw on error
    # If we could not find the file pointed by remote_path we will get an
    # exception, catch the exception to log useful information then re-raise

    try:
        remote_file = urllib2.urlopen(remote_path)

        # Catch exceptions, extract exception properties and then re-raise
        # This helps us with debugging what went wrong quickly as we get to see
        # test_that output immediately

    except urllib2.HTTPError as e:
        e.msg = (("""HTTPError raised while retrieving file %s\n.
                       Http Code = %s.\n. Reason = %s\n. Headers = %s.\n
                       Original Message = %s.\n""")
                 % (remote_path, e.code, e.reason, e.headers, e.msg))
        raise

    except urllib2.URLError as e:
        e.msg = (("""URLError raised while retrieving file %s\n.
                        Reason = %s\n. Original Message = %s\n.""")
                 % (remote_path, e.reason, e.message))
        raise

    # Copy the response to disk in 128 KiB chunks, making sure the response
    # object is closed even if a read/write fails (it was previously leaked).
    try:
        with open(local_path, 'wb') as local_file:
            while True:
                block = remote_file.read(128 * 1024)
                if not block:
                    break
                local_file.write(block)
    finally:
        remote_file.close()
175
176
def get_directory_size_kibibytes(directory):
    """Calculate the total size of a directory with all its contents.

    Shells out to `du -sk`.  On failure this logs a warning and returns 0
    instead of raising, because the result is used for statistics only.

    @param directory: Path to the directory.

    @return Size of the directory in kibibytes, or 0 if `du` failed.
    """
    cmd = ['du', '-sk', directory]
    # universal_newlines=True makes communicate() return text rather than
    # bytes, so the '\t' split below works on any interpreter.
    process = subprocess.Popen(cmd,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE,
                               universal_newlines=True)
    stdout_data, stderr_data = process.communicate()

    if process.returncode != 0:
        # This function is used for statistics only; if it fails,
        # nothing else should crash.  Pass stderr as a lazy %s argument
        # rather than as the format string itself.
        logging.warning('Getting size of %s failed. Stderr: %s',
                        directory, stderr_data)
        return 0

    # `du -sk` output is "<kibibytes>\t<path>"; keep only the number.
    return int(stdout_data.split('\t', 1)[0])
198