# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
4
5import datetime
6import logging
7import os
8
9from integration_tests import network_metrics
10from telemetry.page import page_test
11from telemetry.value import scalar
12
13
class ChromeProxyMetricException(page_test.MeasurementFailure):
  """Raised when a Chrome proxy measurement check fails."""
  pass
16
17
# Via header value added by the current data compression proxy, and the
# value used by older proxy versions.
CHROME_PROXY_VIA_HEADER = 'Chrome-Compression-Proxy'
CHROME_PROXY_VIA_HEADER_DEPRECATED = '1.1 Chrome Compression Proxy'

# Proxy endpoints that may appear in the effective proxy configuration.
PROXY_SETTING_HTTPS = 'proxy.googlezip.net:443'
PROXY_SETTING_HTTPS_WITH_SCHEME = 'https://' + PROXY_SETTING_HTTPS
PROXY_DEV_SETTING_HTTPS_WITH_SCHEME = 'http://proxy-dev.googlezip.net:80'
PROXY_SETTING_HTTP = 'compress.googlezip.net:80'
PROXY_SETTING_DIRECT = 'direct://'

# The default Chrome Proxy bypass time is a range from one to five minutes.
# See ProxyList::UpdateRetryInfoOnFallback in net/proxy/proxy_list.cc.
DEFAULT_BYPASS_MIN_SECONDS = 60
DEFAULT_BYPASS_MAX_SECONDS = 5 * 60
31
def GetProxyInfoFromNetworkInternals(tab, url='chrome://net-internals#proxy'):
  """Returns the proxy info dict scraped from the net-internals page.

  Navigates |tab| to |url|, injects chrome_proxy_metrics.js, waits for the
  page load to start, then evaluates the injected helper.
  """
  tab.Navigate(url)
  script_path = os.path.join(os.path.dirname(__file__),
                             'chrome_proxy_metrics.js')
  with open(script_path) as script_file:
    tab.ExecuteJavaScript(script_file.read())
  tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)
  return tab.EvaluateJavaScript('window.__getChromeProxyInfo()')
41
42
def ProxyRetryTimeInRange(retry_time, low, high, grace_seconds=30):
  """Returns True if |retry_time| is in [low, high + grace_seconds)."""
  upper_bound = high + datetime.timedelta(seconds=grace_seconds)
  return low <= retry_time < upper_bound
46
47
class ChromeProxyResponse(network_metrics.HTTPResponse):
  """Represents an HTTP response from a timeline event."""
  # NOTE: the former __init__ only delegated to the base class with the same
  # arguments, so it was redundant and has been removed.

  def ShouldHaveChromeProxyViaHeader(self):
    """Returns True if this response should carry the proxy Via header.

    HTTPS and data URLs, 304 Not Modified responses, cache hits, and
    responses without headers are exempt.
    """
    resp = self.response
    # Ignore https and data url
    if resp.url.startswith('https') or resp.url.startswith('data:'):
      return False
    # Ignore 304 Not Modified and cache hit.
    if resp.status == 304 or resp.served_from_cache:
      return False
    # Ignore invalid responses that don't have any header. Log a warning.
    if not resp.headers:
      logging.warning('response for %s does not have any headers '
                      '(refer=%s, status=%s)',
                      resp.url, resp.GetHeader('Referer'), resp.status)
      return False
    return True

  def HasChromeProxyViaHeader(self):
    """Returns True if the Via header names the compression proxy."""
    via_header = self.response.GetHeader('Via')
    if not via_header:
      return False
    # strip() (instead of strip(' ')) also removes tabs and other whitespace
    # around each comma-separated entry.
    vias = [v.strip() for v in via_header.split(',')]
    # The Via header is valid if it is the old format or the new format
    # with 4-character version prefix, for example,
    # "1.1 Chrome-Compression-Proxy".
    return (CHROME_PROXY_VIA_HEADER_DEPRECATED in vias or
            any(v[4:] == CHROME_PROXY_VIA_HEADER for v in vias))

  def IsValidByViaHeader(self):
    """Returns True if the Via header is present whenever it is required."""
    return (not self.ShouldHaveChromeProxyViaHeader() or
            self.HasChromeProxyViaHeader())

  def IsSafebrowsingResponse(self):
    """Returns True for the proxy's 307 safe-browsing interstitial."""
    if (self.response.status == 307 and
        self.response.GetHeader('X-Malware-Url') == '1' and
        self.IsValidByViaHeader() and
        self.response.GetHeader('Location') == self.response.url):
      return True
    return False
91
92
93class ChromeProxyMetric(network_metrics.NetworkMetric):
94  """A Chrome proxy timeline metric."""
95
96  def __init__(self):
97    super(ChromeProxyMetric, self).__init__()
98    self.compute_data_saving = True
99    self.effective_proxies = {
100        "proxy": PROXY_SETTING_HTTPS_WITH_SCHEME,
101        "proxy-dev": PROXY_DEV_SETTING_HTTPS_WITH_SCHEME,
102        "fallback": PROXY_SETTING_HTTP,
103        "direct": PROXY_SETTING_DIRECT,
104        }
105
  def SetEvents(self, events):
    """Injects timeline events directly, bypassing collection (unittest only)."""
    self._events = events
109
  def ResponseFromEvent(self, event):
    """Wraps a timeline event in a ChromeProxyResponse."""
    return ChromeProxyResponse(event)
112
  def AddResults(self, tab, results):
    """Adds this metric's values to |results|; subclasses must override."""
    raise NotImplementedError
115
116  def AddResultsForDataSaving(self, tab, results):
117    resources_via_proxy = 0
118    resources_from_cache = 0
119    resources_direct = 0
120
121    super(ChromeProxyMetric, self).AddResults(tab, results)
122    for resp in self.IterResponses(tab):
123      if resp.response.served_from_cache:
124        resources_from_cache += 1
125      if resp.HasChromeProxyViaHeader():
126        resources_via_proxy += 1
127      else:
128        resources_direct += 1
129
130    results.AddValue(scalar.ScalarValue(
131        results.current_page, 'resources_via_proxy', 'count',
132        resources_via_proxy))
133    results.AddValue(scalar.ScalarValue(
134        results.current_page, 'resources_from_cache', 'count',
135        resources_from_cache))
136    results.AddValue(scalar.ScalarValue(
137        results.current_page, 'resources_direct', 'count', resources_direct))
138
139  def AddResultsForHeaderValidation(self, tab, results):
140    via_count = 0
141    bypass_count = 0
142    for resp in self.IterResponses(tab):
143      if resp.IsValidByViaHeader():
144        via_count += 1
145      else:
146        bypassed, _ = self.IsProxyBypassed(tab)
147        if tab and bypassed:
148          logging.warning('Proxy bypassed for %s', resp.response.url)
149          bypass_count += 1
150        else:
151          r = resp.response
152          raise ChromeProxyMetricException, (
153              '%s: Via header (%s) is not valid (refer=%s, status=%d)' % (
154                  r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status))
155    results.AddValue(scalar.ScalarValue(
156        results.current_page, 'checked_via_header', 'count', via_count))
157    results.AddValue(scalar.ScalarValue(
158        results.current_page, 'request_bypassed', 'count', bypass_count))
159
160  def AddResultsForClientVersion(self, tab, results):
161    for resp in self.IterResponses(tab):
162      r = resp.response
163      if resp.response.status != 200:
164        raise ChromeProxyMetricException, ('%s: Response is not 200: %d' %
165                                           (r.url, r.status))
166      if not resp.IsValidByViaHeader():
167        raise ChromeProxyMetricException, ('%s: Response missing via header' %
168                                           (r.url))
169    results.AddValue(scalar.ScalarValue(
170        results.current_page, 'version_test', 'count', 1))
171
172
173  def IsProxyBypassed(self, tab):
174    """ Returns True if all configured proxies are bypassed."""
175    if not tab:
176      return False, []
177
178    info = GetProxyInfoFromNetworkInternals(tab)
179    if not info['enabled']:
180      raise ChromeProxyMetricException, (
181          'Chrome proxy should be enabled. proxy info: %s' % info)
182
183    bad_proxies = [str(p['proxy']) for p in info['badProxies']]
184    bad_proxies.sort()
185    proxies = [self.effective_proxies['proxy'],
186               self.effective_proxies['fallback']]
187    proxies.sort()
188    proxies_dev = [self.effective_proxies['proxy-dev'],
189                   self.effective_proxies['fallback']]
190    proxies_dev.sort()
191    if bad_proxies == proxies:
192      return True, proxies
193    elif bad_proxies == proxies_dev:
194      return True, proxies_dev
195    return False, []
196
197  @staticmethod
198  def VerifyBadProxies(
199      badProxies, expected_proxies,
200      retry_seconds_low = DEFAULT_BYPASS_MIN_SECONDS,
201      retry_seconds_high = DEFAULT_BYPASS_MAX_SECONDS):
202    """Verify the bad proxy list and their retry times are expected. """
203    if not badProxies or (len(badProxies) != len(expected_proxies)):
204      return False
205
206    # Check all expected proxies.
207    proxies = [p['proxy'] for p in badProxies]
208    expected_proxies.sort()
209    proxies.sort()
210    if not expected_proxies == proxies:
211      raise ChromeProxyMetricException, (
212          'Bad proxies: got %s want %s' % (
213              str(badProxies), str(expected_proxies)))
214
215    # Check retry time
216    for p in badProxies:
217      retry_time_low = (datetime.datetime.now() +
218                        datetime.timedelta(seconds=retry_seconds_low))
219      retry_time_high = (datetime.datetime.now() +
220                        datetime.timedelta(seconds=retry_seconds_high))
221      got_retry_time = datetime.datetime.fromtimestamp(int(p['retry'])/1000)
222      if not ProxyRetryTimeInRange(
223          got_retry_time, retry_time_low, retry_time_high):
224        raise ChromeProxyMetricException, (
225            'Bad proxy %s retry time (%s) should be within range (%s-%s).' % (
226                p['proxy'], str(got_retry_time), str(retry_time_low),
227                str(retry_time_high)))
228    return True
229
230  def AddResultsForBypass(self, tab, results):
231    bypass_count = 0
232    for resp in self.IterResponses(tab):
233      if resp.HasChromeProxyViaHeader():
234        r = resp.response
235        raise ChromeProxyMetricException, (
236            '%s: Should not have Via header (%s) (refer=%s, status=%d)' % (
237                r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status))
238      bypass_count += 1
239
240    if tab:
241      info = GetProxyInfoFromNetworkInternals(tab)
242      if not info['enabled']:
243        raise ChromeProxyMetricException, (
244            'Chrome proxy should be enabled. proxy info: %s' % info)
245      _, expected_bad_proxies = self.IsProxyBypassed(tab)
246      self.VerifyBadProxies(info['badProxies'], expected_bad_proxies)
247
248    results.AddValue(scalar.ScalarValue(
249        results.current_page, 'bypass', 'count', bypass_count))
250
251  def AddResultsForBlockOnce(self, tab, results):
252    eligible_response_count = 0
253    bypass_count = 0
254    for resp in self.IterResponses(tab):
255      if resp.ShouldHaveChromeProxyViaHeader():
256        eligible_response_count += 1
257        if not resp.HasChromeProxyViaHeader():
258          bypass_count += 1
259
260    if tab:
261      info = GetProxyInfoFromNetworkInternals(tab)
262      if not info['enabled']:
263        raise ChromeProxyMetricException, (
264            'Chrome proxy should be enabled. proxy info: %s' % info)
265      self.VerifyBadProxies(info['badProxies'], [])
266
267    if eligible_response_count <= 1:
268      raise ChromeProxyMetricException, (
269          'There should be more than one DRP eligible response '
270          '(eligible_response_count=%d, bypass_count=%d)\n' % (
271              eligible_response_count, bypass_count))
272    elif bypass_count != 1:
273      raise ChromeProxyMetricException, (
274          'Exactly one response should be bypassed. '
275          '(eligible_response_count=%d, bypass_count=%d)\n' % (
276              eligible_response_count, bypass_count))
277    else:
278      results.AddValue(scalar.ScalarValue(
279          results.current_page, 'eligible_responses', 'count',
280          eligible_response_count))
281      results.AddValue(scalar.ScalarValue(
282          results.current_page, 'bypass', 'count', bypass_count))
283
284  def AddResultsForSafebrowsing(self, tab, results):
285    count = 0
286    safebrowsing_count = 0
287    for resp in self.IterResponses(tab):
288      count += 1
289      if resp.IsSafebrowsingResponse():
290        safebrowsing_count += 1
291      else:
292        r = resp.response
293        raise ChromeProxyMetricException, (
294            '%s: Not a valid safe browsing response.\n'
295            'Reponse: status=(%d, %s)\nHeaders:\n %s' % (
296                r.url, r.status, r.status_text, r.headers))
297    if count == safebrowsing_count:
298      results.AddValue(scalar.ScalarValue(
299          results.current_page, 'safebrowsing', 'boolean', True))
300    else:
301      raise ChromeProxyMetricException, (
302          'Safebrowsing failed (count=%d, safebrowsing_count=%d)\n' % (
303              count, safebrowsing_count))
304
305  def AddResultsForHTTPFallback(
306      self, tab, results, expected_proxies=None, expected_bad_proxies=None):
307    info = GetProxyInfoFromNetworkInternals(tab)
308    if not 'enabled' in info or not info['enabled']:
309      raise ChromeProxyMetricException, (
310          'Chrome proxy should be enabled. proxy info: %s' % info)
311
312    if not expected_proxies:
313      expected_proxies = [self.effective_proxies['fallback'],
314                          self.effective_proxies['direct']]
315    if not expected_bad_proxies:
316      expected_bad_proxies = []
317
318    proxies = info['proxies']
319    if proxies != expected_proxies:
320      raise ChromeProxyMetricException, (
321          'Wrong effective proxies (%s). Expect: "%s"' % (
322          str(proxies), str(expected_proxies)))
323
324    bad_proxies = []
325    if 'badProxies' in info and info['badProxies']:
326      bad_proxies = [p['proxy'] for p in info['badProxies']
327                     if 'proxy' in p and p['proxy']]
328    if bad_proxies != expected_bad_proxies:
329      raise ChromeProxyMetricException, (
330          'Wrong bad proxies (%s). Expect: "%s"' % (
331          str(bad_proxies), str(expected_bad_proxies)))
332    results.AddValue(scalar.ScalarValue(
333        results.current_page, 'http_fallback', 'boolean', True))
334