proxy_script_fetcher_impl_unittest.cc revision 4a5e2dc747d50c653511c68ccb2cfbfb740bd5a7
// Copyright (c) 2010 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "net/proxy/proxy_script_fetcher_impl.h"

#include <string>

#include "base/compiler_specific.h"
#include "base/file_path.h"
#include "base/path_service.h"
#include "base/utf_string_conversions.h"
#include "net/base/net_util.h"
#include "net/base/ssl_config_service_defaults.h"
#include "net/base/test_completion_callback.h"
#include "net/disk_cache/disk_cache.h"
#include "net/http/http_cache.h"
#include "net/url_request/url_request_unittest.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "testing/platform_test.h"

// TODO(eroman):
//   - Test canceling an outstanding request.
//   - Test deleting ProxyScriptFetcher while a request is in progress.

const FilePath::CharType kDocRoot[] =
    FILE_PATH_LITERAL("net/data/proxy_script_fetcher_unittest");

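// Bundles the result of a PAC fetch: the completion error code and the
// downloaded script text.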
struct FetchResult {
  int code;
  string16 text;
};

// A non-mock URL request context which can access http:// and file:// URLs.
class RequestContext : public URLRequestContext {
 public:
  RequestContext() {
    net::ProxyConfig no_proxy;
    host_resolver_ =
        net::CreateSystemHostResolver(net::HostResolver::kDefaultParallelism,
                                      NULL, NULL);
    proxy_service_ = net::ProxyService::CreateFixed(no_proxy);
    ssl_config_service_ = new net::SSLConfigServiceDefaults;

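    // Give the HTTP cache an in-memory backend so the tests never touch the
    // disk; a maximum size of 0 lets the backend pick its default capacity.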
    http_transaction_factory_ = new net::HttpCache(
        net::HttpNetworkLayer::CreateFactory(host_resolver_, NULL, NULL,
            proxy_service_, ssl_config_service_, NULL, NULL, NULL),
        net::HttpCache::DefaultBackend::InMemory(0));
  }

 private:
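  // The base URLRequestContext does not delete these members, so this
  // subclass has to clean them up itself.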
  ~RequestContext() {
    delete http_transaction_factory_;
    delete host_resolver_;
  }
};

// Required to be in net namespace by FRIEND_TEST.
namespace net {

// Get a file:// URL relative to net/data/proxy_script_fetcher_unittest.
GURL GetTestFileUrl(const std::string& relpath) {
  FilePath path;
  PathService::Get(base::DIR_SOURCE_ROOT, &path);
  path = path.AppendASCII("net");
  path = path.AppendASCII("data");
  path = path.AppendASCII("proxy_script_fetcher_unittest");
  GURL base_url = FilePathToFileURL(path);
  return GURL(base_url.spec() + "/" + relpath);
}

class ProxyScriptFetcherImplTest : public PlatformTest {
 public:
  ProxyScriptFetcherImplTest()
      : test_server_(net::TestServer::TYPE_HTTP, FilePath(kDocRoot)) {
  }

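  // Allow URLRequest to serve file:// URLs; the FileUrl and TooLarge tests
  // below depend on this.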
  static void SetUpTestCase() {
    URLRequest::AllowFileAccess();
  }

 protected:
  net::TestServer test_server_;
};

TEST_F(ProxyScriptFetcherImplTest, FileUrl) {
  scoped_refptr<URLRequestContext> context(new RequestContext);
  scoped_ptr<ProxyScriptFetcher> pac_fetcher(
      new ProxyScriptFetcherImpl(context));

  { // Fetch a non-existent file.
    string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher->Fetch(GetTestFileUrl("does-not-exist"),
                                    &text, &callback);
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(ERR_FILE_NOT_FOUND, callback.WaitForResult());
    EXPECT_TRUE(text.empty());
  }
  { // Fetch a file that exists.
    string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher->Fetch(GetTestFileUrl("pac.txt"),
                                    &text, &callback);
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(OK, callback.WaitForResult());
    EXPECT_EQ(ASCIIToUTF16("-pac.txt-\n"), text);
  }
}

// Note that all mime types are allowed for PAC files, to be consistent
// with other browsers.
TEST_F(ProxyScriptFetcherImplTest, HttpMimeType) {
  ASSERT_TRUE(test_server_.Start());

  scoped_refptr<URLRequestContext> context(new RequestContext);
  scoped_ptr<ProxyScriptFetcher> pac_fetcher(
      new ProxyScriptFetcherImpl(context));

  { // Fetch a PAC with mime type "text/plain"
    GURL url(test_server_.GetURL("files/pac.txt"));
    string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher->Fetch(url, &text, &callback);
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(OK, callback.WaitForResult());
    EXPECT_EQ(ASCIIToUTF16("-pac.txt-\n"), text);
  }
  { // Fetch a PAC with mime type "text/html"
    GURL url(test_server_.GetURL("files/pac.html"));
    string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher->Fetch(url, &text, &callback);
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(OK, callback.WaitForResult());
    EXPECT_EQ(ASCIIToUTF16("-pac.html-\n"), text);
  }
  { // Fetch a PAC with mime type "application/x-ns-proxy-autoconfig"
    GURL url(test_server_.GetURL("files/pac.nsproxy"));
    string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher->Fetch(url, &text, &callback);
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(OK, callback.WaitForResult());
    EXPECT_EQ(ASCIIToUTF16("-pac.nsproxy-\n"), text);
  }
}

TEST_F(ProxyScriptFetcherImplTest, HttpStatusCode) {
  ASSERT_TRUE(test_server_.Start());

  scoped_refptr<URLRequestContext> context(new RequestContext);
  scoped_ptr<ProxyScriptFetcher> pac_fetcher(
      new ProxyScriptFetcherImpl(context));

  { // Fetch a PAC which gives a 500 -- FAIL
    GURL url(test_server_.GetURL("files/500.pac"));
    string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher->Fetch(url, &text, &callback);
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(ERR_PAC_STATUS_NOT_OK, callback.WaitForResult());
    EXPECT_TRUE(text.empty());
  }
  { // Fetch a PAC which gives a 404 -- FAIL
    GURL url(test_server_.GetURL("files/404.pac"));
    string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher->Fetch(url, &text, &callback);
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(ERR_PAC_STATUS_NOT_OK, callback.WaitForResult());
    EXPECT_TRUE(text.empty());
  }
}

TEST_F(ProxyScriptFetcherImplTest, ContentDisposition) {
  ASSERT_TRUE(test_server_.Start());

  scoped_refptr<URLRequestContext> context(new RequestContext);
  scoped_ptr<ProxyScriptFetcher> pac_fetcher(
      new ProxyScriptFetcherImpl(context));

  // Fetch PAC scripts via HTTP with a Content-Disposition header -- should
  // have no effect.
  GURL url(test_server_.GetURL("files/downloadable.pac"));
  string16 text;
  TestCompletionCallback callback;
  int result = pac_fetcher->Fetch(url, &text, &callback);
  EXPECT_EQ(ERR_IO_PENDING, result);
  EXPECT_EQ(OK, callback.WaitForResult());
  EXPECT_EQ(ASCIIToUTF16("-downloadable.pac-\n"), text);
}

TEST_F(ProxyScriptFetcherImplTest, NoCache) {
  ASSERT_TRUE(test_server_.Start());

  scoped_refptr<URLRequestContext> context(new RequestContext);
  scoped_ptr<ProxyScriptFetcher> pac_fetcher(
      new ProxyScriptFetcherImpl(context));

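  // The fetcher should always go to the network for PAC scripts rather than
  // reuse a cached copy; the two fetches below check this by expecting the
  // second one to fail once the server has been shut down.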
  // Fetch a PAC script whose HTTP headers make it cacheable for 1 hour.
  GURL url(test_server_.GetURL("files/cacheable_1hr.pac"));
  {
    string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher->Fetch(url, &text, &callback);
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(OK, callback.WaitForResult());
    EXPECT_EQ(ASCIIToUTF16("-cacheable_1hr.pac-\n"), text);
  }

  // Now kill the HTTP server.
  ASSERT_TRUE(test_server_.Stop());

  // Try to fetch the file again -- it should fail, since the server is no
  // longer running. (If it were instead being loaded from the cache, we
  // would get a success.)
  {
    string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher->Fetch(url, &text, &callback);
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(ERR_CONNECTION_REFUSED, callback.WaitForResult());
  }
}

TEST_F(ProxyScriptFetcherImplTest, TooLarge) {
  ASSERT_TRUE(test_server_.Start());

  scoped_refptr<URLRequestContext> context(new RequestContext);
  scoped_ptr<ProxyScriptFetcherImpl> pac_fetcher(
      new ProxyScriptFetcherImpl(context));

  // Set the maximum response size to 50 bytes.
  int prev_size = pac_fetcher->SetSizeConstraint(50);

  // These two URLs are the same file, but are http:// vs file://
  GURL urls[] = {
    test_server_.GetURL("files/large-pac.nsproxy"),
    GetTestFileUrl("large-pac.nsproxy")
  };

  // Try fetching URLs that are 101 bytes large. We should abort the request
  // after 50 bytes have been read, and fail with a too large error.
  for (size_t i = 0; i < arraysize(urls); ++i) {
    const GURL& url = urls[i];
    string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher->Fetch(url, &text, &callback);
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(ERR_FILE_TOO_BIG, callback.WaitForResult());
    EXPECT_TRUE(text.empty());
  }

  // Restore the original size bound.
  pac_fetcher->SetSizeConstraint(prev_size);

  { // Make sure we can still fetch regular URLs.
    GURL url(test_server_.GetURL("files/pac.nsproxy"));
    string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher->Fetch(url, &text, &callback);
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(OK, callback.WaitForResult());
    EXPECT_EQ(ASCIIToUTF16("-pac.nsproxy-\n"), text);
  }
}

TEST_F(ProxyScriptFetcherImplTest, Hang) {
  ASSERT_TRUE(test_server_.Start());

  scoped_refptr<URLRequestContext> context(new RequestContext);
  scoped_ptr<ProxyScriptFetcherImpl> pac_fetcher(
      new ProxyScriptFetcherImpl(context));

  // Set the timeout period to 0.5 seconds.
  base::TimeDelta prev_timeout = pac_fetcher->SetTimeoutConstraint(
      base::TimeDelta::FromMilliseconds(500));

  // Try fetching a URL which takes 1.2 seconds. We should abort the request
  // after 500 ms, and fail with a timeout error.
  { GURL url(test_server_.GetURL("slow/proxy.pac?1.2"));
    string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher->Fetch(url, &text, &callback);
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(ERR_TIMED_OUT, callback.WaitForResult());
    EXPECT_TRUE(text.empty());
  }

  // Restore the original timeout period.
  pac_fetcher->SetTimeoutConstraint(prev_timeout);

  { // Make sure we can still fetch regular URLs.
    GURL url(test_server_.GetURL("files/pac.nsproxy"));
    string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher->Fetch(url, &text, &callback);
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(OK, callback.WaitForResult());
    EXPECT_EQ(ASCIIToUTF16("-pac.nsproxy-\n"), text);
  }
}

// The ProxyScriptFetcher should decode any content-codings
// (like gzip, bzip, etc.), and apply any charset conversions to yield
// UTF-16.
TEST_F(ProxyScriptFetcherImplTest, Encodings) {
  ASSERT_TRUE(test_server_.Start());

  scoped_refptr<URLRequestContext> context(new RequestContext);
  scoped_ptr<ProxyScriptFetcher> pac_fetcher(
      new ProxyScriptFetcherImpl(context));

  // Test a response that is gzip-encoded -- should get inflated.
  {
    GURL url(test_server_.GetURL("files/gzipped_pac"));
    string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher->Fetch(url, &text, &callback);
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(OK, callback.WaitForResult());
    EXPECT_EQ(ASCIIToUTF16("This data was gzipped.\n"), text);
  }

  // Test a response that was served as UTF-16 (BE). It should
  // be converted to UTF-16.
  {
    GURL url(test_server_.GetURL("files/utf16be_pac"));
    string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher->Fetch(url, &text, &callback);
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(OK, callback.WaitForResult());
    EXPECT_EQ(ASCIIToUTF16("This was encoded as UTF-16BE.\n"), text);
  }
}
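
// A rough, disabled sketch for the first TODO at the top of this file
// (canceling an outstanding request). It assumes that Cancel() drops the
// in-flight request without ever running the completion callback, that
// TestCompletionCallback::have_result() reports whether the callback fired,
// and that spinning the message loop is enough to flush any stray
// completion -- verify those assumptions before enabling it.
TEST_F(ProxyScriptFetcherImplTest, DISABLED_CancelOutstandingRequest) {
  ASSERT_TRUE(test_server_.Start());

  scoped_refptr<URLRequestContext> context(new RequestContext);
  scoped_ptr<ProxyScriptFetcher> pac_fetcher(
      new ProxyScriptFetcherImpl(context));

  // Start a fetch against a deliberately slow URL so that it is still
  // outstanding when we cancel it.
  GURL url(test_server_.GetURL("slow/proxy.pac?1.2"));
  string16 text;
  TestCompletionCallback callback;
  int result = pac_fetcher->Fetch(url, &text, &callback);
  EXPECT_EQ(ERR_IO_PENDING, result);

  // Cancel while the request is in progress; the callback should never run
  // and no text should have been written.
  pac_fetcher->Cancel();
  MessageLoop::current()->RunAllPending();
  EXPECT_FALSE(callback.have_result());
  EXPECT_TRUE(text.empty());
}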

}  // namespace net