url_request_slow_download_job.cc revision 2a99a7e74a7f215066514fe81d2bfa6639d9eddd
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/test/net/url_request_slow_download_job.h"

#include "base/bind.h"
#include "base/compiler_specific.h"
#include "base/logging.h"
#include "base/message_loop.h"
#include "base/string_util.h"
#include "base/stringprintf.h"
#include "content/public/browser/browser_thread.h"
#include "googleurl/src/gurl.h"
#include "net/base/io_buffer.h"
#include "net/base/net_errors.h"
#include "net/http/http_response_headers.h"
#include "net/url_request/url_request.h"
#include "net/url_request/url_request_filter.h"

namespace content {

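// This job serves a small set of "magic" URLs that let tests drive downloads
// deterministically.  A request for kUnknownSizeUrl or kKnownSizeUrl streams
// kFirstDownloadSize bytes and then stalls; a later request for
// kFinishDownloadUrl releases the remaining kSecondDownloadSize bytes on
// every stalled job, while kErrorDownloadUrl aborts them with
// ERR_CONNECTION_RESET.  The only difference between the two download URLs is
// whether a Content-Length header is included in the mock response.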
const char URLRequestSlowDownloadJob::kUnknownSizeUrl[] =
  "http://url.handled.by.slow.download/download-unknown-size";
const char URLRequestSlowDownloadJob::kKnownSizeUrl[] =
  "http://url.handled.by.slow.download/download-known-size";
const char URLRequestSlowDownloadJob::kFinishDownloadUrl[] =
  "http://url.handled.by.slow.download/download-finish";
const char URLRequestSlowDownloadJob::kErrorDownloadUrl[] =
  "http://url.handled.by.slow.download/download-error";

const int URLRequestSlowDownloadJob::kFirstDownloadSize = 1024 * 35;
const int URLRequestSlowDownloadJob::kSecondDownloadSize = 1024 * 10;

// static
base::LazyInstance<URLRequestSlowDownloadJob::SlowJobsSet>::Leaky
    URLRequestSlowDownloadJob::pending_requests_ = LAZY_INSTANCE_INITIALIZER;

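// StartAsync is posted rather than run inline so that headers and data are
// delivered asynchronously, as they would be for a real network request.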
void URLRequestSlowDownloadJob::Start() {
  MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&URLRequestSlowDownloadJob::StartAsync,
                 weak_factory_.GetWeakPtr()));
}

// static
void URLRequestSlowDownloadJob::AddUrlHandler() {
  net::URLRequestFilter* filter = net::URLRequestFilter::GetInstance();
  filter->AddUrlHandler(GURL(kUnknownSizeUrl),
                        &URLRequestSlowDownloadJob::Factory);
  filter->AddUrlHandler(GURL(kKnownSizeUrl),
                        &URLRequestSlowDownloadJob::Factory);
  filter->AddUrlHandler(GURL(kFinishDownloadUrl),
                        &URLRequestSlowDownloadJob::Factory);
  filter->AddUrlHandler(GURL(kErrorDownloadUrl),
                        &URLRequestSlowDownloadJob::Factory);
}
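
// A test typically installs the handler on the IO thread and then navigates
// to one of the URLs above.  A minimal sketch, assuming a chrome-layer
// browser test fixture where ui_test_utils is available:
//
//   BrowserThread::PostTask(
//       BrowserThread::IO, FROM_HERE,
//       base::Bind(&URLRequestSlowDownloadJob::AddUrlHandler));
//   ui_test_utils::NavigateToURL(
//       browser(), GURL(URLRequestSlowDownloadJob::kKnownSizeUrl));
//
// A later navigation to kFinishDownloadUrl completes the stalled download.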

// static
net::URLRequestJob* URLRequestSlowDownloadJob::Factory(
    net::URLRequest* request,
    net::NetworkDelegate* network_delegate,
    const std::string& scheme) {
  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
  URLRequestSlowDownloadJob* job = new URLRequestSlowDownloadJob(
      request, network_delegate);
  if (request->url().spec() != kFinishDownloadUrl &&
      request->url().spec() != kErrorDownloadUrl)
    pending_requests_.Get().insert(job);
  return job;
}

// static
size_t URLRequestSlowDownloadJob::NumberOutstandingRequests() {
  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
  return pending_requests_.Get().size();
}

// static
void URLRequestSlowDownloadJob::FinishPendingRequests() {
  typedef std::set<URLRequestSlowDownloadJob*> JobList;
  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
  for (JobList::iterator it = pending_requests_.Get().begin(); it !=
       pending_requests_.Get().end(); ++it) {
    (*it)->set_should_finish_download();
  }
}

// static
void URLRequestSlowDownloadJob::ErrorPendingRequests() {
  typedef std::set<URLRequestSlowDownloadJob*> JobList;
  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
  for (JobList::iterator it = pending_requests_.Get().begin(); it !=
       pending_requests_.Get().end(); ++it) {
    (*it)->set_should_error_download();
  }
}

URLRequestSlowDownloadJob::URLRequestSlowDownloadJob(
    net::URLRequest* request, net::NetworkDelegate* network_delegate)
    : net::URLRequestJob(request, network_delegate),
      bytes_already_sent_(0),
      should_error_download_(false),
      should_finish_download_(false),
      buffer_size_(0),
      ALLOW_THIS_IN_INITIALIZER_LIST(weak_factory_(this)) {
}

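// A request for kFinishDownloadUrl or kErrorDownloadUrl acts purely as a
// control signal: it flips the corresponding flag on every pending job and
// then completes with a trivial text/plain response of its own.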
void URLRequestSlowDownloadJob::StartAsync() {
  if (LowerCaseEqualsASCII(kFinishDownloadUrl, request_->url().spec().c_str()))
    URLRequestSlowDownloadJob::FinishPendingRequests();
  if (LowerCaseEqualsASCII(kErrorDownloadUrl, request_->url().spec().c_str()))
    URLRequestSlowDownloadJob::ErrorPendingRequests();

  NotifyHeadersComplete();
}

// ReadRawData and CheckDoneStatus together implement a state
// machine.  ReadRawData may be called at any time by the network stack.
// It responds as follows:
//      * If there are bytes remaining in the first chunk, they are
//        returned.
//      [No bytes remaining in the first chunk.]
//      * If should_finish_download_ is not set, it returns IO_PENDING,
//        and starts calling CheckDoneStatus on a regular timer.
//      [should_finish_download_ set.]
//      * If there are bytes remaining in the second chunk, they are filled.
//      * Otherwise, *bytes_read is set to 0 to signal end of request.
// CheckDoneStatus runs on that timer only in the specific case where we have
// transmitted all of the first chunk and none of the second.  Once
// should_finish_download_ is set, it "completes" the ReadRawData call that
// kicked off the repeated CheckDoneStatus() calls.
//
// FillBufferHelper does the actual work of figuring out where in the state
// machine we are and how the buffer should be filled.  It returns an enum
// indicating the state of the read.
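//
// For example, with (say) a 16 KB read buffer the first chunk
// (35 * 1024 = 35840 bytes) is served as reads of 16384, 16384, and 3072
// bytes; the next read blocks with IO_PENDING until should_finish_download_
// is set, at which point CheckDoneStatus delivers the 10240-byte second chunk
// and a final read returns 0.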
URLRequestSlowDownloadJob::ReadStatus
URLRequestSlowDownloadJob::FillBufferHelper(
    net::IOBuffer* buf, int buf_size, int* bytes_written) {
  if (bytes_already_sent_ < kFirstDownloadSize) {
    int bytes_to_write = std::min(kFirstDownloadSize - bytes_already_sent_,
                                  buf_size);
    for (int i = 0; i < bytes_to_write; ++i) {
      buf->data()[i] = '*';
    }
    *bytes_written = bytes_to_write;
    bytes_already_sent_ += bytes_to_write;
    return BUFFER_FILLED;
  }

  if (!should_finish_download_)
    return REQUEST_BLOCKED;

  if (bytes_already_sent_ < kFirstDownloadSize + kSecondDownloadSize) {
    int bytes_to_write =
        std::min(kFirstDownloadSize + kSecondDownloadSize - bytes_already_sent_,
                 buf_size);
    for (int i = 0; i < bytes_to_write; ++i) {
      buf->data()[i] = '*';
    }
    *bytes_written = bytes_to_write;
    bytes_already_sent_ += bytes_to_write;
    return BUFFER_FILLED;
  }

  return REQUEST_COMPLETE;
}

bool URLRequestSlowDownloadJob::ReadRawData(net::IOBuffer* buf, int buf_size,
                                            int* bytes_read) {
  if (LowerCaseEqualsASCII(kFinishDownloadUrl,
                           request_->url().spec().c_str()) ||
      LowerCaseEqualsASCII(kErrorDownloadUrl,
                           request_->url().spec().c_str())) {
    VLOG(10) << __FUNCTION__ << " called w/ kFinish/ErrorDownloadUrl.";
    *bytes_read = 0;
    return true;
  }

  VLOG(10) << __FUNCTION__ << " called at position "
           << bytes_already_sent_ << " in the stream.";
  ReadStatus status = FillBufferHelper(buf, buf_size, bytes_read);
  switch (status) {
    case BUFFER_FILLED:
      return true;
    case REQUEST_BLOCKED:
      buffer_ = buf;
      buffer_size_ = buf_size;
      SetStatus(net::URLRequestStatus(net::URLRequestStatus::IO_PENDING, 0));
      MessageLoop::current()->PostDelayedTask(
          FROM_HERE,
          base::Bind(&URLRequestSlowDownloadJob::CheckDoneStatus,
                     weak_factory_.GetWeakPtr()),
          base::TimeDelta::FromMilliseconds(100));
      return false;
    case REQUEST_COMPLETE:
      *bytes_read = 0;
      return true;
  }
  NOTREACHED();
  return true;
}

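// Polls every 100 ms while a read is blocked at the end of the first chunk.
// Once should_finish_download_ is set, the buffer held from the blocked
// ReadRawData call is filled with the second chunk and the read is completed
// via NotifyReadComplete(); should_error_download_ instead fails the request
// with ERR_CONNECTION_RESET.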
void URLRequestSlowDownloadJob::CheckDoneStatus() {
  if (should_finish_download_) {
    VLOG(10) << __FUNCTION__ << " called w/ should_finish_download_ set.";
    DCHECK(NULL != buffer_);
    int bytes_written = 0;
    ReadStatus status = FillBufferHelper(buffer_, buffer_size_, &bytes_written);
    DCHECK_EQ(BUFFER_FILLED, status);
    buffer_ = NULL;                     // Release the reference.
    SetStatus(net::URLRequestStatus());
    NotifyReadComplete(bytes_written);
  } else if (should_error_download_) {
    VLOG(10) << __FUNCTION__ << " called w/ should_error_download_ set.";
    NotifyDone(net::URLRequestStatus(
        net::URLRequestStatus::FAILED, net::ERR_CONNECTION_RESET));
  } else {
    MessageLoop::current()->PostDelayedTask(
        FROM_HERE,
        base::Bind(&URLRequestSlowDownloadJob::CheckDoneStatus,
                   weak_factory_.GetWeakPtr()),
        base::TimeDelta::FromMilliseconds(100));
  }
}

// Public virtual version.
void URLRequestSlowDownloadJob::GetResponseInfo(net::HttpResponseInfo* info) {
  // Forward to private const version.
  GetResponseInfoConst(info);
}

URLRequestSlowDownloadJob::~URLRequestSlowDownloadJob() {
  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
  pending_requests_.Get().erase(this);
}

// Private const version.
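// For kKnownSizeUrl the headers built below work out to:
//   HTTP/1.1 200 OK
//   Content-type: application/octet-stream
//   Cache-Control: max-age=0
//   Content-Length: 46080   (kFirstDownloadSize + kSecondDownloadSize)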
void URLRequestSlowDownloadJob::GetResponseInfoConst(
    net::HttpResponseInfo* info) const {
  // Send back mock headers.
  std::string raw_headers;
  if (LowerCaseEqualsASCII(kFinishDownloadUrl,
                           request_->url().spec().c_str()) ||
      LowerCaseEqualsASCII(kErrorDownloadUrl,
                           request_->url().spec().c_str())) {
    raw_headers.append(
      "HTTP/1.1 200 OK\n"
      "Content-type: text/plain\n");
  } else {
    raw_headers.append(
      "HTTP/1.1 200 OK\n"
      "Content-type: application/octet-stream\n"
      "Cache-Control: max-age=0\n");

    if (LowerCaseEqualsASCII(kKnownSizeUrl, request_->url().spec().c_str())) {
      raw_headers.append(base::StringPrintf(
          "Content-Length: %d\n",
          kFirstDownloadSize + kSecondDownloadSize));
    }
  }

  // ParseRawHeaders expects \0 to end each header line.
  ReplaceSubstringsAfterOffset(&raw_headers, 0, "\n", std::string("\0", 1));
  info->headers = new net::HttpResponseHeaders(raw_headers);
}

bool URLRequestSlowDownloadJob::GetMimeType(std::string* mime_type) const {
  net::HttpResponseInfo info;
  GetResponseInfoConst(&info);
  return info.headers && info.headers->GetMimeType(mime_type);
}

}  // namespace content