1#!/usr/bin/python
2
3# Copyright 2012 Google Inc. All Rights Reserved.
4#
5# Licensed under the Apache License, Version 2.0 (the "License");
6# you may not use this file except in compliance with the License.
7# You may obtain a copy of the License at
8#
9#    https://www.apache.org/licenses/LICENSE-2.0
10#
11# Unless required by applicable law or agreed to in writing, software
12# distributed under the License is distributed on an "AS IS" BASIS,
13# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14# See the License for the specific language governing permissions and
15# limitations under the License.
16#
17# Modified by Linus Nielsen Feltzing for inclusion in the libcurl test
18# framework
19#
try:
    import socketserver
except ImportError:
    # Python 2 fallback: the module was named SocketServer there. Catch only
    # ImportError -- a bare except would also swallow KeyboardInterrupt etc.
    import SocketServer as socketserver
import argparse
import re
import select
import socket
import time
import pprint
import os
31
# Informational page served for '/' (see ServeIndexError handling).
INFO_MESSAGE = '''
This is a test server to test the libcurl pipelining functionality.
It is a modified version of Google's HTTP pipelining test server. More
information can be found here:

https://dev.chromium.org/developers/design-documents/network-stack/http-pipelining

Source code can be found here:

https://code.google.com/archive/p/http-pipelining-test/
'''
MAX_REQUEST_SIZE = 1024  # bytes
MIN_POLL_TIME = 0.01  # seconds. Minimum time to poll, in order to prevent
                      # excessive looping because Python refuses to poll for
                      # small timeouts.
SEND_BUFFER_TIME = 0.5  # seconds
TIMEOUT = 30  # seconds
49
50
class Error(Exception):
  """Base exception for all errors raised by this test server."""
  pass
53
54
class RequestTooLargeError(Error):
  """Raised when the buffered request data exceeds MAX_REQUEST_SIZE bytes."""
  pass
57
58
class ServeIndexError(Error):
  """Raised when the index page is requested; handled by serving INFO_MESSAGE."""
  pass
61
62
class UnexpectedMethodError(Error):
  """Raised when a request uses an HTTP method other than GET."""
  pass
65
66
class RequestParser(object):
  """Parses an input buffer looking for HTTP GET requests.

  Incoming socket data is accumulated in an internal buffer; each complete
  request (request line plus headers, terminated by a blank line) is
  collected and handed back to the caller by ParseAdditionalData().
  """

  # Parser states: waiting for a request line, or reading header lines.
  LOOKING_FOR_GET = 1
  READING_HEADERS = 2

  # Raw strings so the regex escapes are not interpreted by Python
  # (avoids invalid-escape warnings on modern interpreters).
  HEADER_RE = re.compile(r'([^:]+):(.*)\n')
  REQUEST_RE = re.compile(r'([^ ]+) ([^ ]+) HTTP/(\d+)\.(\d+)\n')

  def __init__(self):
    """Initializer."""
    self._buffer = ""
    self._pending_headers = {}
    self._pending_request = ""
    self._state = self.LOOKING_FOR_GET
    self._were_all_requests_http_1_1 = True
    self._valid_requests = []

  def ParseAdditionalData(self, data):
    """Finds HTTP requests in |data|.

    Args:
      data: (String) Newly received input data from the socket.

    Returns:
      (List of Tuples)
        (String) The request path.
        (Map of String to String) The header name and value.

    Raises:
      RequestTooLargeError: If the request exceeds MAX_REQUEST_SIZE.
      UnexpectedMethodError: On a non-GET method.
      Error: On a programming error.
    """
    # Log raw input for the test framework; the context manager guarantees
    # the log file is closed even if the write fails.
    with open('log/server.input', 'a') as logfile:
      logfile.write(data)
    # The parser only deals with '\n' line endings.
    self._buffer += data.replace('\r', '')
    should_continue_parsing = True
    while should_continue_parsing:
      if self._state == self.LOOKING_FOR_GET:
        should_continue_parsing = self._DoLookForGet()
      elif self._state == self.READING_HEADERS:
        should_continue_parsing = self._DoReadHeader()
      else:
        raise Error('Unexpected state: ' + self._state)
    if len(self._buffer) > MAX_REQUEST_SIZE:
      raise RequestTooLargeError(
          'Request is at least %d bytes' % len(self._buffer))
    valid_requests = self._valid_requests
    self._valid_requests = []
    return valid_requests

  @property
  def were_all_requests_http_1_1(self):
    """(Boolean) False if any parsed request was not HTTP/1.1."""
    return self._were_all_requests_http_1_1

  def _DoLookForGet(self):
    """Tries to parse an HTTP request line.

    Returns:
      (Boolean) True if a request was found.

    Raises:
      UnexpectedMethodError: On a non-GET method.
      ServeIndexError: If the index page was requested.
    """
    m = self.REQUEST_RE.match(self._buffer)
    if not m:
      return False
    method, path, http_major, http_minor = m.groups()

    if method != 'GET':
      raise UnexpectedMethodError('Unexpected method: ' + method)
    if path in ['/', '/index.htm', '/index.html']:
      raise ServeIndexError()

    if http_major != '1' or http_minor != '1':
      self._were_all_requests_http_1_1 = False

    self._pending_request = path
    self._buffer = self._buffer[m.end():]
    self._state = self.READING_HEADERS
    return True

  def _DoReadHeader(self):
    """Tries to parse a HTTP header.

    Returns:
      (Boolean) True if it found the end of the request or a HTTP header.
    """
    if self._buffer.startswith('\n'):
      # A blank line terminates the header block: the request is complete.
      self._buffer = self._buffer[1:]
      self._state = self.LOOKING_FOR_GET
      self._valid_requests.append((self._pending_request,
                                   self._pending_headers))
      self._pending_headers = {}
      self._pending_request = ""
      return True

    m = self.HEADER_RE.match(self._buffer)
    if not m:
      return False

    header = m.group(1).lower()
    value = m.group(2).strip().lower()
    # Only the first occurrence of a repeated header is kept.
    if header not in self._pending_headers:
      self._pending_headers[header] = value
    self._buffer = self._buffer[m.end():]
    return True
180
181
class ResponseBuilder(object):
  """Builds HTTP responses for a list of accumulated requests."""

  def __init__(self):
    """Initializer."""
    self._max_pipeline_depth = 0
    self._requested_paths = []
    self._processed_end = False
    self._were_all_requests_http_1_1 = True

  def QueueRequests(self, requested_paths, were_all_requests_http_1_1):
    """Adds requests to the queue of requests.

    Args:
      requested_paths: (List of Tuples) Requested path and header map, as
          produced by RequestParser.ParseAdditionalData().
      were_all_requests_http_1_1: (Boolean) False if any request so far used
          an HTTP version other than 1.1.
    """
    self._requested_paths.extend(requested_paths)
    self._were_all_requests_http_1_1 = were_all_requests_http_1_1

  def Chunkify(self, data, chunksize):
    """Divides a string into HTTP chunked-transfer-encoding chunks.

    Args:
      data: (String) Payload to split.
      chunksize: (Integer) Maximum payload size of each chunk, in bytes.

    Returns:
      (List of Strings) Encoded chunks, without the terminating zero-length
      chunk.
    """
    # The size line must be the actual chunk length, not |chunksize|:
    # the final chunk may be shorter when len(data) is not a multiple of
    # chunksize.
    chunks = []
    for i in range(0, len(data), chunksize):
      chunk = data[i:i + chunksize]
      chunks.append(hex(len(chunk))[2:] + "\r\n" + chunk + "\r\n")
    return chunks

  def BuildResponses(self):
    """Converts the queue of requests into responses.

    Returns:
      (String) Buffer containing all of the responses.
    """
    result = ""
    self._max_pipeline_depth = max(self._max_pipeline_depth,
                                   len(self._requested_paths))
    for path, headers in self._requested_paths:
      if path == '/verifiedserver':
        body = "WE ROOLZ: {}\r\n".format(os.getpid())
        result += self._BuildResponse(
            '200 OK', ['Server: Apache',
                       'Content-Length: {}'.format(len(body)),
                       'Cache-Control: no-store'], body)

      elif path == '/alphabet.txt':
        body = 'abcdefghijklmnopqrstuvwxyz'
        result += self._BuildResponse(
            '200 OK', ['Server: Apache',
                       'Content-Length: 26',
                       'Cache-Control: no-store'], body)

      elif path == '/reverse.txt':
        body = 'zyxwvutsrqponmlkjihgfedcba'
        result += self._BuildResponse(
            '200 OK', ['Content-Length: 26', 'Cache-Control: no-store'], body)

      elif path == '/chunked.txt':
        body = ('7\r\nchunked\r\n'
                '8\r\nencoding\r\n'
                '2\r\nis\r\n'
                '3\r\nfun\r\n'
                '0\r\n\r\n')
        result += self._BuildResponse(
            '200 OK', ['Transfer-Encoding: chunked', 'Cache-Control: no-store'],
            body)

      elif path == '/cached.txt':
        body = 'azbycxdwevfugthsirjqkplomn'
        result += self._BuildResponse(
            '200 OK', ['Content-Length: 26', 'Cache-Control: max-age=60'], body)

      elif path == '/connection_close.txt':
        body = 'azbycxdwevfugthsirjqkplomn'
        result += self._BuildResponse(
            '200 OK', ['Content-Length: 26', 'Cache-Control: max-age=60', 'Connection: close'], body)
        # Stop serving on this connection after this response.
        self._processed_end = True

      elif path == '/1k.txt':
        body = '0123456789abcdef' * 64
        result += self._BuildResponse(
            '200 OK', ['Server: Apache',
                       'Content-Length: 1024',
                       'Cache-Control: max-age=60'], body)

      elif path == '/10k.txt':
        body = '0123456789abcdef' * 640
        result += self._BuildResponse(
            '200 OK', ['Server: Apache',
                       'Content-Length: 10240',
                       'Cache-Control: max-age=60'], body)

      elif path == '/100k.txt':
        body = '0123456789abcdef' * 6400
        result += self._BuildResponse(
            '200 OK',
            ['Server: Apache',
             'Content-Length: 102400',
             'Cache-Control: max-age=60'],
            body)

      elif path == '/100k_chunked.txt':
        body = self.Chunkify('0123456789abcdef' * 6400, 20480)
        body.append('0\r\n\r\n')  # terminating zero-length chunk
        body = ''.join(body)

        result += self._BuildResponse(
            '200 OK', ['Transfer-Encoding: chunked', 'Cache-Control: no-store'], body)

      elif path == '/stats.txt':
        results = {
            'max_pipeline_depth': self._max_pipeline_depth,
            'were_all_requests_http_1_1': int(self._were_all_requests_http_1_1),
        }
        body = ','.join(['%s:%s' % (k, v) for k, v in results.items()])
        result += self._BuildResponse(
            '200 OK',
            ['Content-Length: %s' % len(body), 'Cache-Control: no-store'], body)
        self._processed_end = True

      else:
        result += self._BuildResponse('404 Not Found', ['Content-Length: 7'], 'Go away')
      if self._processed_end:
          break
    self._requested_paths = []
    return result

  def WriteError(self, status, error):
    """Returns an HTTP response for the specified error.

    Args:
      status: (String) Response code and description (e.g. "404 Not Found")
      error: (String) Response body text.

    Returns:
      (String) Text of HTTP response.
    """
    return self._BuildResponse(
        status, ['Connection: close', 'Content-Type: text/plain'], error)

  @property
  def processed_end(self):
    # True once a terminating path (/stats.txt, /connection_close.txt)
    # has been served.
    return self._processed_end

  def _BuildResponse(self, status, headers, body):
    """Builds an HTTP response.

    Args:
      status: (String) Response code and description (e.g. "200 OK")
      headers: (List of Strings) Headers (e.g. "Connection: close")
      body: (String) Response body.

    Returns:
      (String) Text of HTTP response.
    """
    return ('HTTP/1.1 %s\r\n'
            '%s\r\n'
            '\r\n'
            '%s' % (status, '\r\n'.join(headers), body))
336
337
class PipelineRequestHandler(socketserver.BaseRequestHandler):
  """Called on an incoming TCP connection."""

  def _GetTimeUntilTimeout(self):
    """Returns seconds remaining before the per-connection TIMEOUT expires."""
    return self._start_time + TIMEOUT - time.time()

  def _GetTimeUntilNextSend(self):
    """Returns seconds until queued responses should be flushed.

    Returns TIMEOUT when nothing has been queued yet
    (self._last_queued_time == 0).
    """
    if not self._last_queued_time:
      return TIMEOUT
    return self._last_queued_time + SEND_BUFFER_TIME - time.time()

  def handle(self):
    """Per-connection loop: parse pipelined requests, send batched responses.

    Responses are held back for SEND_BUFFER_TIME after the last queued
    request so several pipelined responses go out together. The loop ends
    on timeout, on EOF from the client, or once a terminating path has
    been processed and the send buffer is drained.
    """
    self._request_parser = RequestParser()
    self._response_builder = ResponseBuilder()
    self._last_queued_time = 0  # 0 means "nothing queued yet"
    self._num_queued = 0
    self._num_written = 0
    self._send_buffer = ""
    self._start_time = time.time()
    try:
      while not self._response_builder.processed_end or self._send_buffer:

        # Poll no longer than the earlier of the connection timeout and the
        # next scheduled flush; MIN_POLL_TIME avoids zero-length polls.
        time_left = self._GetTimeUntilTimeout()
        time_until_next_send = self._GetTimeUntilNextSend()
        max_poll_time = min(time_left, time_until_next_send) + MIN_POLL_TIME

        rlist, wlist, xlist = [], [], []
        fileno = self.request.fileno()
        if max_poll_time > 0:
          rlist.append(fileno)
          # Only ask select() about writability when there is data to send.
          if self._send_buffer:
            wlist.append(fileno)
          rlist, wlist, xlist = select.select(rlist, wlist, xlist, max_poll_time)

        if self._GetTimeUntilTimeout() <= 0:
          # Overall connection timeout: give up.
          return

        if self._GetTimeUntilNextSend() <= 0:
          # Hold-back window expired: build responses for everything queued
          # so far and reset the window.
          self._send_buffer += self._response_builder.BuildResponses()
          self._num_written = self._num_queued
          self._last_queued_time = 0

        if fileno in rlist:
          # Temporarily non-blocking so the recv cannot stall the loop.
          self.request.setblocking(False)
          new_data = self.request.recv(MAX_REQUEST_SIZE)
          self.request.setblocking(True)
          if not new_data:
            # EOF: client closed its end of the connection.
            return
          new_requests = self._request_parser.ParseAdditionalData(new_data)
          self._response_builder.QueueRequests(
              new_requests, self._request_parser.were_all_requests_http_1_1)
          self._num_queued += len(new_requests)
          self._last_queued_time = time.time()
        elif fileno in wlist:
          # Send at most 4096 bytes per iteration.
          # NOTE(review): the 0.05s sleep appears intended to throttle the
          # writes so responses span multiple TCP segments -- confirm.
          num_bytes_sent = self.request.send(self._send_buffer[0:4096])
          self._send_buffer = self._send_buffer[num_bytes_sent:]
          time.sleep(0.05)

    except RequestTooLargeError as e:
      self.request.send(self._response_builder.WriteError(
          '413 Request Entity Too Large', e))
      raise
    except UnexpectedMethodError as e:
      self.request.send(self._response_builder.WriteError(
          '405 Method Not Allowed', e))
      raise
    except ServeIndexError:
      # Requests for '/' get the informational page instead of an error.
      self.request.send(self._response_builder.WriteError(
          '200 OK', INFO_MESSAGE))
    except Exception as e:
      print(e)
    self.request.close()
410
411
class PipelineServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
  """Threaded TCP server; each connection is handled in its own thread."""
  pass
414
415
# Command-line interface expected by the libcurl test framework.
parser = argparse.ArgumentParser()
parser.add_argument("--port", action="store", default=0,
                  type=int, help="port to listen on")
parser.add_argument("--verbose", action="store", default=0,
                  type=int, help="verbose output")
parser.add_argument("--pidfile", action="store", default=0,
                  help="file name for the PID")
parser.add_argument("--logfile", action="store", default=0,
                  help="file name for the log")
parser.add_argument("--srcdir", action="store", default=0,
                  help="test directory")
parser.add_argument("--id", action="store", default=0,
                  help="server ID")
parser.add_argument("--ipv4", action="store_true", default=0,
                  help="IPv4 flag")
args = parser.parse_args()

# Record our PID so the test framework can find and stop this server.
if args.pidfile:
    # Context manager guarantees the file is closed even if write() fails.
    with open(args.pidfile, 'w') as f:
        f.write('{}'.format(os.getpid()))

# allow_reuse_address must be in effect before the socket is bound, and
# TCPServer.__init__ binds immediately -- so set it on the class.  Assigning
# it on the instance after construction (as the code previously did) had no
# effect.
PipelineServer.allow_reuse_address = True
server = PipelineServer(('0.0.0.0', args.port), PipelineRequestHandler)
server.serve_forever()
442