Code example #1
  def test_handle_one_request_closes_connection(self):
    # By default, BaseHTTPServer.py treats all HTTP 1.1 requests as keep-alive.
    # Intentionally use HTTP 1.0 to prevent this behavior.
    response = httparchive.ArchivedHttpResponse(
        version=10, status=200, reason="OK",
        headers=[], response_data=["bat1"])
    self.set_up_proxy_server(response)
    t = threading.Thread(
        target=HttpProxyTest.serve_requests_forever, args=(self,))
    t.start()

    initial_thread_count = threading.activeCount()

    # Make a bunch of requests.
    request_count = 10
    for _ in range(request_count):
      conn = httplib.HTTPConnection('localhost', 8889, timeout=10)
      conn.request("GET", "/index.html")
      res = conn.getresponse().read()
      self.assertEqual(res, "bat1")
      conn.close()

    # Check to make sure that there is no leaked thread.
    util.WaitFor(lambda: threading.activeCount() == initial_thread_count, 2)

    self.assertEqual(request_count, HttpProxyTest.HANDLED_REQUEST_COUNT)
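
A note on the version argument used throughout these examples: it follows httplib's convention for HTTPResponse.version, where 10 means HTTP/1.0 and 11 means HTTP/1.1 (examples #4 and #6 below pass response.version through unchanged). Passing version=10 here is therefore enough to opt out of the keep-alive handling that BaseHTTPServer applies to HTTP/1.1 requests.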
Code example #2
    def test_max_fd(self):
        response = httparchive.ArchivedHttpResponse(version=11,
                                                    status=200,
                                                    reason="OK",
                                                    headers=[("Connection",
                                                              "keep-alive")],
                                                    response_data=["bat1"])
        self.set_up_proxy_server(response)
        t = threading.Thread(target=HttpProxyTest.serve_requests_forever,
                             args=(self, ))
        t.start()

        # Make a bunch of requests.
        request_count = 400
        connections = []
        for _ in range(request_count):
            conn = httplib.HTTPConnection('localhost', 8889, timeout=10)
            conn.request("GET",
                         "/index.html",
                         headers={"Connection": "keep-alive"})
            res = conn.getresponse().read()
            self.assertEqual(res, "bat1")
            connections.append(conn)

        # Check that the right number of requests have been handled.
        self.assertEqual(request_count, HttpProxyTest.HANDLED_REQUEST_COUNT)

        for conn in connections:
            conn.close()
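
As the name test_max_fd suggests, holding all 400 keep-alive connections open until the very end appears intended to drive up the proxy's count of simultaneously open sockets, and hence file descriptors, while still verifying that every request is handled.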
Code example #3
def createTestResponse(self):
    return httparchive.ArchivedHttpResponse(
        11,
        200,
        'OK', [('content-type', 'text/html')],
        ['<body>test</body>'],
        request_time=datetime.datetime(2016, 11, 17))
Code example #4
File: httpclient.py  Project: bbmjja8123/chromium-1
    def __call__(self, request):
        """Fetch an HTTP request.

        Args:
          request: an ArchivedHttpRequest
        Returns:
          an ArchivedHttpResponse
        """
        logging.debug('RealHttpFetch: %s %s', request.host, request.path)
        if ':' in request.host:
            parts = request.host.split(':')
            truehost = parts[0]
            trueport = int(parts[1])
        else:
            truehost = request.host
            trueport = None

        host_ip = self._real_dns_lookup(truehost)
        if not host_ip:
            logging.critical('Unable to find host ip for name: %s', truehost)
            return None
        retries = 3
        while True:
            try:
                if request.is_ssl:
                    if trueport:
                        connection = DetailedHTTPSConnection(host_ip, trueport)
                    else:
                        connection = DetailedHTTPSConnection(host_ip)
                else:
                    if trueport:
                        connection = DetailedHTTPConnection(host_ip, trueport)
                    else:
                        connection = DetailedHTTPConnection(host_ip)
                start = TIMER()
                connection.request(request.command, request.path,
                                   request.request_body, request.headers)
                response = connection.getresponse()
                headers_delay = int((TIMER() - start) * 1000)
                headers_delay -= self._get_server_rtt(request.host)

                chunks, chunk_delays = response.read_chunks()
                delays = {'headers': headers_delay, 'data': chunk_delays}
                archived_http_response = httparchive.ArchivedHttpResponse(
                    response.version, response.status, response.reason,
                    RealHttpFetch._ToTuples(response.msg.headers), chunks,
                    delays)
                return archived_http_response
            except Exception, e:
                if retries:
                    retries -= 1
                    logging.warning('Retrying fetch %s: %s', request, e)
                    continue
                logging.critical('Could not fetch %s: %s', request, e)
                return None
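
The RealHttpFetch._ToTuples helper used above is not shown in this excerpt. A minimal stand-in, assuming response.msg.headers yields raw "Name: value" lines as Python 2's httplib does (a real implementation would also have to handle headers folded across continuation lines):

def _to_tuples(headers):
    # Hypothetical sketch of RealHttpFetch._ToTuples: turn raw
    # "Name: value" header lines into the [(name, value), ...] list
    # that ArchivedHttpResponse expects. Continuation lines are ignored.
    tuples = []
    for line in headers:
        name, _, value = line.partition(':')
        tuples.append((name.lower(), value.strip()))
    return tuples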
Code example #5
def convert_response(response, timings):
  version = convert_version(response["httpVersion"])
  status = response["status"]
  reason = convert_unicode(response["statusText"])
  headers = convert_headers_to_tuples(response["headers"])
  # TODO(cs): deal with chunks properly.
  response_data = [""]
  if "text" in response["content"]:
    response_data = [response["content"]["text"]]
  delays = convert_timings(timings)
  return httparchive.ArchivedHttpResponse(version, status, reason,
                                          headers, response_data,
                                          delays=delays)
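
The convert_* helpers are defined elsewhere in the converter script. Assuming HAR 1.2 input, where httpVersion is a string such as "HTTP/1.1" and headers is a list of {"name": ..., "value": ...} dicts, minimal sketches might look like the following; the bodies are inferred from the formats involved, not copied from the original file:

def convert_version(http_version):
    # Map a HAR version string onto httplib's numeric convention:
    # 10 for HTTP/1.0, 11 for HTTP/1.1.
    return 11 if http_version == 'HTTP/1.1' else 10

def convert_headers_to_tuples(headers):
    # HAR headers come as [{"name": ..., "value": ...}];
    # ArchivedHttpResponse takes a list of (name, value) tuples.
    return [(h['name'], h['value']) for h in headers]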
Code example #6
  def __call__(self, request):
    """Fetch an HTTP request.

    Args:
      request: an ArchivedHttpRequest
    Returns:
      an ArchivedHttpResponse
    """
    logging.debug('RealHttpFetch: %s %s', request.host, request.full_path)
    request_host, request_port = self._get_request_host_port(request)
    retries = 3
    while True:
      try:
        connection = self._get_connection(
            request_host, request_port, request.is_ssl)
        connect_start = TIMER()
        connection.connect()
        connect_delay = int((TIMER() - connect_start) * 1000)
        start = TIMER()
        connection.request(
            request.command,
            request.full_path,
            request.request_body,
            request.headers)
        response = connection.getresponse()
        headers_delay = int((TIMER() - start) * 1000)

        chunks, chunk_delays = response.read_chunks()
        delays = {
            'connect': connect_delay,
            'headers': headers_delay,
            'data': chunk_delays
            }
        archived_http_response = httparchive.ArchivedHttpResponse(
            response.version,
            response.status,
            response.reason,
            RealHttpFetch._ToTuples(response.msg.headers),
            chunks,
            delays)
        return archived_http_response
      except Exception, e:
        if retries:
          retries -= 1
          logging.warning('Retrying fetch %s: %s', request, e)
          continue
        logging.critical('Could not fetch %s: %s', request, e)
        return None
Code example #7
    def test_keep_alive_header(self):
        response = httparchive.ArchivedHttpResponse(version=11,
                                                    status=200,
                                                    reason="OK",
                                                    headers=[("Connection",
                                                              "keep-alive")],
                                                    response_data=["bat1"])
        self.set_up_proxy_server(response)
        t = threading.Thread(target=HttpProxyTest.serve_requests_forever,
                             args=(self, ))
        t.start()

        initial_thread_count = threading.activeCount()

        # Make a bunch of requests.
        request_count = 10
        connections = []
        for _ in range(request_count):
            conn = httplib.HTTPConnection('localhost', 8889, timeout=10)
            conn.request("GET",
                         "/index.html",
                         headers={"Connection": "keep-alive"})
            res = conn.getresponse().read()
            self.assertEqual(res, "bat1")
            connections.append(conn)

        # Repeat the same requests.
        for conn in connections:
            conn.request("GET",
                         "/index.html",
                         headers={"Connection": "keep-alive"})
            res = conn.getresponse().read()
            self.assertEqual(res, "bat1")

        # Check that the right number of requests have been handled.
        self.assertEqual(2 * request_count,
                         HttpProxyTest.HANDLED_REQUEST_COUNT)

        # Check to make sure that exactly "request_count" new threads are active.
        self.assertEqual(threading.activeCount(),
                         initial_thread_count + request_count)

        for conn in connections:
            conn.close()

        util.WaitFor(lambda: threading.activeCount() == initial_thread_count,
                     1)
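
The thread arithmetic relies on the proxy serving each persistent connection on its own handler thread, which is exactly what the assertions check: ten open keep-alive connections add ten live threads, and closing the connections lets those threads exit, as the final WaitFor confirms.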
Code example #8
def test_generate_304(self):
    REQUEST_HEADERS = [{}, {
        'If-Modified-Since': 'whatever'
    }, {
        'If-None-Match': 'whatever yet again'
    }]
    RESPONSE_STATUSES = [200, 204, 304, 404]
    for allow_generate_304 in [False, True]:
        self.allow_generate_304 = allow_generate_304
        for serve_response_by_http_archive in [False, True]:
            self.serve_response_by_http_archive = serve_response_by_http_archive
            for response_status in RESPONSE_STATUSES:
                response = None
                if response_status != 404:
                    response = httparchive.ArchivedHttpResponse(
                        version=11,
                        status=response_status,
                        reason="OK",
                        headers=[],
                        response_data=["some content"])
                self.set_up_proxy_server(response)
                t = threading.Thread(
                    target=HttpProxyTest.serve_requests_forever,
                    args=(self, ))
                t.start()
                for method in ['GET', 'HEAD', 'POST']:
                    for headers in REQUEST_HEADERS:
                        connection = httplib.HTTPConnection('localhost',
                                                            8889,
                                                            timeout=10)
                        connection.request(method,
                                           "/index.html",
                                           headers=headers)
                        response = connection.getresponse()
                        connection.close()
                        if (allow_generate_304
                                and serve_response_by_http_archive
                                and method in ['GET', 'HEAD'] and headers
                                and response_status == 200):
                            self.assertEqual(304, response.status)
                            self.assertEqual('', response.read())
                        else:
                            self.assertEqual(response_status,
                                             response.status)
                self.tear_down_proxy_server()
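
In other words, the test expects a synthesized 304 only when every condition holds at once: 304 generation enabled, the response served from the archive, a GET or HEAD request, a validator header present (If-Modified-Since or If-None-Match), and an archived status of 200. In every other combination the client must see the original status, or a 404 when nothing is archived.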
Code example #9
def create_response(headers):
    return httparchive.ArchivedHttpResponse(11, 200, 'OK', headers, '')
Code example #10
def createTestResponse(self):
    return httparchive.ArchivedHttpResponse(
        11,
        200,
        'OK', [('content-type', 'text/html')], ['<body>test</body>'],
        request_time=HttpArchiveFetchTest.TEST_REQUEST_TIME)
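
Taken together, the examples imply a constructor of the shape ArchivedHttpResponse(version, status, reason, headers, response_data, delays=None, request_time=None); the keyword defaults are assumed here, not confirmed by the excerpts. A minimal construction combining the pieces seen above:

response = httparchive.ArchivedHttpResponse(
    version=11,                                # httplib convention: 11 = HTTP/1.1
    status=200,
    reason='OK',
    headers=[('content-type', 'text/html')],   # list of (name, value) tuples
    response_data=['<body>ok</body>'],         # body as a list of chunks
    delays={'connect': 0,                      # optional timings, keyed as in
            'headers': 0,                      # examples #4 and #6; 'data' has
            'data': [0]})                      # one delay per response chunk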