class HttpProxyServer(SocketServer.ThreadingMixIn, BaseHTTPServer.HTTPServer, daemonserver.DaemonServer): HANDLER = HttpArchiveHandler # Increase the request queue size. The default value, 5, is set in # SocketServer.TCPServer (the parent of BaseHTTPServer.HTTPServer). # Since we're intercepting many domains through this single server, # it is quite possible to get more than 5 concurrent requests. request_queue_size = 128 # Don't prevent python from exiting when there is thread activity. daemon_threads = True def __init__(self, http_archive_fetch, custom_handlers, host='localhost', port=80, use_delays=False, is_ssl=False, protocol='HTTP', down_bandwidth='0', up_bandwidth='0', delay_ms='0'): """Start HTTP server. Args: host: a host string (name or IP) for the web proxy. port: a port string (e.g. '80') for the web proxy. use_delays: if True, add response data delays during replay. is_ssl: True iff proxy is using SSL. up_bandwidth: Upload bandwidth down_bandwidth: Download bandwidth Bandwidths measured in [K|M]{bit/s|Byte/s}. '0' means unlimited. delay_ms: Propagation delay in milliseconds. '0' means no delay. """ try: BaseHTTPServer.HTTPServer.__init__(self, (host, port), self.HANDLER) except Exception, e: raise HttpProxyServerError( 'Could not start HTTPServer on port %d: %s' % (port, e)) self.http_archive_fetch = http_archive_fetch self.custom_handlers = custom_handlers self.use_delays = use_delays self.is_ssl = is_ssl self.traffic_shaping_down_bps = proxyshaper.GetBitsPerSecond( down_bandwidth) self.traffic_shaping_up_bps = proxyshaper.GetBitsPerSecond( up_bandwidth) self.traffic_shaping_delay_ms = int(delay_ms) self.num_active_requests = 0 self.total_request_time = 0 self.protocol = protocol # Note: This message may be scraped. Do not change it. logging.warning( '%s server started on %s:%d' % (self.protocol, self.server_address[0], self.server_address[1]))
class HttpProxyServer(SocketServer.ThreadingMixIn, BaseHTTPServer.HTTPServer,
                      daemonserver.DaemonServer):
  """Threaded HTTP proxy server that serves requests via HttpArchiveHandler."""

  # Request-handler class instantiated by BaseHTTPServer for each request.
  HANDLER = HttpArchiveHandler

  # Increase the request queue size. The default value, 5, is set in
  # SocketServer.TCPServer (the parent of BaseHTTPServer.HTTPServer).
  # Since we're intercepting many domains through this single server,
  # it is quite possible to get more than 5 concurrent requests.
  request_queue_size = 256

  # The number of simultaneous connections that the HTTP server supports. This
  # is primarily limited by system limits such as RLIMIT_NOFILE.
  connection_limit = 500

  # Allow sockets to be reused. See
  # http://svn.python.org/projects/python/trunk/Lib/SocketServer.py for more
  # details.
  allow_reuse_address = True

  # Don't prevent python from exiting when there is thread activity.
  daemon_threads = True

  def __init__(self, http_archive_fetch, custom_handlers,
               host='localhost', port=80, use_delays=False, is_ssl=False,
               protocol='HTTP',
               down_bandwidth='0', up_bandwidth='0', delay_ms='0'):
    """Start HTTP server.

    Args:
      http_archive_fetch: object stored for use by the request handler
          (presumably fetches archived responses -- confirm against
          HttpArchiveHandler).
      custom_handlers: object stored for use by the request handler
          (presumably supplies custom request handling -- confirm against
          HttpArchiveHandler).
      host: a host string (name or IP) for the web proxy.
      port: a port number (e.g. 80) for the web proxy. (Passed to the %d
          format below, so an int, not a string.)
      use_delays: if True, add response data delays during replay.
      is_ssl: True iff proxy is using SSL.
      protocol: label used in the startup log message (e.g. 'HTTP').
      up_bandwidth: Upload bandwidth
      down_bandwidth: Download bandwidth
          Bandwidths measured in [K|M]{bit/s|Byte/s}. '0' means unlimited.
      delay_ms: Propagation delay in milliseconds. '0' means no delay.

    Raises:
      HttpProxyServerError: if the underlying HTTPServer fails to start
          (e.g. it cannot bind (host, port)).
      AssertionError: if the OS hard fd limit is below the desired limit.
    """
    if platformsettings.SupportsFdLimitControl():
      # BaseHTTPServer opens a new thread and two fds for each connection.
      # Check that the process can open at least
      # 2 * connection_limit + wiggle_room fds (1100 with the values above).
      soft_limit, hard_limit = platformsettings.GetFdLimit()
      # Add some wiggle room since there are probably fds not associated with
      # connections.
      wiggle_room = 100
      desired_limit = 2 * HttpProxyServer.connection_limit + wiggle_room
      if soft_limit < desired_limit:
        # NOTE(review): `assert` is stripped when run under `python -O`;
        # consider raising an explicit exception instead.
        assert desired_limit <= hard_limit, (
            'The hard limit for number of open files per process is %s which '
            'is lower than the desired limit of %s.' % (
                hard_limit, desired_limit))
        # Raise the soft fd limit before binding so the server can actually
        # sustain connection_limit concurrent connections.
        platformsettings.AdjustFdLimit(desired_limit, hard_limit)
    try:
      # Binds the listening socket as a side effect.
      BaseHTTPServer.HTTPServer.__init__(self, (host, port), self.HANDLER)
    except Exception, e:
      raise HttpProxyServerError(
          'Could not start HTTPServer on port %d: %s' % (port, e))
    self.http_archive_fetch = http_archive_fetch
    self.custom_handlers = custom_handlers
    self.use_delays = use_delays
    self.is_ssl = is_ssl
    # Convert the human-readable bandwidth strings to bits-per-second once,
    # up front, rather than per request.
    self.traffic_shaping_down_bps = proxyshaper.GetBitsPerSecond(
        down_bandwidth)
    self.traffic_shaping_up_bps = proxyshaper.GetBitsPerSecond(
        up_bandwidth)
    self.traffic_shaping_delay_ms = int(delay_ms)
    # Bookkeeping counters; presumably updated by the handler while serving
    # requests (not visible in this block).
    self.num_active_requests = 0
    self.num_active_connections = 0
    self.total_request_time = 0
    self.protocol = protocol

    # Note: This message may be scraped. Do not change it.
    logging.warning(
        '%s server started on %s:%d' % (self.protocol,
                                        self.server_address[0],
                                        self.server_address[1]))
def testConvertsValidValues(self):
  """Every (dummynet option, expected bps) pair in VALID_RATES converts."""
  for option, expected in VALID_RATES:
    actual = proxyshaper.GetBitsPerSecond(option)
    failure_message = 'Unexpected result for %s: %s != %s' % (
        option, expected, actual)
    self.assertEqual(expected, actual, failure_message)