def test_proxy_with_http_scheme(self):
    """An `http://`-schemed proxy URL yields a plain HostnameEndpoint to the proxy."""
    proxy_agent = ProxyAgent(self.reactor, use_proxy=True)

    # Hoist the endpoint once and check its type and parsed host/port.
    endpoint = proxy_agent.http_proxy_endpoint
    self.assertIsInstance(endpoint, HostnameEndpoint)
    self.assertEqual(endpoint._hostStr, "proxy.com")
    self.assertEqual(endpoint._port, 8888)
def test_https_request_via_no_proxy_star(self):
    """An https request goes direct to the target when proxying is bypassed."""
    https_agent = ProxyAgent(
        self.reactor,
        contextFactory=get_test_https_policy(),
        use_proxy=True,
    )
    self._test_request_direct_connection(https_agent, b"https", b"test.com", b"abc")
def _test_request_direct_connection(
    self,
    agent: ProxyAgent,
    scheme: bytes,
    hostname: bytes,
    path: bytes,
):
    """Runs a test case for a direct connection not going through a proxy.

    Args:
        agent: the proxy agent being tested
        scheme: expected to be either "http" or "https"
        hostname: the hostname to connect to in the test
        path: the path to connect to in the test
    """
    use_tls = scheme == b"https"
    self.reactor.lookups[hostname.decode()] = "1.2.3.4"

    request_deferred = agent.request(
        b"GET", b"".join([scheme, b"://", hostname, b"/", path])
    )

    # Exactly one TCP connection attempt should now be pending, aimed at
    # the resolved IP and the scheme's default port.
    pending_clients = self.reactor.tcpClients
    self.assertEqual(len(pending_clients), 1)
    host, port, client_factory, _timeout, _bind_address = pending_clients[0]
    self.assertEqual(host, "1.2.3.4")
    self.assertEqual(port, 443 if use_tls else 80)

    # Stand up a test server and wire the client factory to it.
    server = self._make_connection(
        client_factory,
        _get_test_protocol_factory(),
        ssl=use_tls,
        expected_sni=hostname if use_tls else None,
    )

    # The FakeTransport is async, so pump the reactor to deliver data.
    self.reactor.advance(0)

    # The server should have received exactly one request.
    self.assertEqual(len(server.requests), 1)
    req = server.requests[0]
    self.assertEqual(req.method, b"GET")
    self.assertEqual(req.path, b"/" + path)
    self.assertEqual(req.requestHeaders.getRawHeaders(b"host"), [hostname])

    # Reply, then pump again so the client sees the response body.
    req.write(b"result")
    req.finish()
    self.reactor.advance(0)

    response = self.successResultOf(request_deferred)
    body = self.successResultOf(treq.content(response))
    self.assertEqual(body, b"result")
def __init__(
    self,
    reactor: ISynapseReactor,
    tls_client_options_factory: Optional[FederationPolicyForHTTPS],
    user_agent: bytes,
    ip_blacklist: IPSet,
    proxy_reactor: Optional[ISynapseReactor] = None,
    _srv_resolver: Optional[SrvResolver] = None,
    _well_known_resolver: Optional[WellKnownResolver] = None,
):
    """Build the federation agent, its connection pool and well-known resolver.

    Args:
        reactor: reactor used for connections, DNS lookups and timers.
        tls_client_options_factory: factory producing TLS client options,
            or None.
        user_agent: value to send as the User-Agent header.
        ip_blacklist: IPs that must not be contacted; applied to the agent
            wrapped around the well-known resolver.
        proxy_reactor: reactor used for connections to an outbound proxy;
            defaults to `reactor` when not supplied.
        _srv_resolver: override for the SRV resolver (underscore prefix —
            presumably a test hook; confirm with callers).
        _well_known_resolver: override for the well-known resolver
            (presumably a test hook as well).
    """
    self._reactor = reactor
    self._clock = Clock(reactor)

    # Connection pool: no automatic retries, a small per-host cap, and
    # idle connections cached for two minutes.
    self._pool = HTTPConnectionPool(reactor)
    self._pool.retryAutomatically = False
    self._pool.maxPersistentPerHost = 5
    self._pool.cachedConnectionTimeout = 2 * 60

    if proxy_reactor is None:
        self.proxy_reactor = reactor
    else:
        self.proxy_reactor = proxy_reactor

    # Read the https proxy (if any) from the process environment.
    proxies = getproxies()
    https_proxy = proxies["https"].encode() if "https" in proxies else None

    self._agent = Agent.usingEndpointFactory(
        self._reactor,
        MatrixHostnameEndpointFactory(
            reactor,
            self.proxy_reactor,
            tls_client_options_factory,
            _srv_resolver,
            https_proxy,
        ),
        pool=self._pool,
    )
    self.user_agent = user_agent

    if _well_known_resolver is None:
        # Note that the name resolver has already been wrapped in a
        # IPBlacklistingResolver by MatrixFederationHttpClient.
        _well_known_resolver = WellKnownResolver(
            self._reactor,
            agent=BlacklistingAgentWrapper(
                ProxyAgent(
                    self._reactor,
                    self.proxy_reactor,
                    pool=self._pool,
                    contextFactory=tls_client_options_factory,
                    use_proxy=True,
                ),
                ip_blacklist=ip_blacklist,
            ),
            user_agent=self.user_agent,
        )

    self._well_known_resolver = _well_known_resolver
def test_http_request_via_proxy_with_blacklist(self):
    """The request still goes via the proxy even when the proxy IP is blacklisted."""
    # The blacklist range 1.0.0.0/8 covers the proxy's resolved IP.
    blacklisting_reactor = BlacklistingReactorWrapper(
        self.reactor, ip_whitelist=None, ip_blacklist=IPSet(["1.0.0.0/8"])
    )
    agent = ProxyAgent(blacklisting_reactor, self.reactor, use_proxy=True)

    self.reactor.lookups["proxy.com"] = "1.2.3.5"
    request_d = agent.request(b"GET", b"http://test.com")

    # A TCP connection to the proxy should be pending despite the blacklist.
    pending = self.reactor.tcpClients
    self.assertEqual(len(pending), 1)
    host, port, client_factory, _timeout, _bind = pending[0]
    self.assertEqual(host, "1.2.3.5")
    self.assertEqual(port, 8888)

    # Wire a test server up as the proxy.
    proxy = self._make_connection(client_factory, _get_test_protocol_factory())

    # The FakeTransport is async — pump the reactor to deliver the request.
    self.reactor.advance(0)

    # The proxy should see one absolute-form GET.
    self.assertEqual(len(proxy.requests), 1)
    req = proxy.requests[0]
    self.assertEqual(req.method, b"GET")
    self.assertEqual(req.path, b"http://test.com")
    self.assertEqual(req.requestHeaders.getRawHeaders(b"host"), [b"test.com"])

    # Answer, pump again, and check the body made it back to the client.
    req.write(b"result")
    req.finish()
    self.reactor.advance(0)

    response = self.successResultOf(request_d)
    body = self.successResultOf(treq.content(response))
    self.assertEqual(body, b"result")
def test_https_request(self):
    """With no proxy configured, an https request connects directly over TLS."""
    tls_agent = ProxyAgent(self.reactor, contextFactory=get_test_https_policy())

    self.reactor.lookups["test.com"] = "1.2.3.4"
    request_d = tls_agent.request(b"GET", b"https://test.com/abc")

    # Exactly one TCP connection attempt should be pending, to port 443.
    pending = self.reactor.tcpClients
    self.assertEqual(len(pending), 1)
    host, port, client_factory, _timeout, _bind = pending[0]
    self.assertEqual(host, "1.2.3.4")
    self.assertEqual(port, 443)

    # Stand up a TLS test server and connect the client to it, checking SNI.
    server = self._make_connection(
        client_factory,
        _get_test_protocol_factory(),
        ssl=True,
        expected_sni=b"test.com",
    )

    # The FakeTransport is async — pump the reactor to deliver data.
    self.reactor.advance(0)

    # One request should have arrived at the server.
    self.assertEqual(len(server.requests), 1)
    req = server.requests[0]
    self.assertEqual(req.method, b"GET")
    self.assertEqual(req.path, b"/abc")
    self.assertEqual(req.requestHeaders.getRawHeaders(b"host"), [b"test.com"])

    # Respond, pump once more, and verify the client sees the body.
    req.write(b"result")
    req.finish()
    self.reactor.advance(0)

    response = self.successResultOf(request_d)
    body = self.successResultOf(treq.content(response))
    self.assertEqual(body, b"result")
def __init__(
    self,
    reactor: ISynapseReactor,
    tls_client_options_factory: Optional[FederationPolicyForHTTPS],
    user_agent: bytes,
    ip_whitelist: IPSet,
    ip_blacklist: IPSet,
    _srv_resolver: Optional[SrvResolver] = None,
    _well_known_resolver: Optional[WellKnownResolver] = None,
):
    """Build the federation agent with a blacklist-filtering reactor wrapper.

    Args:
        reactor: the underlying reactor for connections and timers.
        tls_client_options_factory: factory producing TLS client options,
            or None.
        user_agent: value to send as the User-Agent header.
        ip_whitelist: IPs allowed even when covered by the blacklist.
        ip_blacklist: IPs that must not be contacted.
        _srv_resolver: override for the SRV resolver (underscore prefix —
            presumably a test hook; confirm with callers).
        _well_known_resolver: override for the well-known resolver.
    """
    # proxy_reactor is not blacklisted
    proxy_reactor = reactor

    # We need to use a DNS resolver which filters out blacklisted IP
    # addresses, to prevent DNS rebinding.
    reactor = BlacklistingReactorWrapper(reactor, ip_whitelist, ip_blacklist)

    self._clock = Clock(reactor)

    # Connection pool: no automatic retries, small per-host cap, idle
    # connections cached for two minutes.
    self._pool = HTTPConnectionPool(reactor)
    self._pool.retryAutomatically = False
    self._pool.maxPersistentPerHost = 5
    self._pool.cachedConnectionTimeout = 2 * 60

    self._agent = Agent.usingEndpointFactory(
        reactor,
        MatrixHostnameEndpointFactory(
            reactor,
            proxy_reactor,
            tls_client_options_factory,
            _srv_resolver,
        ),
        pool=self._pool,
    )
    self.user_agent = user_agent

    if _well_known_resolver is None:
        # The well-known lookup uses the blacklisting reactor, and its agent
        # is additionally wrapped so direct IP access is filtered too.
        _well_known_resolver = WellKnownResolver(
            reactor,
            agent=BlacklistingAgentWrapper(
                ProxyAgent(
                    reactor,
                    proxy_reactor,
                    pool=self._pool,
                    contextFactory=tls_client_options_factory,
                    use_proxy=True,
                ),
                ip_blacklist=ip_blacklist,
            ),
            user_agent=self.user_agent,
        )

    self._well_known_resolver = _well_known_resolver
def test_https_request_via_uppercase_proxy_with_blacklist(self):
    """An https request tunnels through the proxy (CONNECT) even though the
    proxy's IP falls inside the blacklist, since proxy connections use the
    unfiltered reactor."""
    # The blacklist includes the configured proxy IP.
    agent = ProxyAgent(
        BlacklistingReactorWrapper(
            self.reactor, ip_whitelist=None, ip_blacklist=IPSet(["1.0.0.0/8"])
        ),
        self.reactor,
        contextFactory=get_test_https_policy(),
        use_proxy=True,
    )

    self.reactor.lookups["proxy.com"] = "1.2.3.5"
    d = agent.request(b"GET", b"https://test.com/abc")

    # there should be a pending TCP connection
    clients = self.reactor.tcpClients
    self.assertEqual(len(clients), 1)
    (host, port, client_factory, _timeout, _bindAddress) = clients[0]
    self.assertEqual(host, "1.2.3.5")
    self.assertEqual(port, 1080)

    # make a test HTTP server, and wire up the client
    proxy_server = self._make_connection(
        client_factory, _get_test_protocol_factory()
    )

    # fish the transports back out so that we can do the old switcheroo
    s2c_transport = proxy_server.transport
    client_protocol = s2c_transport.other
    c2s_transport = client_protocol.transport

    # the FakeTransport is async, so we need to pump the reactor
    self.reactor.advance(0)

    # now there should be a pending CONNECT request
    self.assertEqual(len(proxy_server.requests), 1)

    request = proxy_server.requests[0]
    self.assertEqual(request.method, b"CONNECT")
    self.assertEqual(request.path, b"test.com:443")

    # tell the proxy server not to close the connection
    proxy_server.persistent = True

    # this just stops the http Request trying to do a chunked response
    # request.setHeader(b"Content-Length", b"0")
    request.finish()

    # now we can replace the proxy channel with a new, SSL-wrapped HTTP channel
    ssl_factory = _wrap_server_factory_for_tls(_get_test_protocol_factory())
    ssl_protocol = ssl_factory.buildProtocol(None)
    http_server = ssl_protocol.wrappedProtocol
    ssl_protocol.makeConnection(
        FakeTransport(client_protocol, self.reactor, ssl_protocol)
    )
    # Swap the client's peer from the proxy channel to the TLS server, so
    # subsequent client traffic flows into the tunnelled https server.
    c2s_transport.other = ssl_protocol
    self.reactor.advance(0)

    # The SNI presented inside the tunnel should be the target host, not
    # the proxy.
    server_name = ssl_protocol._tlsConnection.get_servername()
    expected_sni = b"test.com"
    self.assertEqual(
        server_name,
        expected_sni,
        "Expected SNI %s but got %s" % (expected_sni, server_name),
    )

    # now there should be a pending request
    self.assertEqual(len(http_server.requests), 1)

    request = http_server.requests[0]
    self.assertEqual(request.method, b"GET")
    self.assertEqual(request.path, b"/abc")
    self.assertEqual(request.requestHeaders.getRawHeaders(b"host"), [b"test.com"])
    request.write(b"result")
    request.finish()

    self.reactor.advance(0)

    resp = self.successResultOf(d)
    body = self.successResultOf(treq.content(resp))
    self.assertEqual(body, b"result")
def test_http_request_via_no_proxy_star(self):
    """An http request bypasses the proxy and connects directly."""
    no_proxy_agent = ProxyAgent(self.reactor, use_proxy=True)
    self._test_request_direct_connection(no_proxy_agent, b"http", b"test.com", b"")
def test_http_request_use_proxy_empty_environment(self):
    """With use_proxy but nothing configured, the request connects directly."""
    agent_under_test = ProxyAgent(self.reactor, use_proxy=True)
    self._test_request_direct_connection(agent_under_test, b"http", b"test.com", b"")
def test_https_request(self):
    """An https request with no proxy configured goes straight to the host."""
    direct_agent = ProxyAgent(self.reactor, contextFactory=get_test_https_policy())
    self._test_request_direct_connection(direct_agent, b"https", b"test.com", b"abc")
def _do_https_request_via_proxy(
    self,
    auth_credentials: Optional[str] = None,
):
    """Send an https request via the proxy and check both legs: the CONNECT
    seen by the proxy (with or without credentials) and the tunnelled GET
    seen by the target server.

    Args:
        auth_credentials: "user:password" credentials expected to be sent to
            the proxy in a Proxy-Authorization header, or None if no
            credentials are expected.
    """
    agent = ProxyAgent(
        self.reactor,
        contextFactory=get_test_https_policy(),
        use_proxy=True,
    )

    self.reactor.lookups["proxy.com"] = "1.2.3.5"
    d = agent.request(b"GET", b"https://test.com/abc")

    # there should be a pending TCP connection (to the proxy)
    clients = self.reactor.tcpClients
    self.assertEqual(len(clients), 1)
    (host, port, client_factory, _timeout, _bindAddress) = clients[0]
    self.assertEqual(host, "1.2.3.5")
    self.assertEqual(port, 1080)

    # make a test HTTP server, and wire up the client
    proxy_server = self._make_connection(
        client_factory, _get_test_protocol_factory()
    )

    # fish the transports back out so that we can do the old switcheroo
    s2c_transport = proxy_server.transport
    client_protocol = s2c_transport.other
    c2s_transport = client_protocol.transport

    # the FakeTransport is async, so we need to pump the reactor
    self.reactor.advance(0)

    # now there should be a pending CONNECT request
    self.assertEqual(len(proxy_server.requests), 1)

    request = proxy_server.requests[0]
    self.assertEqual(request.method, b"CONNECT")
    self.assertEqual(request.path, b"test.com:443")

    # Check whether auth credentials have been supplied to the proxy
    proxy_auth_header_values = request.requestHeaders.getRawHeaders(
        b"Proxy-Authorization"
    )

    if auth_credentials is not None:
        # Compute the correct header value for Proxy-Authorization from the
        # supplied credentials. (Previously this hard-coded
        # b"bob:pinkponies", silently ignoring the parameter.)
        encoded_credentials = base64.b64encode(auth_credentials.encode("ascii"))
        expected_header_value = b"Basic " + encoded_credentials

        # Validate the header's value
        self.assertIn(expected_header_value, proxy_auth_header_values)
    else:
        # Check that the Proxy-Authorization header has not been supplied to the proxy
        self.assertIsNone(proxy_auth_header_values)

    # tell the proxy server not to close the connection
    proxy_server.persistent = True

    # this just stops the http Request trying to do a chunked response
    # request.setHeader(b"Content-Length", b"0")
    request.finish()

    # now we can replace the proxy channel with a new, SSL-wrapped HTTP channel
    ssl_factory = _wrap_server_factory_for_tls(_get_test_protocol_factory())
    ssl_protocol = ssl_factory.buildProtocol(None)
    http_server = ssl_protocol.wrappedProtocol
    ssl_protocol.makeConnection(
        FakeTransport(client_protocol, self.reactor, ssl_protocol)
    )
    # Point the client's peer at the TLS server so traffic flows through the
    # tunnel instead of the proxy channel.
    c2s_transport.other = ssl_protocol
    self.reactor.advance(0)

    # Inside the tunnel the SNI must be the target host, not the proxy.
    server_name = ssl_protocol._tlsConnection.get_servername()
    expected_sni = b"test.com"
    self.assertEqual(
        server_name,
        expected_sni,
        "Expected SNI %s but got %s" % (expected_sni, server_name),
    )

    # now there should be a pending request
    self.assertEqual(len(http_server.requests), 1)

    request = http_server.requests[0]
    self.assertEqual(request.method, b"GET")
    self.assertEqual(request.path, b"/abc")
    self.assertEqual(request.requestHeaders.getRawHeaders(b"host"), [b"test.com"])

    # Check that the destination server DID NOT receive proxy credentials
    proxy_auth_header_values = request.requestHeaders.getRawHeaders(
        b"Proxy-Authorization"
    )
    self.assertIsNone(proxy_auth_header_values)

    request.write(b"result")
    request.finish()

    self.reactor.advance(0)

    resp = self.successResultOf(d)
    body = self.successResultOf(treq.content(resp))
    self.assertEqual(body, b"result")
def test_proxy_with_unsupported_scheme(self):
    """Constructing the agent fails when the proxy URL scheme is unsupported."""
    self.assertRaises(ValueError, ProxyAgent, self.reactor, use_proxy=True)
def _do_https_request_via_proxy(
    self,
    expect_proxy_ssl: bool = False,
    expected_auth_credentials: Optional[bytes] = None,
):
    """Send a https request via an agent and check that it is correctly
    received at the proxy and client. The proxy can use either http or https.

    Args:
        expect_proxy_ssl: True if we expect the request to connect via https
            to proxy
        expected_auth_credentials: credentials to authenticate at proxy
    """
    agent = ProxyAgent(
        self.reactor,
        contextFactory=get_test_https_policy(),
        use_proxy=True,
    )

    self.reactor.lookups["proxy.com"] = "1.2.3.5"
    d = agent.request(b"GET", b"https://test.com/abc")

    # there should be a pending TCP connection
    clients = self.reactor.tcpClients
    self.assertEqual(len(clients), 1)
    (host, port, client_factory, _timeout, _bindAddress) = clients[0]
    self.assertEqual(host, "1.2.3.5")
    self.assertEqual(port, 1080)

    # make a test server to act as the proxy, and wire up the client
    proxy_server = self._make_connection(
        client_factory,
        _get_test_protocol_factory(),
        ssl=expect_proxy_ssl,
        tls_sanlist=[b"DNS:proxy.com"] if expect_proxy_ssl else None,
        expected_sni=b"proxy.com" if expect_proxy_ssl else None,
    )
    assert isinstance(proxy_server, HTTPChannel)

    # now there should be a pending CONNECT request
    self.assertEqual(len(proxy_server.requests), 1)

    request = proxy_server.requests[0]
    self.assertEqual(request.method, b"CONNECT")
    self.assertEqual(request.path, b"test.com:443")

    # Check whether auth credentials have been supplied to the proxy
    proxy_auth_header_values = request.requestHeaders.getRawHeaders(
        b"Proxy-Authorization"
    )

    if expected_auth_credentials is not None:
        # Compute the correct header value for Proxy-Authorization
        encoded_credentials = base64.b64encode(expected_auth_credentials)
        expected_header_value = b"Basic " + encoded_credentials

        # Validate the header's value
        self.assertIn(expected_header_value, proxy_auth_header_values)
    else:
        # Check that the Proxy-Authorization header has not been supplied to
        # the proxy
        self.assertIsNone(proxy_auth_header_values)

    # tell the proxy server not to close the connection
    proxy_server.persistent = True

    request.finish()

    # now we make another test server to act as the upstream HTTP server.
    server_ssl_protocol = _wrap_server_factory_for_tls(
        _get_test_protocol_factory()
    ).buildProtocol(None)

    # Tell the HTTP server to send outgoing traffic back via the proxy's transport.
    proxy_server_transport = proxy_server.transport
    server_ssl_protocol.makeConnection(proxy_server_transport)

    # ... and replace the protocol on the proxy's transport with the
    # TLSMemoryBIOProtocol for the test server, so that incoming traffic
    # to the proxy gets sent over to the HTTP(s) server.
    #
    # This needs a bit of gut-wrenching, which is different depending on whether
    # the proxy is using TLS or not.
    #
    # (an alternative, possibly more elegant, approach would be to use a custom
    # Protocol to implement the proxy, which starts out by forwarding to an
    # HTTPChannel (to implement the CONNECT command) and can then be switched
    # into a mode where it forwards its traffic to another Protocol.)
    if expect_proxy_ssl:
        # proxy-over-TLS: swap the protocol wrapped by the proxy's TLS layer.
        assert isinstance(proxy_server_transport, TLSMemoryBIOProtocol)
        proxy_server_transport.wrappedProtocol = server_ssl_protocol
    else:
        # plain proxy: re-point the client-to-server FakeTransport's peer.
        assert isinstance(proxy_server_transport, FakeTransport)
        client_protocol = proxy_server_transport.other
        c2s_transport = client_protocol.transport
        c2s_transport.other = server_ssl_protocol

    self.reactor.advance(0)

    # The SNI presented inside the tunnel should be the target host.
    server_name = server_ssl_protocol._tlsConnection.get_servername()
    expected_sni = b"test.com"
    self.assertEqual(
        server_name,
        expected_sni,
        f"Expected SNI {expected_sni!s} but got {server_name!s}",
    )

    # now there should be a pending request
    http_server = server_ssl_protocol.wrappedProtocol
    self.assertEqual(len(http_server.requests), 1)

    request = http_server.requests[0]
    self.assertEqual(request.method, b"GET")
    self.assertEqual(request.path, b"/abc")
    self.assertEqual(request.requestHeaders.getRawHeaders(b"host"), [b"test.com"])

    # Check that the destination server DID NOT receive proxy credentials
    proxy_auth_header_values = request.requestHeaders.getRawHeaders(
        b"Proxy-Authorization"
    )
    self.assertIsNone(proxy_auth_header_values)

    request.write(b"result")
    request.finish()

    self.reactor.advance(0)

    resp = self.successResultOf(d)
    body = self.successResultOf(treq.content(resp))
    self.assertEqual(body, b"result")
def _do_http_request_via_proxy(
    self,
    expect_proxy_ssl: bool = False,
    expected_auth_credentials: Optional[bytes] = None,
):
    """Send a http request via an agent and check that it is correctly
    received at the proxy. The proxy can use either http or https.

    Args:
        expect_proxy_ssl: True if we expect the request to connect via https
            to proxy
        expected_auth_credentials: credentials to authenticate at proxy
    """
    if expect_proxy_ssl:
        agent = ProxyAgent(
            self.reactor, use_proxy=True, contextFactory=get_test_https_policy()
        )
    else:
        agent = ProxyAgent(self.reactor, use_proxy=True)

    self.reactor.lookups["proxy.com"] = "1.2.3.5"
    d = agent.request(b"GET", b"http://test.com")

    # there should be a pending TCP connection
    clients = self.reactor.tcpClients
    self.assertEqual(len(clients), 1)
    (host, port, client_factory, _timeout, _bindAddress) = clients[0]
    self.assertEqual(host, "1.2.3.5")
    self.assertEqual(port, 8888)

    # make a test server, and wire up the client
    http_server = self._make_connection(
        client_factory,
        _get_test_protocol_factory(),
        ssl=expect_proxy_ssl,
        tls_sanlist=[b"DNS:proxy.com"] if expect_proxy_ssl else None,
        expected_sni=b"proxy.com" if expect_proxy_ssl else None,
    )

    # the FakeTransport is async, so we need to pump the reactor
    self.reactor.advance(0)

    # now there should be a pending request
    self.assertEqual(len(http_server.requests), 1)

    request = http_server.requests[0]

    # Check whether auth credentials have been supplied to the proxy
    proxy_auth_header_values = request.requestHeaders.getRawHeaders(
        b"Proxy-Authorization"
    )

    if expected_auth_credentials is not None:
        # Compute the correct header value for Proxy-Authorization
        encoded_credentials = base64.b64encode(expected_auth_credentials)
        expected_header_value = b"Basic " + encoded_credentials

        # Validate the header's value
        self.assertIn(expected_header_value, proxy_auth_header_values)
    else:
        # Check that the Proxy-Authorization header has not been supplied to
        # the proxy
        self.assertIsNone(proxy_auth_header_values)

    # The proxy sees the absolute-form request target, not just the path.
    self.assertEqual(request.method, b"GET")
    self.assertEqual(request.path, b"http://test.com")
    self.assertEqual(request.requestHeaders.getRawHeaders(b"host"), [b"test.com"])
    request.write(b"result")
    request.finish()

    self.reactor.advance(0)

    resp = self.successResultOf(d)
    body = self.successResultOf(treq.content(resp))
    self.assertEqual(body, b"result")
def __init__(
    self,
    hs,
    treq_args=None,
    ip_whitelist=None,
    ip_blacklist=None,
    http_proxy=None,
    https_proxy=None,
):
    """
    Args:
        hs (synapse.server.HomeServer)
        treq_args (dict): Extra keyword arguments to be given to treq.request.
        ip_blacklist (netaddr.IPSet): The IP addresses that are blacklisted that
            we may not request.
        ip_whitelist (netaddr.IPSet): The whitelisted IP addresses, that we can
            request if it were otherwise caught in a blacklist.
        http_proxy (bytes): proxy server to use for http connections. host[:port]
        https_proxy (bytes): proxy server to use for https connections. host[:port]
    """
    self.hs = hs

    self._ip_whitelist = ip_whitelist
    self._ip_blacklist = ip_blacklist
    # NB: default was previously a mutable `{}` shared across all instances;
    # use a None sentinel and allocate a fresh dict per instance instead.
    self._extra_treq_args = treq_args if treq_args is not None else {}

    self.user_agent = hs.version_string
    self.clock = hs.get_clock()
    if hs.config.user_agent_suffix:
        self.user_agent = "%s %s" % (self.user_agent, hs.config.user_agent_suffix)

    self.user_agent = self.user_agent.encode("ascii")

    if self._ip_blacklist:
        real_reactor = hs.get_reactor()
        # If we have an IP blacklist, we need to use a DNS resolver which
        # filters out blacklisted IP addresses, to prevent DNS rebinding.
        nameResolver = IPBlacklistingResolver(
            real_reactor, self._ip_whitelist, self._ip_blacklist
        )

        # Proxy object exposing the filtering name resolver while delegating
        # every other attribute to the real reactor.
        @implementer(IReactorPluggableNameResolver)
        class Reactor(object):
            def __getattr__(_self, attr):
                if attr == "nameResolver":
                    return nameResolver
                else:
                    return getattr(real_reactor, attr)

        self.reactor = Reactor()
    else:
        self.reactor = hs.get_reactor()

    # the pusher makes lots of concurrent SSL connections to sygnal, and
    # tends to do so in batches, so we need to allow the pool to keep
    # lots of idle connections around.
    pool = HTTPConnectionPool(self.reactor)
    # XXX: The justification for using the cache factor here is that larger
    # instances will need both more cache and more connections.
    # Still, this should probably be a separate dial
    pool.maxPersistentPerHost = max((100 * hs.config.caches.global_factor, 5))
    pool.cachedConnectionTimeout = 2 * 60

    self.agent = ProxyAgent(
        self.reactor,
        connectTimeout=15,
        contextFactory=self.hs.get_http_client_context_factory(),
        pool=pool,
        http_proxy=http_proxy,
        https_proxy=https_proxy,
    )

    if self._ip_blacklist:
        # If we have an IP blacklist, we then install the blacklisting Agent
        # which prevents direct access to IP addresses, that are not caught
        # by the DNS resolution.
        self.agent = BlacklistingAgentWrapper(
            self.agent,
            self.reactor,
            ip_whitelist=self._ip_whitelist,
            ip_blacklist=self._ip_blacklist,
        )
def __init__(
    self,
    hs: "HomeServer",
    treq_args: Optional[Dict[str, Any]] = None,
    ip_whitelist: Optional[IPSet] = None,
    ip_blacklist: Optional[IPSet] = None,
    http_proxy: Optional[bytes] = None,
    https_proxy: Optional[bytes] = None,
):
    """
    Args:
        hs
        treq_args: Extra keyword arguments to be given to treq.request.
        ip_blacklist: The IP addresses that are blacklisted that we may not
            request.
        ip_whitelist: The whitelisted IP addresses, that we can request if it
            were otherwise caught in a blacklist.
        http_proxy: proxy server to use for http connections. host[:port]
        https_proxy: proxy server to use for https connections. host[:port]
    """
    self.hs = hs

    self._ip_whitelist = ip_whitelist
    self._ip_blacklist = ip_blacklist
    # NB: default was previously a mutable `{}` shared across all instances;
    # use a None sentinel and allocate a fresh dict per instance instead.
    self._extra_treq_args = treq_args if treq_args is not None else {}

    self.user_agent = hs.version_string
    self.clock = hs.get_clock()
    if hs.config.user_agent_suffix:
        self.user_agent = "%s %s" % (self.user_agent, hs.config.user_agent_suffix)

    # We use this for our body producers to ensure that they use the correct
    # reactor.
    self._cooperator = Cooperator(scheduler=_make_scheduler(hs.get_reactor()))

    self.user_agent = self.user_agent.encode("ascii")

    if self._ip_blacklist:
        # If we have an IP blacklist, we need to use a DNS resolver which
        # filters out blacklisted IP addresses, to prevent DNS rebinding.
        self.reactor = BlacklistingReactorWrapper(
            hs.get_reactor(), self._ip_whitelist, self._ip_blacklist
        )
    else:
        self.reactor = hs.get_reactor()

    # the pusher makes lots of concurrent SSL connections to sygnal, and
    # tends to do so in batches, so we need to allow the pool to keep
    # lots of idle connections around.
    pool = HTTPConnectionPool(self.reactor)
    # XXX: The justification for using the cache factor here is that larger
    # instances will need both more cache and more connections.
    # Still, this should probably be a separate dial
    pool.maxPersistentPerHost = max((100 * hs.config.caches.global_factor, 5))
    pool.cachedConnectionTimeout = 2 * 60

    self.agent = ProxyAgent(
        self.reactor,
        hs.get_reactor(),
        connectTimeout=15,
        contextFactory=self.hs.get_http_client_context_factory(),
        pool=pool,
        http_proxy=http_proxy,
        https_proxy=https_proxy,
    )

    if self._ip_blacklist:
        # If we have an IP blacklist, we then install the blacklisting Agent
        # which prevents direct access to IP addresses, that are not caught
        # by the DNS resolution.
        self.agent = BlacklistingAgentWrapper(
            self.agent,
            ip_whitelist=self._ip_whitelist,
            ip_blacklist=self._ip_blacklist,
        )
def test_http_request(self):
    """A plain http request with no proxy configured connects directly."""
    plain_agent = ProxyAgent(self.reactor)
    self._test_request_direct_connection(plain_agent, b"http", b"test.com", b"")