def requestAvatarId(self, c):
    """Authenticate credentials against the Swift auth endpoint.

    Returns a Deferred that fires with the avatar id on success, or a
    Deferred failing with error.UnauthorizedLogin when the credentials
    object cannot be adapted to IUsernamePassword.
    """
    # Adapt to IUsernamePassword; None is the alternate (no-adapter) result.
    creds = credentials.IUsernamePassword(c, None)
    if creds is not None:
        locks = []
        # Pool is created non-persistent; switched to persistent below only
        # when a per-connection concurrency cap is configured.
        pool = HTTPConnectionPool(reactor, persistent=False)
        pool.cachedConnectionTimeout = self.timeout
        if self.max_concurrency:
            pool.persistent = True
            pool.maxPersistentPerHost = self.max_concurrency
            locks.append(defer.DeferredSemaphore(self.max_concurrency))
        if self.global_max_concurrency:
            locks.append(
                defer.DeferredSemaphore(self.global_max_concurrency))
        conn = ThrottledSwiftConnection(locks, self.auth_url, creds.username,
                                        creds.password, pool=pool,
                                        extra_headers=self.extra_headers,
                                        verbose=self.verbose)
        conn.user_agent = USER_AGENT
        d = conn.authenticate()
        # On success hand the live connection to _after_auth; on failure
        # translate the error via eb_failed_auth.
        d.addCallback(self._after_auth, conn)
        d.addErrback(eb_failed_auth)
        return d
    return defer.fail(error.UnauthorizedLogin())
def __init__(self, hs):
    """Build an HTTP client around a shared connection pool.

    Args:
        hs: the HomeServer, used for the context factory, clock,
            version string and user-agent configuration.
    """
    self.hs = hs

    pool = HTTPConnectionPool(reactor)

    # the pusher makes lots of concurrent SSL connections to sygnal, and
    # tends to do so in batches, so we need to allow the pool to keep lots
    # of idle connections around.
    pool.maxPersistentPerHost = max((100 * CACHE_SIZE_FACTOR, 5))
    pool.cachedConnectionTimeout = 2 * 60

    # The default context factory in Twisted 14.0.0 (which we require) is
    # BrowserLikePolicyForHTTPS which will do regular cert validation
    # 'like a browser'
    self.agent = Agent(
        reactor,
        connectTimeout=15,
        contextFactory=hs.get_http_client_context_factory(),
        pool=pool,
    )

    self.user_agent = hs.version_string
    self.clock = hs.get_clock()
    if hs.config.user_agent_suffix:
        self.user_agent = "%s %s" % (
            self.user_agent, hs.config.user_agent_suffix,
        )

    # Headers want bytes, not str.
    self.user_agent = self.user_agent.encode('ascii')
def requestAvatarId(self, c):
    """Authenticate credentials against the Swift auth endpoint.

    Variant that also forwards the ceph_compatible flag to the
    connection. Returns a Deferred firing with the avatar id, or a
    failed Deferred (UnauthorizedLogin) when the credentials cannot be
    adapted to IUsernamePassword.
    """
    creds = credentials.IUsernamePassword(c, None)
    if creds is not None:
        locks = []
        # Non-persistent by default; flipped to persistent when a
        # per-connection concurrency limit is set.
        pool = HTTPConnectionPool(reactor, persistent=False)
        pool.cachedConnectionTimeout = self.timeout
        if self.max_concurrency:
            pool.persistent = True
            pool.maxPersistentPerHost = self.max_concurrency
            locks.append(
                defer.DeferredSemaphore(self.max_concurrency))
        if self.global_max_concurrency:
            locks.append(
                defer.DeferredSemaphore(self.global_max_concurrency))
        conn = ThrottledSwiftConnection(
            locks, self.auth_url, creds.username, creds.password,
            pool=pool,
            extra_headers=self.extra_headers,
            verbose=self.verbose,
            ceph_compatible=self.ceph_compatible
        )
        conn.user_agent = USER_AGENT
        d = conn.authenticate()
        d.addCallback(self._after_auth, conn)
        d.addErrback(eb_failed_auth)
        return d
    return defer.fail(error.UnauthorizedLogin())
def makeService(options):
    """
    Makes a new swftp-ftp service. The only option is the config file
    location. The config file has the following options:
     - host
     - port
     - auth_url
     - num_persistent_connections
     - connection_timeout
     - welcome_message
    """
    from twisted.protocols.ftp import FTPFactory
    from twisted.web.client import HTTPConnectionPool
    from twisted.cred.portal import Portal

    from swftp.ftp.server import SwiftFTPRealm
    from swftp.auth import SwiftBasedAuthDB
    from swftp.utils import print_runtime_info

    print('Starting SwFTP-ftp %s' % VERSION)

    c = get_config(options['config_file'], options)
    ftp_service = service.MultiService()

    # Add statsd service (optional; requires the txstatsd package)
    if c.get('ftp', 'log_statsd_host'):
        try:
            from swftp.statsd import makeService as makeStatsdService
            makeStatsdService(
                c.get('ftp', 'log_statsd_host'),
                c.getint('ftp', 'log_statsd_port'),
                sample_rate=c.getfloat('ftp', 'log_statsd_sample_rate'),
                prefix=c.get('ftp', 'log_statsd_metric_prefix')
            ).setServiceParent(ftp_service)
        except ImportError:
            log.err('Missing Statsd Module. Requires "txstatsd"')

    # Shared persistent pool for Swift backend requests.
    pool = HTTPConnectionPool(reactor, persistent=True)
    pool.maxPersistentPerHost = c.getint('ftp', 'num_persistent_connections')
    pool.cachedConnectionTimeout = c.getint('ftp', 'connection_timeout')

    authdb = SwiftBasedAuthDB(auth_url=c.get('ftp', 'auth_url'),
                              verbose=c.getboolean('ftp', 'verbose'))

    ftpportal = Portal(SwiftFTPRealm())
    ftpportal.registerChecker(authdb)
    ftpfactory = FTPFactory(ftpportal)
    ftpfactory.welcomeMessage = c.get('ftp', 'welcome_message')
    ftpfactory.allowAnonymous = False

    # SIGUSR1/SIGUSR2 both dump runtime diagnostics.
    signal.signal(signal.SIGUSR1, print_runtime_info)
    signal.signal(signal.SIGUSR2, print_runtime_info)

    internet.TCPServer(
        c.getint('ftp', 'port'),
        ftpfactory,
        interface=c.get('ftp', 'host')).setServiceParent(ftp_service)

    return ftp_service
def _getConnectionPool(self):
    """Build a fresh HTTPConnectionPool configured from this instance."""
    connection_pool = HTTPConnectionPool(reactor, self._persistent)
    if not self._persistent:
        # Non-persistent pools take no extra tuning.
        return connection_pool
    # Persistent pools honour the knobs configured on self.
    connection_pool.maxPersistentPerHost = self._maxPersistentPerHost
    connection_pool.cachedConnectionTimeout = self._cachedConnectionTimeout
    connection_pool.retryAutomatically = self._retryAutomatically
    return connection_pool
def _getConnectionPool(self):
    """Return a new connection pool; apply tuning only when persistent."""
    pool = HTTPConnectionPool(reactor, self._persistent)
    if self._persistent:
        # Copy the persistence tuning parameters onto the fresh pool.
        # (Attribute assignments are independent; order is irrelevant.)
        pool.retryAutomatically = self._retryAutomatically
        pool.cachedConnectionTimeout = self._cachedConnectionTimeout
        pool.maxPersistentPerHost = self._maxPersistentPerHost
    return pool
def __init__(self, hs):
    """Set up the federation HTTP client from the HomeServer config."""
    self.hs = hs
    self.signing_key = hs.config.signing_key[0]
    self.server_name = hs.hostname
    pool = HTTPConnectionPool(reactor)
    pool.maxPersistentPerHost = 5
    pool.cachedConnectionTimeout = 2 * 60
    # Endpoint factory performs the Matrix federation server lookup.
    self.agent = Agent.usingEndpointFactory(
        reactor, MatrixFederationEndpointFactory(hs), pool=pool)
    self.clock = hs.get_clock()
    self.version_string = hs.version_string
    # Monotonically increasing id used to correlate requests in logs.
    self._next_id = 1
def __init__(self, debug=False):
    """Create an MPEx client.

    Args:
        debug: when true, open a timestamped dump file for request/response
            logging.
    """
    self.gpg = gnupg.GPG()
    self.mpex_url = 'http://polimedia.us/bitcoin/mpex.php'
    self.mpex_fingerprint = 'F1B69921'
    self.passphrase = None
    self.debug = debug
    # Idiomatic condition (was `if(self.debug) :`).
    if self.debug:
        # NOTE(review): this handle is kept open for the object's lifetime
        # and never explicitly closed — confirm that is intended.
        self.df = open("mpex_%d.txt" % time.time(), 'w')
    pool = HTTPConnectionPool(reactor)
    # close connections at same time as server to prevent
    # ResponseNeverReceived error; timeout can be determined automatically
    # from Keep-Alive header
    pool.cachedConnectionTimeout = 4
    self.agent = Agent(reactor, pool=pool)
def _get_agent():
    """Return a Twisted Agent, preferring a persistent connection pool.

    Falls back to the bundled ZenAgent when running on a Twisted release
    without HTTPConnectionPool.
    """
    context_factory = MyWebClientContextFactory()
    try:
        # HTTPConnectionPool has been present since Twisted version 12.1
        from twisted.web.client import HTTPConnectionPool
        pool = HTTPConnectionPool(reactor, persistent=True)
        pool.maxPersistentPerHost = _MAX_PERSISTENT_PER_HOST
        pool.cachedConnectionTimeout = _CACHED_CONNECTION_TIMEOUT
        agent = Agent(reactor, context_factory,
                      connectTimeout=_CONNECT_TIMEOUT, pool=pool)
    except ImportError:
        # Older Twisted: use the compatibility agent instead.
        from _zenclient import ZenAgent
        agent = ZenAgent(reactor, context_factory, persistent=True,
                         maxConnectionsPerHostName=1)
    return agent
def __init__(self, hs):
    """Set up the federation HTTP client from the HomeServer config."""
    self.hs = hs
    self.signing_key = hs.config.signing_key[0]
    self.server_name = hs.hostname
    pool = HTTPConnectionPool(reactor)
    pool.maxPersistentPerHost = 5
    pool.cachedConnectionTimeout = 2 * 60
    # Endpoint factory performs the Matrix federation server lookup.
    self.agent = Agent.usingEndpointFactory(
        reactor, MatrixFederationEndpointFactory(hs), pool=pool
    )
    self.clock = hs.get_clock()
    self._store = hs.get_datastore()
    self.version_string = hs.version_string
    # Monotonically increasing id used to correlate requests in logs.
    self._next_id = 1
def _get_agent():
    """Return a Twisted Agent with a persistent pool when available.

    On Twisted releases predating HTTPConnectionPool, fall back to the
    bundled ZenAgent.
    """
    context_factory = MyWebClientContextFactory()
    try:
        # HTTPConnectionPool has been present since Twisted version 12.1
        from twisted.web.client import HTTPConnectionPool
        pool = HTTPConnectionPool(reactor, persistent=True)
        pool.maxPersistentPerHost = _MAX_PERSISTENT_PER_HOST
        pool.cachedConnectionTimeout = _CACHED_CONNECTION_TIMEOUT
        agent = Agent(reactor, context_factory,
                      connectTimeout=_CONNECT_TIMEOUT, pool=pool)
    except ImportError:
        # Older Twisted: use the compatibility agent instead.
        from _zenclient import ZenAgent
        agent = ZenAgent(reactor, context_factory, persistent=True,
                         maxConnectionsPerHostName=1)
    return agent
def agent(self, scheme='http'):
    """Return the cached Agent for *scheme*, building the cache on first use.

    Honours the process's http/https proxy environment settings.
    """
    if not self._agents:
        pool = HTTPConnectionPool(reactor)
        pool.maxPersistentPerHost = 10
        pool.cachedConnectionTimeout = 15
        contextFactory = PermissiveBrowserLikePolicyForHTTPS()
        proxies = getproxies()
        if 'http' in proxies or 'https' in proxies:
            # I've noticed some intermittent failures (ResponseNeverReceived) to
            # POST request through a proxy when persistent connections are enabled.
            pool.persistent = False

        if 'https' in proxies:
            proxy = urlparse(proxies.get('https'))
            # NOTE(review): if urlparse yields a falsy result here, no
            # 'https' agent is cached and the final lookup would raise
            # KeyError — confirm whether that case can occur.
            if proxy:
                # Note- this isn't going to work completely. It's not being
                # passed the modified contextFactory, and in fact it doesn't
                # even work properly for other reasons (ZPS-2061)
                log.info("Creating https proxy (%s:%s)"
                         % (proxy.hostname, proxy.port))
                endpoint = TCP4ClientEndpoint(
                    reactor, proxy.hostname, proxy.port,
                    timeout=CONNECT_TIMEOUT)
                SessionManager._agents['https'] = \
                    ProxyAgent(endpoint, reactor, pool=pool)
        else:
            SessionManager._agents['https'] = \
                Agent(reactor, pool=pool, connectTimeout=CONNECT_TIMEOUT,
                      contextFactory=contextFactory)

        if 'http' in proxies:
            proxy = urlparse(proxies.get('http'))
            if proxy:
                log.info("Creating http proxy (%s:%s)"
                         % (proxy.hostname, proxy.port))
                endpoint = TCP4ClientEndpoint(
                    reactor, proxy.hostname, proxy.port,
                    timeout=CONNECT_TIMEOUT)
                SessionManager._agents['http'] = \
                    ProxyAgent(endpoint, reactor, pool=pool)
        else:
            SessionManager._agents['http'] = \
                Agent(reactor, pool=pool, connectTimeout=CONNECT_TIMEOUT)

    return SessionManager._agents[scheme]
def __init__(self, hs):
    """Set up the federation HTTP client from the HomeServer config."""
    self.hs = hs
    self.signing_key = hs.config.signing_key[0]
    self.server_name = hs.hostname
    reactor = hs.get_reactor()
    pool = HTTPConnectionPool(reactor)
    # Retries are handled at a higher level, not by the pool.
    pool.retryAutomatically = False
    pool.maxPersistentPerHost = 5
    pool.cachedConnectionTimeout = 2 * 60
    self.agent = Agent.usingEndpointFactory(
        reactor, MatrixFederationEndpointFactory(hs), pool=pool)
    self.clock = hs.get_clock()
    self._store = hs.get_datastore()
    # Headers want bytes, not str.
    self.version_string = hs.version_string.encode('ascii')
    self._next_id = 1
    # Default request timeout, in seconds.
    self.default_timeout = 60
def __init__(self, reactor):
    """Set up the HLS client state and its HTTP machinery."""
    self.reactor = reactor
    # One persistent connection per host, kept alive up to 10 minutes.
    pool = HTTPConnectionPool(reactor, persistent=True)
    pool.maxPersistentPerHost = 1
    pool.cachedConnectionTimeout = 600
    self.agent = RedirectAgent(Agent(reactor, pool=pool))
    self.reqQ = HttpReqQ(self.agent, self.reactor)
    self.clientPlaylist = HlsPlaylist()
    self.verbose = False
    self.download = False
    self.outDir = ""
    self.encryptionHandled = False

    # required for the dump durations functionality
    self.dur_dump_file = None
    self.dur_avproble_acc = 0
    self.dur_vt_acc = 0
    self.dur_playlist_acc = 0
def _get_agent():
    """Lazily create and cache the module-level Twisted Agent.

    Degrades gracefully on older Twisted releases that lack
    HTTPConnectionPool or the connectTimeout argument.
    """
    global _AGENT
    if _AGENT is None:
        try:
            # HTTPConnectionPool has been present since Twisted version 12.1
            from twisted.web.client import HTTPConnectionPool
            pool = HTTPConnectionPool(reactor, persistent=True)
            pool.maxPersistentPerHost = _MAX_PERSISTENT_PER_HOST
            pool.cachedConnectionTimeout = _CACHED_CONNECTION_TIMEOUT
            _AGENT = Agent(
                reactor, connectTimeout=_CONNECT_TIMEOUT, pool=pool)
        except ImportError:
            try:
                # connectTimeout first showed up in Twisted version 11.1
                _AGENT = Agent(reactor, connectTimeout=_CONNECT_TIMEOUT)
            except TypeError:
                _AGENT = Agent(reactor)
    return _AGENT
def __init__(self, reactor):
    """Initialise the HLS client: HTTP agent, request queue and state."""
    self.reactor = reactor

    # Serve everything over a single cached connection per host.
    connection_pool = HTTPConnectionPool(reactor, persistent=True)
    connection_pool.maxPersistentPerHost = 1
    connection_pool.cachedConnectionTimeout = 600
    self.agent = RedirectAgent(Agent(reactor, pool=connection_pool))
    self.reqQ = HttpReqQ(self.agent, self.reactor)

    self.clientPlaylist = HlsPlaylist()
    self.verbose = False
    self.download = False
    self.outDir = ""
    self.encryptionHandled = False

    # required for the dump durations functionality
    self.dur_dump_file = None
    self.dur_avproble_acc = 0
    self.dur_vt_acc = 0
    self.dur_playlist_acc = 0
def _get_agent():
    """Lazily create and cache the module-level Twisted Agent.

    Degrades gracefully on older Twisted releases that lack
    HTTPConnectionPool or the connectTimeout argument.
    """
    global _AGENT
    if _AGENT is None:
        context_factory = MyWebClientContextFactory()
        try:
            # HTTPConnectionPool has been present since Twisted version 12.1
            from twisted.web.client import HTTPConnectionPool
            pool = HTTPConnectionPool(reactor, persistent=True)
            pool.maxPersistentPerHost = _MAX_PERSISTENT_PER_HOST
            pool.cachedConnectionTimeout = _CACHED_CONNECTION_TIMEOUT
            _AGENT = Agent(reactor, context_factory,
                           connectTimeout=_CONNECT_TIMEOUT, pool=pool)
        except ImportError:
            try:
                # connectTimeout first showed up in Twisted version 11.1
                _AGENT = Agent(
                    reactor, context_factory,
                    connectTimeout=_CONNECT_TIMEOUT)
            except TypeError:
                _AGENT = Agent(reactor, context_factory)
    return _AGENT
def __init__(self, hs):
    """Set up the federation HTTP client from the HomeServer config."""
    self.hs = hs
    self.signing_key = hs.config.signing_key[0]
    self.server_name = hs.hostname
    reactor = hs.get_reactor()
    pool = HTTPConnectionPool(reactor)
    # Retries are handled at a higher level, not by the pool.
    pool.retryAutomatically = False
    pool.maxPersistentPerHost = 5
    pool.cachedConnectionTimeout = 2 * 60
    self.agent = Agent.usingEndpointFactory(
        reactor, MatrixFederationEndpointFactory(hs), pool=pool
    )
    self.clock = hs.get_clock()
    self._store = hs.get_datastore()
    # Headers want bytes, not str.
    self.version_string_bytes = hs.version_string.encode('ascii')
    # Default request timeout, in seconds.
    self.default_timeout = 60

    def schedule(x):
        # Defer scheduled work slightly so it doesn't starve the reactor.
        reactor.callLater(_EPSILON, x)

    self._cooperator = Cooperator(scheduler=schedule)
def agent(self, scheme='http'):
    """Return the cached Agent for *scheme*, building the cache on first use.

    Honours the process's http/https proxy environment settings.
    """
    if not self._agents:
        pool = HTTPConnectionPool(reactor)
        pool.maxPersistentPerHost = 10
        pool.cachedConnectionTimeout = 15
        contextFactory = PermissiveBrowserLikePolicyForHTTPS()
        proxies = getproxies()
        if 'http' in proxies or 'https' in proxies:
            # I've noticed some intermittent failures (ResponseNeverReceived) to
            # POST request through a proxy when persistent connections are enabled.
            pool.persistent = False

        if 'https' in proxies:
            proxy = urlparse(proxies.get('https'))
            # NOTE(review): if urlparse yields a falsy result here, no
            # 'https' agent is cached and the final lookup would raise
            # KeyError — confirm whether that case can occur.
            if proxy:
                # Note- this isn't going to work completely. It's not being
                # passed the modified contextFactory, and in fact it doesn't
                # even work properly for other reasons (ZPS-2061)
                log.info("Creating https proxy (%s:%s)"
                         % (proxy.hostname, proxy.port))
                endpoint = TCP4ClientEndpoint(
                    reactor, proxy.hostname, proxy.port,
                    timeout=CONNECT_TIMEOUT)
                SessionManager._agents['https'] = \
                    ProxyAgent(endpoint, reactor, pool=pool)
        else:
            SessionManager._agents['https'] = \
                Agent(reactor, pool=pool, connectTimeout=CONNECT_TIMEOUT,
                      contextFactory=contextFactory)

        if 'http' in proxies:
            proxy = urlparse(proxies.get('http'))
            if proxy:
                log.info("Creating http proxy (%s:%s)"
                         % (proxy.hostname, proxy.port))
                endpoint = TCP4ClientEndpoint(
                    reactor, proxy.hostname, proxy.port,
                    timeout=CONNECT_TIMEOUT)
                SessionManager._agents['http'] = \
                    ProxyAgent(endpoint, reactor, pool=pool)
        else:
            SessionManager._agents['http'] = \
                Agent(reactor, pool=pool, connectTimeout=CONNECT_TIMEOUT)

    return SessionManager._agents[scheme]
from twisted.web.client import getPage from twisted.internet import reactor from twisted.internet.defer import Deferred from twisted.internet.protocol import Protocol from twisted.web.client import Agent, ContentDecoderAgent, RedirectAgent, GzipDecoder from twisted.web.client import HTTPConnectionPool from twisted.web.http_headers import Headers from twisted.internet.ssl import ClientContextFactory from twisted.internet.task import LoopingCall pnconn_pool = HTTPConnectionPool(reactor, persistent=True) pnconn_pool.maxPersistentPerHost = 100 pnconn_pool.cachedConnectionTimeout = 310 class Pubnub(PubnubCoreAsync): def start(self): reactor.run() def stop(self): reactor.stop() def timeout( self, delay, callback ): reactor.callLater( delay, callback ) def __init__( self, publish_key, subscribe_key, secret_key = False, cipher_key = False, ssl_on = False, origin = 'pubsub.pubnub.com' ) : super(Pubnub, self).__init__(
def __init__(self, hs, treq_args={}, ip_whitelist=None, ip_blacklist=None):
    """
    Args:
        hs (synapse.server.HomeServer)
        treq_args (dict): Extra keyword arguments to be given to treq.request.
        ip_blacklist (netaddr.IPSet): The IP addresses that are blacklisted
            that we may not request.
        ip_whitelist (netaddr.IPSet): The whitelisted IP addresses, that we
            can request if it were otherwise caught in a blacklist.
    """
    # NOTE(review): `treq_args={}` is a mutable default argument; it appears
    # to be read-only here, but confirm it is never mutated downstream.
    self.hs = hs

    self._ip_whitelist = ip_whitelist
    self._ip_blacklist = ip_blacklist
    self._extra_treq_args = treq_args

    self.user_agent = hs.version_string
    self.clock = hs.get_clock()
    if hs.config.user_agent_suffix:
        self.user_agent = "%s %s" % (self.user_agent,
                                     hs.config.user_agent_suffix)
    # Headers want bytes, not str.
    self.user_agent = self.user_agent.encode('ascii')

    if self._ip_blacklist:
        real_reactor = hs.get_reactor()
        # If we have an IP blacklist, we need to use a DNS resolver which
        # filters out blacklisted IP addresses, to prevent DNS rebinding.
        nameResolver = IPBlacklistingResolver(
            real_reactor, self._ip_whitelist, self._ip_blacklist
        )

        @implementer(IReactorPluggableNameResolver)
        class Reactor(object):
            # Proxy that delegates everything to the real reactor except
            # name resolution, which goes through the filtering resolver.
            def __getattr__(_self, attr):
                if attr == "nameResolver":
                    return nameResolver
                else:
                    return getattr(real_reactor, attr)

        self.reactor = Reactor()
    else:
        self.reactor = hs.get_reactor()

    # the pusher makes lots of concurrent SSL connections to sygnal, and
    # tends to do so in batches, so we need to allow the pool to keep
    # lots of idle connections around.
    pool = HTTPConnectionPool(self.reactor)
    pool.maxPersistentPerHost = max((100 * CACHE_SIZE_FACTOR, 5))
    pool.cachedConnectionTimeout = 2 * 60

    # The default context factory in Twisted 14.0.0 (which we require) is
    # BrowserLikePolicyForHTTPS which will do regular cert validation
    # 'like a browser'
    self.agent = Agent(
        self.reactor,
        connectTimeout=15,
        contextFactory=self.hs.get_http_client_context_factory(),
        pool=pool,
    )

    if self._ip_blacklist:
        # If we have an IP blacklist, we then install the blacklisting Agent
        # which prevents direct access to IP addresses, that are not caught
        # by the DNS resolution.
        self.agent = BlacklistingAgentWrapper(
            self.agent, self.reactor,
            ip_whitelist=self._ip_whitelist,
            ip_blacklist=self._ip_blacklist,
        )
def __init__(self, hs, treq_args={}, ip_whitelist=None, ip_blacklist=None):
    """
    Args:
        hs (synapse.server.HomeServer)
        treq_args (dict): Extra keyword arguments to be given to treq.request.
        ip_blacklist (netaddr.IPSet): The IP addresses that are blacklisted
            that we may not request.
        ip_whitelist (netaddr.IPSet): The whitelisted IP addresses, that we
            can request if it were otherwise caught in a blacklist.
    """
    # NOTE(review): `treq_args={}` is a mutable default argument; it appears
    # to be read-only here, but confirm it is never mutated downstream.
    self.hs = hs

    self._ip_whitelist = ip_whitelist
    self._ip_blacklist = ip_blacklist
    self._extra_treq_args = treq_args

    self.user_agent = hs.version_string
    self.clock = hs.get_clock()
    if hs.config.user_agent_suffix:
        self.user_agent = "%s %s" % (self.user_agent,
                                     hs.config.user_agent_suffix)
    # Headers want bytes, not str.
    self.user_agent = self.user_agent.encode('ascii')

    if self._ip_blacklist:
        real_reactor = hs.get_reactor()
        # If we have an IP blacklist, we need to use a DNS resolver which
        # filters out blacklisted IP addresses, to prevent DNS rebinding.
        nameResolver = IPBlacklistingResolver(real_reactor,
                                              self._ip_whitelist,
                                              self._ip_blacklist)

        @implementer(IReactorPluggableNameResolver)
        class Reactor(object):
            # Proxy that delegates everything to the real reactor except
            # name resolution, which goes through the filtering resolver.
            def __getattr__(_self, attr):
                if attr == "nameResolver":
                    return nameResolver
                else:
                    return getattr(real_reactor, attr)

        self.reactor = Reactor()
    else:
        self.reactor = hs.get_reactor()

    # the pusher makes lots of concurrent SSL connections to sygnal, and
    # tends to do so in batches, so we need to allow the pool to keep
    # lots of idle connections around.
    pool = HTTPConnectionPool(self.reactor)
    pool.maxPersistentPerHost = max((100 * CACHE_SIZE_FACTOR, 5))
    pool.cachedConnectionTimeout = 2 * 60

    # The default context factory in Twisted 14.0.0 (which we require) is
    # BrowserLikePolicyForHTTPS which will do regular cert validation
    # 'like a browser'
    self.agent = Agent(
        self.reactor,
        connectTimeout=15,
        contextFactory=self.hs.get_http_client_context_factory(),
        pool=pool,
    )

    if self._ip_blacklist:
        # If we have an IP blacklist, we then install the blacklisting Agent
        # which prevents direct access to IP addresses, that are not caught
        # by the DNS resolution.
        self.agent = BlacklistingAgentWrapper(
            self.agent, self.reactor,
            ip_whitelist=self._ip_whitelist,
            ip_blacklist=self._ip_blacklist,
        )
from twisted.web.client import getPage from twisted.internet import reactor from twisted.internet.defer import Deferred from twisted.internet.protocol import Protocol from twisted.web.client import Agent, ContentDecoderAgent, RedirectAgent, GzipDecoder from twisted.web.client import HTTPConnectionPool from twisted.web.http_headers import Headers from twisted.internet.ssl import ClientContextFactory from twisted.internet.task import LoopingCall pnconn_pool = HTTPConnectionPool(reactor, persistent=True) pnconn_pool.maxPersistentPerHost = 100 pnconn_pool.cachedConnectionTimeout = 310 class Pubnub(PubnubCoreAsync): def start(self): reactor.run() def stop(self): reactor.stop() def timeout(self, delay, callback): reactor.callLater(delay, callback) def __init__(self, publish_key, subscribe_key, secret_key=False, cipher_key=False, ssl_on=False,
def makeService(options):
    """
    Makes a new swftp-sftp service. The only option is the config file
    location. The config file has the following options:
     - host
     - port
     - auth_url
     - num_persistent_connections
     - connection_timeout
     - pub_key
     - priv_key
    """
    from twisted.conch.ssh.factory import SSHFactory
    from twisted.conch.ssh.keys import Key
    from twisted.web.client import HTTPConnectionPool
    from twisted.cred.portal import Portal

    from swftp.sftp.server import SwiftSFTPRealm, SwiftSSHServerTransport
    from swftp.sftp.connection import SwiftConnection
    from swftp.auth import SwiftBasedAuthDB
    from swftp.utils import print_runtime_info

    c = get_config(options['config_file'], options)

    # Shared persistent pool for Swift backend requests.
    pool = HTTPConnectionPool(reactor, persistent=True)
    pool.maxPersistentPerHost = c.getint('sftp', 'num_persistent_connections')
    pool.cachedConnectionTimeout = c.getint('sftp', 'connection_timeout')

    sftp_service = service.MultiService()

    # ensure timezone is GMT
    os.environ['TZ'] = 'GMT'
    time.tzset()

    log.msg('Starting SwFTP-sftp %s' % VERSION)

    # Add statsd service (optional; requires the txstatsd package)
    if c.get('sftp', 'log_statsd_host'):
        try:
            from swftp.statsd import makeService as makeStatsdService
            makeStatsdService(
                c.get('sftp', 'log_statsd_host'),
                c.getint('sftp', 'log_statsd_port'),
                sample_rate=c.getfloat('sftp', 'log_statsd_sample_rate'),
                prefix=c.get('sftp', 'log_statsd_metric_prefix')
            ).setServiceParent(sftp_service)
        except ImportError:
            log.err('Missing Statsd Module. Requires "txstatsd"')

    authdb = SwiftBasedAuthDB(auth_url=c.get('sftp', 'auth_url'),
                              verbose=c.getboolean('sftp', 'verbose'))

    sftpportal = Portal(SwiftSFTPRealm())
    sftpportal.registerChecker(authdb)

    sshfactory = SSHFactory()
    sshfactory.protocol = SwiftSSHServerTransport
    sshfactory.noisy = False
    sshfactory.portal = sftpportal
    sshfactory.services['ssh-connection'] = SwiftConnection

    # Use open() instead of the Python-2-only file() builtin, and context
    # managers so the key files are closed instead of leaked.
    with open(c.get('sftp', 'pub_key')) as key_file:
        pub_key_string = key_file.read()
    with open(c.get('sftp', 'priv_key')) as key_file:
        priv_key_string = key_file.read()

    sshfactory.publicKeys = {
        'ssh-rsa': Key.fromString(data=pub_key_string)}
    sshfactory.privateKeys = {
        'ssh-rsa': Key.fromString(data=priv_key_string)}

    # SIGUSR1/SIGUSR2 both dump runtime diagnostics.
    signal.signal(signal.SIGUSR1, print_runtime_info)
    signal.signal(signal.SIGUSR2, print_runtime_info)

    internet.TCPServer(
        c.getint('sftp', 'port'),
        sshfactory,
        interface=c.get('sftp', 'host')).setServiceParent(sftp_service)

    return sftp_service
def __init__(
    self,
    hs: "HomeServer",
    treq_args: Optional[Dict[str, Any]] = None,
    ip_whitelist: Optional[IPSet] = None,
    ip_blacklist: Optional[IPSet] = None,
    http_proxy: Optional[bytes] = None,
    https_proxy: Optional[bytes] = None,
):
    """
    Args:
        hs
        treq_args: Extra keyword arguments to be given to treq.request.
        ip_blacklist: The IP addresses that are blacklisted that we may
            not request.
        ip_whitelist: The whitelisted IP addresses, that we can request
            if it were otherwise caught in a blacklist.
        http_proxy: proxy server to use for http connections. host[:port]
        https_proxy: proxy server to use for https connections. host[:port]
    """
    self.hs = hs

    self._ip_whitelist = ip_whitelist
    self._ip_blacklist = ip_blacklist
    # Avoid the mutable-default-argument pitfall: default to a fresh dict
    # per instance rather than one shared `{}` across all instances.
    self._extra_treq_args = treq_args if treq_args is not None else {}

    self.user_agent = hs.version_string
    self.clock = hs.get_clock()
    if hs.config.user_agent_suffix:
        self.user_agent = "%s %s" % (self.user_agent, hs.config.user_agent_suffix)

    # We use this for our body producers to ensure that they use the correct
    # reactor.
    self._cooperator = Cooperator(
        scheduler=_make_scheduler(hs.get_reactor()))

    # Headers want bytes, not str.
    self.user_agent = self.user_agent.encode("ascii")

    if self._ip_blacklist:
        # If we have an IP blacklist, we need to use a DNS resolver which
        # filters out blacklisted IP addresses, to prevent DNS rebinding.
        self.reactor = BlacklistingReactorWrapper(
            hs.get_reactor(), self._ip_whitelist, self._ip_blacklist)
    else:
        self.reactor = hs.get_reactor()

    # the pusher makes lots of concurrent SSL connections to sygnal, and
    # tends to do so in batches, so we need to allow the pool to keep
    # lots of idle connections around.
    pool = HTTPConnectionPool(self.reactor)
    # XXX: The justification for using the cache factor here is that larger
    # instances will need both more cache and more connections.
    # Still, this should probably be a separate dial
    pool.maxPersistentPerHost = max(
        (100 * hs.config.caches.global_factor, 5))
    pool.cachedConnectionTimeout = 2 * 60

    self.agent = ProxyAgent(
        self.reactor,
        hs.get_reactor(),
        connectTimeout=15,
        contextFactory=self.hs.get_http_client_context_factory(),
        pool=pool,
        http_proxy=http_proxy,
        https_proxy=https_proxy,
    )

    if self._ip_blacklist:
        # If we have an IP blacklist, we then install the blacklisting Agent
        # which prevents direct access to IP addresses, that are not caught
        # by the DNS resolution.
        self.agent = BlacklistingAgentWrapper(
            self.agent,
            ip_whitelist=self._ip_whitelist,
            ip_blacklist=self._ip_blacklist,
        )
def __init__(
    self,
    hs,
    treq_args={},
    ip_whitelist=None,
    ip_blacklist=None,
    http_proxy=None,
    https_proxy=None,
):
    """
    Args:
        hs (synapse.server.HomeServer)
        treq_args (dict): Extra keyword arguments to be given to treq.request.
        ip_blacklist (netaddr.IPSet): The IP addresses that are blacklisted
            that we may not request.
        ip_whitelist (netaddr.IPSet): The whitelisted IP addresses, that we
            can request if it were otherwise caught in a blacklist.
        http_proxy (bytes): proxy server to use for http connections.
            host[:port]
        https_proxy (bytes): proxy server to use for https connections.
            host[:port]
    """
    # NOTE(review): `treq_args={}` is a mutable default argument; it appears
    # to be read-only here, but confirm it is never mutated downstream.
    self.hs = hs

    self._ip_whitelist = ip_whitelist
    self._ip_blacklist = ip_blacklist
    self._extra_treq_args = treq_args

    self.user_agent = hs.version_string
    self.clock = hs.get_clock()
    if hs.config.user_agent_suffix:
        self.user_agent = "%s %s" % (self.user_agent,
                                     hs.config.user_agent_suffix)

    # Headers want bytes, not str.
    self.user_agent = self.user_agent.encode("ascii")

    if self._ip_blacklist:
        real_reactor = hs.get_reactor()
        # If we have an IP blacklist, we need to use a DNS resolver which
        # filters out blacklisted IP addresses, to prevent DNS rebinding.
        nameResolver = IPBlacklistingResolver(real_reactor,
                                              self._ip_whitelist,
                                              self._ip_blacklist)

        @implementer(IReactorPluggableNameResolver)
        class Reactor(object):
            # Proxy that delegates everything to the real reactor except
            # name resolution, which goes through the filtering resolver.
            def __getattr__(_self, attr):
                if attr == "nameResolver":
                    return nameResolver
                else:
                    return getattr(real_reactor, attr)

        self.reactor = Reactor()
    else:
        self.reactor = hs.get_reactor()

    # the pusher makes lots of concurrent SSL connections to sygnal, and
    # tends to do so in batches, so we need to allow the pool to keep
    # lots of idle connections around.
    pool = HTTPConnectionPool(self.reactor)
    # XXX: The justification for using the cache factor here is that larger
    # instances will need both more cache and more connections.
    # Still, this should probably be a separate dial
    pool.maxPersistentPerHost = max(
        (100 * hs.config.caches.global_factor, 5))
    pool.cachedConnectionTimeout = 2 * 60

    self.agent = ProxyAgent(
        self.reactor,
        connectTimeout=15,
        contextFactory=self.hs.get_http_client_context_factory(),
        pool=pool,
        http_proxy=http_proxy,
        https_proxy=https_proxy,
    )

    if self._ip_blacklist:
        # If we have an IP blacklist, we then install the blacklisting Agent
        # which prevents direct access to IP addresses, that are not caught
        # by the DNS resolution.
        self.agent = BlacklistingAgentWrapper(
            self.agent, self.reactor,
            ip_whitelist=self._ip_whitelist,
            ip_blacklist=self._ip_blacklist,
        )