Пример #1
0
    def __init__(self, hs):
        """HTTP client used by the pusher; *hs* is the HomeServer instance."""
        self.hs = hs

        pool = HTTPConnectionPool(reactor)

        # the pusher makes lots of concurrent SSL connections to sygnal, and
        # tends to do so in batches, so we need to allow the pool to keep lots
        # of idle connections around.
        pool.maxPersistentPerHost = max((100 * CACHE_SIZE_FACTOR, 5))
        pool.cachedConnectionTimeout = 2 * 60

        # The default context factory in Twisted 14.0.0 (which we require) is
        # BrowserLikePolicyForHTTPS which will do regular cert validation
        # 'like a browser'
        self.agent = Agent(
            reactor,
            connectTimeout=15,
            contextFactory=hs.get_http_client_context_factory(),
            pool=pool,
        )
        self.user_agent = hs.version_string
        self.clock = hs.get_clock()
        if hs.config.user_agent_suffix:
            self.user_agent = "%s %s" % (
                self.user_agent,
                hs.config.user_agent_suffix,
            )

        # Header values must be bytes, so encode once up front.
        self.user_agent = self.user_agent.encode('ascii')
Пример #2
0
    def requestAvatarId(self, c):
        """Authenticate the supplied credentials against Swift.

        Returns a Deferred that fires with the result of self._after_auth on
        success, or fails with error.UnauthorizedLogin otherwise.
        """
        # Adapt to IUsernamePassword; yields None when the credentials object
        # does not provide that interface.
        creds = credentials.IUsernamePassword(c, None)

        if creds is not None:
            locks = []
            pool = HTTPConnectionPool(reactor, persistent=False)
            pool.cachedConnectionTimeout = self.timeout
            if self.max_concurrency:
                # Per-host cap: keep connections alive and gate requests with
                # a semaphore of the same size.
                pool.persistent = True
                pool.maxPersistentPerHost = self.max_concurrency
                locks.append(defer.DeferredSemaphore(self.max_concurrency))

            if self.global_max_concurrency:
                locks.append(
                    defer.DeferredSemaphore(self.global_max_concurrency))

            conn = ThrottledSwiftConnection(locks,
                                            self.auth_url,
                                            creds.username,
                                            creds.password,
                                            pool=pool,
                                            extra_headers=self.extra_headers,
                                            verbose=self.verbose)
            conn.user_agent = USER_AGENT

            d = conn.authenticate()
            d.addCallback(self._after_auth, conn)
            d.addErrback(eb_failed_auth)
            return d
        return defer.fail(error.UnauthorizedLogin())
Пример #3
0
    def requestAvatarId(self, c):
        """Authenticate the supplied credentials against Swift.

        Returns a Deferred firing with self._after_auth's result, or failing
        with error.UnauthorizedLogin when the credentials are unusable.
        """
        creds = credentials.IUsernamePassword(c, None)
        if creds is None:
            # The credentials object does not provide IUsernamePassword.
            return defer.fail(error.UnauthorizedLogin())

        pool = HTTPConnectionPool(reactor, persistent=False)
        pool.cachedConnectionTimeout = self.timeout

        locks = []
        if self.max_concurrency:
            # Cap per-host usage: persistent connections plus a semaphore.
            pool.persistent = True
            pool.maxPersistentPerHost = self.max_concurrency
            locks.append(defer.DeferredSemaphore(self.max_concurrency))
        if self.global_max_concurrency:
            locks.append(defer.DeferredSemaphore(self.global_max_concurrency))

        conn = ThrottledSwiftConnection(
            locks, self.auth_url, creds.username, creds.password,
            pool=pool,
            extra_headers=self.extra_headers,
            verbose=self.verbose,
            ceph_compatible=self.ceph_compatible,
        )
        conn.user_agent = USER_AGENT

        d = conn.authenticate()
        d.addCallback(self._after_auth, conn)
        d.addErrback(eb_failed_auth)
        return d
Пример #4
0
def get_agent_for_address(reactor, address):
    """Build an Agent whose connections all go to *address* only."""
    connection_pool = HTTPConnectionPool(reactor, persistent=True)
    connection_pool.maxPersistentPerHost = 2
    # Swap in the quiet client factory to suppress per-connection noise
    # (private attribute of HTTPConnectionPool).
    connection_pool._factory = QuietHTTP11ClientFactory

    endpoints = MyEndpointFactory(reactor, address)
    return Agent.usingEndpointFactory(reactor, endpoints, pool=connection_pool)
Пример #5
0
def makeService(options):
    """
    Makes a new swftp-ftp service. The only option is the config file
    location. The config file has the following options:
     - host
     - port
     - auth_url
     - num_persistent_connections
     - connection_timeout
     - welcome_message
    """
    from twisted.protocols.ftp import FTPFactory
    from twisted.web.client import HTTPConnectionPool
    from twisted.cred.portal import Portal

    from swftp.ftp.server import SwiftFTPRealm
    from swftp.auth import SwiftBasedAuthDB
    from swftp.utils import print_runtime_info

    print('Starting SwFTP-ftp %s' % VERSION)

    c = get_config(options['config_file'], options)
    ftp_service = service.MultiService()

    # Add statsd service
    if c.get('ftp', 'log_statsd_host'):
        try:
            from swftp.statsd import makeService as makeStatsdService
            makeStatsdService(
                c.get('ftp', 'log_statsd_host'),
                c.getint('ftp', 'log_statsd_port'),
                sample_rate=c.getfloat('ftp', 'log_statsd_sample_rate'),
                prefix=c.get('ftp', 'log_statsd_metric_prefix')
            ).setServiceParent(ftp_service)
        except ImportError:
            # Statsd reporting is best-effort; missing txstatsd is non-fatal.
            log.err('Missing Statsd Module. Requires "txstatsd"')

    # HTTP pool sized from config.
    # NOTE(review): `pool` is not referenced again in this snippet — verify it
    # is wired into the Swift connection machinery elsewhere.
    pool = HTTPConnectionPool(reactor, persistent=True)
    pool.maxPersistentPerHost = c.getint('ftp', 'num_persistent_connections')
    pool.cachedConnectionTimeout = c.getint('ftp', 'connection_timeout')

    authdb = SwiftBasedAuthDB(auth_url=c.get('ftp', 'auth_url'),
                              verbose=c.getboolean('ftp', 'verbose'))

    # FTP frontend: Swift-backed auth, no anonymous logins.
    ftpportal = Portal(SwiftFTPRealm())
    ftpportal.registerChecker(authdb)
    ftpfactory = FTPFactory(ftpportal)
    ftpfactory.welcomeMessage = c.get('ftp', 'welcome_message')
    ftpfactory.allowAnonymous = False

    # SIGUSR1/SIGUSR2 dump runtime diagnostics to the log.
    signal.signal(signal.SIGUSR1, print_runtime_info)
    signal.signal(signal.SIGUSR2, print_runtime_info)

    internet.TCPServer(
        c.getint('ftp', 'port'),
        ftpfactory,
        interface=c.get('ftp', 'host')).setServiceParent(ftp_service)
    return ftp_service
Пример #6
0
    def _getConnectionPool(self):
        """Build an HTTPConnectionPool configured from this instance.

        The tuning attributes only matter for persistent pools, so they are
        skipped entirely otherwise.
        """
        pool = HTTPConnectionPool(reactor, self._persistent)
        if not self._persistent:
            return pool

        pool.maxPersistentPerHost = self._maxPersistentPerHost
        pool.cachedConnectionTimeout = self._cachedConnectionTimeout
        pool.retryAutomatically = self._retryAutomatically
        return pool
 def __init__(self, hs):
     """Matrix federation HTTP client state for homeserver *hs*."""
     self.hs = hs
     # First configured signing key and this server's name.
     self.signing_key = hs.config.signing_key[0]
     self.server_name = hs.hostname
     # Keep up to 10 idle persistent connections per remote host.
     pool = HTTPConnectionPool(reactor)
     pool.maxPersistentPerHost = 10
     self.agent = MatrixFederationHttpAgent(reactor, pool=pool)
     self.clock = hs.get_clock()
     self.version_string = hs.version_string
Пример #8
0
def prepareNetwork():
    """Create a cookie-aware HTTP agent backed by a persistent pool.

    Cookies are loaded from the 'photo.cookie' LWP jar on disk.
    """
    jar = LWPCookieJar('photo.cookie')
    jar.load()

    connection_pool = HTTPConnectionPool(reactor, persistent=True)
    connection_pool.maxPersistentPerHost = 15

    return CookieAgent(Agent(reactor, pool=connection_pool), jar)
Пример #9
0
    def _getConnectionPool(self):
        """Create an HTTPConnectionPool reflecting this object's settings.

        The tuning attributes are only applied when the pool is persistent.
        """
        pool = HTTPConnectionPool(reactor, self._persistent)

        if self._persistent:
            pool.maxPersistentPerHost = self._maxPersistentPerHost
            pool.cachedConnectionTimeout = self._cachedConnectionTimeout
            pool.retryAutomatically = self._retryAutomatically

        return pool
Пример #10
0
 def __init__(self, hs):
     """Simple HTTP client for the homeserver; plain Agent over a pool."""
     self.hs = hs
     # The default context factory in Twisted 14.0.0 (which we require) is
     # BrowserLikePolicyForHTTPS which will do regular cert validation
     # 'like a browser'
     pool = HTTPConnectionPool(reactor)
     # Keep up to 10 idle persistent connections per host.
     pool.maxPersistentPerHost = 10
     self.agent = Agent(reactor, pool=pool)
     self.version_string = hs.version_string
Пример #11
0
 def __init__(self, hs):
     """Simple HTTP client for the homeserver; plain Agent over a pool."""
     self.hs = hs
     self.version_string = hs.version_string
     # Twisted 14.0.0's default context factory (BrowserLikePolicyForHTTPS)
     # already performs browser-style certificate validation, so no explicit
     # contextFactory is passed.
     client_pool = HTTPConnectionPool(reactor)
     client_pool.maxPersistentPerHost = 10
     self.agent = Agent(reactor, pool=client_pool)
Пример #12
0
 def __init__(self, hs):
     """Federation HTTP client using a Matrix endpoint factory."""
     self.hs = hs
     # First configured signing key and this server's name.
     self.signing_key = hs.config.signing_key[0]
     self.server_name = hs.hostname
     # Keep up to 10 idle persistent connections per remote host.
     pool = HTTPConnectionPool(reactor)
     pool.maxPersistentPerHost = 10
     self.agent = Agent.usingEndpointFactory(reactor, MatrixFederationEndpointFactory(hs), pool=pool)
     self.clock = hs.get_clock()
     self.version_string = hs.version_string
     # Monotonically increasing id for outgoing requests.
     self._next_id = 1
Пример #13
0
 def __init__(self, hs):
     """Federation HTTP client using a Matrix endpoint factory."""
     self.hs = hs
     # First configured signing key and this server's name.
     self.signing_key = hs.config.signing_key[0]
     self.server_name = hs.hostname
     # Keep up to 10 idle persistent connections per remote host.
     pool = HTTPConnectionPool(reactor)
     pool.maxPersistentPerHost = 10
     self.agent = Agent.usingEndpointFactory(
         reactor, MatrixFederationEndpointFactory(hs), pool=pool)
     self.clock = hs.get_clock()
     self.version_string = hs.version_string
     # Monotonically increasing id for outgoing requests.
     self._next_id = 1
Пример #14
0
def create_agent(ca_cert, client_cert, client_key):
    """Build an Agent performing mutual TLS with the given CA and client keypair.

    *ca_cert*, *client_cert* and *client_key* are filesystem paths to PEM files.
    """
    ca = Certificate.loadPEM(FilePath(ca_cert).getContent())
    # The client cert and key are concatenated into a single PEM blob.
    client_pem = (FilePath(client_cert).getContent() + b"\n" +
                  FilePath(client_key).getContent())
    client = PrivateCertificate.loadPEM(client_pem)

    policy = BrowserLikePolicyForHTTPSWithClientCertificate(
        trustRoot=ca, clientCertificate=client)

    agent_pool = HTTPConnectionPool(reactor, persistent=True)
    agent_pool.maxPersistentPerHost = CONNECTION_COUNT

    return Agent(reactor, policy, pool=agent_pool)
Пример #15
0
def _get_agent():
    """Return a Twisted Agent, pooled when the running Twisted supports it."""
    context_factory = MyWebClientContextFactory()
    try:
        # HTTPConnectionPool has been present since Twisted version 12.1
        from twisted.web.client import HTTPConnectionPool
        conn_pool = HTTPConnectionPool(reactor, persistent=True)
        conn_pool.maxPersistentPerHost = _MAX_PERSISTENT_PER_HOST
        conn_pool.cachedConnectionTimeout = _CACHED_CONNECTION_TIMEOUT
        agent = Agent(reactor, context_factory,
                      connectTimeout=_CONNECT_TIMEOUT, pool=conn_pool)
    except ImportError:
        # Pre-12.1 Twisted: fall back to the bundled Zenoss client agent.
        from _zenclient import ZenAgent
        agent = ZenAgent(reactor, context_factory, persistent=True,
                         maxConnectionsPerHostName=1)
    return agent
Пример #16
0
def _get_agent():
    """Return a Twisted Agent; pooled when HTTPConnectionPool is available."""
    context_factory = MyWebClientContextFactory()
    try:
        # HTTPConnectionPool has been present since Twisted version 12.1
        from twisted.web.client import HTTPConnectionPool
        pool = HTTPConnectionPool(reactor, persistent=True)
        pool.maxPersistentPerHost = _MAX_PERSISTENT_PER_HOST
        pool.cachedConnectionTimeout = _CACHED_CONNECTION_TIMEOUT
        agent = Agent(reactor, context_factory,
                      connectTimeout=_CONNECT_TIMEOUT, pool=pool)
    except ImportError:
        # Older Twisted: fall back to the bundled Zenoss client agent.
        from _zenclient import ZenAgent
        agent = ZenAgent(reactor, context_factory, persistent=True, maxConnectionsPerHostName=1)
    return agent
    def agent(self, scheme='http'):
        """Return the shared Agent for *scheme*, building both lazily.

        The first call populates SessionManager._agents for 'http' and
        'https', honoring the process's proxy environment (getproxies()).
        """
        if not self._agents:
            pool = HTTPConnectionPool(reactor)
            pool.maxPersistentPerHost = 10
            pool.cachedConnectionTimeout = 15

            contextFactory = PermissiveBrowserLikePolicyForHTTPS()
            proxies = getproxies()

            if 'http' in proxies or 'https' in proxies:
                # I've noticed some intermittent failures (ResponseNeverReceived) to
                # POST request through a proxy when persistent connections are enabled.
                pool.persistent = False

            if 'https' in proxies:
                proxy = urlparse(proxies.get('https'))
                if proxy:
                    # Note- this isn't going to work completely.  It's not being
                    # passed the modified contextFactory, and in fact it doesn't
                    # even work properly for other reasons (ZPS-2061)
                    log.info("Creating https proxy (%s:%s)" %
                             (proxy.hostname, proxy.port))
                    endpoint = TCP4ClientEndpoint(reactor,
                                                  proxy.hostname,
                                                  proxy.port,
                                                  timeout=CONNECT_TIMEOUT)
                    SessionManager._agents['https'] = \
                        ProxyAgent(endpoint, reactor, pool=pool)
            else:
                SessionManager._agents['https'] = \
                    Agent(reactor, pool=pool, connectTimeout=CONNECT_TIMEOUT, contextFactory=contextFactory)

            if 'http' in proxies:
                proxy = urlparse(proxies.get('http'))
                if proxy:
                    log.info("Creating http proxy (%s:%s)" %
                             (proxy.hostname, proxy.port))
                    endpoint = TCP4ClientEndpoint(reactor,
                                                  proxy.hostname,
                                                  proxy.port,
                                                  timeout=CONNECT_TIMEOUT)
                    SessionManager._agents['http'] = \
                        ProxyAgent(endpoint, reactor, pool=pool)
            else:
                SessionManager._agents['http'] = \
                    Agent(reactor, pool=pool, connectTimeout=CONNECT_TIMEOUT)

        return SessionManager._agents[scheme]
Пример #18
0
    def __init__(self, reactor, node=('localhost', 4001), ca=None, cert=None):
        """Client for *node*; switches to https with client certs when *ca* is given."""
        self.reactor = reactor
        self.node = node
        self.ca = ca
        self.cert = cert

        if ca:
            self.scheme = 'https'
            context = PolicyForHTTPS(ca, cert)
        else:
            self.scheme = 'http'
            context = None

        # At most two persistent connections per host; the private _factory
        # swap installs the quiet HTTP/1.1 client factory.
        quiet_pool = HTTPConnectionPool(reactor, persistent=True)
        quiet_pool.maxPersistentPerHost = 2
        quiet_pool._factory = QuietHTTP11ClientFactory

        self.agent = Agent(self.reactor, contextFactory=context, pool=quiet_pool)
Пример #19
0
 def __init__(self, hs):
     """Matrix federation HTTP client; automatic retries are disabled."""
     self.hs = hs
     self.signing_key = hs.config.signing_key[0]
     self.server_name = hs.hostname
     reactor = hs.get_reactor()
     pool = HTTPConnectionPool(reactor)
     # No transparent retries; up to 5 idle connections kept per host for
     # two minutes.
     pool.retryAutomatically = False
     pool.maxPersistentPerHost = 5
     pool.cachedConnectionTimeout = 2 * 60
     self.agent = Agent.usingEndpointFactory(
         reactor, MatrixFederationEndpointFactory(hs), pool=pool)
     self.clock = hs.get_clock()
     self._store = hs.get_datastore()
     # Header values must be bytes.
     self.version_string = hs.version_string.encode('ascii')
     self._next_id = 1
     # Default request timeout, in seconds.
     self.default_timeout = 60
Пример #20
0
    def __init__(self, reactor, node=('localhost', 4001), ca=None, cert=None):
        """Client for *node*; uses https with client certs when *ca* is set."""
        self.reactor = reactor
        self.node = node
        self.scheme = 'http'
        self.ca = ca
        self.cert = cert
        context = None
        if ca:
            self.scheme = 'https'
            context = PolicyForHTTPS(ca, cert)

        # At most two persistent connections per host; swapping the private
        # _factory attribute presumably quiets per-connection logging — see
        # QuietHTTP11ClientFactory.
        quietPool = HTTPConnectionPool(reactor, persistent=True)
        quietPool.maxPersistentPerHost = 2
        quietPool._factory = QuietHTTP11ClientFactory

        self.agent = Agent(self.reactor,
                           contextFactory=context,
                           pool=quietPool)
Пример #21
0
    def __init__(self, reactor):
        """HLS client state: redirect-following agent over a small pool."""
        self.reactor = reactor
        pool = HTTPConnectionPool(reactor, persistent=True)
        # One cached connection per host, kept idle for up to 10 minutes.
        pool.maxPersistentPerHost = 1
        pool.cachedConnectionTimeout = 600
        self.agent = RedirectAgent(Agent(reactor, pool=pool))
        self.reqQ = HttpReqQ(self.agent, self.reactor)
        self.clientPlaylist = HlsPlaylist()
        self.verbose = False
        self.download = False
        self.outDir = ""
        self.encryptionHandled=False

        # required for the dump durations functionality
        self.dur_dump_file = None
        self.dur_avproble_acc = 0
        self.dur_vt_acc = 0
        self.dur_playlist_acc = 0
Пример #22
0
    def __init__(self, reactor):
        """HLS client state: redirect-following agent over a small pool."""
        self.reactor = reactor

        # One cached connection per host, idle for up to 10 minutes.
        conn_pool = HTTPConnectionPool(reactor, persistent=True)
        conn_pool.maxPersistentPerHost = 1
        conn_pool.cachedConnectionTimeout = 600
        self.agent = RedirectAgent(Agent(reactor, pool=conn_pool))
        self.reqQ = HttpReqQ(self.agent, self.reactor)

        self.clientPlaylist = HlsPlaylist()
        self.verbose = False
        self.download = False
        self.outDir = ""
        self.encryptionHandled = False

        # accumulators for the dump-durations functionality
        self.dur_dump_file = None
        self.dur_avproble_acc = 0
        self.dur_vt_acc = 0
        self.dur_playlist_acc = 0
Пример #23
0
def _get_agent():
    """Return the module-wide Agent, creating it on first use.

    Degrades gracefully across Twisted versions: pooled (12.1+), with only
    a connect timeout (11.1+), or a bare Agent otherwise.
    """
    global _AGENT
    if _AGENT is None:
        try:
            # HTTPConnectionPool has been present since Twisted version 12.1
            from twisted.web.client import HTTPConnectionPool
            pool = HTTPConnectionPool(reactor, persistent=True)
            pool.maxPersistentPerHost = _MAX_PERSISTENT_PER_HOST
            pool.cachedConnectionTimeout = _CACHED_CONNECTION_TIMEOUT
            _AGENT = Agent(
                reactor, connectTimeout=_CONNECT_TIMEOUT, pool=pool)
        except ImportError:
            try:
                # connectTimeout first showed up in Twisted version 11.1
                _AGENT = Agent(reactor, connectTimeout=_CONNECT_TIMEOUT)
            except TypeError:
                _AGENT = Agent(reactor)
    return _AGENT
Пример #24
0
	def parallelFetchAllProblems(self):
			"""Fetch all problem pages concurrently and store them in the DB.

			Runs its own reactor; returns when the reactor is stopped after
			the final problem has been stored.
			"""
			pool = HTTPConnectionPool(reactor)
			pool.maxPersistentPerHost = self.concurrency
			agent = Agent(reactor, pool=pool)
			# Semaphore caps the number of in-flight requests.
			sem = DeferredSemaphore(self.concurrency)
			self.done = 0

			def assign():
				# Queue one request per problem id inside a single transaction.
				self.query.execute('BEGIN')
				for id in range(1001, self.getProblemMax()+1):
					sem.acquire().addCallback(requestFactory, id)
			
			def requestFactory(token, id):
				deferred = agent.request('GET', self.baseUrl + self.problemPath + str(id))
				deferred.addCallback(onHeader, id)
				deferred.addErrback(errorHandler, id)
				return deferred
			
			def onHeader(response, id):
				deferred = readBody(response)
				deferred.addCallback(onBody, id)
				deferred.addErrback(errorHandler, id)
				return deferred

			def onBody(html, id):
				sem.release()
				d = pyq(html)
				# NOTE(review): the trailing comma makes `title` a 1-tuple,
				# hence the title[0] accesses below.
				title = d('#content_body > center:nth-child(1) > span').text(),
				body = d('#content_body').text()
				print('Fetched ProblemID: %s, Title: %s, done: %s' % (id, title[0], self.done))
				self.storeProblem(id, title[0], body)
				self.done += 1
				if(self.done == self.problemCount):
					# All pages fetched: commit and shut the reactor down.
					print('Fetch data used %s s' % (reactor.seconds() - startTimeStamp))
					print('Fetch data end, writing to database')
					self.query.execute('COMMIT')
					reactor.stop()

			def errorHandler(err, id):
				print('[%s] id %s: %s' % (reactor.seconds() - startTimeStamp, id, err))

			startTimeStamp = reactor.seconds()
			reactor.callWhenRunning(assign)
			reactor.run()
Пример #25
0
def _get_agent():
    """Return the module-wide Agent, creating it on first use.

    Degrades gracefully across Twisted versions: pooled (12.1+), with only
    a connect timeout (11.1+), or a bare Agent otherwise.
    """
    global _AGENT
    if _AGENT is None:
        context_factory = MyWebClientContextFactory()
        try:
            # HTTPConnectionPool has been present since Twisted version 12.1
            from twisted.web.client import HTTPConnectionPool
            pool = HTTPConnectionPool(reactor, persistent=True)
            pool.maxPersistentPerHost = _MAX_PERSISTENT_PER_HOST
            pool.cachedConnectionTimeout = _CACHED_CONNECTION_TIMEOUT
            _AGENT = Agent(reactor, context_factory,
                           connectTimeout=_CONNECT_TIMEOUT, pool=pool)
        except ImportError:
            try:
                # connectTimeout first showed up in Twisted version 11.1
                _AGENT = Agent(
                    reactor, context_factory, connectTimeout=_CONNECT_TIMEOUT)
            except TypeError:
                _AGENT = Agent(reactor, context_factory)
    return _AGENT
    def __init__(self, hs):
        """Matrix federation HTTP client; work is spread via a Cooperator."""
        self.hs = hs
        self.signing_key = hs.config.signing_key[0]
        self.server_name = hs.hostname
        reactor = hs.get_reactor()
        pool = HTTPConnectionPool(reactor)
        # No transparent retries; up to 5 idle connections kept per host for
        # two minutes.
        pool.retryAutomatically = False
        pool.maxPersistentPerHost = 5
        pool.cachedConnectionTimeout = 2 * 60
        self.agent = Agent.usingEndpointFactory(
            reactor, MatrixFederationEndpointFactory(hs), pool=pool
        )
        self.clock = hs.get_clock()
        self._store = hs.get_datastore()
        # Header values must be bytes.
        self.version_string_bytes = hs.version_string.encode('ascii')
        # Default request timeout, in seconds.
        self.default_timeout = 60

        def schedule(x):
            # Run each cooperator step slightly later (_EPSILON) on the reactor.
            reactor.callLater(_EPSILON, x)

        self._cooperator = Cooperator(scheduler=schedule)
    def agent(self, scheme='http'):
        """Return the shared Agent for *scheme*, building both lazily.

        The first call populates SessionManager._agents for 'http' and
        'https', honoring the process's proxy environment (getproxies()).
        """
        if not self._agents:
            pool = HTTPConnectionPool(reactor)
            pool.maxPersistentPerHost = 10
            pool.cachedConnectionTimeout = 15

            contextFactory = PermissiveBrowserLikePolicyForHTTPS()
            proxies = getproxies()

            if 'http' in proxies or 'https' in proxies:
                # I've noticed some intermittent failures (ResponseNeverReceived) to
                # POST request through a proxy when persistent connections are enabled.
                pool.persistent = False

            if 'https' in proxies:
                proxy = urlparse(proxies.get('https'))
                if proxy:
                    # Note- this isn't going to work completely.  It's not being
                    # passed the modified contextFactory, and in fact it doesn't
                    # even work properly for other reasons (ZPS-2061)
                    log.info("Creating https proxy (%s:%s)" % (proxy.hostname, proxy.port))
                    endpoint = TCP4ClientEndpoint(reactor, proxy.hostname, proxy.port, timeout=CONNECT_TIMEOUT)
                    SessionManager._agents['https'] = \
                        ProxyAgent(endpoint, reactor, pool=pool)
            else:
                SessionManager._agents['https'] = \
                    Agent(reactor, pool=pool, connectTimeout=CONNECT_TIMEOUT, contextFactory=contextFactory)

            if 'http' in proxies:
                proxy = urlparse(proxies.get('http'))
                if proxy:
                    log.info("Creating http proxy (%s:%s)" % (proxy.hostname, proxy.port))
                    endpoint = TCP4ClientEndpoint(reactor, proxy.hostname, proxy.port, timeout=CONNECT_TIMEOUT)
                    SessionManager._agents['http'] = \
                        ProxyAgent(endpoint, reactor, pool=pool)
            else:
                SessionManager._agents['http'] = \
                    Agent(reactor, pool=pool, connectTimeout=CONNECT_TIMEOUT)

        return SessionManager._agents[scheme]
Пример #28
0
    def __init__(self, page_archiver, cookie_file=None):
        """HTTP scraper session.

        Args:
            page_archiver: archiver used to store fetched pages (may be None).
            cookie_file: optional path of an LWP cookie jar persisted on disk;
                when omitted, cookies are kept in memory only.
        """
        self._logger = logging.getLogger(__name__)
        self._page_archiver = page_archiver
        self._logger.debug('Using page archiver: %s. Cookie file: %s',
                           page_archiver is not None,
                           cookie_file)
        if cookie_file:
            # Tighten permissions while the jar file is created/loaded.
            # FIX: `077` is Python 2-only octal syntax; `0o77` is the same
            # value (63) and valid on both Python 2.6+ and 3.
            umask = os.umask(0o77)
            self._cj = LWPCookieJar(cookie_file)
            try:
                self._cj.load()
            except LoadError:
                # Best-effort: a corrupt/missing jar just means starting fresh.
                self._logger.warning('Cannot load cookies from %s' % (cookie_file, ))
            os.umask(umask)
        else:
            self._cj = CookieJar()

        # Persistent pool, gzip decoding, then cookie handling on top.
        pool = HTTPConnectionPool(reactor, persistent=True)
        pool.maxPersistentPerHost = 10
        self._agent = CookieAgent(
            ContentDecoderAgent(Agent(reactor, pool=pool),
                                [('gzip', GzipDecoder)]),
            self._cj)
        self._lock = Lock()
Пример #29
0
	def __init__(self,reactor,api_key,api_secret):
		"""Bitcoin.de API client: build the endpoint/credit table and agent."""
		# Bitcoin.de API URI
		apihost = 'https://api.bitcoin.de'
		apiversion = 'v1'
		orderuri = apihost + '/' + apiversion + '/' + 'orders'
		tradeuri = apihost + '/' + apiversion + '/' + 'trades'
		accounturi = apihost + '/' + apiversion + '/' + 'account'
		# set initial nonce
		self.nonce = int(time.time())
		
		self.reactor = reactor
		pool = HTTPConnectionPool(reactor)		# Actually reusing the connection leads to correct credits
		pool.maxPersistentPerHost = 1
		self.contextFactory = WebClientContextFactory()
		self.agent = Agent(self.reactor, self.contextFactory,pool=pool)
		
		self.api_key = api_key
		self.api_secret = api_secret
		
		self.calls = {}
		# Method,uri,required params with allowed values,credits,field to return (after credits/pages are stripped)
		# Orders
		self.calls['showOrderbook'] = ['GET',orderuri,{'type':['sell','buy']},2]
		self.calls['showOrderbookCompact'] = ['GET',orderuri+'/compact',{},3]
		self.calls['createOrder'] = ['POST',orderuri,{'type':['sell','buy'],'max_amount':[],'price':[]},1]
		self.calls['deleteOrder'] = ['DELETE',orderuri,{'order_id':[]},2]
		self.calls['showMyOrders'] = ['GET',orderuri+'/my_own',{},2]	# Fix: all arguments are optional
		self.calls['showMyOrderDetails'] = ['GET',orderuri,{'order_id':[]},2]
		# Trades
		self.calls['executeTrade'] = ['POST',tradeuri,{'order_id':[],'amount':[]},1]
		self.calls['showMyTradeDetails'] = ['GET',tradeuri,{'trade_id':[]},3]
		self.calls['showMyTrades'] = ['GET',tradeuri,{},3]
		self.calls['showPublicTradeHistory'] = ['GET',tradeuri+'/history',{'since_tid':[]},3]
		# Account
		self.calls['showAccountInfo'] = ['GET',accounturi,{},2]
		self.calls['showAccountLedger'] = ['GET',accounturi+'/ledger',{},3]
		# Other
		self.calls['showRates'] = ['GET',apihost+'/'+apiversion+'/rates',{},3]
Пример #30
0
from twisted.python import log
from zope.interface import implements
from twisted.internet import reactor
from twisted.application.service import Service
from SharedLib.component_hacks import NamedAdapter
from ClientLib.Interfaces import IWebWorld
from twisted.internet.defer import Deferred
from twisted.internet.task import LoopingCall
from twisted.internet.protocol import Protocol
from twisted.web.client import HTTPConnectionPool
from twisted.web.client import Agent
from twisted.web.client import CookieAgent
from cookielib import CookieJar

# Module-wide pool shared by agents in this module; a single persistent
# connection is kept per host.
http_pool = HTTPConnectionPool(reactor, persistent=True)
http_pool.maxPersistentPerHost = 1

class BodyReturner(Protocol):
    # Accumulates an HTTP response body as it streams in.  The `finished`
    # object handed to __init__ is stored but not fired in the visible code.

    def __init__(self, finished, verbose=False):
        self.finished = finished  # completion callback object (unused here)
        self.received = ""        # body bytes accumulated so far
        self.verbose = verbose

    def dataReceived(self, bytes):
        # NOTE(review): the parameter shadows the builtin `bytes`; kept as-is.
        self.received += bytes

    def connectionLost(self, reason):
        """ todo: test if reason is twisted.web.client.ResponseDone """
        if self.verbose:
            log.msg('Finished receiving body:', reason.getErrorMessage())
Пример #31
0
from twisted.internet import reactor
from twisted.web.client import HTTPConnectionPool, ContentDecoderAgent, GzipDecoder
from network.http_utils import Agent
from twisted.web.http_headers import Headers
from network.twisted_utils import JsonProducer, JsonReceiver, make_errback
from api.auth import get_auth_headers

# Shared pool for this module's requests; up to 30 persistent connections
# are kept per host.
pool = HTTPConnectionPool(reactor)
pool.maxPersistentPerHost = 30


def get(url, data=None, on_response=None, on_error=None):
    """Fire an async GET at *url*; gzip-decoded JSON response via JsonReceiver.

    Errors (HTTP non-200, request failures, setup exceptions) are routed to
    *on_error* or a default errback.
    """
    errback = on_error or make_errback(frames_back=2)
    try:

        def handle_response(response):
            # Only HTTP 200 counts as success.
            if response.code != 200:
                errback("returned %s" % response.code)
            else:
                response.deliverBody(JsonReceiver.create(on_response, errback))

        gzip_agent = ContentDecoderAgent(Agent(reactor, pool=pool), [("gzip", GzipDecoder)])
        headers = Headers(get_auth_headers())
        headers.addRawHeader("User-Agent", "gzip")
        body = JsonProducer(data) if data else None
        request = gzip_agent.request("GET", url, headers=headers, bodyProducer=body)
        request.addCallbacks(handle_response, errback)
    except Exception as ex:
        errback("error %s" % ex)
Пример #32
0
from twisted.internet import reactor
from twisted.web.client import HTTPConnectionPool, ContentDecoderAgent, GzipDecoder
from network.http_utils import Agent
from twisted.web.http_headers import Headers
from network.twisted_utils import JsonProducer, JsonReceiver, make_errback
from api.auth import get_auth_headers

# Shared pool for this module's requests; up to 30 persistent connections
# are kept per host.
pool = HTTPConnectionPool(reactor)
pool.maxPersistentPerHost = 30


def get(url, data=None, on_response=None, on_error=None):
    """Fire an async GET at *url*; gzip-decoded JSON response via JsonReceiver.

    NOTE(review): this snippet is truncated in the source as captured — the
    final `except` clause below has no body.
    """
    errback = on_error or make_errback(frames_back=2)
    try:

        def handle_response(response):
            # Only HTTP 200 counts as success.
            if response.code == 200:
                response.deliverBody(JsonReceiver.create(on_response, errback))
            else:
                errback('returned %s' % response.code)

        agent = ContentDecoderAgent(Agent(reactor, pool=pool),
                                    [('gzip', GzipDecoder)])
        headers = Headers(get_auth_headers())
        headers.addRawHeader('User-Agent', 'gzip')
        d = agent.request('GET',
                          url,
                          headers=headers,
                          bodyProducer=JsonProducer(data) if data else None)
        d.addCallbacks(handle_response, errback)
    except Exception as ex:
Пример #33
0
def getPool():
    """Return the shared module-level HTTPConnectionPool, creating it lazily."""
    global pool
    if pool is None:
        shared = HTTPConnectionPool(reactor)
        shared.maxPersistentPerHost = 3
        pool = shared
    return pool
Пример #34
0
def getPool():
    """Return the shared module-level HTTPConnectionPool, creating it lazily."""
    global pool
    if pool is None:
        pool = HTTPConnectionPool(reactor)
        # Keep up to three persistent connections per host.
        pool.maxPersistentPerHost = 3
    return pool
Пример #35
0
def createPool(maxPersistentPerHost=10, persistent=True):
    """Build an HTTPConnectionPool with the given persistence settings."""
    new_pool = HTTPConnectionPool(reactor, persistent)
    new_pool.maxPersistentPerHost = maxPersistentPerHost
    return new_pool
Пример #36
0
from twisted.web.client import getPage
from twisted.internet import reactor
from twisted.internet.defer import Deferred
from twisted.internet.protocol import Protocol
from twisted.web.client import Agent, ContentDecoderAgent, RedirectAgent, GzipDecoder
from twisted.web.client import HTTPConnectionPool
from twisted.web.http_headers import Headers
from twisted.internet.ssl import ClientContextFactory
from twisted.internet.task import LoopingCall

# Module-wide pool for PubNub traffic: up to 100 persistent connections per
# host, each cached while idle for 310 seconds.
pnconn_pool = HTTPConnectionPool(reactor, persistent=True)
pnconn_pool.maxPersistentPerHost = 100
pnconn_pool.cachedConnectionTimeout = 310


class Pubnub(PubnubCoreAsync):
    def start(self):
        reactor.run()

    def stop(self):
        reactor.stop()

    def timeout(self, delay, callback):
        reactor.callLater(delay, callback)

    def __init__(self,
                 publish_key,
                 subscribe_key,
                 secret_key=False,
                 cipher_key=False,
                 ssl_on=False,
Пример #37
0
    def __init__(
        self,
        hs: "HomeServer",
        treq_args: Dict[str, Any] = {},
        ip_whitelist: Optional[IPSet] = None,
        ip_blacklist: Optional[IPSet] = None,
        http_proxy: Optional[bytes] = None,
        https_proxy: Optional[bytes] = None,
    ):
        """
        Args:
            hs
            treq_args: Extra keyword arguments to be given to treq.request.
            ip_blacklist: The IP addresses that are blacklisted that
                we may not request.
            ip_whitelist: The whitelisted IP addresses, that we can
               request if it were otherwise caught in a blacklist.
            http_proxy: proxy server to use for http connections. host[:port]
            https_proxy: proxy server to use for https connections. host[:port]
        """
        self.hs = hs

        self._ip_whitelist = ip_whitelist
        self._ip_blacklist = ip_blacklist
        self._extra_treq_args = treq_args

        self.user_agent = hs.version_string
        self.clock = hs.get_clock()
        if hs.config.user_agent_suffix:
            self.user_agent = "%s %s" % (self.user_agent,
                                         hs.config.user_agent_suffix)

        # We use this for our body producers to ensure that they use the correct
        # reactor.
        self._cooperator = Cooperator(
            scheduler=_make_scheduler(hs.get_reactor()))

        # Header values must be bytes.
        self.user_agent = self.user_agent.encode("ascii")

        if self._ip_blacklist:
            # If we have an IP blacklist, we need to use a DNS resolver which
            # filters out blacklisted IP addresses, to prevent DNS rebinding.
            self.reactor = BlacklistingReactorWrapper(hs.get_reactor(),
                                                      self._ip_whitelist,
                                                      self._ip_blacklist)
        else:
            self.reactor = hs.get_reactor()

        # the pusher makes lots of concurrent SSL connections to sygnal, and
        # tends to do so in batches, so we need to allow the pool to keep
        # lots of idle connections around.
        pool = HTTPConnectionPool(self.reactor)
        # XXX: The justification for using the cache factor here is that larger instances
        # will need both more cache and more connections.
        # Still, this should probably be a separate dial
        pool.maxPersistentPerHost = max(
            (100 * hs.config.caches.global_factor, 5))
        pool.cachedConnectionTimeout = 2 * 60

        # Proxy-aware agent; the proxies are optional and given as host[:port].
        self.agent = ProxyAgent(
            self.reactor,
            hs.get_reactor(),
            connectTimeout=15,
            contextFactory=self.hs.get_http_client_context_factory(),
            pool=pool,
            http_proxy=http_proxy,
            https_proxy=https_proxy,
        )

        if self._ip_blacklist:
            # If we have an IP blacklist, we then install the blacklisting Agent
            # which prevents direct access to IP addresses, that are not caught
            # by the DNS resolution.
            self.agent = BlacklistingAgentWrapper(
                self.agent,
                ip_whitelist=self._ip_whitelist,
                ip_blacklist=self._ip_blacklist,
            )
Пример #38
0
def createPool(maxPersistentPerHost=10, persistent=True):
    """Return an HTTPConnectionPool bound to the global reactor.

    Args:
        maxPersistentPerHost: cap on cached connections kept per host.
        persistent: whether connections are reused between requests.
    """
    http_pool = HTTPConnectionPool(reactor, persistent)
    http_pool.maxPersistentPerHost = maxPersistentPerHost
    return http_pool
Пример #39
0
def makeService(options):
    """
    Makes a new swftp-sftp service. The only option is the config file
    location. The config file has the following options:
     - host
     - port
     - auth_url
     - num_persistent_connections
     - connection_timeout
     - pub_key
     - priv_key

    Returns a MultiService holding the (optional) statsd reporter and the
    SSH/SFTP TCP server.
    """
    from twisted.conch.ssh.factory import SSHFactory
    from twisted.conch.ssh.keys import Key
    from twisted.web.client import HTTPConnectionPool
    from twisted.cred.portal import Portal

    from swftp.sftp.server import SwiftSFTPRealm, SwiftSSHServerTransport
    from swftp.sftp.connection import SwiftConnection
    from swftp.auth import SwiftBasedAuthDB
    from swftp.utils import print_runtime_info

    c = get_config(options['config_file'], options)

    # Shared HTTP connection pool for backend Swift requests.
    # NOTE(review): `pool` is not referenced again in this function; other
    # swftp services hand the pool to their auth DB — confirm whether it
    # should be passed to SwiftBasedAuthDB below.
    pool = HTTPConnectionPool(reactor, persistent=True)
    pool.maxPersistentPerHost = c.getint('sftp', 'num_persistent_connections')
    pool.cachedConnectionTimeout = c.getint('sftp', 'connection_timeout')

    sftp_service = service.MultiService()

    # ensure timezone is GMT
    os.environ['TZ'] = 'GMT'
    time.tzset()

    log.msg('Starting SwFTP-sftp %s' % VERSION)

    # Add statsd service (optional dependency; skipped when txstatsd is
    # not installed)
    if c.get('sftp', 'log_statsd_host'):
        try:
            from swftp.statsd import makeService as makeStatsdService
            makeStatsdService(
                c.get('sftp', 'log_statsd_host'),
                c.getint('sftp', 'log_statsd_port'),
                sample_rate=c.getfloat('sftp', 'log_statsd_sample_rate'),
                prefix=c.get('sftp', 'log_statsd_metric_prefix')
            ).setServiceParent(sftp_service)
        except ImportError:
            log.err('Missing Statsd Module. Requires "txstatsd"')

    authdb = SwiftBasedAuthDB(auth_url=c.get('sftp', 'auth_url'),
                              verbose=c.getboolean('sftp', 'verbose'))

    sftpportal = Portal(SwiftSFTPRealm())
    sftpportal.registerChecker(authdb)

    sshfactory = SSHFactory()
    sshfactory.protocol = SwiftSSHServerTransport
    sshfactory.noisy = False
    sshfactory.portal = sftpportal
    sshfactory.services['ssh-connection'] = SwiftConnection

    # Read the host keys with context managers so the file handles are
    # closed promptly. (The original used the Python-2-only `file()`
    # builtin and leaked both handles.)
    with open(c.get('sftp', 'pub_key')) as pub_key_file:
        pub_key_string = pub_key_file.read()
    with open(c.get('sftp', 'priv_key')) as priv_key_file:
        priv_key_string = priv_key_file.read()
    sshfactory.publicKeys = {
        'ssh-rsa': Key.fromString(data=pub_key_string)}
    sshfactory.privateKeys = {
        'ssh-rsa': Key.fromString(data=priv_key_string)}

    # Dump runtime diagnostics on SIGUSR1/SIGUSR2.
    signal.signal(signal.SIGUSR1, print_runtime_info)
    signal.signal(signal.SIGUSR2, print_runtime_info)

    internet.TCPServer(
        c.getint('sftp', 'port'),
        sshfactory,
        interface=c.get('sftp', 'host')).setServiceParent(sftp_service)

    return sftp_service
Пример #40
0
    def __init__(
        self,
        hs,
        treq_args=None,
        ip_whitelist=None,
        ip_blacklist=None,
        http_proxy=None,
        https_proxy=None,
    ):
        """
        Args:
            hs (synapse.server.HomeServer)
            treq_args (dict): Extra keyword arguments to be given to treq.request.
                Defaults to an empty dict.
            ip_blacklist (netaddr.IPSet): The IP addresses that are blacklisted that
                we may not request.
            ip_whitelist (netaddr.IPSet): The whitelisted IP addresses, that we can
               request if it were otherwise caught in a blacklist.
            http_proxy (bytes): proxy server to use for http connections. host[:port]
            https_proxy (bytes): proxy server to use for https connections. host[:port]
        """
        self.hs = hs

        self._ip_whitelist = ip_whitelist
        self._ip_blacklist = ip_blacklist
        # The previous default of `treq_args={}` was a single dict object
        # shared by every instance constructed without the argument (the
        # classic mutable-default pitfall); normalise None to a fresh dict.
        self._extra_treq_args = treq_args if treq_args is not None else {}

        self.user_agent = hs.version_string
        self.clock = hs.get_clock()
        if hs.config.user_agent_suffix:
            self.user_agent = "%s %s" % (self.user_agent,
                                         hs.config.user_agent_suffix)

        self.user_agent = self.user_agent.encode("ascii")

        if self._ip_blacklist:
            real_reactor = hs.get_reactor()
            # If we have an IP blacklist, we need to use a DNS resolver which
            # filters out blacklisted IP addresses, to prevent DNS rebinding.
            nameResolver = IPBlacklistingResolver(real_reactor,
                                                  self._ip_whitelist,
                                                  self._ip_blacklist)

            @implementer(IReactorPluggableNameResolver)
            class Reactor(object):
                # Forward every attribute to the real reactor, except expose
                # the filtering resolver in place of the default one.
                def __getattr__(_self, attr):
                    if attr == "nameResolver":
                        return nameResolver
                    else:
                        return getattr(real_reactor, attr)

            self.reactor = Reactor()
        else:
            self.reactor = hs.get_reactor()

        # the pusher makes lots of concurrent SSL connections to sygnal, and
        # tends to do so in batches, so we need to allow the pool to keep
        # lots of idle connections around.
        pool = HTTPConnectionPool(self.reactor)
        # XXX: The justification for using the cache factor here is that larger instances
        # will need both more cache and more connections.
        # Still, this should probably be a separate dial
        pool.maxPersistentPerHost = max(
            (100 * hs.config.caches.global_factor, 5))
        pool.cachedConnectionTimeout = 2 * 60

        self.agent = ProxyAgent(
            self.reactor,
            connectTimeout=15,
            contextFactory=self.hs.get_http_client_context_factory(),
            pool=pool,
            http_proxy=http_proxy,
            https_proxy=https_proxy,
        )

        if self._ip_blacklist:
            # If we have an IP blacklist, we then install the blacklisting Agent
            # which prevents direct access to IP addresses, that are not caught
            # by the DNS resolution.
            self.agent = BlacklistingAgentWrapper(
                self.agent,
                self.reactor,
                ip_whitelist=self._ip_whitelist,
                ip_blacklist=self._ip_blacklist,
            )
Пример #41
0
from twisted.python import util, log
from twisted.web.http_headers import Headers

from smap import core
from smap.util import periodicSequentialCall, BufferProtocol
import smap.driver
import smap.contrib.dtutil as dtutil

# strftime/strptime-style timestamp layout.
TIMEFMT = "%Y-%m-%d %H:%M:%S"

# make a connection pool exactly once: the NameError probe preserves a
# pre-existing pool if this module body is evaluated again (e.g. a driver
# reload), instead of creating a second pool.
try:
    connection_pool
except NameError:
    connection_pool = HTTPConnectionPool(reactor, persistent=True)
    connection_pool.maxPersistentPerHost = 3


def make_field_idxs(type, header, location=None):
    paths = [None]
    map_ = sensordb.get_map(type, header=header, location=location)
    for t in header[1:]:
        paths.append(None)
        for channel in map_['sensors'] + map_['meters']:
            if t.strip().startswith(channel[0]):
                paths[-1] = (channel[2], channel[3])
                break
    ddups = {}
    for elt in paths:
        if elt:
            name = '-'.join(elt)
Пример #42
0
    def __init__(self, hs, treq_args=None, ip_whitelist=None, ip_blacklist=None):
        """
        Args:
            hs (synapse.server.HomeServer)
            treq_args (dict): Extra keyword arguments to be given to treq.request.
                Defaults to an empty dict.
            ip_blacklist (netaddr.IPSet): The IP addresses that are blacklisted that
                we may not request.
            ip_whitelist (netaddr.IPSet): The whitelisted IP addresses, that we can
               request if it were otherwise caught in a blacklist.
        """
        self.hs = hs

        self._ip_whitelist = ip_whitelist
        self._ip_blacklist = ip_blacklist
        # Avoid the shared-mutable-default pitfall: `treq_args={}` in the
        # signature would be one dict object shared by all instances built
        # without the argument; normalise None to a fresh dict instead.
        self._extra_treq_args = treq_args if treq_args is not None else {}

        self.user_agent = hs.version_string
        self.clock = hs.get_clock()
        if hs.config.user_agent_suffix:
            self.user_agent = "%s %s" % (self.user_agent, hs.config.user_agent_suffix)

        self.user_agent = self.user_agent.encode('ascii')

        if self._ip_blacklist:
            real_reactor = hs.get_reactor()
            # If we have an IP blacklist, we need to use a DNS resolver which
            # filters out blacklisted IP addresses, to prevent DNS rebinding.
            nameResolver = IPBlacklistingResolver(
                real_reactor, self._ip_whitelist, self._ip_blacklist
            )

            @implementer(IReactorPluggableNameResolver)
            class Reactor(object):
                # Forward everything to the real reactor except expose the
                # filtering name resolver in place of the default one.
                def __getattr__(_self, attr):
                    if attr == "nameResolver":
                        return nameResolver
                    else:
                        return getattr(real_reactor, attr)

            self.reactor = Reactor()
        else:
            self.reactor = hs.get_reactor()

        # the pusher makes lots of concurrent SSL connections to sygnal, and
        # tends to do so in batches, so we need to allow the pool to keep
        # lots of idle connections around.
        pool = HTTPConnectionPool(self.reactor)
        pool.maxPersistentPerHost = max((100 * CACHE_SIZE_FACTOR, 5))
        pool.cachedConnectionTimeout = 2 * 60

        # The default context factory in Twisted 14.0.0 (which we require) is
        # BrowserLikePolicyForHTTPS which will do regular cert validation
        # 'like a browser'
        self.agent = Agent(
            self.reactor,
            connectTimeout=15,
            contextFactory=self.hs.get_http_client_context_factory(),
            pool=pool,
        )

        if self._ip_blacklist:
            # If we have an IP blacklist, we then install the blacklisting Agent
            # which prevents direct access to IP addresses, that are not caught
            # by the DNS resolution.
            self.agent = BlacklistingAgentWrapper(
                self.agent,
                self.reactor,
                ip_whitelist=self._ip_whitelist,
                ip_blacklist=self._ip_blacklist,
            )
Пример #43
0
 def pool_factory(self, reactor):
     """Create a persistent HTTPConnectionPool capped at TPS_LIMIT cached
     connections per host, and return it.

     The original body built and configured the pool but fell off the end
     without returning it, so callers always received None.
     """
     pool = HTTPConnectionPool(reactor, persistent=True)
     pool.maxPersistentPerHost = TPS_LIMIT
     return pool
Пример #44
0
def dp5twistedclientFactory(state):
    """Build an AsyncDP5Client wired to Twisted HTTP networking.

    Installs registration and lookup handlers that POST to the DP5
    registration/lookup servers over HTTPS, and schedules a periodic
    update loop on the global reactor. Returns the configured client.
    """
    global commonhttppool
    ## Build an async client
    cli = AsyncDP5Client(state)

    # Use a common pool of HTTPs connections.
    # commonhttppool is None when shared pooling is disabled at module
    # level; in that case each client gets its own private pool.
    if commonhttppool is None:
        httppool = HTTPConnectionPool(reactor, persistent=True)
        httppool.maxPersistentPerHost = 5
        httppool.retryAutomatically = False
    else:
        httppool = commonhttppool

    cli.pool = httppool
    cli.agent = Agent(reactor, pool=httppool)
    # Number of requests currently in flight: incremented before each POST,
    # decremented on completion or failure.
    cli.inflight = 0

    ## Define the networking for registration
    def send_registration(cli, epoch, combined, msg, cb, xfail):
        # Registration targets the previous epoch (epoch - 1).
        if combined:
            ser = cli.state["combined"]["regServer"]
            surl = str("https://" + ser + "/register?epoch=%s" % (epoch - 1))
        else:
            ser = cli.state["standard"]["regServer"]
            surl = str("https://" + ser + "/register?epoch=%s" % (epoch - 1))

        cli.inflight += 1
        try:
            body = FileBodyProducer(StringIO(msg))

            d = cli.agent.request(
                'POST', surl, Headers({'User-Agent': ['DP5 Twisted Client']}),
                body)

            def err(*args):
                # print "REG ERROR", args
                # print args
                cli.inflight -= 1
                xfail(args[0])

            def cbRequest(response):
                # Stream the response body into BufferedReception, which is
                # expected to fire `finished` once the transfer ends.
                finished = Deferred()
                finished.addCallback(cb)
                finished.addErrback(err)
                response.deliverBody(BufferedReception(finished))
                cli.inflight -= 1
                return finished

            d.addCallback(cbRequest)
            d.addErrback(err)
        except Exception as e:
            # NOTE(review): if the exception is raised before `err` is
            # defined (e.g. inside FileBodyProducer), the call below raises
            # NameError; and when it does run, inflight is decremented twice
            # (here and inside err). Confirm intended accounting.
            print e
            cli.inflight -= 1
            err(e)

    cli.register_handlers += [send_registration]

    ## Define the networking for lookups
    def send_lookup(cli, epoch, combined, seq, msg, cb, xfail):
        # An empty message means there is nothing to relay; succeed
        # immediately without touching the network.
        if msg == "":
            #print "No need to relay lookup"
            return cb("")

        if combined:
            ser = cli.state["combined"]["lookupServers"][seq]
            surl = str("https://" + ser + "/lookup?epoch=%s" % epoch)
        else:
            ser = cli.state["standard"]["lookupServers"][seq]
            surl = str("https://" + ser + "/lookup?epoch=%s" % epoch)

        cli.inflight += 1
        try:
            body = FileBodyProducer(StringIO(msg))

            d = cli.agent.request(
                'POST', surl, Headers({'User-Agent': ['DP5 Twisted Client']}),
                body)

            def err(*args):
                cli.inflight -= 1
                xfail(args[0])

            def cbRequest(response):
                finished = Deferred()
                finished.addCallback(cb)
                finished.addErrback(err)
                response.deliverBody(BufferedReception(finished))
                cli.inflight -= 1
                return finished

            d.addCallback(cbRequest)
            d.addErrback(err)
        except Exception as e:
            # NOTE(review): same NameError / double-decrement concern as in
            # send_registration above.
            print e
            cli.inflight -= 1
            err(e)

    cli.lookup_handlers += [send_lookup]

    def loopupdate():
        cli.update()

    # Poll for updates 20 times per epoch; the first run is scheduled
    # shortly after construction rather than started immediately.
    cli.l = task.LoopingCall(loopupdate)
    period = float(cli.state["epoch_lengthCB"] / 20.0)
    # cli.l.start(period) # call every second
    delay = 0.1  # random.random() * 10.0
    reactor.callLater(delay, cli.l.start, period)
    return cli
Пример #45
0
    def __init__(self, hs, treq_args=None, ip_whitelist=None, ip_blacklist=None):
        """
        Args:
            hs (synapse.server.HomeServer)
            treq_args (dict): Extra keyword arguments to be given to treq.request.
                Defaults to an empty dict.
            ip_blacklist (netaddr.IPSet): The IP addresses that are blacklisted that
                we may not request.
            ip_whitelist (netaddr.IPSet): The whitelisted IP addresses, that we can
               request if it were otherwise caught in a blacklist.
        """
        self.hs = hs

        self._ip_whitelist = ip_whitelist
        self._ip_blacklist = ip_blacklist
        # Avoid the shared-mutable-default pitfall: `treq_args={}` in the
        # signature would be a single dict shared by all instances built
        # without the argument; normalise None to a fresh dict instead.
        self._extra_treq_args = treq_args if treq_args is not None else {}

        self.user_agent = hs.version_string
        self.clock = hs.get_clock()
        if hs.config.user_agent_suffix:
            self.user_agent = "%s %s" % (self.user_agent,
                                         hs.config.user_agent_suffix)

        self.user_agent = self.user_agent.encode('ascii')

        if self._ip_blacklist:
            real_reactor = hs.get_reactor()
            # If we have an IP blacklist, we need to use a DNS resolver which
            # filters out blacklisted IP addresses, to prevent DNS rebinding.
            nameResolver = IPBlacklistingResolver(real_reactor,
                                                  self._ip_whitelist,
                                                  self._ip_blacklist)

            @implementer(IReactorPluggableNameResolver)
            class Reactor(object):
                # Forward everything to the real reactor except expose the
                # filtering name resolver in place of the default one.
                def __getattr__(_self, attr):
                    if attr == "nameResolver":
                        return nameResolver
                    else:
                        return getattr(real_reactor, attr)

            self.reactor = Reactor()
        else:
            self.reactor = hs.get_reactor()

        # the pusher makes lots of concurrent SSL connections to sygnal, and
        # tends to do so in batches, so we need to allow the pool to keep
        # lots of idle connections around.
        pool = HTTPConnectionPool(self.reactor)
        pool.maxPersistentPerHost = max((100 * CACHE_SIZE_FACTOR, 5))
        pool.cachedConnectionTimeout = 2 * 60

        # The default context factory in Twisted 14.0.0 (which we require) is
        # BrowserLikePolicyForHTTPS which will do regular cert validation
        # 'like a browser'
        self.agent = Agent(
            self.reactor,
            connectTimeout=15,
            contextFactory=self.hs.get_http_client_context_factory(),
            pool=pool,
        )

        if self._ip_blacklist:
            # If we have an IP blacklist, we then install the blacklisting Agent
            # which prevents direct access to IP addresses, that are not caught
            # by the DNS resolution.
            self.agent = BlacklistingAgentWrapper(
                self.agent,
                self.reactor,
                ip_whitelist=self._ip_whitelist,
                ip_blacklist=self._ip_blacklist,
            )
Пример #46
0
import copy
import random

import limits
limits.set_limits()

from users import User
import cPickle

# When True, all clients share a single HTTPS connection pool.
SSLPOOL = True

## Common pool of HTTPs connection to
## ensure that SSL is not the bottle neck.
if SSLPOOL:
    commonhttppool = HTTPConnectionPool(reactor, persistent=True)
    commonhttppool.maxPersistentPerHost = 50
    commonhttppool.retryAutomatically = False
else:
    # None signals "no shared pool": consumers create their own per client.
    commonhttppool = None


class BufferedReception(Protocol):
    """Twisted Protocol that accumulates a response body in memory.

    The buffer is a lazily created StringIO (None until the first chunk
    arrives); `finished` is the Deferred/callback holder supplied by the
    owner of this protocol.
    """

    def __init__(self, finished):
        # Completion callback holder, fired elsewhere once the body is read.
        self.finished = finished
        # Lazily-created StringIO; stays None until the first chunk arrives.
        self.bytes = None

    def dataReceived(self, bytes):
        # PEP 8: compare against None with `is`, not `==` (the original
        # used `self.bytes == None`).
        if self.bytes is None:
            self.bytes = StringIO()
        self.bytes.write(bytes)
Пример #47
0
from twisted.web.client import getPage
from twisted.internet import reactor
from twisted.internet.defer import Deferred
from twisted.internet.protocol import Protocol
from twisted.web.client import Agent, ContentDecoderAgent, RedirectAgent, GzipDecoder
from twisted.web.client import HTTPConnectionPool
from twisted.web.http_headers import Headers
from twisted.internet.ssl import ClientContextFactory
from twisted.internet.task import LoopingCall

# Shared persistent pool for all PubNub requests. The 310 s idle timeout
# presumably outlives PubNub's subscribe long-poll cycle — confirm.
pnconn_pool = HTTPConnectionPool(reactor, persistent=True)
pnconn_pool.maxPersistentPerHost    = 100
pnconn_pool.cachedConnectionTimeout = 310

class Pubnub(PubnubCoreAsync):

    # Run the global reactor; blocks the caller until stop() is invoked.
    def start(self): reactor.run()
    # Halt the global reactor, unblocking start().
    def stop(self):  reactor.stop()
    # Schedule `callback` to fire once after `delay` seconds.
    def timeout( self, delay, callback ):
        reactor.callLater( delay, callback )

    def __init__(
        self,
        publish_key,
        subscribe_key,
        secret_key = False,
        cipher_key = False,
        ssl_on = False,
        origin = 'pubsub.pubnub.com'
    ) :
        super(Pubnub, self).__init__(
Пример #48
0
#!/usr/bin/env python
from twisted.internet import reactor
from twisted.web.client import Agent, HTTPConnectionPool, readBody
from twisted.internet.defer import DeferredSemaphore

# Problem page endpoint; the numeric problem code is appended per request.
baseUrl = 'http://acm.zju.edu.cn/onlinejudge/showProblem.do?problemCode='

start = 1001
end = 3800
# NOTE(review): end - start excludes one endpoint; if problems start..end
# inclusive are fetched this is off by one — verify against the fetch loop.
count = end - start
concurrency = 10
pool = HTTPConnectionPool(reactor)
pool.maxPersistentPerHost = concurrency
agent = Agent(reactor, pool=pool)
# Caps the number of simultaneous in-flight requests at `concurrency`.
sem = DeferredSemaphore(concurrency)
done = 0

def onHeader(response, i):
	"""Start reading the body for request *i*; return the body Deferred."""
	body_d = readBody(response)
	body_d.addCallback(onBody, i)
	body_d.addErrback(errorHandler, i)
	return body_d

def onBody(body, i):
	"""Record one finished download; stop the reactor after the last one."""
	global done, count
	sem.release()
	done = done + 1
	print('Received %s, Length %s, Done %s' % (i, len(body), done))
	if done == count:
		print('All items fetched')
		reactor.stop()
Пример #49
0
import copy
import random

import limits
limits.set_limits()

from users import User
import cPickle

# When True, all clients share a single HTTPS connection pool.
SSLPOOL = True

## Common pool of HTTPs connection to
## ensure that SSL is not the bottle neck.
if SSLPOOL:
    commonhttppool = HTTPConnectionPool(reactor, persistent=True)
    commonhttppool.maxPersistentPerHost = 50
    commonhttppool.retryAutomatically = False
else:
    # None signals "no shared pool": consumers create their own per client.
    commonhttppool = None


class BufferedReception(Protocol):
    """Twisted Protocol that accumulates a response body in memory.

    The buffer is a lazily created StringIO (None until the first chunk
    arrives); `finished` is the Deferred/callback holder supplied by the
    owner of this protocol.
    """

    def __init__(self, finished):
        # Completion callback holder, fired elsewhere once the body is read.
        self.finished = finished
        # Lazily-created StringIO; stays None until the first chunk arrives.
        self.bytes = None

    def dataReceived(self, bytes):
        # PEP 8: compare against None with `is`, not `==` (the original
        # used `self.bytes == None`).
        if self.bytes is None:
            self.bytes = StringIO()
        self.bytes.write(bytes)
Пример #50
0
#encoding=utf-8
from __future__ import print_function

from twisted.internet import reactor
from twisted.web.client import Agent, HTTPConnectionPool
from twisted.web.http_headers import Headers

# Persistent pool so the TCP connection is kept alive and reused.
pool = HTTPConnectionPool(reactor, persistent=True)
pool.maxPersistentPerHost = 1  # one cached connection per host (original note: "auto reconnect")

agent = Agent(reactor, pool)
#agent = Agent(reactor)

# Issue a single GET; `d` fires once the response headers arrive (the body
# is never read in this script).
d = agent.request(
    b'GET',
    b'http://126.com/',
    Headers({'User-Agent': ['Twisted Web Client Example']}),
    None)

def cbResponse(ignored):
    # Print the received response object's repr, not the body.
    print('Response received: '+str(ignored))
d.addCallback(cbResponse) #first call

def cbShutdown(ignored):
    print('cbShutdown.')
    reactor.stop()
#d.addBoth(cbShutdown) #second call
#d.addBoth(lambda ignored: reactor.stop())
# Stop the reactor on either success or failure (runs after cbResponse).
d.addBoth(lambda ignored: (print('stop.'),reactor.stop()) )

reactor.run()
Пример #51
0
from twisted.python import log
from zope.interface import implements
from twisted.internet import reactor
from twisted.application.service import Service
from SharedLib.component_hacks import NamedAdapter
from ClientLib.Interfaces import IWebWorld
from twisted.internet.defer import Deferred
from twisted.internet.task import LoopingCall
from twisted.internet.protocol import Protocol
from twisted.web.client import HTTPConnectionPool
from twisted.web.client import Agent
from twisted.web.client import CookieAgent
from cookielib import CookieJar

# Module-wide pool; keeps at most one idle persistent connection per host.
http_pool = HTTPConnectionPool(reactor, persistent=True)
http_pool.maxPersistentPerHost = 1


class BodyReturner(Protocol):
    """Protocol that accumulates a response body into a string.

    NOTE(review): `finished` is stored but never fired in the code visible
    here — connectionLost only logs. This looks truncated; confirm the
    callback is invoked somewhere.
    """

    def __init__(self, finished, verbose=False):
        # Completion callback holder supplied by the caller.
        self.finished = finished
        # Accumulated body bytes so far.
        self.received = ""
        # When True, log a message when the connection closes.
        self.verbose = verbose

    def dataReceived(self, bytes):
        # String += per chunk is quadratic in total body size; fine for
        # small bodies, revisit for large transfers.
        self.received += bytes

    def connectionLost(self, reason):
        """ todo: test if reason is twisted.web.client.ResponseDone """
        if self.verbose:
            log.msg('Finished receiving body:', reason.getErrorMessage())
Пример #52
0
def dp5twistedclientFactory(state):
    """Build an AsyncDP5Client wired to Twisted HTTP networking.

    Installs registration and lookup handlers that POST to the DP5
    registration/lookup servers over HTTPS, and schedules a periodic
    update loop on the global reactor. Returns the configured client.
    """
    global commonhttppool
    ## Build an async client
    cli = AsyncDP5Client(state)

    # Use a common pool of HTTPs connections.
    # commonhttppool is None when shared pooling is disabled at module
    # level; in that case each client gets its own private pool.
    if commonhttppool is None:
        httppool = HTTPConnectionPool(reactor, persistent=True)
        httppool.maxPersistentPerHost = 5
        httppool.retryAutomatically = False
    else:
        httppool = commonhttppool

    cli.pool = httppool
    cli.agent = Agent(reactor, pool=httppool)
    # Number of requests currently in flight: incremented before each POST,
    # decremented on completion or failure.
    cli.inflight = 0

    ## Define the networking for registration
    def send_registration(cli, epoch, combined, msg, cb, xfail):
        # Registration targets the previous epoch (epoch - 1).
        if combined:
            ser = cli.state["combined"]["regServer"]
            surl = str("https://"+ser+"/register?epoch=%s" % (epoch-1))
        else:
            ser = cli.state["standard"]["regServer"]
            surl = str("https://" + ser + "/register?epoch=%s" % (epoch-1))

        cli.inflight += 1
        try:
            body = FileBodyProducer(StringIO(msg))

            d = cli.agent.request(
                'POST',
                surl,
                Headers({'User-Agent': ['DP5 Twisted Client']}),
                body)

            def err(*args):
                # print "REG ERROR", args
                # print args
                cli.inflight -= 1
                xfail(args[0])

            def cbRequest(response):
                # Stream the response body into BufferedReception, which is
                # expected to fire `finished` once the transfer ends.
                finished = Deferred()
                finished.addCallback(cb)
                finished.addErrback(err)
                response.deliverBody(BufferedReception(finished))
                cli.inflight -= 1
                return finished

            d.addCallback(cbRequest)
            d.addErrback(err)
        except Exception as e:
            # NOTE(review): if the exception is raised before `err` is
            # defined (e.g. inside FileBodyProducer), the call below raises
            # NameError; and when it does run, inflight is decremented twice
            # (here and inside err). Confirm intended accounting.
            print e
            cli.inflight -= 1
            err(e)

    cli.register_handlers += [send_registration]

    ## Define the networking for lookups
    def send_lookup(cli, epoch, combined, seq, msg, cb, xfail):
        # An empty message means there is nothing to relay; succeed
        # immediately without touching the network.
        if msg == "":
            #print "No need to relay lookup"
            return cb("")

        if combined:
            ser = cli.state["combined"]["lookupServers"][seq]
            surl = str("https://"+ser+"/lookup?epoch=%s" % epoch)
        else:
            ser = cli.state["standard"]["lookupServers"][seq]
            surl = str("https://" + ser + "/lookup?epoch=%s" % epoch)

        cli.inflight += 1
        try:
            body = FileBodyProducer(StringIO(msg))

            d = cli.agent.request(
                'POST',
                surl,
                Headers({'User-Agent': ['DP5 Twisted Client']}),
                body)

            def err(*args):
                cli.inflight -= 1
                xfail(args[0])

            def cbRequest(response):
                finished = Deferred()
                finished.addCallback(cb)
                finished.addErrback(err)
                response.deliverBody(BufferedReception(finished))
                cli.inflight -= 1
                return finished

            d.addCallback(cbRequest)
            d.addErrback(err)
        except Exception as e:
            # NOTE(review): same NameError / double-decrement concern as in
            # send_registration above.
            print e
            cli.inflight -= 1
            err(e)

    cli.lookup_handlers += [send_lookup]

    def loopupdate():
        cli.update()

    # Poll for updates 20 times per epoch; the first run is scheduled
    # shortly after construction rather than started immediately.
    cli.l = task.LoopingCall(loopupdate)
    period = float(cli.state["epoch_lengthCB"] / 20.0)
    # cli.l.start(period) # call every second
    delay = 0.1 # random.random() * 10.0
    reactor.callLater(delay, cli.l.start, period)
    return cli