Example #1
    def requestAvatarId(self, c):
        creds = credentials.IUsernamePassword(c, None)

        if creds is not None:
            locks = []
            pool = HTTPConnectionPool(reactor, persistent=False)
            pool.cachedConnectionTimeout = self.timeout
            if self.max_concurrency:
                pool.persistent = True
                pool.maxPersistentPerHost = self.max_concurrency
                locks.append(
                    defer.DeferredSemaphore(self.max_concurrency))

            if self.global_max_concurrency:
                locks.append(
                    defer.DeferredSemaphore(self.global_max_concurrency))

            conn = ThrottledSwiftConnection(
                locks, self.auth_url, creds.username, creds.password,
                pool=pool,
                extra_headers=self.extra_headers,
                verbose=self.verbose,
                ceph_compatible=self.ceph_compatible
            )
            conn.user_agent = USER_AGENT

            d = conn.authenticate()
            d.addCallback(self._after_auth, conn)
            d.addErrback(eb_failed_auth)
            return d
        return defer.fail(error.UnauthorizedLogin())
Example #2
    def __init__(self, base_url='http://localhost:8888', quiet_requests=True, **kwargs):
        self.client_id = str(uuid4())
        self.base_url = base_url
        pool = HTTPConnectionPool(reactor, persistent=True)
        agent = ContentDecoderAgent(RedirectAgent(Agent(reactor, pool=pool)), [('gzip', GzipDecoder)])

        if quiet_requests:
            pool._factory = QuieterHTTP11ClientFactory

        auth_url = kwargs.get('auth_url')
        if auth_url:
            username = kwargs.get('username')
            password = kwargs.get('password')
            api_key = kwargs.get('api_key')

            if not username:
                raise RuntimeError('Marconi "auth_url" specified with no username')

            if api_key:
                cred = api_key
                auth_type = 'api_key'
            elif password:
                cred = password
                auth_type = 'password'
            else:
                raise RuntimeError('Marconi "auth_url" specified with no "password" or "api_key"')

            agent = KeystoneAgent(agent, auth_url, (username, cred), auth_type=auth_type)

        self.http_client = HTTPClient(agent)
Example #3
def makeService(options):
    """
    Makes a new swftp-ftp service. The only option is the config file
    location. The config file has the following options:
     - host
     - port
     - auth_url
     - num_persistent_connections
     - connection_timeout
     - welcome_message
    """
    from twisted.protocols.ftp import FTPFactory
    from twisted.web.client import HTTPConnectionPool
    from twisted.cred.portal import Portal

    from swftp.ftp.server import SwiftFTPRealm
    from swftp.auth import SwiftBasedAuthDB
    from swftp.utils import print_runtime_info

    print('Starting SwFTP-ftp %s' % VERSION)

    c = get_config(options['config_file'], options)
    ftp_service = service.MultiService()

    # Add statsd service
    if c.get('ftp', 'log_statsd_host'):
        try:
            from swftp.statsd import makeService as makeStatsdService
            makeStatsdService(
                c.get('ftp', 'log_statsd_host'),
                c.getint('ftp', 'log_statsd_port'),
                sample_rate=c.getfloat('ftp', 'log_statsd_sample_rate'),
                prefix=c.get('ftp', 'log_statsd_metric_prefix')
            ).setServiceParent(ftp_service)
        except ImportError:
            log.err('Missing Statsd Module. Requires "txstatsd"')

    pool = HTTPConnectionPool(reactor, persistent=True)
    pool.maxPersistentPerHost = c.getint('ftp', 'num_persistent_connections')
    pool.cachedConnectionTimeout = c.getint('ftp', 'connection_timeout')

    authdb = SwiftBasedAuthDB(auth_url=c.get('ftp', 'auth_url'),
                              verbose=c.getboolean('ftp', 'verbose'))

    ftpportal = Portal(SwiftFTPRealm())
    ftpportal.registerChecker(authdb)
    ftpfactory = FTPFactory(ftpportal)
    ftpfactory.welcomeMessage = c.get('ftp', 'welcome_message')
    ftpfactory.allowAnonymous = False

    signal.signal(signal.SIGUSR1, print_runtime_info)
    signal.signal(signal.SIGUSR2, print_runtime_info)

    internet.TCPServer(
        c.getint('ftp', 'port'),
        ftpfactory,
        interface=c.get('ftp', 'host')).setServiceParent(ftp_service)
    return ftp_service
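
For reference, a minimal sketch of the config file this service reads, inferred from the `c.get('ftp', ...)` calls above; the `[ftp]` section name matches those calls, while the sample values are assumptions:

[ftp]
host = 0.0.0.0
port = 5021
auth_url = http://127.0.0.1:8080/auth/v1.0
num_persistent_connections = 20
connection_timeout = 240
welcome_message = Welcome to SwFTP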
Example #4
 def __init__(self, hs):
     self.hs = hs
     self.signing_key = hs.config.signing_key[0]
     self.server_name = hs.hostname
     pool = HTTPConnectionPool(reactor)
     pool.maxPersistentPerHost = 10
     self.agent = MatrixFederationHttpAgent(reactor, pool=pool)
     self.clock = hs.get_clock()
     self.version_string = hs.version_string
Example #5
    def _getConnectionPool(self):
        pool = HTTPConnectionPool(reactor, self._persistent)

        if self._persistent:
            pool.maxPersistentPerHost = self._maxPersistentPerHost
            pool.cachedConnectionTimeout = self._cachedConnectionTimeout
            pool.retryAutomatically = self._retryAutomatically

        return pool
Example #6
 def __init__(self, hs):
     self.hs = hs
     # The default context factory in Twisted 14.0.0 (which we require) is
     # BrowserLikePolicyForHTTPS which will do regular cert validation
     # 'like a browser'
     pool = HTTPConnectionPool(reactor)
     pool.maxPersistentPerHost = 10
     self.agent = Agent(reactor, pool=pool)
     self.version_string = hs.version_string
Example #7
 def __init__(self, hs):
     self.hs = hs
     self.signing_key = hs.config.signing_key[0]
     self.server_name = hs.hostname
     pool = HTTPConnectionPool(reactor)
     pool.maxPersistentPerHost = 10
     self.agent = Agent.usingEndpointFactory(reactor, MatrixFederationEndpointFactory(hs), pool=pool)
     self.clock = hs.get_clock()
     self.version_string = hs.version_string
     self._next_id = 1
Example #8
 def __init__(self, debug=False):
     self.gpg = gnupg.GPG()
     self.mpex_url = 'http://polimedia.us/bitcoin/mpex.php'
     self.mpex_fingerprint = 'F1B69921'
     self.passphrase = None
     self.debug = debug
     if self.debug:
         self.df = open("mpex_%d.txt" % time.time(), 'w')
     pool = HTTPConnectionPool(reactor)
     # close connections at the same time as the server to prevent ResponseNeverReceived errors
     # the timeout could be determined automatically from the Keep-Alive header
     pool.cachedConnectionTimeout = 4
     self.agent = Agent(reactor, pool=pool)
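
The comments above suggest taking the idle timeout from the server's Keep-Alive header instead of hard-coding 4 seconds; a rough sketch of that idea (the helper and its wiring are assumptions, not part of the original client):

import re

def keepalive_timeout(response, default=4):
    # Hypothetical helper: read "Keep-Alive: timeout=N" from a twisted.web
    # response and fall back to the hard-coded default otherwise.
    values = response.headers.getRawHeaders('Keep-Alive') or ['']
    match = re.search(r'timeout=(\d+)', values[0])
    return int(match.group(1)) if match else default

# e.g.: pool.cachedConnectionTimeout = keepalive_timeout(response)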
Example #9
def _get_agent():
    context_factory = MyWebClientContextFactory()
    try:
        # HTTPConnectionPool has been present since Twisted version 12.1
        from twisted.web.client import HTTPConnectionPool
        pool = HTTPConnectionPool(reactor, persistent=True)
        pool.maxPersistentPerHost = _MAX_PERSISTENT_PER_HOST
        pool.cachedConnectionTimeout = _CACHED_CONNECTION_TIMEOUT
        agent = Agent(reactor, context_factory,
                       connectTimeout=_CONNECT_TIMEOUT, pool=pool)
    except ImportError:
        from _zenclient import ZenAgent
        agent = ZenAgent(reactor, context_factory, persistent=True, maxConnectionsPerHostName=1)
    return agent
Example #10
    def __init__(self, token):
        self._pool = HTTPConnectionPool(reactor)
        self._token = token
        self._queues = collections.defaultdict(list)
        self._retry_timers = {}

        self._pool._factory.noisy = False
Example #11
    def __init__(self, config, pool=None, reactor=None, clock=None):
        super(PubNubTwisted, self).__init__(config)

        self.clock = clock
        self._publish_sequence_manager = PublishSequenceManager(
            PubNubCore.MAX_SEQUENCE)

        if self.config.enable_subscribe:
            self._subscription_manager = TwistedSubscriptionManager(self)

        self.disconnected_times = 0

        if reactor is None:
            self.reactor = _reactor
        else:
            self.reactor = reactor

        if pool is None:
            self.pnconn_pool = HTTPConnectionPool(self.reactor,
                                                  persistent=True)
            self.pnconn_pool.maxPersistentPerHost = 3
            self.pnconn_pool.cachedConnectionTimeout = self.config.subscribe_request_timeout
            self.pnconn_pool.retryAutomatically = False
        else:
            self.pnconn_pool = pool

        self.headers = {
            'User-Agent': [self.sdk_name],
        }
Example #12
    def __init__(self, reactor, node=('localhost', 4001), ca=None, cert=None):
        self.reactor = reactor
        self.node = node
        self.scheme = 'http'
        self.ca = ca
        self.cert = cert
        context = None
        if ca:
            self.scheme = 'https'
            context = PolicyForHTTPS(ca, cert)

        quietPool = HTTPConnectionPool(reactor, persistent=True)
        quietPool.maxPersistentPerHost = 2
        quietPool._factory = QuietHTTP11ClientFactory

        self.agent = Agent(self.reactor, contextFactory=context, pool=quietPool)
Example #13
    def __init__(
        self,
        reactor,
        tls_client_options_factory,
        _well_known_tls_policy=None,
        _srv_resolver=None,
        _well_known_cache=well_known_cache,
    ):
        self._reactor = reactor
        self._clock = Clock(reactor)

        self._tls_client_options_factory = tls_client_options_factory
        if _srv_resolver is None:
            _srv_resolver = SrvResolver()
        self._srv_resolver = _srv_resolver

        self._pool = HTTPConnectionPool(reactor)
        self._pool.retryAutomatically = False
        self._pool.maxPersistentPerHost = 5
        self._pool.cachedConnectionTimeout = 2 * 60

        agent_args = {}
        if _well_known_tls_policy is not None:
            # the param is called 'contextFactory', but actually passing a
            # contextfactory is deprecated, and it expects an IPolicyForHTTPS.
            agent_args['contextFactory'] = _well_known_tls_policy
        _well_known_agent = RedirectAgent(
            Agent(self._reactor, pool=self._pool, **agent_args), )
        self._well_known_agent = _well_known_agent

        # our cache of .well-known lookup results, mapping from server name
        # to delegated name. The values can be:
        #   `bytes`:     a valid server-name
        #   `None`:      there is no (valid) .well-known here
        self._well_known_cache = _well_known_cache
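
The comment above describes the cache as a plain mapping from server name to delegated name; a tiny illustration with invented entries:

# Hypothetical: any mutable mapping satisfies the contract described above.
well_known_cache = {}
well_known_cache[b"example.com"] = b"matrix.example.com"  # a valid server-name
well_known_cache[b"no-delegation.example"] = None         # no (valid) .well-known here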
Example #14
class SFTPFuncTest(unittest.TestCase):
    @defer.inlineCallbacks
    def setUp(self):
        self.active_connections = []
        self.pool = HTTPConnectionPool(reactor, persistent=True)
        self.swift = get_swift_client(CONFIG, pool=self.pool)
        self.tmpdir = tempfile.mkdtemp()
        _, self.sftp = self.get_client()
        yield clean_swift(self.swift)

    @defer.inlineCallbacks
    def tearDown(self):
        shutil.rmtree(self.tmpdir)
        for (transport, conn) in self.active_connections:
            try:
                conn.close()
            except:
                pass
            try:
                transport.close()
            except:
                pass

        yield clean_swift(self.swift)
        yield self.pool.closeCachedConnections()

    def get_client(self):
        transport, conn = get_sftp_client_with_transport(CONFIG)
        self.active_connections.append((transport, conn))
        return transport, conn
Example #15
class StateTestCase(unittest.TestCase):
    def setUp(self):
        pnconf_uuid_set = copy(pnconf)
        pnconf_uuid_set.uuid = 'someuuid'
        self.pool = HTTPConnectionPool(reactor, persistent=False)
        self.pubnub = PubNubTwisted(pnconf_uuid_set, reactor=reactor, pool=self.pool)

    def tearDown(self):
        return self.pool.closeCachedConnections()

    def assert_valid_state_envelope(self, envelope):
        self.assertIsInstance(envelope, TwistedEnvelope)
        self.assertIsInstance(envelope.result, PNSetStateResult)
        self.assertEqual(envelope.result.state, state)

    @inlineCallbacks
    @pn_vcr.use_cassette(
        'tests/integrational/fixtures/twisted/state/single_channel.yaml',
        filter_query_parameters=['uuid'])
    def test_state_single_channel(self):
        envelope = yield self.pubnub.set_state().channels(channel).state(state).deferred()
        self.assert_valid_state_envelope(envelope)
        returnValue(envelope)

    @inlineCallbacks
    @pn_vcr.use_cassette(
        'tests/integrational/fixtures/twisted/state/multiple_channels.yaml',
        filter_query_parameters=['uuid'])
    def test_state_multiple_channels(self):
        envelope = yield self.pubnub.set_state().channels(channels).state(state).deferred()
        self.assert_valid_state_envelope(envelope)
        returnValue(envelope)
Example #16
 def setUp(self):
     self.active_connections = []
     self.pool = HTTPConnectionPool(reactor, persistent=True)
     self.swift = get_swift_client(CONFIG, pool=self.pool)
     self.tmpdir = tempfile.mkdtemp()
     _, self.sftp = self.get_client()
     yield clean_swift(self.swift)
Example #17
class HTTP11DownloadHandler(object):
    def __init__(self, settings):
        self.settings = settings
        self.ssl_methods = settings.get('DOWNLOAD_HANDLER_SSL_METHODS')
        self.context_factories = [CrawlmiClientContextFactory(method) for method in self.ssl_methods]
        self.pool = HTTPConnectionPool(reactor, persistent=True)
        self.pool.maxPersistentPerHost = settings.get_int('CONCURRENT_REQUESTS_PER_DOMAIN')
        self.pool._factory.noisy = False

    def download_request(self, request):
        '''Return a deferred for the HTTP download.'''
        dfd = None
        for context_factory in self.context_factories:
            if dfd is None:
                dfd = self._download(request, context_factory)
            else:
                def _failure(failure):
                    failure.trap(SSL.Error)
                    return self._download(request, context_factory)
                dfd.addErrback(_failure)
        return dfd

    def _download(self, request, context_factory):
        agent = CrawlmiAgent(
            context_factory,
            self.settings.get_float('DOWNLOAD_TIMEOUT', 180, request),
            self.settings.get_int('DOWNLOAD_SIZE_LIMIT', 0, request),
            request.meta.get('bind_address'),
            self.pool)
        return agent.download_request(request)

    def close(self):
        return self.pool.closeCachedConnections()
Example #18
 def __init__(self, settings):
     self.settings = settings
     self.ssl_methods = settings.get('DOWNLOAD_HANDLER_SSL_METHODS')
     self.context_factories = [CrawlmiClientContextFactory(method) for method in self.ssl_methods]
     self.pool = HTTPConnectionPool(reactor, persistent=True)
     self.pool.maxPersistentPerHost = settings.get_int('CONCURRENT_REQUESTS_PER_DOMAIN')
     self.pool._factory.noisy = False
Example #19
    def __init__(self, reactor, email, password):
        self.reactor = reactor
        self.email = email
        self.password = password

        # Set up an agent for sending HTTP requests.  Uses cookies
        # (part of the authentication), persistent HTTP connection
        # pool, automatic content decoding (gzip)

        # container to keep track of cookies
        self.cookiejar = cookielib.CookieJar()

        # HTTP persistent connection pool
        self.pool = HTTPConnectionPool(self.reactor, persistent=True)
        # for some reason, using >1 connection per host fails
        self.pool.maxPersistentPerHost = 1

        self.agent = ContentDecoderAgent(
            CookieAgent(Agent(self.reactor, pool=self.pool), self.cookiejar),
            [('gzip', GzipDecoder)])

        # this is the token that is used to authenticate API requests
        self.xsrf_token = None
        self.auth_token = None

        # who we are
        self.player_nickname = None
        self.player_guid = None
        self.team = None
        self.ap = None
        self.level = None
        self.start_date = None
        self.new_version = False
        self.inventory_done = False
        self.profile_done = False

        # for keeping track of item inventory
        self.inventory = b07.inventory.Inventory()

        # for keeping track of API requests that are delayed until
        # authentication has completed
        self._deferred_api_requests = []

        # for keeping track of periodic inventory refreshes
        self._periodic_inventory_refresh_delayedcall = None

        # list of functions to call every time inventory is refreshed
        self._on_inventory_refreshed = []

        # do an immediate inventory refresh
        self._first_inventory_ready = self._defer_until_authenticated(
            self._inventory0, (), {})

        # do an immediate profile refresh
        self._first_profile_ready = self._defer_until_authenticated(
            self._profile0, (), {})

        # start the authentication process
        self.reactor.callLater(0, self._authenticate0)
Example #20
    def startup(cls):
        """Initiates connection pool and logging.

        We can not use persisten connections here as docker server
        has some troubles with those
        """
        cls.pool = HTTPConnectionPool(reactor, persistent=False)
        cls._init_logging()
Example #21
 def startService(self):
     # treq only supports basicauth, so we force txrequests if the auth is
     # something else
     if self._auth is not None and not isinstance(self._auth, tuple):
         self.PREFER_TREQ = False
     if txrequests is not None and not self.PREFER_TREQ:
         self._session = txrequests.Session()
         self._doRequest = self._doTxRequest
     elif treq is None:
         raise ImportError("{classname} requires either txrequest or treq install."
                           " Users should call {classname}.checkAvailable() during checkConfig()"
                           " to properly alert the user.".format(classname=self.__class__.__name__))
     else:
         self._doRequest = self._doTReq
         self._pool = HTTPConnectionPool(self.master.reactor)
         self._pool.maxPersistentPerHost = self.MAX_THREADS
         self._agent = Agent(self.master.reactor, pool=self._pool)
Example #22
 def __init__(self):
     self.pool = HTTPConnectionPool(reactor, persistent=True)
     self.pool.maxPersistentPerHost = 4
     self.pool._factory = QuietHTTP11ClientFactory
     self.contextFactory = WhitelistContextFactory()
     self.agent = Agent(reactor, pool=self.pool, contextFactory=self.contextFactory)
     self.queue = DeferredQueue()
     self.getRequest()
Example #23
def default_pool(reactor, pool, persistent):
    """
    Return the specified pool or a pool with the specified reactor and
    persistence.
    """
    reactor = default_reactor(reactor)

    if pool is not None:
        return pool

    if persistent is False:
        return HTTPConnectionPool(reactor, persistent=persistent)

    if get_global_pool() is None:
        set_global_pool(HTTPConnectionPool(reactor, persistent=True))

    return get_global_pool()
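
A short usage sketch of the three branches above, assuming `default_pool` and the `get_global_pool`/`set_global_pool` helpers it references are importable from the same module (as in treq):

from twisted.internet import reactor
from twisted.web.client import HTTPConnectionPool

# An explicitly supplied pool is always returned unchanged:
explicit = HTTPConnectionPool(reactor, persistent=True)
assert default_pool(reactor, explicit, persistent=None) is explicit

# persistent=False yields a fresh, non-persistent pool on every call:
fresh = default_pool(reactor, None, persistent=False)
assert not fresh.persistent

# Anything else lazily creates one shared persistent pool and reuses it:
assert default_pool(reactor, None, None) is default_pool(reactor, None, True)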
Example #24
    def __init__(self, logformatter, settings):
        # Manages connections: after a request completes, connections are not
        # closed automatically but kept in the cache for reuse.
        self.lfm = logformatter
        # logger.debug(*self.lfm.crawled)
        logger.debug(
            *self.lfm.crawled('Downloader', 'HTTPDownloadHandler', 'initialized'))
        self._pool = HTTPConnectionPool(reactor, persistent=True)
        self._pool.maxPersistentPerHost = settings.getint(
            'CONCURRENT_REQUESTS_PER_DOMAIN')
        self._pool._factory.noisy = False  # silence the factory's connection logging

        self._contextFactory = DownloaderClientContextFactory()

        self._default_maxsize = settings.getint('DOWNLOAD_MAXSIZE')
        self._default_warnsize = settings.getint('DOWNLOAD_WARNSIZE')
        self._fail_on_dataloss = settings.getbool('DOWNLOAD_FAIL_ON_DATALOSS')
        self._disconnect_timeout = 1
Example #25
 def __init__(self, persistent=True):
     self.persistent = persistent
     self.agents = SortedCollection(key=lambda x: x.url.netloc)
     self.pool = HTTPConnectionPool(reactor)
     self.pool.maxPersistentPerHost = getattr(
         settings, 'DTX_WEB_DEFER_MAX_PERSISTENT_PER_HOST', 8)
     self.pool.cachedConnectionTimeout = getattr(
         settings, 'DTX_WEB_DEFER_CONNECT_TIMEOUT', 10)
Example #26
 def _parse_fluentd_http(self, kind, args):
     return lambda reactor: FluentdDestination(
         # Construct the pool ourselves with the default of using
         # persistent connections to override Agent's default of not using
         # persistent connections.
         agent=Agent(reactor, pool=HTTPConnectionPool(reactor)),
         fluentd_url=URL.fromText(args),
     )
Example #27
 def setUp(self):
     description = yield self._httpbin_process.server_description(
         reactor)
     self.baseurl = URL(scheme=u"http",
                        host=description.host,
                        port=description.port).asText()
     self.agent = Agent(reactor)
     self.pool = HTTPConnectionPool(reactor, False)
Example #28
    def setUp(self):
        self.worker = yield self.get_worker()
        self.logging_api = RequestLoggingApi()
        self.logging_api.setup()
        self.addCleanup(self.logging_api.teardown)

        connection_pool = HTTPConnectionPool(reactor, persistent=False)
        treq._utils.set_global_pool(connection_pool)
Example #29
class HTTPDownloadHandler(object):
    def __init__(self, logformatter, settings):
        # Manages connections: after a request completes, connections are not
        # closed automatically but kept in the cache for reuse.
        self.lfm = logformatter
        # logger.debug(*self.lfm.crawled)
        logger.debug(
            *self.lfm.crawled('Downloader', 'HTTPDownloadHandler', 'initialized'))
        self._pool = HTTPConnectionPool(reactor, persistent=True)
        self._pool.maxPersistentPerHost = settings.getint(
            'CONCURRENT_REQUESTS_PER_DOMAIN')
        self._pool._factory.noisy = False  # silence the factory's connection logging

        self._contextFactory = DownloaderClientContextFactory()

        self._default_maxsize = settings.getint('DOWNLOAD_MAXSIZE')
        self._default_warnsize = settings.getint('DOWNLOAD_WARNSIZE')
        self._fail_on_dataloss = settings.getbool('DOWNLOAD_FAIL_ON_DATALOSS')
        self._disconnect_timeout = 1

    @classmethod
    def from_crawler(cls, crawler):
        return cls(crawler.logformatter, crawler.settings)

    def download_request(self, request, spider):
        """Return a deferred for the HTTP download."""
        logger.debug(*self.lfm.crawled('Spider', spider.name,
                                       'executing download_request', request))
        self.spider = spider
        agent = DownloadAgent(contextFactory=self._contextFactory,
                              pool=self._pool,
                              maxsize=getattr(spider, 'download_maxsize',
                                              self._default_maxsize),
                              warnsize=getattr(spider, 'download_warnsize',
                                               self._default_warnsize),
                              fail_on_dataloss=self._fail_on_dataloss,
                              logformatter=self.lfm)
        return agent.download_request(request)

    def close(self):
        # Close all persistent connections and remove them from the pool;
        # returns a deferred.
        d = self._pool.closeCachedConnections()
        # closeCachedConnections will hang on network or server issues, so we
        # fire the deferred manually after a delay. It also cannot handle
        # external errbacks, so we schedule our own callback to run after
        # `_disconnect_timeout` seconds.
        logger.warning(
            *self.lfm.crawled('Downloader', 'HTTPDownloadHandler', 'closed'))
        delayed_call = reactor.callLater(self._disconnect_timeout, d.callback,
                                         [])

        # If the delayed call is still pending (active), cancel it; inactive
        # means it has already fired or already been cancelled.
        def cancel_delayed_call(result):
            if delayed_call.active():
                delayed_call.cancel()
            return result

        d.addBoth(cancel_delayed_call)
        return d
Example #30
	def __init__(self, session, windowTitle=_("TwitchTV")):
		Screen.__init__(self, session, windowTitle=windowTitle)
		self.skinName = "TwitchStreamGrid"
		self["actions"] = ActionMap(["OkCancelActions", "ColorActions"],
		{
			"ok": self._onOk,
			"cancel": self.close,
			"red": self._onRed,
			"green": self._onGreen,
			"yellow" : self._onYellow,
			"blue": self._onBlue,
		}, -1)

		self["key_red"] = StaticText()
		self["key_green"] = StaticText()
		self["key_blue"] = StaticText()
		self["key_yellow"] = StaticText()
		self._setupButtons()

		sizes = componentSizes[TwitchStreamGrid.SKIN_COMPONENT_KEY]
		self._itemWidth = sizes.get(ComponentSizes.ITEM_WIDTH, 280)
		self._itemHeight = sizes.get(ComponentSizes.ITEM_HEIGHT, 162)
		self._bannerHeight = sizes.get(TwitchStreamGrid.SKIN_COMPONENT_HEADER_HEIGHT, 30)
		self._footerHeight = sizes.get(TwitchStreamGrid.SKIN_COMPONENT_FOOTER_HEIGHT, 60)
		self._itemPadding = sizes.get(TwitchStreamGrid.SKIN_COMPONENT_ITEM_PADDING, 5)
		#one-off calculations
		pad = self._itemPadding * 2
		self._contentWidth = self._itemWidth - pad
		self._contentHeight = self._itemHeight - pad
		self._footerOffset = self._itemHeight - self._itemPadding - self._footerHeight

		self._items = []
		self._list = MenuList(self._items, mode=eListbox.layoutGrid, content=eListboxPythonMultiContent, itemWidth=self._itemWidth, itemHeight=self._itemHeight)
		self["list"] = self._list

		tlf = TemplatedListFonts()
		self._list.l.setFont(0, gFont(tlf.face(tlf.MEDIUM), tlf.size(tlf.MEDIUM)))
		self._list.l.setFont(1, gFont(tlf.face(tlf.SMALLER), tlf.size(tlf.SMALL)))
		self._list.l.setBuildFunc(self._buildFunc, True)

		self.twitch = Twitch()
		self.twitchMiddleware = TwitchMiddleware.instance

		self._picload = ePicLoad()
		self._picload.setPara((self._itemWidth, self._itemHeight, self._itemWidth, self._itemHeight, False, 0, '#000000'))
		self._picload_conn = self._picload.PictureData.connect(self._onDefaultPixmapReady)

		agent = Agent(reactor, contextFactory=TLSSNIContextFactory(), pool=HTTPConnectionPool(reactor))
		self._agent = BrowserLikeRedirectAgent(agent)
		self._cachingDeferred = None

		self._loadDefaultPixmap()

		self._pixmapCache = {}
		self._currentEntry = 0
		self._endEntry = 0
		self.onLayoutFinish.append(self._onLayoutFinish)
		self.onClose.append(self.__onClose)
Example #31
class HTTP11DownloadHandler(object):
    def __init__(self, settings):
        self._pool = HTTPConnectionPool(reactor, persistent=True)
        self._pool.maxPersistentPerHost = settings.getint(
            'CONCURRENT_REQUESTS_PER_DOMAIN')
        self._pool._factory.noisy = False

        self._sslMethod = openssl_methods[settings.get(
            'DOWNLOADER_CLIENT_TLS_METHOD')]
        self._contextFactoryClass = load_object(
            settings['DOWNLOADER_CLIENTCONTEXTFACTORY'])
        # try method-aware context factory
        try:
            self._contextFactory = self._contextFactoryClass(
                method=self._sslMethod)
        except TypeError:
            # use context factory defaults
            self._contextFactory = self._contextFactoryClass()
            msg = """
 '%s' does not accept `method` argument (type OpenSSL.SSL method,\
 e.g. OpenSSL.SSL.SSLv23_METHOD).\
 Please upgrade your context factory class to handle it or ignore it.""" % (
                settings['DOWNLOADER_CLIENTCONTEXTFACTORY'], )
            warnings.warn(msg)
        self._default_maxsize = settings.getint('DOWNLOAD_MAXSIZE')
        self._default_warnsize = settings.getint('DOWNLOAD_WARNSIZE')
        self._disconnect_timeout = 1

    def download_request(self, request, spider):
        """Return a deferred for the HTTP download"""
        agent = ScrapyAgent(contextFactory=self._contextFactory,
                            pool=self._pool,
                            maxsize=getattr(spider, 'download_maxsize',
                                            self._default_maxsize),
                            warnsize=getattr(spider, 'download_warnsize',
                                             self._default_warnsize))
        return agent.download_request(request)

    def close(self):
        d = self._pool.closeCachedConnections()
        # closeCachedConnections will hang on network or server issues, so
        # we'll manually timeout the deferred.
        #
        # Twisted issue addressing this problem can be found here:
        # https://twistedmatrix.com/trac/ticket/7738.
        #
        # closeCachedConnections doesn't handle external errbacks, so we'll
        # issue a callback after `_disconnect_timeout` seconds.
        delayed_call = reactor.callLater(self._disconnect_timeout, d.callback,
                                         [])

        def cancel_delayed_call(result):
            if delayed_call.active():
                delayed_call.cancel()
            return result

        d.addBoth(cancel_delayed_call)
        return d
Example #32
    def __init__(self, name, sygnal, config):
        super(WebpushPushkin, self).__init__(name, sygnal, config)

        nonunderstood = self.cfg.keys() - self.UNDERSTOOD_CONFIG_FIELDS
        if nonunderstood:
            logger.warning(
                "The following configuration fields are not understood: %s",
                nonunderstood,
            )

        self.http_pool = HTTPConnectionPool(reactor=sygnal.reactor)
        self.max_connections = self.get_config("max_connections",
                                               DEFAULT_MAX_CONNECTIONS)
        self.connection_semaphore = DeferredSemaphore(self.max_connections)
        self.http_pool.maxPersistentPerHost = self.max_connections

        tls_client_options_factory = ClientTLSOptionsFactory()

        # use the Sygnal global proxy configuration
        proxy_url = sygnal.config.get("proxy")

        self.http_agent = ProxyAgent(
            reactor=sygnal.reactor,
            pool=self.http_pool,
            contextFactory=tls_client_options_factory,
            proxy_url_str=proxy_url,
        )
        self.http_agent_wrapper = HttpAgentWrapper(self.http_agent)

        self.allowed_endpoints = None  # type: Optional[List[Pattern]]
        allowed_endpoints = self.get_config("allowed_endpoints")
        if allowed_endpoints:
            if not isinstance(allowed_endpoints, list):
                raise PushkinSetupException(
                    "'allowed_endpoints' should be a list or not set")
            self.allowed_endpoints = list(map(glob_to_regex,
                                              allowed_endpoints))
        privkey_filename = self.get_config("vapid_private_key")
        if not privkey_filename:
            raise PushkinSetupException(
                "'vapid_private_key' not set in config")
        if not os.path.exists(privkey_filename):
            raise PushkinSetupException(
                "path in 'vapid_private_key' does not exist")
        try:
            self.vapid_private_key = Vapid.from_file(
                private_key_file=privkey_filename)
        except VapidException as e:
            raise PushkinSetupException(
                "invalid 'vapid_private_key' file") from e
        self.vapid_contact_email = self.get_config("vapid_contact_email")
        if not self.vapid_contact_email:
            raise PushkinSetupException(
                "'vapid_contact_email' not set in config")
        self.ttl = self.get_config("ttl", DEFAULT_TTL)
        if not isinstance(self.ttl, int):
            raise PushkinSetupException("'ttl' must be an int if set")
Example #33
 def __init__(self,
              ip='127.0.0.1',
              port=5000,
              max_cache_size=1000,
              ttl_policy=30):
     TalosVCRestClient.__init__(self, ip, port)
     self.pool = HTTPConnectionPool(reactor)
     self.agent = Agent(reactor, pool=self.pool)
     self.policy_cache = TTLCache(max_cache_size, ttl_policy)
Example #34
    def __init__(
        self,
        reactor: ISynapseReactor,
        tls_client_options_factory: Optional[FederationPolicyForHTTPS],
        user_agent: bytes,
        ip_blacklist: IPSet,
        proxy_reactor: Optional[ISynapseReactor] = None,
        _srv_resolver: Optional[SrvResolver] = None,
        _well_known_resolver: Optional[WellKnownResolver] = None,
    ):
        self._reactor = reactor
        self._clock = Clock(reactor)
        self._pool = HTTPConnectionPool(reactor)
        self._pool.retryAutomatically = False
        self._pool.maxPersistentPerHost = 5
        self._pool.cachedConnectionTimeout = 2 * 60

        if proxy_reactor is None:
            self.proxy_reactor = reactor
        else:
            self.proxy_reactor = proxy_reactor

        proxies = getproxies()
        https_proxy = proxies["https"].encode() if "https" in proxies else None

        self._agent = Agent.usingEndpointFactory(
            self._reactor,
            MatrixHostnameEndpointFactory(
                reactor,
                self.proxy_reactor,
                tls_client_options_factory,
                _srv_resolver,
                https_proxy,
            ),
            pool=self._pool,
        )
        self.user_agent = user_agent

        if _well_known_resolver is None:
            # Note that the name resolver has already been wrapped in a
            # IPBlacklistingResolver by MatrixFederationHttpClient.
            _well_known_resolver = WellKnownResolver(
                self._reactor,
                agent=BlacklistingAgentWrapper(
                    ProxyAgent(
                        self._reactor,
                        self.proxy_reactor,
                        pool=self._pool,
                        contextFactory=tls_client_options_factory,
                        use_proxy=True,
                    ),
                    ip_blacklist=ip_blacklist,
                ),
                user_agent=self.user_agent,
            )

        self._well_known_resolver = _well_known_resolver
Example #35
    def __init__(self, reactor):
        self.reactor = reactor
        pool = HTTPConnectionPool(reactor, persistent=True)
        pool.maxPersistentPerHost = 1
        pool.cachedConnectionTimeout = 600
        self.agent = RedirectAgent(Agent(reactor, pool=pool))
        self.reqQ = HttpReqQ(self.agent, self.reactor)
        self.clientPlaylist = HlsPlaylist()
        self.verbose = False
        self.download = False
        self.outDir = ""
        self.encryptionHandled = False

        # required for the dump durations functionality
        self.dur_dump_file = None
        self.dur_avproble_acc = 0
        self.dur_vt_acc = 0
        self.dur_playlist_acc = 0
Example #36
def _get_agent(connect_timeout=_CONNECT_TIMEOUT):
    context_factory = MyWebClientContextFactory()
    try:
        # HTTPConnectionPool has been present since Twisted version 12.1
        from twisted.web.client import HTTPConnectionPool
        pool = HTTPConnectionPool(reactor, persistent=True)
        pool.maxPersistentPerHost = _MAX_PERSISTENT_PER_HOST
        agent = Agent(reactor,
                      context_factory,
                      connectTimeout=connect_timeout,
                      pool=pool)
    except ImportError:
        from _zenclient import ZenAgent
        agent = ZenAgent(reactor,
                         context_factory,
                         persistent=True,
                         maxConnectionsPerHostName=1)
    return agent
Example #37
class CouchStateTests(CouchDBTestCase):
    def setUp(self):
        CouchDBTestCase.setUp(self)
        self.db = self.couch_server.create('user-' + uuid4().hex)
        self.addCleanup(self.delete_db, self.db.name)
        self.pool = HTTPConnectionPool(reactor, persistent=False)
        self.agent = Agent(reactor, pool=self.pool)

    @defer.inlineCallbacks
    def tearDown(self):
        yield self.pool.closeCachedConnections()

    @restricted_listing
    @defer.inlineCallbacks
    def test__check_db_schema_version_wrong_schema_version_raises(self):
        wrong_schema_version = SCHEMA_VERSION + 1
        self.db.create({
            '_id': CONFIG_DOC_ID,
            SCHEMA_VERSION_KEY: wrong_schema_version
        })
        with pytest.raises(WrongCouchSchemaVersionError):
            yield _check_db_schema_version(self.couch_url,
                                           self.db.name,
                                           None,
                                           agent=self.agent)

    @restricted_listing
    @defer.inlineCallbacks
    def test_check_schema_versions_wrong_schema_version_raises(self):
        wrong_schema_version = SCHEMA_VERSION + 1
        self.db.create({
            '_id': CONFIG_DOC_ID,
            SCHEMA_VERSION_KEY: wrong_schema_version
        })
        expected_msg = 'Error checking CouchDB schema versions: ' \
                       'FirstError.*WrongCouchSchemaVersionError()'
        with pytest.raises(Exception, match=expected_msg):
            yield check_schema_versions(self.couch_url, agent=self.agent)

    @restricted_listing
    @defer.inlineCallbacks
    def test__check_db_schema_version_missing_config_doc_raises(self):
        self.db.create({})
        with pytest.raises(MissingCouchConfigDocumentError):
            yield _check_db_schema_version(self.couch_url,
                                           self.db.name,
                                           None,
                                           agent=self.agent)

    @restricted_listing
    @defer.inlineCallbacks
    def test_check_schema_versions_missing_config_doc_raises(self):
        self.db.create({})
        expected_msg = 'Error checking CouchDB schema versions: ' \
                       'FirstError.*MissingCouchConfigDocumentError()'
        with pytest.raises(Exception, match=expected_msg):
            yield check_schema_versions(self.couch_url, agent=self.agent)
Example #38
    def __init__(self, reactor, node=('localhost', 4001), ca=None, cert=None):
        self.reactor = reactor
        self.node = node
        self.scheme = 'http'
        self.ca = ca
        self.cert = cert
        context = None
        if ca:
            self.scheme = 'https'
            context = PolicyForHTTPS(ca, cert)

        quietPool = HTTPConnectionPool(reactor, persistent=True)
        quietPool.maxPersistentPerHost = 2
        quietPool._factory = QuietHTTP11ClientFactory

        self.agent = Agent(self.reactor,
                           contextFactory=context,
                           pool=quietPool)
Example #39
    def __init__(self):
        self.b = Breaker()

        pool = HTTPConnectionPool(reactor)
        self.ua = Agent(
            reactor,
            connectTimeout=0.25,
            pool=pool,
        )
Example #40
def _get_agent():
    global _AGENT
    if _AGENT is None:
        try:
            # HTTPConnectionPool has been present since Twisted version 12.1
            from twisted.web.client import HTTPConnectionPool
            pool = HTTPConnectionPool(reactor, persistent=True)
            pool.maxPersistentPerHost = _MAX_PERSISTENT_PER_HOST
            pool.cachedConnectionTimeout = _CACHED_CONNECTION_TIMEOUT
            _AGENT = Agent(
                reactor, connectTimeout=_CONNECT_TIMEOUT, pool=pool)
        except ImportError:
            try:
                # connectTimeout first showed up in Twisted version 11.1
                _AGENT = Agent(reactor, connectTimeout=_CONNECT_TIMEOUT)
            except TypeError:
                _AGENT = Agent(reactor)
    return _AGENT
Example #41
    def __init__(self, config):
        self.base_url = config["harold"]["base-url"]
        self.secret = config["harold"]["hmac-secret"]

        self.log = logging.getLogger(__name__)
        self.connection_pool = HTTPConnectionPool(reactor)
        self.agent = Agent(reactor,
                           connectTimeout=TIMEOUT_SECONDS,
                           pool=self.connection_pool)
Example #42
def _default_client(jws_client, reactor, key, alg):
    """
    Make a client if we didn't get one.
    """
    if jws_client is None:
        pool = HTTPConnectionPool(reactor)
        agent = Agent(reactor, pool=pool)
        jws_client = JWSClient(HTTPClient(agent=agent), key, alg)
    return jws_client
Example #43
class HTTP11DownloadHandler:
    lazy = False

    def __init__(self, settings, crawler=None):
        self._crawler = crawler

        from twisted.internet import reactor
        self._pool = HTTPConnectionPool(reactor, persistent=True)
        self._pool.maxPersistentPerHost = settings.getint(
            'CONCURRENT_REQUESTS_PER_DOMAIN')
        self._pool._factory.noisy = False

        self._contextFactory = load_context_factory_from_settings(
            settings, crawler)
        self._default_maxsize = settings.getint('DOWNLOAD_MAXSIZE')
        self._default_warnsize = settings.getint('DOWNLOAD_WARNSIZE')
        self._fail_on_dataloss = settings.getbool('DOWNLOAD_FAIL_ON_DATALOSS')
        self._disconnect_timeout = 1

    @classmethod
    def from_crawler(cls, crawler):
        return cls(crawler.settings, crawler)

    def download_request(self, request, spider):
        """Return a deferred for the HTTP download"""
        agent = ScrapyAgent(
            contextFactory=self._contextFactory,
            pool=self._pool,
            maxsize=getattr(spider, 'download_maxsize', self._default_maxsize),
            warnsize=getattr(spider, 'download_warnsize',
                             self._default_warnsize),
            fail_on_dataloss=self._fail_on_dataloss,
            crawler=self._crawler,
        )
        return agent.download_request(request)

    def close(self):
        from twisted.internet import reactor
        d = self._pool.closeCachedConnections()
        # closeCachedConnections will hang on network or server issues, so
        # we'll manually timeout the deferred.
        #
        # Twisted issue addressing this problem can be found here:
        # https://twistedmatrix.com/trac/ticket/7738.
        #
        # closeCachedConnections doesn't handle external errbacks, so we'll
        # issue a callback after `_disconnect_timeout` seconds.
        delayed_call = reactor.callLater(self._disconnect_timeout, d.callback,
                                         [])

        def cancel_delayed_call(result):
            if delayed_call.active():
                delayed_call.cancel()
            return result

        d.addBoth(cancel_delayed_call)
        return d
Example #44
    def __init__(self, reactor):
        self.reactor = reactor
        pool = HTTPConnectionPool(reactor, persistent=True)
        pool.maxPersistentPerHost = 1
        pool.cachedConnectionTimeout = 600
        self.agent = RedirectAgent(Agent(reactor, pool=pool))
        self.reqQ = HttpReqQ(self.agent, self.reactor)
        self.clientPlaylist = HlsPlaylist()
        self.verbose = False
        self.download = False
        self.outDir = ""
        self.encryptionHandled = False

        # required for the dump durations functionality
        self.dur_dump_file = None
        self.dur_avproble_acc = 0
        self.dur_vt_acc = 0
        self.dur_playlist_acc = 0
Example #45
	def parallelFetchAllProblems(self):
			pool = HTTPConnectionPool(reactor)
			pool.maxPersistentPerHost = self.concurrency
			agent = Agent(reactor, pool=pool)
			sem = DeferredSemaphore(self.concurrency)
			self.done = 0

			def assign():
				self.query.execute('BEGIN')
				for id in range(1001, self.getProblemMax()+1):
					sem.acquire().addCallback(requestFactory, id)
			
			def requestFactory(token, id):
				deferred = agent.request('GET', self.baseUrl + self.problemPath + str(id))
				deferred.addCallback(onHeader, id)
				deferred.addErrback(errorHandler, id)
				return deferred
			
			def onHeader(response, id):
				deferred = readBody(response)
				deferred.addCallback(onBody, id)
				deferred.addErrback(errorHandler, id)
				return deferred

			def onBody(html, id):
				sem.release()
				d = pyq(html)
				title = d('#content_body > center:nth-child(1) > span').text(),
				body = d('#content_body').text()
				print('Fetched ProblemID: %s, Title: %s, done: %s' % (id, title[0], self.done))
				self.storeProblem(id, title[0], body)
				self.done += 1
				if(self.done == self.problemCount):
					print('Fetch data used %s s' % (reactor.seconds() - startTimeStamp))
					print('Fetch data end, writing to database')
					self.query.execute('COMMIT')
					reactor.stop()

			def errorHandler(err, id):
				print('[%s] id %s: %s' % (reactor.seconds() - startTimeStamp, id, err))

			startTimeStamp = reactor.seconds()
			reactor.callWhenRunning(assign)
			reactor.run()
Example #46
 def make_web_agent(self):
     """
     Configure a `Twisted.web.client.Agent` to be used to make REST calls.
     """
     self.pool = HTTPConnectionPool(self.reactor)
     self.agent = Agent.usingEndpointFactory(self.reactor,
                                             WebClientEndpointFactory(
                                                 self.reactor,
                                                 self.endpoint_s),
                                             pool=self.pool)
Example #47
def main():
    pool = HTTPConnectionPool(reactor)
    agent = Agent(reactor, connectTimeout=10, pool=pool)

    d = tryRequestUntilSuccess(agent, 'GET',
                               'http://data.mtgox.com/api/0/data/ticker.php')
    d.addCallback(printBody)
    d.addErrback(log.err, 'error fetching ticker')
    d.addCallback(lambda ignored: reactor.stop())
    reactor.run()
Example #48
 def __init__(self, uuid, token, cert_file):
     self._uuid = uuid
     self._token = None
     self._creds = None
     self.set_token(token)
     # pin this agent with the platform TLS certificate
     factory = getPolicyForHTTPS(cert_file)
     persistent = os.environ.get('SOLEDAD_HTTP_PERSIST', None)
     pool = HTTPConnectionPool(reactor, persistent=bool(persistent))
     Agent.__init__(self, reactor, contextFactory=factory, pool=pool)
Example #49
def http_request_full(url,
                      data=None,
                      headers={},
                      method='POST',
                      timeout=None,
                      data_limit=None,
                      context_factory=None,
                      agent_class=None,
                      reactor=None):
    """
    This is a drop-in replacement for the original `http_request_full` method,
    with its internals completely replaced by treq. treq supports SNI while our
    implementation does not, and we do not want to keep maintaining this
    because we favour treq everywhere anyway.

    """
    agent_class = agent_class or Agent
    if reactor is None:
        # The import replaces the local variable.
        from twisted.internet import reactor
    kwargs = {'pool': HTTPConnectionPool(reactor, persistent=False)}
    if context_factory is not None:
        kwargs['contextFactory'] = context_factory
    agent = agent_class(reactor, **kwargs)
    client = HTTPClient(agent)

    def handle_response(response):
        return SimplishReceiver(response, data_limit).deferred

    d = client.request(method, url, headers=headers, data=data)
    d.addCallback(handle_response)

    if timeout is not None:
        cancelling_on_timeout = [False]

        def raise_timeout(reason):
            if not cancelling_on_timeout[0] or reason.check(HttpTimeoutError):
                return reason
            return Failure(HttpTimeoutError("Timeout while connecting"))

        def cancel_on_timeout():
            cancelling_on_timeout[0] = True
            d.cancel()

        def cancel_timeout(r, delayed_call):
            if delayed_call.active():
                delayed_call.cancel()
            return r

        d.addErrback(raise_timeout)
        delayed_call = reactor.callLater(timeout, cancel_on_timeout)
        d.addCallback(cancel_timeout, delayed_call)

    return d
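
A hedged usage sketch of the helper above; the URL, payload, and headers are illustrative, and the shape of the delivered response depends on `SimplishReceiver`, which is defined outside this snippet:

def on_response(response):
    # SimplishReceiver fires the deferred once the body has been received;
    # what `response` looks like depends on its implementation (not shown).
    print('got response:', response)

def on_error(failure):
    print('request failed or timed out:', failure)

d = http_request_full(
    'http://example.com/api',
    data='{"ping": true}',
    headers={'Content-Type': ['application/json']},
    method='POST',
    timeout=30)
d.addCallback(on_response)
d.addErrback(on_error)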
Example #50
class HTTP11DownloadHandler(object):
    lazy = False

    def __init__(self, settings):
        self._pool = HTTPConnectionPool(reactor, persistent=True)
        self._pool.maxPersistentPerHost = settings.getint('CONCURRENT_REQUESTS_PER_DOMAIN')
        self._pool._factory.noisy = False

        self._sslMethod = openssl_methods[settings.get('DOWNLOADER_CLIENT_TLS_METHOD')]
        self._contextFactoryClass = load_object(settings['DOWNLOADER_CLIENTCONTEXTFACTORY'])
        # try method-aware context factory
        try:
            self._contextFactory = self._contextFactoryClass(method=self._sslMethod)
        except TypeError:
            # use context factory defaults
            self._contextFactory = self._contextFactoryClass()
            msg = """
 '%s' does not accept `method` argument (type OpenSSL.SSL method,\
 e.g. OpenSSL.SSL.SSLv23_METHOD).\
 Please upgrade your context factory class to handle it or ignore it.""" % (
                settings['DOWNLOADER_CLIENTCONTEXTFACTORY'],)
            warnings.warn(msg)
        self._default_maxsize = settings.getint('DOWNLOAD_MAXSIZE')
        self._default_warnsize = settings.getint('DOWNLOAD_WARNSIZE')
        self._fail_on_dataloss = settings.getbool('DOWNLOAD_FAIL_ON_DATALOSS')
        self._disconnect_timeout = 1

    def download_request(self, request, spider):
        """Return a deferred for the HTTP download"""
        agent = ScrapyAgent(contextFactory=self._contextFactory, pool=self._pool,
            maxsize=getattr(spider, 'download_maxsize', self._default_maxsize),
            warnsize=getattr(spider, 'download_warnsize', self._default_warnsize),
            fail_on_dataloss=self._fail_on_dataloss)
        return agent.download_request(request)

    def close(self):
        d = self._pool.closeCachedConnections()
        # closeCachedConnections will hang on network or server issues, so
        # we'll manually timeout the deferred.
        #
        # Twisted issue addressing this problem can be found here:
        # https://twistedmatrix.com/trac/ticket/7738.
        #
        # closeCachedConnections doesn't handle external errbacks, so we'll
        # issue a callback after `_disconnect_timeout` seconds.
        delayed_call = reactor.callLater(self._disconnect_timeout, d.callback, [])

        def cancel_delayed_call(result):
            if delayed_call.active():
                delayed_call.cancel()
            return result

        d.addBoth(cancel_delayed_call)
        return d
Example #51
 def __init__(self, common_headers=None, hang_up=True, use_cookies=True, pool=True, dns=True, max_hops=5, connection_timeout=10, verbose=False):
     if pool:
         self.connection_pool = HTTPConnectionPool(reactor, persistent=True)
     else:
         self.connection_pool = HTTPConnectionPool(reactor, persistent=False)

     if use_cookies:
         cookieJar = CookieJar()
         self.agent = CookieAgent(Agent(reactor, pool=self.connection_pool), cookieJar)
     else:
         self.agent = Agent(reactor, pool=self.connection_pool)

     if verbose:
         log.startLogging(sys.stdout)

     self.hang_up = hang_up

     self.common_headers = common_headers
     self.max_hops = max_hops
     self.connection_timeout = connection_timeout
Example #52
    def __init__(self, receiver):
        """Constructor

        Args:
        receiver: An instance of IARIEventReceiver
        """
        super(AsyncHTTPClient, self).__init__()
        self.receiver = receiver
        self.http_pool = HTTPConnectionPool(reactor)
        self.agent = Agent(reactor, pool=self.http_pool)
        self.authenticator = None
        self.ws_conn = None
Example #53
    def __init__(self, hs):
        self.hs = hs
        self.signing_key = hs.config.signing_key[0]
        self.server_name = hs.hostname
        reactor = hs.get_reactor()
        pool = HTTPConnectionPool(reactor)
        pool.retryAutomatically = False
        pool.maxPersistentPerHost = 5
        pool.cachedConnectionTimeout = 2 * 60
        self.agent = Agent.usingEndpointFactory(
            reactor, MatrixFederationEndpointFactory(hs), pool=pool
        )
        self.clock = hs.get_clock()
        self._store = hs.get_datastore()
        self.version_string_bytes = hs.version_string.encode('ascii')
        self.default_timeout = 60

        def schedule(x):
            reactor.callLater(_EPSILON, x)

        self._cooperator = Cooperator(scheduler=schedule)
Example #54
    def agent(self, scheme='http'):
        if not self._agents:
            pool = HTTPConnectionPool(reactor)
            pool.maxPersistentPerHost = 10
            pool.cachedConnectionTimeout = 15

            contextFactory = PermissiveBrowserLikePolicyForHTTPS()
            proxies = getproxies()

            if 'http' in proxies or 'https' in proxies:
                # I've noticed some intermittent failures (ResponseNeverReceived) to
                # POST request through a proxy when persistent connections are enabled.
                pool.persistent = False

            if 'https' in proxies:
                proxy = urlparse(proxies.get('https'))
                if proxy:
                    # Note- this isn't going to work completely.  It's not being
                    # passed the modified contextFactory, and in fact it doesn't
                    # even work properly for other reasons (ZPS-2061)
                    log.info("Creating https proxy (%s:%s)" % (proxy.hostname, proxy.port))
                    endpoint = TCP4ClientEndpoint(reactor, proxy.hostname, proxy.port, timeout=CONNECT_TIMEOUT)
                    SessionManager._agents['https'] = \
                        ProxyAgent(endpoint, reactor, pool=pool)
            else:
                SessionManager._agents['https'] = \
                    Agent(reactor, pool=pool, connectTimeout=CONNECT_TIMEOUT, contextFactory=contextFactory)

            if 'http' in proxies:
                proxy = urlparse(proxies.get('http'))
                if proxy:
                    log.info("Creating http proxy (%s:%s)" % (proxy.hostname, proxy.port))
                    endpoint = TCP4ClientEndpoint(reactor, proxy.hostname, proxy.port, timeout=CONNECT_TIMEOUT)
                    SessionManager._agents['http'] = \
                        ProxyAgent(endpoint, reactor, pool=pool)
            else:
                SessionManager._agents['http'] = \
                    Agent(reactor, pool=pool, connectTimeout=CONNECT_TIMEOUT)

        return SessionManager._agents[scheme]
Example #55
    def __init__(self, page_archiver, cookie_file=None):
        self._logger = logging.getLogger(__name__)
        self._page_archiver = page_archiver
        self._logger.debug('Using page archiver: %s. Cookie file: %s',
                           page_archiver is not None,
                           cookie_file)
        if cookie_file:
            umask = os.umask(077)
            self._cj = LWPCookieJar(cookie_file)
            try:
                self._cj.load()
            except LoadError:
                self._logger.warning('Cannot load cookies from %s' % (cookie_file, ))
            os.umask(umask)
        else:
            self._cj = CookieJar()

        pool = HTTPConnectionPool(reactor, persistent=True)
        pool.maxPersistentPerHost = 10
        self._agent = CookieAgent(ContentDecoderAgent(Agent(reactor, pool=pool),
                                                       [('gzip', GzipDecoder)]), self._cj)
        self._lock = Lock()
Example #56
 def __init__(self, server, secret, proxy=None):
     """
     :param server: URL of the server.
     :param secret: Passphrase. Only clients with the same secret can
       interact, even when using the same server.
     :param proxy: URL of the proxy; an empty string means no proxy. `None`
       checks the environment variable `http_proxy`.
     """
     self.server = server
     self.encryption_key = _make_key(secret)
     self.proxy = proxy
     self.pool = HTTPConnectionPool(reactor, persistent=True)
     self.pool.maxPersistentPerHost = 1024
Example #57
class CouchStateTests(CouchDBTestCase):

    def setUp(self):
        CouchDBTestCase.setUp(self)
        self.db = self.couch_server.create('user-' + uuid4().hex)
        self.addCleanup(self.delete_db, self.db.name)
        self.pool = HTTPConnectionPool(reactor, persistent=False)
        self.agent = Agent(reactor, pool=self.pool)

    @defer.inlineCallbacks
    def tearDown(self):
        yield self.pool.closeCachedConnections()

    @restricted_listing
    @defer.inlineCallbacks
    def test__check_db_schema_version_wrong_schema_version_raises(self):
        wrong_schema_version = SCHEMA_VERSION + 1
        self.db.create(
            {'_id': CONFIG_DOC_ID, SCHEMA_VERSION_KEY: wrong_schema_version})
        with pytest.raises(WrongCouchSchemaVersionError):
            yield _check_db_schema_version(
                self.couch_url, self.db.name, None, agent=self.agent)

    @restricted_listing
    @defer.inlineCallbacks
    def test_check_schema_versions_wrong_schema_version_raises(self):
        wrong_schema_version = SCHEMA_VERSION + 1
        self.db.create(
            {'_id': CONFIG_DOC_ID, SCHEMA_VERSION_KEY: wrong_schema_version})
        expected_msg = 'Error checking CouchDB schema versions: ' \
                       'FirstError.*WrongCouchSchemaVersionError()'
        with pytest.raises(Exception, match=expected_msg):
            yield check_schema_versions(self.couch_url, agent=self.agent)

    @restricted_listing
    @defer.inlineCallbacks
    def test__check_db_schema_version_missing_config_doc_raises(self):
        self.db.create({})
        with pytest.raises(MissingCouchConfigDocumentError):
            yield _check_db_schema_version(
                self.couch_url, self.db.name, None, agent=self.agent)

    @restricted_listing
    @defer.inlineCallbacks
    def test_check_schema_versions_missing_config_doc_raises(self):
        self.db.create({})
        expected_msg = 'Error checking CouchDB schema versions: ' \
                       'FirstError.*MissingCouchConfigDocumentError()'
        with pytest.raises(Exception, match=expected_msg):
            yield check_schema_versions(self.couch_url, agent=self.agent)
Example #58
class FTPFuncTest(unittest.TestCase):
    @defer.inlineCallbacks
    def setUp(self):
        self.pool = HTTPConnectionPool(reactor, persistent=True)
        self.swift = get_swift_client(conf, pool=self.pool)
        self.tmpdir = tempfile.mkdtemp()
        self.ftp = get_ftp_client(conf)
        yield clean_swift(self.swift)

    @defer.inlineCallbacks
    def tearDown(self):
        shutil.rmtree(self.tmpdir)
        self.ftp.close()
        yield clean_swift(self.swift)
        yield self.pool.closeCachedConnections()
Example #59
 def startService(self):
     # treq only supports basicauth, so we force txrequests if the auth is something else
     if self._auth is not None and not isinstance(self._auth, tuple):
         self.PREFER_TREQ = False
     if txrequests is not None and not self.PREFER_TREQ:
         self._session = txrequests.Session()
         self._doRequest = self._doTxRequest
     elif treq is None:
         raise ImportError("{classname} requires either txrequest or treq install."
                           " Users should call {classname}.checkAvailable() during checkConfig()"
                           " to properly alert the user.".format(classname=self.__class__.__name__))
     else:
         self._doRequest = self._doTReq
         self._pool = HTTPConnectionPool(self.master.reactor)
         self._pool.maxPersistentPerHost = self.MAX_THREADS
         self._agent = Agent(self.master.reactor, pool=self._pool)
Example #60
    def __init__(self, settings):
        self._pool = HTTPConnectionPool(reactor, persistent=True)
        self._pool.maxPersistentPerHost = settings.getint('CONCURRENT_REQUESTS_PER_DOMAIN')
        self._pool._factory.noisy = False

        self._sslMethod = openssl_methods[settings.get('DOWNLOADER_CLIENT_TLS_METHOD')]
        self._contextFactoryClass = load_object(settings['DOWNLOADER_CLIENTCONTEXTFACTORY'])
        # try method-aware context factory
        try:
            self._contextFactory = self._contextFactoryClass(method=self._sslMethod)
        except TypeError:
            # use context factory defaults
            self._contextFactory = self._contextFactoryClass()
            msg = """
 '%s' does not accept `method` argument (type OpenSSL.SSL method,\
 e.g. OpenSSL.SSL.SSLv23_METHOD).\
 Please upgrade your context factory class to handle it or ignore it.""" % (
                settings['DOWNLOADER_CLIENTCONTEXTFACTORY'],)
            warnings.warn(msg)
        self._default_maxsize = settings.getint('DOWNLOAD_MAXSIZE')
        self._default_warnsize = settings.getint('DOWNLOAD_WARNSIZE')
        self._disconnect_timeout = 1