Code example #1
    def requestAvatarId(self, c):
        creds = credentials.IUsernamePassword(c, None)

        if creds is not None:
            locks = []
            pool = HTTPConnectionPool(reactor, persistent=False)
            pool.cachedConnectionTimeout = self.timeout
            if self.max_concurrency:
                pool.persistent = True
                pool.maxPersistentPerHost = self.max_concurrency
                locks.append(defer.DeferredSemaphore(self.max_concurrency))

            if self.global_max_concurrency:
                locks.append(
                    defer.DeferredSemaphore(self.global_max_concurrency))

            conn = ThrottledSwiftConnection(locks,
                                            self.auth_url,
                                            creds.username,
                                            creds.password,
                                            pool=pool,
                                            extra_headers=self.extra_headers,
                                            verbose=self.verbose)
            conn.user_agent = USER_AGENT

            d = conn.authenticate()
            d.addCallback(self._after_auth, conn)
            d.addErrback(eb_failed_auth)
            return d
        return defer.fail(error.UnauthorizedLogin())
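In this snippet the pool's maxPersistentPerHost only controls how many idle connections are cached per host; the DeferredSemaphore objects handed to ThrottledSwiftConnection are what actually bound the number of in-flight requests. A minimal sketch of that semaphore-plus-pool pattern in isolation (the fetch helper and URL handling are illustrative, not part of the original code):

from twisted.internet import defer, reactor
from twisted.web.client import Agent, HTTPConnectionPool, readBody

# Illustrative sketch: cache at most 4 persistent connections per host and
# never run more than 4 requests at once.
pool = HTTPConnectionPool(reactor, persistent=True)
pool.maxPersistentPerHost = 4
agent = Agent(reactor, pool=pool)
semaphore = defer.DeferredSemaphore(4)

def fetch(url):
    # url must be bytes, per Agent.request()
    d = agent.request(b'GET', url)
    d.addCallback(readBody)
    return d

def throttled_fetch(url):
    # run() acquires a token, calls fetch(url), and releases the token
    # when the returned Deferred fires.
    return semaphore.run(fetch, url)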
Code example #2
 def __init__(self, host, port=None, pool_maxsize=2, scheme='http://'):
     self.scheme = scheme
     self.host = host
     self.port = port
     self._pool = HTTPConnectionPool(reactor)
     self._pool.maxPersistentPerHost = pool_maxsize
     self.agent = Agent(reactor, pool=self._pool)
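The constructor above only wires the pool into the Agent; requests go through self.agent as usual, and long-lived callers should eventually drop the idle connections the pool keeps. A hedged usage sketch, assuming the class is named PooledHTTPClient (its real name is not shown in the snippet):

from twisted.web.client import readBody

# Illustrative usage only.
client = PooledHTTPClient('example.com', port=80, pool_maxsize=4)
d = client.agent.request(b'GET', b'http://example.com/')
d.addCallback(readBody)
# Drop the idle pooled connections once the response has been handled.
d.addBoth(lambda _: client._pool.closeCachedConnections())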
Code example #3
File: base_api_test.py  Project: paolomarin/tribler
 def setUp(self, autoload_discovery=True):
     yield super(AbstractBaseApiTest,
                 self).setUp(autoload_discovery=autoload_discovery)
     self.connection_pool = HTTPConnectionPool(reactor, False)
     terms = self.session.lm.category.xxx_filter.xxx_terms
     terms.add("badterm")
     self.session.lm.category.xxx_filter.xxx_terms = terms
Code example #4
File: __init__.py  Project: lfany/certifi.io
def makeService(config):
    """
    Create the service serving the mkcert data.
    """
    from twisted.internet import reactor

    # We need a HTTP connection pool for rproxy.
    pool = HTTPConnectionPool(reactor)

    proxyResource = RProxyResource(
        hosts=hosts,
        pool=pool,
        customHeaders=customHeaders,
        reactor=reactor
    )
    redirectResource = RedirectResource()

    secureSite = Site(proxyResource)
    insecureSite = Site(redirectResource)

    multiService = service.MultiService()
    multiService.addService(
        strports.service('le:/certs:tcp:' + HTTPS_PORT, secureSite)
    )
    multiService.addService(
        strports.service("tcp:" + HTTP_PORT, insecureSite)
    )
    return multiService
Code example #5
    def __init__(self, token):
        self._pool = HTTPConnectionPool(reactor)
        self._token = token
        self._queues = collections.defaultdict(list)
        self._retry_timers = {}

        self._pool._factory.noisy = False
Code example #6
File: gcmpushkin.py  Project: yousefalatari/sygnal
    def __init__(self, name, sygnal, config, canonical_reg_id_store):
        super(GcmPushkin, self).__init__(name, sygnal, config)

        nonunderstood = set(self.cfg.keys()).difference(
            self.UNDERSTOOD_CONFIG_FIELDS)
        if len(nonunderstood) > 0:
            logger.warning(
                "The following configuration fields are not understood: %s",
                nonunderstood,
            )

        self.http_pool = HTTPConnectionPool(reactor=sygnal.reactor)
        self.max_connections = self.get_config("max_connections",
                                               DEFAULT_MAX_CONNECTIONS)
        self.connection_semaphore = DeferredSemaphore(self.max_connections)
        self.http_pool.maxPersistentPerHost = self.max_connections

        tls_client_options_factory = ClientTLSOptionsFactory()

        self.http_agent = Agent(
            reactor=sygnal.reactor,
            pool=self.http_pool,
            contextFactory=tls_client_options_factory,
        )

        self.db = sygnal.database
        self.canonical_reg_id_store = canonical_reg_id_store

        self.api_key = self.get_config("api_key")
        if not self.api_key:
            raise PushkinSetupException("No API key set in config")
Code example #7
 def setUp(self):
     self.active_connections = []
     self.pool = HTTPConnectionPool(reactor, persistent=True)
     self.swift = get_swift_client(CONFIG, pool=self.pool)
     self.tmpdir = tempfile.mkdtemp()
     _, self.sftp = self.get_client()
     yield clean_swift(self.swift)
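The setUp above opens a persistent pool, a Swift client, and a temporary directory, but the matching cleanup is not shown. A hedged sketch of the tearDown that would usually accompany it (the shutil import and the exact teardown steps are assumptions, not taken from the original test):

 def tearDown(self):
     # Assumed counterpart to setUp: close idle pooled connections so the
     # test reactor is left clean, then remove the scratch directory.
     yield self.pool.closeCachedConnections()
     shutil.rmtree(self.tmpdir, ignore_errors=True)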
Code example #8
    def from_nurl(cls,
                  nurl: DecodedURL,
                  reactor,
                  persistent: bool = True) -> StorageClient:
        """
        Create a ``StorageClient`` for the given NURL.

        ``persistent`` indicates whether to use persistent HTTP connections.
        """
        assert nurl.fragment == "v=1"
        assert nurl.scheme == "pb"
        swissnum = nurl.path[0].encode("ascii")
        certificate_hash = nurl.user.encode("ascii")

        treq_client = HTTPClient(
            Agent(
                reactor,
                _StorageClientHTTPSPolicy(expected_spki_hash=certificate_hash),
                pool=HTTPConnectionPool(reactor, persistent=persistent),
            ))

        https_url = DecodedURL().replace(scheme="https",
                                         host=nurl.host,
                                         port=nurl.port)
        return cls(https_url, swissnum, treq_client)
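Per the docstring, the NURL carries the certificate hash as userinfo, the swissnum as the first path segment, and "v=1" as the fragment. A hedged sketch of calling from_nurl, with a made-up NURL literal (hyperlink's DecodedURL.from_text is used to build it):

from hyperlink import DecodedURL
from twisted.internet import reactor

# Illustrative NURL: certificate hash as userinfo, swissnum as the path segment.
nurl = DecodedURL.from_text(u"pb://certhash@storage.example:3456/a-swissnum#v=1")
client = StorageClient.from_nurl(nurl, reactor, persistent=True)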
Code example #9
    def __init__(self, logformatter, settings):
        # Connection pool manager: connections are not closed automatically
        # when a request finishes; they stay in the cache and are reused.
        self.lfm = logformatter
        #logger.debug(*self.lfm.crawled)
        logger.debug(*self.lfm.crawled(
            'Downloader',
            'HTTPDownloadHandler',
            'initialized'
        ))
        self.settings = settings
        self._pool = HTTPConnectionPool(reactor, persistent=True)
        self._pool.maxPersistentPerHost = self.settings.getint('CONCURRENT_REQUESTS_PER_DOMAIN')
        self._pool._factory.noisy = False  # suppress verbose connection logging from the factory

        self._contextFactory_without_proxy = DownloaderClientContextFactory()
        self._contextFactory_with_proxy = ScrapyClientContextFactory()
        self._contextFactory = (self._contextFactory_without_proxy, self._contextFactory_with_proxy)

        self._default_maxsize = self.settings.getint('DOWNLOAD_MAXSIZE')
        self._default_warnsize = self.settings.getint('DOWNLOAD_WARNSIZE')
        self._fail_on_dataloss = self.settings.getbool('DOWNLOAD_FAIL_ON_DATALOSS')
        self._default_download_timeout = self.settings.getint('DOWNLOAD_TIMEOUT')
        if self._default_download_timeout == 0:
            raise ValueError('DOWNLOAD_TIMEOUT must not be 0')
        self._disconnect_timeout = 1
Code example #10
File: transports.py  Project: dev0p0/warpzone
    def __init__(self, reactor, iface, options):

        self.reactor = reactor
        pool = HTTPConnectionPool(self.reactor)
        self.agent = Agent(self.reactor, pool=pool, connectTimeout=5)
        self.iface = iface
        self.options = options
Code example #11
 def setUp(self):
     yield super(TestEventsEndpoint, self).setUp()
     self.events_deferred = Deferred()
     self.connection_pool = HTTPConnectionPool(reactor, False)
     self.socket_open_deferred = self.tribler_started_deferred.addCallback(
         self.open_events_socket)
     self.messages_to_wait_for = 0
Code example #12
    def setUp(self):
        reactor.suggestThreadPoolSize(1)
        connection_string = os.environ.get("SHORTENER_TEST_CONNECTION_STRING",
                                           "sqlite://")

        self.account = 'test-account'
        cfg = {
            'host_domain': 'http://wtxt.io',
            'account': self.account,
            'connection_string': connection_string,
            'graphite_endpoint': 'tcp:www.example.com:80',
            'handlers': [
                {
                    'dump': 'shortener.handlers.dump.Dump'
                },
            ],
        }
        self.pool = HTTPConnectionPool(reactor, persistent=False)
        self.service = ShortenerServiceApp(reactor=reactor, config=cfg)

        self.tr = DisconnectingStringTransport()
        endpoint = StringTransportClientEndpoint(reactor, self.tr)
        self.service.metrics.carbon_client = CarbonClientService(endpoint)
        self.service.metrics.carbon_client.startService()
        yield self.service.metrics.carbon_client.connect_d

        site = Site(self.service.app.resource())
        self.listener = reactor.listenTCP(0, site, interface='localhost')
        self.listener_port = self.listener.getHost().port
        self._drop_tables()
        self.conn = yield self.service.engine.connect()
        self.addCleanup(self.listener.loseConnection)
        self.addCleanup(self.pool.closeCachedConnections)
Code example #13
    def __init__(self,
                 host='127.0.0.1',
                 port=8500,
                 scheme='http',
                 verify=True,
                 cert=None,
                 contextFactory=None,
                 **kwargs):
        self.host = host
        self.port = port
        self.scheme = scheme
        self.base_uri = '%s://%s:%s' % (self.scheme, self.host, self.port)

        agent_kwargs = dict(reactor=reactor,
                            pool=HTTPConnectionPool(reactor),
                            **kwargs)
        if contextFactory is not None:
            # use the provided context factory
            agent_kwargs['contextFactory'] = contextFactory
        elif not verify:
            # if no context is provided and verify is set to false, use the
            # insecure context factory implementation
            agent_kwargs['contextFactory'] = InsecureContextFactory()

        self.client = TreqHTTPClient(Agent(**agent_kwargs))
Code example #14
    def __init__(
        self,
        reactor,
        tls_client_options_factory,
        _srv_resolver=None,
        _well_known_cache=None,
    ):
        self._reactor = reactor
        self._clock = Clock(reactor)

        self._tls_client_options_factory = tls_client_options_factory
        if _srv_resolver is None:
            _srv_resolver = SrvResolver()
        self._srv_resolver = _srv_resolver

        self._pool = HTTPConnectionPool(reactor)
        self._pool.retryAutomatically = False
        self._pool.maxPersistentPerHost = 5
        self._pool.cachedConnectionTimeout = 2 * 60

        self._well_known_resolver = WellKnownResolver(
            self._reactor,
            agent=Agent(
                self._reactor,
                pool=self._pool,
                contextFactory=tls_client_options_factory,
            ),
            well_known_cache=_well_known_cache,
        )
Code example #15
    def __init__(self, config, pool=None, reactor=None, clock=None):
        super(PubNubTwisted, self).__init__(config)

        self.clock = clock
        self._publish_sequence_manager = PublishSequenceManager(
            PubNubCore.MAX_SEQUENCE)

        if self.config.enable_subscribe:
            self._subscription_manager = TwistedSubscriptionManager(self)

        self.disconnected_times = 0

        if reactor is None:
            self.reactor = _reactor
        else:
            self.reactor = reactor

        if pool is None:
            self.pnconn_pool = HTTPConnectionPool(self.reactor,
                                                  persistent=True)
            self.pnconn_pool.maxPersistentPerHost = 3
            self.pnconn_pool.cachedConnectionTimeout = self.config.subscribe_request_timeout
            self.pnconn_pool.retryAutomatically = False
        else:
            self.pnconn_pool = pool

        self.headers = {
            'User-Agent': [self.sdk_name],
        }
Code example #16
File: http11.py  Project: pabitra10/company-crawl
    def __init__(self, settings):
        self._pool = HTTPConnectionPool(reactor, persistent=True)
        self._pool.maxPersistentPerHost = settings.getint(
            'CONCURRENT_REQUESTS_PER_DOMAIN')
        self._pool._factory.noisy = False

        self._sslMethod = openssl_methods[settings.get(
            'DOWNLOADER_CLIENT_TLS_METHOD')]
        self._contextFactoryClass = load_object(
            settings['DOWNLOADER_CLIENTCONTEXTFACTORY'])
        # try method-aware context factory
        try:
            self._contextFactory = self._contextFactoryClass(
                method=self._sslMethod)
        except TypeError:
            # use context factory defaults
            self._contextFactory = self._contextFactoryClass()
            msg = """
 '%s' does not accept `method` argument (type OpenSSL.SSL method,\
 e.g. OpenSSL.SSL.SSLv23_METHOD).\
 Please upgrade your context factory class to handle it or ignore it.""" % (
                settings['DOWNLOADER_CLIENTCONTEXTFACTORY'], )
            warnings.warn(msg)
        self._default_maxsize = settings.getint('DOWNLOAD_MAXSIZE')
        self._default_warnsize = settings.getint('DOWNLOAD_WARNSIZE')
        self._fail_on_dataloss = settings.getbool('DOWNLOAD_FAIL_ON_DATALOSS')
        self._disconnect_timeout = 1
Code example #17
File: server.py  Project: Nothing4You/Tumblr-Backup
 def __init__(self):
     self.pool = HTTPConnectionPool(reactor)
     self.pool.retryAutomatically = False
     self.pool.maxPersistentPerHost = 10
     self.pool._factory.noisy = False
     self.channels = {}
     self.downloads = {}
Code example #18
 def __init__(self, settings):
     self.settings = settings
     self.ssl_methods = [SSL.SSLv3_METHOD, SSL.TLSv1_METHOD]
     self.context_factories = [CrawlmiClientContextFactory(method) for method in self.ssl_methods]
     self.pool = HTTPConnectionPool(reactor, persistent=True)
     self.pool.maxPersistentPerHost = settings.get_int('CONCURRENT_REQUESTS_PER_DOMAIN')
     self.pool._factory.noisy = False
Code example #19
    def __init__(
        self,
        reactor,
        tls_client_options_factory,
        _well_known_tls_policy=None,
        _srv_resolver=None,
        _well_known_cache=well_known_cache,
    ):
        self._reactor = reactor
        self._clock = Clock(reactor)

        self._tls_client_options_factory = tls_client_options_factory
        if _srv_resolver is None:
            _srv_resolver = SrvResolver()
        self._srv_resolver = _srv_resolver

        self._pool = HTTPConnectionPool(reactor)
        self._pool.retryAutomatically = False
        self._pool.maxPersistentPerHost = 5
        self._pool.cachedConnectionTimeout = 2 * 60

        agent_args = {}
        if _well_known_tls_policy is not None:
            # the param is called 'contextFactory', but actually passing a
            # contextfactory is deprecated, and it expects an IPolicyForHTTPS.
            agent_args['contextFactory'] = _well_known_tls_policy
        _well_known_agent = RedirectAgent(
            Agent(self._reactor, pool=self._pool, **agent_args), )
        self._well_known_agent = _well_known_agent

        # our cache of .well-known lookup results, mapping from server name
        # to delegated name. The values can be:
        #   `bytes`:     a valid server-name
        #   `None`:      there is no (valid) .well-known here
        self._well_known_cache = _well_known_cache
Code example #20
File: _client_tx.py  Project: oberstet/txaio-etcd
    def __init__(self, reactor, url, pool=None, timeout=None, connect_timeout=None):
        """

        :param reactor: Twisted reactor to use.
        :type reactor: class

        :param url: etcd URL, eg `http://localhost:2379`
        :type url: str

        :param pool: Twisted Web agent connection pool
        :type pool: twisted.web.client.HTTPConnectionPool or None

        :param timeout: If given, a global request timeout used for all
            requests to etcd.
        :type timeout: float or None

        :param connect_timeout: If given, a global connection timeout used when
            opening a new HTTP connection to etcd.
        :type connect_timeout: float or None
        """
        if type(url) != six.text_type:
            raise TypeError('url must be of type unicode, was {}'.format(type(url)))
        self._url = url
        self._timeout = timeout
        self._pool = pool or HTTPConnectionPool(reactor, persistent=True)
        self._pool._factory.noisy = False
        self._agent = Agent(reactor, connectTimeout=connect_timeout, pool=self._pool)
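Given the constructor above, instantiating the client is straightforward. The sketch below assumes the class is named Client (the name is not visible in the snippet) and that a local etcd listens on its default port:

from twisted.internet import reactor

# Illustrative: per the docstring, url must be a unicode string; timeout and
# connect_timeout are optional floats in seconds.
etcd = Client(reactor, u'http://localhost:2379', timeout=5.0, connect_timeout=2.0)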
Code example #21
    def __init__(
        self,
        reactor,
        tls_client_options_factory,
        user_agent,
        _srv_resolver=None,
        _well_known_resolver=None,
    ):
        self._reactor = reactor
        self._clock = Clock(reactor)
        self._pool = HTTPConnectionPool(reactor)
        self._pool.retryAutomatically = False
        self._pool.maxPersistentPerHost = 5
        self._pool.cachedConnectionTimeout = 2 * 60

        self._agent = Agent.usingEndpointFactory(
            self._reactor,
            MatrixHostnameEndpointFactory(reactor, tls_client_options_factory,
                                          _srv_resolver),
            pool=self._pool,
        )
        self.user_agent = user_agent

        if _well_known_resolver is None:
            _well_known_resolver = WellKnownResolver(
                self._reactor,
                agent=Agent(
                    self._reactor,
                    pool=self._pool,
                    contextFactory=tls_client_options_factory,
                ),
                user_agent=self.user_agent,
            )

        self._well_known_resolver = _well_known_resolver
Code example #22
    def __init__(self, settings, crawler=None):
        self._crawler = crawler

        from twisted.internet import reactor
        self._pool = HTTPConnectionPool(reactor, persistent=True)
        self._pool.maxPersistentPerHost = settings.getint('CONCURRENT_REQUESTS_PER_DOMAIN')
        self._pool._factory.noisy = False

        self._sslMethod = openssl_methods[settings.get('DOWNLOADER_CLIENT_TLS_METHOD')]
        self._contextFactoryClass = load_object(settings['DOWNLOADER_CLIENTCONTEXTFACTORY'])
        # try method-aware context factory
        try:
            self._contextFactory = create_instance(
                objcls=self._contextFactoryClass,
                settings=settings,
                crawler=crawler,
                method=self._sslMethod,
            )
        except TypeError:
            # use context factory defaults
            self._contextFactory = create_instance(
                objcls=self._contextFactoryClass,
                settings=settings,
                crawler=crawler,
            )
            msg = """
 '%s' does not accept `method` argument (type OpenSSL.SSL method,\
 e.g. OpenSSL.SSL.SSLv23_METHOD) and/or `tls_verbose_logging` argument and/or `tls_ciphers` argument.\
 Please upgrade your context factory class to handle them or ignore them.""" % (
                settings['DOWNLOADER_CLIENTCONTEXTFACTORY'],)
            warnings.warn(msg)
        self._default_maxsize = settings.getint('DOWNLOAD_MAXSIZE')
        self._default_warnsize = settings.getint('DOWNLOAD_WARNSIZE')
        self._fail_on_dataloss = settings.getbool('DOWNLOAD_FAIL_ON_DATALOSS')
        self._disconnect_timeout = 1
Code example #23
File: client.py  Project: xelivous/synapse
    def __init__(self, hs):
        self.hs = hs

        pool = HTTPConnectionPool(reactor)

        # the pusher makes lots of concurrent SSL connections to sygnal, and
        # tends to do so in batches, so we need to allow the pool to keep lots
        # of idle connections around.
        pool.maxPersistentPerHost = max((100 * CACHE_SIZE_FACTOR, 5))
        pool.cachedConnectionTimeout = 2 * 60

        # The default context factory in Twisted 14.0.0 (which we require) is
        # BrowserLikePolicyForHTTPS which will do regular cert validation
        # 'like a browser'
        self.agent = Agent(
            reactor,
            connectTimeout=15,
            contextFactory=hs.get_http_client_context_factory(),
            pool=pool,
        )
        self.user_agent = hs.version_string
        self.clock = hs.get_clock()
        if hs.config.user_agent_suffix:
            self.user_agent = "%s %s" % (
                self.user_agent,
                hs.config.user_agent_suffix,
            )

        self.user_agent = self.user_agent.encode('ascii')
Code example #24
File: services.py  Project: timhughes/naggregator
    def __init__(self):
        connection_pool = HTTPConnectionPool(reactor, persistent=True)
        context_factory = WebClientContextFactory()
        self.agent = Agent(reactor, context_factory, pool=connection_pool)
        # self.agent = Agent(reactor, context_factory)
        self.user = None
        self.password = None
        self.refresh_interval = 10
        self.nagios_uri = None
        self.cgi_uri = None
        self.useragent = {
            'User-Agent': ['Naggregator/1.1 twisted.web.client.Agent/12.2'],
        }

        self.hosts = {}
        self.services = {}
        self.comments = {}
        self.downtimes = {}

        self.hosts_lastupdate_utc = datetime.utcfromtimestamp(
            0)  # Never updated
        # self.hosts_message = False
        self.hosts_error = False
        self.services_lastupdate_utc = datetime.utcfromtimestamp(
            0)  # Never updated
        # self.services_message = False
        self.services_error = False
Code example #25
    def __init__(self, reactor, email, password):
        self.reactor = reactor
        self.email = email
        self.password = password

        # Set up an agent for sending HTTP requests.  Uses cookies
        # (part of the authentication), persistent HTTP connection
        # pool, automatic content decoding (gzip)

        # container to keep track of cookies
        self.cookiejar = cookielib.CookieJar()

        # HTTP persistent connection pool
        self.pool = HTTPConnectionPool(self.reactor, persistent=True)
        # for some reason, using >1 connection per host fails
        self.pool.maxPersistentPerHost = 1

        self.agent = ContentDecoderAgent(
            CookieAgent(Agent(self.reactor, pool=self.pool), self.cookiejar),
            [('gzip', GzipDecoder)])

        # this is the token that is used to authenticate API requests
        self.xsrf_token = None
        self.auth_token = None

        # who we are
        self.player_nickname = None
        self.player_guid = None
        self.team = None
        self.ap = None
        self.level = None
        self.start_date = None
        self.new_version = False
        self.inventory_done = False
        self.profile_done = False

        # for keeping track of item inventory
        self.inventory = b07.inventory.Inventory()

        # for keeping track of API requests that are delayed until
        # authentication has completed
        self._deferred_api_requests = []

        # for keeping track of periodic inventory refreshes
        self._periodic_inventory_refresh_delayedcall = None

        # list of functions to call every time inventory is refreshed
        self._on_inventory_refreshed = []

        # do an immediate inventory refresh
        self._first_inventory_ready = self._defer_until_authenticated(
            self._inventory0, (), {})

        # do an immediate profile refresh
        self._first_profile_ready = self._defer_until_authenticated(
            self._profile0, (), {})

        # start the authentication process
        self.reactor.callLater(0, self._authenticate0)
Code example #26
File: test_workers.py  Project: grigi/junebug
    def setUp(self):
        self.worker = yield self.get_worker()
        self.logging_api = RequestLoggingApi()
        self.logging_api.setup()
        self.addCleanup(self.logging_api.teardown)

        connection_pool = HTTPConnectionPool(reactor, persistent=False)
        treq._utils.set_global_pool(connection_pool)
Code example #27
 def _parse_fluentd_http(self, kind, args):
     return lambda reactor: FluentdDestination(
         # Construct the pool ourselves with the default of using
         # persistent connections to override Agent's default of not using
         # persistent connections.
         agent=Agent(reactor, pool=HTTPConnectionPool(reactor)),
         fluentd_url=URL.fromText(args),
     )
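The inline comment here depends on a Twisted detail worth spelling out: an Agent built without a pool falls back to a non-persistent HTTPConnectionPool, whereas HTTPConnectionPool itself defaults to persistent=True. A minimal illustrative sketch of the difference:

from twisted.internet import reactor
from twisted.web.client import Agent, HTTPConnectionPool

# Without an explicit pool, Agent creates HTTPConnectionPool(reactor, persistent=False),
# so connections are closed after each request.
plain_agent = Agent(reactor)

# An explicit pool is persistent by default, so connections are kept alive and reused.
keepalive_agent = Agent(reactor, pool=HTTPConnectionPool(reactor))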
Code example #28
 def __init__(self, persistent=True):
     self.persistent = persistent
     self.agents = SortedCollection(key=lambda x: x.url.netloc)
     self.pool = HTTPConnectionPool(reactor)
     self.pool.maxPersistentPerHost = getattr(
         settings, 'DTX_WEB_DEFER_MAX_PERSISTENT_PER_HOST', 8)
     self.pool.cachedConnectionTimeout = getattr(
         settings, 'DTX_WEB_DEFER_CONNECT_TIMEOUT', 10)
Code example #29
File: shipper.py  Project: mmedeiros/shipper
    def startup(cls):
        """Initiates connection pool and logging.

        We cannot use persistent connections here, as the docker server
        has some trouble with them.
        """
        cls.pool = HTTPConnectionPool(reactor, persistent=False)
        cls._init_logging()
Code example #30
File: api.py  Project: Wanderu/treq
def default_pool(reactor, pool, persistent):
    """
    Return the specified pool or a pool with the specified reactor and
    persistence.
    """
    reactor = default_reactor(reactor)

    if pool is not None:
        return pool

    if persistent is False:
        return HTTPConnectionPool(reactor, persistent=persistent)

    if get_global_pool() is None:
        set_global_pool(HTTPConnectionPool(reactor, persistent=True))

    return get_global_pool()
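Read from the caller's side, default_pool behaves as follows: an explicit pool is returned unchanged, persistent=False yields a fresh throwaway pool, and anything else reuses (or lazily creates) the shared persistent pool. A hedged usage sketch of the function defined above (the calls themselves are illustrative):

from twisted.internet import reactor

# Illustrative: a fresh, non-persistent pool for one-off requests.
one_off = default_pool(reactor, pool=None, persistent=False)

# Illustrative: fall through to the shared, lazily created persistent pool.
shared = default_pool(reactor, pool=None, persistent=None)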