Пример #1
0
class StateTestCase(unittest.TestCase):
    """Integration tests for PubNub's set_state call on Twisted."""

    def setUp(self):
        # Copy the shared config so the uuid tweak stays local to this test.
        config = copy(pnconf)
        config.uuid = 'someuuid'
        self.pool = HTTPConnectionPool(reactor, persistent=False)
        self.pubnub = PubNubTwisted(config, reactor=reactor, pool=self.pool)

    def tearDown(self):
        # Flush cached connections so the reactor can shut down cleanly.
        return self.pool.closeCachedConnections()

    def assert_valid_state_envelope(self, envelope):
        """Assert *envelope* wraps a PNSetStateResult carrying `state`."""
        self.assertIsInstance(envelope, TwistedEnvelope)
        self.assertIsInstance(envelope.result, PNSetStateResult)
        self.assertEqual(envelope.result.state, state)

    @inlineCallbacks
    @pn_vcr.use_cassette(
        'tests/integrational/fixtures/twisted/state/single_channel.yaml',
        filter_query_parameters=['uuid'])
    def test_state_single_channel(self):
        call = self.pubnub.set_state().channels(channel).state(state)
        envelope = yield call.deferred()
        self.assert_valid_state_envelope(envelope)
        returnValue(envelope)

    @inlineCallbacks
    @pn_vcr.use_cassette(
        'tests/integrational/fixtures/twisted/state/multiple_channels.yaml',
        filter_query_parameters=['uuid'])
    def test_state_multiple_channels(self):
        call = self.pubnub.set_state().channels(channels).state(state)
        envelope = yield call.deferred()
        self.assert_valid_state_envelope(envelope)
        returnValue(envelope)
Пример #2
0
class BrowserDownloadHandler(object):
    """Download handler that issues requests through a BrowserAgent backed
    by a persistent HTTP connection pool."""

    def __init__(self, settings):
        # Persistent pool: connections are cached after each request and
        # reused, capped per host by CONCURRENT_REQUESTS_PER_DOMAIN.
        self._pool = HTTPConnectionPool(reactor, persistent=True)
        self._pool.maxPersistentPerHost = settings.getint(
            'CONCURRENT_REQUESTS_PER_DOMAIN')
        self._pool._factory.noisy = False  # suppress per-connection factory log noise

        # TLS method (an OpenSSL.SSL *_METHOD constant) selected by name.
        self._sslMethod = openssl_methods[settings.get(
            'DOWNLOADER_CLIENT_TLS_METHOD')]
        self._contextFactoryClass = load_object(
            settings['DOWNLOADER_CLIENTCONTEXTFACTORY'])
        self._contextFactory = self._contextFactoryClass(
            method=self._sslMethod)
        # Seconds close() waits before forcing its deferred to fire.
        self._disconnect_timeout = 1

    def download_request(self, request, spider):
        """Return a deferred for downloading *request* via a BrowserAgent."""
        agent = BrowserAgent(contextFactory=self._contextFactory,
                             pool=self._pool)
        return agent.download_request(request)

    def close(self):
        """Close cached pool connections, forcing the deferred after
        `_disconnect_timeout` seconds if the pool hangs (see Twisted
        ticket #7738: closeCachedConnections can stall on network issues)."""
        d = self._pool.closeCachedConnections()
        delayed_call = reactor.callLater(self._disconnect_timeout, d.callback,
                                         [])

        def cancel_delayed_call(result):
            # The pool finished on its own; drop the pending forced callback.
            if delayed_call.active():
                delayed_call.cancel()
            return result

        d.addBoth(cancel_delayed_call)
        return d
Пример #3
0
class HTTP11DownloadHandler(object):
    """HTTP/1.1 download handler that retries the download with each
    configured SSL/TLS method until one is accepted by the server."""

    def __init__(self, settings):
        self.settings = settings
        # NOTE(review): SSLv3 is obsolete (POODLE); kept here only for
        # backwards compatibility with old servers.
        self.ssl_methods = [SSL.SSLv3_METHOD, SSL.TLSv1_METHOD]
        self.context_factories = [CrawlmiClientContextFactory(method) for method in self.ssl_methods]
        self.pool = HTTPConnectionPool(reactor, persistent=True)
        self.pool.maxPersistentPerHost = settings.get_int('CONCURRENT_REQUESTS_PER_DOMAIN')
        self.pool._factory.noisy = False  # suppress factory log noise

    def download_request(self, request):
        '''Return a deferred for the HTTP download.

        On SSL.Error the download is retried with the next context
        factory from `self.context_factories`.
        '''
        dfd = None
        for context_factory in self.context_factories:
            if dfd is None:
                dfd = self._download(request, context_factory)
            else:
                # Bind the current factory as a default argument: a plain
                # closure is late-bound, so every errback would otherwise
                # retry with the *last* factory in the list instead of the
                # next untried one.
                def _failure(failure, context_factory=context_factory):
                    failure.trap(SSL.Error)
                    return self._download(request, context_factory)
                dfd.addErrback(_failure)
        return dfd

    def _download(self, request, context_factory):
        """Start a single download attempt using *context_factory*."""
        agent = CrawlmiAgent(
            context_factory,
            self.settings.get_float('DOWNLOAD_TIMEOUT', 180, request),
            self.settings.get_int('DOWNLOAD_SIZE_LIMIT', 0, request),
            request.meta.get('bind_address'),
            self.pool)
        return agent.download_request(request)

    def close(self):
        """Close all cached pool connections; returns a deferred."""
        return self.pool.closeCachedConnections()
Пример #4
0
class SFTPFuncTest(unittest.TestCase):
    """Functional-test base: one swift client and SFTP connection per test,
    with a temp dir for local scratch files."""

    @defer.inlineCallbacks
    def setUp(self):
        self.active_connections = []
        self.pool = HTTPConnectionPool(reactor, persistent=True)
        self.swift = get_swift_client(CONFIG, pool=self.pool)
        self.tmpdir = tempfile.mkdtemp()
        _, self.sftp = self.get_client()
        # Start each test from a clean swift account.
        yield clean_swift(self.swift)

    @defer.inlineCallbacks
    def tearDown(self):
        shutil.rmtree(self.tmpdir)
        for (transport, conn) in self.active_connections:
            # Best-effort close: one broken connection must not stop the
            # rest of the cleanup. Catch Exception instead of a bare
            # `except:` so KeyboardInterrupt/SystemExit still propagate.
            try:
                conn.close()
            except Exception:
                pass
            try:
                transport.close()
            except Exception:
                pass

        yield clean_swift(self.swift)
        yield self.pool.closeCachedConnections()

    def get_client(self):
        """Open a new SFTP client and register it for tearDown cleanup."""
        transport, conn = get_sftp_client_with_transport(CONFIG)
        self.active_connections.append((transport, conn))
        return transport, conn
class WhereNowTestCase(unittest.TestCase):
    """Integration tests for PubNub's where_now call on Twisted."""

    def setUp(self):
        self.pool = HTTPConnectionPool(reactor, persistent=False)
        self.pubnub = PubNubTwisted(pnconf, reactor=reactor, pool=self.pool)

    def tearDown(self):
        # Flush cached connections so the reactor can shut down cleanly.
        return self.pool.closeCachedConnections()

    def assert_valid_where_now_envelope(self, envelope, channels):
        """Assert *envelope* wraps a PNWhereNowResult listing *channels*."""
        self.assertIsInstance(envelope, TwistedEnvelope)
        self.assertIsInstance(envelope.result, PNWhereNowResult)
        self.assertEqual(envelope.result.channels, channels)

    @inlineCallbacks
    @pn_vcr.use_cassette(
        'tests/integrational/fixtures/twisted/where_now/single.yaml',
        filter_query_parameters=['uuid'])
    def test_where_now_single_channel(self):
        call = self.pubnub.where_now().uuid(uuid_looking_for)
        envelope = yield call.deferred()
        self.assert_valid_where_now_envelope(envelope, [u'twisted-test-1'])
        returnValue(envelope)

    @inlineCallbacks
    @pn_vcr.use_cassette(
        'tests/integrational/fixtures/twisted/where_now/multiple.yaml',
        filter_query_parameters=['uuid'])
    def test_where_now_multiple_channels(self):
        call = self.pubnub.where_now().uuid(uuid_looking_for)
        envelope = yield call.deferred()
        self.assert_valid_where_now_envelope(
            envelope, [u'twisted-test-2', u'twisted-test-1'])
        returnValue(envelope)
Пример #6
0
class SFTPFuncTest(unittest.TestCase):
    """Functional-test base: one swift client and SFTP connection per test,
    with a temp dir for local scratch files."""

    @defer.inlineCallbacks
    def setUp(self):
        self.active_connections = []
        self.pool = HTTPConnectionPool(reactor, persistent=True)
        self.swift = get_swift_client(CONFIG, pool=self.pool)
        self.tmpdir = tempfile.mkdtemp()
        _, self.sftp = self.get_client()
        # Start each test from a clean swift account.
        yield clean_swift(self.swift)

    @defer.inlineCallbacks
    def tearDown(self):
        shutil.rmtree(self.tmpdir)
        for (transport, conn) in self.active_connections:
            # Best-effort close: one broken connection must not stop the
            # rest of the cleanup. Catch Exception instead of a bare
            # `except:` so KeyboardInterrupt/SystemExit still propagate.
            try:
                conn.close()
            except Exception:
                pass
            try:
                transport.close()
            except Exception:
                pass

        yield clean_swift(self.swift)
        yield self.pool.closeCachedConnections()

    def get_client(self):
        """Open a new SFTP client and register it for tearDown cleanup."""
        transport, conn = get_sftp_client_with_transport(CONFIG)
        self.active_connections.append((transport, conn))
        return transport, conn
class AbstractBaseApiTest(SessionTestCase):
    """
    Tests for the IOM HTTP API should create a subclass of this class.
    """
    @blocking_call_on_reactor_thread
    @inlineCallbacks
    def setUp(self):
        yield super(AbstractBaseApiTest, self).setUp()
        # Unlock the session so API calls are authorised during the test.
        self.session.set_password('abcd')
        self.session.unlock('abcd')
        self.connection_pool = HTTPConnectionPool(reactor, False)

    @blocking_call_on_reactor_thread
    @inlineCallbacks
    def tearDown(self):
        yield self.close_connections()
        yield super(AbstractBaseApiTest, self).tearDown()

    def close_connections(self):
        """Release every connection cached by the pool."""
        return self.connection_pool.closeCachedConnections()

    def do_request(self, endpoint, req_type, post_data, raw_data):
        """Issue *req_type* at *endpoint* on the local API; returns a deferred."""
        url = 'http://localhost:%s/%s' % (self.session.api_manager.port, endpoint)
        agent = Agent(reactor, pool=self.connection_pool)
        producer = POSTDataProducer(post_data, raw_data)
        return agent.request(req_type, url, Headers(), producer)
Пример #8
0
class HTTP11DownloadHandler(object):
    """HTTP/1.1 download handler that retries the download with each
    SSL/TLS method listed in DOWNLOAD_HANDLER_SSL_METHODS until one is
    accepted by the server."""

    def __init__(self, settings):
        self.settings = settings
        self.ssl_methods = settings.get('DOWNLOAD_HANDLER_SSL_METHODS')
        self.context_factories = [CrawlmiClientContextFactory(method) for method in self.ssl_methods]
        self.pool = HTTPConnectionPool(reactor, persistent=True)
        self.pool.maxPersistentPerHost = settings.get_int('CONCURRENT_REQUESTS_PER_DOMAIN')
        self.pool._factory.noisy = False  # suppress factory log noise

    def download_request(self, request):
        '''Return a deferred for the HTTP download.

        On SSL.Error the download is retried with the next context
        factory from `self.context_factories`.
        '''
        dfd = None
        for context_factory in self.context_factories:
            if dfd is None:
                dfd = self._download(request, context_factory)
            else:
                # Bind the current factory as a default argument: a plain
                # closure is late-bound, so every errback would otherwise
                # retry with the *last* factory in the list instead of the
                # next untried one.
                def _failure(failure, context_factory=context_factory):
                    failure.trap(SSL.Error)
                    return self._download(request, context_factory)
                dfd.addErrback(_failure)
        return dfd

    def _download(self, request, context_factory):
        """Start a single download attempt using *context_factory*."""
        agent = CrawlmiAgent(
            context_factory,
            self.settings.get_float('DOWNLOAD_TIMEOUT', 180, request),
            self.settings.get_int('DOWNLOAD_SIZE_LIMIT', 0, request),
            request.meta.get('bind_address'),
            self.pool)
        return agent.download_request(request)

    def close(self):
        """Close all cached pool connections; returns a deferred."""
        return self.pool.closeCachedConnections()
Пример #9
0
class HTTP11DownloadHandler(object):
    """HTTP/1.1 download handler built on a persistent connection pool and
    a configurable TLS context factory."""

    def __init__(self, settings):
        self._pool = HTTPConnectionPool(reactor, persistent=True)
        self._pool.maxPersistentPerHost = settings.getint(
            'CONCURRENT_REQUESTS_PER_DOMAIN')
        self._pool._factory.noisy = False  # suppress per-connection factory log noise

        # TLS method (an OpenSSL.SSL *_METHOD constant) selected by name.
        self._sslMethod = openssl_methods[settings.get(
            'DOWNLOADER_CLIENT_TLS_METHOD')]
        self._contextFactoryClass = load_object(
            settings['DOWNLOADER_CLIENTCONTEXTFACTORY'])
        # try method-aware context factory
        try:
            self._contextFactory = self._contextFactoryClass(
                method=self._sslMethod)
        except TypeError:
            # use context factory defaults
            self._contextFactory = self._contextFactoryClass()
            msg = """
 '%s' does not accept `method` argument (type OpenSSL.SSL method,\
 e.g. OpenSSL.SSL.SSLv23_METHOD).\
 Please upgrade your context factory class to handle it or ignore it.""" % (
                settings['DOWNLOADER_CLIENTCONTEXTFACTORY'], )
            warnings.warn(msg)
        # Defaults used when the spider carries no per-spider override
        # (see download_request).
        self._default_maxsize = settings.getint('DOWNLOAD_MAXSIZE')
        self._default_warnsize = settings.getint('DOWNLOAD_WARNSIZE')
        self._fail_on_dataloss = settings.getbool('DOWNLOAD_FAIL_ON_DATALOSS')
        # Seconds close() waits before forcing its deferred to fire.
        self._disconnect_timeout = 1

    def download_request(self, request, spider):
        """Return a deferred for the HTTP download"""
        agent = ScrapyAgent(contextFactory=self._contextFactory,
                            pool=self._pool,
                            maxsize=getattr(spider, 'download_maxsize',
                                            self._default_maxsize),
                            warnsize=getattr(spider, 'download_warnsize',
                                             self._default_warnsize),
                            fail_on_dataloss=self._fail_on_dataloss)
        return agent.download_request(request)

    def close(self):
        """Close cached pool connections; the returned deferred is forced
        to fire after `_disconnect_timeout` seconds if the pool hangs."""
        d = self._pool.closeCachedConnections()
        # closeCachedConnections will hang on network or server issues, so
        # we'll manually timeout the deferred.
        #
        # Twisted issue addressing this problem can be found here:
        # https://twistedmatrix.com/trac/ticket/7738.
        #
        # closeCachedConnections doesn't handle external errbacks, so we'll
        # issue a callback after `_disconnect_timeout` seconds.
        delayed_call = reactor.callLater(self._disconnect_timeout, d.callback,
                                         [])

        def cancel_delayed_call(result):
            # Pool closed in time; cancel the pending forced callback.
            if delayed_call.active():
                delayed_call.cancel()
            return result

        d.addBoth(cancel_delayed_call)
        return d
Пример #10
0
class HTTPDownloadHandler(object):
    """HTTP download handler with a persistent connection pool and
    log-formatter aware logging."""

    def __init__(self, logformatter, settings):
        # The pool manages connections: after a request completes they are
        # not closed automatically but kept cached for reuse.
        self.lfm = logformatter
        #logger.debug(*self.lfm.crawled)
        logger.debug(
            *self.lfm.crawled('Downloader', 'HTTPDownloadHandler', '已初始化'))
        self._pool = HTTPConnectionPool(reactor, persistent=True)
        self._pool.maxPersistentPerHost = settings.getint(
            'CONCURRENT_REQUESTS_PER_DOMAIN')
        self._pool._factory.noisy = False  # suppress per-connection factory log noise

        self._contextFactory = DownloaderClientContextFactory()

        # Defaults used when the spider carries no per-spider override.
        self._default_maxsize = settings.getint('DOWNLOAD_MAXSIZE')
        self._default_warnsize = settings.getint('DOWNLOAD_WARNSIZE')
        self._fail_on_dataloss = settings.getbool('DOWNLOAD_FAIL_ON_DATALOSS')
        # Seconds close() waits before forcing its deferred to fire.
        self._disconnect_timeout = 1

    @classmethod
    def from_crawler(cls, crawler):
        # Standard factory hook: build the handler from crawler state.
        return cls(crawler.logformatter, crawler.settings)

    def download_request(self, request, spider):
        """Return a deferred for the HTTP download of *request*."""
        logger.debug(*self.lfm.crawled('Spider', spider.name,
                                       '执行download_request', request))
        """返回一个http download 的 defer"""
        self.spider = spider
        agent = DownloadAgent(contextFactory=self._contextFactory,
                              pool=self._pool,
                              maxsize=getattr(spider, 'download_maxsize',
                                              self._default_maxsize),
                              warnsize=getattr(spider, 'download_warnsize',
                                               self._default_warnsize),
                              fail_on_dataloss=self._fail_on_dataloss,
                              logformatter=self.lfm)
        return agent.download_request(request)

    def close(self):
        """Close every persistent pool connection; returns a deferred."""
        #  Close all persistent connections and remove them from the pool;
        #  this returns a deferred.
        d = self._pool.closeCachedConnections()
        #  closeCachedConnections can hang on network or server problems, so
        #  fire the deferred manually after a delay. It does not handle
        #  external errbacks either, hence we schedule our own callback
        #  after _disconnect_timeout seconds.
        logger.warning(
            *self.lfm.crawled('Downloader', 'HTTPDownloadHandler', '已关闭'))
        delayed_call = reactor.callLater(self._disconnect_timeout, d.callback,
                                         [])

        #  active() is True while the delayed call is still pending (not yet
        #  run); False once it has fired or been cancelled.
        def cancel_delayed_call(result):
            if delayed_call.active():
                delayed_call.cancel()
            return result

        d.addBoth(cancel_delayed_call)
        return d
Пример #11
0
class HTTP11DownloadHandler:
    """HTTP/1.1 download handler using a persistent connection pool and a
    context factory loaded from settings."""

    # Instantiate eagerly rather than on first use.
    lazy = False

    def __init__(self, settings, crawler=None):
        self._crawler = crawler

        # Imported locally to defer reactor installation until needed.
        from twisted.internet import reactor
        self._pool = HTTPConnectionPool(reactor, persistent=True)
        self._pool.maxPersistentPerHost = settings.getint(
            'CONCURRENT_REQUESTS_PER_DOMAIN')
        self._pool._factory.noisy = False  # suppress per-connection factory log noise

        self._contextFactory = load_context_factory_from_settings(
            settings, crawler)
        # Defaults used when the spider carries no per-spider override
        # (see download_request).
        self._default_maxsize = settings.getint('DOWNLOAD_MAXSIZE')
        self._default_warnsize = settings.getint('DOWNLOAD_WARNSIZE')
        self._fail_on_dataloss = settings.getbool('DOWNLOAD_FAIL_ON_DATALOSS')
        # Seconds close() waits before forcing its deferred to fire.
        self._disconnect_timeout = 1

    @classmethod
    def from_crawler(cls, crawler):
        # Standard factory hook: build the handler from crawler state.
        return cls(crawler.settings, crawler)

    def download_request(self, request, spider):
        """Return a deferred for the HTTP download"""
        agent = ScrapyAgent(
            contextFactory=self._contextFactory,
            pool=self._pool,
            maxsize=getattr(spider, 'download_maxsize', self._default_maxsize),
            warnsize=getattr(spider, 'download_warnsize',
                             self._default_warnsize),
            fail_on_dataloss=self._fail_on_dataloss,
            crawler=self._crawler,
        )
        return agent.download_request(request)

    def close(self):
        """Close cached pool connections; the returned deferred is forced
        to fire after `_disconnect_timeout` seconds if the pool hangs."""
        from twisted.internet import reactor
        d = self._pool.closeCachedConnections()
        # closeCachedConnections will hang on network or server issues, so
        # we'll manually timeout the deferred.
        #
        # Twisted issue addressing this problem can be found here:
        # https://twistedmatrix.com/trac/ticket/7738.
        #
        # closeCachedConnections doesn't handle external errbacks, so we'll
        # issue a callback after `_disconnect_timeout` seconds.
        delayed_call = reactor.callLater(self._disconnect_timeout, d.callback,
                                         [])

        def cancel_delayed_call(result):
            # Pool closed in time; cancel the pending forced callback.
            if delayed_call.active():
                delayed_call.cancel()
            return result

        d.addBoth(cancel_delayed_call)
        return d
Пример #12
0
class CouchStateTests(CouchDBTestCase):
    """Tests that the CouchDB schema-version checks raise when the config
    document is wrong or missing."""

    def setUp(self):
        CouchDBTestCase.setUp(self)
        # Fresh per-test database, removed again via addCleanup.
        self.db = self.couch_server.create('user-' + uuid4().hex)
        self.addCleanup(self.delete_db, self.db.name)
        self.pool = HTTPConnectionPool(reactor, persistent=False)
        self.agent = Agent(reactor, pool=self.pool)

    @defer.inlineCallbacks
    def tearDown(self):
        yield self.pool.closeCachedConnections()

    @restricted_listing
    @defer.inlineCallbacks
    def test__check_db_schema_version_wrong_schema_version_raises(self):
        # A config doc with a version one past SCHEMA_VERSION must raise.
        wrong_schema_version = SCHEMA_VERSION + 1
        self.db.create({
            '_id': CONFIG_DOC_ID,
            SCHEMA_VERSION_KEY: wrong_schema_version
        })
        with pytest.raises(WrongCouchSchemaVersionError):
            yield _check_db_schema_version(self.couch_url,
                                           self.db.name,
                                           None,
                                           agent=self.agent)

    @restricted_listing
    @defer.inlineCallbacks
    def test_check_schema_versions_wrong_schema_version_raises(self):
        # Same bad version, but via the aggregate check: the error is
        # wrapped in a FirstError whose message we match against.
        wrong_schema_version = SCHEMA_VERSION + 1
        self.db.create({
            '_id': CONFIG_DOC_ID,
            SCHEMA_VERSION_KEY: wrong_schema_version
        })
        expected_msg = 'Error checking CouchDB schema versions: ' \
                       'FirstError.*WrongCouchSchemaVersionError()'
        with pytest.raises(Exception, match=expected_msg):
            yield check_schema_versions(self.couch_url, agent=self.agent)

    @restricted_listing
    @defer.inlineCallbacks
    def test__check_db_schema_version_missing_config_doc_raises(self):
        # An empty database (no config doc at all) must also raise.
        self.db.create({})
        with pytest.raises(MissingCouchConfigDocumentError):
            yield _check_db_schema_version(self.couch_url,
                                           self.db.name,
                                           None,
                                           agent=self.agent)

    @restricted_listing
    @defer.inlineCallbacks
    def test_check_schema_versions_missing_config_doc_raises(self):
        # Missing config doc via the aggregate check; wrapped in FirstError.
        self.db.create({})
        expected_msg = 'Error checking CouchDB schema versions: ' \
                       'FirstError.*MissingCouchConfigDocumentError()'
        with pytest.raises(Exception, match=expected_msg):
            yield check_schema_versions(self.couch_url, agent=self.agent)
Пример #13
0
class HTTP11DownloadHandler(object):
    """HTTP/1.1 download handler built on a persistent connection pool and
    a configurable TLS context factory."""

    # Instantiate eagerly rather than on first use.
    lazy = False

    def __init__(self, settings):
        self._pool = HTTPConnectionPool(reactor, persistent=True)
        self._pool.maxPersistentPerHost = settings.getint('CONCURRENT_REQUESTS_PER_DOMAIN')
        self._pool._factory.noisy = False  # suppress per-connection factory log noise

        # TLS method (an OpenSSL.SSL *_METHOD constant) selected by name.
        self._sslMethod = openssl_methods[settings.get('DOWNLOADER_CLIENT_TLS_METHOD')]
        self._contextFactoryClass = load_object(settings['DOWNLOADER_CLIENTCONTEXTFACTORY'])
        # try method-aware context factory
        try:
            self._contextFactory = self._contextFactoryClass(method=self._sslMethod)
        except TypeError:
            # use context factory defaults
            self._contextFactory = self._contextFactoryClass()
            msg = """
 '%s' does not accept `method` argument (type OpenSSL.SSL method,\
 e.g. OpenSSL.SSL.SSLv23_METHOD).\
 Please upgrade your context factory class to handle it or ignore it.""" % (
                settings['DOWNLOADER_CLIENTCONTEXTFACTORY'],)
            warnings.warn(msg)
        # Defaults used when the spider carries no per-spider override
        # (see download_request).
        self._default_maxsize = settings.getint('DOWNLOAD_MAXSIZE')
        self._default_warnsize = settings.getint('DOWNLOAD_WARNSIZE')
        self._fail_on_dataloss = settings.getbool('DOWNLOAD_FAIL_ON_DATALOSS')
        # Seconds close() waits before forcing its deferred to fire.
        self._disconnect_timeout = 1

    def download_request(self, request, spider):
        """Return a deferred for the HTTP download"""
        agent = ScrapyAgent(contextFactory=self._contextFactory, pool=self._pool,
            maxsize=getattr(spider, 'download_maxsize', self._default_maxsize),
            warnsize=getattr(spider, 'download_warnsize', self._default_warnsize),
            fail_on_dataloss=self._fail_on_dataloss)
        return agent.download_request(request)

    def close(self):
        """Close cached pool connections; the returned deferred is forced
        to fire after `_disconnect_timeout` seconds if the pool hangs."""
        d = self._pool.closeCachedConnections()
        # closeCachedConnections will hang on network or server issues, so
        # we'll manually timeout the deferred.
        #
        # Twisted issue addressing this problem can be found here:
        # https://twistedmatrix.com/trac/ticket/7738.
        #
        # closeCachedConnections doesn't handle external errbacks, so we'll
        # issue a callback after `_disconnect_timeout` seconds.
        delayed_call = reactor.callLater(self._disconnect_timeout, d.callback, [])

        def cancel_delayed_call(result):
            # Pool closed in time; cancel the pending forced callback.
            if delayed_call.active():
                delayed_call.cancel()
            return result

        d.addBoth(cancel_delayed_call)
        return d
Пример #14
0
class CouchStateTests(CouchDBTestCase):
    """Tests that the CouchDB schema-version checks raise when the config
    document is wrong or missing."""

    def setUp(self):
        CouchDBTestCase.setUp(self)
        # Fresh per-test database, removed again via addCleanup.
        self.db = self.couch_server.create('user-' + uuid4().hex)
        self.addCleanup(self.delete_db, self.db.name)
        self.pool = HTTPConnectionPool(reactor, persistent=False)
        self.agent = Agent(reactor, pool=self.pool)

    @defer.inlineCallbacks
    def tearDown(self):
        yield self.pool.closeCachedConnections()

    @restricted_listing
    @defer.inlineCallbacks
    def test__check_db_schema_version_wrong_schema_version_raises(self):
        # A config doc with a version one past SCHEMA_VERSION must raise.
        wrong_schema_version = SCHEMA_VERSION + 1
        self.db.create(
            {'_id': CONFIG_DOC_ID, SCHEMA_VERSION_KEY: wrong_schema_version})
        with pytest.raises(WrongCouchSchemaVersionError):
            yield _check_db_schema_version(
                self.couch_url, self.db.name, None, agent=self.agent)

    @restricted_listing
    @defer.inlineCallbacks
    def test_check_schema_versions_wrong_schema_version_raises(self):
        # Same bad version, but via the aggregate check: the error is
        # wrapped in a FirstError whose message we match against.
        wrong_schema_version = SCHEMA_VERSION + 1
        self.db.create(
            {'_id': CONFIG_DOC_ID, SCHEMA_VERSION_KEY: wrong_schema_version})
        expected_msg = 'Error checking CouchDB schema versions: ' \
                       'FirstError.*WrongCouchSchemaVersionError()'
        with pytest.raises(Exception, match=expected_msg):
            yield check_schema_versions(self.couch_url, agent=self.agent)

    @restricted_listing
    @defer.inlineCallbacks
    def test__check_db_schema_version_missing_config_doc_raises(self):
        # An empty database (no config doc at all) must also raise.
        self.db.create({})
        with pytest.raises(MissingCouchConfigDocumentError):
            yield _check_db_schema_version(
                self.couch_url, self.db.name, None, agent=self.agent)

    @restricted_listing
    @defer.inlineCallbacks
    def test_check_schema_versions_missing_config_doc_raises(self):
        # Missing config doc via the aggregate check; wrapped in FirstError.
        self.db.create({})
        expected_msg = 'Error checking CouchDB schema versions: ' \
                       'FirstError.*MissingCouchConfigDocumentError()'
        with pytest.raises(Exception, match=expected_msg):
            yield check_schema_versions(self.couch_url, agent=self.agent)
Пример #15
0
class FTPFuncTest(unittest.TestCase):
    """Functional-test base: swift client, temp dir and FTP client per test."""

    @defer.inlineCallbacks
    def setUp(self):
        self.pool = HTTPConnectionPool(reactor, persistent=True)
        self.swift = get_swift_client(conf, pool=self.pool)
        self.tmpdir = tempfile.mkdtemp()
        self.ftp = get_ftp_client(conf)
        # Start each test from a clean swift account.
        yield clean_swift(self.swift)

    @defer.inlineCallbacks
    def tearDown(self):
        # Local scratch space first, then remote state, then connections.
        shutil.rmtree(self.tmpdir)
        self.ftp.close()
        yield clean_swift(self.swift)
        yield self.pool.closeCachedConnections()
Пример #16
0
class AbstractBaseApiTest(TestAsServer):
    """
    Tests for the Tribler HTTP API should create a subclass of this class.
    """
    @blocking_call_on_reactor_thread
    @inlineCallbacks
    def setUp(self, autoload_discovery=True):
        yield super(AbstractBaseApiTest,
                    self).setUp(autoload_discovery=autoload_discovery)
        self.connection_pool = HTTPConnectionPool(reactor, False)
        # Seed the xxx filter with a known term so filtering is exercised.
        terms = self.session.lm.category.xxx_filter.xxx_terms
        terms.add("badterm")
        self.session.lm.category.xxx_filter.xxx_terms = terms

    @blocking_call_on_reactor_thread
    @inlineCallbacks
    def tearDown(self, annotate=True):
        yield self.close_connections()
        yield super(AbstractBaseApiTest, self).tearDown(annotate=annotate)

    def close_connections(self):
        """Release every connection cached by the pool."""
        return self.connection_pool.closeCachedConnections()

    @blocking_call_on_reactor_thread
    def setUpPreSession(self):
        super(AbstractBaseApiTest, self).setUpPreSession()
        self.config.set_http_api_enabled(True)
        self.config.set_megacache_enabled(True)
        self.config.set_tunnel_community_enabled(False)

        # Make sure we select a random port for the HTTP API
        # Each TEST_BUCKET gets its own 2000-port window so parallel CI
        # runs do not collide.
        min_base_port = 1000 if not os.environ.get("TEST_BUCKET", None) \
            else int(os.environ['TEST_BUCKET']) * 2000 + 2000
        self.config.set_http_api_port(
            get_random_port(min_port=min_base_port,
                            max_port=min_base_port + 2000))

    def do_request(self, endpoint, req_type, post_data, raw_data):
        """Issue *req_type* at *endpoint* on the local API; returns a deferred."""
        agent = Agent(reactor, pool=self.connection_pool)
        return agent.request(
            req_type, 'http://localhost:%s/%s' %
            (self.session.config.get_http_api_port(), endpoint),
            Headers({
                'User-Agent': ['Tribler ' + version_id],
                "Content-Type": ["text/plain; charset=utf-8"]
            }), POSTDataProducer(post_data, raw_data))
Пример #17
0
class FTPFuncTest(unittest.TestCase):
    """Functional-test base: swift client, temp dir and FTP client per test."""

    @defer.inlineCallbacks
    def setUp(self):
        self.pool = HTTPConnectionPool(reactor, persistent=True)
        self.swift = get_swift_client(conf, pool=self.pool)
        self.tmpdir = tempfile.mkdtemp()
        self.ftp = self.get_ftp_client()
        # Start each test from a clean swift account.
        yield clean_swift(self.swift)

    @defer.inlineCallbacks
    def tearDown(self):
        # Local scratch space first, then remote state, then connections.
        shutil.rmtree(self.tmpdir)
        self.ftp.close()
        yield clean_swift(self.swift)
        yield self.pool.closeCachedConnections()

    def get_ftp_client(self):
        """Build the FTP client; overridable hook for subclasses."""
        return get_ftp_client(conf)
Пример #18
0
class AbstractBaseApiTest(TestAsServer):
    """
    Tests for the Tribler HTTP API should create a subclass of this class.
    """
    @inlineCallbacks
    def setUp(self):
        yield super(AbstractBaseApiTest, self).setUp()
        self.connection_pool = HTTPConnectionPool(reactor, False)

    @inlineCallbacks
    def tearDown(self):
        yield self.close_connections()
        yield super(AbstractBaseApiTest, self).tearDown()

    def close_connections(self):
        """Release every connection cached by the pool."""
        return self.connection_pool.closeCachedConnections()

    def setUpPreSession(self):
        super(AbstractBaseApiTest, self).setUpPreSession()
        self.config.set_http_api_enabled(True)
        self.config.set_http_api_retry_port(True)
        self.config.set_tunnel_community_enabled(False)

        # Make sure we select a random port for the HTTP API
        # Each TEST_BUCKET gets its own 2000-port window so parallel CI
        # runs do not collide.
        min_base_port = 1000 if not os.environ.get("TEST_BUCKET", None) \
            else int(os.environ['TEST_BUCKET']) * 2000 + 2000
        self.config.set_http_api_port(
            get_random_port(min_port=min_base_port,
                            max_port=min_base_port + 2000))

    def do_request(self, endpoint, req_type, post_data, raw_data):
        """Issue *req_type* at *endpoint* on the local API; returns a deferred."""
        # Agent.request needs bytes for the method; str objects are encoded,
        # anything already bytes-like falls through the AttributeError.
        try:
            req_type = req_type.encode('utf-8')
        except AttributeError:
            pass
        agent = Agent(reactor, pool=self.connection_pool)
        return agent.request(
            req_type, 'http://localhost:%s/%s' %
            (self.session.config.get_http_api_port(), endpoint),
            Headers({
                'User-Agent': ['Tribler ' + version_id],
                "Content-Type": ["text/plain; charset=utf-8"]
            }), POSTDataProducer(post_data, raw_data))
Пример #19
0
class AbstractBaseApiTest(TestAsServer):
    """
    Tests for the Tribler HTTP API should create a subclass of this class.
    """
    @inlineCallbacks
    def setUp(self):
        yield super(AbstractBaseApiTest, self).setUp()
        self.connection_pool = HTTPConnectionPool(reactor, False)

    @inlineCallbacks
    def tearDown(self):
        yield self.close_connections()
        yield super(AbstractBaseApiTest, self).tearDown()

    def close_connections(self):
        """Release every connection cached by the pool."""
        return self.connection_pool.closeCachedConnections()

    def setUpPreSession(self):
        super(AbstractBaseApiTest, self).setUpPreSession()
        self.config.set_http_api_enabled(True)
        self.config.set_http_api_retry_port(True)
        self.config.set_tunnel_community_enabled(False)

        # Pick a random port for the HTTP API; each TEST_BUCKET gets its
        # own 2000-port window so parallel CI runs do not collide.
        if not os.environ.get("TEST_BUCKET", None):
            min_base_port = 1000
        else:
            min_base_port = int(os.environ['TEST_BUCKET']) * 2000 + 2000
        port = get_random_port(min_port=min_base_port,
                               max_port=min_base_port + 2000)
        self.config.set_http_api_port(port)

    def do_request(self, endpoint, req_type, post_data, raw_data):
        """Issue *req_type* at *endpoint* on the local API; returns a deferred."""
        # The HTTP method must be bytes; encode str, pass bytes through.
        try:
            req_type = req_type.encode('utf-8')
        except AttributeError:
            pass
        url = 'http://localhost:%s/%s' % (self.session.config.get_http_api_port(), endpoint)
        headers = Headers({'User-Agent': ['Tribler ' + version_id],
                           "Content-Type": ["text/plain; charset=utf-8"]})
        producer = POSTDataProducer(post_data, raw_data)
        agent = Agent(reactor, pool=self.connection_pool)
        return agent.request(req_type, url, headers, producer)
Пример #20
0
class TorrentChecker(TaskManager):
    """
    Periodically checks the health (seeders/leechers) of known torrents by
    scraping their trackers over HTTP/UDP and, when supported, via a BEP33
    DHT lookup. Results are written back to the metadata store and published
    to the popularity community.
    """

    def __init__(self, session):
        super(TorrentChecker, self).__init__()
        self._logger = logging.getLogger(self.__class__.__name__)
        self.tribler_session = session

        self._should_stop = False

        # Two periodic tasks: one picks a random tracker to scrape, the other
        # picks a random torrent for a full health check.
        self.tracker_check_lc = self.register_task("tracker_check", LoopingCall(self.check_random_tracker))
        self.torrent_check_lc = self.register_task("torrent_check", LoopingCall(self.check_random_torrent))

        # Maps a tracker URL to the list of its currently active sessions.
        self._session_list = {'DHT': []}

        # Track all session cleanups
        self.session_stop_defer_list = []

        self.socket_mgr = self.udp_port = None
        self.connection_pool = None

    def initialize(self):
        """Start the periodic checks and set up the UDP socket / HTTP pool."""
        self.tracker_check_lc.start(TRACKER_SELECTION_INTERVAL, now=False)
        self.torrent_check_lc.start(TORRENT_SELECTION_INTERVAL, now=False)
        self.connection_pool = HTTPConnectionPool(reactor, False)
        self.socket_mgr = UdpSocketManager()
        self.create_socket_or_schedule()

    def listen_on_udp(self):
        """Bind the UDP socket manager to an OS-assigned port."""
        return reactor.listenUDP(0, self.socket_mgr)

    def create_socket_or_schedule(self):
        """
        This method attempts to bind to a UDP port. If it fails for some reason (i.e. no network connection), we try
        again later.
        """
        try:
            self.udp_port = self.listen_on_udp()
        except socket.error as exc:
            self._logger.error("Error when creating UDP socket in torrent checker: %s", exc)
            self.register_task("listen_udp_port", reactor.callLater(10, self.create_socket_or_schedule))

    def shutdown(self):
        """
        Shutdown the torrent health checker.

        Once shut down it can't be started again.
        :returns A deferred that will fire once the shutdown has completed.
        """
        self._should_stop = True

        if self.udp_port:
            self.session_stop_defer_list.append(maybeDeferred(self.udp_port.stopListening))
            self.udp_port = None

        if self.connection_pool:
            self.session_stop_defer_list.append(self.connection_pool.closeCachedConnections())

        self.shutdown_task_manager()

        # kill all the tracker sessions.
        # Wait for the defers to all have triggered by using a DeferredList
        for tracker_url in self._session_list.keys():
            for session in self._session_list[tracker_url]:
                self.session_stop_defer_list.append(session.cleanup())

        return DeferredList(self.session_stop_defer_list)

    def check_random_tracker(self):
        """
        Calling this method will fetch a random tracker from the database, select some torrents that have this
        tracker, and perform a request to these trackers.
        """
        tracker_url = self.get_valid_next_tracker_for_auto_check()
        if tracker_url is None:
            # .warn is a deprecated alias of .warning
            self._logger.warning(u"No tracker to select from, skip")
            return succeed(None)

        self._logger.debug(u"Start selecting torrents on tracker %s.", tracker_url)

        # get the torrents that should be checked
        infohashes = []
        with db_session:
            tracker = self.tribler_session.lm.mds.TrackerState.get(url=tracker_url)
            if tracker:
                torrents = tracker.torrents
                for torrent in torrents:
                    # Exponential back-off: the more often a tracker failed,
                    # the longer we wait before rechecking its torrents.
                    dynamic_interval = TORRENT_CHECK_RETRY_INTERVAL * (2 ** tracker.failures)
                    if torrent.last_check + dynamic_interval < int(time.time()):
                        infohashes.append(torrent.infohash)

        if len(infohashes) == 0:
            # We have no torrent to recheck for this tracker. Still update the last_check for this tracker.
            self._logger.info("No torrent to check for tracker %s", tracker_url)
            self.update_tracker_info(tracker_url, True)
            return succeed(None)
        elif tracker_url != u'DHT' and tracker_url != u'no-DHT':
            try:
                session = self._create_session_for_request(tracker_url, timeout=30)
            except MalformedTrackerURLException as e:
                # Remove the tracker from the database
                self.remove_tracker(tracker_url)
                self._logger.error(e)
                return succeed(None)

            # We shuffle the list so that different infohashes are checked on subsequent scrape requests if the total
            # number of infohashes exceeds the maximum number of infohashes we check.
            random.shuffle(infohashes)
            for infohash in infohashes:
                session.add_infohash(infohash)

            self._logger.info(u"Selected %d new torrents to check on tracker: %s", len(infohashes), tracker_url)
            return session.connect_to_tracker().addCallbacks(*self.get_callbacks_for_session(session)) \
                .addErrback(lambda _: None)

    @db_session
    def check_random_torrent(self):
        """
        Perform a full health check on a random torrent in the database.
        We prioritize torrents that have no health info attached.
        """
        random_torrents = self.tribler_session.lm.mds.TorrentState.select(
            lambda g: (metadata for metadata in g.metadata if metadata.status != LEGACY_ENTRY and
                       metadata.metadata_type == REGULAR_TORRENT))\
            .order_by(lambda g: g.last_check).limit(10)

        if not random_torrents:
            self._logger.info("Could not find any eligible torrent for random torrent check")
            return None

        random_torrent = choice(random_torrents)
        self.check_torrent_health(random_torrent.infohash)

        return random_torrent.infohash

    def get_callbacks_for_session(self, session):
        """Return a (callback, errback) pair bound to the given session."""
        success_lambda = lambda info_dict: self._on_result_from_session(session, info_dict)
        error_lambda = lambda failure: self.on_session_error(session, failure)
        return success_lambda, error_lambda

    def get_valid_next_tracker_for_auto_check(self):
        """Return the next auto-check tracker with a valid URL, pruning invalid ones."""
        tracker_url = self.get_next_tracker_for_auto_check()
        while tracker_url and not is_valid_url(tracker_url):
            self.remove_tracker(tracker_url)
            tracker_url = self.get_next_tracker_for_auto_check()
        return tracker_url

    def get_next_tracker_for_auto_check(self):
        return self.tribler_session.lm.tracker_manager.get_next_tracker_for_auto_check()

    def remove_tracker(self, tracker_url):
        self.tribler_session.lm.tracker_manager.remove_tracker(tracker_url)

    def update_tracker_info(self, tracker_url, value):
        self.tribler_session.lm.tracker_manager.update_tracker_info(tracker_url, value)

    def is_blacklisted_tracker(self, tracker_url):
        return tracker_url in self.tribler_session.lm.tracker_manager.blacklist

    @db_session
    def get_valid_trackers_of_torrent(self, torrent_id):
        """ Get a set of valid trackers for torrent. Also remove any invalid torrent."""
        db_tracker_list = self.tribler_session.lm.mds.TorrentState.get(infohash=database_blob(torrent_id)).trackers
        return set([str(tracker.url) for tracker in db_tracker_list
                    if is_valid_url(str(tracker.url)) and not self.is_blacklisted_tracker(str(tracker.url))])

    def on_torrent_health_check_completed(self, infohash, result):
        """
        Aggregate the per-tracker results for one torrent and persist the best
        seeder/leecher counts.

        :param infohash: infohash of the checked torrent.
        :param result: list of (success, response) pairs from a DeferredList,
                       where each successful response is a single-key dict of
                       the form {tracker_url: [result_dict]}.
        :return: dict mapping tracker URL to its result (or error info).
        """
        final_response = {}
        if not result or not isinstance(result, list):
            self._logger.info("Received invalid torrent checker result")
            return final_response

        torrent_update_dict = {'infohash': infohash, 'seeders': 0, 'leechers': 0, 'last_check': int(time.time())}
        for success, response in reversed(result):
            if not success and isinstance(response, Failure):
                final_response[response.tracker_url] = {'error': response.getErrorMessage()}
                continue

            # Fetch the single key once. The previous response.keys()[0] is not
            # valid on Python 3 (dict views are not subscriptable) and rebuilt
            # the key list on every access.
            tracker_url = next(iter(response))
            first_result = response[tracker_url][0]
            final_response[tracker_url] = first_result

            seeders = first_result['seeders']
            leechers = first_result['leechers']

            # More leeches is better, because undefined peers are marked as leeches in DHT
            if seeders > torrent_update_dict['seeders'] or \
                    (seeders == torrent_update_dict['seeders'] and leechers > torrent_update_dict['leechers']):
                torrent_update_dict['seeders'] = seeders
                torrent_update_dict['leechers'] = leechers

        self._update_torrent_result(torrent_update_dict)

        # Add this result to popularity community to publish to subscribers
        self.publish_torrent_result(torrent_update_dict)

        # TODO: DRY! Stop doing lots of formats, just make REST endpoint automatically encode binary data to hex!
        self.tribler_session.notifier.notify(NTFY_TORRENT, NTFY_UPDATE, infohash,
                                             {"num_seeders": torrent_update_dict["seeders"],
                                              "num_leechers": torrent_update_dict["leechers"],
                                              "last_tracker_check": torrent_update_dict["last_check"],
                                              "health": "updated"})
        return final_response

    def check_torrent_health(self, infohash, timeout=20, scrape_now=False):
        """
        Check the health of a torrent with a given infohash.
        :param infohash: Torrent infohash.
        :param timeout: The timeout to use in the performed requests
        :param scrape_now: Flag whether we want to force scraping immediately
        """
        tracker_set = []

        # We first check whether the torrent is already in the database and checked before
        with db_session:
            result = self.tribler_session.lm.mds.TorrentState.get(infohash=database_blob(infohash))
            if result:
                torrent_id = str(result.infohash)
                last_check = result.last_check
                time_diff = time.time() - last_check
                if time_diff < MIN_TORRENT_CHECK_INTERVAL and not scrape_now:
                    self._logger.debug("time interval too short, not doing torrent health check for %s",
                                       hexlify(infohash))
                    return succeed({
                        "db": {
                            "seeders": result.seeders,
                            "leechers": result.leechers,
                            "infohash": hexlify(infohash)
                        }
                    })

                # get torrent's tracker list from DB
                tracker_set = self.get_valid_trackers_of_torrent(torrent_id)

        deferred_list = []
        for tracker_url in tracker_set:
            session = self._create_session_for_request(tracker_url, timeout=timeout)
            session.add_infohash(infohash)
            deferred_list.append(session.connect_to_tracker().
                                 addCallbacks(*self.get_callbacks_for_session(session)))

        # Create a (fake) DHT session for the lookup if we have support for BEP33.
        if has_bep33_support():
            session = FakeDHTSession(self.tribler_session, infohash, timeout)
            self._session_list['DHT'].append(session)
            deferred_list.append(session.connect_to_tracker().
                                 addCallbacks(*self.get_callbacks_for_session(session)))

        return DeferredList(deferred_list, consumeErrors=True).addCallback(
            lambda res: self.on_torrent_health_check_completed(infohash, res))

    def on_session_error(self, session, failure):
        """
        Handles the scenario of when a tracker session has failed by calling the
        tracker_manager's update_tracker_info function.
        Trap value errors that are thrown by e.g. the HTTPTrackerSession when a connection fails.
        And trap CancelledErrors that can be thrown when shutting down.
        :param failure: The failure object raised by Twisted.
        """
        failure.trap(ValueError, CancelledError, ConnectingCancelledError, ConnectionLost, RuntimeError)
        self._logger.warning(u"Got session error for URL %s: %s", session.tracker_url,
                             str(failure).replace(u'\n]', u']'))

        self.clean_session(session)

        # Do not update if the connection got cancelled, we are probably shutting down
        # and the tracker_manager may have shutdown already.
        if failure.check(CancelledError, ConnectingCancelledError) is None:
            self.tribler_session.lm.tracker_manager.update_tracker_info(session.tracker_url, False)

        failure.tracker_url = session.tracker_url
        return failure

    def _create_session_for_request(self, tracker_url, timeout=20):
        """Create and register a tracker session for the given URL."""
        session = create_tracker_session(tracker_url, timeout, self.socket_mgr, connection_pool=self.connection_pool)

        if tracker_url not in self._session_list:
            self._session_list[tracker_url] = []
        self._session_list[tracker_url].append(session)

        self._logger.debug(u"Session created for tracker %s", tracker_url)
        return session

    def clean_session(self, session):
        """Record the session outcome, schedule its cleanup and unregister it."""
        self.tribler_session.lm.tracker_manager.update_tracker_info(session.tracker_url, not session.is_failed)
        self.session_stop_defer_list.append(session.cleanup())

        # Remove the session from our session list dictionary
        self._session_list[session.tracker_url].remove(session)
        if len(self._session_list[session.tracker_url]) == 0 and session.tracker_url != u"DHT":
            del self._session_list[session.tracker_url]

    def _on_result_from_session(self, session, result_list):
        """Pass the session result through, unless we are shutting down."""
        if self._should_stop:
            return

        self.clean_session(session)

        return result_list

    def _update_torrent_result(self, response):
        """Persist the aggregated seeders/leechers/last_check for a torrent."""
        infohash = response['infohash']
        seeders = response['seeders']
        leechers = response['leechers']
        last_check = response['last_check']

        self._logger.debug(u"Update result %s/%s for %s", seeders, leechers, hexlify(infohash))

        with db_session:
            # Update torrent state
            torrent = self.tribler_session.lm.mds.TorrentState.get(infohash=database_blob(infohash))
            if not torrent:
                # Something is wrong, there should exist a corresponding TorrentState entry in the DB.
                return
            torrent.seeders = seeders
            torrent.leechers = leechers
            torrent.last_check = last_check

    def publish_torrent_result(self, response):
        """Queue a non-zero-seeder result for gossip via the popularity community."""
        if response['seeders'] == 0:
            self._logger.info("Not publishing zero seeded torrents")
            return
        content = (response['infohash'], response['seeders'], response['leechers'], response['last_check'])
        if self.tribler_session.lm.popularity_community:
            self.tribler_session.lm.popularity_community.queue_content(content)
        else:
            self._logger.info("Popular community not available to publish torrent checker result")
Пример #21
0
class HTTPClientService(service.SharedService):
    """A SharedService class that can make http requests to remote services.

    I can use either txrequests or treq, depending on what I find installed

    I provide minimal get/post/put/delete API with automatic baseurl joining, and json data encoding
    that is suitable for use from buildbot services.
    """
    TREQ_PROS_AND_CONS = textwrap.dedent("""
       txrequests is based on requests and is probably a bit more mature, but it requires threads
       to run, so has more overhead.
       treq is better integrated in twisted and is more and more feature equivalent

       txrequests is 2.8x slower than treq due to the use of threads.

       http://treq.readthedocs.io/en/latest/#feature-parity-w-requests
       pip install txrequests
           or
       pip install treq
    """)
    # Those could be in theory be overridden in master.cfg by using
    # import buildbot.util.httpclientservice.HTTPClientService.PREFER_TREQ = True
    # We prefer at the moment keeping it simple
    PREFER_TREQ = False
    MAX_THREADS = 5

    def __init__(self,
                 base_url,
                 auth=None,
                 headers=None,
                 verify=None,
                 debug=False,
                 skipEncoding=False):
        """
        :param base_url: absolute URL of the remote service; must NOT end with '/'
            (endpoints are joined as base_url + ep, where ep starts with '/').
        :param auth: optional auth object; a (user, password) tuple enables treq,
            anything else forces the txrequests backend (see startService).
        :param headers: optional dict of headers sent with every request.
        :param verify: TLS verification flag passed through to txrequests.
        :param debug: when True, log each request and response.
        :param skipEncoding: when True, send JSON payloads as str instead of bytes.
        """
        assert not base_url.endswith(
            "/"), "baseurl should not end with /: " + base_url
        super().__init__()
        self._base_url = base_url
        self._auth = auth
        self._headers = headers
        # Backend state; populated lazily in startService depending on which
        # HTTP library (txrequests or treq) is actually available.
        self._pool = None
        self._session = None
        self.verify = verify
        self.debug = debug
        self.skipEncoding = skipEncoding

    def updateHeaders(self, headers):
        """Merge *headers* into the default headers sent with every request."""
        if self._headers is None:
            self._headers = {}
        self._headers.update(headers)

    @staticmethod
    def checkAvailable(from_module):
        """Call me at checkConfig time to properly report config error
           if neither txrequests or treq is installed
        """
        if txrequests is None and treq is None:
            config.error(
                f"neither txrequests nor treq is installed, but {from_module} is "
                f"requiring it\n\n{HTTPClientService.TREQ_PROS_AND_CONS}")

    def startService(self):
        """Select the HTTP backend and set up its session/pool."""
        # treq only supports basicauth, so we force txrequests if the auth is
        # something else
        if self._auth is not None and not isinstance(self._auth, tuple):
            # NOTE: sets an instance attribute shadowing the class-level default.
            self.PREFER_TREQ = False
        if txrequests is not None and not self.PREFER_TREQ:
            self._session = txrequests.Session()
            self._doRequest = self._doTxRequest
        elif treq is None:
            raise ImportError(
                "{classname} requires either txrequest or treq install."
                " Users should call {classname}.checkAvailable() during checkConfig()"
                " to properly alert the user.".format(
                    classname=self.__class__.__name__))
        else:
            self._doRequest = self._doTReq
            self._pool = HTTPConnectionPool(self.master.reactor)
            self._pool.maxPersistentPerHost = self.MAX_THREADS
            self._agent = Agent(self.master.reactor, pool=self._pool)
        return super().startService()

    @defer.inlineCallbacks
    def stopService(self):
        """Tear down whichever backend was started (session and/or pool)."""
        if self._session:
            yield self._session.close()
        if self._pool:
            yield self._pool.closeCachedConnections()
        yield super().stopService()

    def _prepareRequest(self, ep, kwargs):
        """Resolve the full URL and normalize auth/headers/json kwargs.

        Returns a (url, kwargs) tuple consumed by both backends.
        """
        if ep.startswith('http://') or ep.startswith('https://'):
            url = ep
        else:
            assert ep == "" or ep.startswith(
                "/"), "ep should start with /: " + ep
            url = self._base_url + ep
        if self._auth is not None and 'auth' not in kwargs:
            kwargs['auth'] = self._auth
        headers = kwargs.get('headers', {})
        if self._headers is not None:
            headers.update(self._headers)
        kwargs['headers'] = headers

        # we manually do the json encoding in order to automatically convert timestamps
        # for txrequests and treq
        json = kwargs.pop('json', None)
        if isinstance(json, (dict, list)):
            jsonStr = jsonmodule.dumps(json, default=toJson)
            kwargs['headers']['Content-Type'] = 'application/json'
            if self.skipEncoding:
                kwargs['data'] = jsonStr
            else:
                jsonBytes = unicode2bytes(jsonStr)
                kwargs['data'] = jsonBytes
        return url, kwargs

    @defer.inlineCallbacks
    def _doTxRequest(self, method, ep, **kwargs):
        """Issue a request through the threaded txrequests backend."""
        url, kwargs = yield self._prepareRequest(ep, kwargs)
        if self.debug:
            log.debug("http {url} {kwargs}", url=url, kwargs=kwargs)

        def readContent(session, res):
            # this forces reading of the content inside the thread
            res.content
            if self.debug:
                log.debug("==> {code}: {content}",
                          code=res.status_code,
                          content=res.content)
            return res

        # read the whole content in the thread
        kwargs['background_callback'] = readContent
        if self.verify is False:
            kwargs['verify'] = False

        res = yield self._session.request(method, url, **kwargs)
        return IHttpResponse(TxRequestsResponseWrapper(res))

    @defer.inlineCallbacks
    def _doTReq(self, method, ep, **kwargs):
        """Issue a request through the twisted-native treq backend."""
        url, kwargs = yield self._prepareRequest(ep, kwargs)
        # treq requires header values to be an array
        kwargs['headers'] = {k: [v] for k, v in kwargs['headers'].items()}
        kwargs['agent'] = self._agent

        res = yield getattr(treq, method)(url, **kwargs)
        return IHttpResponse(res)

    # lets be nice to the auto completers, and don't generate that code
    def get(self, ep, **kwargs):
        return self._doRequest('get', ep, **kwargs)

    def put(self, ep, **kwargs):
        return self._doRequest('put', ep, **kwargs)

    def delete(self, ep, **kwargs):
        return self._doRequest('delete', ep, **kwargs)

    def post(self, ep, **kwargs):
        return self._doRequest('post', ep, **kwargs)
Пример #22
0
class TestEventsEndpoint(AbstractApiTest):
    """
    Tests for the /events endpoint: opens a long-lived HTTP connection and
    verifies that notifier events are streamed through it.
    """

    @inlineCallbacks
    def setUp(self):
        yield super(TestEventsEndpoint, self).setUp()
        # Fires once all expected event messages have been received.
        self.events_deferred = Deferred()
        self.connection_pool = HTTPConnectionPool(reactor, False)
        # Open the events socket as soon as Tribler has started.
        self.socket_open_deferred = self.tribler_started_deferred.addCallback(self.open_events_socket)
        self.messages_to_wait_for = 0

    @inlineCallbacks
    def tearDown(self):
        yield self.close_connections()

        # Wait to make sure the HTTPChannel is closed, see https://twistedmatrix.com/trac/ticket/2447
        yield deferLater(reactor, 0.3, lambda: None)

        yield super(TestEventsEndpoint, self).tearDown()

    def on_event_socket_opened(self, response):
        """Attach the protocol that counts incoming event messages."""
        response.deliverBody(EventDataProtocol(self.messages_to_wait_for, self.events_deferred, response))

    def open_events_socket(self, _):
        """Issue the GET /events request and hook up the body protocol."""
        agent = Agent(reactor, pool=self.connection_pool)
        return agent.request(b'GET', 'http://localhost:%s/events' % self.session.config.get_http_api_port(),
                             Headers({'User-Agent': ['Tribler ' + version_id]}), None) \
            .addCallback(self.on_event_socket_opened)

    def close_connections(self):
        """Close all cached connections; returns a deferred firing when done."""
        return self.connection_pool.closeCachedConnections()

    @trial_timeout(20)
    def test_events(self):
        """
        Testing whether various events are coming through the events endpoints
        """
        # One message per notify below, plus the initial events_start message.
        self.messages_to_wait_for = 21

        def send_notifications(_):
            self.session.notifier.notify(NTFY_UPGRADER, NTFY_STARTED, None, None)
            self.session.notifier.notify(NTFY_UPGRADER_TICK, NTFY_STARTED, None, None)
            self.session.notifier.notify(NTFY_UPGRADER, NTFY_FINISHED, None, None)
            self.session.notifier.notify(NTFY_WATCH_FOLDER_CORRUPT_TORRENT, NTFY_INSERT, None, None)
            self.session.notifier.notify(NTFY_NEW_VERSION, NTFY_INSERT, None, None)
            self.session.notifier.notify(NTFY_CHANNEL, NTFY_DISCOVERED, None, None)
            self.session.notifier.notify(NTFY_TORRENT, NTFY_DISCOVERED, None, {'a': 'Invalid character \xa1'})
            self.session.notifier.notify(NTFY_TORRENT, NTFY_FINISHED, 'a' * 10, None)
            self.session.notifier.notify(NTFY_TORRENT, NTFY_ERROR, 'a' * 10, 'This is an error message')
            self.session.notifier.notify(NTFY_MARKET_ON_ASK, NTFY_UPDATE, None, {'a': 'b'})
            self.session.notifier.notify(NTFY_MARKET_ON_BID, NTFY_UPDATE, None, {'a': 'b'})
            self.session.notifier.notify(NTFY_MARKET_ON_ASK_TIMEOUT, NTFY_UPDATE, None, {'a': 'b'})
            self.session.notifier.notify(NTFY_MARKET_ON_BID_TIMEOUT, NTFY_UPDATE, None, {'a': 'b'})
            self.session.notifier.notify(NTFY_MARKET_ON_TRANSACTION_COMPLETE, NTFY_UPDATE, None, {'a': 'b'})
            self.session.notifier.notify(NTFY_MARKET_ON_PAYMENT_RECEIVED, NTFY_UPDATE, None, {'a': 'b'})
            self.session.notifier.notify(NTFY_MARKET_ON_PAYMENT_SENT, NTFY_UPDATE, None, {'a': 'b'})
            self.session.notifier.notify(SIGNAL_RESOURCE_CHECK, SIGNAL_LOW_SPACE, None, {})
            self.session.notifier.notify(NTFY_CREDIT_MINING, NTFY_ERROR, None, {"message": "Some credit mining error"})
            self.session.notifier.notify(NTFY_TUNNEL, NTFY_REMOVE, Circuit(1234, None), 'test')
            self.session.notifier.notify(SIGNAL_GIGACHANNEL_COMMUNITY, SIGNAL_ON_SEARCH_RESULTS, None,
                                         {"query": "test", "results": []})
            self.session.lm.api_manager.root_endpoint.events_endpoint.on_tribler_exception("hi")

        self.socket_open_deferred.addCallback(send_notifications)

        return self.events_deferred
Пример #23
0
class TestRestApiBase(TestBase):
    """
    Base class for market REST API tests: spins up a MarketCommunity node with
    a REST manager attached and offers a do_request helper with response
    code/body assertions built in.
    """
    __testing__ = False
    NUM_NODES = 1

    def setUp(self):
        super(TestRestApiBase, self).setUp()

        # Expectations checked by parse_response/parse_body for each request.
        self.expected_response_code = 200
        self.expected_response_json = None
        self.should_check_equality = True
        self.restapi = None

        self.initialize(MarketCommunity, self.NUM_NODES)
        for node in self.nodes:
            node.overlay._use_main_thread = True

        self.connection_pool = HTTPConnectionPool(reactor, False)

    @inlineCallbacks
    def tearDown(self):
        yield self.close_connections()
        yield self.restapi.stop()
        yield super(TestRestApiBase, self).tearDown()

    def close_connections(self):
        """Close pooled connections; returns a deferred firing when done."""
        return self.connection_pool.closeCachedConnections()

    def create_node(self):
        """Build a MarketCommunity node backed by dummy wallets plus its REST API."""
        first_wallet = DummyWallet1()
        second_wallet = DummyWallet2()
        first_wallet.MONITOR_DELAY = 0
        second_wallet.MONITOR_DELAY = 0

        mock_ipv8 = MockIPv8(u"curve25519",
                             MarketCommunity,
                             create_trustchain=True,
                             create_dht=True,
                             is_matchmaker=True,
                             wallets={'DUM1': first_wallet, 'DUM2': second_wallet},
                             use_database=False,
                             working_directory=u":memory:")

        mock_ipv8.overlay.settings.single_trade = False
        mock_ipv8.overlay.clearing_policies = []
        mock_ipv8.overlays = [mock_ipv8.overlay]

        # Start REST API
        self.restapi = RESTManager(mock_ipv8)
        self.restapi.start(get_random_port())

        return mock_ipv8

    def parse_body(self, body):
        """Optionally compare the JSON body against the expected response."""
        if body is None or not self.should_check_equality:
            return body
        self.assertDictEqual(self.expected_response_json,
                             json.twisted_loads(body))
        return body

    def parse_response(self, response):
        """Assert the status code; read the body only for codes that carry one."""
        self.assertEqual(response.code, self.expected_response_code)
        if response.code not in (200, 400, 500):
            return succeed(None)
        return readBody(response)

    def do_request(self,
                   endpoint,
                   expected_code=200,
                   expected_json=None,
                   request_type='GET',
                   post_data='',
                   raw_data=False):
        """Perform a request against the local REST API and verify the reply."""
        self.expected_response_code = expected_code
        self.expected_response_json = expected_json

        try:
            # Agent wants bytes; bytes inputs raise AttributeError and pass through.
            request_type = request_type.encode('ascii')
            endpoint = endpoint.encode('ascii')
        except AttributeError:
            pass
        url = b'http://localhost:%d/%s' % (self.restapi.port, endpoint)
        headers = Headers({
            'User-Agent': ['AnyDex'],
            "Content-Type": ["text/plain; charset=utf-8"]
        })
        agent = Agent(reactor, pool=self.connection_pool)
        deferred = agent.request(request_type, url, headers,
                                 POSTDataProducer(post_data, raw_data))

        return deferred.addCallback(self.parse_response).addCallback(
            self.parse_body)
Пример #24
0
class HttpTrackerSession(TrackerSession):
    """
    A tracker session that scrapes an HTTP(S) tracker for seeder/leecher
    counts of the infohashes added to it. The result is delivered through
    ``result_deferred`` returned by :meth:`connect_to_tracker`.
    """
    def __init__(self, tracker_url, tracker_address, announce_page, timeout):
        super(HttpTrackerSession, self).__init__(u'HTTP', tracker_url, tracker_address, announce_page, timeout)
        self._header_buffer = None
        self._message_buffer = None
        self._content_encoding = None
        self._content_length = None
        self._received_length = None
        # Deferred fired with {tracker_url: [results]} on success.
        self.result_deferred = None
        self.request = None
        # Non-persistent pool, closed again in cleanup().
        self._connection_pool = HTTPConnectionPool(reactor, False)

    def max_retries(self):
        """
        Returns the max amount of retries allowed for this session.
        :return: The maximum amount of retries.
        """
        return HTTP_TRACKER_MAX_RETRIES

    def retry_interval(self):
        """
        Returns the interval one has to wait before retrying to connect.
        :return: The interval before retrying.
        """
        return HTTP_TRACKER_RECHECK_INTERVAL

    def connect_to_tracker(self):
        """Send the scrape request; returns a deferred firing with the results."""
        # create the HTTP GET message
        # Note: some trackers have strange URLs, e.g.,
        #       http://moviezone.ws/announce.php?passkey=8ae51c4b47d3e7d0774a720fa511cc2a
        #       which has some sort of 'key' as parameter, so we need to use the add_url_params
        #       utility function to handle such cases.

        url = add_url_params("http://%s:%s/%s" %
                             (self._tracker_address[0], self._tracker_address[1],
                              self._announce_page.replace(u'announce', u'scrape')),
                             {"info_hash": self._infohash_list})

        # RedirectAgent transparently follows tracker HTTP redirects.
        agent = RedirectAgent(Agent(reactor, connectTimeout=self.timeout, pool=self._connection_pool))
        self.request = self.register_task("request", agent.request('GET', bytes(url)))
        self.request.addCallback(self.on_response)
        self.request.addErrback(self.on_error)

        self._logger.debug(u"%s HTTP SCRAPE message sent: %s", self, url)

        # no more requests can be appended to this session
        self._is_initiated = True
        self._last_contact = int(time.time())

        # Return deferred that will evaluate when the whole chain is done.
        self.result_deferred = self.register_task("result", Deferred(canceller=self._on_cancel))
        return self.result_deferred

    def on_error(self, failure):
        """
        Handles the case of an error during the request.
        :param failure: The failure object that is thrown by a deferred.
        """
        self._logger.info("Error when querying http tracker: %s %s", str(failure), self.tracker_url)
        self.failed(msg=failure.getErrorMessage())

    def on_response(self, response):
        """Validate the HTTP status and, if OK, schedule body parsing."""
        # Check if this one was OK.
        if response.code != 200:
            # error response code
            self._logger.warning(u"%s HTTP SCRAPE error response code [%s, %s]", self, response.code, response.phrase)
            self.failed(msg="error code %s" % response.code)
            return

        # All ok, parse the body
        self.register_task("parse_body", readBody(response).addCallbacks(self._process_scrape_response, self.on_error))

    def _on_cancel(self, a):
        """
        :param _: The deferred which we ignore.
        This function handles the scenario of the session prematurely being cleaned up,
        most likely due to a shutdown.
        This function only should be called by the result_deferred.
        """
        self._logger.info(
            "The result deferred of this HTTP tracker session is being cancelled due to a session cleanup. HTTP url: %s",
            self.tracker_url)

    def failed(self, msg=None):
        """
        This method handles everything that needs to be done when one step
        in the session has failed and thus no data can be obtained.
        """
        self._is_failed = True
        if self.result_deferred:
            result_msg = "HTTP tracker failed for url %s" % self._tracker_url
            if msg:
                result_msg += " (error: %s)" % msg
            self.result_deferred.errback(ValueError(result_msg))

    def _process_scrape_response(self, body):
        """
        This function handles the response body of a HTTP tracker,
        parsing the results.
        """
        # parse the retrieved results
        if body is None:
            self.failed(msg="no response body")
            return

        response_dict = bdecode(body)
        if response_dict is None:
            self.failed(msg="no valid response")
            return

        response_list = []

        # Copy so we can cross off the infohashes that got a result.
        unprocessed_infohash_list = self._infohash_list[:]
        if 'files' in response_dict and isinstance(response_dict['files'], dict):
            for infohash in response_dict['files']:
                complete = response_dict['files'][infohash].get('complete', 0)
                incomplete = response_dict['files'][infohash].get('incomplete', 0)

                # Sow complete as seeders. "complete: number of peers with the entire file, i.e. seeders (integer)"
                #  - https://wiki.theory.org/BitTorrentSpecification#Tracker_.27scrape.27_Convention
                seeders = complete
                leechers = incomplete

                # Store the information in the dictionary
                response_list.append({'infohash': infohash.encode('hex'), 'seeders': seeders, 'leechers': leechers})

                # remove this infohash in the infohash list of this session
                if infohash in unprocessed_infohash_list:
                    unprocessed_infohash_list.remove(infohash)

        elif 'failure reason' in response_dict:
            self._logger.info(u"%s Failure as reported by tracker [%s]", self, repr(response_dict['failure reason']))
            self.failed(msg=response_dict['failure reason'])
            return

        # handle the infohashes with no result (seeders/leechers = 0/0)
        for infohash in unprocessed_infohash_list:
            response_list.append({'infohash': infohash.encode('hex'), 'seeders': 0, 'leechers': 0})

        self._is_finished = True
        self.result_deferred.callback({self.tracker_url: response_list})

    @inlineCallbacks
    def cleanup(self):
        """
        Cleans the session by cancelling all deferreds and closing sockets.
        :return: A deferred that fires once the cleanup is done.
        """
        yield self._connection_pool.closeCachedConnections()
        yield super(HttpTrackerSession, self).cleanup()
        self.request = None

        self.result_deferred = None
Пример #25
0
class TestScaling(unittest.TestCase):
    """Integration tests for basic Otter autoscale operations: scaling up,
    scaling down, and executing a policy after the launch configuration
    gains an extra cloud load balancer.

    Each test authenticates via the identity client created in ``setUp`` and
    drives a scaling group through a chain of deferred callbacks.
    """

    def setUp(self):
        """Create a fresh non-persistent HTTP connection pool and an
        identity client bound to it.
        """
        self.pool = HTTPConnectionPool(reactor, False)
        self.identity = IdentityV2(
            auth=auth, username=username, password=password,
            endpoint=endpoint, pool=self.pool
        )

    def tearDown(self):
        """Close cached connections, then poll the reactor until no
        ``Client`` file descriptors remain, so sockets cannot leak into
        the next test.
        """
        def _check_fds(_):
            # Bug fix: the original summed getReaders() with itself, so
            # write-side descriptors were never examined and lingering
            # client connections could leak between tests.
            fds = set(reactor.getReaders() + reactor.getWriters())
            if not [fd for fd in fds if isinstance(fd, Client)]:
                return
            # Some Client descriptors are still open; check again on the
            # next reactor iteration.
            return deferLater(reactor, 0, _check_fds, None)
        return self.pool.closeCachedConnections().addBoth(_check_fds)

    def test_scaling_up(self):
        """Verify that a basic scale-up operation (+2 servers) completes."""
        scaling_group_body = autoscale.create_scaling_group_dict(
            image_ref=image_ref, flavor_ref=flavor_ref,
            name="my-group-configuration"
        )

        self.scaling_group = autoscale.ScalingGroup(
            group_config=scaling_group_body,
            pool=self.pool
        )

        self.scaling_policy = autoscale.ScalingPolicy(
            scale_by=2,
            scaling_group=self.scaling_group
        )

        rcs = TestResources()
        d = (
            self.identity.authenticate_user(
                rcs,
                resources={
                    "otter": ("autoscale", "http://localhost:9000/v1.0/{0}"),
                    "loadbalancers": ("cloudLoadBalancers",),
                },
                region=region,
            ).addCallback(print_token_and_ep)
            .addCallback(self.scaling_group.start, self)
            .addCallback(dump_groups)
            .addCallback(self.scaling_policy.start, self)
            .addCallback(self.scaling_policy.execute)
            .addCallback(
                self.scaling_group.wait_for_N_servers, 2, timeout=1800
            ).addCallback(
                lambda _: self.scaling_group.get_scaling_group_state(rcs)
            ).addCallback(dump_state)
        )
        return d
    test_scaling_up.timeout = 1800

    def test_scaling_down(self):
        """
        Verify that a basic scale down operation completes as expected.

        Scales up by 2 first (so there is something to remove), then scales
        down by 1 and waits for the group to converge on a single server.
        """
        scaling_group_body = autoscale.create_scaling_group_dict(
            image_ref=image_ref, flavor_ref=flavor_ref,
            name="tr-scaledown-conf",
        )

        self.scaling_group = autoscale.ScalingGroup(
            group_config=scaling_group_body,
            pool=self.pool
        )

        self.scaling_policy_up_2 = autoscale.ScalingPolicy(
            scale_by=2,
            scaling_group=self.scaling_group
        )
        self.scaling_policy_down_1 = autoscale.ScalingPolicy(
            scale_by=-1,
            scaling_group=self.scaling_group
        )

        rcs = TestResources()
        d = (
            self.identity.authenticate_user(
                rcs,
                resources={
                    "otter": ("autoscale", "http://localhost:9000/v1.0/{0}"),
                    "loadbalancers": ("cloudLoadBalancers",),
                },
                region=region
            ).addCallback(print_token_and_ep)
            .addCallback(self.scaling_group.start, self)
            .addCallback(self.scaling_policy_up_2.start, self)
            .addCallback(self.scaling_policy_up_2.execute)
            .addCallback(self.scaling_group.wait_for_N_servers,
                         2, timeout=1800)
            .addCallback(
                lambda _: self.scaling_group.get_scaling_group_state(rcs))
            .addCallback(dump_state)
            .addCallback(lambda _: rcs)
            .addCallback(self.scaling_policy_down_1.start, self)
            .addCallback(self.scaling_policy_down_1.execute)
            .addCallback(self.scaling_group.wait_for_N_servers,
                         1, timeout=900)
            .addCallback(
                lambda _: self.scaling_group.get_scaling_group_state(rcs)
            ).addCallback(dump_state)
        )
        return d
    test_scaling_down.timeout = 2700

    def test_policy_execution_after_adding_clb(self):
        """This test attempts to reproduce the steps documented in a bug
        submitted to Otter, documented in
        https://github.com/rackerlabs/otter/issues/1135
        """
        rcs = TestResources()

        def create_1st_load_balancer():
            """First, we authenticate and create a single load balancer."""
            self.clb1 = cloud_load_balancer.CloudLoadBalancer(pool=self.pool)

            return (
                self.identity.authenticate_user(
                    rcs,
                    resources={
                        "otter": (
                            "autoscale", "http://localhost:9000/v1.0/{0}"
                        ),
                        "loadbalancers": ("cloudLoadBalancers",),
                    },
                    region=region
                ).addCallback(self.clb1.start, self)
                .addCallback(self.clb1.wait_for_state, "ACTIVE", 600)
            ).addCallback(add_2nd_load_balancer, self)

        def add_2nd_load_balancer(_, self):
            """After that, we scale up to two servers, then create the second
            load balancer.
            """
            self.clb2 = cloud_load_balancer.CloudLoadBalancer(pool=self.pool)

            scaling_group_body = {
                "launchConfiguration": {
                    "type": "launch_server",
                    "args": {
                        "loadBalancers": [{
                            "port": 80,
                            "loadBalancerId": self.clb1.clb_id,
                        }],
                        "server": {
                            "flavorRef": flavor_ref,
                            "imageRef": image_ref,
                        }
                    }
                },
                "groupConfiguration": {
                    "name": "my-group-configuration",
                    "cooldown": 0,
                    "minEntities": 0,
                },
                "scalingPolicies": [],
            }

            self.scaling_group = autoscale.ScalingGroup(
                group_config=scaling_group_body,
                pool=self.pool
            )

            self.scale_up_policy = autoscale.ScalingPolicy(
                scale_by=2,
                scaling_group=self.scaling_group
            )

            self.scale_down_policy = autoscale.ScalingPolicy(
                scale_by=-2,
                scaling_group=self.scaling_group
            )

            d = (
                self.scaling_group.start(rcs, self)
                .addCallback(self.scale_up_policy.start, self)
                .addCallback(self.scale_down_policy.start, self)
                .addCallback(self.scale_up_policy.execute)
                .addCallback(self.scaling_group.wait_for_N_servers, 2,
                             timeout=1800)
                .addCallback(self.clb2.start, self)
                .addCallback(self.clb2.wait_for_state, "ACTIVE", 600)
            ).addCallback(scale_after_lc_changed, self)
            return d

        def scale_after_lc_changed(_, self):
            """After that, we attempt to execute a scaling policy (doesn't
            matter which one).  According to the bug report, this yields an
            error.
            """
            lc_alt = {
                "type": "launch_server",
                "args": {
                    "loadBalancers": [{
                        "port": 80,
                        "loadBalancerId": self.clb1.clb_id,
                    }, {
                        "port": 80,
                        "loadBalancerId": self.clb2.clb_id,
                    }],
                    "server": {
                        "flavorRef": flavor_ref,
                        "imageRef": image_ref,
                    }
                }
            }
            d = (
                self.scaling_group.set_launch_config(rcs, lc_alt)
                .addCallback(self.scale_down_policy.execute)
                .addCallback(self.scaling_group.wait_for_N_servers, 0,
                             timeout=1800)
            )
            return d

        return create_1st_load_balancer()
    test_policy_execution_after_adding_clb.timeout = 1800
# Example #26
class TestEventsEndpoint(AbstractApiTest):
    """
    Tests for the ``/events`` HTTP API endpoint: opens a long-lived event
    socket during setup and verifies that notifications published through the
    session notifier arrive as event messages.
    """

    @inlineCallbacks
    def setUp(self):
        """Start the base API test fixture, then open the events socket once
        Tribler has started.
        """
        yield super(TestEventsEndpoint, self).setUp()
        # Fires once the expected number of event messages has been received.
        self.events_deferred = Deferred()
        # Non-persistent pool so the events connection can be torn down cleanly.
        self.connection_pool = HTTPConnectionPool(reactor, False)
        self.socket_open_deferred = self.tribler_started_deferred.addCallback(self.open_events_socket)
        # Each test sets this before triggering notifications.
        self.messages_to_wait_for = 0

    @inlineCallbacks
    def tearDown(self):
        """Close the events connection and wait for the channel to shut down
        before tearing down the base fixture.
        """
        yield self.close_connections()

        # Wait to make sure the HTTPChannel is closed, see https://twistedmatrix.com/trac/ticket/2447
        yield deferLater(reactor, 0.3, lambda: None)

        yield super(TestEventsEndpoint, self).tearDown()

    def on_event_socket_opened(self, response):
        """Attach the event-parsing protocol to the streaming response body."""
        response.deliverBody(EventDataProtocol(self.messages_to_wait_for, self.events_deferred, response))

    def open_events_socket(self, _):
        """Issue the GET request that opens the long-lived events stream."""
        agent = Agent(reactor, pool=self.connection_pool)
        return agent.request('GET', 'http://localhost:%s/events' % self.session.config.get_http_api_port(),
                             Headers({'User-Agent': ['Tribler ' + version_id]}), None)\
            .addCallback(self.on_event_socket_opened)

    def close_connections(self):
        """Close cached connections; returns a deferred that fires when done."""
        return self.connection_pool.closeCachedConnections()

    @trial_timeout(20)
    def test_search_results(self):
        """
        Testing whether the event endpoint returns search results when we have search results available
        """
        def verify_search_results(results):
            self.assertEqual(len(results), 2)

        self.messages_to_wait_for = 2

        def send_notifications(_):
            self.session.lm.api_manager.root_endpoint.events_endpoint.start_new_query()

            results_dict = {"keywords": ["test"], "result_list": [('a',) * 10]}
            self.session.notifier.notify(SIGNAL_CHANNEL, SIGNAL_ON_SEARCH_RESULTS, None, results_dict)
            self.session.notifier.notify(SIGNAL_TORRENT, SIGNAL_ON_SEARCH_RESULTS, None, results_dict)

        self.socket_open_deferred.addCallback(send_notifications)

        return self.events_deferred.addCallback(verify_search_results)

    @trial_timeout(20)
    def test_events(self):
        """
        Testing whether various events are coming through the events endpoints
        """
        # One message expected per notification fired below (21 in total).
        self.messages_to_wait_for = 21

        def send_notifications(_):
            self.session.lm.api_manager.root_endpoint.events_endpoint.start_new_query()
            results_dict = {"keywords": ["test"], "result_list": [('a',) * 10]}
            self.session.notifier.notify(SIGNAL_TORRENT, SIGNAL_ON_SEARCH_RESULTS, None, results_dict)
            self.session.notifier.notify(SIGNAL_CHANNEL, SIGNAL_ON_SEARCH_RESULTS, None, results_dict)
            self.session.notifier.notify(NTFY_UPGRADER, NTFY_STARTED, None, None)
            self.session.notifier.notify(NTFY_UPGRADER_TICK, NTFY_STARTED, None, None)
            self.session.notifier.notify(NTFY_UPGRADER, NTFY_FINISHED, None, None)
            self.session.notifier.notify(NTFY_WATCH_FOLDER_CORRUPT_TORRENT, NTFY_INSERT, None, None)
            self.session.notifier.notify(NTFY_NEW_VERSION, NTFY_INSERT, None, None)
            self.session.notifier.notify(NTFY_CHANNEL, NTFY_DISCOVERED, None, None)
            self.session.notifier.notify(NTFY_TORRENT, NTFY_DISCOVERED, None, {'a': 'Invalid character \xa1'})
            self.session.notifier.notify(NTFY_TORRENT, NTFY_DELETE, None, {'a': 'b'})
            self.session.notifier.notify(NTFY_TORRENT, NTFY_FINISHED, 'a' * 10, None)
            self.session.notifier.notify(NTFY_TORRENT, NTFY_ERROR, 'a' * 10, 'This is an error message')
            self.session.notifier.notify(NTFY_MARKET_ON_ASK, NTFY_UPDATE, None, {'a': 'b'})
            self.session.notifier.notify(NTFY_MARKET_ON_BID, NTFY_UPDATE, None, {'a': 'b'})
            self.session.notifier.notify(NTFY_MARKET_ON_ASK_TIMEOUT, NTFY_UPDATE, None, {'a': 'b'})
            self.session.notifier.notify(NTFY_MARKET_ON_BID_TIMEOUT, NTFY_UPDATE, None, {'a': 'b'})
            self.session.notifier.notify(NTFY_MARKET_ON_TRANSACTION_COMPLETE, NTFY_UPDATE, None, {'a': 'b'})
            self.session.notifier.notify(NTFY_MARKET_ON_PAYMENT_RECEIVED, NTFY_UPDATE, None, {'a': 'b'})
            self.session.notifier.notify(NTFY_MARKET_ON_PAYMENT_SENT, NTFY_UPDATE, None, {'a': 'b'})
            self.session.notifier.notify(SIGNAL_RESOURCE_CHECK, SIGNAL_LOW_SPACE, None, {})
            self.session.notifier.notify(NTFY_CREDIT_MINING, NTFY_ERROR, None, {"message": "Some credit mining error"})
            self.session.lm.api_manager.root_endpoint.events_endpoint.on_tribler_exception("hi")

        self.socket_open_deferred.addCallback(send_notifications)

        return self.events_deferred

    @trial_timeout(20)
    def test_family_filter_search(self):
        """
        Testing the family filter when searching for torrents and channels
        """
        self.messages_to_wait_for = 2

        def send_searches(_):
            events_endpoint = self.session.lm.api_manager.root_endpoint.events_endpoint

            # One of the two channel results contains a filtered term, so only
            # one result should be forwarded to clients.
            channels = [['a', ] * 10, ['a', ] * 10]
            channels[0][2] = 'badterm'
            events_endpoint.on_search_results_channels(None, None, None, {"keywords": ["test"],
                                                                          "result_list": channels})
            self.assertEqual(len(events_endpoint.channel_cids_sent), 1)

            # Same for torrents: the 'xxx' entry should be filtered out.
            torrents = [['a', ] * 10, ['a', ] * 10]
            torrents[0][4] = 'xxx'
            events_endpoint.on_search_results_torrents(None, None, None, {"keywords": ["test"],
                                                                          "result_list": torrents})
            self.assertEqual(len(events_endpoint.infohashes_sent), 1)

        self.socket_open_deferred.addCallback(send_searches)

        return self.events_deferred
# Example #27
class TorrentChecker(TaskManager):
    """
    Checks the health (seeder/leecher counts) of collected torrents by
    querying their trackers over HTTP/UDP and, via a fake session, the DHT.
    Results are written back to the torrent database and positive results are
    forwarded to the popularity community when it is available.
    """

    def __init__(self, session):
        super(TorrentChecker, self).__init__()
        self._logger = logging.getLogger(self.__class__.__name__)
        self.tribler_session = session

        # Torrent database handler; opened in initialize().
        self._torrent_db = None

        self._should_stop = False

        self._torrent_check_interval = DEFAULT_TORRENT_CHECK_INTERVAL
        self._torrent_check_retry_interval = DEFAULT_TORRENT_CHECK_RETRY_INTERVAL
        self._max_torrent_check_retries = DEFAULT_MAX_TORRENT_CHECK_RETRIES

        # Maps tracker URL -> list of active tracker sessions.
        # 'DHT' is a pseudo-tracker entry that always exists.
        self._session_list = {'DHT': []}
        self._last_torrent_selection_time = 0

        # Track all session cleanups
        self.session_stop_defer_list = []

        # Networking objects, set up in initialize()/create_socket_or_schedule().
        self.socket_mgr = self.udp_port = None
        self.connection_pool = None

    @blocking_call_on_reactor_thread
    def initialize(self):
        """Open the torrent database, schedule tracker selection and set up
        the HTTP connection pool and UDP socket.
        """
        self._torrent_db = self.tribler_session.open_dbhandler(NTFY_TORRENTS)
        self._reschedule_tracker_select()
        self.connection_pool = HTTPConnectionPool(reactor, False)
        self.socket_mgr = UdpSocketManager()
        self.create_socket_or_schedule()

    def listen_on_udp(self):
        """Bind the UDP socket manager to an ephemeral port and return the port."""
        return reactor.listenUDP(0, self.socket_mgr)

    def create_socket_or_schedule(self):
        """
        This method attempts to bind to a UDP port. If it fails for some reason (i.e. no network connection), we try
        again later.
        """
        try:
            self.udp_port = self.listen_on_udp()
        except socket.error as exc:
            self._logger.error("Error when creating UDP socket in torrent checker: %s", exc)
            # Retry binding in 10 seconds.
            self.register_task("listen_udp_port", reactor.callLater(10, self.create_socket_or_schedule))

    def shutdown(self):
        """
        Shutdown the torrent health checker.

        Once shut down it can't be started again.
        :returns A deferred that will fire once the shutdown has completed.
        """
        self._should_stop = True

        if self.udp_port:
            self.session_stop_defer_list.append(maybeDeferred(self.udp_port.stopListening))
            self.udp_port = None

        if self.connection_pool:
            self.session_stop_defer_list.append(self.connection_pool.closeCachedConnections())

        self.shutdown_task_manager()

        # kill all the tracker sessions.
        # Wait for the defers to all have triggered by using a DeferredList
        for tracker_url in self._session_list.keys():
            for session in self._session_list[tracker_url]:
                self.session_stop_defer_list.append(session.cleanup())

        return DeferredList(self.session_stop_defer_list)

    def _reschedule_tracker_select(self):
        """
        Changes the tracker selection interval dynamically and schedules the task.
        """
        # dynamically change the interval: update at least every 2h
        num_torrents = self._torrent_db.getNumberCollectedTorrents()

        # Clamp the per-tracker interval between 10 and 100 seconds; fall back
        # to the default when no torrents have been collected yet.
        tracker_select_interval = min(max(7200 / num_torrents, 10), 100) if num_torrents \
            else DEFAULT_TORRENT_SELECTION_INTERVAL

        self._logger.debug(u"tracker selection interval changed to %s", tracker_select_interval)

        self.register_task(u"torrent_checker_tracker_selection",
                           reactor.callLater(tracker_select_interval, self._task_select_tracker))

    def _task_select_tracker(self):
        """
        The regularly scheduled task that selects torrents associated with a specific tracker to check.
        """

        # update the torrent selection interval
        self._reschedule_tracker_select()

        # start selecting torrents
        tracker_url = self.get_valid_next_tracker_for_auto_check()
        if tracker_url is None:
            self._logger.warn(u"No tracker to select from, skip")
            return succeed(None)

        self._logger.debug(u"Start selecting torrents on tracker %s.", tracker_url)

        # get the torrents that should be checked
        infohashes = self._torrent_db.getTorrentsOnTracker(tracker_url, int(time.time()))

        if len(infohashes) == 0:
            # We have no torrents to recheck for this tracker. Still update the last_check for this tracker.
            self._logger.info("No torrent to check for tracker %s", tracker_url)
            self.update_tracker_info(tracker_url, True)
            return succeed(None)
        elif tracker_url != u'DHT' and tracker_url != u'no-DHT':
            try:
                session = self._create_session_for_request(tracker_url, timeout=30)
            except MalformedTrackerURLException as e:
                # Remove the tracker from the database
                self.remove_tracker(tracker_url)
                self._logger.error(e)
                return succeed(None)

            for infohash in infohashes:
                session.add_infohash(infohash)

            self._logger.info(u"Selected %d new torrents to check on tracker: %s", len(infohashes), tracker_url)
            return session.connect_to_tracker().addCallbacks(*self.get_callbacks_for_session(session))\
                .addErrback(lambda _: None)

    def get_callbacks_for_session(self, session):
        """Return a (success, error) callback pair bound to the given session."""
        success_lambda = lambda info_dict: self._on_result_from_session(session, info_dict)
        error_lambda = lambda failure: self.on_session_error(session, failure)
        return success_lambda, error_lambda

    def get_valid_next_tracker_for_auto_check(self):
        """Return the next tracker URL to check, removing invalid ones on the way."""
        tracker_url = self.get_next_tracker_for_auto_check()
        while tracker_url and not is_valid_url(tracker_url):
            self.remove_tracker(tracker_url)
            tracker_url = self.get_next_tracker_for_auto_check()
        return tracker_url

    def get_next_tracker_for_auto_check(self):
        """Delegate tracker selection to the tracker manager."""
        return self.tribler_session.lm.tracker_manager.get_next_tracker_for_auto_check()

    def remove_tracker(self, tracker_url):
        """Remove the given tracker from the tracker manager."""
        self.tribler_session.lm.tracker_manager.remove_tracker(tracker_url)

    def update_tracker_info(self, tracker_url, value):
        """Report a check result (success/failure flag) to the tracker manager."""
        self.tribler_session.lm.tracker_manager.update_tracker_info(tracker_url, value)

    def get_valid_trackers_of_torrent(self, torrent_id):
        """ Get a set of valid trackers for torrent. Also remove any invalid torrent."""
        db_tracker_list = self._torrent_db.getTrackerListByTorrentID(torrent_id)
        return set([tracker for tracker in db_tracker_list if is_valid_url(tracker) or tracker == u'DHT'])

    def on_gui_request_completed(self, infohash, result):
        """
        Merge the per-tracker results of a GUI health request, keep the best
        (highest-seeder) result for the database, and return a dict mapping
        tracker URL to its result (or an error message).
        """
        final_response = {}

        torrent_update_dict = {'infohash': infohash, 'seeders': 0, 'leechers': 0, 'last_check': time.time()}
        for success, response in result:
            if not success and isinstance(response, Failure):
                final_response[response.tracker_url] = {'error': response.getErrorMessage()}
                continue

            response_seeders = response[response.keys()[0]][0]['seeders']
            response_leechers = response[response.keys()[0]][0]['leechers']
            # Prefer the response with the most seeders; break ties by the
            # lowest leecher count.
            if response_seeders > torrent_update_dict['seeders'] or \
                    (response_seeders == torrent_update_dict['seeders']
                     and response_leechers < torrent_update_dict['leechers']):
                torrent_update_dict['seeders'] = response_seeders
                torrent_update_dict['leechers'] = response_leechers

            final_response[response.keys()[0]] = response[response.keys()[0]][0]

        self._update_torrent_result(torrent_update_dict)

        # Add this result to popularity community to publish to subscribers
        self.publish_torrent_result(torrent_update_dict)

        return final_response

    @call_on_reactor_thread
    def add_gui_request(self, infohash, timeout=20, scrape_now=False):
        """
        Public API for adding a GUI request.
        :param infohash: Torrent infohash.
        :param timeout: The timeout to use in the performed requests
        :param scrape_now: Flag whether we want to force scraping immediately
        """
        result = self._torrent_db.getTorrent(infohash, (u'torrent_id', u'last_tracker_check',
                                                        u'num_seeders', u'num_leechers'), False)
        if result is None:
            self._logger.warn(u"torrent info not found, skip. infohash: %s", hexlify(infohash))
            return fail(Failure(RuntimeError("Torrent not found")))

        torrent_id = result[u'torrent_id']
        last_check = result[u'last_tracker_check']
        time_diff = time.time() - last_check
        # If the torrent was checked recently, serve the cached DB values
        # instead of contacting the trackers again (unless forced).
        if time_diff < self._torrent_check_interval and not scrape_now:
            self._logger.debug(u"time interval too short, skip GUI request. infohash: %s", hexlify(infohash))
            return succeed({"db": {"seeders": result[u'num_seeders'],
                                   "leechers": result[u'num_leechers'], "infohash": infohash.encode('hex')}})

        # get torrent's tracker list from DB
        tracker_set = self.get_valid_trackers_of_torrent(torrent_id)
        if not tracker_set:
            self._logger.warn(u"no trackers, skip GUI request. infohash: %s", hexlify(infohash))
            # TODO: add code to handle torrents with no tracker
            return fail(Failure(RuntimeError("No trackers available for this torrent")))

        deferred_list = []
        for tracker_url in tracker_set:
            if tracker_url == u'DHT':
                # Create a (fake) DHT session for the lookup
                session = FakeDHTSession(self.tribler_session, infohash, timeout)
                self._session_list['DHT'].append(session)
                deferred_list.append(session.connect_to_tracker().
                                     addCallbacks(*self.get_callbacks_for_session(session)))
            elif tracker_url != u'no-DHT':
                session = self._create_session_for_request(tracker_url, timeout=timeout)
                session.add_infohash(infohash)
                deferred_list.append(session.connect_to_tracker().
                                     addCallbacks(*self.get_callbacks_for_session(session)))

        return DeferredList(deferred_list, consumeErrors=True).addCallback(
            lambda res: self.on_gui_request_completed(infohash, res))

    def on_session_error(self, session, failure):
        """
        Handles the scenario of when a tracker session has failed by calling the
        tracker_manager's update_tracker_info function.
        Trap value errors that are thrown by e.g. the HTTPTrackerSession when a connection fails.
        And trap CancelledErrors that can be thrown when shutting down.
        :param failure: The failure object raised by Twisted.
        """
        failure.trap(ValueError, CancelledError, ConnectingCancelledError, ConnectionLost, RuntimeError)
        self._logger.warning(u"Got session error for URL %s: %s", session.tracker_url, failure)

        self.clean_session(session)

        # Do not update if the connection got cancelled, we are probably shutting down
        # and the tracker_manager may have shutdown already.
        if failure.check(CancelledError, ConnectingCancelledError) is None:
            self.tribler_session.lm.tracker_manager.update_tracker_info(session.tracker_url, False)

        failure.tracker_url = session.tracker_url
        return failure

    def _create_session_for_request(self, tracker_url, timeout=20):
        """
        Create a tracker session for the given URL, register it in the session
        list and return it. May raise MalformedTrackerURLException.
        """
        session = create_tracker_session(tracker_url, timeout, self.socket_mgr, connection_pool=self.connection_pool)

        if tracker_url not in self._session_list:
            self._session_list[tracker_url] = []
        self._session_list[tracker_url].append(session)

        self._logger.debug(u"Session created for tracker %s", tracker_url)
        return session

    def clean_session(self, session):
        """
        Report the session's outcome to the tracker manager, schedule its
        cleanup and remove it from the session bookkeeping.
        """
        self.tribler_session.lm.tracker_manager.update_tracker_info(session.tracker_url, not session.is_failed)
        self.session_stop_defer_list.append(session.cleanup())

        # Remove the session from our session list dictionary
        self._session_list[session.tracker_url].remove(session)
        if len(self._session_list[session.tracker_url]) == 0 and session.tracker_url != u"DHT":
            del self._session_list[session.tracker_url]

    def _on_result_from_session(self, session, result_list):
        """Clean up the finished session and pass its result through, unless shutting down."""
        if self._should_stop:
            return

        self.clean_session(session)

        return result_list

    def _update_torrent_result(self, response):
        """
        Persist a check result: derive the torrent status (good/unknown/dead)
        from the seeder count and retry counter, compute the next check time
        with exponential backoff, and write everything to the database.
        """
        infohash = response['infohash']
        seeders = response['seeders']
        leechers = response['leechers']
        last_check = response['last_check']

        # the torrent status logic, TODO: do it in other way
        self._logger.debug(u"Update result %s/%s for %s", seeders, leechers, hexlify(infohash))

        result = self._torrent_db.getTorrent(infohash, (u'torrent_id', u'tracker_check_retries'), include_mypref=False)
        torrent_id = result[u'torrent_id']
        retries = result[u'tracker_check_retries']

        # the status logic
        if seeders > 0:
            retries = 0
            status = u'good'
        else:
            retries += 1
            if retries < self._max_torrent_check_retries:
                status = u'unknown'
            else:
                status = u'dead'
                # prevent retries from exceeding the maximum
                retries = self._max_torrent_check_retries

        # calculate next check time: <last-time> + <interval> * (2 ^ <retries>)
        next_check = last_check + self._torrent_check_retry_interval * (2 ** retries)

        self._torrent_db.updateTorrentCheckResult(torrent_id,
                                                  infohash, seeders, leechers, last_check, next_check,
                                                  status, retries)

    def publish_torrent_result(self, response):
        """Queue a positive (seeded) health result on the popularity community, if present."""
        if response['seeders'] == 0:
            self._logger.info("Not publishing zero seeded torrents")
            return
        content = (response['infohash'], response['seeders'], response['leechers'], response['last_check'])
        if self.tribler_session.lm.popularity_community:
            self.tribler_session.lm.popularity_community.queue_content(TYPE_TORRENT_HEALTH, content)
        else:
            self._logger.info("Popular community not available to publish torrent checker result")
class TreqIntegrationTests(TestCase):
    """Integration tests that drive treq's HTTP verbs against a live
    httpbin instance (``HTTPBIN_URL``), covering headers, redirects,
    bodies, multipart uploads, auth, gzip and timeouts.
    """

    baseurl = HTTPBIN_URL
    # Each verb helper prepends the base URL to the request path.
    get = with_baseurl(treq.get)
    head = with_baseurl(treq.head)
    post = with_baseurl(treq.post)
    put = with_baseurl(treq.put)
    patch = with_baseurl(treq.patch)
    delete = with_baseurl(treq.delete)

    def setUp(self):
        """Create a fresh non-persistent connection pool per test."""
        self.pool = HTTPConnectionPool(reactor, False)

    def tearDown(self):
        def _check_fds(_):
            # This appears to only be necessary for HTTPS tests.
            # For the normal HTTP tests then closeCachedConnections is
            # sufficient.
            # Bug fix: the original summed getReaders() with itself, so
            # write-side descriptors were never inspected and lingering
            # TLS connections could leak into subsequent tests.
            fds = set(reactor.getReaders() + reactor.getWriters())
            if not [fd for fd in fds if isinstance(fd, Client)]:
                return

            # Client descriptors still open; poll again next reactor turn.
            return deferLater(reactor, 0, _check_fds, None)

        return self.pool.closeCachedConnections().addBoth(_check_fds)

    @inlineCallbacks
    def assert_data(self, response, expected_data):
        """Assert the JSON body's 'data' field equals *expected_data*."""
        body = yield treq.json_content(response)
        self.assertIn('data', body)
        self.assertEqual(body['data'], expected_data)

    @inlineCallbacks
    def assert_sent_header(self, response, header, expected_value):
        """Assert that httpbin echoed *header* back with *expected_value*."""
        body = yield treq.json_content(response)
        self.assertIn(header, body['headers'])
        self.assertEqual(body['headers'][header], expected_value)

    @inlineCallbacks
    def test_get(self):
        response = yield self.get('/get')
        self.assertEqual(response.code, 200)
        yield print_response(response)

    @inlineCallbacks
    def test_get_headers(self):
        response = yield self.get('/get', {'X-Blah': ['Foo', 'Bar']})
        self.assertEqual(response.code, 200)
        yield self.assert_sent_header(response, 'X-Blah', 'Foo, Bar')
        yield print_response(response)

    @inlineCallbacks
    def test_get_302_absolute_redirect(self):
        response = yield self.get(
            '/redirect-to?url={0}/get'.format(self.baseurl))
        self.assertEqual(response.code, 200)
        yield print_response(response)

    @todo_relative_redirect
    @inlineCallbacks
    def test_get_302_relative_redirect(self):
        response = yield self.get('/relative-redirect/1')
        self.assertEqual(response.code, 200)
        yield print_response(response)

    @inlineCallbacks
    def test_get_302_redirect_disallowed(self):
        response = yield self.get('/redirect/1', allow_redirects=False)
        self.assertEqual(response.code, 302)
        yield print_response(response)

    @inlineCallbacks
    def test_head(self):
        response = yield self.head('/get')
        body = yield treq.content(response)
        # HEAD responses must carry no body.
        self.assertEqual('', body)
        yield print_response(response)

    @inlineCallbacks
    def test_head_302_absolute_redirect(self):
        response = yield self.head(
            '/redirect-to?url={0}/get'.format(self.baseurl))
        self.assertEqual(response.code, 200)
        yield print_response(response)

    @todo_relative_redirect
    @inlineCallbacks
    def test_head_302_relative_redirect(self):
        response = yield self.head('/relative-redirect/1')
        self.assertEqual(response.code, 200)
        yield print_response(response)

    @inlineCallbacks
    def test_head_302_redirect_disallowed(self):
        response = yield self.head('/redirect/1', allow_redirects=False)
        self.assertEqual(response.code, 302)
        yield print_response(response)

    @inlineCallbacks
    def test_post(self):
        response = yield self.post('/post', 'Hello!')
        self.assertEqual(response.code, 200)
        yield self.assert_data(response, 'Hello!')
        yield print_response(response)

    @inlineCallbacks
    def test_multipart_post(self):
        class FileLikeObject(StringIO):
            # Minimal file-like wrapper: treq uses .name for the
            # multipart filename.
            def __init__(self, val):
                StringIO.__init__(self, val)
                self.name = "david.png"

            def read(*args, **kwargs):
                return StringIO.read(*args, **kwargs)

        response = yield self.post(
            '/post',
            data={"a": "b"},
            files={"file1": FileLikeObject("file")})
        self.assertEqual(response.code, 200)

        body = yield treq.json_content(response)
        self.assertEqual('b', body['form']['a'])
        self.assertEqual('file', body['files']['file1'])
        yield print_response(response)

    @inlineCallbacks
    def test_post_headers(self):
        response = yield self.post(
            '/post',
            '{msg: "Hello!"}',
            headers={'Content-Type': ['application/json']}
        )

        self.assertEqual(response.code, 200)
        yield self.assert_sent_header(
            response, 'Content-Type', 'application/json')
        yield self.assert_data(response, '{msg: "Hello!"}')
        yield print_response(response)

    @inlineCallbacks
    def test_put(self):
        response = yield self.put('/put', data='Hello!')
        yield print_response(response)

    @inlineCallbacks
    def test_patch(self):
        response = yield self.patch('/patch', data='Hello!')
        self.assertEqual(response.code, 200)
        yield self.assert_data(response, 'Hello!')
        yield print_response(response)

    @inlineCallbacks
    def test_delete(self):
        response = yield self.delete('/delete')
        self.assertEqual(response.code, 200)
        yield print_response(response)

    @inlineCallbacks
    def test_gzip(self):
        response = yield self.get('/gzip')
        self.assertEqual(response.code, 200)
        yield print_response(response)
        json = yield treq.json_content(response)
        self.assertTrue(json['gzipped'])

    @inlineCallbacks
    def test_basic_auth(self):
        response = yield self.get('/basic-auth/treq/treq',
                                  auth=('treq', 'treq'))
        self.assertEqual(response.code, 200)
        yield print_response(response)
        json = yield treq.json_content(response)
        self.assertTrue(json['authenticated'])
        self.assertEqual(json['user'], 'treq')

    @inlineCallbacks
    def test_failed_basic_auth(self):
        response = yield self.get('/basic-auth/treq/treq',
                                  auth=('not-treq', 'not-treq'))
        self.assertEqual(response.code, 401)
        yield print_response(response)

    @inlineCallbacks
    def test_timeout(self):
        """
        Verify a timeout fires if a request takes too long.
        """
        yield self.assertFailure(self.get('/delay/2', timeout=1),
                                 CancelledError,
                                 ResponseFailed)
# Example #29
class TestEventsEndpoint(AbstractApiTest):
    """
    Integration tests for the long-lived /events HTTP socket.

    setUp opens a GET request to /events once Tribler has started; each test
    then fires notifier signals and waits (via ``events_deferred``) until the
    expected number of event messages has been received.
    """

    @blocking_call_on_reactor_thread
    @inlineCallbacks
    def setUp(self, autoload_discovery=True):
        yield super(TestEventsEndpoint, self).setUp(autoload_discovery=autoload_discovery)
        # Fired by EventDataProtocol once `messages_to_wait_for` messages arrived.
        self.events_deferred = Deferred()
        self.connection_pool = HTTPConnectionPool(reactor, False)
        self.socket_open_deferred = self.tribler_started_deferred.addCallback(self.open_events_socket)
        self.messages_to_wait_for = 0

    @blocking_call_on_reactor_thread
    @inlineCallbacks
    def tearDown(self, annotate=True):
        yield self.close_connections()

        # Wait to make sure the HTTPChannel is closed, see https://twistedmatrix.com/trac/ticket/2447
        yield deferLater(reactor, 0.3, lambda: None)

        yield super(TestEventsEndpoint, self).tearDown(annotate=annotate)

    def on_event_socket_opened(self, response):
        # Stream the response body into EventDataProtocol, which counts
        # messages and fires events_deferred when enough have arrived.
        response.deliverBody(EventDataProtocol(self.messages_to_wait_for, self.events_deferred, response))

    def open_events_socket(self, _):
        """Open the /events socket against the locally running HTTP API."""
        agent = Agent(reactor, pool=self.connection_pool)
        return agent.request('GET', 'http://localhost:%s/events' % self.session.get_http_api_port(),
                             Headers({'User-Agent': ['Tribler ' + version_id]}), None)\
            .addCallback(self.on_event_socket_opened)

    def close_connections(self):
        """Close all cached connections in the pool."""
        return self.connection_pool.closeCachedConnections()

    @deferred(timeout=20)
    def test_search_results(self):
        """
        Testing whether the event endpoint returns search results when we have search results available
        """
        def verify_search_results(results):
            self.assertEqual(len(results), 2)

        self.messages_to_wait_for = 2

        def send_notifications(_):
            self.session.lm.api_manager.root_endpoint.events_endpoint.start_new_query()

            results_dict = {"keywords": ["test"], "result_list": [('a',) * 10]}
            self.session.notifier.notify(SIGNAL_CHANNEL, SIGNAL_ON_SEARCH_RESULTS, None, results_dict)
            self.session.notifier.notify(SIGNAL_TORRENT, SIGNAL_ON_SEARCH_RESULTS, None, results_dict)

        self.socket_open_deferred.addCallback(send_notifications)

        return self.events_deferred.addCallback(verify_search_results)

    @deferred(timeout=20)
    def test_events(self):
        """
        Testing whether various events are coming through the events endpoints
        """
        # 13 notify() calls below plus on_tribler_exception -> 14 messages.
        self.messages_to_wait_for = 14

        def send_notifications(_):
            self.session.lm.api_manager.root_endpoint.events_endpoint.start_new_query()
            results_dict = {"keywords": ["test"], "result_list": [('a',) * 10]}
            self.session.notifier.notify(SIGNAL_TORRENT, SIGNAL_ON_SEARCH_RESULTS, None, results_dict)
            self.session.notifier.notify(SIGNAL_CHANNEL, SIGNAL_ON_SEARCH_RESULTS, None, results_dict)
            self.session.notifier.notify(NTFY_UPGRADER, NTFY_STARTED, None, None)
            self.session.notifier.notify(NTFY_UPGRADER_TICK, NTFY_STARTED, None, None)
            self.session.notifier.notify(NTFY_UPGRADER, NTFY_FINISHED, None, None)
            self.session.notifier.notify(NTFY_WATCH_FOLDER_CORRUPT_TORRENT, NTFY_INSERT, None, None)
            self.session.notifier.notify(NTFY_NEW_VERSION, NTFY_INSERT, None, None)
            self.session.notifier.notify(NTFY_CHANNEL, NTFY_DISCOVERED, None, None)
            self.session.notifier.notify(NTFY_TORRENT, NTFY_DISCOVERED, None, {'a': 'Invalid character \xa1'})
            self.session.notifier.notify(NTFY_TORRENT, NTFY_DELETE, None, {'a': 'b'})
            self.session.notifier.notify(NTFY_TORRENT, NTFY_FINISHED, 'a' * 10, None)
            self.session.notifier.notify(NTFY_TORRENT, NTFY_ERROR, 'a' * 10, 'This is an error message')
            self.session.notifier.notify(SIGNAL_RESOURCE_CHECK, SIGNAL_LOW_SPACE, None, {})
            self.session.lm.api_manager.root_endpoint.events_endpoint.on_tribler_exception("hi")

        self.socket_open_deferred.addCallback(send_notifications)

        return self.events_deferred

    @deferred(timeout=20)
    def test_family_filter_search(self):
        """
        Testing the family filter when searching for torrents and channels
        """
        self.messages_to_wait_for = 2

        def send_searches(_):
            events_endpoint = self.session.lm.api_manager.root_endpoint.events_endpoint

            # One result carries a filtered term; only the clean one should be sent.
            channels = [['a', ] * 10, ['a', ] * 10]
            channels[0][2] = 'badterm'
            events_endpoint.on_search_results_channels(None, None, None, {"keywords": ["test"],
                                                                          "result_list": channels})
            self.assertEqual(len(events_endpoint.channel_cids_sent), 1)

            torrents = [['a', ] * 10, ['a', ] * 10]
            torrents[0][4] = 'xxx'
            events_endpoint.on_search_results_torrents(None, None, None, {"keywords": ["test"],
                                                                          "result_list": torrents})
            self.assertEqual(len(events_endpoint.infohashes_sent), 1)

        self.socket_open_deferred.addCallback(send_searches)

        return self.events_deferred
Пример #30
0
class PublishTestCase(unittest.TestCase):
    """Integration tests for publish() on the Twisted PubNub client."""

    def setUp(self):
        # Non-persistent pool: each test uses fresh connections.
        self.pool = HTTPConnectionPool(reactor, persistent=False)
        self.pubnub = PubNubTwisted(pnconf, reactor=reactor, pool=self.pool)

    def tearDown(self):
        return self.pool.closeCachedConnections()

    # for async
    def error_envelope_asserter(self, expected_err_msg):
        """Return a callback that asserts an envelope's error information."""
        def assert_error_message(envelope):
            assert envelope.status.error_data.information == expected_err_msg

        return assert_error_message

    def assert_client_error(self, publish, message):
        """Assert `publish.deferred()` raises PubNubException containing `message`."""
        try:
            publish.deferred()
        except PubNubException as exception:
            self.assertTrue(message in exception.message)
        else:
            self.fail('Expected PubNubException not raised')

    def assert_client_side_error(self, envelope, expected_err_msg):
        """Assert the envelope carries the expected client-side error message."""
        assert envelope.status.error_data.information == expected_err_msg

    def assert_valid_publish_envelope(self, envelope):
        """Sanity-check result/status types and a positive timetoken."""
        assert isinstance(envelope, TwistedEnvelope)
        assert isinstance(envelope.result, PNPublishResult)
        assert isinstance(envelope.status, PNStatus)
        assert envelope.result.timetoken > 0

    @inlineCallbacks
    def deferred(self, event):
        """Await `event.deferred()` and return the resulting envelope."""
        envelope = yield event.deferred()
        returnValue(envelope)

    @inlineCallbacks
    def assert_success_publish_get(self, message, meta=None):
        """Publish `message` via GET and validate the returned envelope."""
        publish = self.pubnub.publish().channel(channel).message(message).meta(
            meta)
        envelope = yield self.deferred(publish)
        self.assert_valid_publish_envelope(envelope)
        returnValue(envelope)

    @inlineCallbacks
    def assert_success_encrypted_publish_get(self, message):
        """Publish `message` with an encryption-enabled client and validate."""
        pubnub = PubNubTwisted(pnconf_enc_copy())
        publish = pubnub.publish().channel(channel).message(message)
        envelope = yield self.deferred(publish)
        self.assert_valid_publish_envelope(envelope)
        returnValue(envelope)

    @inlineCallbacks
    def assert_success_publish_post(self, message):
        """Publish `message` via POST and validate the returned envelope."""
        publish = self.pubnub.publish().channel(channel).message(
            message).use_post(True)
        envelope = yield self.deferred(publish)
        self.assert_valid_publish_envelope(envelope)
        returnValue(envelope)

    @inlineCallbacks
    @pn_vcr.use_cassette(
        'tests/integrational/fixtures/twisted/publish/mixed_via_get.yaml',
        filter_query_parameters=['uuid', 'seqn'])
    def test_publish_mixed_via_get(self):
        """Strings, numbers, booleans and lists all publish via GET."""
        d0 = yield self.assert_success_publish_get("hi")
        d1 = yield self.assert_success_publish_get(5)
        d2 = yield self.assert_success_publish_get(True)
        d3 = yield self.assert_success_publish_get(["hi", "hi2", "hi3"])
        returnValue([d0, d1, d2, d3])

    @inlineCallbacks
    @pn_vcr.use_cassette(
        'tests/integrational/fixtures/twisted/publish/mixed_encrypted_via_get.yaml',
        filter_query_parameters=['uuid', 'seqn'])
    def test_publish_mixed_encrypted_via_get(self):
        """Same mixed payloads, with client-side encryption enabled."""
        d0 = yield self.assert_success_encrypted_publish_get("hi")
        d1 = yield self.assert_success_encrypted_publish_get(5)
        d2 = yield self.assert_success_encrypted_publish_get(True)
        d3 = yield self.assert_success_encrypted_publish_get(
            ["hi", "hi2", "hi3"])
        returnValue([d0, d1, d2, d3])

    # TODO: uncomment this when vcr for post is fixed
    # @inlineCallbacks
    # @pn_vcr.use_cassette(
    #     'tests/integrational/fixtures/twisted/publish/mixed_via_post.yaml',
    #     filter_query_parameters=['uuid', 'seqn'])
    # def test_publish_mixed_via_post(self):
    #     d0 = yield self.assert_success_publish_post("hi")
    #     d1 = yield self.assert_success_publish_post(5)
    #     d2 = yield self.assert_success_publish_post(True)
    #     d3 = yield self.assert_success_publish_post(["hi", "hi2", "hi3"])
    #     returnValue([d0, d1, d2, d3])

    @inlineCallbacks
    @pn_vcr.use_cassette(
        'tests/integrational/fixtures/twisted/publish/object_via_get.yaml',
        filter_query_parameters=['uuid', 'seqn'])
    def test_publish_object_via_get(self):
        """A dict payload publishes via GET."""
        d0 = yield self.assert_success_publish_get({"one": 2, "three": True})
        returnValue(d0)

    def test_error_missing_message(self):
        """Publishing without a message is rejected client-side."""
        self.assert_client_error(
            self.pubnub.publish().channel(channel).message(None),
            PNERR_MESSAGE_MISSING)

    def test_error_missing_channel(self):
        """Publishing without a channel is rejected client-side."""
        self.assert_client_error(
            self.pubnub.publish().channel('').message('whatever'),
            PNERR_CHANNEL_MISSING)

    @inlineCallbacks
    @pn_vcr.use_cassette(
        'tests/integrational/fixtures/twisted/publish/invalid_key.yaml',
        filter_query_parameters=['uuid', 'seqn'])
    def test_error_invalid_key(self):
        """A bad publish key yields an HTTP 400 with a descriptive error."""
        conf = PNConfiguration()
        conf.publish_key = "fake"
        conf.subscribe_key = "demo"
        pubnub = PubNubTwisted(conf)
        with pytest.raises(PubNubTwistedException) as exception:
            yield pubnub.publish().channel(channel).message("hey").deferred()

        self.assertEqual(
            exception.value.status.error_data.information,
            "HTTP Client Error (400): [0, u'Invalid Key', u'14767989321048626']"
        )

    @inlineCallbacks
    @pn_vcr.use_cassette(
        'tests/integrational/fixtures/twisted/publish/forbidden.yaml',
        filter_query_parameters=['uuid', 'seqn', 'timestamp', 'signature'])
    def test_error_forbidden(self):
        """Publishing to a non-permitted channel yields HTTP 403."""
        pubnub = PubNubTwisted(pnconf_pam_copy())
        with pytest.raises(PubNubTwistedException) as exception:
            yield pubnub.publish().channel("not_permitted_channel").message(
                "hey").deferred()

        self.assertEqual(
            exception.value.status.error_data.information,
            "HTTP Client Error (403): {u'status': 403, u'message': u'Forbidden', u'payload':"
            " {u'channels': [u'not_permitted_channel']}, u'service': u'Access Manager', u'error': True}"
        )

    @inlineCallbacks
    @pn_vcr.use_cassette(
        'tests/integrational/fixtures/twisted/publish/meta_object.yaml',
        filter_query_parameters=['uuid', 'seqn'],
        match_on=['host', 'method', 'path', 'meta_object_in_query'])
    def test_publish_with_meta(self):
        """Metadata attached to a publish is matched in the request query."""
        yield self.assert_success_publish_get('hi', {'a': 2, 'b': True})

    @inlineCallbacks
    @pn_vcr.use_cassette(
        'tests/integrational/fixtures/twisted/publish/do_not_store.yaml',
        filter_query_parameters=['uuid', 'seqn'])
    def test_publish_do_not_store(self):
        """A publish with should_store(False) still succeeds."""
        publish = self.pubnub.publish().channel(channel).message(
            'whatever').should_store(False)
        envelope = yield self.deferred(publish)
        self.assert_valid_publish_envelope(envelope)
        returnValue(envelope)
Пример #31
0
class JSONClient(object):
    """
    Minimal JSON-RPC client built on twisted.web's Agent.

    Keeps a monotonically increasing call id and a counter of in-flight
    calls; optionally maintains a persistent connection pool and wraps the
    agent so gzip-encoded responses are decoded transparently.
    """

    protocolName = 'jsonrpc'
    protocolVersion = '2.0'
    protocolContentType = 'text/json'

    userAgentName = 'twswebrpc'

    def __init__(self, url, callID=0, maxPersistentPerHost=2, useCompression=False, connectTimeout=None):
        """
        :param url: endpoint URL requests are POSTed to.
        :param callID: starting value for the JSON-RPC call id.
        :param maxPersistentPerHost: pool size per host; <= 0 disables pooling.
        :param useCompression: if True, decode gzip-compressed responses.
        :param connectTimeout: TCP connect timeout passed to the Agent.
        """

        self.url = url
        self.connectTimeout = connectTimeout
        self.encoder = self.get_encoder()

        assert IEncoder.providedBy(self.encoder), 'no encoder available or encoder does not provide IEncoder'
        assert isinstance(callID, (int, long)), "callID must be <type 'int'> or <type 'long'>"

        self.__callID = callID
        self.__callsCounter = 0
        # maxPersistentPerHost <= 0 means "no connection pooling at all".
        if maxPersistentPerHost > 0:
            self.pool = HTTPConnectionPool(reactor, persistent=True)
            self.pool.maxPersistentPerHost = maxPersistentPerHost
        else:
            self.pool = None

        agent = Agent(reactor, connectTimeout=self.connectTimeout, pool=self.pool)
        if useCompression:
            self.agent = ContentDecoderAgent(agent, [('gzip', GzipDecoder)])
        else:
            self.agent = agent

    def get_encoder(self):
        """Return the encoder used to serialize requests; override to customize."""
        return JSONEncoder()

    @property
    def callID(self):
        # Last call id handed out (read-only view of the mangled attribute).
        return self.__callID

    def __callsCounterInc(self):
        # One more call in flight.
        self.__callsCounter += 1

    def __callsCounterDec(self):
        # A call completed (successfully or not).
        self.__callsCounter -= 1

    @property
    def callsCounter(self):
        # Number of calls currently awaiting a response.
        return self.__callsCounter

    def callRemote(self, function, *params):
        """
        Invoke `function(*params)` on the server.

        Returns a Deferred firing with the decoded response body, or a
        Failure (ServerError for non-200 responses).
        """

        self.__callID += 1
        data = dict(id=self.__callID,
                    method=function,
                    params=params)

        data[self.protocolName] = self.protocolVersion

        encodedData = self.encoder.encode(data)

        deferred = maybeDeferred(self.agent.request, 'POST',
                                 self.url,
                                 Headers({'User-Agent': [self.userAgentName],
                                          "content-type": [self.protocolContentType],
                                          "content-length": [str(len(encodedData))]
                                          }
                                         ),
                                 StringProducer(encodedData)
                                 )

        deferred.addCallback(self._onCallSuccess)
        deferred.addErrback(self._onCallError)

        self.__callsCounterInc()

        return deferred

    def _onCallSuccess(self, response):
        # Non-200 statuses are surfaced as a ServerError Failure.
        if response.code != 200:
            return Failure(ServerError('%s - %s' % (response.code, response.phrase)))

        # Stream the body into DataReceiver; `finished` fires once decoded.
        finished = Deferred()
        finished.addCallback(self._onCallSuccessFinish)
        response.deliverBody(DataReceiver(finished, self.encoder))
        return finished

    def _onCallSuccessFinish(self, response):
        self.__callsCounterDec()
        return response

    def _onCallError(self, response):
        self.__callsCounterDec()
        return response

    def closeCachedConnections(self, callBack=None):
        """
        Close pooled connections, if pooling is enabled.

        :param callBack: optional callable chained onto the close Deferred.
        :return: the close Deferred, or None when pooling is disabled.
        """
        if self.pool:
            deferred = self.pool.closeCachedConnections()
            if callBack:
                assert callable(callBack)
                return deferred.addCallback(callBack)

            else:
                return deferred

        return None
Пример #32
0
class WebClient(object):
    """
    Client to communicate with the webservice.

    Manages a connection pool, supports using a proxy. Record keys and
    values are encrypted client-side (via the key derived from the shared
    secret) before they reach the server.
    """

    def __init__(self, server, secret, proxy=None):
        """
        :param server: Url of the server.
        :param secret: Passphrase. Only clients with the same secret can interact,
          even when using the same server.
        :param proxy: URL to the proxy. An empty string means no proxy. `None` to check
          the environment variable `http_proxy`.
        """
        self.server = server
        self.encryption_key = _make_key(secret)
        self.proxy = proxy
        # Persistent pool with a high per-host cap so many long-polling
        # "wait" requests can be outstanding at once.
        self.pool = HTTPConnectionPool(reactor, persistent=True)
        self.pool.maxPersistentPerHost = 1024

    def close(self):
        """
        Closes the connection pool.
        """
        return self.pool.closeCachedConnections()

    def public_ip(self):
        """
        Returns our IP as it is seen from the server.
        """
        url = self._url(0, 0)
        d = httpclient.request("GET", url, return_headers=True, pool=self.pool, proxy=self.proxy)
        # The server reports the requester's address in this header.
        d.addCallback(lambda headers: headers["X-Request-From"][0])
        return d

    def put(self, key, value):
        """
        Store new key-value pair.
        @return deferred new version
        """
        key = _encrypt_key(self.encryption_key, key)
        value = _encrypt_value(self.encryption_key, value)
        d = self._post_request(key, "JUNGEST", value)
        d.addCallback(lambda r: r["record_version"])
        return d

    def get(self, key, version, wait=False):
        """
        Returns the value for the given key and version.
        If `wait` is `True` then we wait for the key & version
        to be stored. The deferred can be canceled.
        """
        d = self._get(key, version, wait)
        d.addCallback(lambda r: r["value"])
        return d

    def get_jungest(self, key, wait=False):
        """
        Returns a tuple with the jungest version and value for the given key.
        """
        d = self._get(key, "JUNGEST", wait)
        d.addCallback(lambda r: (r["record_version"], r["value"]))
        return d

    def get_oldest(self, key, wait=False):
        """
        Returns a tuple with the oldest version and value for the given key.
        """
        d = self._get(key, "OLDEST", wait)
        d.addCallback(lambda r: (r["record_version"], r["value"]))
        return d

    def _url(self, record_id, record_version):
        """Build the record URL, percent-quoting both path components."""
        # FIX: coerce the id to str as well -- public_ip() probes with the
        # numeric id 0, which urllib.quote cannot handle directly.
        record_id = str(record_id)
        record_version = str(record_version)
        url = "{base}/rec/{id}/{version}".format(
                    base=self.server,
                    id=urllib.quote(record_id, ''),
                    version=urllib.quote(record_version, ''))
        return url

    def _get(self, key, version, wait=False):
        """Fetch (and decrypt) a record, optionally long-polling until it exists."""

        def make_request():
            d = self._get_request(record_id, version, timeout=timeout)
            d.addCallbacks(got_response, got_failure)
            return d

        def got_response(response):
            if "value" in response:
                response["value"] = _decrypt_value(self.encryption_key, response["value"])
            return response

        def got_failure(failure):
            # While waiting, a 404 simply means "not stored yet" -- retry.
            if failure.check(Error) and wait and failure.value.status == httplib.NOT_FOUND:
                return make_request()
            else:
                return failure

        record_id = _encrypt_key(self.encryption_key, key)
        timeout = 60 if wait else None
        return make_request()

    def _get_request(self, record_id, record_version, timeout=None):
        """GET one record; returns a deferred firing with the decoded JSON body."""
        url = self._url(record_id, record_version)
        if timeout:
            values = {'timeout': str(timeout)}
        else:
            values = {}
        d = httpclient.request("GET", url, values, pool=self.pool, proxy=self.proxy)
        # FIX: decode straight to JSON. A malformed body now propagates as a
        # Failure instead of being printed and silently replaced by None
        # (consistent with _post_request).
        d.addCallback(json.loads)
        return d

    def _post_request(self, record_id, record_version, value):
        """POST a record value; `idepo` is a random idempotency token."""
        idepo = _get_random_string()
        url = self._url(record_id, record_version)
        d = httpclient.request("POST", url, {"idepo": idepo, "data": value}, {"Content-Type": ["application/x-www-form-urlencoded"]},
                    pool=self.pool, proxy=self.proxy)
        d.addCallback(json.loads)
        return d
Пример #33
0
class TreqIntegrationTests(TestCase):
    """Integration tests exercising treq's HTTP verbs against httpbin."""

    baseurl = HTTPBIN_URL
    get = with_baseurl(treq.get)
    head = with_baseurl(treq.head)
    post = with_baseurl(treq.post)
    put = with_baseurl(treq.put)
    patch = with_baseurl(treq.patch)
    delete = with_baseurl(treq.delete)

    def setUp(self):
        # Non-persistent pool so connections do not leak across tests.
        self.pool = HTTPConnectionPool(reactor, False)

    def tearDown(self):
        def _check_fds(_):
            # This appears to only be necessary for HTTPS tests.
            # For the normal HTTP tests then closeCachedConnections is
            # sufficient.
            # FIX: the original summed getReaders() with itself, so
            # writer-side descriptors were never checked; union readers
            # and writers as intended.
            fds = set(reactor.getReaders() + reactor.getWriters())
            if not [fd for fd in fds if isinstance(fd, Client)]:
                return

            return deferLater(reactor, 0, _check_fds, None)

        return self.pool.closeCachedConnections().addBoth(_check_fds)

    @inlineCallbacks
    def assert_data(self, response, expected_data):
        """Assert the JSON body's 'data' field equals `expected_data`."""
        body = yield treq.json_content(response)
        self.assertIn('data', body)
        self.assertEqual(body['data'], expected_data)

    @inlineCallbacks
    def assert_sent_header(self, response, header, expected_value):
        """Assert the echoed request headers contain `header` = `expected_value`."""
        body = yield treq.json_content(response)
        self.assertIn(header, body['headers'])
        self.assertEqual(body['headers'][header], expected_value)

    @inlineCallbacks
    def test_get(self):
        response = yield self.get('/get')
        self.assertEqual(response.code, 200)
        yield print_response(response)

    @inlineCallbacks
    def test_get_headers(self):
        response = yield self.get('/get', {'X-Blah': ['Foo', 'Bar']})
        self.assertEqual(response.code, 200)
        yield self.assert_sent_header(response, 'X-Blah', 'Foo, Bar')
        yield print_response(response)

    @inlineCallbacks
    def test_get_302_redirect_allowed(self):
        response = yield self.get('/redirect/1')
        self.assertEqual(response.code, 200)
        yield print_response(response)

    @inlineCallbacks
    def test_get_302_redirect_disallowed(self):
        response = yield self.get('/redirect/1', allow_redirects=False)
        self.assertEqual(response.code, 302)
        yield print_response(response)

    @inlineCallbacks
    def test_head(self):
        response = yield self.head('/get')
        body = yield treq.content(response)
        self.assertEqual('', body)
        yield print_response(response)

    @inlineCallbacks
    def test_head_302_redirect_allowed(self):
        response = yield self.head('/redirect/1')
        self.assertEqual(response.code, 200)
        yield print_response(response)

    @inlineCallbacks
    def test_head_302_redirect_disallowed(self):
        response = yield self.head('/redirect/1', allow_redirects=False)
        self.assertEqual(response.code, 302)
        yield print_response(response)

    @inlineCallbacks
    def test_post(self):
        response = yield self.post('/post', 'Hello!')
        self.assertEqual(response.code, 200)
        yield self.assert_data(response, 'Hello!')
        yield print_response(response)

    @inlineCallbacks
    def test_post_headers(self):
        response = yield self.post(
            '/post',
            '{msg: "Hello!"}',
            headers={'Content-Type': ['application/json']}
        )

        self.assertEqual(response.code, 200)
        yield self.assert_sent_header(
            response, 'Content-Type', 'application/json')
        yield self.assert_data(response, '{msg: "Hello!"}')
        yield print_response(response)

    @inlineCallbacks
    def test_put(self):
        response = yield self.put('/put', data='Hello!')
        yield print_response(response)

    @inlineCallbacks
    def test_patch(self):
        response = yield self.patch('/patch', data='Hello!')
        self.assertEqual(response.code, 200)
        yield self.assert_data(response, 'Hello!')
        yield print_response(response)

    @inlineCallbacks
    def test_delete(self):
        response = yield self.delete('/delete')
        self.assertEqual(response.code, 200)
        yield print_response(response)

    @inlineCallbacks
    def test_gzip(self):
        response = yield self.get('/gzip')
        self.assertEqual(response.code, 200)
        yield print_response(response)
        # Renamed from `json` to avoid shadowing the module name.
        payload = yield treq.json_content(response)
        self.assertTrue(payload['gzipped'])

    @inlineCallbacks
    def test_basic_auth(self):
        response = yield self.get('/basic-auth/treq/treq',
                                  auth=('treq', 'treq'))
        self.assertEqual(response.code, 200)
        yield print_response(response)
        payload = yield treq.json_content(response)
        self.assertTrue(payload['authenticated'])
        self.assertEqual(payload['user'], 'treq')

    @inlineCallbacks
    def test_failed_basic_auth(self):
        response = yield self.get('/basic-auth/treq/treq',
                                  auth=('not-treq', 'not-treq'))
        self.assertEqual(response.code, 401)
        yield print_response(response)
Пример #34
0
class CGTestCase(unittest.TestCase):
    """Integration tests for PubNub channel-group operations over Twisted."""

    def setUp(self):
        # Fresh, non-persistent connection pool per test.
        self.pool = HTTPConnectionPool(reactor, persistent=False)
        self.pubnub = PubNubTwisted(pnconf, reactor=reactor, pool=self.pool)

    def tearDown(self):
        # Close any cached connections so the reactor shuts down cleanly.
        return self.pool.closeCachedConnections()

    def assert_valid_cg_envelope(self, envelope, type):
        """Assert the envelope's result is an instance of the expected type."""
        self.assertIsInstance(envelope.result, type)

    @inlineCallbacks
    @pn_vcr.use_cassette(
        "tests/integrational/fixtures/twisted/groups/add_single_channel.yaml", filter_query_parameters=["uuid"]
    )
    def test_adding_channel(self):
        """A single channel can be added to a channel group."""
        request = self.pubnub.add_channel_to_channel_group()
        env = yield request.channels("cgttc").channel_group("cgttg").deferred()
        self.assert_valid_cg_envelope(env, PNChannelGroupsAddChannelResult)
        returnValue(env)

    @inlineCallbacks
    @pn_vcr.use_cassette(
        "tests/integrational/fixtures/twisted/groups/remove_single_channel.yaml", filter_query_parameters=["uuid"]
    )
    def test_removing_channel(self):
        """A single channel can be removed from a channel group."""
        request = self.pubnub.remove_channel_from_channel_group()
        env = yield request.channels("cgttc").channel_group("cgttg").deferred()
        self.assert_valid_cg_envelope(env, PNChannelGroupsRemoveChannelResult)
        returnValue(env)

    @inlineCallbacks
    @pn_vcr.use_cassette(
        "tests/integrational/fixtures/twisted/groups/add_channels.yaml", filter_query_parameters=["uuid"]
    )
    def test_adding_channels(self):
        """Several channels can be added to a channel group at once."""
        request = self.pubnub.add_channel_to_channel_group()
        env = yield request.channels(["cgttc0", "cgttc1"]).channel_group("cgttg").deferred()
        self.assert_valid_cg_envelope(env, PNChannelGroupsAddChannelResult)
        returnValue(env)

    @inlineCallbacks
    @pn_vcr.use_cassette(
        "tests/integrational/fixtures/twisted/groups/remove_channels.yaml", filter_query_parameters=["uuid"]
    )
    def test_removing_channels(self):
        """Several channels can be removed from a channel group at once."""
        request = self.pubnub.remove_channel_from_channel_group()
        env = yield request.channels(["cgttc0", "cgttc1"]).channel_group("cgttg").deferred()
        self.assert_valid_cg_envelope(env, PNChannelGroupsRemoveChannelResult)
        returnValue(env)

    @inlineCallbacks
    @pn_vcr.use_cassette(
        "tests/integrational/fixtures/twisted/groups/list_channels.yaml", filter_query_parameters=["uuid"]
    )
    def test_list_channels(self):
        """The channels registered to a group can be listed."""
        request = self.pubnub.list_channels_in_channel_group()
        env = yield request.channel_group("cgttg").deferred()
        self.assert_valid_cg_envelope(env, PNChannelGroupsListResult)
        returnValue(env)
Пример #35
0
class HTTP11DownloadHandler:
    """Scrapy download handler performing HTTP/1.1 downloads via Twisted."""

    # Instantiated eagerly by the downloader, not on first use.
    lazy = False

    def __init__(self, settings, crawler=None):
        self._crawler = crawler

        from twisted.internet import reactor

        # Persistent pool sized by the per-domain concurrency setting.
        self._pool = HTTPConnectionPool(reactor, persistent=True)
        self._pool.maxPersistentPerHost = settings.getint(
            "CONCURRENT_REQUESTS_PER_DOMAIN")
        self._pool._factory.noisy = False

        self._sslMethod = openssl_methods[settings.get(
            "DOWNLOADER_CLIENT_TLS_METHOD")]
        self._contextFactoryClass = load_object(
            settings["DOWNLOADER_CLIENTCONTEXTFACTORY"])
        # try method-aware context factory
        try:
            self._contextFactory = create_instance(
                objcls=self._contextFactoryClass,
                settings=settings,
                crawler=crawler,
                method=self._sslMethod,
            )
        except TypeError:
            # use context factory defaults
            # Older/custom factories that do not accept the extra keyword
            # arguments fall back to their defaults, with a warning.
            self._contextFactory = create_instance(
                objcls=self._contextFactoryClass,
                settings=settings,
                crawler=crawler,
            )
            msg = f"""
 '{settings["DOWNLOADER_CLIENTCONTEXTFACTORY"]}' does not accept `method` \
 argument (type OpenSSL.SSL method, e.g. OpenSSL.SSL.SSLv23_METHOD) and/or \
 `tls_verbose_logging` argument and/or `tls_ciphers` argument.\
 Please upgrade your context factory class to handle them or ignore them."""
            warnings.warn(msg)
        self._default_maxsize = settings.getint("DOWNLOAD_MAXSIZE")
        self._default_warnsize = settings.getint("DOWNLOAD_WARNSIZE")
        self._fail_on_dataloss = settings.getbool("DOWNLOAD_FAIL_ON_DATALOSS")
        self._disconnect_timeout = 1

    @classmethod
    def from_crawler(cls, crawler):
        """Standard Scrapy factory: build the handler from a crawler."""
        return cls(crawler.settings, crawler)

    def download_request(self, request, spider):
        """Return a deferred for the HTTP download"""
        # Per-spider size limits override the settings-derived defaults.
        agent = ScrapyAgent(
            contextFactory=self._contextFactory,
            pool=self._pool,
            maxsize=getattr(spider, "download_maxsize", self._default_maxsize),
            warnsize=getattr(spider, "download_warnsize",
                             self._default_warnsize),
            fail_on_dataloss=self._fail_on_dataloss,
            crawler=self._crawler,
        )
        return agent.download_request(request)

    def close(self):
        """Close pooled connections, forcing completion after a short timeout."""
        from twisted.internet import reactor

        d = self._pool.closeCachedConnections()
        # closeCachedConnections will hang on network or server issues, so
        # we'll manually timeout the deferred.
        #
        # Twisted issue addressing this problem can be found here:
        # https://twistedmatrix.com/trac/ticket/7738.
        #
        # closeCachedConnections doesn't handle external errbacks, so we'll
        # issue a callback after `_disconnect_timeout` seconds.
        delayed_call = reactor.callLater(self._disconnect_timeout, d.callback,
                                         [])

        def cancel_delayed_call(result):
            if delayed_call.active():
                delayed_call.cancel()
            return result

        d.addBoth(cancel_delayed_call)
        return d
Пример #36
0
class Gardener():
    """Follow a URL through its HTTP redirect chain and collect headers.

    Requests are issued with a Twisted ``Agent`` (optionally cookie-aware
    via ``CookieAgent``, optionally backed by a persistent
    ``HTTPConnectionPool``).  Each hop's headers are gathered into a plain
    dict that also carries pseudo-headers (URI, status code, version,
    reason phrase) and, under the ``previous`` key, the dict of the
    earlier hop -- forming a linked chain of the whole redirect path.
    """

    # Keys for pseudo-headers added to each per-response headers dict.
    HTTP_REASON_PHRASE = 'Reason-Phrase'
    HTTP_VERSION = 'Version'
    HTTP_STATUS_CODE = 'Status-Code'
    HTTP_URI = 'URI'
    DNS = 'DNS'    
    
    # Real header names, matched case-insensitively via get_header().
    http_content_type = 'content-type'
    http_header_location = 'location'
    
    # Key under which the previous hop's headers dict is linked.
    previous = 'previous'
    
    text_html = 'text/html'

    @staticmethod
    def get_header(headers, header):
        """Case-insensitive lookup of *header* in *headers*; None if missing."""
        for h, v in headers.iteritems():
            if h.lower() == header.lower():
                return v
        return None
    
    '''
    '''        
    def __init__(self, common_headers = None, hang_up = True, use_cookies = True, pool = True, dns = True, max_hops = 5, connection_timeout = 10, verbose = False):
        """Configure the agent, connection pool and redirect policy.

        NOTE(review): the ``dns`` flag is accepted but never used inside
        this class as shown here -- confirm whether a caller relies on it.
        """
        if pool:
            self.connection_pool = HTTPConnectionPool(reactor, persistent=True)            
        else:
            self.connection_pool = HTTPConnectionPool(reactor, persistent=False)
            
        if use_cookies:
            cookieJar = CookieJar()
            self.agent = CookieAgent(Agent(reactor, pool = self.connection_pool), cookieJar)
        else:
            self.agent = Agent(reactor, pool = self.connection_pool)
        
        if verbose:
            log.startLogging(sys.stdout)
        
        self.hang_up = hang_up
        
        self.common_headers = common_headers
        self.max_hops = max_hops
        self.connection_timeout = connection_timeout
                
    def _request_error(self, err, url, prev = None):
        """Errback: log the failure for *url*, then re-raise it unchanged."""
        log.msg('request_error: {0} for {1}'.format(err.value.message, url), logLevel=logging.CRITICAL)            
        raise err

    def _gather_headers(self, reply, url, timer = None, prev = None):
        """Callback: turn a Twisted response into a flat headers dict.

        Cancels the pending timeout *timer*, copies every raw header plus
        the status-code / version / reason-phrase pseudo-headers, links
        *prev* (the previous hop's dict) under ``previous``, and stops the
        response body producer since only headers are wanted.  Any failure
        is wrapped in a HeaderException that carries the partial dict.
        """
        if timer is not None and not timer.called:
            timer.cancel()                    
        headers = {}
        if prev:
            headers[self.previous] = prev
        try:
            
            headers[self.HTTP_URI] = url
            
            for header, value in reply.headers.getAllRawHeaders():            
                headers[header] = value[0]  # keep only the first value per header
                        
            try:                
                headers[self.HTTP_STATUS_CODE] = reply.code
            except:
                log.msg('no code', logLevel=logging.DEBUG)
                raise Exception("Bad Response:" + url + " no " + self.HTTP_STATUS_CODE)
    
            try:
                headers[self.HTTP_VERSION] = reply.version
            except:            
                log.msg('no version', logLevel=logging.DEBUG)
                raise Exception("Bad Response:" + url + " no " + self.HTTP_VERSION)
            
            try:
                headers[self.HTTP_REASON_PHRASE] = reply.phrase
            except:
                log.msg('no phrase', logLevel=logging.DEBUG)
                raise Exception("Bad Response:" + url + " no " + self.HTTP_REASON_PHRASE)
            
            try:
                # Headers are all we need; abort the body transfer.
                if reply._transport:
                    log.msg( 'stop producing: {0}'.format(url), logLevel=logging.DEBUG) 
                    reply._transport.stopProducing()                
                    #if reply._transport._producer:
                    #    print 'Producer', reply._transport._producer.__class__.__name__
                    #    reply._transport._producer.loseConnection()                            
            except Exception as e:
                log.msg('bad reply?: {0}'.format(e), logLevel=logging.CRITICAL)
                raise Exception("bad reply?" + url)                                    
        except Exception as e:
            he = HeaderException(e)
            he.setHeader(headers)
            raise he
        return headers

    def been_to(self, url, headers):
        """True if *url* already appears anywhere in the redirect chain."""
        if url == headers[self.HTTP_URI]:
            return True
        elif self.previous in headers:
            return self.been_to(url, headers[self.previous])
        else:
            return False
        
    def _follow_(self, headers):
        """Callback: chase a 3xx redirect with a Location, guarding loops.

        Raises HeaderException when the redirect target resolves to a URI
        already visited in this chain; any other response (including a
        302 carrying an HTML content type but no Location) is returned
        as the final answer.
        """
        if str(headers[self.HTTP_STATUS_CODE])[:1] == '3' and Gardener.get_header(headers, self.http_header_location):
            moved_to = Gardener.get_header(headers, self.http_header_location)
            log.msg('{0} moved: {1}'.format(headers[self.HTTP_URI], moved_to), logLevel=logging.DEBUG)  
            if not urlparse(moved_to).scheme:
                # Relative Location header: resolve against the current URI.
                moved_to = urljoin(headers[self.HTTP_URI], moved_to)
            if not self.been_to(moved_to, headers):
                log.msg('chase {0}'.format(moved_to), logLevel=logging.INFO) 
                return self.get_url(moved_to, headers)
            else:
                he = HeaderException('Code: ' + str(headers[self.HTTP_STATUS_CODE]) + ' Location and URI resolve to same:' + headers[self.HTTP_URI] + '    ' + moved_to)
                he.setHeader(headers)
                raise he
            
        elif headers[self.HTTP_STATUS_CODE] == 302 and Gardener.get_header(headers, self.http_content_type) and self.text_html in Gardener.get_header(headers, self.http_content_type):
            log.msg('acceptable 302 found', logLevel=logging.DEBUG)
            return headers
        else:
            return headers

    def timeout_request(self, timed_deferred, url):
        """Cancel the request deferred if it has not fired yet.

        A paused deferred cannot be cancelled immediately, so in that
        case cancellation is retried after another connection_timeout.
        """
        if not timed_deferred.called:
            log.msg('cancel request to {0}'.format(url), logLevel=logging.INFO)  
            timed_deferred.cancel()
        if timed_deferred.paused:
            def check_paused(paused_deferred):
                log.msg('paused deferred {0}'.format(paused_deferred), logLevel=logging.INFO)
                paused_deferred.cancel()
            reactor.callLater(self.connection_timeout, check_paused, timed_deferred)

    def _hang_up(self, answer, url):
        """Close any cached pool connections, then pass *answer* through."""
        log.msg('hang up {0}'.format(self.connection_pool._connections.keys()), logLevel=logging.INFO)        
        if self.connection_pool._connections or self.connection_pool._timeouts:
            d = self.connection_pool.closeCachedConnections()
            d.addBoth(lambda ign: answer)
            return d
        else:
            log.msg('no hang up necessary: {0}'.format(url), logLevel=logging.DEBUG)
            return answer

    def get_url(self, url, prev = None):
        """Fetch *url*; return a deferred firing with its headers dict.

        Prepends ``http://`` when no scheme is given, raises
        TooManyHopsException once the chain encoded in *prev* exceeds
        max_hops, arms a cancellation timer, then chains the
        header-gathering and redirect-following callbacks.  Only the
        outermost call (empty *prev*) hangs up pooled connections.
        """
        if not urlparse(url).scheme:
            log.msg('add http:// to {0}'.format(url), logLevel=logging.DEBUG)
            url = "http://" + url                            
        log.msg('url: {0}'.format(url), logLevel=logging.INFO)
        def previousCount(p):
            # Depth of the redirect chain stored in nested 'previous' dicts.
            if p is None: 
                return 0
            elif self.previous in p:
                return 1 + previousCount(p[self.previous])
            else:
                return 1
        if previousCount(prev) > self.max_hops:
            log.msg('Too Many Hops {0}'.format(url), logLevel=logging.WARN) 
            ex = TooManyHopsException('Too Many Hops')
            ex.setHeader(prev)
            raise ex
                    
        request = self.agent.request('GET', url, Headers(self.common_headers))        
        
        timer = reactor.callLater(self.connection_timeout, self.timeout_request, request, url)        
        
        request.addCallback(self._gather_headers, url, timer, prev)        
        request.addCallback(self._follow_)        
        request.addErrback(self._request_error, url, prev)
        if self.hang_up and previousCount(prev) == 0:
            request.addBoth(lambda answer: self._hang_up(answer, url))
        return request
Пример #37
0
class HereNowTest(unittest.TestCase):
    """Integration tests for here-now presence queries (Twisted client)."""

    def setUp(self):
        self.pool = HTTPConnectionPool(reactor, persistent=False)
        self.pubnub = PubNubTwisted(pnconf, reactor=reactor, pool=self.pool)

    def tearDown(self):
        return self.pool.closeCachedConnections()

    class PNHereNowChannelData(object):
        # Minimal value object mirroring one channel's here-now payload.
        def __init__(self, channel_name, occupancy, occupants):
            self.channel_name = channel_name
            self.occupancy = occupancy
            self.occupants = occupants

    def assert_valid_here_now_envelope(self, envelope, result_channels):
        """Envelope must hold a PNHereNowResult matching *result_channels*."""
        self.assertIsInstance(envelope, TwistedEnvelope)
        self.assertIsInstance(envelope.result, PNHereNowResult)

        def summarize(channel_data):
            occupant_uuids = map(lambda occupant: occupant.uuid,
                                 channel_data.occupants)
            return [channel_data.channel_name,
                    channel_data.occupancy,
                    occupant_uuids]

        self.assertEqual(map(summarize, envelope.result.channels),
                         result_channels)

    @inlineCallbacks
    @pn_vcr.use_cassette(
        'tests/integrational/fixtures/twisted/here_now/global.yaml',
        filter_query_parameters=['uuid'])
    def test_global_here_now(self):
        envelope = yield (self.pubnub.here_now()
                          .include_uuids(True)
                          .deferred())

        expected = [
            [u'twisted-test-1', 1, [u'00de2586-7ad8-4955-b5f6-87cae3215d02']],
            [u'twisted-test', 1, [u'00de2586-7ad8-4955-b5f6-87cae3215d02']],
        ]
        self.assert_valid_here_now_envelope(envelope, expected)
        returnValue(envelope)

    @inlineCallbacks
    @pn_vcr.use_cassette(
        'tests/integrational/fixtures/twisted/here_now/single.yaml',
        filter_query_parameters=['uuid'])
    def test_here_now_single_channel(self):
        envelope = yield (self.pubnub.here_now()
                          .channels(channel)
                          .include_uuids(True)
                          .deferred())

        expected = [
            ['twisted-test', 1, [u'00de2586-7ad8-4955-b5f6-87cae3215d02']],
        ]
        self.assert_valid_here_now_envelope(envelope, expected)
        returnValue(envelope)

    @inlineCallbacks
    @pn_vcr.use_cassette(
        'tests/integrational/fixtures/twisted/here_now/multiple.yaml',
        filter_query_parameters=['uuid'])
    def test_here_now_multiple_channels(self):
        envelope = yield (self.pubnub.here_now()
                          .channels(channels)
                          .include_uuids(True)
                          .deferred())

        expected = [
            [u'twisted-test-1', 1, [u'00de2586-7ad8-4955-b5f6-87cae3215d02']],
        ]
        self.assert_valid_here_now_envelope(envelope, expected)
        returnValue(envelope)
Пример #38
0
class Downloader():
    """Async download queue built on Twisted.

    Requests are pulled from a primary deque (``q``) and, once it drains,
    a secondary ``onhold`` deque.  Up to ``cc`` cooperative workers fetch
    them through a shared ``Client`` / ``HTTPConnectionPool``; responses
    are optionally cached on disk, failed requests are re-queued while
    they have retries left, and ``start`` runs the reactor until both
    queues are empty.
    """
    def __init__(self,
                 scraper,
                 cc=3,
                 progress_check_interval=60,
                 stop_when_no_progress_made=True):

        # scraper supplies logging, the disk cache, proxy manager and
        # path helpers used throughout this class.
        self.scraper = scraper

        self.cc = cc
        self.progress_check_interval = progress_check_interval
        self.stop_when_no_progress_made = stop_when_no_progress_made

        self.q = deque()  #working queue

        self.onhold = deque()  #waiting queue

        self._done_count = 0

        self._prev_stats = None

        self._pool = HTTPConnectionPool(reactor, persistent=True)
        self._pool.maxPersistentPerHost = max(self.cc, 10)
        self._pool._factory.noisy = False
        self.client = Client(scraper, pool=self._pool)

    def set_cc(self, cc):
        """Change concurrency; the pool keeps at least 10 persistent slots."""
        self.cc = cc
        self._pool.maxPersistentPerHost = max(self.cc, 10)

    def put(self, req, onhold=False):
        """Append a Request object to a queue.

        - onhold: put it on the secondary queue so it is processed only
          after everything on the primary queue.
        """
        if not onhold:

            self.q.append(req)
        else:
            self.onhold.append(req)

    def putleft(self, req):
        """Push *req* to the front of the primary queue (processed next)."""
        self.q.appendleft(req)

    def putonhold(self, req):
        """Queue *req* on the secondary queue.

        Equivalent to ``put(req, onhold=True)``.
        """
        self.onhold.append(req)

    def process(self):
        """Generator consumed by the Cooperator: yields one deferred per request."""

        while True:
            try:
                req = self.q.popleft()
                req.normalize(self.scraper)

                req.start_time = time.time()

                self._done_count += 1

                if req.get('bin') is True:
                    d = self._download_file(req)
                else:
                    d = self._request(req)

                if d is not None:
                    #add a timeout call on the deferred to prevent downloader hangout
                    timeout = req.get('timeout') or 60

                    timeout += 30  #wait an extra time compared to the timeout set by the request

                    reactor.callLater(timeout, d.cancel)

                yield d

            except Exception as e:
                # NOTE(review): meant to catch the IndexError from an
                # empty deque, but this broad except also swallows errors
                # raised by normalize()/_request() -- confirm before
                # narrowing it.
                #main queue finished
                break

    def start(self):
        """Spin up ``cc`` cooperative workers and run the reactor."""

        if len(self.q) == 0 and len(self.onhold) == 0:
            #nothing on queues
            return

        coop = task.Cooperator(started=True)
        generator = self.process()

        deferreds = []

        for i in xrange(self.cc):

            d = coop.coiterate(generator)

            deferreds.append(d)

        dl = defer.DeferredList(deferreds)

        def dl_finish(reason=None):
            # All workers exhausted the generator; decide what happens next.
            self.stop()

        dl.addBoth(dl_finish)

        if self.progress_check_interval:
            task.LoopingCall(self.progress).start(self.progress_check_interval)

            #make sure this task is triggered just once
            self.progress_check_interval = None

        if not reactor.running:
            reactor.run()

    def stop(self, result=''):
        """Restart if new work arrived, promote onhold items, or shut down."""

        if len(self.q):
            # new items have just been added to the q, so go back and start processing again.
            self.start()

        elif len(self.onhold):
            #copy the onhold to main q, then restart the downloader
            self.q = self.onhold
            self.onhold = deque()
            self.scraper.logger.info('process onhold items: %s', len(self.q))
            self.start()

        else:

            #all items in both queues processed
            self.progress()
            self.scraper.logger.info('download finished')
            try:
                d = self._close_connection_pool()

                d.addBoth(lambda param: reactor.stop())

                # reactor.stop()
            except Exception:
                pass

    def _close_connection_pool(self):
        """Close pooled connections with a manual 1-second safety timeout."""
        d = self._pool.closeCachedConnections()
        # closeCachedConnections will hang on network or server issues, so
        # we'll manually timeout the deferred.
        #
        # Twisted issue addressing this problem can be found here:
        # https://twistedmatrix.com/trac/ticket/7738.
        #
        # closeCachedConnections doesn't handle external errbacks, so we'll
        # issue a callback after `_disconnect_timeout` seconds.
        delayed_call = reactor.callLater(1, d.callback, [])

        def cancel_delayed_call(result):
            if delayed_call.active():
                delayed_call.cancel()
            return result

        d.addBoth(cancel_delayed_call)
        return d

    def progress(self):
        """Log queue stats; force-stop the reactor when no progress is made."""
        stats = dict(pending=len(self.q),
                     onhold=len(self.onhold),
                     done=int(self._done_count))
        self.scraper.logger.info('pending: %s, done: %s, onhold: %s',
                                 stats['pending'], stats['done'],
                                 stats['onhold'])

        if self._prev_stats == stats and (stats['pending'] > 0
                                          or stats['onhold'] > 0):
            #for some reason the downloader made no progress, try to stop it manually
            if self.stop_when_no_progress_made:
                if reactor.running:
                    try:
                        self.scraper.logger.warn(
                            'downloader stopped uncleanly')
                        reactor.stop()
                    except Exception:
                        pass

        self._prev_stats = stats

    def _write_file(self, file_path, data):
        """Write *data* to *file_path* through a non-blocking descriptor.

        NOTE(review): opens in text mode ('w'); confirm 'wb' is not needed
        for binary downloads on all target platforms.
        """
        with open(file_path, 'w') as f:
            # d = Deferred()
            fd = f.fileno()
            setNonBlocking(fd)
            writeToFD(fd, data)
            # return d

    def _write_to_cache(self, url, post, data, file_name=None):
        """Store *data* in the scraper's cache, keyed by url/post or name."""
        file_name = file_name or self.scraper.cache.make_key(url=url,
                                                             post=post)
        file_path = os.path.join(self.scraper.cache.location, file_name)
        return self._write_file(file_path, data)

    def _read_from_cache(self, url, post, file_name):
        """Return a deferred firing with the cached body for url/post."""
        d = defer.Deferred()

        file_name = file_name or self.scraper.cache.make_key(url=url,
                                                             post=post)
        path = os.path.join(self.scraper.cache.location, file_name)
        with open(path, 'rb') as f:
            fd = f.fileno()
            setNonBlocking(fd)
            _readFromFD(fd, d.callback)

        return d

    def _build_response_data(self, req, response):
        """Convert a raw response dict into the object *req* asked for.

        ``return_type`` 'doc' yields an http.Doc, 'html' a DataItem;
        anything else is logged and returns None.
        """
        encoding = 'utf8'
        unicode_html = u''

        try:
            unicode_html = response['data'].decode(encoding, 'ignore')
        except Exception as e:
            # NOTE(review): uses the module-level `logger` here, unlike
            # self.scraper.logger everywhere else -- confirm intended.
            logger.warn('failed to decode bytes from url: %s', req.url)

        return_type = req.get('return_type') or 'doc'

        if return_type == 'doc':
            doc = http.Doc(url=req.url, html=unicode_html)
            doc.req = req
            doc.status.code = response['code']
            doc.status.message = response['message']
            return doc
        elif return_type == 'html':
            html = common.DataItem(unicode_html)
            html.req = req
            html.status = common.DataObject()
            html.status.code = response['code']
            html.status.message = response['message']
            return html

        else:
            self.scraper.logger.warn('unsupported return_type: %s',
                                     return_type)
            return None

    def _cb_no_response(self, err):
        """Errback: the request never completed (cancelled / connection lost)."""
        self.scraper.logger.debug('no response: %s', err)

    def _cb_fetch_finished(self, response):
        """Callback for a finished fetch: cache, retry, or hand to the cb.

        Successful bodies are written to the cache when requested; failed
        requests with retries left are pushed back to the front of the
        queue (optionally with a fresh proxy) instead of reaching the cb.
        """
        req = response['req']
        time_elapsed = time.time() - req.start_time
        time_elapsed = round(time_elapsed, 2)
        req.time_elapsed = time_elapsed
        self.scraper.logger.debug('time_elapsed: %s -- %s', time_elapsed,
                                  response['success'])

        if response['success'] == True:
            if req['use_cache']:
                self._write_to_cache(req.url,
                                     req.post,
                                     data=response['data'],
                                     file_name=req.get('file_name'))
        else:

            #untested code
            if req.get('retries'):
                req.update({'retries': req['retries'] - 1})

                self.scraper.logger.debug('fetch error: %s -- %s, url: %s',
                                          response['code'],
                                          response['message'], req.url)
                self.scraper.logger.debug('retry(%s): %s %s', req['retries'],
                                          req.url, req.post)

                #put back the request into the queue
                if req.get('proxy'):
                    #try with new proxy
                    req.set('proxy',
                            self.scraper.proxy_manager.get_proxy(req.url))

                self.putleft(req)
                return
            else:
                self.scraper.logger.warn('fetch error: %s -- %s, url: %s',
                                         response['code'], response['message'],
                                         req.url)
            #end of untested code

        if req.get('cb'):
            cb_data = self._build_response_data(req, response)

            req.get('cb')(cb_data)

    def _request(self, req):
        """Issue (or serve from cache) a normal page request.

        Returns a deferred, or None when the answer came from cache and
        no callback is registered.
        """
        if req['use_cache']:
            if self.scraper.cache.exists(url=req.url,
                                         post=req.post,
                                         file_name=req.get('file_name')):

                if req.get('cb'):

                    def read_file_done(data):
                        # Feed the cached body to the request's callback as
                        # if it were a fresh 200 response.
                        try:
                            encoding = req.get('encoding') or 'utf8'
                            cb_data = self._build_response_data(req,
                                                                response={
                                                                    'data':
                                                                    data,
                                                                    'code':
                                                                    200,
                                                                    'message':
                                                                    'ok'
                                                                })
                            req.get('cb')(cb_data)
                        except Exception as e:
                            self.scraper.logger.exception(e)

                    deferred = self._read_from_cache(req.url, req.post,
                                                     req.get('file_name'))

                    deferred.addCallback(read_file_done)

                    return deferred
                else:
                    #no need to return a deffered
                    return None

        deferred = self.client.fetch(req)

        # deferred.addBoth(self._cb_fetch_finished)

        deferred.addCallback(
            self._cb_fetch_finished
        )  #handle both good and bad result, as long as the request finished
        deferred.addErrback(
            self._cb_no_response)  #somehow the request could not finish

        return deferred

    def _cb_file_downloaded(self, response, req, file_path):
        """Both-callback for file downloads: write, retry, or report.

        A Failure means the request was cancelled; with retries left it is
        re-queued, otherwise the callback gets an unsuccessful
        FileDownloadResponse.
        """
        cb = req.get('cb')

        if isinstance(response, Failure):
            self.scraper.logger.warn('request cancelled: %s', req.url)
            if req.get('retries'):
                req.update({'retries': req['retries'] - 1})
                self.scraper.logger.debug('retry: %s %s', req.url, req.post)
                #put back the request into the queue
                self.put(req)
                return

            if cb:
                cb(
                    FileDownloadResponse(req=req,
                                         success=False,
                                         message='request cancelled'))

            return

        if response['success']:

            self._write_file(file_path, response['data'])
        else:
            if req.get('retries'):
                req.update({'retries': req['retries'] - 1})
                self.scraper.logger.debug('fetch error: %s -- %s, url: %s',
                                          response['code'],
                                          response['message'], req.url)
                self.scraper.logger.debug('retry: %s %s', req.url, req.post)
                #put back the request into the queue
                self.putleft(req)
                return

        if cb:
            cb(
                FileDownloadResponse(req=req,
                                     success=response['success'],
                                     message=response['message']))

    def _download_file(self, req):
        """Download a binary file to disk.

        Skips (reporting via cb) when file_name is missing or the target
        already exists; otherwise returns the fetch deferred.
        """
        cb = req.get('cb')

        file_name = req.get('file_name')
        if not file_name:

            if cb:
                cb(
                    FileDownloadResponse(req=req,
                                         success=False,
                                         message='file_name not defined'))

            return None

        directory = req.get('dir') or 'images'
        directory = self.scraper.join_path(directory)
        if not os.path.exists(directory):
            os.mkdir(directory)

        file_path = os.path.join(directory, file_name)
        if os.path.exists(file_path):
            #already downloaded
            if cb:
                cb(
                    FileDownloadResponse(req=req,
                                         success=True,
                                         message='already downloaded'))
            return None

        deferred = self.client.fetch(req)
        deferred.addBoth(self._cb_file_downloaded, req, file_path)
        return deferred
Пример #39
0
class CGTestCase(unittest.TestCase):
    """Integration tests for channel-group operations (Twisted client)."""

    def setUp(self):
        self.pool = HTTPConnectionPool(reactor, persistent=False)
        self.pubnub = PubNubTwisted(pnconf, reactor=reactor, pool=self.pool)

    def tearDown(self):
        return self.pool.closeCachedConnections()

    def assert_valid_cg_envelope(self, envelope, result_type):
        """The envelope's result must be an instance of *result_type*."""
        self.assertIsInstance(envelope.result, result_type)

    @inlineCallbacks
    @pn_vcr.use_cassette(
        'tests/integrational/fixtures/twisted/groups/add_single_channel.yaml',
        filter_query_parameters=['uuid'])
    def test_adding_channel(self):
        envelope = yield (self.pubnub.add_channel_to_channel_group()
                          .channels('cgttc')
                          .channel_group('cgttg')
                          .deferred())

        self.assert_valid_cg_envelope(envelope, PNChannelGroupsAddChannelResult)

        returnValue(envelope)

    @inlineCallbacks
    @pn_vcr.use_cassette(
        'tests/integrational/fixtures/twisted/groups/remove_single_channel.yaml',
        filter_query_parameters=['uuid'])
    def test_removing_channel(self):
        envelope = yield (self.pubnub.remove_channel_from_channel_group()
                          .channels('cgttc')
                          .channel_group('cgttg')
                          .deferred())

        self.assert_valid_cg_envelope(envelope, PNChannelGroupsRemoveChannelResult)

        returnValue(envelope)

    @inlineCallbacks
    @pn_vcr.use_cassette(
        'tests/integrational/fixtures/twisted/groups/add_channels.yaml',
        filter_query_parameters=['uuid'])
    def test_adding_channels(self):
        envelope = yield (self.pubnub.add_channel_to_channel_group()
                          .channels(['cgttc0', 'cgttc1'])
                          .channel_group('cgttg')
                          .deferred())

        self.assert_valid_cg_envelope(envelope, PNChannelGroupsAddChannelResult)

        returnValue(envelope)

    @inlineCallbacks
    @pn_vcr.use_cassette(
        'tests/integrational/fixtures/twisted/groups/remove_channels.yaml',
        filter_query_parameters=['uuid'])
    def test_removing_channels(self):
        envelope = yield (self.pubnub.remove_channel_from_channel_group()
                          .channels(['cgttc0', 'cgttc1'])
                          .channel_group('cgttg')
                          .deferred())

        self.assert_valid_cg_envelope(envelope, PNChannelGroupsRemoveChannelResult)

        returnValue(envelope)

    @inlineCallbacks
    @pn_vcr.use_cassette(
        'tests/integrational/fixtures/twisted/groups/list_channels.yaml',
        filter_query_parameters=['uuid'])
    def test_list_channels(self):
        envelope = yield (self.pubnub.list_channels_in_channel_group()
                          .channel_group('cgttg')
                          .deferred())

        self.assert_valid_cg_envelope(envelope, PNChannelGroupsListResult)

        returnValue(envelope)
Пример #40
0
class HereNowTest(unittest.TestCase):
    """Integration tests for the here-now presence call (Twisted client)."""

    def setUp(self):
        self.pool = HTTPConnectionPool(reactor, persistent=False)
        self.pubnub = PubNubTwisted(pnconf, reactor=reactor, pool=self.pool)

    def tearDown(self):
        return self.pool.closeCachedConnections()

    class PNHereNowChannelData(object):
        # Simple value holder mirroring the SDK's per-channel payload.
        def __init__(self, channel_name, occupancy, occupants):
            self.channel_name = channel_name
            self.occupancy = occupancy
            self.occupants = occupants

    def assert_valid_here_now_envelope(self, envelope, result_channels):
        """Check envelope types and that its channels equal *result_channels*."""
        self.assertIsInstance(envelope, TwistedEnvelope)
        self.assertIsInstance(envelope.result, PNHereNowResult)

        def describe(channel_data):
            member_uuids = map(lambda member: member.uuid,
                               channel_data.occupants)
            return [channel_data.channel_name,
                    channel_data.occupancy,
                    member_uuids]

        self.assertEqual(map(describe, envelope.result.channels),
                         result_channels)

    @inlineCallbacks
    @pn_vcr.use_cassette(
        'tests/integrational/fixtures/twisted/here_now/global.yaml',
        filter_query_parameters=['uuid'])
    def test_global_here_now(self):
        envelope = yield (self.pubnub.here_now()
                          .include_uuids(True)
                          .deferred())

        uuid = u'00de2586-7ad8-4955-b5f6-87cae3215d02'
        self.assert_valid_here_now_envelope(
            envelope,
            [[u'twisted-test-1', 1, [uuid]],
             [u'twisted-test', 1, [uuid]]])
        returnValue(envelope)

    @inlineCallbacks
    @pn_vcr.use_cassette(
        'tests/integrational/fixtures/twisted/here_now/single.yaml',
        filter_query_parameters=['uuid'])
    def test_here_now_single_channel(self):
        envelope = yield (self.pubnub.here_now()
                          .channels(channel)
                          .include_uuids(True)
                          .deferred())

        uuid = u'00de2586-7ad8-4955-b5f6-87cae3215d02'
        self.assert_valid_here_now_envelope(
            envelope, [['twisted-test', 1, [uuid]]])
        returnValue(envelope)

    @inlineCallbacks
    @pn_vcr.use_cassette(
        'tests/integrational/fixtures/twisted/here_now/multiple.yaml',
        filter_query_parameters=['uuid'])
    def test_here_now_multiple_channels(self):
        envelope = yield (self.pubnub.here_now()
                          .channels(channels)
                          .include_uuids(True)
                          .deferred())

        uuid = u'00de2586-7ad8-4955-b5f6-87cae3215d02'
        self.assert_valid_here_now_envelope(
            envelope, [[u'twisted-test-1', 1, [uuid]]])
        returnValue(envelope)
Пример #41
0
class AsyncHTTPClient(HttpClient):
    """Asynchronous swaggerpy.HttpClient built on Twisted.

    Attributes:
    receiver: An instance of IARIEventReceiver
    http_pool: twisted.web.client.HTTPConnectionPool backing the agent
    agent: twisted.web.client.Agent used for HTTP requests
    authenticator: authenticator shared by HTTP and WebSocket requests
    ws_conn: twisted.internet.interfaces.IConnector for the WebSocket
    """

    def __init__(self, receiver):
        """Create the client.

        Args:
        receiver: An instance of IARIEventReceiver
        """
        super(AsyncHTTPClient, self).__init__()
        self.authenticator = None
        self.ws_conn = None
        self.receiver = receiver
        self.http_pool = HTTPConnectionPool(reactor)
        self.agent = Agent(reactor, pool=self.http_pool)

    def close(self):
        """Tear down persistent HTTP connections and the WebSocket, if any."""
        self.http_pool.closeCachedConnections()
        if self.ws_conn:
            self.ws_conn.disconnect()

    def set_basic_auth(self, host, username, password):
        """Configure SwaggerPy basic authentication.

        Args:
        host: The host to authenticate
        username: The user's name
        password: The user's password
        """
        self.authenticator = BasicAuthenticator(host=host,
                                                username=username,
                                                password=password)

    def set_api_key(self, host, api_key, param_name='api_key'):
        """Configure SwaggerPy API-key authentication.

        Args:
        host: The host to authenticate
        api_key: The API key
        param_name: The query parameter for api_key
        """
        self.authenticator = ApiKeyAuthenticator(host=host,
                                                 api_key=api_key,
                                                 param_name=param_name)

    def apply_authentication(self, req):
        """Apply the configured authenticator to *req* when it matches.

        Args:
        req  The Request instance to apply authentication to
        """
        auth = self.authenticator
        if auth and auth.matches(req.url):
            auth.apply(req)

    def request(self, method, url, params=None, data=None):
        """Perform an HTTP request.

        Args:
        method: The HTTP verb to use for the request
        url: The base URL of the request
        params: Optional. Query parameters to use with the request.
        data: A JSON body to encode and provide with the request

        Returns:
        twisted.Deferred firing when the request completes (callback on
        success, errback on failure).
        """
        req = Request(method, url, params=params)
        self.apply_authentication(req)
        if data:
            req.headers.addRawHeader('Content-Type', 'application/json')
            req.body_producer = JSONBodyProducer(data)
        return self.agent.request(req.method,
                                  req.build_url(),
                                  req.headers,
                                  req.body_producer)

    def ws_connect(self, url, params=None):
        """Open the ARI WebSocket connection.

        Args:
        url: The base url of the request
        params: Optional. Query parameters to use with the request.

        The returned connector should generally not be used to close the
        WebSocket; call close() instead, which also shuts down the
        persistent HTTP connections.

        Returns:
        An instance of twisted.internet.interfaces.IConnector
        """
        req = Request('GET', url, params=params)
        self.apply_authentication(req)

        factory = ARIWebSocketClientFactory(self.receiver, req.build_url())
        self.ws_conn = factory.connect()
        return self.ws_conn
Пример #42
0
class HTTPClientService(service.SharedService):
    """A SharedService class that can make http requests to remote services.

    I can use either txrequests or treq, depending on what I find installed

    I provide minimal get/post/put/delete API with automatic baseurl joining, and json data encoding
    that is suitable for use from buildbot services.
    """
    TREQ_PROS_AND_CONS = textwrap.dedent("""
       txrequests is based on requests and is probably a bit more mature, but it requires threads to run,
       so has more overhead.
       treq is better integrated in twisted and is more and more feature equivalent

       txrequests is 2.8x slower than treq due to the use of threads.

       http://treq.readthedocs.io/en/latest/#feature-parity-w-requests
       pip install txrequests
           or
       pip install treq
    """)
    # Those could be in theory be overridden in master.cfg by using
    # import buildbot.util.httpclientservice.HTTPClientService.PREFER_TREQ = True
    # We prefer at the moment keeping it simple
    PREFER_TREQ = False
    MAX_THREADS = 5

    def __init__(self, base_url, auth=None, headers=None):
        """Create the shared HTTP client service.

        base_url: prefix joined to every endpoint; must not end with '/'
        auth: optional credentials; a (user, password) tuple selects basic auth
        headers: optional dict of headers added to every request
        """
        assert not base_url.endswith("/"), "baseurl should not end with /: " + base_url
        service.SharedService.__init__(self)
        self._base_url = base_url
        self._auth = auth
        self._headers = headers
        self._session = None
        # Fix: pre-initialize the treq connection pool so that stopService()
        # cannot raise AttributeError when startService() never ran (or
        # raised before creating the pool).
        self._pool = None

    def updateHeaders(self, headers):
        """Merge *headers* into the default headers sent with every request."""
        if self._headers is None:
            self._headers = {}
        self._headers.update(headers)

    @staticmethod
    def checkAvailable(from_module):
        """Call me at checkConfig time to properly report config error
           if neither txrequests or treq is installed
        """
        if txrequests is None and treq is None:
            config.error("neither txrequests nor treq is installed, but {} is requiring it\n\n{}".format(
                from_module, HTTPClientService.TREQ_PROS_AND_CONS))

    def startService(self):
        """Pick a backend (txrequests or treq) and set up its resources."""
        # treq only supports basicauth, so we force txrequests if the auth is something else
        if self._auth is not None and not isinstance(self._auth, tuple):
            # deliberately shadows the class attribute for this instance only
            self.PREFER_TREQ = False
        if txrequests is not None and not self.PREFER_TREQ:
            self._session = txrequests.Session()
            self._doRequest = self._doTxRequest
        elif treq is None:
            raise ImportError("{classname} requires either txrequest or treq install."
                              " Users should call {classname}.checkAvailable() during checkConfig()"
                              " to properly alert the user.".format(classname=self.__class__.__name__))
        else:
            self._doRequest = self._doTReq
            self._pool = HTTPConnectionPool(self.master.reactor)
            self._pool.maxPersistentPerHost = self.MAX_THREADS
            self._agent = Agent(self.master.reactor, pool=self._pool)

    def stopService(self):
        """Release backend resources (thread session or connection pool)."""
        if self._session:
            return self._session.close()
        # Fix: only close the pool when startService() actually created one;
        # previously this raised AttributeError when the service was stopped
        # without ever being (successfully) started.
        if self._pool is not None:
            return self._pool.closeCachedConnections()

    def _prepareRequest(self, ep, kwargs):
        """Join *ep* onto the base url and fold in default auth and headers."""
        assert ep.startswith("/"), "ep should start with /: " + ep
        url = self._base_url + ep
        if self._auth is not None and 'auth' not in kwargs:
            kwargs['auth'] = self._auth
        headers = kwargs.get('headers', {})
        if self._headers is not None:
            headers.update(self._headers)
        kwargs['headers'] = headers
        return url, kwargs

    def _doTxRequest(self, method, ep, **kwargs):
        # txrequests backend: the request runs in a worker thread.
        url, kwargs = self._prepareRequest(ep, kwargs)

        def readContent(session, res):
            # this forces reading of the content inside the thread
            res.content
            return res
        # read the whole content in the thread
        kwargs['background_callback'] = readContent
        d = self._session.request(method, url, **kwargs)
        d.addCallback(TxRequestsResponseWrapper)
        d.addCallback(IHttpResponse)
        return d

    def _doTReq(self, method, ep, data=None, json=None, **kwargs):
        # treq backend: fully twisted, no threads involved.
        url, kwargs = self._prepareRequest(ep, kwargs)
        # treq requires header values to be an array
        kwargs['headers'] = dict([(k, [v]) for k, v in kwargs['headers'].items()])
        kwargs['agent'] = self._agent

        if isinstance(json, dict):
            data = jsonmodule.dumps(json)
            kwargs['headers']['Content-Type'] = ['application/json']
            kwargs['data'] = data

        if isinstance(data, dict):
            kwargs['data'] = data

        d = getattr(treq, method)(url, **kwargs)
        d.addCallback(IHttpResponse)
        return d

    # lets be nice to the auto completers, and don't generate that code
    def get(self, ep, **kwargs):
        return self._doRequest('get', ep, **kwargs)

    def put(self, ep, **kwargs):
        return self._doRequest('put', ep, **kwargs)

    def delete(self, ep, **kwargs):
        return self._doRequest('delete', ep, **kwargs)

    def post(self, ep, **kwargs):
        return self._doRequest('post', ep, **kwargs)
Пример #43
0
class TreqIntegrationTests(TestCase):
    """Integration tests exercising treq verbs against a live httpbin URL.

    Each helper verb is pre-bound to ``baseurl`` via ``with_baseurl``, so
    tests pass only the endpoint path.
    """
    baseurl = HTTPBIN_URL
    get = with_baseurl(treq.get)
    head = with_baseurl(treq.head)
    post = with_baseurl(treq.post)
    put = with_baseurl(treq.put)
    patch = with_baseurl(treq.patch)
    delete = with_baseurl(treq.delete)

    def setUp(self):
        # Non-persistent pool so connections do not outlive a test.
        self.pool = HTTPConnectionPool(reactor, False)

    def tearDown(self):
        def _check_fds(_):
            # This appears to only be necessary for HTTPS tests.
            # For the normal HTTP tests then closeCachedConnections is
            # sufficient.
            # Fix: the original summed getReaders() twice; include the
            # writers as well so lingering write-side Client FDs are
            # also detected before the test is allowed to finish.
            fds = set(reactor.getReaders() + reactor.getWriters())
            if not [fd for fd in fds if isinstance(fd, Client)]:
                return

            # Poll again on the next reactor turn until all Clients are gone.
            return deferLater(reactor, 0, _check_fds, None)

        return self.pool.closeCachedConnections().addBoth(_check_fds)

    @inlineCallbacks
    def assert_data(self, response, expected_data):
        """Assert the httpbin JSON echo contains *expected_data* under 'data'."""
        body = yield treq.json_content(response)
        self.assertIn("data", body)
        self.assertEqual(body["data"], expected_data)

    @inlineCallbacks
    def assert_sent_header(self, response, header, expected_value):
        """Assert the request reached httpbin carrying *header*: *expected_value*."""
        body = yield treq.json_content(response)
        self.assertIn(header, body["headers"])
        self.assertEqual(body["headers"][header], expected_value)

    @inlineCallbacks
    def test_get(self):
        response = yield self.get("/get")
        self.assertEqual(response.code, 200)
        yield print_response(response)

    @inlineCallbacks
    def test_get_headers(self):
        response = yield self.get("/get", {"X-Blah": ["Foo", "Bar"]})
        self.assertEqual(response.code, 200)
        yield self.assert_sent_header(response, "X-Blah", "Foo, Bar")
        yield print_response(response)

    @inlineCallbacks
    def test_get_302_absolute_redirect(self):
        response = yield self.get("/redirect-to?url={0}/get".format(self.baseurl))
        self.assertEqual(response.code, 200)
        yield print_response(response)

    @todo_relative_redirect
    @inlineCallbacks
    def test_get_302_relative_redirect(self):
        response = yield self.get("/relative-redirect/1")
        self.assertEqual(response.code, 200)
        yield print_response(response)

    @inlineCallbacks
    def test_get_302_redirect_disallowed(self):
        response = yield self.get("/redirect/1", allow_redirects=False)
        self.assertEqual(response.code, 302)
        yield print_response(response)

    @inlineCallbacks
    def test_head(self):
        response = yield self.head("/get")
        body = yield treq.content(response)
        self.assertEqual("", body)
        yield print_response(response)

    @inlineCallbacks
    def test_head_302_absolute_redirect(self):
        response = yield self.head("/redirect-to?url={0}/get".format(self.baseurl))
        self.assertEqual(response.code, 200)
        yield print_response(response)

    @todo_relative_redirect
    @inlineCallbacks
    def test_head_302_relative_redirect(self):
        response = yield self.head("/relative-redirect/1")
        self.assertEqual(response.code, 200)
        yield print_response(response)

    @inlineCallbacks
    def test_head_302_redirect_disallowed(self):
        response = yield self.head("/redirect/1", allow_redirects=False)
        self.assertEqual(response.code, 302)
        yield print_response(response)

    @inlineCallbacks
    def test_post(self):
        response = yield self.post("/post", "Hello!")
        self.assertEqual(response.code, 200)
        yield self.assert_data(response, "Hello!")
        yield print_response(response)

    @inlineCallbacks
    def test_multipart_post(self):
        class FileLikeObject(StringIO):
            def __init__(self, val):
                StringIO.__init__(self, val)
                self.name = "david.png"

            def read(*args, **kwargs):
                return StringIO.read(*args, **kwargs)

        response = yield self.post("/post", data={"a": "b"}, files={"file1": FileLikeObject("file")})
        self.assertEqual(response.code, 200)

        body = yield treq.json_content(response)
        self.assertEqual("b", body["form"]["a"])
        self.assertEqual("file", body["files"]["file1"])
        yield print_response(response)

    @inlineCallbacks
    def test_post_headers(self):
        response = yield self.post("/post", '{msg: "Hello!"}', headers={"Content-Type": ["application/json"]})

        self.assertEqual(response.code, 200)
        yield self.assert_sent_header(response, "Content-Type", "application/json")
        yield self.assert_data(response, '{msg: "Hello!"}')
        yield print_response(response)

    @inlineCallbacks
    def test_put(self):
        response = yield self.put("/put", data="Hello!")
        yield print_response(response)

    @inlineCallbacks
    def test_patch(self):
        response = yield self.patch("/patch", data="Hello!")
        self.assertEqual(response.code, 200)
        yield self.assert_data(response, "Hello!")
        yield print_response(response)

    @inlineCallbacks
    def test_delete(self):
        response = yield self.delete("/delete")
        self.assertEqual(response.code, 200)
        yield print_response(response)

    @inlineCallbacks
    def test_gzip(self):
        response = yield self.get("/gzip")
        self.assertEqual(response.code, 200)
        yield print_response(response)
        json = yield treq.json_content(response)
        self.assertTrue(json["gzipped"])

    @inlineCallbacks
    def test_basic_auth(self):
        response = yield self.get("/basic-auth/treq/treq", auth=("treq", "treq"))
        self.assertEqual(response.code, 200)
        yield print_response(response)
        json = yield treq.json_content(response)
        self.assertTrue(json["authenticated"])
        self.assertEqual(json["user"], "treq")

    @inlineCallbacks
    def test_failed_basic_auth(self):
        response = yield self.get("/basic-auth/treq/treq", auth=("not-treq", "not-treq"))
        self.assertEqual(response.code, 401)
        yield print_response(response)

    @inlineCallbacks
    def test_timeout(self):
        """
        Verify a timeout fires if a request takes too long.
        """
        yield self.assertFailure(self.get("/delay/2", timeout=1), CancelledError, ResponseFailed)
Пример #44
0
class TestEventsEndpoint(AbstractApiTest):
    """Integration test for the /events HTTP endpoint.

    A long-lived GET request is opened against /events once Tribler has
    started; notifications fired through the session notifier are then
    expected to arrive over that stream.
    """

    @inlineCallbacks
    def setUp(self):
        yield super(TestEventsEndpoint, self).setUp()
        # Fires once messages_to_wait_for event messages have been received.
        self.events_deferred = Deferred()
        self.connection_pool = HTTPConnectionPool(reactor, False)
        # Open the event stream only after Tribler is fully started.
        self.socket_open_deferred = self.tribler_started_deferred.addCallback(
            self.open_events_socket)
        self.messages_to_wait_for = 0

    @inlineCallbacks
    def tearDown(self):
        yield self.close_connections()

        # Wait to make sure the HTTPChannel is closed, see https://twistedmatrix.com/trac/ticket/2447
        yield deferLater(reactor, 0.3, lambda: None)

        yield super(TestEventsEndpoint, self).tearDown()

    def on_event_socket_opened(self, response):
        # Stream the response body through a protocol that counts incoming
        # event messages and fires events_deferred once enough have arrived.
        response.deliverBody(
            EventDataProtocol(self.messages_to_wait_for, self.events_deferred,
                              response))

    def open_events_socket(self, _):
        """Issue the long-lived GET request that subscribes to /events."""
        # NOTE(review): the method is passed as bytes while the URI and header
        # values are native strings; this matches a Python 2 Twisted Agent.
        # On Python 3, Agent.request requires a bytes URI -- confirm the
        # target interpreter before porting.
        agent = Agent(reactor, pool=self.connection_pool)
        return agent.request(b'GET', 'http://localhost:%s/events' % self.session.config.get_http_api_port(),
                             Headers({'User-Agent': ['Tribler ' + version_id]}), None) \
            .addCallback(self.on_event_socket_opened)

    def close_connections(self):
        # Closing the pool also tears down the persistent /events connection.
        return self.connection_pool.closeCachedConnections()

    @trial_timeout(20)
    def test_events(self):
        """
        Testing whether various events are coming through the events endpoints
        """
        # Must match the number of notifications fired below (plus the
        # initial events), otherwise events_deferred never fires.
        self.messages_to_wait_for = 21

        def send_notifications(_):
            self.session.notifier.notify(NTFY_UPGRADER, NTFY_STARTED, None,
                                         None)
            self.session.notifier.notify(NTFY_UPGRADER_TICK, NTFY_STARTED,
                                         None, None)
            self.session.notifier.notify(NTFY_UPGRADER, NTFY_FINISHED, None,
                                         None)
            self.session.notifier.notify(NTFY_WATCH_FOLDER_CORRUPT_TORRENT,
                                         NTFY_INSERT, None, None)
            self.session.notifier.notify(NTFY_NEW_VERSION, NTFY_INSERT, None,
                                         None)
            self.session.notifier.notify(NTFY_CHANNEL, NTFY_DISCOVERED, None,
                                         None)
            self.session.notifier.notify(NTFY_TORRENT, NTFY_DISCOVERED, None,
                                         {'a': 'Invalid character \xa1'})
            self.session.notifier.notify(NTFY_TORRENT, NTFY_FINISHED, 'a' * 10,
                                         None)
            self.session.notifier.notify(NTFY_TORRENT, NTFY_ERROR, 'a' * 10,
                                         'This is an error message')
            self.session.notifier.notify(NTFY_MARKET_ON_ASK, NTFY_UPDATE, None,
                                         {'a': 'b'})
            self.session.notifier.notify(NTFY_MARKET_ON_BID, NTFY_UPDATE, None,
                                         {'a': 'b'})
            self.session.notifier.notify(NTFY_MARKET_ON_ASK_TIMEOUT,
                                         NTFY_UPDATE, None, {'a': 'b'})
            self.session.notifier.notify(NTFY_MARKET_ON_BID_TIMEOUT,
                                         NTFY_UPDATE, None, {'a': 'b'})
            self.session.notifier.notify(NTFY_MARKET_ON_TRANSACTION_COMPLETE,
                                         NTFY_UPDATE, None, {'a': 'b'})
            self.session.notifier.notify(NTFY_MARKET_ON_PAYMENT_RECEIVED,
                                         NTFY_UPDATE, None, {'a': 'b'})
            self.session.notifier.notify(NTFY_MARKET_ON_PAYMENT_SENT,
                                         NTFY_UPDATE, None, {'a': 'b'})
            self.session.notifier.notify(SIGNAL_RESOURCE_CHECK,
                                         SIGNAL_LOW_SPACE, None, {})
            self.session.notifier.notify(
                NTFY_CREDIT_MINING, NTFY_ERROR, None,
                {"message": "Some credit mining error"})
            self.session.notifier.notify(NTFY_TUNNEL, NTFY_REMOVE,
                                         Circuit(1234, None), 'test')
            self.session.notifier.notify(SIGNAL_GIGACHANNEL_COMMUNITY,
                                         SIGNAL_ON_SEARCH_RESULTS, None, {
                                             "query": "test",
                                             "results": []
                                         })
            self.session.lm.api_manager.root_endpoint.events_endpoint.on_tribler_exception(
                "hi")

        self.socket_open_deferred.addCallback(send_notifications)

        return self.events_deferred
Пример #45
0
class HTTPClientService(service.SharedService):
    """A SharedService class that can make http requests to remote services.

    I can use either txrequests or treq, depending on what I find installed

    I provide minimal get/post/put/delete API with automatic baseurl joining, and json data encoding
    that is suitable for use from buildbot services.
    """
    TREQ_PROS_AND_CONS = textwrap.dedent("""
       txrequests is based on requests and is probably a bit more mature, but it requires threads to run,
       so has more overhead.
       treq is better integrated in twisted and is more and more feature equivalent

       txrequests is 2.8x slower than treq due to the use of threads.

       http://treq.readthedocs.io/en/latest/#feature-parity-w-requests
       pip install txrequests
           or
       pip install treq
    """)
    # Those could be in theory be overridden in master.cfg by using
    # import buildbot.util.httpclientservice.HTTPClientService.PREFER_TREQ = True
    # We prefer at the moment keeping it simple
    PREFER_TREQ = False
    MAX_THREADS = 5

    def __init__(self, base_url, auth=None, headers=None):
        """Create the shared HTTP client service.

        base_url: prefix joined (via urljoin) to every endpoint
        auth: optional credentials; a (user, password) tuple selects basic auth
        headers: optional dict of headers added to every request
        """
        service.SharedService.__init__(self)
        self._base_url = base_url
        self._auth = auth
        self._headers = headers
        self._session = None
        # Fix: pre-initialize the treq connection pool so that stopService()
        # cannot raise AttributeError when startService() never ran (or
        # raised before creating the pool).
        self._pool = None

    def updateHeaders(self, headers):
        """Merge *headers* into the default headers sent with every request."""
        if self._headers is None:
            self._headers = {}
        self._headers.update(headers)

    @staticmethod
    def checkAvailable(from_module):
        """Call me at checkConfig time to properly report config error
           if neither txrequests or treq is installed
        """
        if txrequests is None and treq is None:
            config.error(
                "neither txrequests nor treq is installed, but {} is requiring it\n\n{}"
                .format(from_module, HTTPClientService.TREQ_PROS_AND_CONS))

    def startService(self):
        """Pick a backend (txrequests or treq) and set up its resources."""
        # treq only supports basicauth, so we force txrequests if the auth is something else
        if self._auth is not None and not isinstance(self._auth, tuple):
            # deliberately shadows the class attribute for this instance only
            self.PREFER_TREQ = False
        if txrequests is not None and not self.PREFER_TREQ:
            self._session = txrequests.Session()
            self._doRequest = self._doTxRequest
        elif treq is None:
            raise ImportError(
                "{classname} requires either txrequest or treq install."
                " Users should call {classname}.checkAvailable() during checkConfig()"
                " to properly alert the user.".format(
                    classname=self.__class__.__name__))
        else:
            self._doRequest = self._doTReq
            self._pool = HTTPConnectionPool(self.master.reactor)
            self._pool.maxPersistentPerHost = self.MAX_THREADS
            self._agent = Agent(self.master.reactor, pool=self._pool)

    def stopService(self):
        """Release backend resources (thread session or connection pool)."""
        if self._session:
            return self._session.close()
        # Fix: only close the pool when startService() actually created one;
        # previously this raised AttributeError when the service was stopped
        # without ever being (successfully) started.
        if self._pool is not None:
            return self._pool.closeCachedConnections()

    def _prepareRequest(self, ep, kwargs):
        """Join *ep* onto the base url and fold in default auth and headers."""
        url = urlparse.urljoin(self._base_url, ep)
        if self._auth is not None and 'auth' not in kwargs:
            kwargs['auth'] = self._auth
        headers = kwargs.get('headers', {})
        if self._headers is not None:
            headers.update(self._headers)
        kwargs['headers'] = headers
        return url, kwargs

    def _doTxRequest(self, method, ep, **kwargs):
        # txrequests backend: the request runs in a worker thread.
        url, kwargs = self._prepareRequest(ep, kwargs)

        def readContent(session, res):
            # this forces reading of the content inside the thread
            res.content
            return res

        # read the whole content in the thread
        kwargs['background_callback'] = readContent
        d = self._session.request(method, url, **kwargs)
        d.addCallback(TxRequestsResponseWrapper)
        d.addCallback(IHttpResponse)
        return d

    def _doTReq(self, method, ep, data=None, json=None, **kwargs):
        # treq backend: fully twisted, no threads involved.
        url, kwargs = self._prepareRequest(ep, kwargs)
        # treq requires header values to be an array
        kwargs['headers'] = dict([(k, [v])
                                  for k, v in kwargs['headers'].items()])
        kwargs['agent'] = self._agent

        if isinstance(json, dict):
            data = jsonmodule.dumps(json)
            kwargs['headers']['Content-Type'] = ['application/json']
            kwargs['data'] = data

        if isinstance(data, dict):
            kwargs['data'] = data

        d = getattr(treq, method)(url, **kwargs)
        d.addCallback(IHttpResponse)
        return d

    # lets be nice to the auto completers, and don't generate that code
    def get(self, ep, **kwargs):
        return self._doRequest('get', ep, **kwargs)

    def put(self, ep, **kwargs):
        return self._doRequest('put', ep, **kwargs)

    def delete(self, ep, **kwargs):
        return self._doRequest('delete', ep, **kwargs)

    def post(self, ep, **kwargs):
        return self._doRequest('post', ep, **kwargs)
Пример #46
0
class PublishTestCase(unittest.TestCase):
    """Integration tests for publish() on the Twisted PubNub client.

    Network interactions are replayed from pre-recorded VCR cassettes
    (pn_vcr), so the tests do not hit the live PubNub service.
    """

    def setUp(self):
        # Non-persistent pool so each test uses fresh connections.
        self.pool = HTTPConnectionPool(reactor, persistent=False)
        self.pubnub = PubNubTwisted(pnconf, reactor=reactor, pool=self.pool)

    def tearDown(self):
        # Returning the deferred makes trial wait for connections to close.
        return self.pool.closeCachedConnections()

    # for async
    def error_envelope_asserter(self, expected_err_msg):
        """Build a callback asserting an envelope carries *expected_err_msg*."""
        def assert_error_message(envelope):
            assert envelope.status.error_data.information == expected_err_msg

        return assert_error_message

    def assert_client_error(self, publish, message):
        """Assert that sending *publish* raises a client-side PubNubException
        whose message contains *message*."""
        try:
            publish.deferred()
        except PubNubException as exception:
            self.assertTrue(message in exception.message)
        else:
            self.fail('Expected PubNubException not raised')

    def assert_client_side_error(self, envelope, expected_err_msg):
        assert envelope.status.error_data.information == expected_err_msg

    def assert_valid_publish_envelope(self, envelope):
        # A successful publish yields a wrapped result with a real timetoken.
        assert isinstance(envelope, TwistedEnvelope)
        assert isinstance(envelope.result, PNPublishResult)
        assert isinstance(envelope.status, PNStatus)
        assert envelope.result.timetoken > 0

    @inlineCallbacks
    def deferred(self, event):
        """Await *event*'s deferred and return the resulting envelope."""
        envelope = yield event.deferred()
        returnValue(envelope)

    @inlineCallbacks
    def assert_success_publish_get(self, message, meta=None):
        publish = self.pubnub.publish().channel(channel).message(message).meta(meta)
        envelope = yield self.deferred(publish)
        self.assert_valid_publish_envelope(envelope)
        returnValue(envelope)

    @inlineCallbacks
    def assert_success_encrypted_publish_get(self, message):
        # Uses a client configured with a cipher key for encrypted publishes.
        pubnub = PubNubTwisted(pnconf_enc_copy())
        publish = pubnub.publish().channel(channel).message(message)
        envelope = yield self.deferred(publish)
        self.assert_valid_publish_envelope(envelope)
        returnValue(envelope)

    @inlineCallbacks
    def assert_success_publish_post(self, message):
        publish = self.pubnub.publish().channel(channel).message(message).use_post(True)
        envelope = yield self.deferred(publish)
        self.assert_valid_publish_envelope(envelope)
        returnValue(envelope)

    @inlineCallbacks
    @pn_vcr.use_cassette(
        'tests/integrational/fixtures/twisted/publish/mixed_via_get.yaml',
        filter_query_parameters=['uuid', 'seqn'])
    def test_publish_mixed_via_get(self):
        d0 = yield self.assert_success_publish_get("hi")
        d1 = yield self.assert_success_publish_get(5)
        d2 = yield self.assert_success_publish_get(True)
        d3 = yield self.assert_success_publish_get(["hi", "hi2", "hi3"])
        returnValue([d0, d1, d2, d3])

    @inlineCallbacks
    @pn_vcr.use_cassette(
        'tests/integrational/fixtures/twisted/publish/mixed_encrypted_via_get.yaml',
        filter_query_parameters=['uuid', 'seqn'])
    def test_publish_mixed_encrypted_via_get(self):
        d0 = yield self.assert_success_encrypted_publish_get("hi")
        d1 = yield self.assert_success_encrypted_publish_get(5)
        d2 = yield self.assert_success_encrypted_publish_get(True)
        d3 = yield self.assert_success_encrypted_publish_get(["hi", "hi2", "hi3"])
        returnValue([d0, d1, d2, d3])

    # TODO: uncomment this when vcr for post is fixed
    # @inlineCallbacks
    # @pn_vcr.use_cassette(
    #     'tests/integrational/fixtures/twisted/publish/mixed_via_post.yaml',
    #     filter_query_parameters=['uuid', 'seqn'])
    # def test_publish_mixed_via_post(self):
    #     d0 = yield self.assert_success_publish_post("hi")
    #     d1 = yield self.assert_success_publish_post(5)
    #     d2 = yield self.assert_success_publish_post(True)
    #     d3 = yield self.assert_success_publish_post(["hi", "hi2", "hi3"])
    #     returnValue([d0, d1, d2, d3])

    @inlineCallbacks
    @pn_vcr.use_cassette(
        'tests/integrational/fixtures/twisted/publish/object_via_get.yaml',
        filter_query_parameters=['uuid', 'seqn'])
    def test_publish_object_via_get(self):
        d0 = yield self.assert_success_publish_get({"one": 2, "three": True})
        returnValue(d0)

    def test_error_missing_message(self):
        self.assert_client_error(
            self.pubnub.publish().channel(channel).message(None),
            PNERR_MESSAGE_MISSING
        )

    def test_error_missing_channel(self):
        self.assert_client_error(
            self.pubnub.publish().channel('').message('whatever'),
            PNERR_CHANNEL_MISSING
        )

    @inlineCallbacks
    @pn_vcr.use_cassette(
        'tests/integrational/fixtures/twisted/publish/invalid_key.yaml',
        filter_query_parameters=['uuid', 'seqn'])
    def test_error_invalid_key(self):
        # A bogus publish key must surface as a server-side 400 error.
        conf = PNConfiguration()
        conf.publish_key = "fake"
        conf.subscribe_key = "demo"
        pubnub = PubNubTwisted(conf)
        with pytest.raises(PubNubTwistedException) as exception:
            yield pubnub.publish().channel(channel).message("hey").deferred()

        self.assertEqual(exception.value.status.error_data.information,
                         "HTTP Client Error (400): [0, u'Invalid Key', u'14767989321048626']")

    @inlineCallbacks
    @pn_vcr.use_cassette(
        'tests/integrational/fixtures/twisted/publish/forbidden.yaml',
        filter_query_parameters=['uuid', 'seqn', 'timestamp', 'signature'])
    def test_error_forbidden(self):
        # Publishing on a channel without PAM permission must yield a 403.
        pubnub = PubNubTwisted(pnconf_pam_copy())
        with pytest.raises(PubNubTwistedException) as exception:
            yield pubnub.publish().channel("not_permitted_channel").message("hey").deferred()

        self.assertEqual(exception.value.status.error_data.information,
                         "HTTP Client Error (403): {u'status': 403, u'message': u'Forbidden', u'payload':"
                         " {u'channels': [u'not_permitted_channel']}, u'service': u'Access Manager', u'error': True}")

    @inlineCallbacks
    @pn_vcr.use_cassette(
        'tests/integrational/fixtures/twisted/publish/meta_object.yaml',
        filter_query_parameters=['uuid', 'seqn'],
        match_on=['host', 'method', 'path', 'meta_object_in_query'])
    def test_publish_with_meta(self):
        yield self.assert_success_publish_get('hi', {'a': 2, 'b': True})

    @inlineCallbacks
    @pn_vcr.use_cassette(
        'tests/integrational/fixtures/twisted/publish/do_not_store.yaml',
        filter_query_parameters=['uuid', 'seqn'])
    def test_publish_do_not_store(self):
        publish = self.pubnub.publish().channel(channel).message('whatever').should_store(False)
        envelope = yield self.deferred(publish)
        self.assert_valid_publish_envelope(envelope)
        returnValue(envelope)
Пример #47
0
class TreqIntegrationTests(TestCase):
    """Integration tests exercising treq's HTTP verb helpers against httpbin."""

    # Base URL every relative path below is resolved against.
    baseurl = HTTPBIN_URL
    # Verb helpers wrapped so they prepend `baseurl` to the given path
    # (presumably via with_baseurl — defined elsewhere in this file).
    get = with_baseurl(treq.get)
    head = with_baseurl(treq.head)
    post = with_baseurl(treq.post)
    put = with_baseurl(treq.put)
    patch = with_baseurl(treq.patch)
    delete = with_baseurl(treq.delete)

    def setUp(self):
        # A fresh, non-persistent connection pool per test keeps tests isolated.
        self.pool = HTTPConnectionPool(reactor, persistent=False)

    def tearDown(self):
        """Close pooled connections and wait until all client FDs are gone.

        closeCachedConnections alone is sufficient for plain HTTP, but HTTPS
        transports can linger in the reactor briefly, so we poll until no
        ``Client`` file descriptor remains registered.
        """
        def _check_fds(_):
            # Bug fix: the original summed getReaders() with itself, so
            # writer-only client FDs were never waited for. Scan both sets.
            fds = set(reactor.getReaders() + reactor.getWriters())
            if not [fd for fd in fds if isinstance(fd, Client)]:
                return

            # Client FDs are still registered; re-check on the next reactor turn.
            return deferLater(reactor, 0, _check_fds, None)

        return self.pool.closeCachedConnections().addBoth(_check_fds)

    @inlineCallbacks
    def assert_data(self, response, expected_data):
        # httpbin echoes the request body under the 'data' key.
        payload = yield treq.json_content(response)
        self.assertIn('data', payload)
        self.assertEqual(payload['data'], expected_data)

    @inlineCallbacks
    def assert_sent_header(self, response, header, expected_value):
        # httpbin echoes the request headers under the 'headers' key.
        payload = yield treq.json_content(response)
        self.assertIn(header, payload['headers'])
        self.assertEqual(payload['headers'][header], expected_value)

    @inlineCallbacks
    def test_get(self):
        # A plain GET should return 200.
        resp = yield self.get('/get')
        self.assertEqual(resp.code, 200)
        yield print_response(resp)

    @inlineCallbacks
    def test_get_headers(self):
        # A multi-valued header is joined with ", " on the wire.
        resp = yield self.get('/get', {'X-Blah': ['Foo', 'Bar']})
        self.assertEqual(resp.code, 200)
        yield self.assert_sent_header(resp, 'X-Blah', 'Foo, Bar')
        yield print_response(resp)

    @inlineCallbacks
    def test_get_302_absolute_redirect(self):
        # Redirects to an absolute URL are followed by default.
        target = '/redirect-to?url={0}/get'.format(self.baseurl)
        resp = yield self.get(target)
        self.assertEqual(resp.code, 200)
        yield print_response(resp)

    @todo_relative_redirect
    @inlineCallbacks
    def test_get_302_relative_redirect(self):
        # Relative-URL redirects should also be followed (known-todo case).
        resp = yield self.get('/relative-redirect/1')
        self.assertEqual(resp.code, 200)
        yield print_response(resp)

    @inlineCallbacks
    def test_get_302_redirect_disallowed(self):
        # With allow_redirects=False the raw 302 response is returned.
        resp = yield self.get('/redirect/1', allow_redirects=False)
        self.assertEqual(resp.code, 302)
        yield print_response(resp)

    @inlineCallbacks
    def test_head(self):
        # HEAD responses must carry no body.
        resp = yield self.head('/get')
        content = yield treq.content(resp)
        self.assertEqual('', content)
        yield print_response(resp)

    @inlineCallbacks
    def test_head_302_absolute_redirect(self):
        # HEAD follows absolute-URL redirects like GET does.
        target = '/redirect-to?url={0}/get'.format(self.baseurl)
        resp = yield self.head(target)
        self.assertEqual(resp.code, 200)
        yield print_response(resp)

    @todo_relative_redirect
    @inlineCallbacks
    def test_head_302_relative_redirect(self):
        # HEAD should follow relative-URL redirects too (known-todo case).
        resp = yield self.head('/relative-redirect/1')
        self.assertEqual(resp.code, 200)
        yield print_response(resp)

    @inlineCallbacks
    def test_head_302_redirect_disallowed(self):
        # With allow_redirects=False the raw 302 response is returned.
        resp = yield self.head('/redirect/1', allow_redirects=False)
        self.assertEqual(resp.code, 302)
        yield print_response(resp)

    @inlineCallbacks
    def test_post(self):
        # The POSTed body is echoed back by httpbin.
        resp = yield self.post('/post', 'Hello!')
        self.assertEqual(resp.code, 200)
        yield self.assert_data(resp, 'Hello!')
        yield print_response(resp)

    @inlineCallbacks
    def test_multipart_post(self):
        # A StringIO subclass with a `name` attribute so treq treats it as a
        # named file upload.
        class NamedStringIO(StringIO):
            def __init__(self, val):
                StringIO.__init__(self, val)
                self.name = "david.png"

            def read(*args, **kwargs):
                return StringIO.read(*args, **kwargs)

        resp = yield self.post('/post',
                               data={"a": "b"},
                               files={"file1": NamedStringIO("file")})
        self.assertEqual(resp.code, 200)

        payload = yield treq.json_content(resp)
        self.assertEqual('b', payload['form']['a'])
        self.assertEqual('file', payload['files']['file1'])
        yield print_response(resp)

    @inlineCallbacks
    def test_post_headers(self):
        # The Content-Type we send must be forwarded to the server unchanged.
        body = '{msg: "Hello!"}'
        resp = yield self.post(
            '/post',
            body,
            headers={'Content-Type': ['application/json']})

        self.assertEqual(resp.code, 200)
        yield self.assert_sent_header(resp, 'Content-Type',
                                      'application/json')
        yield self.assert_data(resp, body)
        yield print_response(resp)

    @inlineCallbacks
    def test_put(self):
        # Smoke test: PUT completes without error.
        resp = yield self.put('/put', data='Hello!')
        yield print_response(resp)

    @inlineCallbacks
    def test_patch(self):
        # The PATCHed body is echoed back by httpbin.
        resp = yield self.patch('/patch', data='Hello!')
        self.assertEqual(resp.code, 200)
        yield self.assert_data(resp, 'Hello!')
        yield print_response(resp)

    @inlineCallbacks
    def test_delete(self):
        # A plain DELETE should return 200.
        resp = yield self.delete('/delete')
        self.assertEqual(resp.code, 200)
        yield print_response(resp)

    @inlineCallbacks
    def test_gzip(self):
        # Gzip-compressed responses are transparently decoded.
        resp = yield self.get('/gzip')
        self.assertEqual(resp.code, 200)
        yield print_response(resp)
        payload = yield treq.json_content(resp)
        self.assertTrue(payload['gzipped'])

    @inlineCallbacks
    def test_basic_auth(self):
        # Correct credentials pass httpbin's basic-auth check.
        credentials = ('treq', 'treq')
        resp = yield self.get('/basic-auth/treq/treq', auth=credentials)
        self.assertEqual(resp.code, 200)
        yield print_response(resp)
        payload = yield treq.json_content(resp)
        self.assertTrue(payload['authenticated'])
        self.assertEqual(payload['user'], 'treq')

    @inlineCallbacks
    def test_failed_basic_auth(self):
        # Wrong credentials must yield 401 Unauthorized.
        resp = yield self.get('/basic-auth/treq/treq',
                              auth=('not-treq', 'not-treq'))
        self.assertEqual(resp.code, 401)
        yield print_response(resp)

    @inlineCallbacks
    def test_timeout(self):
        """A request that outlives its timeout must fail with cancellation."""
        pending = self.get('/delay/2', timeout=1)
        yield self.assertFailure(pending, CancelledError, ResponseFailed)