Example #1
Configures WikipediaAPI with api_timeout=0 so the request times out immediately, then asserts that get_extract() fails with HttpTimeoutError.
def test_api_timeout(self):
    self.wikipedia = WikipediaAPI(self.fake_api.url, False, api_timeout=0)
    return self.assertFailure(
        self.wikipedia.get_extract('Cthulhu'), HttpTimeoutError)
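This test leans on trial's assertFailure, which returns a Deferred that succeeds only if the wrapped Deferred fails with one of the given exception types. Here is a minimal self-contained sketch of the same pattern, independent of the vumi-wikipedia code:

from twisted.internet import defer
from twisted.trial import unittest

class AssertFailureExample(unittest.TestCase):
    def test_expected_failure(self):
        # A Deferred that has already failed, standing in for a timed-out
        # HTTP request.
        d = defer.fail(ValueError('simulated timeout'))
        # assertFailure succeeds only if d fails with ValueError; returning
        # its Deferred makes trial wait for the outcome before passing.
        return self.assertFailure(d, ValueError)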
Example #2
Builds a WikipediaAPI from a config object, converting the parsed api_url back to a string and passing the gzip flag, user agent, and API timeout.
def get_wikipedia_api(self, config):
    return WikipediaAPI(
        config.api_url.geturl(), config.accept_gzip, config.user_agent,
        api_timeout=config.api_timeout)
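Across these call sites the constructor takes a URL string, a gzip flag, an optional user agent, and an api_timeout keyword argument. The sketch below is inferred from those calls, not taken from the library; every name except api_timeout (and the USER_AGENT attribute used in Example #4) is an assumption:

class WikipediaAPI(object):
    # Inferred shape only; parameter names other than api_timeout and all
    # default values here are assumptions.
    USER_AGENT = 'vumi-wikipedia demo agent'  # placeholder value

    def __init__(self, url, gzip=False, user_agent=None, api_timeout=30):
        self.url = url
        self.gzip = gzip
        self.user_agent = user_agent or self.USER_AGENT
        self.api_timeout = api_timeout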
Example #3
Test fixture: starts a fake webserver loaded with canned WIKIPEDIA_RESPONSES and points a WikipediaAPI client at it.
def setUp(self):
    self.fake_api = self.start_webserver(WIKIPEDIA_RESPONSES)
    self.wikipedia = WikipediaAPI(self.fake_api.url, False)
Example #4
Exercises User-Agent handling: first the default USER_AGENT, then a custom 'Bob Howard' agent. Because the test yields Deferreds, it needs the @inlineCallbacks decorator, as in the full test case in Example #5.
@inlineCallbacks
def test_user_agent(self):
    self.expected_user_agent = self.wikipedia.USER_AGENT
    yield self.wikipedia.get_extract('Cthulhu')
    self.wikipedia = WikipediaAPI(self.fake_api.url, False, 'Bob Howard')
    self.expected_user_agent = 'Bob Howard'
    yield self.wikipedia.get_extract('Cthulhu')
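The yields above only work under @inlineCallbacks, which suspends the generator at each yield until the Deferred fires, so the two requests run strictly in sequence. A standalone illustration of that control flow:

from twisted.internet import defer
from twisted.internet.defer import inlineCallbacks
from twisted.trial import unittest

class InlineCallbacksExample(unittest.TestCase):
    @inlineCallbacks
    def test_sequential_yields(self):
        # Each yield resumes only once its Deferred has a result, so the
        # second "request" cannot start before the first completes.
        first = yield defer.succeed('default agent')
        second = yield defer.succeed('Bob Howard')
        self.assertEqual((first, second), ('default agent', 'Bob Howard'))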
Example #5
The complete WikipediaAPITestCase: search (success, custom backend, errors, bad responses, empty results), article extracts, user-agent handling, timeouts, and redirect following, all against the fake webserver.
# Imports needed by this test module. The twisted and vumi paths are
# standard; the vumi_wikipedia helper paths below are assumptions based on
# the project layout and may need adjusting.
from twisted.internet.defer import inlineCallbacks

from vumi.tests.helpers import VumiTestCase

from vumi_wikipedia.wikipedia_api import (
    APIError, HttpTimeoutError, WikipediaAPI)
from vumi_wikipedia.tests.utils import (
    FakeHTTPTestCaseMixin, WIKIPEDIA_RESPONSES)


class WikipediaAPITestCase(VumiTestCase, FakeHTTPTestCaseMixin):
    def setUp(self):
        self.fake_api = self.start_webserver(WIKIPEDIA_RESPONSES)
        self.wikipedia = WikipediaAPI(self.fake_api.url, False)

    def assert_api_result(self, api_result_d, expected):
        return api_result_d.addCallback(self.assertEqual, expected)

    @inlineCallbacks
    def test_search_success(self):
        yield self.assert_api_result(
            self.wikipedia.search('wikipedia', limit=3),
            [u'Wikipedia', u'Wikip\xe9dia', u'Main Page'])
        # And again with a different request and result limit
        yield self.assert_api_result(
            self.wikipedia.search('vumi', limit=2),
            [u'Arambagh Utsab', u'Vulpia microstachys'])

    @inlineCallbacks
    def test_search_custom_backend(self):
        yield self.assert_api_result(
            self.wikipedia.search('wikipedia', limit=3,
                                  backend='CirrusSearch'),
            [u'Wikipedia', u'Wikip\xe9dia', u'English Wikipedia'])

    def test_search_error(self):
        return self.assertFailure(self.wikipedia.search('.'), APIError)

    @inlineCallbacks
    def test_bad_response(self):
        yield self.assertFailure(self.wikipedia.search('notjson'), APIError)
        self.flushLoggedErrors()

    def test_search_no_results(self):
        return self.assert_api_result(
            self.wikipedia.search('ncdkiuagdqpowebjkcs'), [])

    def test_get_extract(self):
        def assert_extract(extract):
            self.assertEqual(4, len(extract.sections))

        return self.wikipedia.get_extract('Cthulhu').addCallback(
            assert_extract)

    @inlineCallbacks
    def test_user_agent(self):
        self.expected_user_agent = self.wikipedia.USER_AGENT
        yield self.wikipedia.get_extract('Cthulhu')
        self.wikipedia = WikipediaAPI(self.fake_api.url, False, 'Bob Howard')
        self.expected_user_agent = 'Bob Howard'
        yield self.wikipedia.get_extract('Cthulhu')

    def test_api_timeout(self):
        self.wikipedia = WikipediaAPI(self.fake_api.url, False, api_timeout=0)
        return self.assertFailure(
            self.wikipedia.get_extract('Cthulhu'), HttpTimeoutError)

    @inlineCallbacks
    def test_redirect(self):
        """
        If we get a 301 response, we must correctly redirect.
        """
        # The redirect magic here is ugly and hacky, but making it cleaner
        # would require rewriting most of the fake HTTP stuff.
        self.check_redirect = []

        # Test with a search.
        yield self.assert_api_result(
            self.wikipedia.search('wikipedia', limit=3),
            [u'Wikipedia', u'Wikip\xe9dia', u'Main Page'])
        self.assertEqual(len(self.check_redirect), 1)

        # Test with an article extract.
        extract = yield self.wikipedia.get_extract('Cthulhu')
        self.assertEqual(4, len(extract.sections))
        self.assertEqual(len(self.check_redirect), 2)
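The assert_api_result helper works because trial treats a returned Deferred as the test's outcome: chaining assertEqual as a callback postpones the assertion until the API result arrives, and an assertion failure propagates as an errback that fails the test. The same pattern in isolation:

from twisted.internet import defer
from twisted.trial import unittest

class CallbackAssertionExample(unittest.TestCase):
    def test_assert_via_callback(self):
        # Stands in for a search call that eventually yields a result list.
        api_result_d = defer.succeed([u'Wikipedia', u'Main Page'])
        # addCallback(self.assertEqual, expected) calls
        # assertEqual(result, expected) once the Deferred fires.
        return api_result_d.addCallback(
            self.assertEqual, [u'Wikipedia', u'Main Page'])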