def test_fetch_from_non_set_cache(self):
    """Fetching from the cache must raise CacheError when no cache was set."""
    backend = MediaWiki(MEDIAWIKI_SERVER_URL)

    with self.assertRaises(CacheError):
        # Consuming the generator triggers the error
        list(backend.fetch_from_cache())
def _test_fetch_version(self, version, from_date=None, reviews_api=False):
    """Check that the pages, together with their reviews, are fetched."""
    HTTPServer.routes(version)

    # Fetch the pages along with their reviews
    backend = MediaWiki(MEDIAWIKI_SERVER_URL)

    if from_date:
        # Bypass the MAX_RECENT_DAYS check while testing
        backend._test_mode = True
        fetched = list(backend.fetch(from_date=from_date,
                                     reviews_api=reviews_api))
    else:
        fetched = list(backend.fetch(reviews_api=reviews_api))

    if version == "1.28" and reviews_api:
        # All name spaces together hold 2 pages
        self.assertEqual(len(fetched), 2)
    elif version == "1.23" or not reviews_api:
        if from_date:
            # Only 1 page appears in recent changes
            self.assertEqual(len(fetched), 1)
        else:
            # 5 name spaces with 2 pages each
            self.assertEqual(len(fetched), 10)

    HTTPServer.check_pages_contents(self, fetched)
def test_fetch_from_empty_cache(self):
    """No pages should be returned when the cache holds nothing."""
    cache = Cache(self.tmp_path)
    backend = MediaWiki(MEDIAWIKI_SERVER_URL, cache=cache)

    cached = list(backend.fetch_from_cache())
    self.assertEqual(len(cached), 0)
def _test_fetch_version(self, version, from_date=None):
    """Check that the pages, together with their reviews, are fetched."""
    HTTPServer.routes(version)

    # Fetch the pages along with their reviews
    backend = MediaWiki(MEDIAWIKI_SERVER_URL)

    if from_date:
        # Bypass the MAX_RECENT_DAYS check while testing
        backend._test_mode = True
        fetched = list(backend.fetch(from_date=from_date))
    else:
        fetched = list(backend.fetch())

    if version == "1.28":
        # All name spaces together hold 2 pages
        self.assertEqual(len(fetched), 2)
    elif version == "1.23":
        if from_date:
            # Only 1 page appears in recent changes
            self.assertEqual(len(fetched), 1)
        else:
            # 5 name spaces with 2 pages each
            self.assertEqual(len(fetched), 10)

    HTTPServer.check_pages_contents(self, fetched)
def _test_fetch_from_cache(self, version):
    """Ensure fetched items can be read back from the cache."""
    HTTPServer.routes(version)

    # Fetch from the server first, filling the cache on the way
    cache = Cache(self.tmp_path)
    backend = MediaWiki(MEDIAWIKI_SERVER_URL, cache=cache)
    fetched = list(backend.fetch())
    nrequests = len(HTTPServer.requests_http)

    # Read the same items back, this time from the cache
    cached = list(backend.fetch_from_cache())

    # The server must not have been contacted again
    self.assertEqual(len(HTTPServer.requests_http), nrequests)

    # Both result sets must match
    self.assertEqual(len(cached), len(fetched))

    if version == "1.28":
        # All name spaces together hold 2 pages
        self.assertEqual(len(fetched), 2)
    elif version == "1.23":
        # 5 name spaces with 2 pages each
        self.assertEqual(len(fetched), 10)

    HTTPServer.check_pages_contents(self, fetched)
def test_fetch_empty_1_28(self):
    """An empty server response must produce no pages."""
    HTTPServer.routes("1.28", empty=True)

    backend = MediaWiki(MEDIAWIKI_SERVER_URL)
    fetched = list(backend.fetch())

    self.assertEqual(len(fetched), 0)
def test_initialization(self):
    """Check that the backend attributes are set on creation."""
    backend = MediaWiki(MEDIAWIKI_SERVER_URL, origin='test')

    self.assertEqual(backend.url, MEDIAWIKI_SERVER_URL)
    self.assertEqual(backend.origin, 'test')
    self.assertIsInstance(backend.client, MediaWikiClient)

    # A missing or empty origin falls back to the url value
    for backend in (MediaWiki(MEDIAWIKI_SERVER_URL),
                    MediaWiki(MEDIAWIKI_SERVER_URL, origin='')):
        self.assertEqual(backend.url, MEDIAWIKI_SERVER_URL)
        self.assertEqual(backend.origin, MEDIAWIKI_SERVER_URL)
def _test_fetch_from_cache(self, version, reviews_api=False):
    """Test whether the cache works.

    Fetches pages from the mock server into a fresh cache, checks that
    reading them back hits the cache (no new HTTP requests) and returns
    the same contents; then runs two consecutive fetches over one cache
    and checks the cache accumulates both executions.

    :param version: MediaWiki API version the mock server emulates
    :param reviews_api: whether pages are fetched through the reviews API
    """
    HTTPServer.routes(version)

    # First, we fetch the pages from the server, storing them
    # in a cache
    shutil.rmtree(self.tmp_path)
    cache = Cache(self.tmp_path)
    mediawiki = MediaWiki(MEDIAWIKI_SERVER_URL, cache=cache)

    pages = [page for page in mediawiki.fetch(reviews_api=reviews_api)]
    requests_done = len(HTTPServer.requests_http)

    # Now, we get the pages from the cache.
    cached_pages = [page for page in mediawiki.fetch_from_cache()]

    # No new requests to the server
    self.assertEqual(len(HTTPServer.requests_http), requests_done)
    # The contents should be the same
    self.assertEqual(len(cached_pages), len(pages))

    if version == "1.28" and reviews_api:
        # 2 pages in all name spaces
        self.assertEqual(len(pages), 2)
    elif version == "1.23" or not reviews_api:
        # 2 pages per each of the 5 name spaces
        self.assertEqual(len(pages), 10)

    HTTPServer.check_pages_contents(self, pages)

    # Now let's test more than one execution in the same cache
    shutil.rmtree(self.tmp_path)
    cache = Cache(self.tmp_path)
    mediawiki = MediaWiki(MEDIAWIKI_SERVER_URL, cache=cache)

    # Both fetches matter only for their side effect of filling the
    # cache; their results are deliberately discarded (the original
    # bound them to unused locals `pages` and `pages_1`)
    for _ in mediawiki.fetch(reviews_api=reviews_api):
        pass
    for _ in mediawiki.fetch(reviews_api=reviews_api):
        pass

    cached_pages = [page for page in mediawiki.fetch_from_cache()]

    if version == "1.28" and reviews_api:
        # 2 unique pages x2 caches
        self.assertEqual(len(cached_pages), 4)
    elif version == "1.23" or not reviews_api:
        # 2 pages per each of the 5 name spaces, x2 caches
        self.assertEqual(len(cached_pages), 10 * 2)
def _test_fetch_from_cache(self, version, reviews_api=False):
    """Ensure fetched items can be read back from the cache."""
    HTTPServer.routes(version)

    # Fetch from the server into a brand-new cache
    shutil.rmtree(self.tmp_path)
    cache = Cache(self.tmp_path)
    backend = MediaWiki(MEDIAWIKI_SERVER_URL, cache=cache)

    fetched = list(backend.fetch(reviews_api=reviews_api))
    nrequests = len(HTTPServer.requests_http)

    # Read the same items back, this time from the cache
    cached = list(backend.fetch_from_cache())

    # The server must not have been contacted again
    self.assertEqual(len(HTTPServer.requests_http), nrequests)
    self.assertEqual(len(cached), len(fetched))

    if version == "1.28" and reviews_api:
        # All name spaces together hold 2 pages
        self.assertEqual(len(fetched), 2)
    elif version == "1.23" or not reviews_api:
        # 5 name spaces with 2 pages each
        self.assertEqual(len(fetched), 10)

    HTTPServer.check_pages_contents(self, fetched)

    # Run two executions over one fresh cache; the cache must
    # accumulate the pages of both runs
    shutil.rmtree(self.tmp_path)
    cache = Cache(self.tmp_path)
    backend = MediaWiki(MEDIAWIKI_SERVER_URL, cache=cache)

    pages = list(backend.fetch(reviews_api=reviews_api))
    pages_1 = list(backend.fetch(reviews_api=reviews_api))
    cached = list(backend.fetch_from_cache())

    if version == "1.28" and reviews_api:
        # 2 unique pages, doubled by the two runs
        self.assertEqual(len(cached), 4)
    elif version == "1.23" or not reviews_api:
        # 10 pages, doubled by the two runs
        self.assertEqual(len(cached), 10 * 2)
def test_has_resuming(self):
    """has_resuming must report that resuming is not supported."""
    supported = MediaWiki.has_resuming()
    self.assertEqual(supported, False)
def test_has_caching(self):
    """has_caching must report that caching is supported."""
    supported = MediaWiki.has_caching()
    self.assertEqual(supported, True)