def test_store_and_retrieve(self):
    """Items pushed into the cache must come back unchanged and in order."""
    expected = ['a', 'b', 'c']
    cache_path = os.path.join(self.test_path, CACHE_DIR)
    cache = Cache(cache_path)
    cache.store(*expected)
    contents = list(cache.retrieve())
    self.assertListEqual(contents, expected)
def test_backup(self):
    """Backup must mirror every stored item file into the recovery area."""
    items = [1, 2, 3, 4, 5]
    cache_path = os.path.join(self.test_path, CACHE_DIR)
    cache = Cache(cache_path)
    cache.store(*items)
    cache.backup()
    expected = list(os.listdir(cache.items_path))
    rfiles = list(os.listdir(cache.recovery_path))
    self.assertEqual(rfiles, expected)
    # Guard against a vacuous pass with zero files on both sides
    self.assertNotEqual(len(rfiles), 0)
def test_fetch_from_empty_cache(self):
    """An empty cache must yield no articles."""
    cache = Cache(self.tmp_path)
    nntp = NNTP(NNTP_SERVER, NNTP_GROUP, cache=cache)
    cached_articles = list(nntp.fetch_from_cache())
    self.assertEqual(len(cached_articles), 0)
def test_fetch_from_empty_cache(self):
    """An empty cache must yield no questions."""
    cache = Cache(self.tmp_path)
    kitsune = Kitsune(KITSUNE_SERVER_URL, cache=cache)
    cached_questions = list(kitsune.fetch_from_cache())
    self.assertEqual(len(cached_questions), 0)
def test_fetch_from_cache(self):
    """Questions read back from the cache must match the live fetch."""
    HTTPServer.routes()

    # Fetch from the server first, populating the cache as a side effect
    cache = Cache(self.tmp_path)
    kitsune = Kitsune(KITSUNE_SERVER_URL, cache=cache)
    questions = list(kitsune.fetch())

    requests_done = len(HTTPServer.requests_http)

    # Reading from the cache must return the same contents without
    # issuing any new HTTP request
    cached_questions = list(kitsune.fetch_from_cache())

    # No new requests to the server
    self.assertEqual(len(HTTPServer.requests_http), requests_done)

    # The contents should be the same
    self.assertEqual(len(cached_questions), len(questions))
    for cached, fetched in zip(cached_questions, questions):
        self.assertDictEqual(cached['data'], fetched['data'])
def test_fetch_from_empty_cache(self):
    """An empty cache must yield no events."""
    cache = Cache(self.tmp_path)
    dockerhub = DockerHub('grimoirelab', 'perceval', cache=cache)
    cached_items = list(dockerhub.fetch_from_cache())
    self.assertEqual(len(cached_items), 0)
def test_fetch_from_empty_cache(self):
    """An empty cache must yield no events."""
    cache = Cache(self.tmp_path)
    mozillaclub = MozillaClub(MozillaClub_FEED_URL, cache=cache)
    cached_events = list(mozillaclub.fetch_from_cache())
    self.assertEqual(len(cached_events), 0)
def _test_fetch_from_cache(self, version):
    """Pages read back from the cache must match the live fetch.

    `version` selects the MediaWiki API flavour the mock server exposes.
    """
    HTTPServer.routes(version)

    # Fetch from the server first, populating the cache as a side effect
    cache = Cache(self.tmp_path)
    mediawiki = MediaWiki(MEDIAWIKI_SERVER_URL, cache=cache)
    pages = list(mediawiki.fetch())

    requests_done = len(HTTPServer.requests_http)

    # Now read the pages back from the cache
    cached_pages = list(mediawiki.fetch_from_cache())

    # No new requests to the server
    self.assertEqual(len(HTTPServer.requests_http), requests_done)

    # The contents should be the same
    self.assertEqual(len(cached_pages), len(pages))
    if version == "1.28":
        # 2 pages in all name spaces
        self.assertEqual(len(pages), 2)
    elif version == "1.23":
        # 2 pages per each of the 5 name spaces
        self.assertEqual(len(pages), 10)
    HTTPServer.check_pages_contents(self, pages)
def __test_fetch_from_cache(self, category):
    """Items read back from the cache must match the live fetch.

    `category` selects which kind of ReMo item to fetch.
    """
    HTTPServer.routes()

    # Fetch from the server first, populating the cache as a side effect
    cache = Cache(self.tmp_path)
    remo = ReMo(MOZILLA_REPS_SERVER_URL, cache=cache)
    items = list(remo.fetch(category=category))

    requests_done = len(HTTPServer.requests_http)

    # Reading from the cache must return the same contents without
    # issuing any new HTTP request
    cached_items = list(remo.fetch_from_cache())

    # No new requests to the server
    self.assertEqual(len(HTTPServer.requests_http), requests_done)

    # The contents should be the same
    self.assertEqual(len(cached_items), len(items))
    for cached, fetched in zip(cached_items, items):
        self.assertDictEqual(cached['data'], fetched['data'])
        self.assertEqual(cached['offset'], fetched['offset'])
def test_fetch_from_empty_cache(self):
    """An empty cache must yield no builds."""
    cache = Cache(self.tmp_path)
    jenkins = Jenkins(JENKINS_SERVER_URL, cache=cache)
    cached_builds = list(jenkins.fetch_from_cache())
    self.assertEqual(len(cached_builds), 0)
def test_fetch_from_empty_cache(self):
    """An empty cache must yield no messages."""
    cache = Cache(self.tmp_path)
    slack = Slack('C011DUKE8', 'aaaa', max_items=5, cache=cache)
    cached_messages = list(slack.fetch_from_cache())
    self.assertEqual(len(cached_messages), 0)
def test_fetch_from_cache(self, mock_utcnow):
    """Messages read back from the cache must match the live fetch."""
    mock_utcnow.return_value = datetime.datetime(2017, 1, 1,
                                                 tzinfo=dateutil.tz.tzutc())
    http_requests = setup_http_server()

    # Fetch from the server first, populating the cache as a side effect
    cache = Cache(self.tmp_path)
    slack = Slack('C011DUKE8', 'aaaa', max_items=5, cache=cache)
    messages = list(slack.fetch())

    self.assertEqual(len(http_requests), 5)

    # Reading from the cache must return the same events without
    # issuing any new HTTP request
    cached_messages = list(slack.fetch_from_cache())
    self.assertEqual(len(cached_messages), len(messages))

    # (text, uuid, updated_on, author email) per message, newest first
    expected = [
        ("<@U0003|dizquierdo> has joined the channel",
         'bb95a1facf7d61baaf57322f3d6b6d2d45af8aeb',
         1427799888.0, '*****@*****.**'),
        ("tengo el m\u00f3vil",
         'f8668de6fadeb5730e0a80d4c8e5d3f8d175f4d5',
         1427135890.000071, '*****@*****.**'),
        ("hey acs",
         '29c2942a704c4e0b067daeb76edb2f826376cecf',
         1427135835.000070, '*****@*****.**'),
        ("¿vale?",
         '757e88ea008db0fff739dd261179219aedb84a95',
         1427135740.000069, '*****@*****.**'),
        ("jsmanrique: tenemos que dar m\u00e9tricas super chulas",
         'e92555381bc431a53c0b594fc118850eafd6e212',
         1427135733.000068, '*****@*****.**'),
        ("hi!",
         'b92892e7b65add0e83d0839de20b2375a42014e8',
         1427135689.000067, '*****@*****.**'),
        ("hi!",
         'e59d9ca0d9a2ba1c747dc60a0904edd22d69e20e',
         1427135634.000066, '*****@*****.**')
    ]

    self.assertEqual(len(cached_messages), len(expected))

    for cmessage, expc, fetched in zip(cached_messages, expected, messages):
        self.assertEqual(cmessage['data']['text'], expc[0])
        self.assertEqual(cmessage['uuid'], expc[1])
        self.assertEqual(cmessage['origin'], 'https://slack.com/C011DUKE8')
        self.assertEqual(cmessage['updated_on'], expc[2])
        self.assertEqual(cmessage['category'], 'message')
        self.assertEqual(cmessage['tag'], 'https://slack.com/C011DUKE8')
        self.assertEqual(cmessage['data']['user_data']['profile']['email'],
                         expc[3])

        # Compare cached and fetched message
        self.assertDictEqual(cmessage['data'], fetched['data'])

    # No more requests were sent
    self.assertEqual(len(http_requests), 5)
def test_fetch_from_empty_cache(self):
    """An empty cache must yield no entries."""
    cache = Cache(self.tmp_path)
    rss = RSS(RSS_FEED_URL, cache=cache)
    cached_entries = list(rss.fetch_from_cache())
    self.assertEqual(len(cached_entries), 0)
def test_fetch_from_empty_cache(self):
    """An empty cache must yield no tasks."""
    cache = Cache(self.tmp_path)
    phab = Phabricator(PHABRICATOR_URL, 'AAAA', cache=cache)
    cached_tasks = list(phab.fetch_from_cache())
    self.assertEqual(len(cached_tasks), 0)
def test_fetch_from_cache(self):
    """Questions read back from the cache must match the live fetch."""
    question = read_file('data/stackexchange_question')
    httpretty.register_uri(httpretty.GET,
                           STACKEXCHANGE_QUESTIONS_URL,
                           body=question,
                           status=200)

    # Fetch from the server first, populating the cache as a side effect
    cache = Cache(self.tmp_path)
    stack = StackExchange(site="stackoverflow", tagged="python",
                          api_token="aaa", max_questions=1,
                          cache=cache)
    questions = list(stack.fetch(from_date=None))
    # Timestamps differ between runs, so drop them before comparing
    del questions[0]['timestamp']

    # Reading from the cache must return the same contents without
    # issuing any new HTTP request
    cache_questions = list(stack.fetch_from_cache())
    del cache_questions[0]['timestamp']

    self.assertEqual(cache_questions, questions)
def test_fetch_from_empty_cache(self):
    """An empty cache must yield no events."""
    cache = Cache(self.tmp_path)
    remo = ReMo(MOZILLA_REPS_SERVER_URL, cache=cache)
    cached_events = list(remo.fetch_from_cache())
    self.assertEqual(len(cached_events), 0)
def test_fetch_from_empty_cache(self):
    """An empty cache must yield no historical contents."""
    cache = Cache(self.tmp_path)
    confluence = Confluence(CONFLUENCE_URL, cache=cache)
    cached_hcs = list(confluence.fetch_from_cache())
    self.assertEqual(len(cached_hcs), 0)
def test_fetch_from_empty_cache(self):
    """An empty cache must yield no events."""
    cache = Cache(self.tmp_path)
    meetup = Meetup('sqlpass-es', 'aaaa', max_items=2, cache=cache)
    cached_events = list(meetup.fetch_from_cache())
    self.assertEqual(len(cached_events), 0)
def test_fetch_from_empty_cache(self):
    """An empty cache must yield no messages."""
    cache = Cache(self.tmp_path)
    tlg = Telegram(TELEGRAM_BOT, TELEGRAM_TOKEN, cache=cache)
    cached_messages = list(tlg.fetch_from_cache())
    self.assertEqual(len(cached_messages), 0)
def test_fetch_from_empty_cache(self):
    """An empty cache must yield no topics."""
    cache = Cache(self.tmp_path)
    discourse = Discourse(DISCOURSE_SERVER_URL, cache=cache)
    cached_topics = list(discourse.fetch_from_cache())
    self.assertEqual(len(cached_topics), 0)
def test_fetch_from_empty_cache(self):
    """An empty cache must yield no issues."""
    cache = Cache(self.tmp_path)
    redmine = Redmine(REDMINE_URL, api_token='AAAA', cache=cache)
    cached_issues = list(redmine.fetch_from_cache())
    self.assertEqual(len(cached_issues), 0)
def test_fetch_from_empty_cache(self):
    """An empty cache must yield no pages."""
    cache = Cache(self.tmp_path)
    mediawiki = MediaWiki(MEDIAWIKI_SERVER_URL, cache=cache)
    cached_pages = list(mediawiki.fetch_from_cache())
    self.assertEqual(len(cached_pages), 0)
def test_fetch_from_empty_cache(self):
    """An empty cache must yield no bugs."""
    cache = Cache(self.tmp_path)
    bg = BugzillaREST(BUGZILLA_SERVER_URL, cache=cache)
    cached_bugs = list(bg.fetch_from_cache())
    self.assertEqual(len(cached_bugs), 0)
def test_fetch_from_empty_cache(self):
    """An empty cache must yield no issues."""
    cache = Cache(self.tmp_path)
    github = GitHub("zhquan_example", "repo", "aaa", cache=cache)
    cache_issues = list(github.fetch_from_cache())
    self.assertEqual(len(cache_issues), 0)
def test_fetch_from_cache(self):
    """Tasks read back from the cache must match the live fetch."""
    http_requests = setup_http_server()

    # Fetch from the server first, populating the cache as a side effect
    cache = Cache(self.tmp_path)
    phab = Phabricator(PHABRICATOR_URL, 'AAAA', cache=cache)
    tasks = list(phab.fetch())

    self.assertEqual(len(http_requests), 12)

    # Reading from the cache must return the same tasks without
    # issuing any new HTTP request
    cached_tasks = list(phab.fetch_from_cache())
    self.assertEqual(len(cached_tasks), len(tasks))

    # (id, #transactions, #projects, author, owner, uuid, updated_on)
    expected = [(69, 16, 0, 'jdoe', 'jdoe',
                 '1b4c15d26068efcae83cd920bcada6003d2c4a6c', 1462306027.0),
                (73, 20, 0, 'jdoe', 'janesmith',
                 '5487fc704f2d3c4e83ab0cd065512a181c1726cc', 1462464642.0),
                (78, 17, 0, 'jdoe', None,
                 'fa971157c4d0155652f94b673866abd83b929b27', 1462792338.0),
                (296, 18, 2, 'jane', 'jrae',
                 'e8fa3e4a4381d6fea3bcf5c848f599b87e7dc4a6', 1467196707.0)]

    self.assertEqual(len(cached_tasks), len(expected))

    for task, expc, fetched in zip(cached_tasks, expected, tasks):
        self.assertEqual(task['data']['id'], expc[0])
        self.assertEqual(len(task['data']['transactions']), expc[1])
        self.assertEqual(len(task['data']['projects']), expc[2])
        self.assertEqual(task['data']['fields']['authorData']['userName'],
                         expc[3])

        # Check owner data; when it is null owner is not included
        if not expc[4]:
            self.assertNotIn('ownerData', task['data']['fields'])
        else:
            self.assertEqual(
                task['data']['fields']['ownerData']['userName'],
                expc[4])

        self.assertEqual(task['uuid'], expc[5])
        self.assertEqual(task['origin'], PHABRICATOR_URL)
        self.assertEqual(task['updated_on'], expc[6])
        self.assertEqual(task['category'], 'task')
        self.assertEqual(task['tag'], PHABRICATOR_URL)

        # Compare cached and fetched task
        self.assertDictEqual(task['data'], fetched['data'])

    # No more requests were sent
    self.assertEqual(len(http_requests), 12)
def test_fetch_from_cache_no_entries(self):
    """Cache round-trip must work when activities, attachments and
    messages contain no JSON-like data."""
    issues_page_1 = read_file(
        'data/launchpad/launchpad_issues_page_1_no_entries')
    issue_1 = read_file('data/launchpad/launchpad_issue_1_no_entries')
    issue_1_comments = read_file(
        'data/launchpad/launchpad_issue_1_comments_no_entries')
    issue_1_attachments = read_file(
        'data/launchpad/launchpad_issue_1_attachments_no_entries')
    issue_1_activities = read_file(
        'data/launchpad/launchpad_issue_1_activities_no_entries')

    # Task search; comments/attachments/activity endpoints answer 410
    httpretty.register_uri(
        httpretty.GET,
        LAUNCHPAD_PACKAGE_PROJECT_URL +
        "?modified_since=1970-01-01T00%3A00%3A00%2B00%3A00&ws.op=searchTasks"
        "&omit_duplicates=false&order_by=date_last_updated&status=Confirmed&status=Expired"
        "&status=Fix+Committed&status=Fix+Released"
        "&status=In+Progress&status=Incomplete&status=Incomplete+%28with+response%29"
        "&status=Incomplete+%28without+response%29"
        "&status=Invalid&status=New&status=Opinion&status=Triaged"
        "&status=Won%27t+Fix"
        "&ws.size=1",
        body=issues_page_1,
        status=200)
    httpretty.register_uri(httpretty.GET,
                           LAUNCHPAD_API_URL + "/bugs/1",
                           body=issue_1,
                           status=200)
    httpretty.register_uri(httpretty.GET,
                           LAUNCHPAD_API_URL + "/bugs/1/messages",
                           body=issue_1_comments,
                           status=410)
    httpretty.register_uri(httpretty.GET,
                           LAUNCHPAD_API_URL + "/bugs/1/attachments",
                           body=issue_1_attachments,
                           status=410)
    httpretty.register_uri(httpretty.GET,
                           LAUNCHPAD_API_URL + "/bugs/1/activity",
                           body=issue_1_activities,
                           status=410)

    cache = Cache(self.tmp_path)
    launchpad = Launchpad('mydistribution',
                          consumer_key=CONSUMER_KEY,
                          api_token=OAUTH_TOKEN,
                          package='mypackage',
                          items_per_page=2,
                          cache=cache)

    issues = list(launchpad.fetch())
    issues_from_cache = list(launchpad.fetch_from_cache())

    self.assertDictEqual(issues[0]['data'], issues_from_cache[0]['data'])
def test_fetch_from_cache_empty(self):
    """An empty cache must yield no issues."""
    cache = Cache(self.tmp_path)
    jira = Jira(JIRA_SERVER_URL, cache=cache)
    cache_issues = list(jira.fetch_from_cache())
    self.assertEqual(len(cache_issues), 0)
def test_fetch_from_empty_cache(self):
    """An empty cache must yield no questions."""
    cache = Cache(self.tmp_path)
    stack = StackExchange(site="stackoverflow", tagged="python",
                          token="aaa", max_questions=1, cache=cache)
    cache_questions = list(stack.fetch_from_cache())
    self.assertEqual(len(cache_questions), 0)
def test_fetch_from_cache(self):
    """Bugs read back from the cache must match the live fetch."""
    http_requests = setup_http_server()

    # Fetch from the server first, populating the cache as a side effect
    cache = Cache(self.tmp_path)
    bg = BugzillaREST(BUGZILLA_SERVER_URL, max_bugs=2, cache=cache)
    bugs = list(bg.fetch())

    self.assertEqual(len(http_requests), 9)

    # Reading from the cache must return the same contents without
    # issuing any new HTTP request
    cached_bugs = list(bg.fetch_from_cache())
    self.assertEqual(len(cached_bugs), len(bugs))
    self.assertEqual(len(cached_bugs), 3)

    # (id, #comments, #history, #attachments, uuid, updated_on)
    expected = [(1273442, 7, 6, 1,
                 '68494ad0072ed9e09cecb8235649a38c443326db', 1465257689.0),
                (1273439, 0, 0, 0,
                 'd306162de06bc759f9bd9227fe3fd5f08aeb0dde', 1465257715.0),
                (947945, 0, 0, 0,
                 '33edda925351c3310fc3e12d7f18a365c365f6bd', 1465257743.0)]

    for bug, expc in zip(bugs, expected):
        self.assertEqual(bug['data']['id'], expc[0])
        self.assertEqual(len(bug['data']['comments']), expc[1])
        self.assertEqual(len(bug['data']['history']), expc[2])
        self.assertEqual(len(bug['data']['attachments']), expc[3])
        self.assertEqual(bug['origin'], BUGZILLA_SERVER_URL)
        self.assertEqual(bug['uuid'], expc[4])
        self.assertEqual(bug['updated_on'], expc[5])
        self.assertEqual(bug['category'], 'bug')
        self.assertEqual(bug['tag'], BUGZILLA_SERVER_URL)

    # No more requests were sent
    self.assertEqual(len(http_requests), 9)
def _test_fetch_from_cache(self, version, reviews_api=False):
    """Pages read back from the cache must match the live fetch.

    `version` selects the MediaWiki API flavour; `reviews_api` toggles
    the reviews-based fetching path.
    """
    HTTPServer.routes(version)

    # Fetch from the server first, populating a fresh cache
    shutil.rmtree(self.tmp_path)
    cache = Cache(self.tmp_path)
    mediawiki = MediaWiki(MEDIAWIKI_SERVER_URL, cache=cache)
    pages = list(mediawiki.fetch(reviews_api=reviews_api))

    requests_done = len(HTTPServer.requests_http)

    # Now read the pages back from the cache
    cached_pages = list(mediawiki.fetch_from_cache())

    # No new requests to the server
    self.assertEqual(len(HTTPServer.requests_http), requests_done)

    self.assertEqual(len(cached_pages), len(pages))
    if version == "1.28" and reviews_api:
        # 2 pages in all name spaces
        self.assertEqual(len(pages), 2)
    elif version == "1.23" or not reviews_api:
        # 2 pages per each of the 5 name spaces
        self.assertEqual(len(pages), 10)
    HTTPServer.check_pages_contents(self, pages)

    # Now test more than one execution over the same cache
    shutil.rmtree(self.tmp_path)
    cache = Cache(self.tmp_path)
    mediawiki = MediaWiki(MEDIAWIKI_SERVER_URL, cache=cache)
    pages = list(mediawiki.fetch(reviews_api=reviews_api))
    # Second fetch: consumed only to append a second copy to the cache
    list(mediawiki.fetch(reviews_api=reviews_api))
    cached_pages = list(mediawiki.fetch_from_cache())
    if version == "1.28" and reviews_api:
        # 2 unique pages x2 caches
        self.assertEqual(len(cached_pages), 4)
    elif version == "1.23" or not reviews_api:
        # 2 pages per each of the 5 name spaces, x2 caches
        self.assertEqual(len(cached_pages), 10 * 2)
def test_fetch_from_cache(self):
    """Issues read back from the cache must match the live fetch."""
    http_requests = setup_http_server()

    # Fetch from the server first, populating the cache as a side effect
    cache = Cache(self.tmp_path)
    redmine = Redmine(REDMINE_URL, api_token='AAAA',
                      max_issues=3, cache=cache)
    issues = list(redmine.fetch())

    self.assertEqual(len(http_requests), 12)

    # Reading from the cache must return the same issues without
    # issuing any new HTTP request
    cached_issues = list(redmine.fetch_from_cache())
    self.assertEqual(len(cached_issues), len(issues))

    # (id, uuid, updated_on, author id, first journal user id)
    expected = [(9, '91a8349c2f6ebffcccc49409529c61cfd3825563',
                 1323367020.0, 3, 3),
                (5, 'c4aeb9e77fec8e4679caa23d4012e7cc36ae8b98',
                 1323367075.0, 3, 3),
                (2, '3c3d67925b108a37f88cc6663f7f7dd493fa818c',
                 1323367117.0, 3, 3),
                (7311, '4ab289ab60aee93a66e5490529799cf4a2b4d94c',
                 1469607427.0, 24, 4)]

    self.assertEqual(len(cached_issues), len(expected))

    for issue, expc, fetched in zip(cached_issues, expected, issues):
        self.assertEqual(issue['data']['id'], expc[0])
        self.assertEqual(issue['uuid'], expc[1])
        self.assertEqual(issue['origin'], REDMINE_URL)
        self.assertEqual(issue['updated_on'], expc[2])
        self.assertEqual(issue['category'], 'issue')
        self.assertEqual(issue['tag'], REDMINE_URL)
        self.assertEqual(issue['data']['author_data']['id'], expc[3])
        self.assertEqual(issue['data']['journals'][0]['user_data']['id'],
                         expc[4])
        self.assertDictEqual(issue['data'], fetched['data'])

    # The user 99 does not have information
    self.assertEqual(issues[3]['data']['journals'][1]['user']['id'], 99)
    self.assertDictEqual(issues[3]['data']['journals'][1]['user_data'], {})

    # No more requests were sent
    self.assertEqual(len(http_requests), 12)
def test_recover(self):
    """Recover must restore the items that were backed up before a clean."""
    expected = [1, 2, 3, 4, 5]
    cache_path = os.path.join(self.test_path, CACHE_DIR)
    cache = Cache(cache_path)
    cache.store(*expected)
    cache.backup()
    cache.clean()

    # After cleaning, nothing is retrievable...
    contents = list(cache.retrieve())
    self.assertEqual(len(contents), 0)

    # ...until the backup is recovered
    cache.recover()
    contents = list(cache.retrieve())
    self.assertListEqual(contents, expected)
def test_clean(self):
    """Clean must empty the cache, keeping a recovery copy unless erased."""
    items = [1, 2, 3, 4, 5]
    cache_path = os.path.join(self.test_path, CACHE_DIR)
    cache = Cache(cache_path)
    cache.store(*items)
    expected = list(os.listdir(cache.items_path))

    cache.clean()

    # Check the contents and the files stored in each directory:
    # nothing retrievable, but the item files moved to recovery
    contents = list(cache.retrieve())
    self.assertEqual(len(contents), 0)

    rfiles = list(os.listdir(cache.recovery_path))
    self.assertEqual(rfiles, expected)
    self.assertNotEqual(len(rfiles), 0)

    # Check erase mode: no recovery copy must survive
    cache.store(*items)
    cache.clean(erase=True)

    contents = list(cache.retrieve())
    self.assertEqual(len(contents), 0)

    rfiles = list(os.listdir(cache.recovery_path))
    self.assertEqual(len(rfiles), 0)