def test_fetch_from_non_set_cache(self):
    """Fetching from the cache without configuring one must raise CacheError."""
    jira = Jira(JIRA_SERVER_URL)

    # Consuming the generator is required to trigger the error
    with self.assertRaises(CacheError):
        _ = [item for item in jira.fetch_from_cache()]
def test_fetch_from_non_set_cache(self):
    """Fetching from the cache without configuring one must raise CacheError."""
    jira = Jira(JIRA_SERVER_URL)

    with self.assertRaises(CacheError):
        # The generator must be consumed for the error to be raised
        _ = list(jira.fetch_from_cache())
def test_fetch_from_cache_empty(self):
    """An empty cache must yield no issues at all."""
    cache = Cache(self.tmp_path)
    jira = Jira(JIRA_SERVER_URL, cache=cache)

    cached = list(jira.fetch_from_cache())

    self.assertEqual(len(cached), 0)
def test_fetch_from_cache(self):
    """Issues fetched from the cache must match those fetched from the server."""
    http_requests = []
    pages = [
        read_file('data/jira/jira_issues_page_1.json'),
        read_file('data/jira/jira_issues_page_2.json')
    ]
    fields_body = read_file('data/jira/jira_fields.json')

    def request_callback(method, uri, headers):
        # Serve the pages in order, recording each request as it arrives
        page = pages.pop(0)
        http_requests.append(httpretty.last_request())
        return (200, headers, page)

    httpretty.register_uri(httpretty.GET,
                           JIRA_SEARCH_URL,
                           responses=[httpretty.Response(body=request_callback)
                                      for _ in range(2)])
    httpretty.register_uri(httpretty.GET,
                           JIRA_FIELDS_URL,
                           body=fields_body, status=200)

    # First, fetch the issues from the server so they get stored
    # in the cache
    cache = Cache(self.tmp_path)
    jira = Jira(JIRA_SERVER_URL, cache=cache)

    issues = list(jira.fetch())
    del issues[0]['timestamp']

    # Now read the issues back from the cache. The contents should
    # be the same and no new request should reach the server
    cache_issues = list(jira.fetch_from_cache())

    expected_req = [{
        'expand': ['renderedFields,transitions,operations,changelog'],
        'jql': [' updated > 0'],
        'startAt': ['0']
    }, {
        'expand': ['renderedFields,transitions,operations,changelog'],
        'jql': [' updated > 0'],
        'startAt': ['2']
    }]

    for i in range(len(expected_req)):
        self.assertEqual(http_requests[i].method, 'GET')
        self.assertRegex(http_requests[i].path, '/rest/api/2/search')
        self.assertDictEqual(http_requests[i].querystring, expected_req[i])

    self.assertEqual(len(issues), len(cache_issues))

    # Lengths were just asserted equal, so zip covers every pair
    for issue, cached in zip(issues, cache_issues):
        self.assertEqual(issue['origin'], cached['origin'])
        self.assertEqual(issue['uuid'], cached['uuid'])
        self.assertEqual(issue['updated_on'], cached['updated_on'])
        self.assertEqual(issue['data']['key'], cached['data']['key'])
        self.assertEqual(issue['data']['fields']['issuetype']['name'],
                         cached['data']['fields']['issuetype']['name'])
        self.assertEqual(issue['data']['fields']['creator']['name'],
                         cached['data']['fields']['creator']['name'])
        self.assertEqual(issue['data']['fields']['assignee']['name'],
                         cached['data']['fields']['assignee']['name'])
def test_fetch_from_cache(self):
    """Test whether a list of issues is returned from cache.

    Fetches issues from a mocked server (two paginated responses),
    then reads them back from the cache and checks that every field
    of every cached issue matches its server-fetched counterpart.
    """
    requests = []
    bodies_json = [read_file('data/jira/jira_issues_page_1.json'),
                   read_file('data/jira/jira_issues_page_2.json')]
    body = read_file('data/jira/jira_fields.json')

    def request_callback(method, uri, headers):
        # Serve the two pages in order and record each incoming request
        body = bodies_json.pop(0)
        requests.append(httpretty.last_request())
        return (200, headers, body)

    httpretty.register_uri(httpretty.GET,
                           JIRA_SEARCH_URL,
                           responses=[httpretty.Response(body=request_callback)
                                      for _ in range(2)])
    httpretty.register_uri(httpretty.GET,
                           JIRA_FIELDS_URL,
                           body=body, status=200)

    # First, we fetch the issues from the server, storing them
    # in a cache
    cache = Cache(self.tmp_path)
    jira = Jira(JIRA_SERVER_URL, cache=cache)

    issues = [issue for issue in jira.fetch()]
    del issues[0]['timestamp']

    # Now, we get the issues from the cache.
    # The contents should be the same and there won't be
    # any new request to the server
    cache_issues = [cache_issue for cache_issue in jira.fetch_from_cache()]

    expected_req = [{
        'expand': ['renderedFields,transitions,operations,changelog'],
        'jql': ['updated > 0 order by updated asc'],
        'startAt': ['0']
    }, {
        'expand': ['renderedFields,transitions,operations,changelog'],
        'jql': ['updated > 0 order by updated asc'],
        'startAt': ['2']
    }]

    for i in range(len(expected_req)):
        self.assertEqual(requests[i].method, 'GET')
        self.assertRegex(requests[i].path, '/rest/api/2/search')
        self.assertDictEqual(requests[i].querystring, expected_req[i])

    self.assertEqual(len(issues), len(cache_issues))

    for i in range(len(cache_issues)):
        self.assertEqual(issues[i]['origin'], cache_issues[i]['origin'])
        self.assertEqual(issues[i]['uuid'], cache_issues[i]['uuid'])
        self.assertEqual(issues[i]['updated_on'], cache_issues[i]['updated_on'])
        self.assertEqual(issues[i]['category'], cache_issues[i]['category'])
        # Bug fix: was issues[1]['tag'] — a hard-coded index that compared
        # the wrong issue's tag on every iteration except i == 1
        self.assertEqual(issues[i]['tag'], cache_issues[i]['tag'])
        self.assertEqual(issues[i]['data']['key'], cache_issues[i]['data']['key'])
        self.assertEqual(issues[i]['data']['fields']['issuetype']['name'],
                         cache_issues[i]['data']['fields']['issuetype']['name'])
        self.assertEqual(issues[i]['data']['fields']['creator']['name'],
                         cache_issues[i]['data']['fields']['creator']['name'])
        self.assertEqual(issues[i]['data']['fields']['assignee']['name'],
                         cache_issues[i]['data']['fields']['assignee']['name'])