def test_not_found_user(self):
    """Test if it works when a user is not found"""
    http_requests = setup_http_server()

    from_date = datetime.datetime(2016, 7, 27)

    backend = Redmine(REDMINE_URL, api_token='AAAA', max_issues=3)
    issues = list(backend.fetch(from_date=from_date))

    self.assertEqual(len(issues), 1)

    # The user 99 does not have information, so its resolved
    # user data must be an empty dict
    journal = issues[0]['data']['journals'][1]
    self.assertEqual(journal['user']['id'], 99)
    self.assertDictEqual(journal['user_data'], {})
def test_fetch_from_cache(self):
    """Test whether the cache works"""
    http_requests = setup_http_server()

    # First, fetch the issues from the server, storing them in a cache
    cache = Cache(self.tmp_path)
    backend = Redmine(REDMINE_URL, api_token='AAAA', max_issues=3,
                      cache=cache)

    issues = list(backend.fetch())
    self.assertEqual(len(http_requests), 12)

    # Now get the issues from the cache. They should be the same
    # and there won't be any new request to the server.
    cached_issues = list(backend.fetch_from_cache())
    self.assertEqual(len(cached_issues), len(issues))

    expected = [(9, '91a8349c2f6ebffcccc49409529c61cfd3825563',
                 1323367020.0, 3, 3),
                (5, 'c4aeb9e77fec8e4679caa23d4012e7cc36ae8b98',
                 1323367075.0, 3, 3),
                (2, '3c3d67925b108a37f88cc6663f7f7dd493fa818c',
                 1323367117.0, 3, 3),
                (7311, '4ab289ab60aee93a66e5490529799cf4a2b4d94c',
                 1469607427.0, 24, 4)]

    self.assertEqual(len(cached_issues), len(expected))

    for cached, fetched, expc in zip(cached_issues, issues, expected):
        issue_id, uuid, updated_on, author_id, user_id = expc
        self.assertEqual(cached['data']['id'], issue_id)
        self.assertEqual(cached['uuid'], uuid)
        self.assertEqual(cached['origin'], REDMINE_URL)
        self.assertEqual(cached['updated_on'], updated_on)
        self.assertEqual(cached['category'], 'issue')
        self.assertEqual(cached['tag'], REDMINE_URL)
        self.assertEqual(cached['data']['author_data']['id'], author_id)
        self.assertEqual(cached['data']['journals'][0]['user_data']['id'],
                         user_id)
        self.assertDictEqual(cached['data'], fetched['data'])

    # The user 99 does not have information
    self.assertEqual(issues[3]['data']['journals'][1]['user']['id'], 99)
    self.assertDictEqual(issues[3]['data']['journals'][1]['user_data'], {})

    # No more requests were sent
    self.assertEqual(len(http_requests), 12)
def test_parse_user_data(self):
    """Test if it parses a user stream"""
    raw_json = read_file('data/redmine/redmine_user_3.json')

    parsed = Redmine.parse_user_data(raw_json)

    # Fields taken from the redmine_user_3.json fixture
    self.assertEqual(parsed['id'], 3)
    self.assertEqual(parsed['lastname'], 'User')
    self.assertEqual(parsed['login'], 'generic')
def test_parse_issue_data(self):
    """Test if it parses an issue stream"""
    raw_json = read_file('data/redmine/redmine_issue_7311.json')

    parsed = Redmine.parse_issue_data(raw_json)

    # Fields taken from the redmine_issue_7311.json fixture
    self.assertEqual(parsed['id'], 7311)
    self.assertEqual(len(parsed['journals']), 22)
    self.assertEqual(len(parsed['changesets']), 0)
def test_parse_issues(self):
    """Test if it parses an issues stream"""
    raw_json = read_file('data/redmine/redmine_issues.json')

    results = list(Redmine.parse_issues(raw_json))

    self.assertEqual(len(results), 3)
    for result, expected_id in zip(results, [9, 5, 2]):
        self.assertEqual(result['id'], expected_id)

    # A file without results parses to an empty stream
    raw_json = read_file('data/redmine/redmine_issues_empty.json')
    results = list(Redmine.parse_issues(raw_json))
    self.assertEqual(len(results), 0)
def test_fetch_empty(self):
    """Test if nothing is returned when there are no issues"""
    http_requests = setup_http_server()

    # No issues were updated on or after this date
    from_date = datetime.datetime(2017, 1, 1)

    backend = Redmine(REDMINE_URL, api_token='AAAA', max_issues=3)
    issues = list(backend.fetch(from_date=from_date))

    self.assertEqual(len(issues), 0)

    # Exactly one request should have been sent, with these parameters
    expected = {
        'key': ['AAAA'],
        'status_id': ['*'],
        'sort': ['updated_on'],
        'updated_on': ['>=2017-01-01T00:00:00Z'],
        'offset': ['0'],
        'limit': ['3']
    }

    self.assertEqual(len(http_requests), 1)
    self.assertDictEqual(http_requests[0].querystring, expected)
def test_initialization(self):
    """Test whether attributes are initialized"""
    redmine = Redmine(REDMINE_URL, api_token='AAAA',
                      max_issues=5, tag='test')

    self.assertEqual(redmine.url, REDMINE_URL)
    self.assertEqual(redmine.max_issues, 5)
    self.assertEqual(redmine.origin, REDMINE_URL)
    self.assertEqual(redmine.tag, 'test')
    self.assertIsNone(redmine.client)

    # When tag is empty or None it will be set to the value in url
    for kwargs in ({}, {'tag': ''}):
        redmine = Redmine(REDMINE_URL, **kwargs)
        self.assertEqual(redmine.url, REDMINE_URL)
        self.assertEqual(redmine.origin, REDMINE_URL)
        self.assertEqual(redmine.tag, REDMINE_URL)
def test_search_fields(self):
    """Test whether the search_fields is properly set.

    The original version repeated the same five assertions verbatim
    for each of the four fetched issues; they are now data-driven so
    a new fixture only needs a new entry in `expected_projects`.
    """
    setup_http_server()

    redmine = Redmine(REDMINE_URL, api_token='AAAA', max_issues=3)
    issues = [issue for issue in redmine.fetch()]

    # (project name, project id) expected for each issue, in fetch order
    expected_projects = [
        ('Global', 1),
        ('Global', 1),
        ('Global', 1),
        ('MAD', 91),
    ]

    # Explicit length check: the original only failed implicitly
    # (IndexError) when fewer than four issues were fetched
    self.assertEqual(len(issues), len(expected_projects))

    for issue, (name, project_id) in zip(issues, expected_projects):
        # search_fields must mirror the raw item data
        self.assertEqual(redmine.metadata_id(issue['data']),
                         issue['search_fields']['item_id'])
        self.assertEqual(issue['data']['project']['name'], name)
        self.assertEqual(issue['data']['project']['name'],
                         issue['search_fields']['project_name'])
        self.assertEqual(issue['data']['project']['id'], project_id)
        self.assertEqual(issue['data']['project']['id'],
                         issue['search_fields']['project_id'])
def setUp(self):
    super().setUp()
    # Backend under test, wired to the archive provided by the base class
    self.backend = Redmine(REDMINE_URL, api_token='AAAA',
                           max_issues=3, archive=self.archive)
def test_fetch(self):
    """Test whether it fetches a set of issues"""
    http_requests = setup_http_server()

    backend = Redmine(REDMINE_URL, api_token='AAAA', max_issues=3)
    issues = list(backend.fetch())

    expected = [(9, '91a8349c2f6ebffcccc49409529c61cfd3825563',
                 1323367020.0, 3, 3),
                (5, 'c4aeb9e77fec8e4679caa23d4012e7cc36ae8b98',
                 1323367075.0, 3, 3),
                (2, '3c3d67925b108a37f88cc6663f7f7dd493fa818c',
                 1323367117.0, 3, 3),
                (7311, '4ab289ab60aee93a66e5490529799cf4a2b4d94c',
                 1469607427.0, 24, 4)]

    self.assertEqual(len(issues), len(expected))

    for issue, expc in zip(issues, expected):
        issue_id, uuid, updated_on, author_id, user_id = expc
        self.assertEqual(issue['data']['id'], issue_id)
        self.assertEqual(issue['uuid'], uuid)
        self.assertEqual(issue['origin'], REDMINE_URL)
        self.assertEqual(issue['updated_on'], updated_on)
        self.assertEqual(issue['category'], 'issue')
        self.assertEqual(issue['tag'], REDMINE_URL)
        self.assertEqual(issue['data']['author_data']['id'], author_id)
        self.assertEqual(issue['data']['journals'][0]['user_data']['id'],
                         user_id)

    # Check requests: three pages of issues plus the issue and
    # user detail calls made while resolving each one
    expected = [{
        'key': ['AAAA'],
        'status_id': ['*'],
        'sort': ['updated_on'],
        'updated_on': ['>=1970-01-01T00:00:00Z'],
        'offset': ['0'],
        'limit': ['3']
    }, {
        'key': ['AAAA'],
        'include': ['attachments,changesets,children,journals,relations,watchers']
    }, {
        'key': ['AAAA']
    }, {
        'key': ['AAAA'],
        'include': ['attachments,changesets,children,journals,relations,watchers']
    }, {
        'key': ['AAAA'],
        'include': ['attachments,changesets,children,journals,relations,watchers']
    }, {
        'key': ['AAAA'],
        'status_id': ['*'],
        'sort': ['updated_on'],
        'updated_on': ['>=1970-01-01T00:00:00Z'],
        'offset': ['3'],
        'limit': ['3']
    }, {
        'key': ['AAAA'],
        'include': ['attachments,changesets,children,journals,relations,watchers']
    }, {
        'key': ['AAAA']
    }, {
        'key': ['AAAA']
    }, {
        'key': ['AAAA']
    }, {
        'key': ['AAAA']
    }, {
        'key': ['AAAA'],
        'status_id': ['*'],
        'sort': ['updated_on'],
        'updated_on': ['>=1970-01-01T00:00:00Z'],
        'offset': ['6'],
        'limit': ['3']
    }]

    self.assertEqual(len(http_requests), len(expected))

    for request, params in zip(http_requests, expected):
        self.assertDictEqual(request.querystring, params)
def test_has_resuming(self):
    """Test if it returns True when has_resuming is called"""
    # The Redmine backend supports resuming a fetch
    resuming = Redmine.has_resuming()
    self.assertEqual(resuming, True)
def setUp(self):
    super().setUp()
    # Two backends sharing one archive: the first writes to it,
    # the second (different token) reads back from it
    self.backend_write_archive = Redmine(REDMINE_URL, api_token='AAAA',
                                         max_issues=3,
                                         archive=self.archive)
    self.backend_read_archive = Redmine(REDMINE_URL, api_token='BBBB',
                                        max_issues=3,
                                        archive=self.archive)
def test_fetch_from_date(self):
    """Test whether it fetches a set of issues from the given date"""
    http_requests = setup_http_server()

    from_date = datetime.datetime(2016, 7, 27)

    backend = Redmine(REDMINE_URL, api_token='AAAA', max_issues=3)
    issues = list(backend.fetch(from_date=from_date))

    # Only issue 7311 was updated after the given date
    self.assertEqual(len(issues), 1)

    issue = issues[0]
    self.assertEqual(issue['data']['id'], 7311)
    self.assertEqual(issue['uuid'], '4ab289ab60aee93a66e5490529799cf4a2b4d94c')
    self.assertEqual(issue['origin'], REDMINE_URL)
    self.assertEqual(issue['updated_on'], 1469607427.0)
    self.assertEqual(issue['category'], 'issue')
    self.assertEqual(issue['tag'], REDMINE_URL)
    self.assertEqual(issue['data']['author_data']['id'], 24)
    self.assertEqual(issue['data']['journals'][0]['user_data']['id'], 4)

    expected = [
        {
            'key': ['AAAA'],
            'status_id': ['*'],
            'sort': ['updated_on'],
            'updated_on': ['>=2016-07-27T00:00:00Z'],
            'offset': ['0'],
            'limit': ['3']
        },
        {
            'key': ['AAAA'],
            'include': ['attachments,changesets,children,journals,relations,watchers']
        },
        {
            'key': ['AAAA']
        },
        {
            'key': ['AAAA']
        },
        {
            'key': ['AAAA']
        },
        {
            'key': ['AAAA']
        },
        {
            'key': ['AAAA'],
            'status_id': ['*'],
            'sort': ['updated_on'],
            'updated_on': ['>=2016-07-27T00:00:00Z'],
            'offset': ['3'],
            'limit': ['3']
        }
    ]

    self.assertEqual(len(http_requests), len(expected))

    for request, params in zip(http_requests, expected):
        self.assertDictEqual(request.querystring, params)
def __init__(self, bt_info):
    super().__init__(bt_info)
    tracker = bt_info.bug_tracker
    self.backend = Redmine(tracker.baseurl)
    # Maps Redmine user ids to already-resolved participants
    self.usercache = {}
class RedmineImporter(BugTrackerImporter):
    """Import bugs and comments from a Redmine tracker.

    Fetches issues through the Perceval-style ``Redmine`` backend and
    mirrors them into the local bug/comment models.
    """

    # Maps Redmine priority names to the local severity scale (1-4)
    severity_level_map = {
        'Urgent': 4,
        'High': 3,
        'Immediate': 3,
        'Normal': 2,
        'Low': 1,
    }

    def __init__(self, bt_info):
        super().__init__(bt_info)
        bt = bt_info.bug_tracker
        self.backend = Redmine(bt.baseurl)
        # Cache of Redmine user id -> participant, so each user is
        # resolved through get_participant() only once
        self.usercache = {}

    def getuser(self, rmuser):
        """Resolve a raw Redmine user dict to a participant, with caching.

        ``rmuser`` may be None or an empty dict (the journal's author is
        unknown); in that case a participant is built from empty fields
        and nothing is cached.
        """
        try:
            return self.usercache[rmuser['id']]
        except (KeyError, AttributeError):
            # Sometimes rmuser is None
            name = ' '.join([
                getresource(rmuser, 'firstname', ''),
                getresource(rmuser, 'lastname', '')
            ])
            email = getresource(rmuser, 'mail', '')
            user = get_participant(name, email)
            if rmuser:
                self.usercache[rmuser['id']] = user
            return user

    def journal_is_closing_entry(self, journal):
        """Return True if this journal entry moved the issue to a closed status.

        NOTE(review): the closed status id 2 is hard-coded here, while
        ``closed_status_ids`` below computes the set dynamically and is
        not referenced in this class — confirm which is intended.
        """
        return any(
            detail['property'] == 'attr' and
            detail['name'] == 'status_id' and
            int(detail['new_value']) in [2]  # CLOSED
            for detail in journal['details'])

    @cached_property
    def closed_status_ids(self):
        # Set of status ids the server marks as closed.
        # NOTE(review): relies on self.client, which is not set in this
        # class — presumably provided by BugTrackerImporter; verify.
        return set(status.id for status in self.client.issue_status.all()
                   if getresource(status, 'is_closed', False))

    @transaction.atomic
    def _run(self):
        """Fetch all issues and upsert bugs and their comments atomically."""
        issues = self.backend.fetch()
        for issue in issues:
            data = issue['data']
            severity = self.translate_severity(data['priority']['name'])
            bug, created = self.object.bugs.get_or_create(
                bug_id=data['id'],
                defaults={'severity': severity})
            logger.info("%s bug [%s]",
                        "Created" if created else "Found", bug)
            if not created:
                # Existing bug: refresh the severity only
                bug.severity = severity
                bug.save()
            else:
                # New bug: record the issue description as a synthetic
                # first comment, timestamped at issue creation
                logger.info("Saving initial comment for [%s]", bug)
                bug.comments.create(
                    comment_id='VIRTUAL-1',
                    author=self.getuser(data['author_data']),
                    timestamp=str_to_datetime(data['created_on']))
            # Walk the journals, importing each as a comment and
            # remembering the timestamp of the last closing transition
            last_closed_time = None
            for journal in data['journals']:
                journal_time = str_to_datetime(journal['created_on'])
                comment, created = bug.comments.get_or_create(
                    comment_id=journal['id'],
                    author=self.getuser(journal['user_data']),
                    timestamp=journal_time)
                logger.info("%s comment [%s]",
                            "Created" if created else "Found", comment)
                if self.journal_is_closing_entry(journal):
                    last_closed_time = journal_time
            if last_closed_time is not None:
                bug.close_date = last_closed_time
                bug.save()
def test_has_caching(self):
    """Test if it returns False when has_caching is called"""
    # Caching is not supported by this backend
    caching = Redmine.has_caching()
    self.assertEqual(caching, False)