def sync_assessment_statuses():
  """Synchronizes issue tracker ticket statuses with the Assessment statuses.

  Checks for Assessments which are in sync with Issue Tracker issues and
  updates their statuses in accordance to the corresponding Assessments
  if differ.
  """
  assessment_issues = sync_utils.collect_issue_tracker_info("Assessment",
                                                            include_ccs=True)
  if not assessment_issues:
    return
  logger.debug('Syncing state of %d issues.', len(assessment_issues))

  cli = issues.Client()
  processed_ids = set()
  for batch in sync_utils.iter_issue_batches(assessment_issues.keys()):
    for issue_id, issuetracker_state in batch.iteritems():
      issue_id = str(issue_id)
      issue_info = assessment_issues.get(issue_id)
      if not issue_info:
        # Tracker returned an issue we did not ask about; skip it.
        logger.warning(
            'Got an unexpected issue from Issue Tracker: %s', issue_id)
        continue

      processed_ids.add(issue_id)
      assessment_state = issue_info['state']
      status_value = ASSESSMENT_STATUSES_MAPPING.get(
          assessment_state["status"])
      if not status_value:
        # Bug fix: log the unmapped Assessment status itself; the previous
        # code logged `status_value`, which is always empty on this branch.
        logger.error(
            'Inexistent Issue Tracker status for assessment ID=%d '
            'with status: %s.',
            issue_info['object_id'], assessment_state["status"])
        continue

      assessment_state["status"] = status_value
      # Skip the update when all tracked fields and the CC lists already
      # match the Issue Tracker state.
      if all(
          assessment_state.get(field) == issuetracker_state.get(field)
          for field in FIELDS_TO_CHECK
      ) and _compare_ccs(
          assessment_state.get("ccs", []),
          issuetracker_state.get("ccs", [])):
        continue

      try:
        sync_utils.update_issue(cli, issue_id, assessment_state)
      except integrations_errors.Error as error:
        # Best-effort sync: log and keep processing remaining issues.
        logger.error(
            'Unable to update status of Issue Tracker issue ID=%s for '
            'assessment ID=%d: %r',
            issue_id, issue_info['object_id'], error)

  logger.debug('Sync is done, %d issue(s) were processed.',
               len(processed_ids))

  missing_ids = set(assessment_issues) - processed_ids
  if missing_ids:
    logger.warning(
        'Some issues are linked to Assessments '
        'but were not found in Issue Tracker: %s',
        ', '.join(str(i) for i in missing_ids))
def test_iter_issue_batches_error(self):
  """Tests handling error fetching issues from Issue Tracer in batches."""
  client = mock.MagicMock()
  client.search.side_effect = integrations_errors.HttpError('Test')
  patcher = mock.patch.object(
      sync_utils.issues, 'Client', return_value=client)
  # A search failure must yield no batches rather than propagate.
  with patcher:
    batches = list(sync_utils.iter_issue_batches([1, 2, 3]))
    self.assertEqual(batches, [])
def test_iter_issue_batches_error(self):
  """Tests handling error fetching issues from Issue Tracer in batches."""
  failing_client = mock.MagicMock()
  failing_client.search.side_effect = integrations_errors.HttpError('Test')
  with mock.patch.object(sync_utils.issues, 'Client',
                         return_value=failing_client):
    result = list(sync_utils.iter_issue_batches([1, 2, 3]))
    # The generator swallows the HTTP error and produces nothing.
    self.assertEqual(result, [])
def _compare_values_for_issues():
  """Compare local and external values for IssuetrackerIssue objects.

  Returns:
    Dict() key: issue_id, value: {component_id: int, hotlist_id: int}
  """
  issues_to_update = defaultdict(dict)
  local_issues = _collect_issues_from_db()
  external_issues = sync_utils.iter_issue_batches(local_issues.keys(),
                                                  batch_size=100)
  logger.info("Collecting issues those need to be updated")
  for info in external_issues:
    for issue_id, issue_data in info.iteritems():
      local_issue = local_issues.get(issue_id)
      if local_issue is None:
        # Robustness fix: guard against issue ids the tracker returned but
        # we did not request (other sync functions warn and skip likewise);
        # previously this raised KeyError and aborted the whole comparison.
        logger.warning(
            "Got an unexpected issue from Issue Tracker: %s", issue_id)
        continue

      local_component = local_issue.get("component_id")
      local_hotlist = local_issue.get("hotlist_id")

      # The external (tracker) value wins when it differs from ours.
      if local_component != issue_data["component_id"]:
        issues_to_update[issue_id]["component_id"] = issue_data[
            "component_id"]

      if not issue_data["hotlist_ids"]:
        # Ticket was removed from all hotlists.
        issues_to_update[issue_id]["hotlist_id"] = None
      elif local_hotlist not in issue_data["hotlist_ids"]:
        # Our hotlist is stale; adopt the first one from the tracker.
        issues_to_update[issue_id]["hotlist_id"] = issue_data[
            "hotlist_ids"][0]

  return issues_to_update
def sync_assessment_statuses():
  """Synchronizes issue tracker ticket statuses with the Assessment statuses.

  Checks for Assessments which are in sync with Issue Tracker issues and
  updates their statuses in accordance to the corresponding Assessments
  if differ.
  """
  assessment_issues = sync_utils.collect_issue_tracker_info("Assessment")
  if not assessment_issues:
    return
  logger.debug('Syncing state of %d issues.', len(assessment_issues))

  cli = issues.Client()
  processed_ids = set()
  for batch in sync_utils.iter_issue_batches(assessment_issues.keys()):
    for issue_id, issuetracker_state in batch.iteritems():
      issue_id = str(issue_id)
      issue_info = assessment_issues.get(issue_id)
      if not issue_info:
        # Tracker returned an issue we did not ask about; skip it.
        logger.warning(
            'Got an unexpected issue from Issue Tracker: %s', issue_id)
        continue

      processed_ids.add(issue_id)
      assessment_state = issue_info['state']
      status_value = ASSESSMENT_STATUSES_MAPPING.get(
          assessment_state["status"]
      )
      if not status_value:
        # Bug fix: log the unmapped Assessment status itself; the previous
        # code logged `status_value`, which is always empty on this branch.
        logger.error(
            'Inexistent Issue Tracker status for assessment ID=%d '
            'with status: %s.',
            issue_info['object_id'], assessment_state["status"]
        )
        continue

      assessment_state["status"] = status_value
      # Nothing to do when every tracked field already matches.
      if all(
          assessment_state.get(field) == issuetracker_state.get(field)
          for field in FIELDS_TO_CHECK
      ):
        continue

      try:
        sync_utils.update_issue(cli, issue_id, assessment_state)
      except integrations_errors.Error as error:
        # Best-effort sync: log and keep processing remaining issues.
        logger.error(
            'Unable to update status of Issue Tracker issue ID=%s for '
            'assessment ID=%d: %r',
            issue_id, issue_info['object_id'], error)

  logger.debug('Sync is done, %d issue(s) were processed.',
               len(processed_ids))

  missing_ids = set(assessment_issues) - processed_ids
  if missing_ids:
    logger.warning(
        'Some issues are linked to Assessments '
        'but were not found in Issue Tracker: %s',
        ', '.join(str(i) for i in missing_ids))
def test_iter_issue_batches(self):
  """Tests fetching issues from Issue Tracker in batches."""
  cli_mock = mock.MagicMock()
  cli_mock.search.side_effect = iter([
      {
          'issues': [
              {
                  'issueId': 't1',
                  'issueState': {
                      'status': 'FIXED', 'type': 'bug1',
                      'priority': 'P1', 'severity': 'S1', 'ccs': []
                  },
              },
              {
                  'issueId': 't2',
                  'issueState': {
                      'status': 'FIXED', 'type': 'bug2',
                      'priority': 'P2', 'severity': 'S2', 'ccs': []
                  },
              },
          ],
      },
  ])
  with mock.patch.object(sync_utils.issues, 'Client',
                         return_value=cli_mock):
    actual = list(sync_utils.iter_issue_batches([1, 2, 3]))
    # Fix: use assertEqual; assertEquals is a deprecated alias.
    self.assertEqual(actual, [
        {
            't1': {
                'status': 'FIXED', 'type': 'bug1',
                'priority': 'P1', 'severity': 'S1', 'ccs': []
            },
            't2': {
                'status': 'FIXED', 'type': 'bug2',
                'priority': 'P2', 'severity': 'S2', 'ccs': []
            },
        },
    ])
    # All requested ids go out in a single search call.
    self.assertEqual(cli_mock.search.call_args_list, [
        mock.call({
            'issue_ids': [1, 2, 3],
            'page_size': 100,
        }),
    ])
def sync_issue_attributes():
  """Synchronizes issue tracker ticket attrs with the Issue object attrs.

  Synchronize issue status and email list (Primary contacts and Admins).
  """
  tracked_issues = sync_utils.collect_issue_tracker_info(
      "Issue",
      include_object=True
  )
  if not tracked_issues:
    return

  def _issue_role(role_name):
    # Fetch an Issue access-control role by name.
    return all_models.AccessControlRole.query.filter_by(
        object_type=all_models.Issue.__name__,
        name=role_name
    ).first()

  assignees_role = _issue_role("Primary Contacts")
  admin_role = _issue_role("Admin")

  synced_ids = set()
  for batch in sync_utils.iter_issue_batches(tracked_issues.keys(),
                                             include_emails=True):
    for tracker_id, tracker_state in batch.iteritems():
      tracker_id = str(tracker_id)
      info = tracked_issues.get(tracker_id)
      if not info:
        logger.warning(
            "Got an unexpected issue from Issue Tracker: %s", tracker_id)
        continue

      synced_ids.add(tracker_id)
      obj = info["object"]

      # Sync attributes.
      sync_statuses(tracker_state, obj)
      sync_assignee_email(tracker_state, obj, assignees_role)
      sync_verifier_email(tracker_state, obj, admin_role)
      due_date_fields = {
          constants.CUSTOM_FIELDS_DUE_DATE: sync_utils.parse_due_date(
              tracker_state.get("custom_fields", [])
          )
      }
      sync_due_date(due_date_fields, obj)
      # Persist each object individually, as the original did.
      db.session.commit()

  logger.debug("Sync is done, %d issue(s) were processed.",
               len(synced_ids))

  missing_ids = set(tracked_issues) - synced_ids
  if missing_ids:
    logger.warning(
        "Some issues are linked to Issue "
        "but were not found in Issue Tracker: %s",
        ", ".join(str(i) for i in missing_ids)
    )
def sync_issue_attributes():
  """Synchronizes issue tracker ticket attrs with the Issue object attrs.

  Synchronize issue status and email list (Primary contacts and Admins).
  """
  issuetracker_issues = sync_utils.collect_issue_tracker_info(
      "Issue"
  )
  if not issuetracker_issues:
    return

  role_query = all_models.AccessControlRole.query.filter_by
  assignees_role = role_query(
      object_type=all_models.Issue.__name__,
      name="Primary Contacts"
  ).first()
  admin_role = role_query(
      object_type=all_models.Issue.__name__,
      name="Admin"
  ).first()

  processed_ids = set()
  for batch in sync_utils.iter_issue_batches(issuetracker_issues.keys()):
    for raw_id, issuetracker_state in batch.iteritems():
      issue_id = str(raw_id)
      issue_info = issuetracker_issues.get(issue_id)
      if not issue_info:
        logger.warning(
            "Got an unexpected issue from Issue Tracker: %s", issue_id)
      else:
        processed_ids.add(issue_id)
        sync_object = issue_info["object"]

        # Sync attributes.
        sync_statuses(issuetracker_state, sync_object)
        sync_assignee_email(issuetracker_state, sync_object, assignees_role)
        sync_verifier_email(issuetracker_state, sync_object, admin_role)
        custom_fields = {
            constants.CustomFields.DUE_DATE: sync_utils.parse_due_date(
                issuetracker_state.get("custom_fields", [])
            )
        }
        sync_due_date(custom_fields, sync_object)
        # One commit per synced object, matching the original flow.
        db.session.commit()

  logger.debug("Sync is done, %d issue(s) were processed.",
               len(processed_ids))

  missing_ids = set(issuetracker_issues) - processed_ids
  if missing_ids:
    logger.warning(
        "Some issues are linked to Issue "
        "but were not found in Issue Tracker: %s",
        ", ".join(str(i) for i in missing_ids)
    )
def test_iter_issue_batches(self):
  """Tests fetching issues from Issue Tracker in batches."""
  cli_mock = mock.MagicMock()
  cli_mock.search.side_effect = iter([
      {
          'issues': [
              {
                  'issueId': 't1',
                  'issueState': {
                      'status': 'FIXED', 'type': 'bug1',
                      'priority': 'P1', 'severity': 'S1',
                  },
              },
              {
                  'issueId': 't2',
                  'issueState': {
                      'status': 'FIXED', 'type': 'bug2',
                      'priority': 'P2', 'severity': 'S2',
                  },
              },
          ],
      },
  ])
  with mock.patch.object(sync_utils.issues, 'Client',
                         return_value=cli_mock):
    actual = list(sync_utils.iter_issue_batches([1, 2, 3]))
    # Fix: use assertEqual; assertEquals is a deprecated alias.
    self.assertEqual(actual, [
        {
            't1': {
                'status': 'FIXED', 'type': 'bug1',
                'priority': 'P1', 'severity': 'S1',
            },
            't2': {
                'status': 'FIXED', 'type': 'bug2',
                'priority': 'P2', 'severity': 'S2',
            },
        },
    ])
    # All requested ids go out in a single search call.
    self.assertEqual(cli_mock.search.call_args_list, [
        mock.call({
            'issue_ids': [1, 2, 3],
            'page_size': 100,
        }),
    ])
def sync_assessment_attributes():  # noqa
  """Synchronizes issue tracker ticket statuses with the Assessment statuses.

  Checks for Assessments which are in sync with Issue Tracker issues and
  updates their statuses in accordance to the corresponding Assessments
  if differ.
  """
  logger.info(
      "Assessment synchronization start: %s", datetime.datetime.utcnow()
  )
  assessment_issues = sync_utils.collect_issue_tracker_info(
      "Assessment"
  )
  if not assessment_issues:
    return
  logger.info("Syncing state of %d issues.", len(assessment_issues))

  handler = assessment_integration.AssessmentTrackerHandler()
  synced = set()
  for batch in sync_utils.iter_issue_batches(assessment_issues.keys()):
    for raw_id, issuetracker_state in batch.iteritems():
      issue_id, issue_info = _get_issue_info_by_issue_id(
          raw_id, assessment_issues
      )
      if not issue_info:
        logger.warning(
            "Got an unexpected issue from Issue Tracker: %s", issue_id
        )
        continue

      synced.add(issue_id)
      try:
        handler.handle_assessment_sync(
            issue_id=issue_id,
            issue_info=issue_info,
            issuetracker_state=issuetracker_state,
        ) if False else handler.handle_assessment_sync(
            issue_info, issue_id, issuetracker_state
        )
      except Exception as ex:  # pylint: disable=broad-except
        # Keep the batch going even if one assessment fails.
        logger.error(
            "Unhandled synchronization error: %s %s %s",
            issue_id, issue_info, ex
        )
        continue

  logger.info("Sync is done, %d issue(s) were processed.", len(synced))
  _check_missing_ids(assessment_issues, synced)
def sync_assessment_attributes():  # noqa
  """Synchronizes issue tracker ticket statuses with the Assessment statuses.

  Checks for Assessments which are in sync with Issue Tracker issues and
  updates their statuses in accordance to the corresponding Assessments
  if differ.
  """
  logger.info(
      "Assessment synchronization start: %s", datetime.datetime.utcnow()
  )
  assessment_issues = sync_utils.collect_issue_tracker_info(
      "Assessment"
  )
  if not assessment_issues:
    return
  logger.info("Syncing state of %d issues.", len(assessment_issues))

  processed_ids = set()
  tracker_handler = assessment_integration.AssessmentTrackerHandler()
  for batch in sync_utils.iter_issue_batches(assessment_issues.keys()):
    for batch_issue_id, issuetracker_state in batch.iteritems():
      issue_id, issue_info = _get_issue_info_by_issue_id(
          batch_issue_id, assessment_issues
      )
      if issue_info:
        processed_ids.add(issue_id)
        try:
          tracker_handler.handle_assessment_sync(
              issue_info, issue_id, issuetracker_state
          )
        except Exception as ex:  # pylint: disable=broad-except
          # A failure on one assessment must not stop the whole run.
          logger.error(
              "Unhandled synchronization error: %s %s %s",
              issue_id, issue_info, ex
          )
          continue
      else:
        logger.warning(
            "Got an unexpected issue from Issue Tracker: %s", issue_id
        )

  logger.info("Sync is done, %d issue(s) were processed.",
              len(processed_ids))
  _check_missing_ids(assessment_issues, processed_ids)
def sync_assessment_attributes():  # noqa
  """Synchronizes issue tracker ticket statuses with the Assessment statuses.

  Checks for Assessments which are in sync with Issue Tracker issues and
  updates their statuses in accordance to the corresponding Assessments
  if differ.
  """
  assessment_issues = sync_utils.collect_issue_tracker_info("Assessment",
                                                            include_ccs=True)
  if not assessment_issues:
    return
  logger.debug("Syncing state of %d issues.", len(assessment_issues))

  client = issues.Client()
  handled_ids = set()
  for batch in sync_utils.iter_issue_batches(assessment_issues.keys()):
    for raw_id, issuetracker_state in batch.iteritems():
      issue_id, issue_info = _get_issue_info_by_issue_id(
          raw_id, assessment_issues)
      if not issue_info:
        logger.warning(
            "Got an unexpected issue from Issue Tracker: %s", issue_id)
        continue

      handled_ids.add(issue_id)
      object_id = issue_info["object_id"]
      payload = _prepare_issue_payload(issue_info)
      # Only push an update when the payload actually differs from the
      # current Issue Tracker state.
      if _is_need_synchronize_issue(object_id, payload,
                                    issuetracker_state):
        _update_issue(client, issue_id, object_id, payload)

  logger.debug("Sync is done, %d issue(s) were processed.",
               len(handled_ids))
  _check_missing_ids(assessment_issues, handled_ids)
def test_iter_issue_batches(self):
  """Tests fetching issues from Issue Tracker in batches."""
  cli_mock = mock.MagicMock()
  cli_mock.search.side_effect = iter([
      {
          'issues': [
              {
                  'issueId': 't1',
                  'issueState': {
                      'status': 'FIXED', 'type': 'bug1',
                      'priority': 'P1', 'severity': 'S1',
                      'verifier': '*****@*****.**',
                      'assignee': '*****@*****.**',
                      'reporter': '*****@*****.**',
                      'custom_fields': [{
                          'name': 'Due Date',
                          'value': '2018-09-13',
                          'type': 'Date',
                          'display_string': 'Due Date',
                      }],
                      'ccs': []
                  },
              },
              {
                  'issueId': 't2',
                  'issueState': {
                      'status': 'FIXED', 'type': 'bug2',
                      'priority': 'P2', 'severity': 'S2', 'ccs': []
                  },
              },
          ],
      },
  ])
  with mock.patch.object(sync_utils.issues, 'Client',
                         return_value=cli_mock):
    actual = list(sync_utils.iter_issue_batches([1, 2, 3]))
    # Fix: use assertEqual; assertEquals is a deprecated alias.
    # Missing people/custom fields are expected to be filled with defaults.
    self.assertEqual(actual, [
        {
            't1': {
                'status': 'FIXED', 'type': 'bug1',
                'priority': 'P1', 'severity': 'S1',
                'verifier': '*****@*****.**',
                'assignee': '*****@*****.**',
                'reporter': '*****@*****.**',
                'custom_fields': [{
                    'name': 'Due Date',
                    'value': '2018-09-13',
                    'type': 'Date',
                    'display_string': 'Due Date',
                }],
                'ccs': []
            },
            't2': {
                'status': 'FIXED', 'type': 'bug2',
                'priority': 'P2', 'severity': 'S2',
                'verifier': None,
                'assignee': None,
                'reporter': None,
                'custom_fields': [],
                'ccs': []
            },
        },
    ])
    # All requested ids go out in a single search call.
    self.assertEqual(cli_mock.search.call_args_list, [
        mock.call({
            'issue_ids': [1, 2, 3],
            'page_size': 100,
        }),
    ])
def test_iter_issue_batches(self):
  """Tests fetching issues from Issue Tracker in batches."""
  cli_mock = mock.MagicMock()
  cli_mock.search.side_effect = iter([
      {
          'issues': [
              {
                  'issueId': 't1',
                  'issueState': {
                      'status': 'FIXED',
                      'type': 'bug1',
                      'priority': 'P1',
                      'severity': 'S1',
                      'verifier': '*****@*****.**',
                      'assignee': '*****@*****.**',
                      'reporter': '*****@*****.**',
                      'custom_fields': [{
                          'name': 'Due Date',
                          'value': '2018-09-13',
                          'type': 'Date',
                          'display_string': 'Due Date',
                      }],
                      'ccs': []
                  },
              },
              {
                  'issueId': 't2',
                  'issueState': {
                      'status': 'FIXED',
                      'type': 'bug2',
                      'priority': 'P2',
                      'severity': 'S2',
                      'ccs': []
                  },
              },
          ],
      },
  ])
  with mock.patch.object(sync_utils.issues, 'Client',
                         return_value=cli_mock):
    actual = list(sync_utils.iter_issue_batches([1, 2, 3]))
    # Fix: use assertEqual; assertEquals is a deprecated alias.
    # Missing people/custom fields are expected to be filled with defaults.
    self.assertEqual(actual, [
        {
            't1': {
                'status': 'FIXED',
                'type': 'bug1',
                'priority': 'P1',
                'severity': 'S1',
                'verifier': '*****@*****.**',
                'assignee': '*****@*****.**',
                'reporter': '*****@*****.**',
                'custom_fields': [{
                    'name': 'Due Date',
                    'value': '2018-09-13',
                    'type': 'Date',
                    'display_string': 'Due Date',
                }],
                'ccs': []
            },
            't2': {
                'status': 'FIXED',
                'type': 'bug2',
                'priority': 'P2',
                'severity': 'S2',
                'verifier': None,
                'assignee': None,
                'reporter': None,
                'custom_fields': [],
                'ccs': []
            },
        },
    ])
    # All requested ids go out in a single search call.
    self.assertEqual(cli_mock.search.call_args_list, [
        mock.call({
            'issue_ids': [1, 2, 3],
            'page_size': 100,
        }),
    ])