def test_first_node_of_story(client):
    """Tests getting the siblings for the first node of the story with backward_rel=true."""
    bug_properties = {
        'classification': 'Red Hat',
        'creation_time': datetime(2017, 4, 2, 6, 43, 58),
        'id_': '5555',
        'modified_time': datetime(2017, 12, 5, 10, 12, 47),
        'priority': 'unspecified',
        'product_name': 'Red Hat CloudForms Management Engine',
        'product_version': '5.7.0',
        'resolution': 'WORKSFORME',
        'severity': 'unspecified',
        'short_description': 'Fail to delete OSP tenant by CFME',
        'status': 'CLOSED',
        'target_milestone': 'GA',
        'votes': 0
    }
    BugzillaBug.get_or_create(bug_properties)[0]

    # A bug is the first node of a story, so asking for its siblings with
    # backward_rel=true must be rejected with a 400.
    rv = client.get('/api/v1/siblings/bugzillabug/5555?backward_rel=true')
    assert rv.status_code == 400
    expected = {
        'message': 'Siblings cannot be determined on this kind of resource',
        'status': 400
    }
    assert json.loads(rv.data.decode('utf-8')) == expected
def commit_handler(self, msg):
    """
    Handle a dist-git commit message and update Neo4j if necessary.

    :param dict msg: a message to be processed
    """
    headers = msg['headers']
    body_msg = msg['body']['msg']

    repo = DistGitRepo.get_or_create({
        'namespace': headers['namespace'],
        'name': headers['repo']
    })[0]

    # Derive the username from the email if the email is a Red Hat email
    email = headers['email'].lower()
    username = email.split('@redhat.com')[0] if email.endswith('@redhat.com') else email
    author = User.create_or_update({'username': username, 'email': email})[0]

    commit_message = body_msg['message']
    commit = DistGitCommit.create_or_update({
        'hash_': headers['rev'],
        'log_message': commit_message,
        'author_date': timestamp_to_datetime(body_msg['author_date']),
        'commit_date': timestamp_to_datetime(body_msg['commit_date'])
    })[0]

    # Connect the commit to every bug referenced in the commit message,
    # grouped by the kind of reference (resolves/related/reverted).
    bug_rel_mapping = self.parse_bugzilla_bugs(commit_message)
    bug_relationships = (
        ('resolves', commit.resolved_bugs),
        ('related', commit.related_bugs),
        ('reverted', commit.reverted_bugs),
    )
    for mapping_key, relationship in bug_relationships:
        for bug_id in bug_rel_mapping[mapping_key]:
            relationship.connect(BugzillaBug.get_or_create({'id_': bug_id})[0])

    commit.conditional_connect(commit.author, author)
    repo.commits.connect(commit)
def test_conditional_connect_zero_or_more():
    """Test EstuaryStructuredNode.conditional_connect on a ZeroOrMore relationship."""
    adv = Advisory(id_='12345', advisory_name='RHBA-2017:27760-01').save()
    first_bug = BugzillaBug(id_='2345').save()
    second_bug = BugzillaBug(id_='3456').save()
    assert len(adv.attached_bugs) == 0

    # A ZeroOrMore relationship has no cardinality limit, so each call
    # simply adds another connection without replacing the previous one.
    EstuaryStructuredNode.conditional_connect(adv.attached_bugs, first_bug)
    assert first_bug in adv.attached_bugs
    assert len(adv.attached_bugs) == 1

    EstuaryStructuredNode.conditional_connect(adv.attached_bugs, second_bug)
    assert first_bug in adv.attached_bugs
    assert second_bug in adv.attached_bugs
    assert len(adv.attached_bugs) == 2
def test_load_BugzillaBug(id):
    """Verify a created BugzillaBug node is readable back through the Neo4j REST endpoint."""
    # NOTE: the parameter name `id` shadows the builtin, but it is part of the
    # test's fixture/parametrize interface and must keep its name.
    BugzillaBug.get_or_create({"id_": id})

    request_headers = {
        "Accept": "application/json;charset=UTF-8",
        "Content-Type": "application/json",
    }
    cypher_payload = json.dumps({
        "query": "MATCH (node:BugzillaBug) WHERE node.id = {id} RETURN node.id",
        "params": {"id": id},
    })
    response = requests.post(
        url="http://localhost:7474/db/data/cypher",
        headers=request_headers,
        data=cypher_payload,
    )

    assert response.status_code == 200
    assert response.json()["data"][0][0] == id
def bug_handler(self, msg):
    """
    Handle a modified or created Bugzilla bug and update Neo4j if necessary.

    :param dict msg: a message to be processed
    """
    bug_data = msg['body']['msg']['bug']
    bug = BugzillaBug.create_or_update({
        'id_': str(bug_data['id']),
        'creation_time': timestamp_to_datetime(bug_data['creation_time']),
        'modified_time': timestamp_to_datetime(bug_data['last_change_time']),
        'priority': bug_data['priority'],
        'product_name': bug_data['product']['name'],
        'product_version': bug_data['version']['name'],
        'resolution': bug_data['resolution'],
        'severity': bug_data['severity'],
        'short_description': bug_data['summary'],
        'status': bug_data['status']['name'],
        'target_milestone': bug_data['target_milestone']['name'],
    })[0]

    # Create a User node for each Bugzilla role and connect it to the bug.
    # The username is the local part of the role's login email address.
    roles = (
        ('assigned_to', bug.assignee),
        ('qa_contact', bug.qa_contact),
        ('reporter', bug.reporter),
    )
    for role_key, relationship in roles:
        login = bug_data[role_key]['login']
        user = User.create_or_update({
            'username': login.split('@')[0],
            'email': login
        })[0]
        bug.conditional_connect(relationship, user)
def update_neo4j(self, bugs):
    """
    Update Neo4j with Bugzilla bugs information from Teiid.

    :param list bugs: a list of dictionaries
    """
    log.info('Beginning to upload data to Neo4j')
    for count, bug_dict in enumerate(bugs, start=1):
        bug = BugzillaBug.create_or_update({
            'id_': bug_dict['bug_id'],
            'severity': bug_dict['bug_severity'],
            'status': bug_dict['bug_status'],
            'creation_time': bug_dict['creation_ts'],
            'modified_time': bug_dict['delta_ts'],
            'priority': bug_dict['priority'],
            'product_name': bytes(bug_dict['product_name'], 'utf-8').decode(),
            'product_version': bug_dict['version'],
            'classification': bug_dict['classification'],
            'resolution': bug_dict['resolution'],
            'target_milestone': bug_dict['target_milestone'],
            'votes': bug_dict['votes'],
            'short_description': bytes(bug_dict['short_desc'], 'utf-8').decode()
        })[0]
        log.info('Uploaded {0} bugs out of {1}'.format(count, len(bugs)))

        # Create the User nodes and connect them to the bug for each role
        # that is present in the Teiid row.
        if bug_dict['assigned_to']:
            assignee = self.create_user_node(bug_dict['assigned_to_email'])
            bug.conditional_connect(bug.assignee, assignee)
        if bug_dict['reporter']:
            reporter = self.create_user_node(bug_dict['reported_by_email'])
            bug.conditional_connect(bug.reporter, reporter)
        if bug_dict['qa_contact']:
            qa_contact = self.create_user_node(bug_dict['qa_contact_email'])
            bug.conditional_connect(bug.qa_contact, qa_contact)
def test_module_story_node_siblings(client, resource, uid, backward_rel, expected):
    """Tests getting the siblings of an artifact's adjacent node in the module story path."""
    # Fixture graph: a bug resolved by a commit, two RPM builds attached to an
    # advisory, a module build containing both RPM builds, a Freshmaker event
    # triggered by the advisory, and two container builds attached to a
    # ContainerAdvisory.
    bug = BugzillaBug.get_or_create({
        'classification': 'Red Hat',
        'creation_time': datetime(2017, 4, 2, 19, 39, 6),
        'id_': '12345',
        'modified_time': datetime(2018, 2, 7, 19, 30, 47),
        'priority': 'high',
        'product_name': 'Red Hat Enterprise Linux',
        'product_version': '7.5',
        'resolution': '',
        'severity': 'low',
        'short_description': 'Some description',
        'status': 'VERIFIED',
        'target_milestone': 'rc',
        'votes': 0
    })[0]
    commit = DistGitCommit.get_or_create({
        'author_date': datetime(2017, 4, 26, 11, 44, 38),
        'commit_date': datetime(2017, 4, 26, 11, 44, 38),
        'hash_': '8a63adb248ba633e200067e1ad6dc61931727bad',
        'log_message': 'Related: #12345 - fix xyz'
    })[0]
    build = KojiBuild.get_or_create({
        'completion_time': datetime(2017, 4, 2, 19, 39, 6),
        'creation_time': datetime(2017, 4, 2, 19, 39, 6),
        'epoch': '0',
        'id_': '2345',
        'name': 'slf4j',
        'release': '4.el7_4',
        'start_time': datetime(2017, 4, 2, 19, 39, 6),
        'state': 1,
        'version': '1.7.4'
    })[0]
    build_two = KojiBuild.get_or_create({
        'completion_time': datetime(2017, 4, 2, 19, 39, 6),
        'creation_time': datetime(2017, 4, 2, 19, 39, 6),
        'epoch': '0',
        'id_': '3456',
        'name': 'slf3j',
        'release': '4.el6_3',
        'start_time': datetime(2017, 4, 2, 19, 39, 6),
        'state': 2,
        'version': '1.7.1'
    })[0]
    module_build = ModuleKojiBuild.get_or_create({
        'completion_time': datetime(2017, 4, 2, 19, 39, 6),
        'creation_time': datetime(2017, 4, 2, 19, 39, 6),
        'epoch': '0',
        'id_': '2345',
        'name': '389-ds',
        'context': 'a2037af3',
        'release': '20180805121332.a2037af3',
        'start_time': datetime(2017, 4, 2, 19, 39, 6),
        'mbs_id': 1338,
        'module_name': '389-ds',
        'module_version': '20180805121332',
        'module_stream': '1.4'
    })[0]
    advisory = Advisory.get_or_create({
        'actual_ship_date': datetime(2017, 8, 1, 15, 43, 51),
        'advisory_name': 'RHBA-2017:2251-02',
        'content_types': ['docker'],
        'created_at': datetime(2017, 4, 3, 14, 47, 23),
        'id_': '27825',
        'issue_date': datetime(2017, 8, 1, 5, 59, 34),
        'product_name': 'Red Hat Enterprise Linux',
        'product_short_name': 'RHEL',
        'security_impact': 'None',
        'state': 'SHIPPED_LIVE',
        'status_time': datetime(2017, 8, 1, 15, 43, 51),
        'synopsis': 'cifs-utils bug fix update',
        'update_date': datetime(2017, 8, 1, 7, 16)
    })[0]
    fm_event = FreshmakerEvent.get_or_create({
        'event_type_id': 8,
        'id_': '1180',
        'message_id': 'ID:messaging-devops-broker01.test',
        'state': 2,
        'state_name': 'COMPLETE',
        'state_reason': 'All container images have been rebuilt.',
        'time_created': datetime(2019, 8, 21, 13, 42, 20),
        'time_done': datetime(2099, 8, 21, 13, 42, 20)
    })[0]
    cb = ContainerKojiBuild.get_or_create({
        'completion_time': datetime(2017, 4, 2, 19, 39, 6),
        'creation_time': datetime(2017, 4, 2, 19, 39, 6),
        'epoch': '0',
        'id_': '710',
        'name': 'slf4j_2',
        'release': '4.el7_4_as',
        'start_time': datetime(2017, 4, 2, 19, 39, 6),
        'state': 1,
        'version': '1.7.4'
    })[0]
    cb_two = ContainerKojiBuild.get_or_create({
        'completion_time': datetime(2018, 4, 2, 19, 39, 6),
        'creation_time': datetime(2018, 4, 2, 19, 39, 6),
        'epoch': '0',
        'id_': '811',
        'name': 'some_build',
        'release': '4.el7_4_as',
        'start_time': datetime(2018, 4, 2, 19, 39, 6),
        'state': 2,
        'version': '1.7.5'
    })[0]
    ca = ContainerAdvisory.get_or_create({
        'actual_ship_date': datetime(2017, 8, 1, 15, 43, 51),
        'advisory_name': 'RHBA-2017:2251-03',
        'content_types': ['docker'],
        'created_at': datetime(2017, 4, 3, 14, 47, 23),
        'id_': '12327',
        'issue_date': datetime(2017, 8, 1, 5, 59, 34),
        'product_name': 'Red Hat Enterprise Linux',
        'product_short_name': 'RHEL',
        'security_impact': 'None',
        'state': 'SHIPPED_LIVE',
        'status_time': datetime(2017, 8, 1, 15, 43, 51),
        'synopsis': 'cifs-utils bug fix update',
        'update_date': datetime(2017, 8, 1, 7, 16)
    })[0]
    # Wire the nodes into the module story path
    commit.resolved_bugs.connect(bug)
    commit.koji_builds.connect(build)
    build.advisories.connect(advisory)
    build_two.advisories.connect(advisory)
    fm_event.triggered_by_advisory.connect(advisory)
    fm_event.successful_koji_builds.connect(cb)
    fm_event.successful_koji_builds.connect(cb_two)
    ca.attached_builds.connect(cb)
    ca.attached_builds.connect(cb_two)
    module_build.components.connect(build)
    module_build.components.connect(build_two)
    module_build.advisories.connect(advisory)

    url = '/api/v1/siblings/{0}/{1}?story_type=module'.format(resource, uid)
    if backward_rel:
        url = '{0}&backward_rel=true'.format(url)
    rv = client.get(url)
    assert rv.status_code == 200
    assert json.loads(rv.data.decode('utf-8')) == expected
def test_all_stories(client, resource, uid, expected):
    """Test getting all unique stories for an artifact."""
    # Fixture graph: one long story (bug -> commit -> build -> advisory ->
    # Freshmaker event -> container builds) plus partial stories branching
    # from a second bug/commit and a second advisory.
    bug = BugzillaBug.get_or_create({
        'creation_time': datetime(2017, 4, 2, 19, 39, 6),
        'id_': '12345',
        'modified_time': datetime(2018, 2, 7, 19, 30, 47),
        'priority': 'high',
        'product_name': 'Red Hat Enterprise Linux',
        'product_version': '7.5',
        'resolution': '',
        'severity': 'low',
        'short_description': 'Some description',
        'status': 'VERIFIED',
        'target_milestone': 'rc',
    })[0]
    bug_two = BugzillaBug.get_or_create({
        'creation_time': datetime(2017, 4, 1, 17, 41, 4),
        'severity': 'medium',
        'short_description': 'some description',
        'product_version': '7.2',
        'priority': 'unspecified',
        'product_name': 'Red Hat Enterprise Linux 7',
        'resolution': 'DUPLICATE',
        'target_milestone': 'rc',
        'modified_time': datetime(2018, 3, 14, 5, 53, 19),
        'id_': '1245',
        'status': 'CLOSED'
    })[0]
    commit = DistGitCommit.get_or_create({
        'author_date': datetime(2017, 4, 26, 11, 44, 38),
        'commit_date': datetime(2018, 5, 2, 10, 36, 47),
        'hash_': '8a63adb248ba633e200067e1ad6dc61931727bad',
        'log_message': 'Related: #12345 - fix xyz'
    })[0]
    commit_two = DistGitCommit.get_or_create({
        'commit_date': datetime(2018, 3, 14, 5, 52, 19),
        'author_date': datetime(2018, 3, 14, 5, 53, 25),
        'log_message': 'Repo creation',
        'hash_': 'f4dfc64c10a90492303e4f14ad3549a1a2b13575'
    })[0]
    build = KojiBuild.get_or_create({
        'completion_time': datetime(2018, 6, 2, 10, 55, 47),
        'creation_time': datetime(2018, 6, 2, 10, 36, 47),
        'epoch': '0',
        'id_': '2345',
        'name': 'slf4j',
        'release': '4.el7_4',
        'start_time': datetime(2018, 6, 2, 10, 36, 47),
        'state': 1,
        'version': '1.7.4'
    })[0]
    advisory = Advisory.get_or_create({
        'actual_ship_date': datetime(2017, 8, 1, 15, 43, 51),
        'advisory_name': 'RHBA-2017:2251-02',
        'created_at': datetime(2018, 6, 13, 10, 36, 47),
        'id_': '27825',
        'issue_date': datetime(2017, 8, 1, 5, 59, 34),
        'product_name': 'Red Hat Enterprise Linux',
        'security_impact': 'None',
        'state': 'SHIPPED_LIVE',
        'status_time': datetime(2017, 8, 1, 15, 43, 51),
        'synopsis': 'cifs-utils bug fix update',
        'update_date': datetime(2017, 8, 1, 7, 16)
    })[0]
    advisory_two = Advisory.get_or_create({
        'security_impact': 'None',
        'created_at': datetime(2018, 4, 21, 19, 36, 47),
        'synopsis': 'This is a synopsis of a test advisory.',
        'product_name': 'Release End2End Test',
        'update_date': datetime(2018, 4, 21, 19, 36, 47),
        'advisory_name': 'RHBA-2017:27760-01',
        'issue_date': datetime(2018, 3, 14, 5, 53, 25),
        'status_time': datetime(2018, 3, 14, 7, 53, 25),
        'state': 'DROPPED_NO_SHIP',
        'id_': '123456'
    })[0]
    fm_event = FreshmakerEvent.get_or_create({
        'id_': '1180',
        'state_name': 'COMPLETE',
        'state_reason': 'All container images have been rebuilt.',
        'time_created': datetime(2018, 8, 13, 10, 36, 47),
        'time_done': datetime(2018, 8, 13, 12, 45, 47)
    })[0]
    cb = ContainerKojiBuild.get_or_create({
        'completion_time': datetime(2017, 4, 2, 19, 39, 6),
        'creation_time': datetime(2017, 4, 2, 19, 39, 6),
        'epoch': '0',
        'id_': '710',
        'name': 'slf4j_2',
        'release': '4.el7_4_as',
        'start_time': datetime(2017, 4, 2, 19, 39, 6),
        'state': 1,
        'version': '1.7.4'
    })[0]
    cb_two = ContainerKojiBuild.get_or_create({
        'completion_time': datetime(2018, 8, 17, 13, 55, 47),
        'creation_time': datetime(2018, 8, 17, 8, 32, 47),
        'epoch': '0',
        'id_': '811',
        'name': 'some_build',
        'release': '4.el7_4_as',
        'start_time': datetime(2018, 8, 17, 8, 32, 47),
        'state': 2,
        'version': '1.7.5'
    })[0]
    # Longest story
    commit.resolved_bugs.connect(bug)
    commit.koji_builds.connect(build)
    build.advisories.connect(advisory, {'time_attached': datetime(2018, 6, 13, 10, 36, 47)})
    fm_event.triggered_by_advisory.connect(advisory)
    fm_event.successful_koji_builds.connect(cb)
    fm_event.successful_koji_builds.connect(cb_two)
    # Unique partial stories
    commit_two.resolved_bugs.connect(bug_two)
    commit_two.koji_builds.connect(build)
    build.advisories.connect(advisory_two, {'time_attached': datetime(2018, 4, 21, 19, 36, 47)})

    rv = client.get('/api/v1/allstories/{0}/{1}'.format(resource, uid))
    assert rv.status_code == 200
    assert json.loads(rv.data.decode('utf-8')) == expected
def update_neo4j(self, advisories):
    """
    Update Neo4j with Errata Tool advisories from Teiid.

    :param list advisories: a list of dictionaries of advisories
    """
    count = 0
    for advisory in advisories:
        count += 1
        log.info('Processing advisory {0}/{1}'.format(
            count, len(advisories)))
        # The content_types column is a string with YAML in it, so convert it to a list.
        # Bug fix: read the column from the advisory currently being processed
        # (previously this always read advisories[0], so every advisory got the
        # first advisory's content types).
        content_types = yaml.safe_load(advisory['content_types'])
        adv = Advisory.create_or_update({
            'actual_ship_date': advisory['actual_ship_date'],
            'advisory_name': advisory['advisory_name'],
            'content_types': content_types,
            'created_at': advisory['created_at'],
            'id_': advisory['id'],
            'issue_date': advisory['issue_date'],
            'product_name': advisory['product_name'],
            'product_short_name': advisory['product_short_name'],
            'release_date': advisory['release_date'],
            'security_impact': advisory['security_impact'],
            'security_sla': advisory['security_sla'],
            'state': advisory['state'],
            'status_time': advisory['status_time'],
            'synopsis': advisory['synopsis'],
            'type_': advisory['type'],
            'update_date': advisory['update_date'],
            'updated_at': advisory['updated_at']
        })[0]

        # Create the User nodes for each role (username is the local part of
        # the email) and connect them to the advisory.
        assigned_to = User.get_or_create(
            {'username': advisory['assigned_to'].split('@')[0]})[0]
        adv.conditional_connect(adv.assigned_to, assigned_to)
        package_owner = User.get_or_create(
            {'username': advisory['package_owner'].split('@')[0]})[0]
        adv.conditional_connect(adv.package_owner, package_owner)
        reporter = User.get_or_create(
            {'username': advisory['reporter'].split('@')[0]})[0]
        adv.conditional_connect(adv.reporter, reporter)

        # Mirror the advisory's state history and each state's creator
        for state in self.get_advisory_states(advisory['id']):
            adv_state = AdvisoryState.create_or_update({
                'id_': state['id'],
                'name': state['name'],
                'created_at': state['created_at'],
                'updated_at': state['updated_at']
            })[0]
            adv_state.conditional_connect(adv_state.advisory, adv)
            state_creator = User.get_or_create(
                {'username': state['username'].split('@')[0]})[0]
            adv_state.conditional_connect(adv_state.creator, state_creator)

        for attached_bug in self.get_attached_bugs(advisory['id']):
            bug = BugzillaBug.get_or_create(attached_bug)[0]
            adv.attached_bugs.connect(bug)

        for associated_build in self.get_associated_builds(advisory['id']):
            # If this is set, that means it was once part of the advisory but not anymore.
            # This relationship needs to be deleted if it exists.
            if associated_build['removed_index_id']:
                build = KojiBuild.nodes.get_or_none(
                    id_=associated_build['id_'])
                if build:
                    adv.attached_builds.disconnect(build)
            else:
                # This key shouldn't be stored in Neo4j
                del associated_build['removed_index_id']
                build = KojiBuild.get_or_create(associated_build)[0]
                adv.attached_builds.connect(build)
def update_neo4j(self, advisories):
    """
    Update Neo4j with Errata Tool advisories from Teiid.

    :param list advisories: a list of dictionaries of advisories
    """
    count = 0
    for advisory in advisories:
        count += 1
        log.info('Processing advisory {0}/{1}'.format(
            count, len(advisories)))
        # The content_types column is a string with YAML in it, so convert it to a list
        content_types = yaml.safe_load(advisory['content_types'])
        adv = Advisory.create_or_update({
            'actual_ship_date': advisory['actual_ship_date'],
            'advisory_name': advisory['advisory_name'],
            'content_types': content_types,
            'created_at': advisory['created_at'],
            'id_': advisory['id'],
            'issue_date': advisory['issue_date'],
            'product_name': advisory['product_name'],
            'product_short_name': advisory['product_short_name'],
            'release_date': advisory['release_date'],
            'security_impact': advisory['security_impact'],
            'security_sla': advisory['security_sla'],
            'state': advisory['state'],
            'status_time': advisory['status_time'],
            'synopsis': advisory['synopsis'],
            'update_date': advisory['update_date'],
        })[0]

        # Tracks whether the advisory has already been promoted to a
        # ContainerAdvisory during this loop
        container_adv = False
        for associated_build in self.get_associated_builds(advisory['id']):
            # Even if a node has two labels in the database, Neo4j returns the node
            # only with the specific label you asked for. Hence we check for labels
            # ContainerKojiBuild and KojiBuild separately for the same node.
            build = ContainerKojiBuild.nodes.get_or_none(
                id_=associated_build['id_'])
            if not build:
                build = KojiBuild.nodes.get_or_none(
                    id_=associated_build['id_'])
            if build and not container_adv:
                if build.__label__ == 'ContainerKojiBuild':
                    adv.add_label(ContainerAdvisory.__label__)
                    container_adv = True
            # If this is set, that means it was once part of the advisory but not anymore.
            # This relationship needs to be deleted if it exists.
            if associated_build['removed_index_id']:
                if build:
                    adv.attached_builds.disconnect(build)
            else:
                # Query Teiid and create the entry only if the build is not present in Neo4j
                if not build:
                    attached_build = self.get_koji_build(
                        associated_build['id_'])
                    if attached_build:
                        if self.is_container_build(attached_build):
                            build = ContainerKojiBuild.get_or_create(
                                {'id_': associated_build['id_']})[0]
                        else:
                            build = KojiBuild.get_or_create(
                                {'id_': associated_build['id_']})[0]
                # This will happen only if we do not find the build we are looking for in Teiid
                # which shouldn't usually happen under normal conditions
                if not build:
                    # Fix: logging.warn is a deprecated alias of logging.warning
                    log.warning(
                        'The Koji build with ID {} was not found in Teiid!'
                        .format(associated_build['id_']))
                    continue
                if adv.__label__ != ContainerAdvisory.__label__ \
                        and build.__label__ == ContainerKojiBuild.__label__:
                    adv.add_label(ContainerAdvisory.__label__)
                # Keep the time_attached property on the relationship in sync
                attached_rel = adv.attached_builds.relationship(build)
                time_attached = associated_build['time_attached']
                if attached_rel:
                    if attached_rel.time_attached != time_attached:
                        adv.attached_builds.replace(
                            build, {'time_attached': time_attached})
                else:
                    adv.attached_builds.connect(
                        build, {'time_attached': time_attached})

        # Create the User nodes and connect them to the advisory
        assigned_to = User.get_or_create(
            {'username': advisory['assigned_to'].split('@')[0]})[0]
        adv.conditional_connect(adv.assigned_to, assigned_to)
        reporter = User.get_or_create(
            {'username': advisory['reporter'].split('@')[0]})[0]
        adv.conditional_connect(adv.reporter, reporter)

        for attached_bug in self.get_attached_bugs(advisory['id']):
            bug = BugzillaBug.get_or_create(attached_bug)[0]
            adv.attached_bugs.connect(bug)
def test_get_artifact_story_not_available(client):
    """Test getting a resource story on a resource that doesn't have any relationships."""
    # A lone bug with no connected nodes: its story is just itself and all
    # related-node counts are zero.
    BugzillaBug.get_or_create({
        'classification': 'Red Hat',
        'creation_time': datetime(2017, 4, 2, 6, 43, 58),
        'id_': '5555',
        'modified_time': datetime(2017, 12, 5, 10, 12, 47),
        'priority': 'unspecified',
        'product_name': 'Red Hat CloudForms Management Engine',
        'product_version': '5.7.0',
        'resolution': 'WORKSFORME',
        'severity': 'unspecified',
        'short_description': 'Fail to delete OSP tenant by CFME',
        'status': 'CLOSED',
        'target_milestone': 'GA',
        'votes': 0
    })[0]
    expected = {
        'data': [{
            'assignee': None,
            'attached_advisories': [],
            'classification': 'Red Hat',
            'creation_time': '2017-04-02T06:43:58+00:00',
            'id': '5555',
            'modified_time': '2017-12-05T10:12:47+00:00',
            'priority': 'unspecified',
            'product_name': 'Red Hat CloudForms Management Engine',
            'product_version': '5.7.0',
            'qa_contact': None,
            'related_by_commits': [],
            'reporter': None,
            'resolution': 'WORKSFORME',
            'resolved_by_commits': [],
            'resource_type': 'BugzillaBug',
            'reverted_by_commits': [],
            'severity': 'unspecified',
            'short_description': 'Fail to delete OSP tenant by CFME',
            'status': 'CLOSED',
            'target_milestone': 'GA',
            'votes': 0
        }],
        'meta': {
            'related_nodes': {
                'Advisory': 0,
                'BugzillaBug': 0,
                'ContainerKojiBuild': 0,
                'DistGitCommit': 0,
                'FreshmakerEvent': 0,
                'KojiBuild': 0
            }
        }
    }
    rv = client.get('/api/v1/story/bugzillabug/5555')
    assert rv.status_code == 200
    assert json.loads(rv.data.decode('utf-8')) == expected
def _update_neo4j(neo4j_url, total_results, counter_and_results):
    """
    Update Neo4j results via mapping with multiprocessing.

    :param str neo4j_url: database url for Neo4j
    :param int total_results: the total number of results that will be processed. This is used
    for a logging statement about progress.
    :param tuple counter_and_results: a tuple where the first index is the current counter and
    the second index is a list of dictionaries representing results from Teiid
    """
    # Fix: track the pool outside the try so it can be shut down in the
    # finally block (it previously leaked its worker threads).
    pool = None
    try:
        previous_total = counter_and_results[0]
        results = counter_and_results[1]
        # Since _update_neo4j will be run in a separate process, we must configure the database
        # URL every time the method is run.
        neomodel_config.DATABASE_URL = neo4j_url
        # Create a thread pool with 4 threads to speed up queries to cgit
        pool = ThreadPool(4)
        counter = 0
        for result in results:
            # Refresh the cgit lookup cache every 200 results
            if counter % 200 == 0:
                until = counter + 200
                if until > len(results):
                    until = len(results)
                # Because of the joins in the SQL query, we end up with several rows with the
                # same commit hash and we only want to query cgit once per commit
                unique_commits = set([(c['module'], c['sha'])
                                      for c in results[counter:until]])
                log.debug(
                    'Getting the author email addresses from cgit in parallel '
                    'for results {0} to {1}'.format(counter, until))
                repos_info = {
                    r['commit']: r
                    for r in pool.map(DistGitScraper._get_repo_info, unique_commits)
                }
                # This is no longer needed so it can be cleared to save RAM
                del unique_commits
            counter += 1
            log.info('Processing commit entry {0}/{1}'.format(
                previous_total + counter, total_results))
            repo_info = repos_info[result['sha']]
            if not repo_info.get('namespace'):
                log.info(
                    'Skipping nodes creation with commit ID {0}'.format(
                        result['commit_id']))
                continue
            log.debug(
                'Creating nodes associated with commit ID {0}'.format(
                    result['commit_id']))
            repo = DistGitRepo.get_or_create({
                'namespace': repo_info['namespace'],
                'name': result['module']
            })[0]
            commit = DistGitCommit.create_or_update({
                'author_date': result['author_date'],
                'commit_date': result['commit_date'],
                'hash_': result['sha'],
                # In case we get unicode characters in Python 2
                'log_message': bytes(result['log_message'], 'utf-8').decode()
            })[0]
            bug = BugzillaBug.get_or_create({'id_': result['bugzilla_id']})[0]
            log.debug(
                'Creating the user nodes associated with commit ID {0}'.
                format(result['commit_id']))
            author = User.create_or_update({
                'username': repo_info['author_username'],
                'email': repo_info['author_email']
            })[0]
            log.debug(
                'Creating the relationships associated with commit ID {0}'.
                format(result['commit_id']))
            repo.commits.connect(commit)
            commit.conditional_connect(commit.author, author)
            if result['bugzilla_type'] == 'related':
                commit.related_bugs.connect(bug)
            elif result['bugzilla_type'] == 'resolves':
                commit.resolved_bugs.connect(bug)
            elif result['bugzilla_type'] == 'reverted':
                commit.reverted_bugs.connect(bug)
            # This is no longer needed so it can be cleared to save RAM
            del repo_info
    finally:
        # Fix: shut down the thread pool so its worker threads don't leak
        if pool is not None:
            pool.close()
        # Close the DB connection after this is done processing
        db.driver.close()
def test_get_recent_nodes(client):
    """Test the get_recent_nodes function."""
    # Per-label keys the endpoint reports in its metadata
    id_dict = {
        FreshmakerEvent.__label__: 'id',
        BugzillaBug.__label__: 'id',
        DistGitCommit.__label__: 'hash',
        KojiBuild.__label__: 'id',
        Advisory.__label__: 'id'
    }
    timestamp_dict = {
        FreshmakerEvent.__label__: 'time_created',
        BugzillaBug.__label__: 'modified_time',
        DistGitCommit.__label__: 'commit_date',
        KojiBuild.__label__: 'completion_time',
        Advisory.__label__: 'update_date'
    }
    # Full serialized payload; BugzillaBugs are expected newest-first by
    # modified_time (22222, 33333, 11111)
    expected = {
        'data': {
            'Advisory': [{
                'advisory_name': 'RHBA-2017:27760-01',
                'update_date': '2017-05-30T11:44:38Z',
                'issue_date': None,
                'created_at': None,
                'state': None,
                'product_name': None,
                'security_sla': None,
                'synopsis': None,
                'security_impact': None,
                'status_time': None,
                'actual_ship_date': None,
                'release_date': None,
                'id': '66666',
                'display_name': 'RHBA-2017:27760-01',
                'resource_type': 'Advisory',
                'assigned_to': None,
                'attached_bugs': [],
                'attached_builds': [],
                'triggered_freshmaker_event': [],
                'reporter': None
            }],
            'DistGitCommit': [{
                'log_message': None,
                'author': None,
                'author_date': None,
                'koji_builds': [],
                'hash': '55555',
                'commit_date': '2017-05-02T11:44:38Z',
                'display_name': 'commit #55555',
                'resource_type': 'DistGitCommit',
                'related_bugs': [],
                'repos': [],
                'resolved_bugs': [],
                'reverted_bugs': []
            }],
            'FreshmakerEvent': [{
                'state_reason': None,
                'state_name': None,
                'time_created': '2017-05-30T11:44:38Z',
                'time_done': None,
                'id': '77777',
                'display_name': 'Freshmaker event 77777',
                'resource_type': 'FreshmakerEvent',
                'requested_builds': [],
                'successful_koji_builds': [],
                'triggered_by_advisory': None,
            }],
            'KojiBuild': [{
                'name': 'slf4j',
                'start_time': None,
                'creation_time': None,
                'state': None,
                'completion_time': '2017-05-27T11:44:38Z',
                'epoch': None,
                'version': '1.7.4',
                'release': '4.el7_4',
                'id': '44444',
                'display_name': 'slf4j-1.7.4-4.el7_4',
                'resource_type': 'KojiBuild',
                'module_builds': [],
                'owner': None,
                'commit': None,
                'advisories': []
            }],
            'BugzillaBug': [{
                'status': None,
                'severity': None,
                'resolution': None,
                'product_version': None,
                'creation_time': None,
                'modified_time': '2017-06-26T11:44:38Z',
                'product_name': None,
                'priority': None,
                'short_description': None,
                'target_milestone': None,
                'id': '22222',
                'display_name': 'RHBZ#22222',
                'resource_type': 'BugzillaBug',
                'assignee': None,
                'attached_advisories': [],
                'qa_contact': None,
                'related_by_commits': [],
                'reporter': None,
                'resolved_by_commits': [],
                'reverted_by_commits': []
            }, {
                'status': None,
                'severity': None,
                'resolution': None,
                'product_version': None,
                'creation_time': None,
                'modified_time': '2017-05-26T11:44:38Z',
                'product_name': None,
                'priority': None,
                'short_description': None,
                'target_milestone': None,
                'id': '33333',
                'display_name': 'RHBZ#33333',
                'resource_type': 'BugzillaBug',
                'assignee': None,
                'attached_advisories': [],
                'qa_contact': None,
                'related_by_commits': [],
                'reporter': None,
                'resolved_by_commits': [],
                'reverted_by_commits': []
            }, {
                'status': None,
                'severity': None,
                'resolution': None,
                'product_version': None,
                'creation_time': None,
                'modified_time': '2017-04-26T11:44:38Z',
                'product_name': None,
                'priority': None,
                'short_description': None,
                'target_milestone': None,
                'id': '11111',
                'display_name': 'RHBZ#11111',
                'resource_type': 'BugzillaBug',
                'assignee': None,
                'attached_advisories': [],
                'qa_contact': None,
                'related_by_commits': [],
                'reporter': None,
                'resolved_by_commits': [],
                'reverted_by_commits': []
            }]
        },
        'metadata': {
            'id_keys': id_dict,
            'timestamp_keys': timestamp_dict
        }
    }
    # Create one (or more) nodes of each label with distinct timestamps
    BugzillaBug.get_or_create({
        'id_': '11111',
        'modified_time': datetime(2017, 4, 26, 11, 44, 38)
    })
    BugzillaBug.get_or_create({
        'id_': '22222',
        'modified_time': datetime(2017, 6, 26, 11, 44, 38)
    })
    BugzillaBug.get_or_create({
        'id_': '33333',
        'modified_time': datetime(2017, 5, 26, 11, 44, 38)
    })
    KojiBuild.get_or_create({
        'id_': '44444',
        'completion_time': datetime(2017, 5, 27, 11, 44, 38),
        'name': 'slf4j',
        'version': '1.7.4',
        'release': '4.el7_4'
    })
    DistGitCommit.get_or_create({
        'hash_': '55555',
        'commit_date': datetime(2017, 5, 2, 11, 44, 38)
    })
    Advisory.get_or_create({
        'id_': '66666',
        'update_date': datetime(2017, 5, 30, 11, 44, 38),
        'advisory_name': 'RHBA-2017:27760-01'
    })
    FreshmakerEvent.get_or_create({
        'id_': '77777',
        'time_created': datetime(2017, 5, 30, 11, 44, 38),
    })
    rv = client.get('/api/v1/recents')
    assert rv.status_code == 200
    assert json.loads(rv.data.decode('utf-8')) == expected
def update_neo4j(self, results):
    """
    Update Neo4j with the dist-git commit and push information from Teiid.

    :param list results: a list of dictionaries
    """
    pool = Pool(processes=8)
    # NOTE(review): the pool is never closed/joined here; its worker processes
    # live until this process exits — confirm whether that is intentional.
    counter = 0
    for result in results:
        # Refresh the cgit lookup cache every 200 rows
        if counter % 200 == 0:
            until = counter + 200
            if until > len(results):
                until = len(results)
            # Because of the joins in the SQL query, we end up with several rows with the same
            # commit hash and we only want to query cgit once per commit
            unique_commits = set([(c['module'], c['sha'])
                                  for c in results[counter:until]])
            log.debug(
                'Getting the author and committer email addresses from cgit in parallel '
                'for results {0} to {1}'.format(counter, until))
            repos_info = {}
            for _r in pool.map(DistGitScraper._get_repo_info, unique_commits):
                r = json.loads(_r)
                repos_info[r['commit']] = r
            # This is no longer needed so it can be cleared to save RAM
            del unique_commits
            # A lot of RAM was allocated or used up, so let's call gc.collect() to ensure it
            # is removed
            gc.collect()
        counter += 1
        log.info('Processing commit and push entry {0}/{1}'.format(
            str(counter), str(len(results))))
        repo_info = repos_info[result['sha']]
        if not repo_info.get('namespace'):
            # The cgit lookup failed for this commit, so skip the row entirely
            log.info(
                'Skipping nodes creation with commit ID {0} and push ID {1}'
                .format(result['commit_id'], result['push_id']))
            continue
        log.debug(
            'Creating nodes associated with commit ID {0} and push ID {1}'.
            format(result['commit_id'], result['push_id']))
        repo = DistGitRepo.get_or_create({
            'namespace': repo_info['namespace'],
            'name': result['module']
        })[0]
        # The branch name is the last path component of the git ref
        branch_name = result['ref'].rsplit('/', 1)[1]
        branch = DistGitBranch.get_or_create({
            'name': branch_name,
            'repo_namespace': repo_info['namespace'],
            'repo_name': result['module']
        })[0]
        commit = DistGitCommit.create_or_update({
            'author_date': result['author_date'],
            'commit_date': result['commit_date'],
            'hash_': result['sha'],
            # In case we get unicode characters in Python 2
            'log_message': bytes(result['log_message'], 'utf-8').decode()
        })[0]
        push = DistGitPush.get_or_create({
            'id_': result['push_id'],
            'push_date': result['push_date'],
            'push_ip': result['push_ip']
        })[0]
        bug = BugzillaBug.get_or_create({'id_': result['bugzilla_id']})[0]
        log.debug(
            'Creating the user nodes associated with commit ID {0} and push ID {1}'
            .format(result['commit_id'], result['push_id']))
        author = User.create_or_update({
            'username': repo_info['author_username'],
            'email': repo_info['author_email']
        })[0]
        committer = User.create_or_update({
            'username': repo_info['committer_username'],
            'email': repo_info['committer_email']
        })[0]
        pusher = User.get_or_create({'username': result['pusher']})[0]
        log.debug(
            'Creating the relationships associated with commit ID {0} and push ID {1}'
            .format(result['commit_id'], result['push_id']))
        repo.contributors.connect(author)
        repo.contributors.connect(committer)
        repo.contributors.connect(pusher)
        repo.commits.connect(commit)
        repo.pushes.connect(push)
        repo.branches.connect(branch)
        branch.contributors.connect(author)
        branch.contributors.connect(committer)
        branch.contributors.connect(pusher)
        branch.commits.connect(commit)
        branch.pushes.connect(push)
        push.conditional_connect(push.pusher, pusher)
        push.commits.connect(commit)
        commit.conditional_connect(commit.author, author)
        commit.conditional_connect(commit.committer, committer)
        # Link to the parent commit when cgit reported one
        if repo_info['parent']:
            parent_commit = DistGitCommit.get_or_create(
                {'hash_': repo_info['parent']})[0]
            commit.conditional_connect(commit.parent, parent_commit)
        if result['bugzilla_type'] == 'related':
            commit.related_bugs.connect(bug)
        elif result['bugzilla_type'] == 'resolves':
            commit.resolved_bugs.connect(bug)
        elif result['bugzilla_type'] == 'reverted':
            commit.reverted_bugs.connect(bug)
        # This is no longer needed so it can be cleared to save RAM
        del repo_info
def test_get_recent_nodes():
    """Test the get_recent_nodes function."""
    # Seed one or more nodes of every label the recents view covers. The
    # second Advisory entry re-touches id 66666 with a null timestamp.
    seed_data = (
        (BugzillaBug, {'id_': '11111', 'modified_time': datetime(2017, 4, 26, 11, 44, 38)}),
        (BugzillaBug, {'id_': '22222', 'modified_time': datetime(2017, 6, 26, 11, 44, 38)}),
        (BugzillaBug, {'id_': '33333', 'modified_time': datetime(2017, 5, 26, 11, 44, 38)}),
        (KojiBuild, {'id_': '44444', 'completion_time': datetime(2017, 5, 27, 11, 44, 38)}),
        (DistGitCommit, {'hash_': '55555', 'commit_date': datetime(2017, 5, 2, 11, 44, 38)}),
        (Advisory, {'id_': '66666', 'update_date': datetime(2017, 5, 30, 11, 44, 38)}),
        (Advisory, {'id_': '66666', 'update_date': None}),
        (FreshmakerEvent, {'id_': '77777', 'time_created': datetime(2017, 5, 30, 11, 44, 38)}),
    )
    for model, properties in seed_data:
        model.get_or_create(properties)

    nodes, meta = estuary.utils.recents.get_recent_nodes()

    # The single node created for each of these labels must be reported first.
    for label, id_key, id_value in (
        ('Advisory', 'id', '66666'),
        ('DistGitCommit', 'hash', '55555'),
        ('FreshmakerEvent', 'id', '77777'),
        ('KojiBuild', 'id', '44444'),
    ):
        assert nodes[label][0][id_key] == id_value
    # BugzillaBugs must come back ordered newest-modified first.
    assert nodes['BugzillaBug'][0]['id'] == '22222'
    assert nodes['BugzillaBug'][1]['id'] == '33333'
    assert nodes['BugzillaBug'][2]['id'] == '11111'

    # The metadata must expose, per label, which property identifies a node
    # and which property was used for the recency ordering.
    expected_id_keys = {
        FreshmakerEvent.__label__: 'id',
        BugzillaBug.__label__: 'id',
        DistGitCommit.__label__: 'hash',
        KojiBuild.__label__: 'id',
        Advisory.__label__: 'id',
    }
    expected_timestamp_keys = {
        FreshmakerEvent.__label__: 'time_created',
        BugzillaBug.__label__: 'modified_time',
        DistGitCommit.__label__: 'commit_date',
        KojiBuild.__label__: 'completion_time',
        Advisory.__label__: 'update_date',
    }
    assert meta['id_keys'] == expected_id_keys
    assert meta['timestamp_keys'] == expected_timestamp_keys
def test_get_resources(client, resource, uid, expected):
    """Test getting a resource from Neo4j with its relationships.

    Parametrized (presumably via pytest fixtures/params — the parametrize
    decorator is outside this view): ``resource`` is the API resource type,
    ``uid`` the resource identifier, and ``expected`` the full JSON payload
    the endpoint must return.
    """
    # --- User fixtures (email/username values are masked in this source) ---
    tbrady = User.get_or_create({
        'email': '*****@*****.**',
        'username': '******'
    })[0]
    mprahl = User.get_or_create({
        'email': '*****@*****.**',
        'username': '******'
    })[0]
    jsmith = User.get_or_create({
        'email': '*****@*****.**',
        'username': '******'
    })[0]
    # --- Dist-git commit fixtures; log messages reference bug 12345 ---
    commit = DistGitCommit.get_or_create({
        'author_date': datetime(2017, 4, 26, 11, 44, 38),
        'commit_date': datetime(2017, 4, 26, 11, 44, 38),
        'hash_': '8a63adb248ba633e200067e1ad6dc61931727bad',
        'log_message': 'Related: #12345 - fix xyz'
    })[0]
    commit_two = DistGitCommit.get_or_create({
        'author_date': datetime(2017, 4, 27, 11, 44, 38),
        'commit_date': datetime(2017, 4, 27, 11, 44, 38),
        'hash_': '1263adb248ba633e205067e1ad6dc61931727c2d',
        'log_message': 'Related: #12345 - fix xz'
    })[0]
    commit_three = DistGitCommit.get_or_create({
        'author_date': datetime(2017, 4, 27, 11, 44, 38),
        'commit_date': datetime(2017, 4, 27, 11, 44, 38),
        'hash_': '5663adb248ba633e205067e1ad6dc61931727123',
        'log_message': 'Revert: #12345'
    })[0]
    # --- Bugzilla bug fixtures ---
    bug = BugzillaBug.get_or_create({
        'classification': 'Red Hat',
        'creation_time': datetime(2017, 4, 2, 19, 39, 6),
        'id_': '12345',
        'modified_time': datetime(2018, 2, 7, 19, 30, 47),
        'priority': 'high',
        'product_name': 'Red Hat Enterprise Linux',
        'product_version': '7.5',
        'resolution': '',
        'severity': 'low',
        'short_description': 'Some description',
        'status': 'VERIFIED',
        'target_milestone': 'rc',
        'votes': 0
    })[0]
    bug_two = BugzillaBug.get_or_create({
        'classification': 'Red Hat',
        'creation_time': datetime(2017, 4, 2, 19, 39, 6),
        'id_': '67890',
        'modified_time': datetime(2018, 2, 7, 19, 30, 47),
        'priority': 'medium',
        'product_name': 'Red Hat Enterprise Linux',
        'product_version': '7.3',
        'resolution': '',
        'severity': 'low',
        'short_description': 'Some description',
        'status': 'VERIFIED',
        'target_milestone': 'rc',
        'votes': 0
    })[0]
    bug_three = BugzillaBug.get_or_create({
        'classification': 'Red Hat',
        'creation_time': datetime(2017, 4, 2, 19, 39, 6),
        'id_': '272895',
        'modified_time': datetime(2018, 2, 7, 19, 30, 47),
        'priority': 'low',
        'product_name': 'Satellite',
        'product_version': '3',
        'resolution': '',
        'severity': 'medium',
        'short_description': 'Some description',
        'status': 'VERIFIED',
        'target_milestone': 'rc',
        'votes': 0
    })[0]
    # --- Repo / branch / build / tag / advisory / Freshmaker fixtures ---
    repo = DistGitRepo.get_or_create({
        'name': 'some_repo',
        'namespace': 'some_namespace',
    })[0]
    branch = DistGitBranch.get_or_create({
        'name': 'some_branch_name',
        'repo_name': 'some_repo_name',
        'repo_namespace': 'some_repo_namespace'
    })[0]
    build = KojiBuild.get_or_create({
        'completion_time': datetime(2017, 4, 2, 19, 39, 6),
        'creation_time': datetime(2017, 4, 2, 19, 39, 6),
        'epoch': '0',
        'id_': '2345',
        'name': 'slf4j',
        'release': '4.el7_4',
        'start_time': datetime(2017, 4, 2, 19, 39, 6),
        'state': 1,
        'version': '1.7.4'
    })[0]
    tag = KojiTag.get_or_create({
        'id_': '2702',
        'name': 'some_active_tag'
    })[0]
    advisory = Advisory.get_or_create({
        'actual_ship_date': datetime(2017, 8, 1, 15, 43, 51),
        'advisory_name': 'RHBA-2017:2251-02',
        'content_types': ['docker'],
        'created_at': datetime(2017, 4, 3, 14, 47, 23),
        'id_': '27825',
        'issue_date': datetime(2017, 8, 1, 5, 59, 34),
        'product_name': 'Red Hat Enterprise Linux',
        'product_short_name': 'RHEL',
        'security_impact': 'None',
        'state': 'SHIPPED_LIVE',
        'status_time': datetime(2017, 8, 1, 15, 43, 51),
        'synopsis': 'cifs-utils bug fix update',
        'update_date': datetime(2017, 8, 1, 7, 16)
    })[0]
    fm_event = FreshmakerEvent.get_or_create({
        'event_type_id': 8,
        'id_': '1180',
        'message_id': 'ID:messaging-devops-broker01.test',
        'state': 2,
        'state_name': 'COMPLETE',
        'state_reason': 'All container images have been rebuilt',
        'time_created': datetime(2019, 8, 21, 13, 42, 20),
        'time_done': datetime(2099, 8, 21, 13, 42, 20)
    })[0]
    fm_build = FreshmakerBuild.get_or_create({
        'id_': 398,
        'build_id': 15639305,
        'dep_on': "jboss-eap-7-eap70-openshift-docker",
        'name': "metrics-hawkular-metrics-docker",
        'original_nvr': "metrics-hawkular-metrics-docker-v3.7.23-10",
        'rebuilt_nvr': "metrics-hawkular-metrics-docker-v3.7.23-10.1522094767",
        'state': 1,
        'state_name': "DONE",
        'state_reason': "Built successfully.",
        'time_completed': datetime(2017, 4, 2, 19, 39, 6),
        'time_submitted': datetime(2017, 4, 2, 19, 39, 6),
        'type_': 1,
        'type_name': "IMAGE",
        'url': "/api/1/builds/398"
    })[0]
    cb = ContainerKojiBuild.get_or_create({
        'completion_time': datetime(2017, 4, 2, 19, 39, 6),
        'creation_time': datetime(2017, 4, 2, 19, 39, 6),
        'epoch': '0',
        'id_': '710',
        'name': 'slf4j_2',
        'release': '4.el7_4_as',
        'start_time': datetime(2017, 4, 2, 19, 39, 6),
        'state': 1,
        'version': '1.7.4'
    })[0]
    # Only wire up the relationships relevant to the resource type under test,
    # so each parametrized case exercises exactly one resource's neighborhood.
    if resource == 'bugzillabug':
        bug.assignee.connect(mprahl)
        bug.qa_contact.connect(jsmith)
        bug.reporter.connect(tbrady)
        commit.resolved_bugs.connect(bug)
        commit_two.resolved_bugs.connect(bug)
        commit_three.reverted_bugs.connect(bug)
        advisory.attached_bugs.connect(bug)
    if resource == 'distgitcommit':
        commit.author.connect(tbrady)
        commit.parent.connect(commit_two)
        commit_three.parent.connect(commit)
        commit.related_bugs.connect(bug)
        commit.related_bugs.connect(bug_three)
        commit.reverted_bugs.connect(bug_two)
        repo.commits.connect(commit)
        branch.commits.connect(commit)
        commit.resolved_bugs.connect(bug)
        commit.resolved_bugs.connect(bug_two)
    if resource == 'kojibuild':
        build.owner.connect(mprahl)
        build.commit.connect(commit_two)
        tag.builds.connect(build)
    if resource == 'advisory':
        advisory.assigned_to.connect(mprahl)
        advisory.reporter.connect(jsmith)
        advisory.attached_builds.connect(build)
        advisory.attached_bugs.connect(bug)
    if resource == 'freshmakerevent':
        fm_event.triggered_by_advisory.connect(advisory)
        fm_event.successful_koji_builds.connect(cb)
        fm_event.requested_builds.connect(fm_build)
    if resource == 'containerbuild':
        fm_event.successful_koji_builds.connect(cb)
    # Hit the resource endpoint and compare the entire serialized payload.
    rv = client.get('/api/v1/{0}/{1}'.format(resource, uid))
    assert rv.status_code == 200
    assert json.loads(rv.data.decode('utf-8')) == expected
def advisory_handler(self, msg):
    """
    Handle Errata tool advisory change messages and update Neo4j if necessary.

    Fetches the full erratum (and, for non-embargoed advisories, its product
    and the reporter/assignee users) from the Errata tool REST API, then
    creates or updates the corresponding ``Advisory``/``ContainerAdvisory``
    node and its relationships.

    :param dict msg: a message to be processed
    """
    advisory_id = msg['body']['headers']['errata_id']
    erratum_url = '{0}/api/v1/erratum/{1}'.format(
        self.config['estuary_updater.errata_url'].rstrip('/'), advisory_id)
    # All Errata tool requests authenticate via Kerberos.
    response = requests.get(
        erratum_url, auth=requests_kerberos.HTTPKerberosAuth(), timeout=10)
    advisory_json = response.json()
    # The erratum payload nests the advisory data under its type (e.g. "rhba").
    advisory_type = msg['body']['headers']['type'].lower()
    advisory_info = advisory_json['errata'][advisory_type]
    # Embargoed advisories arrive with their synopsis redacted.
    embargoed = msg['body']['headers']['synopsis'] == 'REDACTED'
    # We can't store information on embargoed advisories other than the ID
    if not embargoed:
        product_url = '{0}/products/{1}.json'.format(
            self.config['estuary_updater.errata_url'].rstrip('/'),
            advisory_info['product_id'])
        response = requests.get(
            product_url, auth=requests_kerberos.HTTPKerberosAuth(), timeout=10)
        product_json = response.json()
        reporter_url = '{0}/api/v1/user/{1}'.format(
            self.config['estuary_updater.errata_url'].rstrip('/'),
            advisory_info['reporter_id'])
        response = requests.get(
            reporter_url, auth=requests_kerberos.HTTPKerberosAuth(), timeout=10)
        reporter_json = response.json()
        # Usernames are derived from the login name with any email domain dropped.
        reporter = User.create_or_update({
            'username': reporter_json['login_name'].split('@')[0],
            'email': reporter_json['email_address']
        })[0]
        assigned_to_url = '{0}/api/v1/user/{1}'.format(
            self.config['estuary_updater.errata_url'].rstrip('/'),
            advisory_info['assigned_to_id'])
        response = requests.get(
            assigned_to_url, auth=requests_kerberos.HTTPKerberosAuth(), timeout=10)
        assigned_to_json = response.json()
        assigned_to = User.create_or_update({
            'username': assigned_to_json['login_name'].split('@')[0],
            'email': assigned_to_json['email_address']
        })[0]
        advisory_params = {
            'advisory_name': advisory_info['fulladvisory'],
            'id_': advisory_id,
            'product_name': product_json['product']['name'],
            'security_impact': advisory_info['security_impact'],
            'state': advisory_info['status'],
            'synopsis': msg['body']['headers']['synopsis']
        }
        # Copy over each timestamp that is set, translating Errata's
        # "status_updated_at" to Estuary's "status_time" property name.
        for dt in ('actual_ship_date', 'created_at', 'issue_date', 'release_date',
                   'security_sla', 'status_updated_at', 'update_date'):
            if advisory_info[dt]:
                if dt == 'status_updated_at':
                    estuary_key = 'status_time'
                else:
                    estuary_key = dt
                advisory_params[estuary_key] = timestamp_to_datetime(
                    advisory_info[dt])
    else:
        advisory_params = {
            'id_': advisory_id,
            # Set this to REDACTED and it'll be updated when it becomes public
            'advisory_name': 'REDACTED'
        }
    # NOTE(review): content_types is read even for embargoed advisories —
    # this assumes the redacted erratum payload still includes that field;
    # confirm against the Errata tool API.
    if 'docker' in advisory_info['content_types']:
        try:
            advisory = ContainerAdvisory.create_or_update(
                advisory_params)[0]
        except neomodel.exceptions.ConstraintValidationFailed:
            # This must have errantly been created as an Advisory instead of a
            # ContainerAdvisory, so let's fix that.
            advisory = Advisory.nodes.get_or_none(id_=advisory_id)
            if not advisory:
                # If there was a constraint validation failure and the advisory isn't just
                # the wrong label, then we can't recover.
                raise
            advisory.add_label(ContainerAdvisory.__label__)
            advisory = ContainerAdvisory.create_or_update(
                advisory_params)[0]
    else:
        # Check to see if a ContainerAdvisory using this id already exists, and if so remove its
        # label because it should not be a ContainerAdvisory if docker isn't a content type.
        container_adv = ContainerAdvisory.nodes.get_or_none(
            id_=advisory_id)
        if container_adv:
            container_adv.remove_label(ContainerAdvisory.__label__)
        advisory = Advisory.create_or_update(advisory_params)[0]
    if not embargoed:
        # conditional_connect replaces/sets the single-cardinality user relations.
        advisory.conditional_connect(advisory.reporter, reporter)
        advisory.conditional_connect(advisory.assigned_to, assigned_to)
        bugs = advisory_json['bugs']['bugs']
        for bug in bugs:
            # Rebinds the loop variable: the raw bug dict is swapped for the
            # BugzillaBug node created/fetched by its ID.
            bug = BugzillaBug.get_or_create({'id_': bug['bug']['id']})[0]
            advisory.attached_bugs.connect(bug)
def test_get_stories(client, resource, uid, expected):
    """Test getting a resource story from Neo4j with its relationships.

    Parametrized (presumably via pytest fixtures/params — the parametrize
    decorator is outside this view): ``resource``/``uid`` pick the story's
    starting node and ``expected`` is the full JSON story payload.
    """
    # --- Fixtures forming one story chain: commit -> build -> advisory ->
    # --- Freshmaker event -> container build, with two attached bugs.
    commit = DistGitCommit.get_or_create({
        'author_date': datetime(2017, 4, 26, 11, 44, 38),
        'commit_date': datetime(2017, 4, 26, 11, 44, 38),
        'hash_': '8a63adb248ba633e200067e1ad6dc61931727bad',
        'log_message': 'Related: #12345 - fix xyz'
    })[0]
    advisory = Advisory.get_or_create({
        'actual_ship_date': datetime(2017, 8, 1, 15, 43, 51),
        'advisory_name': 'RHBA-2017:2251-02',
        'content_types': ['docker'],
        'created_at': datetime(2017, 4, 3, 14, 47, 23),
        'id_': '27825',
        'issue_date': datetime(2017, 8, 1, 5, 59, 34),
        'product_name': 'Red Hat Enterprise Linux',
        'product_short_name': 'RHEL',
        'security_impact': 'None',
        'state': 'SHIPPED_LIVE',
        'status_time': datetime(2017, 8, 1, 15, 43, 51),
        'synopsis': 'cifs-utils bug fix update',
        'type_': 'RHBA',
        'update_date': datetime(2017, 8, 1, 7, 16),
        'updated_at': datetime(2017, 8, 1, 15, 43, 51)
    })[0]
    bug = BugzillaBug.get_or_create({
        'classification': 'Red Hat',
        'creation_time': datetime(2017, 4, 2, 19, 39, 6),
        'id_': '12345',
        'modified_time': datetime(2018, 2, 7, 19, 30, 47),
        'priority': 'high',
        'product_name': 'Red Hat Enterprise Linux',
        'product_version': '7.5',
        'resolution': '',
        'severity': 'low',
        'short_description': 'Some description',
        'status': 'VERIFIED',
        'target_milestone': 'rc',
        'votes': 0
    })[0]
    bug_two = BugzillaBug.get_or_create({
        'classification': 'Red Hat',
        'creation_time': datetime(2017, 4, 2, 6, 43, 58),
        'id_': '5555',
        'modified_time': datetime(2017, 12, 5, 10, 12, 47),
        'priority': 'unspecified',
        'product_name': 'Red Hat CloudForms Management Engine',
        'product_version': '5.7.0',
        'resolution': 'WORKSFORME',
        'severity': 'unspecified',
        'short_description': 'Fail to delete OSP tenant by CFME',
        'status': 'CLOSED',
        'target_milestone': 'GA',
        'votes': 0
    })[0]
    build = KojiBuild.get_or_create({
        'completion_time': datetime(2017, 4, 2, 19, 39, 6),
        'creation_time': datetime(2017, 4, 2, 19, 39, 6),
        'epoch': '0',
        'id_': '2345',
        'name': 'slf4j',
        'release': '4.el7_4',
        'start_time': datetime(2017, 4, 2, 19, 39, 6),
        'state': 1,
        'version': '1.7.4'
    })[0]
    fm_event = FreshmakerEvent.get_or_create({
        'event_type_id': 8,
        'id_': '1180',
        'message_id': 'ID:messaging-devops-broker01.test',
        'state': 2,
        'state_name': 'COMPLETE',
        'state_reason': 'All container images have been rebuilt.',
        'url': '/api/1/events/1180'
    })[0]
    cb = ContainerKojiBuild.get_or_create({
        'completion_time': datetime(2017, 4, 2, 19, 39, 6),
        'creation_time': datetime(2017, 4, 2, 19, 39, 6),
        'epoch': '0',
        'id_': '710',
        'name': 'slf4j_2',
        'release': '4.el7_4_as',
        'start_time': datetime(2017, 4, 2, 19, 39, 6),
        'state': 1,
        'version': '1.7.4'
    })[0]
    # Wire the story relationships (unlike test_get_resources, all cases
    # share this single graph).
    commit.resolved_bugs.connect(bug_two)
    commit.resolved_bugs.connect(bug)
    commit.koji_builds.connect(build)
    build.advisories.connect(advisory)
    advisory.attached_builds.connect(build)
    fm_event.triggered_by_advisory.connect(advisory)
    fm_event.triggered_container_builds.connect(cb)
    # Hit the story endpoint and compare the entire serialized payload.
    rv = client.get('/api/v1/story/{0}/{1}'.format(resource, uid))
    assert rv.status_code == 200
    assert json.loads(rv.data.decode('utf-8')) == expected