def create_or_update_freshmaker_build(self, build, event_id):
    """
    Create or update a FreshmakerBuild.

    :param dict build: the build represented in Freshmaker being created or updated
    :param int event_id: the id of the Freshmaker event
    :return: the created/updated FreshmakerBuild or None if it cannot be created
    :rtype: FreshmakerBuild or None
    """
    log.debug('Creating FreshmakerBuild {0}'.format(build['build_id']))
    fb_params = {
        'id_': build['id'],
        'dep_on': build['dep_on'],
        'name': build['name'],
        'original_nvr': build['original_nvr'],
        'rebuilt_nvr': build['rebuilt_nvr'],
        'state_name': build['state_name'],
        'state_reason': build['state_reason'],
        'time_submitted': timestamp_to_datetime(build['time_submitted']),
        'type_name': build['type_name'],
    }
    # Optional fields are only included when they carry a value
    if build['time_completed']:
        fb_params['time_completed'] = timestamp_to_datetime(build['time_completed'])
    if build['build_id']:
        fb_params['build_id'] = build['build_id']
    return FreshmakerBuild.create_or_update(fb_params)[0]
def commit_handler(self, msg):
    """
    Handle a dist-git commit message and update Neo4j if necessary.

    :param dict msg: a message to be processed
    """
    headers = msg['headers']
    body_msg = msg['body']['msg']
    repo = DistGitRepo.get_or_create({
        'namespace': headers['namespace'],
        'name': headers['repo']
    })[0]
    # Get the username from the email if the email is a Red Hat email
    email = headers['email'].lower()
    username = email.split('@redhat.com')[0] if email.endswith('@redhat.com') else email
    author = User.create_or_update({
        'username': username,
        'email': email
    })[0]
    commit_message = body_msg['message']
    commit = DistGitCommit.create_or_update({
        'hash_': headers['rev'],
        'log_message': commit_message,
        'author_date': timestamp_to_datetime(body_msg['author_date']),
        'commit_date': timestamp_to_datetime(body_msg['commit_date'])
    })[0]
    bug_rel_mapping = self.parse_bugzilla_bugs(commit_message)
    # Connect each category of bug parsed from the commit message to the
    # matching relationship on the commit node
    bug_relationships = (
        ('resolves', commit.resolved_bugs),
        ('related', commit.related_bugs),
        ('reverted', commit.reverted_bugs),
    )
    for category, relationship in bug_relationships:
        for bug_id in bug_rel_mapping[category]:
            bug = BugzillaBug.get_or_create({'id_': bug_id})[0]
            relationship.connect(bug)
    commit.conditional_connect(commit.author, author)
    repo.commits.connect(commit)
def builds_added_handler(self, msg):
    """
    Handle an Errata tool builds added message and update Neo4j if necessary.

    :param dict msg: a message to be processed
    """
    headers = msg['body']['headers']
    # We can't store information on embargoed advisories other than the ID
    if headers['brew_build'] == 'REDACTED':
        return

    advisory = Advisory.get_or_create({'id_': headers['errata_id']})[0]
    koji_build = self.get_or_create_build(headers['brew_build'])
    # Strip the trailing " UTC" so the timestamp parses cleanly
    when = headers['when']
    if when.endswith(' UTC'):
        when = when[:-len(' UTC')]
    time_attached = timestamp_to_datetime(when)
    attached_rel = advisory.attached_builds.relationship(koji_build)
    if not attached_rel:
        advisory.attached_builds.connect(koji_build, {'time_attached': time_attached})
    elif attached_rel.time_attached != time_attached:
        # The relationship exists but with a stale timestamp; refresh it
        advisory.attached_builds.replace(koji_build, {'time_attached': time_attached})
def bug_handler(self, msg):
    """
    Handle a modified or created Bugzilla bug and update Neo4j if necessary.

    :param dict msg: a message to be processed
    """
    bug_data = msg['body']['msg']['bug']
    bug_params = {
        'id_': str(bug_data['id']),
        'creation_time': timestamp_to_datetime(bug_data['creation_time']),
        'modified_time': timestamp_to_datetime(bug_data['last_change_time']),
        'priority': bug_data['priority'],
        'product_name': bug_data['product']['name'],
        'product_version': bug_data['version']['name'],
        'resolution': bug_data['resolution'],
        'severity': bug_data['severity'],
        'short_description': bug_data['summary'],
        'status': bug_data['status']['name'],
        'target_milestone': bug_data['target_milestone']['name'],
    }

    def _user_from_login(login):
        # The portion of the login before the "@" doubles as the username
        return User.create_or_update({
            'username': login.split('@')[0],
            'email': login
        })[0]

    assignee = _user_from_login(bug_data['assigned_to']['login'])
    qa_contact = _user_from_login(bug_data['qa_contact']['login'])
    reporter = _user_from_login(bug_data['reporter']['login'])
    bug = BugzillaBug.create_or_update(bug_params)[0]
    bug.conditional_connect(bug.assignee, assignee)
    bug.conditional_connect(bug.qa_contact, qa_contact)
    bug.conditional_connect(bug.reporter, reporter)
def event_state_handler(self, msg):
    """
    Handle a Freshmaker event state changed message and update Neo4j if necessary.

    :param dict msg: a message to be processed
    """
    inner_msg = msg['body']['msg']
    msg_id = inner_msg['message_id']
    # Dry-run events are simulations and must not be recorded
    if inner_msg.get('dry_run'):
        return

    event_params = {
        'id_': str(inner_msg['id']),
        'state_name': inner_msg['state_name'],
        'state_reason': inner_msg['state_reason']
    }
    if 'time_created' in inner_msg:
        event_params['time_created'] = timestamp_to_datetime(inner_msg['time_created'])
    if inner_msg.get('time_done') is not None:
        event_params['time_done'] = timestamp_to_datetime(inner_msg['time_done'])
    event = FreshmakerEvent.create_or_update(event_params)[0]

    # The advisory name is the last dot-separated component of the message ID
    advisory_name = msg_id.rsplit('.', 1)[-1]
    if not advisory_name.startswith(('RHSA', 'RHBA', 'RHEA')):
        # Use log.warning: Logger.warn is a deprecated alias
        log.warning(
            'Unable to parse the advisory name from the Freshmaker message_id: {0}'
            .format(msg_id))
        advisory_name = None
    advisory = Advisory.get_or_create({
        'id_': inner_msg['search_key'],
        'advisory_name': advisory_name
    })[0]
    event.conditional_connect(event.triggered_by_advisory, advisory)
def advisory_handler(self, msg):
    """
    Handle an Errata tool advisory changes and update Neo4j if necessary.

    :param dict msg: a message to be processed
    """
    errata_url = self.config['estuary_updater.errata_url'].rstrip('/')

    def _get_json(url):
        # Every Errata tool endpoint requires Kerberos authentication; this
        # helper replaces four previously duplicated request call sites
        response = requests.get(
            url, auth=requests_kerberos.HTTPKerberosAuth(), timeout=10)
        return response.json()

    advisory_id = msg['body']['headers']['errata_id']
    advisory_json = _get_json('{0}/api/v1/erratum/{1}'.format(errata_url, advisory_id))
    advisory_type = msg['body']['headers']['type'].lower()
    advisory_info = advisory_json['errata'][advisory_type]
    embargoed = msg['body']['headers']['synopsis'] == 'REDACTED'
    # We can't store information on embargoed advisories other than the ID
    if not embargoed:
        product_json = _get_json('{0}/products/{1}.json'.format(
            errata_url, advisory_info['product_id']))
        reporter_json = _get_json('{0}/api/v1/user/{1}'.format(
            errata_url, advisory_info['reporter_id']))
        reporter = User.create_or_update({
            'username': reporter_json['login_name'].split('@')[0],
            'email': reporter_json['email_address']
        })[0]
        assigned_to_json = _get_json('{0}/api/v1/user/{1}'.format(
            errata_url, advisory_info['assigned_to_id']))
        assigned_to = User.create_or_update({
            'username': assigned_to_json['login_name'].split('@')[0],
            'email': assigned_to_json['email_address']
        })[0]
        advisory_params = {
            'advisory_name': advisory_info['fulladvisory'],
            'id_': advisory_id,
            'product_name': product_json['product']['name'],
            'security_impact': advisory_info['security_impact'],
            'state': advisory_info['status'],
            'synopsis': msg['body']['headers']['synopsis']
        }
        for dt in ('actual_ship_date', 'created_at', 'issue_date', 'release_date',
                   'security_sla', 'status_updated_at', 'update_date'):
            if advisory_info[dt]:
                # Errata's "status_updated_at" is stored as "status_time" in Estuary
                estuary_key = 'status_time' if dt == 'status_updated_at' else dt
                advisory_params[estuary_key] = timestamp_to_datetime(advisory_info[dt])
    else:
        advisory_params = {
            'id_': advisory_id,
            # Set this to REDACTED and it'll be updated when it becomes public
            'advisory_name': 'REDACTED'
        }

    if 'docker' in advisory_info['content_types']:
        try:
            advisory = ContainerAdvisory.create_or_update(advisory_params)[0]
        except neomodel.exceptions.ConstraintValidationFailed:
            # This must have errantly been created as an Advisory instead of a
            # ContainerAdvisory, so let's fix that.
            advisory = Advisory.nodes.get_or_none(id_=advisory_id)
            if not advisory:
                # If there was a constraint validation failure and the advisory isn't just
                # the wrong label, then we can't recover.
                raise
            advisory.add_label(ContainerAdvisory.__label__)
            advisory = ContainerAdvisory.create_or_update(advisory_params)[0]
    else:
        # Check to see if a ContainerAdvisory using this id already exists, and if so
        # remove its label because it should not be a ContainerAdvisory if docker isn't
        # a content type.
        container_adv = ContainerAdvisory.nodes.get_or_none(id_=advisory_id)
        if container_adv:
            container_adv.remove_label(ContainerAdvisory.__label__)
        advisory = Advisory.create_or_update(advisory_params)[0]

    if not embargoed:
        advisory.conditional_connect(advisory.reporter, reporter)
        advisory.conditional_connect(advisory.assigned_to, assigned_to)
    bugs = advisory_json['bugs']['bugs']
    for bug in bugs:
        bug = BugzillaBug.get_or_create({'id_': bug['bug']['id']})[0]
        advisory.attached_bugs.connect(bug)
def query_api_and_update_neo4j(self):
    """
    Scrape the Freshmaker API and upload the data to Neo4j.
    """
    # Initialize session and url
    session = retry_session()
    fm_url = self.freshmaker_url
    while True:
        log.debug('Querying {0}'.format(fm_url))
        try:
            rv_json = session.get(fm_url, timeout=60).json()
        except ConnectionError:
            # TODO: Remove this once FACTORY-3955 is resolved
            log.error(
                'The connection to Freshmaker at %s failed. Skipping the rest of the scraper.',
                fm_url,
            )
            break

        for fm_event in rv_json['items']:
            try:
                int(fm_event['search_key'])
            except ValueError:
                # Skip Freshmaker Events that don't have the search_key as the Advisory ID
                continue
            log.debug('Creating FreshmakerEvent {0}'.format(fm_event['id']))
            event_params = dict(
                id_=fm_event['id'],
                event_type_id=fm_event['event_type_id'],
                message_id=fm_event['message_id'],
                state=fm_event['state'],
                state_name=fm_event['state_name'],
                state_reason=fm_event['state_reason'],
                url=fm_event['url']
            )
            if fm_event.get('time_created'):
                event_params['time_created'] = timestamp_to_datetime(fm_event['time_created'])
            if fm_event.get('time_done'):
                # Bug fix: this previously stored time_created under time_done
                event_params['time_done'] = timestamp_to_datetime(fm_event['time_done'])
            event = FreshmakerEvent.create_or_update(event_params)[0]

            log.debug('Creating Advisory {0}'.format(fm_event['search_key']))
            advisory = Advisory.get_or_create(dict(
                id_=fm_event['search_key']
            ))[0]
            event.conditional_connect(event.triggered_by_advisory, advisory)

            for build_dict in fm_event['builds']:
                # To handle a faulty container build in Freshmaker
                if build_dict['build_id'] and int(build_dict['build_id']) < 0:
                    continue
                log.debug('Creating FreshmakerBuild {0}'.format(build_dict['build_id']))
                fb_params = dict(
                    id_=build_dict['id'],
                    dep_on=build_dict['dep_on'],
                    name=build_dict['name'],
                    original_nvr=build_dict['original_nvr'],
                    rebuilt_nvr=build_dict['rebuilt_nvr'],
                    state=build_dict['state'],
                    state_name=build_dict['state_name'],
                    state_reason=build_dict['state_reason'],
                    time_submitted=timestamp_to_datetime(build_dict['time_submitted']),
                    type_=build_dict['type'],
                    type_name=build_dict['type_name'],
                    url=build_dict['url']
                )
                if build_dict['time_completed']:
                    fb_params['time_completed'] = timestamp_to_datetime(
                        build_dict['time_completed'])
                if build_dict['build_id']:
                    fb_params['build_id'] = build_dict['build_id']
                fb = FreshmakerBuild.create_or_update(fb_params)[0]
                event.requested_builds.connect(fb)

                # The build ID obtained from Freshmaker API is actually a Koji task ID
                task_result = None
                if build_dict['build_id']:
                    task_result = self.get_koji_task_result(build_dict['build_id'])
                if not task_result:
                    continue

                # Extract the build ID from a task result
                xml_root = ET.fromstring(task_result)
                # TODO: Change this if a task can trigger multiple builds
                # NOTE(review): the "." before "//string" in this XPath looks
                # suspicious — verify it matches the Koji task-result XML layout
                try:
                    build_id = xml_root.find(".//*[name='koji_builds'].//string").text
                except AttributeError:
                    build_id = None
                if not build_id:
                    continue

                log.debug('Creating ContainerKojiBuild {0}'.format(build_id))
                build_params = {
                    'id_': build_id,
                    'original_nvr': build_dict['original_nvr']
                }
                try:
                    build = ContainerKojiBuild.create_or_update(build_params)[0]
                except neomodel.exceptions.ConstraintValidationFailed:
                    # This must have errantly been created as a KojiBuild instead of a
                    # ContainerKojiBuild, so let's fix that.
                    build = KojiBuild.nodes.get_or_none(id_=build_id)
                    if not build:
                        # If there was a constraint validation failure and the build isn't
                        # just the wrong label, then we can't recover.
                        raise
                    build.add_label(ContainerKojiBuild.__label__)
                    build = ContainerKojiBuild.create_or_update(build_params)[0]
                event.successful_koji_builds.connect(build)

        if rv_json['meta'].get('next'):
            fm_url = rv_json['meta']['next']
        else:
            break
def test_timestamp_to_datetime_invalid(input_dt):
    """Test that an error is raised when an invalid timestamp is provided."""
    with pytest.raises(ValueError) as err:
        timestamp_to_datetime(input_dt)
    expected = 'The timestamp "{0}" is an invalid format'.format(input_dt)
    assert str(err.value) == expected
def test_timestamp_to_datetime(input_dt, expected_dt):
    """Test that a generic timestamp can be converted to a datetime object."""
    result = timestamp_to_datetime(input_dt)
    assert result == expected_dt
def test_timestamp_to_datetime_iso(input_dt, expected_dt):
    """Test that an ISO 8601 timestamp can be converted to a datetime object."""
    result = timestamp_to_datetime(input_dt)
    assert result == expected_dt