def test_get_crash_signature(line, exp_search_term):
    """The crash signature extracted from an error line matches the expected term."""
    assert get_crash_signature(line) == exp_search_term
def parse_log(project, job_log_url, job_guid, check_errors=False):
    """
    Call ArtifactBuilderCollection on the given job.

    Parses the log at ``job_log_url['url']``, posts the resulting artifacts
    (plus a 'Bug suggestions' artifact when ``check_errors`` is set) to the
    treeherder API, then marks the job-log-url row as 'parsed'.  On any
    failure the row is marked 'failed' and the task is retried.

    :param project: repository/project name used to look up OAuth credentials
    :param job_log_url: dict with at least 'id' and 'url' keys
    :param job_guid: guid of the job the artifacts belong to
    :param check_errors: when True, also build bug suggestions for each
        error line found in the structured log
    """
    credentials = OAuthCredentials.get_credentials(project)
    req = TreeherderRequest(
        protocol=settings.TREEHERDER_REQUEST_PROTOCOL,
        host=settings.TREEHERDER_REQUEST_HOST,
        project=project,
        oauth_key=credentials.get('consumer_key', None),
        oauth_secret=credentials.get('consumer_secret', None),
    )
    update_endpoint = 'job-log-url/{0}/update_parse_status'.format(
        job_log_url['id'])
    try:
        log_url = job_log_url['url']
        bug_suggestions = []
        bugscache_uri = '{0}{1}'.format(
            settings.API_HOSTNAME,
            reverse("bugscache-list")
        )
        # cache of search term -> suggestions, so each distinct term hits
        # the bugscache API at most once per log
        terms_requested = {}

        if log_url:
            # parse a log given its url
            artifact_bc = ArtifactBuilderCollection(log_url,
                                                    check_errors=check_errors)
            artifact_bc.parse()

            artifact_list = []
            for name, artifact in artifact_bc.artifacts.items():
                artifact_list.append((job_guid, name, 'json',
                                      json.dumps(artifact)))

            if check_errors:
                all_errors = artifact_bc.artifacts.get(
                    'Structured Log', {}
                ).get(
                    'step_data', {}
                ).get(
                    'all_errors', []
                )
                for err in all_errors:
                    # remove the mozharness prefix
                    clean_line = get_mozharness_substring(err['line'])
                    # get a meaningful search term out of the error line
                    search_term = get_error_search_term(clean_line)
                    bugs = dict(open_recent=[], all_others=[])

                    # collect open recent and all other bugs suggestions
                    if search_term:
                        if search_term not in terms_requested:
                            # retrieve the list of suggestions from the api
                            bugs = get_bugs_for_search_term(
                                search_term,
                                bugscache_uri
                            )
                            terms_requested[search_term] = bugs
                        else:
                            bugs = terms_requested[search_term]

                    if not bugs or not (bugs['open_recent']
                                        or bugs['all_others']):
                        # no suggestions, try to use
                        # the crash signature as search term
                        crash_signature = get_crash_signature(clean_line)
                        if crash_signature:
                            if crash_signature not in terms_requested:
                                bugs = get_bugs_for_search_term(
                                    crash_signature,
                                    bugscache_uri
                                )
                                terms_requested[crash_signature] = bugs
                            else:
                                bugs = terms_requested[crash_signature]

                    bug_suggestions.append({
                        "search": clean_line,
                        "bugs": bugs
                    })

            artifact_list.append((job_guid, 'Bug suggestions', 'json',
                                  json.dumps(bug_suggestions)))

            # store the artifacts generated
            tac = TreeherderArtifactCollection()
            for artifact in artifact_list:
                ta = tac.get_artifact({
                    "job_guid": artifact[0],
                    "name": artifact[1],
                    "type": artifact[2],
                    "blob": artifact[3]
                })
                tac.add(ta)
            req.post(tac)

        # mark the job_log_url row as successfully parsed
        current_timestamp = time.time()
        status = 'parsed'
        req.send(
            update_endpoint,
            method='POST',
            data={
                'parse_status': status,
                'parse_timestamp': current_timestamp
            }
        )
    except Exception as e:
        # mark the job_log_url row as failed BEFORE retrying: Task.retry()
        # raises (Retry, or the original exception once retries are
        # exhausted), so any code placed after it never runs
        current_timestamp = time.time()
        status = 'failed'
        req.send(
            update_endpoint,
            method='POST',
            data={
                'parse_status': status,
                'parse_timestamp': current_timestamp
            }
        )
        parse_log.retry(exc=e)
        # re raise the exception to leave a trace in the log
        raise
def parse_log(project, job_log_url, job_guid, check_errors=False):
    """
    Call ArtifactBuilderCollection on the given job.

    Parses the log at ``job_log_url['url']``, posts the resulting artifacts
    (plus a 'Bug suggestions' artifact when ``check_errors`` is set) to the
    treeherder API, then marks the job-log-url row as 'parsed'.  On any
    failure the row is marked 'failed' and the task is retried.

    :param project: repository/project name used to look up OAuth credentials
    :param job_log_url: dict with at least 'id' and 'url' keys
    :param job_guid: guid of the job the artifacts belong to
    :param check_errors: when True, also build bug suggestions for each
        error line found in the structured log
    """
    credentials = OAuthCredentials.get_credentials(project)
    req = TreeherderRequest(
        protocol=settings.TREEHERDER_REQUEST_PROTOCOL,
        host=settings.TREEHERDER_REQUEST_HOST,
        project=project,
        oauth_key=credentials.get('consumer_key', None),
        oauth_secret=credentials.get('consumer_secret', None),
    )
    update_endpoint = 'job-log-url/{0}/update_parse_status'.format(
        job_log_url['id'])
    try:
        log_url = job_log_url['url']
        bug_suggestions = []
        bugscache_uri = '{0}{1}'.format(settings.API_HOSTNAME,
                                        reverse("bugscache-list"))
        # cache of search term -> suggestions, so each distinct term hits
        # the bugscache API at most once per log
        terms_requested = {}

        if log_url:
            # parse a log given its url
            artifact_bc = ArtifactBuilderCollection(log_url,
                                                    check_errors=check_errors)
            artifact_bc.parse()

            artifact_list = []
            for name, artifact in artifact_bc.artifacts.items():
                artifact_list.append(
                    (job_guid, name, 'json', json.dumps(artifact)))

            if check_errors:
                all_errors = artifact_bc.artifacts.get(
                    'Structured Log', {}).get('step_data', {}).get(
                        'all_errors', [])
                for err in all_errors:
                    # remove the mozharness prefix
                    clean_line = get_mozharness_substring(err['line'])
                    # get a meaningful search term out of the error line
                    search_term = get_error_search_term(clean_line)
                    bugs = dict(open_recent=[], all_others=[])

                    # collect open recent and all other bugs suggestions
                    if search_term:
                        if search_term not in terms_requested:
                            # retrieve the list of suggestions from the api
                            bugs = get_bugs_for_search_term(
                                search_term, bugscache_uri)
                            terms_requested[search_term] = bugs
                        else:
                            bugs = terms_requested[search_term]

                    if not bugs or not (bugs['open_recent']
                                        or bugs['all_others']):
                        # no suggestions, try to use
                        # the crash signature as search term
                        crash_signature = get_crash_signature(clean_line)
                        if crash_signature:
                            if crash_signature not in terms_requested:
                                bugs = get_bugs_for_search_term(
                                    crash_signature, bugscache_uri)
                                terms_requested[crash_signature] = bugs
                            else:
                                bugs = terms_requested[crash_signature]

                    bug_suggestions.append({
                        "search": clean_line,
                        "bugs": bugs
                    })

            artifact_list.append((job_guid, 'Bug suggestions', 'json',
                                  json.dumps(bug_suggestions)))

            # store the artifacts generated
            tac = TreeherderArtifactCollection()
            for artifact in artifact_list:
                ta = tac.get_artifact({
                    "job_guid": artifact[0],
                    "name": artifact[1],
                    "type": artifact[2],
                    "blob": artifact[3]
                })
                tac.add(ta)
            req.post(tac)

        # mark the job_log_url row as successfully parsed
        current_timestamp = time.time()
        status = 'parsed'
        req.send(update_endpoint, method='POST',
                 data={
                     'parse_status': status,
                     'parse_timestamp': current_timestamp
                 })
    except Exception as e:
        # mark the job_log_url row as failed BEFORE retrying: Task.retry()
        # raises (Retry, or the original exception once retries are
        # exhausted), so any code placed after it never runs
        current_timestamp = time.time()
        status = 'failed'
        req.send(update_endpoint, method='POST',
                 data={
                     'parse_status': status,
                     'parse_timestamp': current_timestamp
                 })
        parse_log.retry(exc=e)
        # re raise the exception to leave a trace in the log
        raise
def parse_log(project, log_url, job_guid, resultset, check_errors=False):
    """
    Call ArtifactBuilderCollection on the given job.

    Parses the log at ``log_url``, sends the resulting artifacts (plus
    'Open bugs' / 'Closed bugs' suggestion artifacts when ``check_errors``
    is set) to the treeherder API, and publishes a 'processed' status (and
    a failure notification when errors were checked) for the job.

    :param project: repository/project name used to look up OAuth credentials
    :param log_url: url of the log to parse; skipped if falsy
    :param job_guid: guid of the job the artifacts belong to
    :param resultset: resultset identifier forwarded to the status publisher
    :param check_errors: when True, also build bug suggestions for each
        error line found in the structured log
    """
    # mozharness prepends "HH:MM:SS LEVEL - " to every log line; strip it
    # before building a bug search term (raw string so \d is a regex escape)
    mozharness_pattern = re.compile(
        r'^\d+:\d+:\d+[ ]+(?:DEBUG|INFO|WARNING|ERROR|CRITICAL|FATAL) - [ ]?'
    )

    # per-status caches: search term -> suggestions, and the final
    # clean error line -> suggestions mapping sent as artifacts
    bugs_cache = {'open': {}, 'closed': {}}
    bug_suggestions = {'open': {}, 'closed': {}}

    status_publisher = JobStatusPublisher(settings.BROKER_URL)
    failure_publisher = JobFailurePublisher(settings.BROKER_URL)

    try:
        # return the resultset with the job id to identify if the UI wants
        # to fetch the whole thing.
        bugscache_uri = '{0}{1}'.format(
            settings.API_HOSTNAME,
            reverse("bugscache-list")
        )
        credentials = OAuthCredentials.get_credentials(project)

        if log_url:
            # parse a log given its url
            artifact_bc = ArtifactBuilderCollection(
                log_url,
                check_errors=check_errors,
            )
            artifact_bc.parse()

            artifact_list = []
            for name, artifact in artifact_bc.artifacts.items():
                artifact_list.append((job_guid, name, 'json',
                                      json.dumps(artifact)))

            if check_errors:
                all_errors = artifact_bc.artifacts[
                    'Structured Log']['step_data']['all_errors']
                for err in all_errors:
                    # remove the mozharness prefix
                    clean_line = mozharness_pattern.sub('', err['line']).strip()
                    # get a meaningful search term out of the error line
                    search_term = get_error_search_term(clean_line)

                    # collect open and closed bugs suggestions
                    for status in ('open', 'closed'):
                        if not search_term:
                            bug_suggestions[status][clean_line] = []
                            continue
                        if search_term not in bugs_cache[status]:
                            # retrieve the list of suggestions from the api
                            bugs_cache[status][search_term] = \
                                get_bugs_for_search_term(
                                    search_term,
                                    status,
                                    bugscache_uri
                                )
                            # no suggestions, try to use
                            # the crash signature as search term
                            if not bugs_cache[status][search_term]:
                                crash_signature = get_crash_signature(
                                    search_term)
                                if crash_signature:
                                    # bug fix: query with the crash
                                    # signature, not the original search
                                    # term again (which already returned
                                    # no results)
                                    bugs_cache[status][search_term] = \
                                        get_bugs_for_search_term(
                                            crash_signature,
                                            status,
                                            bugscache_uri
                                        )
                        bug_suggestions[status][clean_line] = \
                            bugs_cache[status][search_term]

            artifact_list.append((job_guid, 'Open bugs', 'json',
                                  json.dumps(bug_suggestions['open'])))
            artifact_list.append((job_guid, 'Closed bugs', 'json',
                                  json.dumps(bug_suggestions['closed'])))

            # store the artifacts generated
            tac = TreeherderArtifactCollection()
            for artifact in artifact_list:
                ta = tac.get_artifact({
                    "job_guid": artifact[0],
                    "name": artifact[1],
                    "type": artifact[2],
                    "blob": artifact[3]
                })
                tac.add(ta)

            req = TreeherderRequest(
                protocol=settings.TREEHERDER_REQUEST_PROTOCOL,
                host=settings.TREEHERDER_REQUEST_HOST,
                project=project,
                oauth_key=credentials.get('consumer_key', None),
                oauth_secret=credentials.get('consumer_secret', None),
            )
            req.send(tac)

        status_publisher.publish(job_guid, resultset, project, 'processed')
        if check_errors:
            failure_publisher.publish(job_guid, project)
    finally:
        # always release the publishers' broker connections
        status_publisher.disconnect()
        failure_publisher.disconnect()