def HandleGet(self):
  """Renders a dashboard of WfAnalysis entities that were aborted."""
  # Midnight (UTC) of today is the fallback boundary for the date range.
  today_midnight = datetime.combine(time_util.GetUTCNow(), time.min)
  start_date, end_date = _GetStartEndDates(
      self.request.get('start_date'), self.request.get('end_date'),
      today_midnight)

  # ndb query filters require the explicit '== True' comparison.
  aborted_analyses = WfAnalysis.query(
      ndb.AND(WfAnalysis.build_start_time >= start_date,
              WfAnalysis.build_start_time < end_date,
              WfAnalysis.aborted == True)).order(
                  -WfAnalysis.build_start_time).fetch(_COUNT)

  data = {
      'start_date': time_util.FormatDatetime(start_date),
      'end_date': time_util.FormatDatetime(end_date),
      'analyses': [_Serialize(analysis) for analysis in aborted_analyses],
  }
  return {'template': 'pipeline_errors_dashboard.html', 'data': data}
def _FetchAndSortUntriagedAnalyses():
  """Returns untriaged found-result analyses, sorted by build identity.

  Sort order is (master_name, builder_name, build_number).
  """
  untriaged = WfAnalysis.query(
      WfAnalysis.result_status ==
      wf_analysis_result_status.FOUND_UNTRIAGED).fetch()
  untriaged.sort(
      key=lambda analysis: (analysis.master_name, analysis.builder_name,
                            analysis.build_number))
  return untriaged
def main():
  """Groups completed compile-failure analyses by failure group key.

  Scans analyses with build_start_time since 2017-12-01, clusters each
  group's builds by their (culprit, suspects) result, and displays the
  groups whose builds produced more than one distinct result.
  """
  start = datetime(2017, 12, 1, 0, 0, 0)
  groups = defaultdict(list)
  cursor = None
  more = True
  while more:
    analyses, cursor, more = WfAnalysis.query(
        WfAnalysis.build_start_time >= start).fetch_page(
            100, start_cursor=cursor)
    for analysis in analyses:
      # Only completed compile-failure analyses that belong to a group.
      if (analysis.status != analysis_status.COMPLETED or
          not analysis.failure_group_key or
          analysis.failure_type != failure_type.COMPILE):
        continue

      group_key = '/'.join(str(part) for part in analysis.failure_group_key)
      build_key = analysis.key.pairs()[0][1]

      # The try job for this build, if present, holds the found culprit.
      culprit = None
      try_job = WfTryJob.Get(*build_key.split('/'))
      if try_job and try_job.compile_results:
        culprit = try_job.compile_results[-1].get('culprit')

      # Merge with an existing entry that has the same culprit and suspects;
      # otherwise start a new entry for this distinct result.
      matched = None
      for existing in groups[group_key]:
        if (existing['culprit'] == culprit and
            existing['suspects'] == analysis.suspected_cls):
          matched = existing
          break
      if matched is not None:
        matched['builds'].append(build_key)
      else:
        groups[group_key].append({
            'suspects': analysis.suspected_cls,
            'culprit': culprit,
            'builds': [build_key],
        })

  # Keep only the groups whose builds disagree on the analysis result.
  groups_with_different_results = defaultdict(list)
  for key, results in groups.iteritems():
    if len(results) > 1:
      groups_with_different_results[key] = results

  _DisplayResults(groups_with_different_results, groups)
def ReportTestFailuresForRange(start_time, end_time, dup_dict): """Reports test failures to BQ for the given range. Optional cursor can be specific to continue. Args: (datetime) start_time: Start of the range. (datetime) end_time: End of the range. (Cursor) start_cursor: Marker on where to start the query at. """ analyses_query = WfAnalysis.query( WfAnalysis.build_start_time > start_time, WfAnalysis.build_start_time < end_time).order( WfAnalysis.build_start_time) print 'reporting test failure events to {} from {} --> {}'.format( event_reporting._TABLE_ID_TEST, start_time, end_time) cursor = None more = True page_size = 100 while more: print 'fetching {} results...'.format(page_size) analyses, cursor, more = analyses_query.fetch_page(page_size, start_cursor=cursor) for analysis in analyses: if analysis.build_failure_type != failure_type.TEST: continue if not CanReportAnalysis(analysis): continue print 'attempting with datetime {}'.format( analysis.build_start_time) success = ReportTestFailureAnalysisCompletionEvent(analysis) if not success: print 'encountered error' return key = analysis.key.pairs()[0][1] if key not in dup_dict: dup_dict[key] = True else: print 'found dup' continue print 'new start_time is {}'.format(analysis.build_start_time)
def _ObscureTriageRecordsInWfAnalysis():
  """Obscures the user names in WfAnalysis triage history.

  Returns:
    The number of entities whose triage records were obscured.
  """
  obscured_total = 0
  cutoff = _TimeBeforeNow(days=_TRIAGE_RECORD_RENTENSION_DAYS)
  # ndb query filters require the explicit '== False' comparison.
  query = WfAnalysis.query(WfAnalysis.triage_email_obscured == False,
                           WfAnalysis.triage_record_last_add < cutoff)
  cursor = None
  more = True
  while more:
    entities, cursor, more = query.fetch_page(
        _PAGE_SIZE, start_cursor=cursor)
    for entity in entities:
      for record in (entity.triage_history or []):
        record['user_name'] = email_util.ObscureEmails(
            [record['user_name']], ['google.com'])[0]
      entity.triage_email_obscured = True
    # Persist the whole page in one batch write.
    ndb.put_multi(entities)
    obscured_total += len(entities)
  return obscured_total
def HandleGet(self):
  """Shows a list of Findit analysis results in HTML page.

  By default the page will display all the results under status
  FOUND_CORRECT, FOUND_INCORRECT and NOT_FOUND_INCORRECT.

  Available parameters:
    count: Number of analysis results to display.
    result_status: Only display results with this result_status.
    triage: For internal use; display results under status FOUND_INCORRECT,
        NOT_FOUND_INCORRECT, FOUND_UNTRIAGED and NOT_FOUND_UNTRIAGED.
    days: Only display results within this many days. Turns off the triage
        parameter and displays results regardless of result_status.
  """
  status_code = int(self.request.get('result_status', '-1'))
  if status_code >= 0:
    # An explicit result_status filter wins over the default ranges.
    analysis_query = WfAnalysis.query(
        WfAnalysis.result_status == status_code)
  elif self.request.get('triage') == '1':
    analysis_query = WfAnalysis.query(
        ndb.AND(WfAnalysis.result_status > result_status.FOUND_CORRECT,
                WfAnalysis.result_status < result_status.NOT_FOUND_CORRECT))
  else:
    analysis_query = WfAnalysis.query(
        ndb.AND(WfAnalysis.result_status >= result_status.FOUND_CORRECT,
                WfAnalysis.result_status < result_status.FOUND_UNTRIAGED))

  count = (int(self.request.get('count'))
           if self.request.get('count') else _DEFAULT_DISPLAY_COUNT)

  if self.request.get('days'):  # pragma: no cover
    # Midnight (UTC) N days ago; date filtering overrides triage filtering
    # unless an explicit result_status was requested.
    cutoff = time_util.GetUTCNow() - datetime.timedelta(
        int(self.request.get('days')))
    cutoff = cutoff.replace(hour=0, minute=0, second=0, microsecond=0)
    if status_code >= 0:
      analysis_results = analysis_query.filter(
          WfAnalysis.build_start_time >= cutoff).order(
              -WfAnalysis.build_start_time).fetch(count)
    else:
      analysis_results = WfAnalysis.query(
          WfAnalysis.build_start_time >= cutoff).order(
              -WfAnalysis.build_start_time).fetch(count)
  else:
    analysis_results = analysis_query.order(
        WfAnalysis.result_status, -WfAnalysis.build_start_time).fetch(count)

  def _FormatDatetime(dt):
    # Renders a datetime for display; None stays None.
    return dt.strftime('%Y-%m-%d %H:%M:%S UTC') if dt else None

  analyses = []
  for result in analysis_results:
    analyses.append({
        'master_name': result.master_name,
        'builder_name': result.builder_name,
        'build_number': result.build_number,
        'build_start_time': _FormatDatetime(result.build_start_time),
        'failure_type': result.failure_type_str,
        'status': result.status,
        'status_description': result.status_description,
        'suspected_cls': result.suspected_cls,
        'result_status': result.result_status_description,
    })

  data = {
      'analyses': analyses,
      'triage': self.request.get('triage', '-1'),
      'days': self.request.get('days', '-1'),
      'count': self.request.get('count', '-1'),
      'result_status': self.request.get('result_status', '-1'),
  }
  return {'template': 'list_analyses.html', 'data': data}
def _FetchAnalyses(start_date, end_date):
  """Fetches WfAnalysis entities with build_start_time in [start, end)."""
  query = WfAnalysis.query(WfAnalysis.build_start_time >= start_date,
                           WfAnalysis.build_start_time < end_date)
  return _BigFetch(query)
print 'percentage: %.2f%%' % _ResultPercentage(num_not_found, num_total) _PrintResults(not_found) if __name__ == '__main__': start = datetime.datetime(2017, 4, 25, 0, 0, 0) end = datetime.datetime(2017, 4, 27, 0, 0, 0) cursor = None more = True test_results = [] compile_results = [] while more: analyses, cursor, more = WfAnalysis.query( ndb.AND(WfAnalysis.build_start_time >= start, WfAnalysis.build_start_time < end)).fetch_page( 100, start_cursor=cursor) for analysis in analyses: if not analysis.completed or not analysis.result: continue build_key = analysis.key.pairs()[0][1] master, builder_name, build_number = ( BaseBuildModel.GetBuildInfoFromBuildKey(build_key)) build_number = int(build_number) try_job = WfTryJob.Get(master, builder_name, build_number) for failure in analysis.result.get('failures', {}):
def HandleGet(self):
  """Shows a list of Findit analysis results in HTML page.

  By default the page will display all the results under status
  FOUND_CORRECT, FOUND_INCORRECT and NOT_FOUND_INCORRECT.

  Available parameters:
    count: Number of analysis results to display.
    result_status: Only display results with this result_status.
    triage: For internal use; display results under status FOUND_INCORRECT,
        NOT_FOUND_INCORRECT, FOUND_UNTRIAGED and NOT_FOUND_UNTRIAGED.
    days: Only display results within this many days. Turns off the triage
        parameter and displays results regardless of result_status.
  """
  status_code = int(self.request.get('result_status', '-1'))
  if status_code >= 0:
    # An explicit result_status filter wins over the default ranges.
    analysis_query = WfAnalysis.query(
        WfAnalysis.result_status == status_code)
  elif self.request.get('triage') == '1':
    analysis_query = WfAnalysis.query(
        ndb.AND(WfAnalysis.result_status > result_status.FOUND_CORRECT,
                WfAnalysis.result_status < result_status.NOT_FOUND_CORRECT))
  else:
    analysis_query = WfAnalysis.query(
        ndb.AND(WfAnalysis.result_status >= result_status.FOUND_CORRECT,
                WfAnalysis.result_status < result_status.FOUND_UNTRIAGED))

  count = (int(self.request.get('count'))
           if self.request.get('count') else _DEFAULT_DISPLAY_COUNT)

  if self.request.get('days'):  # pragma: no cover
    # Midnight (UTC) N days ago; date filtering overrides triage filtering
    # unless an explicit result_status was requested.
    cutoff = datetime.datetime.utcnow() - datetime.timedelta(
        int(self.request.get('days')))
    cutoff = cutoff.replace(hour=0, minute=0, second=0, microsecond=0)
    if status_code >= 0:
      analysis_results = analysis_query.filter(
          WfAnalysis.build_start_time >= cutoff).order(
              -WfAnalysis.build_start_time).fetch(count)
    else:
      analysis_results = WfAnalysis.query(
          WfAnalysis.build_start_time >= cutoff).order(
              -WfAnalysis.build_start_time).fetch(count)
  else:
    analysis_results = analysis_query.order(
        WfAnalysis.result_status, -WfAnalysis.build_start_time).fetch(count)

  def _FormatDatetime(dt):
    # Renders a datetime for display; None stays None.
    return dt.strftime('%Y-%m-%d %H:%M:%S UTC') if dt else None

  analyses = []
  for result in analysis_results:
    analyses.append({
        'master_name': result.master_name,
        'builder_name': result.builder_name,
        'build_number': result.build_number,
        'build_start_time': _FormatDatetime(result.build_start_time),
        'status': result.status,
        'status_description': result.status_description,
        'suspected_cls': result.suspected_cls,
        'result_status': result.result_status_description,
    })

  data = {
      'analyses': analyses,
      'triage': self.request.get('triage', '-1'),
      'days': self.request.get('days', '-1'),
      'count': self.request.get('count', '-1'),
      'result_status': self.request.get('result_status', '-1'),
  }
  return {'template': 'list_analyses.html', 'data': data}