def do_PUT(self):
    """Mock Bugzilla PUT endpoint.

    Validates the three kinds of updates the tests send to bug 123: a plain
    comment, a comment that sets a needinfo flag, and a comment that sets the
    assignee. Any other request path is a test failure.
    """
    comment_path = "/bug/123?api_key=bob"
    body_len = int(self.headers.get('content-length'))
    payload = json.loads(self.rfile.read(body_len).decode("utf-8"))

    # Acknowledge the request up front; validation happens afterwards.
    self.send_response(200)
    self.send_header("Content-type", "application/json")
    self.end_headers()

    if self.path != comment_path:
        assert False, "Got a path %s I didn't expect" % self.path

    assert 'id' in payload
    assert 'comment' in payload
    assert 'body' in payload['comment']

    if 'flags' in payload:
        # needinfo-flag variant
        flag = payload['flags'][0]
        assert payload['comment']['body'] == "Test Flags"
        assert flag['name'] == 'needinfo'
        assert flag['status'] == '?'
        assert flag['requestee'] == 'Jon'
    elif 'assigned_to' in payload:
        # assignee variant
        assert payload['comment']['body'] == "Test Assignee"
        assert payload['assigned_to'] == 'Jon'
    else:
        # plain-comment variant
        assert payload['comment']['body'] == CommentTemplates.TRY_RUN_SUBMITTED(TRY_REVISION)

    canned_response = "{'bugs':[{'alias':null,'changes':{},'last_change_time':'2020-07-10T18:58:21Z','id':123}]}"
    self.wfile.write(canned_response.replace("'", '"').encode())
def _process_job_details_for_awaiting_initial_platform_results(
        self, library, task, existing_job):
    """Advance a job whose initial (linux64-only) try run may have finished.

    If every job on the first try push reached a terminal state with no build
    failures, re-vendor the update, commit it, and submit a second try push
    covering the remaining platforms, then move the job to
    AWAITING_SECOND_PLATFORMS_TRY_RESULTS.
    """
    initial_revision = existing_job.try_runs[0].revision
    self.logger.log(
        "Handling try revision %s in Awaiting Initial Platform Results"
        % initial_revision)

    job_list = self.taskclusterProvider.get_job_details(initial_revision)
    if not self._job_is_completed_without_build_failures(
            library, existing_job, job_list):
        # Either still running or it failed; that helper handled any bookkeeping.
        return

    self.logger.log(
        "All jobs completed, we're going to go to the next set of platforms.",
        level=LogLevel.Info)

    # Re-apply the update and push a second try run excluding linux64.
    self.vendorProvider.vendor(library)
    self.mercurialProvider.commit(library, existing_job.bugzilla_id,
                                  existing_job.version)
    second_revision = self.taskclusterProvider.submit_to_try(library, "!linux64")

    self.dbProvider.add_try_run(existing_job, second_revision, 'more platforms')
    self.bugzillaProvider.comment_on_bug(
        existing_job.bugzilla_id,
        CommentTemplates.TRY_RUN_SUBMITTED(second_revision, another=True))

    existing_job.status = JOBSTATUS.AWAITING_SECOND_PLATFORMS_TRY_RESULTS
    self.dbProvider.update_job_status(existing_job)
def _job_is_completed_without_build_failures(self, library, existing_job,
                                             job_list):
    """Return True iff the try run finished and no build job failed.

    Returns False (and logs why) when there are no results yet or when some
    jobs are still running. A failed build job additionally closes the job
    out: comments on the bug, abandons the phab revision, and marks the job
    DONE / BUILD_FAILED in the database.
    """
    if not job_list:
        self.logger.log("Try revision had no job results. Skipping this job.",
                        level=LogLevel.Warning)
        return False

    # Every job must have reached a terminal state before we judge the run.
    if any(j.state not in ["completed", "failed", "exception"] for j in job_list):
        self.logger.log(
            "Not all jobs on the try revision are completed, so skipping this job until they are.",
            level=LogLevel.Info)
        return False

    # First, look for any failed build jobs
    for j in job_list:
        if j.result in ["retry", "success"]:
            continue
        if "build" not in j.job_type_name:
            continue
        # A build failed: there is nothing more we can do automatically.
        self.bugzillaProvider.comment_on_bug(
            existing_job.bugzilla_id,
            CommentTemplates.DONE_BUILD_FAILURE(library),
            needinfo=library.maintainer_bz)
        self.phabricatorProvider.abandon(existing_job.phab_revision)
        existing_job.status = JOBSTATUS.DONE
        existing_job.outcome = JOBOUTCOME.BUILD_FAILED
        self.dbProvider.update_job_status(existing_job)
        return False

    return True
def testComment(self):
    """Exercise comment_on_bug's plain, assignee, and needinfo variants."""
    bug_id = 123
    # Plain comment whose body comes from the standard try-run template.
    self.bugzillaProvider.comment_on_bug(
        bug_id, CommentTemplates.TRY_RUN_SUBMITTED(TRY_REVISION))
    # Comment that also sets the assignee.
    self.bugzillaProvider.comment_on_bug(bug_id, "Test Assignee", assignee='Jon')
    # Comment that also sets a needinfo flag.
    self.bugzillaProvider.comment_on_bug(bug_id, "Test Flags", needinfo='Jon')
def _process_new_commits(self, library, task, new_commits, all_library_jobs):
    """File a commit-alert bug for newly seen upstream commits.

    Files a bug describing `new_commits` and records a DONE job keyed on the
    newest commit's revision so the same commits are not reported again.

    Fixes: the unknown-filter error message said "commit-altert"; corrected
    to "commit-alert". The two unsupported-filter branches are also collapsed
    into one (behavior unchanged: no filtering is applied in either case).
    """
    assert new_commits

    newest_commit = new_commits[-1]

    filtered_commits = new_commits
    if task.filter in ('security', 'source-extensions'):
        # We don't support these filters yet; fall through without filtering.
        pass
    elif task.filter == 'none':
        pass
    else:
        raise Exception(
            "In a commit-alert task for library %s I got a filter '%s' I don't know how to handle."
            % (library.name, task.filter))

    # Link the new bug to the most recent prior job's bug, if any.
    depends_on = all_library_jobs[0].bugzilla_id if all_library_jobs else None

    description = CommentTemplates.EXAMINE_COMMITS_BODY(
        library, task,
        self.scmProvider.build_bug_description(filtered_commits))
    bugzilla_id = self.bugzillaProvider.file_bug(
        library,
        CommentTemplates.EXAMINE_COMMITS_SUMMARY(library, new_commits),
        description,
        task.cc,
        needinfo=task.needinfo,
        depends_on=depends_on,
        moco_confidential=True)

    # Commit-alert jobs are informational only, so they are born DONE.
    self.dbProvider.create_job(JOBTYPE.COMMITALERT,
                               library,
                               newest_commit.revision,
                               JOBSTATUS.DONE,
                               JOBOUTCOME.ALL_SUCCESS,
                               bugzilla_id,
                               phab_revision=None,
                               try_run=None,
                               try_run_type=None)
def _process_job_results(self, library, task, existing_job, results,
                         comment_lines):
    """Close out a completed try run based on its failure classification.

    Three outcomes: unclassified failures (delegated, which sets DONE itself),
    classified-only failures (comment + hand review to the maintainer), or a
    clean run (comment + assign to the maintainer). The latter two mark the
    job DONE here.
    """
    # Unclassified failures take priority; that path updates the job status
    # itself, so return immediately after delegating.
    if results['to_investigate'] and comment_lines:
        self._process_unclassified_failures(library, task, existing_job,
                                            comment_lines)
        return

    if comment_lines:
        # Only classified (known) failures remain.
        existing_job.outcome = JOBOUTCOME.CLASSIFIED_FAILURES
        comment = "All jobs completed, we found the following issues.\n\n"
        self.logger.log(comment, level=LogLevel.Info)
        for line in comment_lines:
            self.logger.log(line, level=LogLevel.Debug)
            comment += line + "\n"
        self.bugzillaProvider.comment_on_bug(
            existing_job.bugzilla_id,
            CommentTemplates.DONE_CLASSIFIED_FAILURE(comment, library),
            needinfo=library.maintainer_bz,
            assignee=library.maintainer_bz)
        self.phabricatorProvider.set_reviewer(existing_job.phab_revision,
                                              library.maintainer_phab)
    else:
        # Everything.... succeeded?
        self.logger.log("All jobs completed and we got a clean try run!",
                        level=LogLevel.Info)
        existing_job.outcome = JOBOUTCOME.ALL_SUCCESS
        self.bugzillaProvider.comment_on_bug(
            existing_job.bugzilla_id,
            CommentTemplates.DONE_ALL_SUCCESS(),
            assignee=library.maintainer_bz)
        self.phabricatorProvider.set_reviewer(existing_job.phab_revision,
                                              library.maintainer_phab)

    existing_job.status = JOBSTATUS.DONE
    self.dbProvider.update_job_status(existing_job)
def process_task(self, library, task):
    """Process a commit-alert task for `library`.

    Checks upstream for commits newer than any prior commit-alert job. If the
    newest commit was already handled under a different Firefox version,
    cross-reference that bug and record a stub job; otherwise file a fresh
    commit-alert bug via _process_new_commits.
    """
    assert task.type == 'commit-alert'

    all_library_jobs = self.dbProvider.get_all_jobs_for_library(library)
    all_library_jobs = [
        j for j in all_library_jobs if j.type == JOBTYPE.COMMITALERT
    ]
    # Order them from newest to oldest.
    # Bug fix: the original called sorted() and discarded its return value,
    # leaving the list in database order. Sort in place, newest first, to
    # match the stated intent (downstream code treats all_library_jobs[0] as
    # the most recent job when linking depends_on).
    all_library_jobs.sort(key=lambda x: x.created, reverse=True)

    unseen_upstream_commits = self.scmProvider.check_for_update(
        library, task, all_library_jobs)
    if not unseen_upstream_commits:
        # We logged the reason for this already; just return
        return

    newest_commit = unseen_upstream_commits[-1]

    existing_job = self.dbProvider.get_job(library, newest_commit.revision,
                                           limit_by_ff_version=False)
    if existing_job:
        # Already processed under another ff version: cross-link and stub out.
        self.logger.log(
            "We found a job with id %s for revision %s that was processed for ff version %s (I am ff version %s). Adding a comment there and aborting."
            % (existing_job.id, newest_commit.revision,
               existing_job.ff_version,
               self.config_dictionary['General']['ff-version']),
            level=LogLevel.Info)
        self.bugzillaProvider.comment_on_bug(
            existing_job.bugzilla_id,
            CommentTemplates.COMMENT_ALSO_AFFECTS(
                self.config_dictionary['General']['ff-version'],
                self.config_dictionary['General']['repo']))
        # We also need to make a stubby job entry for this ff version so we hit the above early return; otherwise we will repeat this ad-naseum
        self.dbProvider.create_job(JOBTYPE.COMMITALERT,
                                   library,
                                   newest_commit.revision,
                                   JOBSTATUS.DONE,
                                   JOBOUTCOME.CROSS_VERSION_STUB,
                                   existing_job.bugzilla_id,
                                   phab_revision=None,
                                   try_run=None,
                                   try_run_type=None)
        return

    self.logger.log(
        "Processing %s for %s upstream revisions culminating in %s."
        % (library.name, len(unseen_upstream_commits),
           newest_commit.revision),
        level=LogLevel.Info)
    self._process_new_commits(library, task, unseen_upstream_commits,
                              all_library_jobs)
def _process_unclassified_failures(self, library, task, existing_job,
                                   comment_bullets):
    """Close out a job whose try push produced unclassified failures.

    Builds a bug comment from `comment_bullets`, needinfos the maintainer,
    and marks the job DONE with outcome UNCLASSIFIED_FAILURES.
    """
    body = "The try push is done, we found jobs with unclassified failures.\n\n"
    self.logger.log(body.strip(), level=LogLevel.Info)

    for bullet in comment_bullets:
        body += bullet + "\n"
        self.logger.log(bullet, level=LogLevel.Debug)

    self.bugzillaProvider.comment_on_bug(
        existing_job.bugzilla_id,
        CommentTemplates.DONE_UNCLASSIFIED_FAILURE(body, library),
        needinfo=library.maintainer_bz)

    existing_job.outcome = JOBOUTCOME.UNCLASSIFIED_FAILURES
    existing_job.status = JOBSTATUS.DONE
    self.dbProvider.update_job_status(existing_job)
def testFile(self):
    """Exercise file_bug with a fully populated argument list."""
    dav1d = Struct(**{
        'name': 'dav1d',
        'bugzilla_product': 'Core',
        'bugzilla_component': 'ImageLib',
    })
    summary = CommentTemplates.UPDATE_SUMMARY(
        dav1d, 'V1',
        string_date_to_uniform_string_date('2020-08-21T15:13:49.000+02:00'))
    self.bugzillaProvider.file_bug(
        dav1d,
        summary,
        "",
        ['*****@*****.**'],
        ['*****@*****.**'],
        210,
        110,
        moco_confidential=True)
def testAlertAcrossFFVersions(self):
    """A commit alert already filed on one Firefox version should, when seen
    on an earlier version, add a cross-reference comment and a stub job
    rather than filing a new bug."""
    library_filter = "aom"
    (u, expected_values) = TestFunctionality._setup(
        lambda: "0886ba657dedc54fad06018618cc07689198abea",
        lambda: "11c85fb14571c822e5f7f8b92a7e87749430b696",
        lambda: 1,
        lambda: 0,
        library_filter,
        keep_tmp_db=True)

    # First run: files the real commit-alert job.
    u.run(library_filter=library_filter)
    all_jobs = u.dbProvider.get_all_jobs()
    self.assertEqual(
        len([j for j in all_jobs if j.library_shortname != "dav1d"]), 1,
        "I should have created a single job.")
    self._check_job(all_jobs[0], expected_values)

    # Second run: same alert, but as an older Firefox version on mozilla-beta.
    config_dictionary = copy.deepcopy(u.config_dictionary)
    config_dictionary['Database']['keep_tmp_db'] = False
    config_dictionary['General']['ff-version'] -= 1
    config_dictionary['General']['repo'] = "https://hg.mozilla.org/mozilla-beta"
    expected_values.ff_version -= 1

    u = Updatebot(config_dictionary, PROVIDERS)
    u.run(library_filter=library_filter)

    expected_comment = CommentTemplates.COMMENT_ALSO_AFFECTS(
        config_dictionary['General']['ff-version'],
        config_dictionary['General']['repo'])
    self.assertEqual(config_dictionary['Bugzilla']['comment_filed'],
                     expected_comment,
                     "Did not file a comment matching the expected value.")

    all_jobs = u.dbProvider.get_all_jobs()
    self.assertEqual(
        len([j for j in all_jobs if j.library_shortname != "dav1d"]), 2,
        "I should have two jobs.")
    self._check_job(all_jobs[1], expected_values,
                    outcome=JOBOUTCOME.CROSS_VERSION_STUB)

    TestFunctionality._cleanup(u, library_filter)
def _process_new_job(self, library, task, new_version, timestamp):
    """Start a new vendoring job for `library` at `new_version`.

    Steps: abort any previously active job for this library (close its bug,
    abandon its phab revision, mark it DONE/ABORTED), file a fresh bug, run
    `./mach vendor`, commit, submit to try, submit a phabricator patch, and
    record the new job as PENDING. If vendoring fails, the working copy is
    cleaned, a DONE/COULD_NOT_VENDOR job is recorded, and the error is
    reported on the bug instead.
    """
    see_also = []

    # First, we need to see if there was a previously active job for this library.
    # If so, we need to close that job out.
    active_jobs = self.dbProvider.get_all_active_jobs_for_library(library)
    assert len(active_jobs
               ) <= 1, "Got more than one active job for library %s" % (
                   library.name)
    self.logger.log("Found %i active jobs for this library" %
                    len(active_jobs),
                    level=LogLevel.Info)
    if len(active_jobs) == 1:
        active_job = active_jobs[0]
        # Supersede the old job: close its bug and abandon its review.
        self.bugzillaProvider.close_bug(active_job.bugzilla_id,
                                        CommentTemplates.BUG_SUPERSEDED())
        self.phabricatorProvider.abandon(active_job.phab_revision)
        active_job.status = JOBSTATUS.DONE
        active_job.outcome = JOBOUTCOME.ABORTED
        self.dbProvider.update_job_status(active_job)
        # Cross-reference the superseded bug from the new one.
        see_also.append(active_job.bugzilla_id)

    # Now we can process the new job
    # Get the information we will need to file a bug.
    # set ignore_commits_from_these_jobs to get commit details on all revisions since the one in-tree
    upstream_commits = self.scmProvider.check_for_update(
        library, task, ignore_commits_from_these_jobs=None)
    commit_details = self.scmProvider.build_bug_description(
        upstream_commits)
    bugzilla_id = self.bugzillaProvider.file_bug(
        library,
        CommentTemplates.UPDATE_SUMMARY(library, new_version, timestamp),
        CommentTemplates.UPDATE_DETAILS(len(upstream_commits),
                                        commit_details),
        task.cc, see_also)

    # With separate-platforms enabled, the first try run is linux64-only and
    # a second run covers the rest; otherwise one run covers everything.
    try_run_type = 'initial platform' if self.config_dictionary['General'][
        'separate-platforms'] else 'all platforms'

    try:
        self.vendorProvider.vendor(library)
    except Exception as e:
        # We're not going to commit these changes; so clean them out.
        self.cmdProvider.run(["hg", "checkout", "-C", "."])
        self.cmdProvider.run(["hg", "purge", "."])
        # Handle `./mach vendor` failing
        self.dbProvider.create_job(JOBTYPE.VENDORING,
                                   library,
                                   new_version,
                                   JOBSTATUS.DONE,
                                   JOBOUTCOME.COULD_NOT_VENDOR,
                                   bugzilla_id,
                                   phab_revision=None)
        # Surface the subprocess's stderr/stdout in the bug comment; for any
        # other exception fall back to its string form.
        if isinstance(e, subprocess.CalledProcessError):
            msg = e.stderr.decode().strip() + "\n\n" if e.stderr else ""
            msg += e.stdout.decode().strip()
        else:
            msg = str(e)
        self.bugzillaProvider.comment_on_bug(
            bugzilla_id,
            CommentTemplates.COULD_NOT_VENDOR(
                "Could not vendor library. Received the following error from ./mach vendor:\n\n%s"
                % msg),
            needinfo=library.maintainer_bz)
        return

    self.mercurialProvider.commit(library, bugzilla_id, new_version)

    platform_restriction = "linux64" if self.config_dictionary['General'][
        'separate-platforms'] else ""
    next_status = JOBSTATUS.AWAITING_INITIAL_PLATFORM_TRY_RESULTS if self.config_dictionary[
        'General'][
            'separate-platforms'] else JOBSTATUS.AWAITING_SECOND_PLATFORMS_TRY_RESULTS

    try_revision = self.taskclusterProvider.submit_to_try(
        library, platform_restriction)
    self.bugzillaProvider.comment_on_bug(
        bugzilla_id, CommentTemplates.TRY_RUN_SUBMITTED(try_revision))
    phab_revision = self.phabricatorProvider.submit_patch()
    self.dbProvider.create_job(JOBTYPE.VENDORING, library, new_version,
                               next_status, JOBOUTCOME.PENDING, bugzilla_id,
                               phab_revision, try_revision, try_run_type)