def vote_recombinations(self, test_results, recomb_id=None):
    """Approve recombinations whose tests pass, reject the others.

    test_results: mapping of recombination id -> raw test results.
    recomb_id: optional single recombination id; when given, only that
        recombination is voted on.
    """
    if recomb_id:
        target_ids = [recomb_id]
    else:
        target_ids = list(test_results)
    for current_id in target_ids:
        recombination = self.underlayer.get_recombination(current_id)
        score, analysis = self.get_test_score(test_results[current_id])
        if score <= self.test_minimum_score:
            recombination.reject()
            logsummary.info("Recombination %s Rejected: %s" % (current_id, analysis))
            continue
        # Passing recombination: with the lock-and-backports strategy, also
        # leave a structured YAML comment carrying votes and reviewers.
        if self.replication_strategy == "lock-and-backports":
            build_url = os.environ.get('BUILD_URL')
            results = dict()
            results['message'] = "test-link: %s" % build_url if build_url else ""
            results['Code-Review'] = 0
            results['Verified'] = "1"
            results['reviewers'] = self.replica_project['success_reviewers_list']
            recombination.comment(yaml.dump({'backport-test-results': results}))
        recombination.approve()
        logsummary.info("Recombination %s Approved" % current_id)
def scan_replica_patches(self, patches_branch=None):
    """Scan patches branches for new changes and advance their recombinations.

    Mutations are only handled one at a time per branch; any remaining
    changes are deferred until the current recombination completes.
    """
    if patches_branch:
        branches = [patches_branch]
    else:
        branches = [self.underlayer.branch_maps['original->patches'][ob]
                    for ob in self.original_branches]
    for branch in branches:
        recombination, remaining_changes = \
            self.underlayer.get_recombination_from_patches(branch)
        # TODO: handle new patchset on same branch-patches review.
        if not recombination:
            logsummary.info("Project %s no new patches in patches branch %s" % (self.project_name, branch))
            continue
        recomb = recombination.__dict__
        log.debugvar('recomb')
        recombination.handle_status()
        if remaining_changes:
            log.warning("Remaining mutation changes %s will be handled in order one at a time after recombination %s is completed " % (' '.join(remaining_changes), recombination.uuid))
def vote_recombinations(self, test_results, recomb_id=None):
    """Vote on tested recombinations: approve passing ones, reject failing ones.

    test_results maps recombination ids to their raw results; recomb_id
    optionally restricts voting to a single recombination.
    """
    if recomb_id:
        pending = [recomb_id]
    else:
        pending = [rid for rid in test_results]
    for rid in pending:
        recombination = self.underlayer.get_recombination(rid)
        test_score, test_analysis = self.get_test_score(test_results[rid])
        if test_score > self.test_minimum_score:
            if self.replication_strategy == "lock-and-backports":
                # Attach a YAML comment with votes/reviewers for the backport flow.
                backport_results = dict()
                build_url = os.environ.get('BUILD_URL')
                if build_url:
                    backport_results['message'] = "test-link: %s" % build_url
                else:
                    backport_results['message'] = ""
                backport_results['Code-Review'] = 0
                backport_results['Verified'] = "1"
                backport_results['reviewers'] = self.replica_project['success_reviewers_list']
                recombination.comment(yaml.dump({'backport-test-results': backport_results}))
            recombination.approve()
            logsummary.info("Recombination %s Approved" % rid)
        else:
            recombination.reject()
            logsummary.info("Recombination %s Rejected: %s" % (rid, test_analysis))
def poll_original(self):
    """Poll every project's original branches for new changes.

    A failure while polling one project is logged and that project is
    skipped, so the remaining projects are still polled.
    """
    logsummary.info('Polling original for new changes. Checking status of all changes.')
    for project_name in self.projects:
        try:
            logsummary.info('Polling project: %s' % project_name)
            project = self.projects[project_name]
            project.poll_original_branches()
        # `except Exception as e` replaces Python-2-only `except Exception, e`
        except Exception as e:
            traceback.print_exc(file=sys.stdout)
            log.error(e)
            logsummary.error("Project %s skipped, reason: %s" % (project_name, e))
def prepare_tests(self, tests_basedir, recomb_id=None):
    """Fetch untested recombinations for every project and build tester vars.

    tests_basedir: base directory where recombinations are checked out.
    recomb_id: optional single recombination id to restrict the fetch.
    Returns a dict with 'projects_conf' plus one entry per fetched change
    number carrying that change's test layout.
    """
    # NOTE: removed a commented-out try/except that silently skipped
    # failing projects; failures now propagate to the caller.
    logsummary.info('Fetching untested recombinations')
    tester_vars = dict()
    tester_vars['projects_conf'] = {'projects': self.projects_conf}
    for project_name in self.projects:
        logsummary.info('Project: %s' % project_name)
        project = self.projects[project_name]
        log.debugvar('recomb_id')
        changes_infos = project.fetch_untested_recombinations(
            tests_basedir, recomb_id=recomb_id)
        for change_number in changes_infos:
            tester_vars[change_number] = changes_infos[change_number]
    return tester_vars
def scan_replica_patches(self, patches_branch=None):
    """Look for new changes on patches branches and handle their recombinations.

    Mutations are only handled one at a time per branch.
    """
    if patches_branch:
        scan_list = [patches_branch]
    else:
        scan_list = list()
        for original_branch in self.original_branches:
            scan_list.append(
                self.underlayer.branch_maps['original->patches'][original_branch])
    for current_branch in scan_list:
        recombination, remaining_changes = \
            self.underlayer.get_recombination_from_patches(current_branch)
        # TODO: handle new patchset on same branch-patches review.
        if recombination:
            recomb = recombination.__dict__
            log.debugvar('recomb')
            recombination.handle_status()
            if remaining_changes:
                log.warning("Remaining mutation changes %s will be handled in order one at a time after recombination %s is completed " % (' '.join(remaining_changes), recombination.uuid))
        else:
            logsummary.info("Project %s no new patches in patches branch %s" % (self.project_name, current_branch))
def fetch_untested_recombinations(self, test_basedir, recomb_id=None):
    """Fetch untested recombinations into test_basedir and describe their tests.

    Returns a dict keyed by change number holding the target project, the
    checkout directory, the recombination id and the expected per-project
    test result file layout.
    """
    infos = dict()
    dirlist = self.underlayer.fetch_recombinations(test_basedir, "untested",
                                                   recomb_id=recomb_id)
    if not dirlist:
        logsummary.info("Project '%s': no untested recombinations" % self.project_name)
    if not self.test_types:
        logsummary.info("Project '%s': no tests specified" % self.project_name)
    else:
        for change_number in dirlist:
            # Reverse dependencies are tested alongside this project itself.
            projects = self.get_reverse_dependencies(
                tags=['included', 'contained', 'required', 'classes', 'functions'])
            projects[self.project_name] = self.test_types
            log.debugvar('projects')
            tests = dict()
            for name in projects:
                type_map = dict()
                for test_type in projects[name]:
                    type_map[test_type] = "%s/%s/results/%s/%s_results.xml" % (
                        self.project_name, change_number, test_type, name)
                tests[name] = {"types": type_map}
            infos[change_number] = {
                "target_project": self.project_name,
                'recombination_dir': dirlist[change_number],
                "recombination_id": change_number,
                "tests": tests,
            }
            logsummary.info("Fetched recombination %s on dir %s" % (change_number, dirlist[change_number]))
    return infos
def fetch_untested_recombinations(self, test_basedir, recomb_id=None):
    """Check out untested recombinations and compute their test descriptors.

    For every fetched change number, returns the target project, checkout
    directory, recombination id, and the result-file path for each test
    type of each involved project.
    """
    changes_infos = dict()
    dirlist = self.underlayer.fetch_recombinations(test_basedir, "untested",
                                                   recomb_id=recomb_id)
    if not dirlist:
        logsummary.info("Project '%s': no untested recombinations" % self.project_name)
    if not self.test_types:
        logsummary.info("Project '%s': no tests specified" % self.project_name)
    else:
        tag_filter = ['included', 'contained', 'required', 'classes', 'functions']
        for change_number in dirlist:
            projects = self.get_reverse_dependencies(tags=tag_filter)
            projects[self.project_name] = self.test_types
            log.debugvar('projects')
            tests = dict()
            for name in projects:
                tests[name] = {"types": {
                    test_type: "%s/%s/results/%s/%s_results.xml" % (
                        self.project_name, change_number, test_type, name)
                    for test_type in projects[name]
                }}
            changes_infos[change_number] = {
                "target_project": self.project_name,
                'recombination_dir': dirlist[change_number],
                "recombination_id": change_number,
                "tests": tests,
            }
            logsummary.info("Fetched recombination %s on dir %s" % (change_number, dirlist[change_number]))
    return changes_infos
def __init__(self, projects_conf, base_dir, filter_projects=None,
             filter_method=None, filter_branches=None, fetch=True):
    """Initialize the set of managed projects from configuration.

    projects_conf: per-project configuration mapping (mutated in place to
        add computed "rev-deps" entries).
    base_dir: base directory under which each project repository lives.
    filter_projects: optional comma-separated project names to restrict to.
    filter_method: optional original watch-method to restrict to.
    filter_branches: optional comma-separated branches overriding each
        project's original watch-branches.
    fetch: passed through to Project; presumably controls repo update.

    Raises ValueError when the filters leave no project to operate on.
    """
    self.projects = dict()
    self.projects_conf = projects_conf
    self.base_dir = base_dir
    # Extract reverse dependencies: each test-dep learns which projects
    # depend on it, with the tags and tests to run.
    for project in self.projects_conf:
        self.projects_conf[project]["rev-deps"] = {}
    for project in self.projects_conf:
        # BUG FIX: the membership test used the misspelled key "test-teps",
        # so the reverse-dependency map was never populated correctly.
        if "test-deps" in self.projects_conf[project]:
            for test_dep in self.projects_conf[project]["test-deps"]:
                rev_dep = {
                    project: {
                        "tags": self.projects_conf[project]["test-deps"][test_dep],
                        "tests": self.projects_conf[project]["replica"]["tests"]
                    }
                }
                self.projects_conf[test_dep]["rev-deps"].update(rev_dep)
    # Restrict the set of projects to operate on (work on a deep copy so
    # self.projects_conf keeps the full configuration).
    projects = copy.deepcopy(projects_conf)
    project_list = list(projects)
    if filter_method:
        new_projects = dict()
        log.info('Filtering projects with watch method: %s' % filter_method)
        for project_name in projects:
            if projects[project_name]['original']['watch-method'] == filter_method:
                new_projects[project_name] = projects[project_name]
        projects = new_projects
    if filter_projects:
        new_projects = dict()
        log.info('Filtering projects with names: %s' % filter_projects)
        project_names = filter_projects.split(',')
        for project_name in project_names:
            if project_name not in project_list:
                log.error("Project %s is not present in projects configuration" % project_name)
            try:
                new_projects[project_name] = projects[project_name]
            except KeyError:
                # Name was valid but removed by an earlier filter (or never
                # existed; the error above was already logged).
                log.warning("Project %s already discarded by previous filter" % project_name)
        projects = new_projects
    if filter_branches:
        log.info("Filtering branches: %s" % filter_branches)
        branches = filter_branches.split(',')
        for project_name in projects:
            projects[project_name]['original']['watch-branches'] = branches
    if not projects:
        log.error("Project list to operate on is empty")
        raise ValueError
    log.debugvar('projects')
    logsummary.info("initializing and updating local repositories for relevant projects")
    for project_name in projects:
        try:
            self.projects[project_name] = Project(
                project_name, projects[project_name],
                self.base_dir + "/" + project_name, fetch=fetch)
            logsummary.info("Project: %s initialized" % project_name)
        # `as e` replaces Python-2-only `except Exception, e` syntax.
        except Exception as e:
            traceback.print_exc(file=sys.stdout)
            log.error(e)
            logsummary.error("Project %s skipped, reason: %s" % (project_name, e))