def analyze_comments(self):
        """Scan review comments for YAML metadata and plain-text commands.

        Comments may update metadata too. Comments with actions will be
        acknowledged with:
            action:
              comment-id: comment id
              outcome: completed

        Returns a dict with the metadata merged from all YAML comments.
        Non-YAML comments are scanned line by line for bare commands
        (currently only DISCARD), which are recorded in self.user_requests
        keyed by the comment timestamp.
        """
        comments_metadata = dict()
        # Plain-text commands recognized in non-YAML comment bodies.
        comment_commands = ["DISCARD"]
        # Maybe it's better to start yaml comments with ---
        if self.comments:
            for comment in self.comments:
                log.debugvar('comment')
                try:
                    # SECURITY FIX: comment bodies are external input;
                    # yaml.load would allow arbitrary object construction.
                    comment_metadata = yaml.safe_load(comment['message'])
                    # ROBUSTNESS FIX: empty/plain-text comments parse to
                    # None or a str; fall through to command parsing instead
                    # of crashing on the membership test / update below.
                    if not isinstance(comment_metadata, dict):
                        raise ValueError
                    if 'user-request' in comment_metadata and str(comment_metadata['user-request']['comment-id']) in self.user_requests:
                        self.user_requests[str(comment_metadata['user-request']['comment-id'])]['outcome'] = comment_metadata['user-request']['outcome']
                        comment_metadata.pop('user-request')
                    comments_metadata.update(comment_metadata)
                except (ValueError, yaml.YAMLError):
                    # yaml.YAMLError is the common base of ScannerError,
                    # ParserError and ConstructorError.
                    for line in comment['message'].split('\n'):
                        for cc in comment_commands:
                            rs = re.search('^%s$' % cc, line)
                            if rs is not None:
                                self.user_requests[str(comment['timestamp'])] = dict()
                                self.user_requests[str(comment['timestamp'])]['type'] = cc
                                self.user_requests[str(comment['timestamp'])]['outcome'] = "open"

        return comments_metadata
# ---- Example #2 (extraction-artifact separator) ----
    def scan_replica_patches(self, patches_branch=None):
        """Scan patches branches and start recombination of new mutations.

        When *patches_branch* is given only that branch is scanned,
        otherwise every patches branch mapped from the original branches
        is scanned.  Mutations are only handled one at a time per branch.
        """
        if patches_branch:
            branches_to_scan = [patches_branch]
        else:
            branch_map = self.underlayer.branch_maps['original->patches']
            branches_to_scan = [branch_map[ob]
                                for ob in self.original_branches]

        for patches_branch in branches_to_scan:
            recombination, remaining_changes = (
                self.underlayer.get_recombination_from_patches(
                    patches_branch))
            # TODO: handle new patchset on same branch-patches review.
            if not recombination:
                logsummary.info(
                    "Project %s no new patches in patches branch %s" %
                    (self.project_name, patches_branch))
                continue
            # 'recomb' is looked up by name by log.debugvar
            recomb = recombination.__dict__
            log.debugvar('recomb')
            recombination.handle_status()
            if remaining_changes:
                log.warning(
                    "Remaining mutation changes %s will be handled in order one at a time after recombination %s is completed "
                    % (' '.join(remaining_changes), recombination.uuid))
# ---- Example #3 (extraction-artifact separator) ----
    def fetch_untested_recombinations(self, test_basedir, recomb_id=None):
        """Fetch untested recombinations and describe the tests to run.

        Returns a dict keyed by change number; each entry carries the
        target project, the directory the recombination was fetched to,
        and the expected result-file path per project and test type.
        """
        changes_infos = dict()
        dirlist = self.underlayer.fetch_recombinations(
            test_basedir, "untested", recomb_id=recomb_id)

        if not dirlist:
            logsummary.info("Project '%s': no untested recombinations" % self.project_name)

        if not self.test_types:
            logsummary.info("Project '%s': no tests specified" % self.project_name)
        else:
            for change_number in dirlist:
                # 'projects' is looked up by name by log.debugvar
                projects = self.get_reverse_dependencies(tags=['included','contained','required','classes', 'functions'])
                projects[self.project_name] = self.test_types
                log.debugvar('projects')
                tests = dict()
                for name in projects:
                    type_map = dict()
                    for test_type in projects[name]:
                        type_map[test_type] = "%s/%s/results/%s/%s_results.xml" % (
                            self.project_name, change_number, test_type, name)
                    tests[name] = {"types": type_map}
                changes_infos[change_number] = {
                    "target_project": self.project_name,
                    'recombination_dir': dirlist[change_number],
                    "recombination_id": change_number,
                    "tests": tests,
                }
                logsummary.info("Fetched recombination %s on dir %s" % (change_number, dirlist[change_number]))

        return changes_infos
# ---- Example #4 (extraction-artifact separator) ----
    def scan_original_distance(self, original_branch):
        """Reconcile replica/target state against *original_branch*.

        Groups the branch's recombinations into MERGED / APPROVED /
        PRESENT / MISSING slices and calls handle_status() on the
        recombinations each slice requires, logging the others.
        Always returns True.
        """
        # NOTE(review): replica_branch and target_branch are only used by
        # the commented-out lock check below -- confirm before removing.
        replica_branch = self.underlayer.branch_maps['original->replica'][original_branch]
        target_branch = self.underlayer.branch_maps['original->target'][original_branch]
        log.debug("Scanning distance from original branch %s" % original_branch)
#        if self.replication_strategy == "change-by-change" and revision_exists(self.ref_locks[replica_branch], replica_branch):
#                log.info("Cannot replicate branch past the specified lock")

        self.recombinations[original_branch] = self.get_recombinations_by_interval(original_branch)
        slices = self.get_slices(self.recombinations[original_branch])
        recombinations = self.recombinations[original_branch]


        log.debugvar('slices')
        # Master sync on merged changes
        # we really need only the last commit in the slice
        # we advance the master to that, and all the others will be merged too
        if slices['MERGED']:
            # one or more changes are merged in midstream, but missing in master
            # master is out of sync, changes need to be pushed
            # but check first if the change was changed with a merge commit
            # if yes, push THAT to master, if not, it's just a fast forward
            segment = slices['MERGED'][0]
            recomb_id = list(recombinations)[segment['end'] - 1]
            recombination = recombinations[recomb_id]
            recombination.handle_status()

        # Gerrit operations from approved changes
        # NOthing 'approved' can be merged if it has some "present" before in the history
        skip_list = set()
        for index, approved_segment in enumerate(slices['APPROVED']):
            for present_segment in slices['PRESENT']:
                if present_segment['start'] < approved_segment['start']:
                    skip_list.add(index)

        # NOTE(review): popping assumes list(skip_list)[::-1] yields indices
        # highest-first so earlier pops don't shift later ones; set iteration
        # order of ints makes this work in CPython -- confirm it is intended.
        for index in list(skip_list)[::-1]:
            segment = slices['APPROVED'].pop(index)
            for recomb_id in list(recombinations)[segment['start']:segment['end']]:
                log.warning("Recombination %s is approved but waiting for previous unapproved changes, skipping" % recomb_id)

        # Merge what remains
        for segment in slices['APPROVED']:
            for recomb_id in list(recombinations)[segment['start']:segment['end']]:
                recombination = recombinations[recomb_id]
                recombination.handle_status()

        # Notify of presence
        for segment in slices['PRESENT']:
            for recomb_id in list(recombinations)[segment['start']:segment['end']]:
                recombination = recombinations[recomb_id]
                log.warning("Recombination %s already present in replica gerrit as change %s and waiting for approval" % (recomb_id, recombination.number))
                recombination.handle_status()

        # Gerrit operations for missing changes
        for segment in slices['MISSING']:
            for recomb_id in list(recombinations)[segment['start']:segment['end']]:
                log.warning("Recombination %s is missing from replica gerrit" % recomb_id)
                recombination = recombinations[recomb_id]
                recombination.handle_status()

        return True
 def serve_requests(self):
     """Serve the user requests collected from recombination comments.

     Each not-yet-completed DISCARD request is acknowledged with a YAML
     comment (outcome: completed), voted code-review -2, and the review
     is then abandoned.
     """
     ur = self.user_requests
     log.debugvar('ur')
     if self.user_requests:
         log.info("Serving user requests in recombination %s comments" % self.uuid)
         for comment_id in self.user_requests:
             if self.user_requests[comment_id]['outcome'] != "completed":
                 if self.user_requests[comment_id]['type'] == "DISCARD":
                     served = { 'user-request': { 'comment-id' : comment_id, 'type': self.user_requests[comment_id]['type'], 'outcome': 'completed'}, 'recombine-status': 'DISCARDED'}
                     comment = yaml.safe_dump(served)
                     self.comment(comment, code_review="-2")
                     # BUG FIX: a stray 'raise ValueError' here made the
                     # abandon() call unreachable and aborted the loop.
                     self.abandon()
# ---- Example #6 (extraction-artifact separator) ----
    def delete_stale_branches(self):
        """Delete recomb-* branches with no open review, plus their target-* twins.

        A branch is active while an open change on the replica remote
        references it; every other 'recomb*' branch is stale.
        """
        recomb_all_branches = self.underlayer.list_branches('replica', pattern='recomb*')
        infos = self.underlayer.replica_remote.query_changes_json('"status:open AND project:%s"' % self.replica_project['name'])
        # names below are looked up by log.debugvar
        recomb_active_branches = [info['branch'] for info in infos]
        log.debugvar('recomb_active_branches')
        recomb_stale_branches = list(set(recomb_all_branches) - set(recomb_active_branches))
        log.debugvar('recomb_stale_branches')
        self.underlayer.delete_remote_branches('replica', recomb_stale_branches)
        target_stale_branches = [re.sub('recomb-','target-',stale_branch)
                                 for stale_branch in recomb_stale_branches]
        self.underlayer.delete_remote_branches('replica', target_stale_branches)
# ---- Example #7 (extraction-artifact separator) ----
 def prepare_tests(self, tests_basedir, recomb_id=None):
     """Collect tester variables for the untested recombinations of all projects.

     Returns a dict holding the global projects configuration under
     'projects_conf' plus one entry per fetched change number.
     """
     logsummary.info('Fetching untested recombinations')
     tester_vars = {'projects_conf': {'projects': self.projects_conf}}
     for project_name in self.projects:
         logsummary.info('Project: %s' % project_name)
         project = self.projects[project_name]
         log.debugvar('recomb_id')
         changes_infos = project.fetch_untested_recombinations(
             tests_basedir, recomb_id=recomb_id)
         # merge the per-change infos keyed by change number
         tester_vars.update(changes_infos)
     return tester_vars
# ---- Example #8 (extraction-artifact separator) ----
    def scan_replica_patches(self, patches_branch=None):
        """Scan patches branches and start recombination of new mutations.

        When *patches_branch* is given only that branch is scanned,
        otherwise every patches branch mapped from the original branches
        is scanned.
        """
        # Mutations are only handled one at a time per branch
        if patches_branch:
            patches_branches = [patches_branch]
        else:
            patches_branches = list()
            for original_branch in self.original_branches:
                patches_branches.append(self.underlayer.branch_maps['original->patches'][original_branch])

        for patches_branch in patches_branches:
            recombination, remaining_changes = self.underlayer.get_recombination_from_patches(patches_branch)
            # TODO: handle new patchset on same branch-patches review.
            if recombination:
                # 'recomb' is looked up by name by log.debugvar
                recomb = recombination.__dict__
                log.debugvar('recomb')
                recombination.handle_status()
                if remaining_changes:
                    log.warning("Remaining mutation changes %s will be handled in order one at a time after recombination %s is completed " % (' '.join(remaining_changes), recombination.uuid))
            else:
                logsummary.info("Project %s no new patches in patches branch %s" % (self.project_name, patches_branch))
# ---- Example #9 (extraction-artifact separator) ----
    def delete_stale_branches(self):
        """Delete recomb-* branches with no open review, plus their target-* twins.

        A branch is active while an open change on the replica remote
        references it; every other 'recomb*' branch is considered stale
        and deleted together with its 'target-' counterpart.
        """
        recomb_active_branches = list()
        target_stale_branches = list()
        recomb_all_branches = self.underlayer.list_branches('replica',
                                                            pattern='recomb*')
        infos = self.underlayer.replica_remote.query_changes_json(
            '"status:open AND project:%s"' % self.replica_project['name'])
        for info in infos:
            recomb_active_branches.append(info['branch'])

        log.debugvar('recomb_active_branches')
        # stale = every recomb branch not referenced by an open change
        recomb_stale_branches = list(
            set(recomb_all_branches) - set(recomb_active_branches))
        log.debugvar('recomb_stale_branches')
        self.underlayer.delete_remote_branches('replica',
                                               recomb_stale_branches)
        for recomb_branch in recomb_stale_branches:
            target_stale_branches.append(
                re.sub('recomb-', 'target-', recomb_branch))
        self.underlayer.delete_remote_branches('replica',
                                               target_stale_branches)
# ---- Example #10 (extraction-artifact separator) ----
    def fetch_untested_recombinations(self, test_basedir, recomb_id=None):
        """Fetch untested recombinations and describe the tests to run.

        Returns a dict keyed by change number; each entry carries the
        target project, the directory the recombination was fetched to,
        and the expected result-file path per project and test type.
        """
        changes_infos = dict()
        dirlist = self.underlayer.fetch_recombinations(test_basedir,
                                                       "untested",
                                                       recomb_id=recomb_id)

        if not dirlist:
            logsummary.info("Project '%s': no untested recombinations" %
                            self.project_name)

        if not self.test_types:
            logsummary.info("Project '%s': no tests specified" %
                            self.project_name)
        else:
            for change_number in dirlist:
                tests = dict()
                # reverse dependencies plus this project's own test types
                projects = self.get_reverse_dependencies(tags=[
                    'included', 'contained', 'required', 'classes', 'functions'
                ])
                projects[self.project_name] = self.test_types
                log.debugvar('projects')
                for name in projects:
                    tests[name] = dict()
                    tests[name]["types"] = dict()
                    for test_type in projects[name]:
                        result_file = "%s/%s/results/%s/%s_results.xml" % (
                            self.project_name, change_number, test_type, name)
                        tests[name]["types"][test_type] = result_file
                changes_infos[change_number] = {
                    "target_project": self.project_name,
                    'recombination_dir': dirlist[change_number],
                    "recombination_id": change_number,
                    "tests": tests,
                }
                logsummary.info("Fetched recombination %s on dir %s" %
                                (change_number, dirlist[change_number]))

        return changes_infos
# ---- Example #11 (extraction-artifact separator) ----
    def __init__(self,
                 projects_conf,
                 base_dir,
                 filter_projects=None,
                 filter_method=None,
                 filter_branches=None,
                 fetch=True):
        """Initialize the set of projects to operate on.

        projects_conf   -- mapping of project name -> project configuration
        base_dir        -- base directory holding the local repositories
        filter_projects -- optional comma-separated project names to keep
        filter_method   -- optional original 'watch-method' value to keep
        filter_branches -- optional comma-separated branch names that
                           override each project's 'watch-branches'
        fetch           -- forwarded to Project(): update local repos

        Raises ValueError when filtering leaves no project to operate on.
        """
        self.projects = dict()
        self.projects_conf = projects_conf
        self.base_dir = base_dir
        # extract reverse dependencies
        for project in self.projects_conf:
            self.projects_conf[project]["rev-deps"] = {}
        for project in self.projects_conf:
            # BUG FIX: the key was misspelled "test-teps" here while the
            # loop below reads "test-deps", so reverse dependencies were
            # never extracted.
            if "test-deps" in self.projects_conf[project]:
                for test_dep in self.projects_conf[project]["test-deps"]:
                    rev_dep = {
                        project: {
                            "tags":
                            self.projects_conf[project]["test-deps"][test_dep],
                            "tests":
                            self.projects_conf[project]["replica"]["tests"]
                        }
                    }
                    self.projects_conf[test_dep]["rev-deps"].update(rev_dep)

        # restrict project to operate on
        projects = copy.deepcopy(projects_conf)
        project_list = list(projects)
        if filter_method:
            new_projects = dict()
            log.info('Filtering projects with watch method: %s' %
                     filter_method)
            for project_name in projects:
                if projects[project_name]['original'][
                        'watch-method'] == filter_method:
                    new_projects[project_name] = projects[project_name]
            projects = new_projects
        if filter_projects:
            new_projects = dict()
            log.info('Filtering projects with names: %s' % filter_projects)
            project_names = filter_projects.split(',')
            for project_name in project_names:
                if project_name not in project_list:
                    log.error(
                        "Project %s is not present in projects configuration" %
                        project_name)
                    # BUG FIX: without this continue, the KeyError below also
                    # logged a misleading "already discarded" warning for
                    # names that were never in the configuration at all.
                    continue
                try:
                    new_projects[project_name] = projects[project_name]
                except KeyError:
                    log.warning(
                        "Project %s already discarded by previous filter" %
                        project_name)
            projects = new_projects
        if filter_branches:
            log.info("Filtering branches: %s" % filter_branches)
            branches = filter_branches.split(',')
            for project_name in projects:
                projects[project_name]['original']['watch-branches'] = branches

        if not projects:
            log.error("Project list to operate on is empty")
            raise ValueError("Project list to operate on is empty")
        log.debugvar('projects')

        logsummary.info(
            "initializing and updating local repositories for relevant projects"
        )

        for project_name in projects:
            try:
                self.projects[project_name] = Project(project_name,
                                                      projects[project_name],
                                                      self.base_dir + "/" +
                                                      project_name,
                                                      fetch=fetch)
                logsummary.info("Project: %s initialized" % project_name)
            # 'as' form is valid on Python 2.6+ and 3.x (was 'Exception, e')
            except Exception as e:
                traceback.print_exc(file=sys.stdout)
                log.error(e)
                logsummary.error("Project %s skipped, reason: %s" %
                                 (project_name, e))
# ---- Example #12 (extraction-artifact separator) ----
    def scan_original_distance(self, original_branch):
        """Reconcile replica/target state against *original_branch*.

        Groups the branch's recombinations into MERGED / APPROVED /
        PRESENT / MISSING slices and calls handle_status() on the
        recombinations each slice requires, logging the others.
        Always returns True.
        """
        # NOTE(review): replica_branch and target_branch are only used by
        # the commented-out lock check below -- confirm before removing.
        replica_branch = self.underlayer.branch_maps['original->replica'][
            original_branch]
        target_branch = self.underlayer.branch_maps['original->target'][
            original_branch]
        log.debug("Scanning distance from original branch %s" %
                  original_branch)
        #        if self.replication_strategy == "change-by-change" and revision_exists(self.ref_locks[replica_branch], replica_branch):
        #                log.info("Cannot replicate branch past the specified lock")

        self.recombinations[
            original_branch] = self.get_recombinations_by_interval(
                original_branch)
        slices = self.get_slices(self.recombinations[original_branch])
        recombinations = self.recombinations[original_branch]

        log.debugvar('slices')
        # Master sync on merged changes
        # we really need only the last commit in the slice
        # we advance the master to that, and all the others will be merged too
        if slices['MERGED']:
            # one or more changes are merged in midstream, but missing in master
            # master is out of sync, changes need to be pushed
            # but check first if the change was changed with a merge commit
            # if yes, push THAT to master, if not, it's just a fast forward
            segment = slices['MERGED'][0]
            recomb_id = list(recombinations)[segment['end'] - 1]
            recombination = recombinations[recomb_id]
            recombination.handle_status()

        # Gerrit operations from approved changes
        # NOthing 'approved' can be merged if it has some "present" before in the history
        skip_list = set()
        for index, approved_segment in enumerate(slices['APPROVED']):
            for present_segment in slices['PRESENT']:
                if present_segment['start'] < approved_segment['start']:
                    skip_list.add(index)

        # NOTE(review): popping assumes list(skip_list)[::-1] yields indices
        # highest-first so earlier pops don't shift later ones -- confirm.
        for index in list(skip_list)[::-1]:
            segment = slices['APPROVED'].pop(index)
            for recomb_id in list(
                    recombinations)[segment['start']:segment['end']]:
                log.warning(
                    "Recombination %s is approved but waiting for previous unapproved changes, skipping"
                    % recomb_id)

        # Merge what remains
        for segment in slices['APPROVED']:
            for recomb_id in list(
                    recombinations)[segment['start']:segment['end']]:
                recombination = recombinations[recomb_id]
                recombination.handle_status()

        # Notify of presence
        for segment in slices['PRESENT']:
            for recomb_id in list(
                    recombinations)[segment['start']:segment['end']]:
                recombination = recombinations[recomb_id]
                log.warning(
                    "Recombination %s already present in replica gerrit as change %s and waiting for approval"
                    % (recomb_id, recombination.number))
                recombination.handle_status()

        # Gerrit operations for missing changes
        for segment in slices['MISSING']:
            for recomb_id in list(
                    recombinations)[segment['start']:segment['end']]:
                log.warning("Recombination %s is missing from replica gerrit" %
                            recomb_id)
                recombination = recombinations[recomb_id]
                recombination.handle_status()

        return True
 def load_from_remote(self, search_value, branch=None):
     """Fetch change data from the remote and load it into this object.

     search_value -- change identifier passed to the remote query
     branch       -- optional branch to narrow the search
     """
     # 'data' is looked up by name by log.debugvar
     data = self.remote.get_change_data(search_value, branch=branch)
     log.debugvar('data')
     self.load_data(data)