コード例 #1
0
    def test_supersede(self):
        """A newer submit request for a staged package supersedes the old one."""
        self.wf.setup_rings()
        staging = self.wf.create_staging('A', freeze=True)

        # Stage an initial submission of wine.
        first = self.wf.create_submit_request('devel:wine', 'wine')
        SelectCommand(self.wf.api, staging.name).perform(['wine'])
        # A second submission of the same package should supersede the first.
        second = self.wf.create_submit_request('devel:wine',
                                               'wine',
                                               text='Something new')
        self.wf.api._packages_staged = None

        self.osc_user('staging-bot')
        Config.get(self.wf.apiurl, self.wf.project)

        SupersedeCommand(self.wf.api).perform()

        # The original request ends up declined by the staging group ...
        self.assertEqual(first.reviews(), [
            {'state': 'accepted', 'by_group': 'factory-staging'},
            {'state': 'accepted', 'by_project': 'openSUSE:Factory:Staging:A'},
            {'state': 'declined', 'by_group': 'factory-staging'},
        ])
        # ... while the replacement inherits the staging-project review.
        self.assertEqual(second.reviews(), [
            {'state': 'accepted', 'by_group': 'factory-staging'},
            {'state': 'new', 'by_project': 'openSUSE:Factory:Staging:A'},
        ])
コード例 #2
0
    def request_repository_pairs(self, request, action):
        """Resolve the ordered (project, repository) pairs to check for action.

        Returns:
        - True when the target project opted out via repo_checker-project-skip,
        - False (declined message set) when no usable repository is found,
        - None when the request is staged but not yet ready for checking,
        - otherwise a list of [project, repository] pairs.
        """
        if str2bool(
                Config.get(self.apiurl,
                           action.tgt_project).get('repo_checker-project-skip',
                                                   'False')):
            # Do not change message as this should only occur in requests
            # targeting multiple projects such as in maintenance workflow in
            # which the message should be set by other actions.
            self.logger.debug('skipping review of action targeting {}'.format(
                action.tgt_project))
            return True

        repository = self.project_repository(action.tgt_project)
        if not repository:
            self.review_messages['declined'] = ERROR_REPO_SPECIFIED.format(
                action.tgt_project)
            return False

        repository_pairs = []
        # Assumes maintenance_release target project has staging disabled.
        staging = Config.get(self.apiurl, action.tgt_project).get('staging')
        if staging:
            api = self.staging_api(staging)
            stage_info = api.packages_staged.get(action.tgt_package)
            # Only proceed once this very request is the one that is staged.
            if not stage_info or str(stage_info['rq_id']) != str(
                    request.reqid):
                self.logger.info('{} not staged'.format(request.reqid))
                return None

            # Defer until the staging project builds cleanly (unless forced).
            if not self.force and not self.staging_build_failure_check(
                    api, stage_info['prj']):
                self.logger.info(
                    '{} not ready due to staging build failure(s)'.format(
                        request.reqid))
                return None

            # Staging setup is convoluted and thus the repository setup does not
            # contain a path to the target project. Instead the ports repository
            # is used to import the target prjconf. As such the staging group
            # repository must be explicitly layered on top of target project.
            repository_pairs.append([stage_info['prj'], repository])
            repository_pairs.extend(
                repository_path_expand(self.apiurl, action.tgt_project,
                                       repository))
        else:
            # Find a repository which links to target project "main" repository.
            repository = repository_path_search(self.apiurl,
                                                action.src_project,
                                                action.tgt_project, repository)
            if not repository:
                self.review_messages['declined'] = ERROR_REPO_SPECIFIED.format(
                    action.tgt_project)
                return False

            repository_pairs.extend(
                repository_path_expand(self.apiurl, action.src_project,
                                       repository))

        return repository_pairs
コード例 #3
0
    def test_get_memoize_reset(self):
        """Ensure memoize_session_reset() properly forces re-fetch of config."""
        apiurl, project = obs.APIURL, obs.PROJECT
        self.assertEqual('remote-indeed', Config.get(apiurl, project)['remote-only'])

        # Update the remote attribute, then drop the memoized session cache so
        # the next Config.get() is forced to re-fetch.
        attribute_value_save(apiurl, project, 'Config', 'remote-only = new value\n')
        memoize_session_reset()

        self.assertEqual('new value', Config.get(apiurl, project)['remote-only'])
コード例 #4
0
    def test_get_memoize_reset(self):
        """Ensure memoize_session_reset() properly forces re-fetch of config."""
        wf = self.setup_vcr()

        def remote_only():
            return Config.get(wf.apiurl, wf.project)['remote-only']

        self.assertEqual('remote-indeed', remote_only())

        # Rewrite the remote attribute and flush the memoize session cache.
        attribute_value_save(wf.apiurl, wf.project, 'Config', 'remote-only = new value\n')
        memoize_session_reset()

        # The next lookup must re-fetch and observe the new value.
        self.assertEqual('new value', remote_only())
コード例 #5
0
    def test_get_memoize_reset(self):
        """Ensure memoize_session_reset() properly forces re-fetch of config."""
        key = 'remote-only'
        self.assertEqual('remote-indeed', Config.get(APIURL, PROJECT)[key])

        # Change the stored attribute, then invalidate the memoize session so
        # the following Config.get() re-reads from the server.
        attribute_value_save(APIURL, PROJECT, 'Config',
                             'remote-only = new value\n')
        memoize_session_reset()

        self.assertEqual('new value', Config.get(APIURL, PROJECT)[key])
コード例 #6
0
    def staging_api(self, project):
        """Return a cached StagingAPI instance for project.

        A ':Staging' suffix passed straight from config is stripped first;
        this lets the NonFree subproject reuse the main project's StagingAPI.
        """
        suffix = ':Staging'
        if project.endswith(suffix):
            project = project[:-len(suffix)]

        if project not in self.staging_apis:
            # Prime the project config before constructing the API.
            Config.get(self.apiurl, project)
            self.staging_apis[project] = StagingAPI(self.apiurl, project)

        return self.staging_apis[project]
コード例 #7
0
    def test_get_memoize_reset(self):
        """Ensure memoize_session_reset() properly forces re-fetch of config."""
        wf = self.setup_vcr()

        before = Config.get(wf.apiurl, wf.project)['remote-only']
        self.assertEqual('remote-indeed', before)

        attribute_value_save(wf.apiurl, wf.project, 'Config',
                             'remote-only = new value\n')
        memoize_session_reset()  # force the next Config.get() to re-fetch

        after = Config.get(wf.apiurl, wf.project)['remote-only']
        self.assertEqual('new value', after)
コード例 #8
0
    def staging_api(self, project):
        """Lazily construct and cache a StagingAPI for project."""
        # Config may hand us the Staging subproject directly; strip it so the
        # NonFree subproject can use the main project's StagingAPI.
        project = project[:-8] if project.endswith(':Staging') else project

        try:
            return self.staging_apis[project]
        except KeyError:
            Config.get(self.apiurl, project)  # ensure project config is loaded
            api = self.staging_apis[project] = StagingAPI(self.apiurl, project)
            return api
コード例 #9
0
    def check_one_request(self, req):
        """Review one request against Factory-submission policy.

        Delegates the per-action checks to the ReviewBot base class, then
        posts comments and adds follow-up review groups based on the flags
        the action handlers set. Returns the tri-state base-class result
        (True accepted / False declined / None undecided).
        """
        config = Config.get(self.apiurl, req.actions[0].tgt_project)
        # Reset per-request state; the check_action_* handlers flip these
        # flags while inspecting the request's actions.
        self.needs_legal_review = False
        self.needs_reviewteam = False
        self.needs_release_manager = False
        self.pending_factory_submission = False
        self.source_in_factory = None
        self.do_check_maintainer_review = not self.ibs
        self.packages = {}

        request_ok = ReviewBot.ReviewBot.check_one_request(self, req)

        self.logger.debug("review result: %s", request_ok)
        if self.pending_factory_submission:
            self.logger.info("submission is waiting for a Factory request to complete")
            creator = req.get_creator()
            bot_name = self.bot_name.lower()
            if self.automatic_submission and creator != bot_name:
                self.logger.info('@{}: this request would have been automatically created by {} after the Factory submission was accepted in order to eleviate the need to manually create requests for packages sourced from Factory'.format(creator, bot_name))
        elif self.source_in_factory:
            self.logger.info("perfect. the submitted sources are in or accepted for Factory")
        elif self.source_in_factory == False:
            # source_in_factory is tri-state; None (unknown) skips this branch.
            self.logger.warning("the submitted sources are NOT in Factory")

        if request_ok == False:
            self.logger.info("NOTE: if you think the automated review was wrong here, please talk to the release team before reopening the request")

        if self.do_comments:
            # Map the tri-state review result onto a comment state/result pair.
            result = None
            if request_ok is None:
                state = 'seen'
            elif request_ok:
                state = 'done'
                result = 'accepted'
            else:
                state = 'done'
                result = 'declined'

            self.comment_write(state, result)

        # Collect extra review groups; explicit bot settings take precedence
        # over the target project's configuration.
        add_review_groups = []
        if self.needs_release_manager:
            add_review_groups.append(self.release_manager_group or
                                     config.get(self.override_group_key))
        if self.needs_reviewteam:
            add_review_groups.append(self.review_team_group or
                                     config.get('review-team'))
        if self.needs_legal_review:
            add_review_groups.append(self.legal_review_group or
                                     config.get('legal-review-group'))
        if self.needs_check_source and self.check_source_group is not None:
            add_review_groups.append(self.check_source_group)

        for group in add_review_groups:
            if group is None:
                continue
            self.logger.info("{0} needs review by [{1}](/group/show/{1})".format(req.reqid, group))
            self.add_review(req, by_group=group)

        return request_ok
コード例 #10
0
    def target_project_config(self, project):
        """Load per-target-project settings (supports remote config entries)."""
        config = Config.get(self.apiurl, project)

        def as_bool(key, default):
            # Config values are strings; interpret them as booleans.
            return str2bool(config.get(key, default))

        self.single_action_require = as_bool('check-source-single-action-require', 'False')
        self.ignore_devel = not as_bool('devel-project-enforce', 'False')
        self.in_air_rename_allow = as_bool('check-source-in-air-rename-allow', 'False')
        self.add_review_team = as_bool('check-source-add-review-team', 'True')
        self.review_team = config.get('review-team')
        self.staging_group = config.get('staging-group')
        self.repo_checker = config.get('repo-checker')
        self.devel_whitelist = config.get('devel-whitelist', '').split()
        self.skip_add_reviews = False

        if self.action.type == 'maintenance_incident':
            # The workflow enforces matching names and the parent code sets
            # target_package from source_package, so the in-air rename check
            # is useless and awkward to perform here.
            self.in_air_rename_allow = True

            # The target project will be set to product and thus inherit
            # settings, but override since the real target is not product.
            self.single_action_require = False

            # It might make sense to supersede maintbot, but for now.
            self.skip_add_reviews = True
コード例 #11
0
    def check_action_submit(self, request, action):
        """Review a submit action via cycle and installability checks.

        Returns True on success, None to retry later, or propagates the
        non-list result (True/False/None) from request_repository_pairs.
        """
        repository_pairs = self.request_repository_pairs(request, action)
        if not isinstance(repository_pairs, list):
            return repository_pairs

        # use project_only results by default as reference
        whitelist = None
        config = Config.get(self.apiurl, action.tgt_project)
        staging = config.get('staging')
        arch_whitelist = config.get('repo_checker-arch-whitelist')
        cycle_packages = config.get('repo_checker-allowed-in-cycles')
        if staging:
            api = self.staging_api(staging)
            if not api.is_adi_project(repository_pairs[0][0]):
                # For "leaky" ring packages in letter stagings, where the
                # repository setup does not include the target project, that are
                # not intended to to have all run-time dependencies satisfied.
                whitelist = set(config.get('repo_checker-binary-whitelist-ring', '').split(' '))

        state_hash = self.repository_state(repository_pairs, True)
        if not self.repository_check(repository_pairs, state_hash, True,
                                     arch_whitelist=arch_whitelist,
                                     whitelist=whitelist,
                                     cycle_packages=cycle_packages):
            return None

        self.review_messages['accepted'] = 'cycle and install check passed'
        return True
コード例 #12
0
    def check_action_submit(self, request, action):
        """Review a submit action via cycle and installability checks.

        Returns True on success, None to retry later, or propagates the
        non-list result (True/False/None) from request_repository_pairs.
        """
        repository_pairs = self.request_repository_pairs(request, action)
        if not isinstance(repository_pairs, list):
            return repository_pairs

        # use project_only results by default as reference
        whitelist = None
        config = Config.get(self.apiurl, action.tgt_project)
        staging = config.get('staging')
        arch_whitelist = config.get('repo_checker-arch-whitelist')
        cycle_packages = config.get('repo_checker-allowed-in-cycles')
        if staging:
            api = self.staging_api(staging)
            if not api.is_adi_project(repository_pairs[0][0]):
                # For "leaky" ring packages in letter stagings, where the
                # repository setup does not include the target project, that are
                # not intended to have all run-time dependencies satisfied.
                # Use a set (de-duplicated, O(1) membership) to match the
                # sibling implementation of this method.
                whitelist = set(config.get('repo_checker-binary-whitelist-ring', '').split(' '))

        state_hash = self.repository_state(repository_pairs, True)
        if not self.repository_check(repository_pairs, state_hash, True,
                                     arch_whitelist=arch_whitelist,
                                     whitelist=whitelist,
                                     cycle_packages=cycle_packages):
            return None

        self.review_messages['accepted'] = 'cycle and install check passed'
        return True
コード例 #13
0
    def request_age_wait(self,
                         age_min=None,
                         request=None,
                         target_project=None):
        """Return True (i.e. skip for now) when the request is younger than
        the configured minimum age in seconds."""
        if not request:
            request = self.request

        if not target_project:
            target_project = self.action.tgt_project

        if age_min is None or isinstance(age_min, str):
            # A string (or None) names the config key holding the threshold.
            key = age_min if isinstance(age_min, str) else self.request_age_min_key
            config = Config.get(self.apiurl, target_project)
            age_min = int(config.get(key, self.request_age_min_default))

        age = request_age(request).total_seconds()
        if age >= age_min:
            return False

        self.logger.info(
            'skipping {} of age {:.2f}s since it is younger than {}s'.
            format(request.reqid, age, age_min))
        return True
コード例 #14
0
    def check_one_request(self, req):
        """Review one request against Factory-submission policy.

        Delegates the per-action checks to the ReviewBot base class, then
        posts comments and adds follow-up review groups based on the flags
        the action handlers set. Returns the tri-state base-class result
        (True accepted / False declined / None undecided).
        """
        config = Config.get(self.apiurl, req.actions[0].tgt_project)
        # Reset per-request state; the check_action_* handlers flip these
        # flags while inspecting the request's actions.
        self.needs_legal_review = False
        self.needs_reviewteam = False
        self.needs_release_manager = False
        self.pending_factory_submission = False
        self.source_in_factory = None
        self.do_check_maintainer_review = not self.ibs
        self.packages = {}

        request_ok = ReviewBot.ReviewBot.check_one_request(self, req)

        self.logger.debug("review result: %s", request_ok)
        if self.pending_factory_submission:
            self.logger.info("submission is waiting for a Factory request to complete")
            creator = req.get_creator()
            bot_name = self.bot_name.lower()
            if self.automatic_submission and creator != bot_name:
                self.logger.info('@{}: this request would have been automatically created by {} after the Factory submission was accepted in order to eleviate the need to manually create requests for packages sourced from Factory'.format(creator, bot_name))
        elif self.source_in_factory:
            self.logger.info("perfect. the submitted sources are in or accepted for Factory")
        elif self.source_in_factory == False:
            # logger.warn() is a deprecated alias; use warning() as the
            # sibling implementation already does.
            self.logger.warning("the submitted sources are NOT in Factory")

        if request_ok == False:
            self.logger.info("NOTE: if you think the automated review was wrong here, please talk to the release team before reopening the request")

        if self.do_comments:
            # Map the tri-state review result onto a comment state/result pair.
            result = None
            if request_ok is None:
                state = 'seen'
            elif request_ok:
                state = 'done'
                result = 'accepted'
            else:
                state = 'done'
                result = 'declined'

            self.comment_write(state, result)

        # Collect extra review groups; explicit bot settings take precedence
        # over the target project's configuration.
        add_review_groups = []
        if self.needs_release_manager:
            add_review_groups.append(self.release_manager_group or
                                     config.get(self.override_group_key))
        if self.needs_reviewteam:
            add_review_groups.append(self.review_team_group or
                                     config.get('review-team'))
        if self.needs_legal_review:
            add_review_groups.append(self.legal_review_group or
                                     config.get('legal-review-group'))
        if self.needs_check_source and self.check_source_group is not None:
            add_review_groups.append(self.check_source_group)

        for group in add_review_groups:
            if group is None:
                continue
            self.logger.info("{0} needs review by [{1}](/group/show/{1})".format(req.reqid, group))
            self.add_review(req, by_group=group)

        return request_ok
コード例 #15
0
    def target_project_config(self, project):
        """Load settings for the request's target project, allowing remote
        config entries to be resolved."""
        config = Config.get(self.apiurl, project)

        def flag(key, default):
            # Config values are strings; interpret them as booleans.
            return str2bool(config.get(key, default))

        self.single_action_require = flag('check-source-single-action-require', 'False')
        self.ignore_devel = not flag('devel-project-enforce', 'False')
        self.in_air_rename_allow = flag('check-source-in-air-rename-allow', 'False')
        self.add_review_team = flag('check-source-add-review-team', 'True')
        self.review_team = config.get('review-team')
        self.mail_release_list = config.get('mail-release-list')
        self.staging_group = config.get('staging-group')
        self.repo_checker = config.get('repo-checker')
        self.devel_whitelist = config.get('devel-whitelist', '').split()
        self.skip_add_reviews = False
        self.security_review_team = config.get('security-review-team',
                                               'security-team')
        self.bad_rpmlint_entries = config.get('bad-rpmlint-entries',
                                              '').split()

        if self.action.type == 'maintenance_incident':
            # The workflow enforces matching names and the parent code sets
            # target_package from source_package, so the in-air rename check
            # is useless and awkward to perform here.
            self.in_air_rename_allow = True

            # The target project will be set to product and thus inherit
            # settings, but override since the real target is not product.
            self.single_action_require = False

            # It might make sense to supersede maintbot, but for now.
            self.skip_add_reviews = True
コード例 #16
0
def mail_send(apiurl,
              project,
              to,
              subject,
              body,
              from_key='maintainer',
              followup_to_key='release-list',
              dry=False):
    """Send a project notification mail, resolving addresses from config.

    from_key of None uses the current user's email as sender; otherwise the
    'mail-<from_key>' config entry is used. Bare recipient names (no '@')
    are looked up as 'mail-<to>' config entries.
    """
    config = Config.get(apiurl, project)

    if from_key is None:
        sender = entity_email(apiurl,
                              conf.get_apiurl_usr(apiurl),
                              include_name=True)
    else:
        sender = config['mail-{}'.format(from_key)]

    recipient = config['mail-{}'.format(to)] if '@' not in to else to

    mail_send_with_details(text=body,
                           subject=subject,
                           relay=config.get('mail-relay', 'relay.suse.de'),
                           sender=sender,
                           followup_to=config.get('mail-{}'.format(followup_to_key)),
                           to=recipient,
                           dry=dry)
コード例 #17
0
    def __init__(self, opensuse_project, sle_project, alternative_project,
                 print_only, verbose):
        """Initialize skip-package finder state from osc config and the
        target project's remote configuration.

        opensuse_project is always the upload target; binary information is
        read from alternative_project instead when one is given.
        """
        self.upload_project = opensuse_project
        self.opensuse_project = opensuse_project
        if alternative_project:
            self.opensuse_project = alternative_project
        self.sle_project = sle_project
        self.print_only = print_only
        self.verbose = verbose
        self.apiurl = osc.conf.config['apiurl']
        self.debug = osc.conf.config['debug']

        config = Config.get(self.apiurl, opensuse_project)
        # Binary RPMs of packages listed in `skippkg-finder-skiplist-ignores`
        # are looked up in `package_binaries`, so entries must use the
        # PROJECT-NAME_PACKAGE-NAME format, e.g. SUSE:SLE-15:Update_libcdio.12032.
        self.skiplist_ignored = set(
            config.get('skippkg-finder-skiplist-ignores', '').split(' '))

        # Regex patterns selecting supplemental RPMs to skip from the ftp-tree.
        self.skiplist_supplement_regex = set(
            config.get('skippkg-finder-skiplist-supplement-regex',
                       '').split(' '))
        # RPMs to drop again from the regex-selected supplement list.
        self.skiplist_supplement_ignores = set(
            config.get('skippkg-finder-skiplist-supplement-ignores',
                       '').split(' '))
コード例 #18
0
ファイル: util.py プロジェクト: openSUSE/osc-plugin-factory
def mail_send(apiurl, project, to, subject, body, from_key='maintainer',
              followup_to_key='release-list', dry=False):
    """Compose and send (or, when dry, just print) a notification mail.

    Sender and bare recipient names are resolved via 'mail-*' entries in
    the project's remote config.
    """
    from email.mime.text import MIMEText
    import email.utils
    import smtplib

    config = Config.get(apiurl, project)

    # Resolve sender: explicit config key, or the current user's address.
    if from_key is None:
        sender = entity_email(apiurl, conf.get_apiurl_usr(apiurl), include_name=True)
    else:
        sender = config['mail-{}'.format(from_key)]

    # Bare recipient names (no '@') are config lookups.
    if '@' not in to:
        to = config['mail-{}'.format(to)]

    msg = MIMEText(body)
    msg['Message-ID'] = email.utils.make_msgid()
    msg['Date'] = email.utils.formatdate(localtime=1)
    msg['From'] = sender
    msg['To'] = to
    followup_to = config.get('mail-{}'.format(followup_to_key))
    if followup_to:
        msg['Mail-Followup-To'] = followup_to
    msg['Subject'] = subject

    if dry:
        print(msg.as_string())
        return

    server = smtplib.SMTP(config.get('mail-relay', 'relay.suse.de'))
    server.sendmail(sender, [to], msg.as_string())
    server.quit()
コード例 #19
0
def policy_get_preprocess(apiurl, origin, policy):
    """Resolve config variables in the policy's allowed-review list.

    origin may carry trailing '~' markers which are not part of the project
    name used for the config lookup.
    """
    project = origin.rstrip('~')
    config_project = Config.get(apiurl, project)
    # Materialize as a list: in Python 3 filter() returns a single-use
    # iterator, which would be exhausted after one pass over the policy.
    policy['pending_submission_allowed_reviews'] = list(filter(None, [
        config_resolve_variable(v, config_project, 'config_source')
        for v in policy['pending_submission_allowed_reviews']]))

    return policy
コード例 #20
0
def policy_get_preprocess(apiurl, origin, policy):
    """Resolve config variables in the policy's allowed-review list.

    origin may carry trailing '~' markers which are not part of the project
    name used for the config lookup.
    """
    project = origin.rstrip('~')
    config_project = Config.get(apiurl, project)
    # Wrap in list(): a bare Python 3 filter object is a one-shot iterator
    # and would be empty on any second iteration or on serialization.
    policy['pending_submission_allowed_reviews'] = list(filter(None, [
        config_resolve_variable(v, config_project, 'config_source')
        for v in policy['pending_submission_allowed_reviews']]))

    return policy
コード例 #21
0
    def project_pseudometa_file_name(self, project, repository):
        """Return the pseudometa file name, qualified by repository unless
        the project config declares a main repository."""
        if Config.get(self.apiurl, project).get('main-repo'):
            return 'repo_checker'
        return '.'.join(['repo_checker', repository])
コード例 #22
0
    def project_pseudometa_file_name(self, project, repository):
        """Return the pseudometa file name, qualified by repository unless
        the project config declares a main repository."""
        base = 'repo_checker'
        main_repo = Config.get(self.api.apiurl, project).get('main-repo')
        return base if main_repo else '{}.{}'.format(base, repository)
コード例 #23
0
    def request_repository_pairs(self, request, action):
        """Resolve the ordered (project, repository) pairs to check for action.

        Returns:
        - True when the target project opted out via repo_checker-project-skip,
        - False (declined message set) when no usable repository is found,
        - None when the request is staged but not yet ready for checking,
        - otherwise a list of [project, repository] pairs.
        """
        if str2bool(Config.get(self.apiurl, action.tgt_project).get('repo_checker-project-skip', 'False')):
            # Do not change message as this should only occur in requests
            # targeting multiple projects such as in maintenance workflow in
            # which the message should be set by other actions.
            self.logger.debug('skipping review of action targeting {}'.format(action.tgt_project))
            return True

        repository = self.project_repository(action.tgt_project)
        if not repository:
            self.review_messages['declined'] = ERROR_REPO_SPECIFIED.format(action.tgt_project)
            return False

        repository_pairs = []
        # Assumes maintenance_release target project has staging disabled.
        staging = Config.get(self.apiurl, action.tgt_project).get('staging')
        if staging:
            api = self.staging_api(staging)
            stage_info = api.packages_staged.get(action.tgt_package)
            # Only proceed once this very request is the one that is staged.
            if not stage_info or str(stage_info['rq_id']) != str(request.reqid):
                self.logger.info('{} not staged'.format(request.reqid))
                return None

            # Defer until the staging project builds cleanly (unless forced).
            if not self.force and not self.staging_build_failure_check(api, stage_info['prj']):
                self.logger.info('{} not ready due to staging build failure(s)'.format(request.reqid))
                return None

            # Staging setup is convoluted and thus the repository setup does not
            # contain a path to the target project. Instead the ports repository
            # is used to import the target prjconf. As such the staging group
            # repository must be explicitly layered on top of target project.
            repository_pairs.append([stage_info['prj'], repository])
            repository_pairs.extend(repository_path_expand(self.apiurl, action.tgt_project, repository))
        else:
            # Find a repository which links to target project "main" repository.
            repository = repository_path_search(
                self.apiurl, action.src_project, action.tgt_project, repository)
            if not repository:
                self.review_messages['declined'] = ERROR_REPO_SPECIFIED.format(action.tgt_project)
                return False

            repository_pairs.extend(repository_path_expand(self.apiurl, action.src_project, repository))

        return repository_pairs
コード例 #24
0
    def target_archs(self, project, repository):
        """Architectures built for repository, filtered by the optional
        per-project arch whitelist."""
        archs = target_archs(self.apiurl, project, repository)

        whitelist = Config.get(self.apiurl, project).get('repo_checker-arch-whitelist')
        if whitelist:
            archs = list(set(whitelist.split(' ')) & set(archs))

        # Reverse sort places x86_64 ahead of the other architectures.
        return sorted(archs, reverse=True)
コード例 #25
0
def origin_devel_projects(apiurl, project):
    """Return the sorted union of devel projects, devel projects from open
    requests, and the configured devel whitelist."""
    projects = set(devel_projects(apiurl, project))
    projects.update(devel_project for devel_project, _ in
                    origin_devel_project_requests(apiurl, project))
    projects.update(
        Config.get(apiurl, project).get('devel-whitelist', '').split())

    return sorted(projects)
コード例 #26
0
    def target_archs(self, project, repository):
        """Architectures built for repository, filtered by the optional
        per-project arch whitelist."""
        available = target_archs(self.apiurl, project, repository)

        whitelist = Config.get(self.apiurl,
                               project).get('repo_checker-arch-whitelist')
        if whitelist:
            available = list(set(available).intersection(whitelist.split(' ')))

        # Reverse sort places x86_64 ahead of the other architectures.
        return sorted(available, reverse=True)
コード例 #27
0
    def project_only(self, project, post_comments=False):
        """Run the repository check against a whole project (no request)."""
        repository = self.project_repository(project)
        if not repository:
            self.logger.error(ERROR_REPO_SPECIFIED.format(project))
            return

        arch_whitelist = Config.get(self.apiurl,
                                    project).get('repo_checker-arch-whitelist')

        pairs = repository_path_expand(self.apiurl, project, repository)
        state_hash = self.repository_state(pairs, False)
        self.repository_check(pairs, state_hash, False, bool(post_comments),
                              arch_whitelist=arch_whitelist)
コード例 #28
0
    def binary_whitelist(self, override_pair, overridden_pair, arch):
        """Build the binary whitelist: existing problems in the overridden
        project plus any staging pseudometa additions."""
        whitelist = self.binary_list_existing_problem(overridden_pair[0], overridden_pair[1])

        staging = Config.get(self.apiurl, overridden_pair[0]).get('staging')
        if staging:
            additions = self.staging_api(staging).get_prj_pseudometa(
                override_pair[0]).get('config', {})
            prefix = 'repo_checker-binary-whitelist'
            for key in (prefix, '{}-{}'.format(prefix, arch)):
                whitelist.update(additions.get(key, '').split(' '))

        # Drop empty-string entries produced by splitting absent values.
        return {entry for entry in whitelist if entry}
コード例 #29
0
    def project_only(self, project, post_comments=False):
        """Run the repository check against a whole project (no request)."""
        repository = self.project_repository(project)
        if not repository:
            self.logger.error(ERROR_REPO_SPECIFIED.format(project))
            return

        whitelist = Config.get(self.apiurl, project).get('repo_checker-arch-whitelist')

        repository_pairs = repository_path_expand(self.apiurl, project, repository)
        state_hash = self.repository_state(repository_pairs, False)
        self.repository_check(repository_pairs, state_hash, False,
                              bool(post_comments), arch_whitelist=whitelist)
コード例 #30
0
ファイル: OBSLocal.py プロジェクト: e4t/osc-plugin-factory
    def remote_config_set(self, config, replace_all=False):
        """Write the remote Config attribute for self.project.

        config: mapping of key -> value to store.
        replace_all: when False, merge into the existing remote config
        instead of overwriting it wholesale.
        """
        if not replace_all:
            config_existing = Config.get(self.apiurl, self.project)
            config_existing.update(config)
            config = config_existing

        config_lines = [f'{key} = {value}' for key, value in config.items()]

        # Write via self.apiurl as well; the original mixed in the
        # module-level APIURL constant, which diverges from the read above
        # if an instance targets a different API endpoint.
        attribute_value_save(self.apiurl, self.project, 'Config',
                             '\n'.join(config_lines))
コード例 #31
0
def policy_get_preprocess(apiurl, origin, policy):
    """Resolve config variables in the pending-submission review lists of *policy*."""
    # A trailing '~' marks a workaround origin; config lives under the base project.
    config_project = Config.get(apiurl, origin.rstrip('~'))
    for suffix in ('', '_update'):
        key = 'pending_submission_allowed_reviews{}'.format(suffix)
        resolved = [config_resolve_variable(review, config_project, 'config_source')
                    for review in policy[key]]
        # Drop entries whose variable resolved to nothing.
        policy[key] = [review for review in resolved if review]

    return policy
コード例 #32
0
    def binary_whitelist(self, override_pair, overridden_pair, arch):
        """Collect whitelisted binary names for an override/overridden repo pair.

        Starts from the binaries already known to be problematic in the
        overridden layer and merges any staging pseudometa whitelist entries
        (both generic and arch-specific). Returns a set with empty strings
        removed.
        """
        whitelist = self.binary_list_existing_problem(overridden_pair[0], overridden_pair[1])

        staging = Config.get(self.apiurl, overridden_pair[0]).get('staging')
        if staging:
            additions = self.staging_api(staging).get_prj_pseudometa(
                override_pair[0]).get('config', {})
            prefix = 'repo_checker-binary-whitelist'
            for key in [prefix, '-'.join([prefix, arch])]:
                whitelist.update(additions.get(key, '').split(' '))

        # Materialize as a set: on Python 3 `filter()` returns a one-shot
        # iterator that would be exhausted after a single use by callers.
        # This also matches the sibling implementation of this method.
        return set(filter(None, whitelist))
コード例 #33
0
    def request_override_check_users(self, project):
        """Determine users allowed to override review in a comment command."""
        config = Config.get(self.apiurl, project)

        allowed = []
        # Staging group members first, then the bot-specific override group.
        for key in ('staging-group', self.override_group_key):
            if not key:
                continue
            group = config.get(key)
            if group:
                allowed.extend(group_members(self.apiurl, group))

        return allowed
コード例 #34
0
    def request_override_check_users(self, project):
        """Return the users permitted to override this bot's review via comment."""
        config = Config.get(self.apiurl, project)

        users = []
        staging_group = config.get('staging-group')
        if staging_group:
            users.extend(group_members(self.apiurl, staging_group))

        # The override group key is bot-specific and may be unset.
        override_group = config.get(self.override_group_key) if self.override_group_key else None
        if override_group:
            users.extend(group_members(self.apiurl, override_group))

        return users
コード例 #35
0
    def check(self, project, repository):
        """Run check_pra() for every relevant architecture of project/repository."""
        repository = repository or self.project_repository(project)
        if not repository:
            self.logger.error('a repository must be specified via OSRT:Config main-repo for {}'.format(project))
            return

        # NOTE(review): result unused in this method; presumably kept for the
        # side effect of loading the remote project config — confirm.
        config = Config.get(self.apiurl, project)

        archs = target_archs(self.apiurl, project, repository)
        if not archs:
            self.logger.debug('{} has no relevant architectures'.format(project))
            return None

        for arch in archs:
            self.check_pra(project, repository, arch)
コード例 #36
0
    def project_repository(self, project):
        """Return the repository to check for *project*.

        Prefers the OSRT:Config main-repo setting; otherwise searches for a
        repository chaining to openSUSE:Factory snapshot/standard.
        """
        repository = Config.get(self.apiurl, project).get('main-repo')
        if repository:
            return repository

        self.logger.debug('no main-repo defined for {}'.format(project))

        search_project = 'openSUSE:Factory'
        for search_repository in ('snapshot', 'standard'):
            repository = repository_path_search(
                self.apiurl, project, search_project, search_repository)
            if repository:
                self.logger.debug('found chain to {}/{} via {}'.format(
                    search_project, search_repository, repository))
                return repository

        return repository
コード例 #37
0
    def project_repository(self, project):
        """Resolve the main repository for *project*, falling back to a path search."""
        config_repo = Config.get(self.apiurl, project).get('main-repo')
        if config_repo:
            return config_repo

        self.logger.debug('no main-repo defined for {}'.format(project))

        factory = 'openSUSE:Factory'
        found = None
        for candidate in ('snapshot', 'standard'):
            found = repository_path_search(self.apiurl, project, factory, candidate)
            if found:
                self.logger.debug('found chain to {}/{} via {}'.format(
                    factory, candidate, found))
                break

        return found
コード例 #38
0
    def package_comments(self, project, repository):
        """Post a comment on each package that has installation issues.

        Depending on the repo_checker-package-comment-devel config flag the
        comment is placed either on the devel-project package or on the
        target-project package itself.
        """
        self.logger.info('{} package comments'.format(len(
            self.package_results)))

        for package, sections in self.package_results.items():
            if str2bool(
                    Config.get(self.apiurl, project).get(
                        'repo_checker-package-comment-devel', 'False')):
                bot_name_suffix = project
                comment_project, comment_package = devel_project_fallback(
                    self.apiurl, project, package)
                if comment_project is None or comment_package is None:
                    self.logger.warning(
                        'unable to find devel project for {}'.format(package))
                    continue

                message = 'The version of this package in [`{project}`](/package/show/{project}/{package}) ' \
                    'has installation issues and may not be installable:'.format(
                        project=project, package=package)
            else:
                bot_name_suffix = repository
                comment_project = project
                comment_package = package
                message = 'This package has installation issues and may not be installable from the `{}` ' \
                    'repository:'.format(repository)

            # Sort sections by text to group binaries together.
            sections = sorted(sections, key=lambda s: s.text)
            message += '\n\n<pre>\n{}\n</pre>'.format('\n'.join(
                [section.text for section in sections]).strip())

            # Generate a hash based on the binaries involved and the number of
            # sections. This eliminates version or release changes from causing
            # an update to the comment while still updating on relevant changes.
            binaries = set()
            for section in sections:
                binaries.update(section.binaries)
            info = ';'.join(['::'.join(sorted(binaries)), str(len(sections))])
            # hashlib.sha1() requires bytes; passing str raises TypeError on
            # Python 3, so encode the summary before hashing.
            reference = hashlib.sha1(info.encode('utf-8')).hexdigest()[:7]

            # Post comment on package in order to notify maintainers.
            self.comment_write(state='seen',
                               result=reference,
                               bot_name_suffix=bot_name_suffix,
                               project=comment_project,
                               package=comment_package,
                               message=message)
コード例 #39
0
def config_resolve(apiurl, project, config):
    """Expand wildcard origins and apply policy defaults in an origin config.

    Processes config['origins'] in order, consuming '*' (global defaults),
    '*~' (workaround defaults) and family wildcards, and folding the
    accumulated defaults into each remaining concrete origin entry.
    Mutates and returns *config*.
    """
    defaults = POLICY_DEFAULTS.copy()
    defaults_workarounds = POLICY_DEFAULTS.copy()

    origins_original = config_origin_list(config)

    # Substitute project config variables before expansion.
    config_project = Config.get(apiurl, project)
    config_resolve_variables(config, config_project)

    # NOTE: wildcard entries are deleted in place, so the index is only
    # advanced for concrete origins — do not convert this to a for loop.
    origins = config['origins']
    i = 0
    while i < len(origins):
        # Each list entry is a single-key dict: {origin_name: values}.
        origin = next(iter(origins[i]))
        values = origins[i][origin]

        if origin == '*':
            # Global defaults: apply to all later entries, workarounds included.
            del origins[i]
            defaults.update(values)
            defaults_workarounds.update(values)
            config_resolve_apply(config, values, until='*')
        elif origin == '*~':
            # Workaround-only defaults; also materialize workaround entries.
            del origins[i]
            defaults_workarounds.update(values)
            config_resolve_create_workarounds(config, values, origins_original)
            config_resolve_apply(config, values, workaround=True, until='*~')
        elif '*' in origin:
            # Does not allow for family + workaround expansion (ie. foo*~).
            del origins[i]
            config_resolve_create_family(apiurl, project, config, i, origin,
                                         values)
        elif origin.endswith('~'):
            # Concrete workaround origin: defaults first, own values override.
            values_new = deepcopy(defaults_workarounds)
            values_new.update(values)
            values.update(values_new)
            i += 1
        else:
            # Concrete origin: defaults first, own values override.
            values_new = deepcopy(defaults)
            values_new.update(values)
            values.update(values_new)
            i += 1

    return config
コード例 #40
0
def config_resolve(apiurl, project, config):
    """Expand wildcard origins and apply policy defaults in an origin config.

    Processes config['origins'] in order, consuming '*' (global defaults),
    '*~' (workaround defaults) and family wildcards, and folding the
    accumulated defaults into each remaining concrete origin entry.
    Mutates and returns *config*.
    """
    defaults = POLICY_DEFAULTS.copy()
    defaults_workarounds = POLICY_DEFAULTS.copy()

    origins_original = config_origin_list(config)

    config_project = Config.get(apiurl, project)
    config_resolve_variables(config, config_project)

    origins = config['origins']
    i = 0
    while i < len(origins):
        # dict.keys() is a non-subscriptable view on Python 3; use
        # next(iter(...)) to fetch the single origin key of the entry.
        origin = next(iter(origins[i]))
        values = origins[i][origin]

        if origin == '*':
            del origins[i]
            defaults.update(values)
            defaults_workarounds.update(values)
            config_resolve_apply(config, values, until='*')
        elif origin == '*~':
            del origins[i]
            defaults_workarounds.update(values)
            config_resolve_create_workarounds(config, values, origins_original)
            config_resolve_apply(config, values, workaround=True, until='*~')
        elif '*' in origin:
            # Does not allow for family + workaround expansion (ie. foo*~).
            del origins[i]
            config_resolve_create_family(apiurl, project, config, i, origin, values)
        elif origin.endswith('~'):
            # Concrete workaround origin: defaults first, own values override.
            values_new = deepcopy(defaults_workarounds)
            values_new.update(values)
            values.update(values_new)
            i += 1
        else:
            # Concrete origin: defaults first, own values override.
            values_new = deepcopy(defaults)
            values_new.update(values)
            values.update(values_new)
            i += 1

    return config
コード例 #41
0
    def check_action_maintenance_release(self, request, action):
        """Install-check a maintenance release action against its target.

        Returns True when the review passes or can be skipped, None to leave
        the review pending (check not conclusive yet). Raises when required
        repository configuration is missing.
        """
        # No reason to special case patchinfo since same source and target
        # projects which is all that repo_checker cares about.

        # Each target project only needs to be checked once per request.
        if action.tgt_project in self.checked_targets:
            return True

        target_config = Config.get(self.apiurl, action.tgt_project)
        if str2bool(target_config.get('repo_checker-project-skip', 'False')):
            # Do not change message as this should only occur in requests
            # targeting multiple projects such as in maintenance workflow in
            # which the message should be set by other actions.
            self.logger.debug('skipping review of action targeting {}'.format(
                action.tgt_project))
            return True

        repository = target_config.get('main-repo')
        if not repository:
            raise Exception('Missing main-repo in OSRT:Config')

        # Find a repository which links to target project "main" repository.
        repository = repository_path_search(self.apiurl, action.src_project,
                                            action.tgt_project, repository)
        if not repository:
            raise Exception('Missing repositories')

        repository_pairs = repository_path_expand(self.apiurl,
                                                  action.src_project,
                                                  repository)

        self.checked_targets.add(action.tgt_project)
        # Restrict architectures to the configured whitelist, if any.
        archs = set(target_archs(self.apiurl, action.src_project, repository))
        arch_whitelist = target_config.get('repo_checker-arch-whitelist', None)
        if arch_whitelist:
            archs = set(arch_whitelist.split(' ')).intersection(archs)

        if not self.repository_check(repository_pairs, archs):
            # Inconclusive: keep the review open.
            return None

        self.review_messages['accepted'] = 'install check passed'
        return True
コード例 #42
0
    def package_comments(self, project, repository):
        """Post a comment on each package that has installation issues.

        Depending on the repo_checker-package-comment-devel config flag the
        comment is placed either on the devel-project package or on the
        target-project package itself.
        """
        self.logger.info('{} package comments'.format(len(self.package_results)))

        for package, sections in self.package_results.items():
            if str2bool(Config.get(self.apiurl, project).get('repo_checker-package-comment-devel', 'False')):
                bot_name_suffix = project
                comment_project, comment_package = devel_project_fallback(self.apiurl, project, package)
                if comment_project is None or comment_package is None:
                    self.logger.warning('unable to find devel project for {}'.format(package))
                    continue

                message = 'The version of this package in [`{project}`](/package/show/{project}/{package}) ' \
                    'has installation issues and may not be installable:'.format(
                        project=project, package=package)
            else:
                bot_name_suffix = repository
                comment_project = project
                comment_package = package
                message = 'This package has installation issues and may not be installable from the `{}` ' \
                    'repository:'.format(repository)

            # Sort sections by text to group binaries together.
            sections = sorted(sections, key=lambda s: s.text)
            message += '\n\n<pre>\n{}\n</pre>'.format(
                '\n'.join([section.text for section in sections]).strip())

            # Generate a hash based on the binaries involved and the number of
            # sections. This eliminates version or release changes from causing
            # an update to the comment while still updating on relevant changes.
            binaries = set()
            for section in sections:
                binaries.update(section.binaries)
            info = ';'.join(['::'.join(sorted(binaries)), str(len(sections))])
            # hashlib.sha1() requires bytes; passing str raises TypeError on
            # Python 3, so encode the summary before hashing.
            reference = hashlib.sha1(info.encode('utf-8')).hexdigest()[:7]

            # Post comment on package in order to notify maintainers.
            self.comment_write(state='seen', result=reference, bot_name_suffix=bot_name_suffix,
                               project=comment_project, package=comment_package, message=message)
コード例 #43
0
    def remote_config_set(self, config, replace_all=False):
        """Persist *config* into the target project's 'Config' attribute.

        The attribute holds key/value settings that steer several tools and
        bots for the project; it is normally read back via a
        :class:`osclib.Config` object (see :func:`load_config`).

        :param config: values to write into the attribute
        :type config: dict[str, str]
        :param replace_all: whether the previous content of 'Config' should be cleared up
        :type replace_all: bool
        """

        if not replace_all:
            # Merge onto the currently stored remote configuration.
            current = Config.get(self.apiurl, self.project)
            current.update(config)
            config = current

        lines = [f'{key} = {value}' for key, value in config.items()]
        attribute_value_save(APIURL, self.project, 'Config', '\n'.join(lines))
コード例 #44
0
def mail_send(apiurl,
              project,
              to,
              subject,
              body,
              from_key='maintainer',
              followup_to_key='release-list',
              dry=False):
    """Send a notification mail using addresses from the project's config.

    *to* may be a literal address (contains '@') or a 'mail-*' config alias.
    When *dry* is set the message is printed instead of sent.
    """
    from email.mime.text import MIMEText
    import email.utils
    import smtplib

    config = Config.get(apiurl, project)

    msg = MIMEText(body)
    msg['Message-ID'] = email.utils.make_msgid()
    msg['Date'] = email.utils.formatdate(localtime=1)

    if from_key is None:
        # Fall back to the e-mail of the authenticated API user.
        msg['From'] = entity_email(apiurl,
                                   conf.get_apiurl_usr(apiurl),
                                   include_name=True)
    else:
        msg['From'] = config['mail-{}'.format(from_key)]

    # Resolve config alias unless a literal address was given.
    msg['To'] = to if '@' in to else config['mail-{}'.format(to)]

    followup_to = config.get('mail-{}'.format(followup_to_key))
    if followup_to:
        msg['Mail-Followup-To'] = followup_to

    msg['Subject'] = subject

    if dry:
        print(msg.as_string())
        return

    relay = smtplib.SMTP(config.get('mail-relay', 'relay.suse.de'))
    relay.sendmail(msg['From'], [msg['To']], msg.as_string())
    relay.quit()
コード例 #45
0
def project_pseudometa_package(apiurl, project):
    """Return the (project, package) pair holding a project's pseudometa.

    The 'pseudometa_package' config value may be either a bare package name
    (defaults to '00Meta', hosted in *project*) or a 'project/package'
    override pointing at another project.
    """
    package = Config.get(apiurl, project).get('pseudometa_package', '00Meta')
    if '/' in package:
        # maxsplit=1 so the value always unpacks into exactly two parts
        # (maxsplit=2 would raise ValueError on a value with two slashes).
        project, package = package.split('/', 1)

    return project, package
コード例 #46
0
        default='openSUSE:Factory',
        help='project to check (ex. openSUSE:Factory, openSUSE:Leap:15.1)')
    parser.add_argument('-d',
                        '--debug',
                        action='store_true',
                        default=False,
                        help='enable debug information')
    parser.add_argument('-A', '--apiurl', metavar='URL', help='API URL')

    args = parser.parse_args()

    osc.conf.get_config(override_apiurl=args.apiurl)
    osc.conf.config['debug'] = args.debug

    apiurl = osc.conf.config['apiurl']
    config = Config.get(apiurl, args.project)
    api = StagingAPI(apiurl, args.project)
    staging_report = InstallChecker(api, config)

    if args.debug:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO)

    if args.staging:
        if not staging_report.staging(api.prj_from_short(args.staging),
                                      force=True):
            sys.exit(1)
    else:
        for staging in api.get_staging_projects():
            if api.is_adi_project(staging):
コード例 #47
0
    def check_pra(self, project, repository, arch):
        """Install-check one project/repository/arch and trigger rebuilds.

        Mirrors the repository chain, runs installcheck over it, compares the
        findings against the previously stored state and rebuilds packages
        whose problems changed. Leaf packages listed in the
        'rebuildpacs-leafs' config are additionally rebuilt when their build
        dependency hashes change. The updated state is stored via
        store_yaml() unless a rebuild was just requested.
        """
        config = Config.get(self.apiurl, project)

        # Load the previously stored state (known problems + leaf hashes).
        oldstate = None
        self.store_filename = 'rebuildpacs.{}-{}.yaml'.format(project, repository)
        if self.store_project and self.store_package:
            state_yaml = source_file_load(self.apiurl, self.store_project, self.store_package,
                                        self.store_filename)
            if state_yaml:
                oldstate = yaml.safe_load(state_yaml)

        # Normalize the state structure defensively (missing/corrupt keys).
        oldstate = oldstate or {}
        oldstate.setdefault('check', {})
        if not isinstance(oldstate['check'], dict):
            oldstate['check'] = {}
        oldstate.setdefault('leafs', {})
        if not isinstance(oldstate['leafs'], dict):
            oldstate['leafs'] = {}

        # Mirror every layer of the repository chain; a non-directory result
        # is treated as a primary.xml location instead of a local mirror.
        repository_pairs = repository_path_expand(self.apiurl, project, repository)
        directories = []
        primaryxmls = []
        for pair_project, pair_repository in repository_pairs:
            mirrored = mirror(self.apiurl, pair_project, pair_repository, arch)
            if os.path.isdir(mirrored):
                directories.append(mirrored)
            else:
                primaryxmls.append(mirrored)

        parsed = dict()
        with tempfile.TemporaryDirectory(prefix='repochecker') as dir:
            pfile = os.path.join(dir, 'packages')

            # Generate the susetags packages file via the helper perl script.
            SCRIPT_PATH = os.path.dirname(os.path.realpath(__file__))
            script = os.path.join(SCRIPT_PATH, 'write_repo_susetags_file.pl')
            parts = ['perl', script, dir] + directories

            p = subprocess.run(parts)
            if p.returncode:
                # technically only 126, but there is no other value atm -
                # so if some other perl error happens, we don't continue
                raise CorruptRepos

            # The catalog maps mirror directories to their package lists;
            # only the top layer's packages are install-checked.
            target_packages = []
            with open(os.path.join(dir, 'catalog.yml')) as file:
                catalog = yaml.safe_load(file)
                if catalog is not None:
                    target_packages = catalog.get(directories[0], [])

            parsed = parsed_installcheck([pfile] + primaryxmls, arch, target_packages, [])
            for package in parsed:
                parsed[package]['output'] = "\n".join(parsed[package]['output'])

            # let's risk a N*N algorithm in the hope that we have a limited N
            # Replace problem text duplicated from another package with a
            # FOLLOWUP(pkg) marker to keep the reports compact.
            for package1 in parsed:
                output = parsed[package1]['output']
                for package2 in parsed:
                    if package1 == package2:
                        continue
                    output = output.replace(parsed[package2]['output'], 'FOLLOWUP(' + package2 + ')')
                parsed[package1]['output'] = output

            for package in parsed:
                parsed[package]['output'] = self._split_and_filter(parsed[package]['output'])

        # Determine which packages currently build successfully.
        url = makeurl(self.apiurl, ['build', project, '_result'], {
                      'repository': repository, 'arch': arch, 'code': 'succeeded'})
        root = ET.parse(http_GET(url)).getroot()
        succeeding = list(map(lambda x: x.get('package'), root.findall('.//status')))

        # Group install-check output per source package.
        per_source = dict()

        for package, entry in parsed.items():
            source = "{}/{}/{}/{}".format(project, repository, arch, entry['source'])
            per_source.setdefault(source, {'output': [], 'builds': entry['source'] in succeeding})
            per_source[source]['output'].extend(entry['output'])

        rebuilds = set()

        # Queue a rebuild for sources whose problem set changed since the
        # stored state (only if the source currently builds at all).
        for source in sorted(per_source):
            if not len(per_source[source]['output']):
                continue
            self.logger.debug("{} builds: {}".format(source, per_source[source]['builds']))
            self.logger.debug("  " + "\n  ".join(per_source[source]['output']))
            if not per_source[source]['builds']:  # nothing we can do
                continue
            old_output = oldstate['check'].get(source, {}).get('problem', [])
            if sorted(old_output) == sorted(per_source[source]['output']):
                self.logger.debug("unchanged problem")
                continue
            self.logger.info("rebuild %s", source)
            rebuilds.add(os.path.basename(source))
            for line in difflib.unified_diff(old_output, per_source[source]['output'], 'before', 'now'):
                self.logger.debug(line.strip())
            oldstate['check'][source] = {'problem': per_source[source]['output'],
                                         'rebuild': str(datetime.datetime.now())}

        # Forget stored problems for sources that now build and show no issue.
        for source in list(oldstate['check']):
            if not source.startswith('{}/{}/{}/'.format(project, repository, arch)):
                continue
            if not os.path.basename(source) in succeeding:
                continue
            if source not in per_source:
                self.logger.info("No known problem, erasing %s", source)
                del oldstate['check'][source]

        packages = config.get('rebuildpacs-leafs', '').split()
        if not self.rebuild: # ignore in this case
            packages = []

        # first round: collect all infos from obs
        infos = dict()
        for package in packages:
            subpacks, build_deps = self.check_leaf_package(project, repository, arch, package)
            infos[package] = {'subpacks': subpacks, 'deps': build_deps}

        # calculate rebuild triggers
        rebuild_triggers = dict()
        for package1 in packages:
            for package2 in packages:
                if package1 == package2:
                    continue
                for subpack in infos[package1]['subpacks']:
                    if subpack in infos[package2]['deps']:
                        rebuild_triggers.setdefault(package1, set())
                        rebuild_triggers[package1].add(package2)
                        # ignore this depencency. we already trigger both of them
                        del infos[package2]['deps'][subpack]

        # calculate build info hashes
        for package in packages:
            if not package in succeeding:
                self.logger.debug("Ignore %s for the moment, not succeeding", package)
                continue
            # Hash the sorted build dependencies; a changed digest means the
            # leaf package needs a rebuild.
            m = hashlib.sha256()
            for bdep in sorted(infos[package]['deps']):
                m.update(bytes(bdep + '-' + infos[package]['deps'][bdep], 'utf-8'))
            state_key = '{}/{}/{}/{}'.format(project, repository, arch, package)
            olddigest = oldstate['leafs'].get(state_key, {}).get('buildinfo')
            if olddigest == m.hexdigest():
                continue
            self.logger.info("rebuild leaf package %s (%s vs %s)", package, olddigest, m.hexdigest())
            rebuilds.add(package)
            oldstate['leafs'][state_key] = {'buildinfo': m.hexdigest(),
                                            'rebuild': str(datetime.datetime.now())}

        if self.dryrun:
            if self.rebuild:
                self.logger.info("To rebuild: %s", ' '.join(rebuilds))
            return

        if not self.rebuild or not len(rebuilds):
            self.logger.debug("Nothing to rebuild")
            # in case we do rebuild, wait for it to succeed before saving
            self.store_yaml(oldstate, project, repository, arch)
            return

        # Trigger the rebuild for all collected packages in one request.
        query = {'cmd': 'rebuild', 'repository': repository, 'arch': arch, 'package': rebuilds}
        url = makeurl(self.apiurl, ['build', project])
        headers = { 'Content-Type': 'application/x-www-form-urlencoded' }
        http_request('POST', url, headers, data=urlencode(query, doseq=True))

        self.store_yaml(oldstate, project, repository, arch)
コード例 #48
0
    def repository_check(self, repository_pairs, state_hash, simulate_merge, whitelist=None, arch_whitelist=None, post_comments=False, cycle_packages=None):
        """Run cycle/install checks over a repository chain.

        When simulate_merge is set, checks the top pair as if merged into the
        layer below (staging-style review) and posts project comments;
        otherwise writes results to the project pseudometa file.

        Returns True on success, False on failure, or None when the check is
        skipped (unchanged state, unpublished layers) or inconclusive.
        """
        comment = []
        project, repository = repository_pairs[0]
        self.logger.info('checking {}/{}@{}[{}]'.format(
            project, repository, state_hash, len(repository_pairs)))

        # Keep only pairs that have build state for all relevant archs.
        archs = self.target_archs(project, repository, arch_whitelist)
        new_pairs = []
        for pair in repository_pairs:
            has_all = True
            for arch in archs:
                if not repository_arch_state(self.apiurl, pair[0], pair[1], arch):
                    has_all = False
                    break
            # ignore repositories only inherited for config
            if has_all:
                new_pairs.append(pair)
        repository_pairs = new_pairs

        published = repositories_published(self.apiurl, repository_pairs, archs)

        if not self.force:
            if state_hash == self.repository_state_last(project, repository, simulate_merge):
                self.logger.info('{} build unchanged'.format(project))
                # TODO keep track of skipped count for cycle summary
                return None

            # For submit style requests, want to process if top layer is done,
            # but not mark review as final until all layers are published.
            if published is not True and (not simulate_merge or published[0] == project):
                # Require all layers to be published except when the top layer
                # is published in a simulate merge (allows quicker feedback with
                # potentially incorrect resutls for staging).
                self.logger.info('{}/{} not published'.format(published[0], published[1]))
                return None

        # Drop non-published repository information and thus reduce to boolean.
        published = published is True

        if not simulate_merge:
            # Top of pseudometa file.
            comment.append(state_hash)

            if post_comments:
                # Stores parsed install_check() results grouped by package.
                self.package_results = {}

        if not len(archs):
            self.logger.debug('{} has no relevant architectures'.format(project))
            return None

        result = True
        for arch in archs:
            # Mirror each layer locally for the install check.
            directories = []
            for pair_project, pair_repository in repository_pairs:
                directories.append(self.mirror(pair_project, pair_repository, arch))

            if simulate_merge:
                # Staging-style: check the top pair against the layer below,
                # honoring ignore lists and the binary whitelist.
                ignore = self.simulated_merge_ignore(repository_pairs[0], repository_pairs[1], arch)
                if not whitelist:
                    whitelist = self.binary_whitelist(repository_pairs[0], repository_pairs[1], arch)

                results = {
                    'cycle': self.cycle_check(repository_pairs[0][0], repository_pairs[0][1], arch, cycle_packages),
                    'install': self.install_check(
                        repository_pairs[1], arch, directories, ignore, whitelist),
                }
            else:
                # Only products themselves will want no-filter or perhaps
                # projects working on cleaning up a product.
                no_filter = str2bool(Config.get(self.apiurl, project).get('repo_checker-no-filter'))
                results = {
                    'cycle': CheckResult(True, None),
                    'install': self.install_check(repository_pairs[0], arch, directories,
                                                  parse=post_comments, no_filter=no_filter),
                }

            if not all(result.success for _, result in results.items()):
                # Not all checks passed, build comment.
                result = False
                self.result_comment(repository, arch, results, comment)

        if simulate_merge:
            info_extra = {'build': state_hash}
            if not result:
                # Some checks in group did not pass, post comment.
                # Avoid identical comments with different build hash during
                # target project build phase. Once published update regardless.
                self.comment_write(state='seen', result='failed', project=project,
                                   message='\n'.join(comment).strip(), identical=True,
                                   info_extra=info_extra, info_extra_identical=published,
                                   bot_name_suffix=repository)
            else:
                # Post passed comment only if previous failed comment.
                text = 'Previously reported problems have been resolved.'
                self.comment_write(state='done', result='passed', project=project,
                                   message=text, identical=True, only_replace=True,
                                   info_extra=info_extra, bot_name_suffix=repository)
        else:
            # Project-only mode: persist the result to the pseudometa file.
            text = '\n'.join(comment).strip()
            if not self.dryrun:
                filename = self.project_pseudometa_file_name(project, repository)
                project_pseudometa_file_ensure(
                    self.apiurl, project, filename, text + '\n', 'repo_checker project_only run')
            else:
                print(text)

            if post_comments:
                self.package_comments(project, repository)

        if result and not published:
            # Wait for the complete stack to build before positive result.
            self.logger.debug('demoting result from accept to ignore due to non-published layer')
            result = None

        return result
コード例 #49
0
    def repository_check(self,
                         repository_pairs,
                         state_hash,
                         simulate_merge,
                         post_comments=False):
        """Run cycle/install checks against a stack of repository layers.

        Args:
            repository_pairs: list of (project, repository) tuples; index 0 is
                the top layer being checked, index 1 (when simulate_merge) is
                the merge/target layer.
            state_hash: build-state hash used to detect unchanged builds and
                to tag comments/pseudometa.
            simulate_merge: when True, check the top layer as if merged into
                the layer below (staging workflow); when False, check the
                project on its own and persist results to the pseudometa file.
            post_comments: when True (non-merge mode), collect per-package
                install results and post package comments.

        Returns:
            True when all checks passed and all layers are published,
            False when at least one check failed,
            None when the check should be ignored (unchanged build, nothing
            published yet, no relevant archs, or passing but unpublished).
        """
        comment = []
        project, repository = repository_pairs[0]
        self.logger.info('checking {}/{}@{}[{}]'.format(
            project, repository, state_hash, len(repository_pairs)))

        # True when every layer is published, otherwise the first
        # (project, repository) pair that is not yet published.
        published = repositories_published(self.apiurl, repository_pairs)

        if not self.force:
            if state_hash == self.repository_state_last(
                    project, repository, not simulate_merge):
                self.logger.info('{} build unchanged'.format(project))
                # TODO keep track of skipped count for cycle summary
                return None

            # For submit style requests, want to process if top layer is done,
            # but not mark review as final until all layers are published.
            if published is not True and (not simulate_merge
                                          or published[0] == project):
                # Require all layers to be published except when the top layer
                # is published in a simulate merge (allows quicker feedback with
                # potentially incorrect results for staging).
                self.logger.info('{}/{} not published'.format(
                    published[0], published[1]))
                return None

        # Drop non-published repository information and thus reduce to boolean.
        published = published is True

        if simulate_merge:
            # Restrict top layer archs to the whitelisted archs from merge layer.
            archs = set(target_archs(self.apiurl, project,
                                     repository)).intersection(
                                         set(
                                             self.target_archs(
                                                 repository_pairs[1][0],
                                                 repository_pairs[1][1])))
        else:
            # Top of pseudometa file.
            comment.append(state_hash)
            archs = self.target_archs(project, repository)

            if post_comments:
                # Stores parsed install_check() results grouped by package.
                self.package_results = {}

        if not len(archs):
            self.logger.debug(
                '{} has no relevant architectures'.format(project))
            return None

        result = True
        for arch in archs:
            # Mirror every layer's binaries locally for the install check.
            directories = []
            for pair_project, pair_repository in repository_pairs:
                directories.append(
                    self.mirror(pair_project, pair_repository, arch))

            if simulate_merge:
                ignore = self.simulated_merge_ignore(repository_pairs[0],
                                                     repository_pairs[1], arch)
                whitelist = self.binary_whitelist(repository_pairs[0],
                                                  repository_pairs[1], arch)

                results = {
                    'cycle':
                    self.cycle_check(repository_pairs[0], repository_pairs[1],
                                     arch),
                    'install':
                    self.install_check(repository_pairs[1], arch, directories,
                                       ignore, whitelist),
                }
            else:
                # Only products themselves will want no-filter or perhaps
                # projects working on cleaning up a product.
                no_filter = str2bool(
                    Config.get(self.apiurl,
                               project).get('repo_checker-no-filter'))
                results = {
                    # No merge layer, so no cycle check is applicable.
                    'cycle':
                    CheckResult(True, None),
                    'install':
                    self.install_check(repository_pairs[0],
                                       arch,
                                       directories,
                                       parse=post_comments,
                                       no_filter=no_filter),
                }

            # Iterate values directly; the keys are irrelevant here and the
            # previous `for _, result in results.items()` rebound the outer
            # `result` accumulator name inside the genexpr, which was confusing.
            if not all(check.success for check in results.values()):
                # Not all checks passed, build comment.
                result = False
                self.result_comment(repository, arch, results, comment)

        if simulate_merge:
            info_extra = {'build': state_hash}
            if not result:
                # Some checks in group did not pass, post comment.
                # Avoid identical comments with different build hash during
                # target project build phase. Once published update regardless.
                self.comment_write(state='seen',
                                   result='failed',
                                   project=project,
                                   message='\n'.join(comment).strip(),
                                   identical=True,
                                   info_extra=info_extra,
                                   info_extra_identical=published,
                                   bot_name_suffix=repository)
            else:
                # Post passed comment only if previous failed comment.
                text = 'Previously reported problems have been resolved.'
                self.comment_write(state='done',
                                   result='passed',
                                   project=project,
                                   message=text,
                                   identical=True,
                                   only_replace=True,
                                   info_extra=info_extra,
                                   bot_name_suffix=repository)
        else:
            # Non-merge mode: persist the result to the project pseudometa file
            # (or just print it during a dry run).
            text = '\n'.join(comment).strip()
            if not self.dryrun:
                filename = self.project_pseudometa_file_name(
                    project, repository)
                project_pseudometa_file_ensure(
                    self.apiurl, project, filename, text + '\n',
                    'repo_checker project_only run')
            else:
                print(text)

            if post_comments:
                self.package_comments(project, repository)

        if result and not published:
            # Wait for the complete stack to build before positive result.
            self.logger.debug(
                'demoting result from accept to ignore due to non-published layer'
            )
            result = None

        return result