    def target_project_config(self, project):
        # Load project config and allow for remote entries.
        config = Config.get(self.apiurl, project)

        self.single_action_require = str2bool(config.get('check-source-single-action-require', 'False'))
        self.ignore_devel = not str2bool(config.get('devel-project-enforce', 'False'))
        self.in_air_rename_allow = str2bool(config.get('check-source-in-air-rename-allow', 'False'))
        self.add_review_team = str2bool(config.get('check-source-add-review-team', 'True'))
        self.review_team = config.get('review-team')
        self.staging_group = config.get('staging-group')
        self.repo_checker = config.get('repo-checker')
        self.devel_whitelist = config.get('devel-whitelist', '').split()
        self.skip_add_reviews = False

        if self.action.type == 'maintenance_incident':
            # The workflow effectively enforces the names to match and the
            # parent code sets target_package from source_package so this check
            # becomes useless and awkward to perform.
            self.in_air_rename_allow = True

            # The target project will be set to product and thus inherit
            # settings, but override since real target is not product.
            self.single_action_require = False

            # It might make sense to supersede maintbot, but for now just
            # skip adding reviews.
            self.skip_add_reviews = True
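
All of these examples gate behavior on OSRT:Config values through str2bool. In the real code it is imported from osclib; a minimal sketch of the assumed behavior (the exact set of accepted spellings may differ):

def str2bool(value):
    # Interpret common truthy spellings; anything else, including None,
    # is treated as False.
    return value is not None and str(value).strip().lower() in ('1', 't', 'true', 'yes')

With such a helper, an option like check-source-single-action-require stays off unless the project config sets it to a truthy string.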
Example #2
    def target_project_config(self, project):
        # Load project config and allow for remote entries.
        config = Config.get(self.apiurl, project)

        self.single_action_require = str2bool(
            config.get('check-source-single-action-require', 'False'))
        self.ignore_devel = not str2bool(
            config.get('devel-project-enforce', 'False'))
        self.in_air_rename_allow = str2bool(
            config.get('check-source-in-air-rename-allow', 'False'))
        self.add_review_team = str2bool(
            config.get('check-source-add-review-team', 'True'))
        self.review_team = config.get('review-team')
        self.mail_release_list = config.get('mail-release-list')
        self.staging_group = config.get('staging-group')
        self.repo_checker = config.get('repo-checker')
        self.devel_whitelist = config.get('devel-whitelist', '').split()
        self.skip_add_reviews = False
        self.security_review_team = config.get('security-review-team',
                                               'security-team')
        self.bad_rpmlint_entries = config.get('bad-rpmlint-entries',
                                              '').split()

        if self.action.type == 'maintenance_incident':
            # The workflow effectively enforces the names to match and the
            # parent code sets target_package from source_package so this check
            # becomes useless and awkward to perform.
            self.in_air_rename_allow = True

            # The target project will be set to product and thus inherit
            # settings, but override since real target is not product.
            self.single_action_require = False

            # It might make sense to supersede maintbot, but for now just
            # skip adding reviews.
            self.skip_add_reviews = True
Example #3
    def target_project_config(self, project):
        # Load project config and allow for remote entries.
        self.staging_api(project)
        config = self.staging_config[project]

        self.ignore_devel = not str2bool(
            config.get('devel-project-enforce', 'False'))
        self.in_air_rename_allow = str2bool(
            config.get('check-source-in-air-rename-allow', 'False'))
        self.add_review_team = str2bool(
            config.get('check-source-add-review-team', 'True'))
        self.review_team = config.get('review-team')
        self.repo_checker = config.get('repo-checker')
        self.devel_whitelist = config.get('devel-whitelist', '').split()
Example #4
    def request_repository_pairs(self, request, action):
        if str2bool(
                Config.get(self.apiurl,
                           action.tgt_project).get('repo_checker-project-skip',
                                                   'False')):
            # Do not change message as this should only occur in requests
            # targeting multiple projects such as in maintenance workflow in
            # which the message should be set by other actions.
            self.logger.debug('skipping review of action targeting {}'.format(
                action.tgt_project))
            return True

        repository = self.project_repository(action.tgt_project)
        if not repository:
            self.review_messages['declined'] = ERROR_REPO_SPECIFIED.format(
                action.tgt_project)
            return False

        repository_pairs = []
        # Assumes maintenance_release target project has staging disabled.
        staging = Config.get(self.apiurl, action.tgt_project).get('staging')
        if staging:
            api = self.staging_api(staging)
            stage_info = api.packages_staged.get(action.tgt_package)
            if not stage_info or str(stage_info['rq_id']) != str(
                    request.reqid):
                self.logger.info('{} not staged'.format(request.reqid))
                return None

            if not self.force and not self.staging_build_failure_check(
                    api, stage_info['prj']):
                self.logger.info(
                    '{} not ready due to staging build failure(s)'.format(
                        request.reqid))
                return None

            # Staging setup is convoluted and thus the repository setup does not
            # contain a path to the target project. Instead the ports repository
            # is used to import the target prjconf. As such the staging group
            # repository must be explicitly layered on top of target project.
            repository_pairs.append([stage_info['prj'], repository])
            repository_pairs.extend(
                repository_path_expand(self.apiurl, action.tgt_project,
                                       repository))
        else:
            # Find a repository which links to target project "main" repository.
            repository = repository_path_search(self.apiurl,
                                                action.src_project,
                                                action.tgt_project, repository)
            if not repository:
                self.review_messages['declined'] = ERROR_REPO_SPECIFIED.format(
                    action.tgt_project)
                return False

            repository_pairs.extend(
                repository_path_expand(self.apiurl, action.src_project,
                                       repository))

        return repository_pairs
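
Note the deliberately mixed return types above: True means there is nothing to review for this action, False means the review is declined (review_messages is already set), None means the request is not ready and should be retried later, and a list means the repository layering was resolved. A hedged sketch of how a caller might dispatch on that (review_action is a hypothetical name, not the actual bot API):

def review_action(bot, request, action):
    pairs = bot.request_repository_pairs(request, action)
    if isinstance(pairs, bool):
        # True: skip this action; False: review declined.
        return pairs
    if pairs is None:
        # Not staged yet, or staging builds still failing; retry later.
        return None
    # Otherwise a list of [project, repository] pairs, top layer first.
    for project, repository in pairs:
        print('layer: {}/{}'.format(project, repository))
    return True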
Example #5
    def package_comments(self, project, repository):
        self.logger.info('{} package comments'.format(len(
            self.package_results)))

        for package, sections in self.package_results.items():
            if str2bool(
                    Config.get(self.apiurl, project).get(
                        'repo_checker-package-comment-devel', 'False')):
                bot_name_suffix = project
                comment_project, comment_package = devel_project_fallback(
                    self.apiurl, project, package)
                if comment_project is None or comment_package is None:
                    self.logger.warning(
                        'unable to find devel project for {}'.format(package))
                    continue

                message = 'The version of this package in [`{project}`](/package/show/{project}/{package}) ' \
                    'has installation issues and may not be installable:'.format(
                        project=project, package=package)
            else:
                bot_name_suffix = repository
                comment_project = project
                comment_package = package
                message = 'This package has installation issues and may not be installable from the `{}` ' \
                    'repository:'.format(repository)

            # Sort sections by text to group binaries together.
            sections = sorted(sections, key=lambda s: s.text)
            message += '\n\n<pre>\n{}\n</pre>'.format('\n'.join(
                [section.text for section in sections]).strip())

            # Generate a hash based on the binaries involved and the number of
            # sections. This eliminates version or release changes from causing
            # an update to the comment while still updating on relevant changes.
            binaries = set()
            for section in sections:
                binaries.update(section.binaries)
            info = ';'.join(['::'.join(sorted(binaries)), str(len(sections))])
            reference = hashlib.sha1(info.encode('utf-8')).hexdigest()[:7]

            # Post comment on package in order to notify maintainers.
            self.comment_write(state='seen',
                               result=reference,
                               bot_name_suffix=bot_name_suffix,
                               project=comment_project,
                               package=comment_package,
                               message=message)
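
The comment hash above is worth isolating: hashing only the sorted binary names plus the section count keeps the bot's comment reference stable across version or release bumps, while still changing whenever the set of broken binaries changes. The same scheme, self-contained:

import hashlib

def comment_reference(binaries, section_count):
    # Versions and releases are deliberately excluded so rebuilds of the
    # same failure do not churn the reference.
    info = ';'.join(['::'.join(sorted(binaries)), str(section_count)])
    return hashlib.sha1(info.encode('utf-8')).hexdigest()[:7]

# Same binaries and count give the same reference, regardless of order.
assert comment_reference({'libfoo1', 'foo-devel'}, 2) == \
    comment_reference({'foo-devel', 'libfoo1'}, 2)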
Example #6
    def request_repository_pairs(self, request, action):
        if str2bool(Config.get(self.apiurl, action.tgt_project).get('repo_checker-project-skip', 'False')):
            # Do not change message as this should only occur in requests
            # targeting multiple projects such as in maintenance workflow in
            # which the message should be set by other actions.
            self.logger.debug('skipping review of action targeting {}'.format(action.tgt_project))
            return True

        repository = self.project_repository(action.tgt_project)
        if not repository:
            self.review_messages['declined'] = ERROR_REPO_SPECIFIED.format(action.tgt_project)
            return False

        repository_pairs = []
        # Assumes maintenance_release target project has staging disabled.
        staging = Config.get(self.apiurl, action.tgt_project).get('staging')
        if staging:
            api = self.staging_api(staging)
            stage_info = api.packages_staged.get(action.tgt_package)
            if not stage_info or str(stage_info['rq_id']) != str(request.reqid):
                self.logger.info('{} not staged'.format(request.reqid))
                return None

            if not self.force and not self.staging_build_failure_check(api, stage_info['prj']):
                self.logger.info('{} not ready due to staging build failure(s)'.format(request.reqid))
                return None

            # Staging setup is convoluted and thus the repository setup does not
            # contain a path to the target project. Instead the ports repository
            # is used to import the target prjconf. As such the staging group
            # repository must be explicitly layered on top of target project.
            repository_pairs.append([stage_info['prj'], repository])
            repository_pairs.extend(repository_path_expand(self.apiurl, action.tgt_project, repository))
        else:
            # Find a repository which links to target project "main" repository.
            repository = repository_path_search(
                self.apiurl, action.src_project, action.tgt_project, repository)
            if not repository:
                self.review_messages['declined'] = ERROR_REPO_SPECIFIED.format(action.tgt_project)
                return False

            repository_pairs.extend(repository_path_expand(self.apiurl, action.src_project, repository))

        return repository_pairs
Example #7
    def check_action_maintenance_release(self, request, action):
        # No reason to special case patchinfo since it shares the same source
        # and target projects, which is all that repo_checker cares about.

        if action.tgt_project in self.checked_targets:
            return True

        target_config = Config.get(self.apiurl, action.tgt_project)
        if str2bool(target_config.get('repo_checker-project-skip', 'False')):
            # Do not change message as this should only occur in requests
            # targeting multiple projects such as in maintenance workflow in
            # which the message should be set by other actions.
            self.logger.debug('skipping review of action targeting {}'.format(
                action.tgt_project))
            return True

        repository = target_config.get('main-repo')
        if not repository:
            raise Exception('Missing main-repo in OSRT:Config')

        # Find a repository which links to target project "main" repository.
        repository = repository_path_search(self.apiurl, action.src_project,
                                            action.tgt_project, repository)
        if not repository:
            raise Exception('Missing repositories')

        repository_pairs = repository_path_expand(self.apiurl,
                                                  action.src_project,
                                                  repository)

        self.checked_targets.add(action.tgt_project)
        archs = set(target_archs(self.apiurl, action.src_project, repository))
        arch_whitelist = target_config.get('repo_checker-arch-whitelist', None)
        if arch_whitelist:
            archs = set(arch_whitelist.split(' ')).intersection(archs)

        if not self.repository_check(repository_pairs, archs):
            return None

        self.review_messages['accepted'] = 'install check passed'
        return True
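
The arch whitelist handling above is a plain set intersection: an unset option means all build architectures, a set option restricts to the listed ones that actually build. In isolation:

archs = {'x86_64', 'i586', 'aarch64'}
arch_whitelist = 'x86_64 aarch64 ppc64le'
if arch_whitelist:
    archs = set(arch_whitelist.split(' ')).intersection(archs)
print(sorted(archs))  # ['aarch64', 'x86_64']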
Example #8
    def __init__(self, api, config):
        self.api = api
        self.logger = logging.getLogger('InstallChecker')
        self.commentapi = CommentAPI(api.apiurl)

        self.arch_whitelist = config.get('repo_checker-arch-whitelist')
        if self.arch_whitelist:
            self.arch_whitelist = set(self.arch_whitelist.split(' '))

        self.ring_whitelist = set(
            config.get('repo_checker-binary-whitelist-ring', '').split(' '))

        self.cycle_packages = config.get('repo_checker-allowed-in-cycles')
        self.calculate_allowed_cycles()

        self.ignore_duplicated = set(
            config.get('installcheck-ignore-duplicated-binaries',
                       '').split(' '))
        self.ignore_conflicts = set(
            config.get('installcheck-ignore-conflicts', '').split(' '))
        self.ignore_deletes = str2bool(
            config.get('installcheck-ignore-deletes', 'False'))
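
Note the two list-splitting styles in this constructor and in the earlier examples: split() with no argument turns an unset option into an empty list, while split(' ') turns it into [''], so whitelists built with the latter carry an empty-string member when the option is absent:

>>> ''.split()
[]
>>> set(''.split(' '))
{''}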
Example #9
    def package_comments(self, project, repository):
        self.logger.info('{} package comments'.format(len(self.package_results)))

        for package, sections in self.package_results.items():
            if str2bool(Config.get(self.apiurl, project).get('repo_checker-package-comment-devel', 'False')):
                bot_name_suffix = project
                comment_project, comment_package = devel_project_fallback(self.apiurl, project, package)
                if comment_project is None or comment_package is None:
                    self.logger.warning('unable to find devel project for {}'.format(package))
                    continue

                message = 'The version of this package in [`{project}`](/package/show/{project}/{package}) ' \
                    'has installation issues and may not be installable:'.format(
                        project=project, package=package)
            else:
                bot_name_suffix = repository
                comment_project = project
                comment_package = package
                message = 'This package has installation issues and may not be installable from the `{}` ' \
                    'repository:'.format(repository)

            # Sort sections by text to group binaries together.
            sections = sorted(sections, key=lambda s: s.text)
            message += '\n\n<pre>\n{}\n</pre>'.format(
                '\n'.join([section.text for section in sections]).strip())

            # Generate a hash based on the binaries involved and the number of
            # sections. This eliminates version or release changes from causing
            # an update to the comment while still updating on relevant changes.
            binaries = set()
            for section in sections:
                binaries.update(section.binaries)
            info = ';'.join(['::'.join(sorted(binaries)), str(len(sections))])
            reference = hashlib.sha1(info.encode('utf-8')).hexdigest()[:7]

            # Post comment on package in order to notify maintainers.
            self.comment_write(state='seen', result=reference, bot_name_suffix=bot_name_suffix,
                               project=comment_project, package=comment_package, message=message)
Example #10
    def init(directory='main'):
        if Cache.CACHE_DIR:
            # Stick with the first initialization so that StagingAPI can
            # ensure the cache is always enabled, while still letting the
            # parent choose the directory.
            return

        Cache.CACHE_DIR = CacheManager.directory('request', directory)

        Cache.patterns = []

        if str2bool(os.environ.get('OSRT_DISABLE_CACHE', '')):
            if conf.config['debug']:
                print('CACHE_DISABLE via $OSRT_DISABLE_CACHE', file=sys.stderr)
            return

        for pattern in Cache.PATTERNS:
            Cache.patterns.append(re.compile(pattern))

        # Replace http_request with wrapper function which needs a stored
        # version of the original function to call.
        if not hasattr(osc.core, '_http_request'):
            osc.core._http_request = osc.core.http_request
            osc.core.http_request = http_request
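
The last three lines of init() use the wrap-once idiom: stash the original function under a private name, install the wrapper in its place, and guard with hasattr so a second init() cannot wrap the wrapper. A self-contained sketch of the same idiom, with a SimpleNamespace standing in for osc.core and a call counter standing in for the cache logic:

import types

mod = types.SimpleNamespace(http_request=lambda url: 'fetched ' + url)

def http_request(url):
    http_request.calls += 1        # a cache lookup would go here
    return mod._http_request(url)  # delegate to the stored original
http_request.calls = 0

if not hasattr(mod, '_http_request'):
    mod._http_request = mod.http_request
    mod.http_request = http_request

print(mod.http_request('obs://request/1'))  # fetched obs://request/1
print(http_request.calls)                   # 1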
Example #11
    def update_and_solve_target(self, api, target_project, target_config, main_repo,
                                project, scope, force, no_checkout,
                                only_release_packages, stop_after_solve):
        self.all_architectures = target_config.get('pkglistgen-archs').split(' ')
        ignore_repos = set(target_config.get('pkglistgen-ignore_repos', '').split(' '))
        self.repos = [r for r in self.expand_repos(project, main_repo)
                      if r[0] != project or r[1] not in ignore_repos]
        print('[{}] {}/{}: update and solve'.format(scope, project, main_repo))

        group = target_config.get('pkglistgen-group', '000package-groups')
        product = target_config.get('pkglistgen-product', '000product')
        release = target_config.get('pkglistgen-release', '000release-packages')
        oldrepos = target_config.get('pkglistgen-repos', '000update-repos')

        url = api.makeurl(['source', project])
        packages = ET.parse(http_GET(url)).getroot()
        if packages.find('entry[@name="{}"]'.format(product)) is None:
            if not self.dry_run:
                undelete_package(api.apiurl, project, product, 'revive')
            # TODO disable build.
            print('{} undeleted, skip dvd until next cycle'.format(product))
            return
        elif not force:
            root = ET.fromstringlist(show_results_meta(api.apiurl, project, product,
                                                       repository=[main_repo], multibuild=True))
            if len(root.xpath('result[@state="building"]')) or len(root.xpath('result[@state="dirty"]')):
                print('{}/{} build in progress'.format(project, product))
                return

        drop_list = api.item_exists(project, oldrepos)
        checkout_list = [group, product, release]
        if drop_list and not only_release_packages:
            checkout_list.append(oldrepos)

        if packages.find('entry[@name="{}"]'.format(release)) is None:
            if not self.dry_run:
                undelete_package(api.apiurl, project, release, 'revive')
            print('{} undeleted, skip dvd until next cycle'.format(release))
            return

        # Cache dir specific to hostname and project.
        host = urlparse(api.apiurl).hostname
        cache_dir = CacheManager.directory('pkglistgen', host, project)

        if not no_checkout:
            if os.path.exists(cache_dir):
                shutil.rmtree(cache_dir)
            os.makedirs(cache_dir)

        group_dir = os.path.join(cache_dir, group)
        product_dir = os.path.join(cache_dir, product)
        release_dir = os.path.join(cache_dir, release)
        oldrepos_dir = os.path.join(cache_dir, oldrepos)

        for package in checkout_list:
            if no_checkout:
                print('Skipping checkout of {}/{}'.format(project, package))
                continue
            checkout_package(api.apiurl, project, package, expand_link=True, prj_dir=cache_dir)

        file_utils.unlink_all_except(release_dir, ['weakremovers.inc'])
        if not only_release_packages:
            file_utils.unlink_all_except(product_dir)
        file_utils.copy_directory_contents(
            group_dir, product_dir,
            ['supportstatus.txt', 'groups.yml', 'reference-unsorted.yml',
             'reference-summary.yml', 'package-groups.changes'])
        file_utils.change_extension(product_dir, '.spec.in', '.spec')
        file_utils.change_extension(product_dir, '.product.in', '.product')

        self.input_dir = group_dir
        self.output_dir = product_dir

        print('-> do_update')
        # make sure we only calculate existing architectures
        self.filter_architectures(target_archs(api.apiurl, project, main_repo))
        self.update_repos(self.filtered_architectures)

        if only_release_packages:
            self.load_all_groups()
            self.write_group_stubs()
        else:
            summary = self.solve_project(ignore_unresolvable=str2bool(target_config.get('pkglistgen-ignore-unresolvable')),
                                         ignore_recommended=str2bool(target_config.get('pkglistgen-ignore-recommended')),
                                         locale=target_config.get('pkglistgen-locale'),
                                         locales_from=target_config.get('pkglistgen-locales-from'))

        if stop_after_solve:
            return

        if drop_list:
            weakremovers_file = os.path.join(release_dir, 'weakremovers.inc')
            self.create_weakremovers(project, target_config, oldrepos_dir, output=open(weakremovers_file, 'w'))

        delete_products = target_config.get('pkglistgen-delete-products', '').split(' ')
        file_utils.unlink_list(product_dir, delete_products)

        print('-> product service')
        product_version = attribute_value_load(api.apiurl, project, 'ProductVersion')
        if not product_version:
            # for stagings the product version doesn't matter (I hope)
            product_version = '1'
        for product_file in glob.glob(os.path.join(product_dir, '*.product')):
            self.replace_product_version(product_file, product_version)
            print(subprocess.check_output(
                [PRODUCT_SERVICE, product_file, product_dir, project], encoding='utf-8'))

        for delete_kiwi in target_config.get('pkglistgen-delete-kiwis-{}'.format(scope), '').split(' '):
            delete_kiwis = glob.glob(os.path.join(product_dir, delete_kiwi))
            file_utils.unlink_list(product_dir, delete_kiwis)
        if scope == 'staging':
            self.strip_medium_from_staging(product_dir)

        spec_files = glob.glob(os.path.join(product_dir, '*.spec'))
        file_utils.move_list(spec_files, release_dir)
        inc_files = glob.glob(os.path.join(group_dir, '*.inc'))
        # filter out the special weakremovers.inc; it is handled separately below
        inc_files = filter(lambda file: not file.endswith('weakremovers.inc'), inc_files)
        file_utils.move_list(inc_files, release_dir)

        # do not overwrite weakremovers.inc if it exists
        # we will commit there afterwards if needed
        if os.path.exists(os.path.join(group_dir, 'weakremovers.inc')) and \
           not os.path.exists(os.path.join(release_dir, 'weakremovers.inc')):
            file_utils.move_list([os.path.join(group_dir, 'weakremovers.inc')], release_dir)

        file_utils.multibuild_from_glob(release_dir, '*.spec')
        self.build_stub(release_dir, 'spec')
        self.commit_package(release_dir)

        if only_release_packages:
            return

        file_utils.multibuild_from_glob(product_dir, '*.kiwi')
        self.build_stub(product_dir, 'kiwi')
        self.commit_package(product_dir)

        error_output = b''
        reference_summary = os.path.join(group_dir, 'reference-summary.yml')
        if os.path.isfile(reference_summary):
            summary_file = os.path.join(product_dir, 'summary.yml')
            with open(summary_file, 'w') as f:
                f.write('# Summary of packages in groups')
                for group in sorted(summary):
                    # the unsorted group should appear filtered by
                    # unneeded.yml - so we need the content of unsorted.yml
                    # not unsorted.group (this grew a little unnaturally)
                    if group == 'unsorted':
                        continue
                    f.write('\n' + group + ':\n')
                    for package in sorted(summary[group]):
                        f.write('  - ' + package + '\n')

            try:
                error_output += subprocess.check_output(['diff', '-u', reference_summary, summary_file])
            except subprocess.CalledProcessError as e:
                error_output += e.output
            reference_unsorted = os.path.join(group_dir, 'reference-unsorted.yml')
            unsorted_file = os.path.join(product_dir, 'unsorted.yml')
            try:
                error_output += subprocess.check_output(['diff', '-u', reference_unsorted, unsorted_file])
            except subprocess.CalledProcessError as e:
                error_output += e.output

        if len(error_output) > 0:
            self.logger.error('Difference in yml:\n' + error_output.decode('utf-8'))
            return True
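
The summary comparison above leans on diff -u exiting non-zero when the files differ, which subprocess surfaces as CalledProcessError carrying the diff text on e.output. The capture pattern in isolation:

import subprocess

def unified_diff(reference, current):
    # diff exits 0 when the files match and 1 when they differ;
    # check_output raises on non-zero exit, so the diff text is
    # recovered from the exception.
    try:
        return subprocess.check_output(['diff', '-u', reference, current])
    except subprocess.CalledProcessError as e:
        return e.output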
Example #12
    def repository_check(self,
                         repository_pairs,
                         state_hash,
                         simulate_merge,
                         post_comments=False):
        comment = []
        project, repository = repository_pairs[0]
        self.logger.info('checking {}/{}@{}[{}]'.format(
            project, repository, state_hash, len(repository_pairs)))

        published = repositories_published(self.apiurl, repository_pairs)

        if not self.force:
            if state_hash == self.repository_state_last(
                    project, repository, not simulate_merge):
                self.logger.info('{} build unchanged'.format(project))
                # TODO keep track of skipped count for cycle summary
                return None

            # For submit style requests, we want to process once the top layer
            # is done, but not mark the review as final until all layers are
            # published.
            if published is not True and (not simulate_merge
                                          or published[0] == project):
                # Require all layers to be published except when the top layer
                # is published in a simulate merge (allows quicker feedback with
                # potentially incorrect results for staging).
                self.logger.info('{}/{} not published'.format(
                    published[0], published[1]))
                return None

        # Drop non-published repository information and thus reduce to boolean.
        published = published is True

        if simulate_merge:
            # Restrict top layer archs to the whitelisted archs from merge layer.
            archs = set(target_archs(self.apiurl, project,
                                     repository)).intersection(
                                         set(
                                             self.target_archs(
                                                 repository_pairs[1][0],
                                                 repository_pairs[1][1])))
        else:
            # Top of pseudometa file.
            comment.append(state_hash)
            archs = self.target_archs(project, repository)

            if post_comments:
                # Stores parsed install_check() results grouped by package.
                self.package_results = {}

        if not len(archs):
            self.logger.debug(
                '{} has no relevant architectures'.format(project))
            return None

        result = True
        for arch in archs:
            directories = []
            for pair_project, pair_repository in repository_pairs:
                directories.append(
                    self.mirror(pair_project, pair_repository, arch))

            if simulate_merge:
                ignore = self.simulated_merge_ignore(repository_pairs[0],
                                                     repository_pairs[1], arch)
                whitelist = self.binary_whitelist(repository_pairs[0],
                                                  repository_pairs[1], arch)

                results = {
                    'cycle':
                    self.cycle_check(repository_pairs[0], repository_pairs[1],
                                     arch),
                    'install':
                    self.install_check(repository_pairs[1], arch, directories,
                                       ignore, whitelist),
                }
            else:
                # Only products themselves, or perhaps projects working on
                # cleaning up a product, will want no-filter.
                no_filter = str2bool(
                    Config.get(self.apiurl,
                               project).get('repo_checker-no-filter'))
                results = {
                    'cycle':
                    CheckResult(True, None),
                    'install':
                    self.install_check(repository_pairs[0],
                                       arch,
                                       directories,
                                       parse=post_comments,
                                       no_filter=no_filter),
                }

            if not all(result.success for _, result in results.items()):
                # Not all checks passed, build comment.
                result = False
                self.result_comment(repository, arch, results, comment)

        if simulate_merge:
            info_extra = {'build': state_hash}
            if not result:
                # Some checks in group did not pass, post comment.
                # Avoid identical comments with different build hash during
                # target project build phase. Once published update regardless.
                self.comment_write(state='seen',
                                   result='failed',
                                   project=project,
                                   message='\n'.join(comment).strip(),
                                   identical=True,
                                   info_extra=info_extra,
                                   info_extra_identical=published,
                                   bot_name_suffix=repository)
            else:
                # Post passed comment only if previous failed comment.
                text = 'Previously reported problems have been resolved.'
                self.comment_write(state='done',
                                   result='passed',
                                   project=project,
                                   message=text,
                                   identical=True,
                                   only_replace=True,
                                   info_extra=info_extra,
                                   bot_name_suffix=repository)
        else:
            text = '\n'.join(comment).strip()
            if not self.dryrun:
                filename = self.project_pseudometa_file_name(
                    project, repository)
                project_pseudometa_file_ensure(
                    self.apiurl, project, filename, text + '\n',
                    'repo_checker project_only run')
            else:
                print(text)

            if post_comments:
                self.package_comments(project, repository)

        if result and not published:
            # Wait for the complete stack to build before positive result.
            self.logger.debug(
                'demoting result from accept to ignore due to non-published layer'
            )
            result = None

        return result
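
Judging from its usage above, repositories_published returns either True or the first unpublished (project, repository) pair; once the pair has been logged, published = published is True collapses the mixed value to a plain boolean. In isolation:

published = ('openSUSE:Factory', 'standard')  # e.g. first unpublished layer
print('{}/{} not published'.format(published[0], published[1]))
published = published is True                 # now False, a plain bool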
Example #13
def origin_update(apiurl, target_project, package):
    origin_info = origin_find(apiurl, target_project, package)
    if not origin_info:
        # Cases for a lack of origin:
        # - package does not exist in target_project
        # - initial package submission from devel (lacking devel meta on package)
        # - initial package submission overridden to allow from no origin
        # - origin project/package deleted
        #
        # Ideally, the second case should never be used; the first case
        # should be preferred instead.

        # Check for accepted source submission with devel annotation and create
        # change_devel request as automatic follow-up to approval.
        config = config_load(apiurl, target_project)
        request_actions = request_action_list_source(apiurl,
                                                     target_project,
                                                     package,
                                                     states=['accepted'])
        for request, action in sorted(request_actions,
                                      key=lambda i: i[0].reqid,
                                      reverse=True):
            annotation = origin_annotation_load(request, action,
                                                config['review-user'])
            if not annotation:
                continue

            origin = annotation.get('origin')
            if origin_workaround_check(origin):
                continue

            if origin not in config_origin_list(config, apiurl,
                                                target_project):
                message = f'Set devel project based on initial submission in request#{request.reqid}.'
                return request_create_change_devel(apiurl,
                                                   origin,
                                                   package,
                                                   target_project,
                                                   message=message)

        # Either the first or one of the second two cases.
        exists = entity_exists(apiurl, target_project, package)
        origin, version = origin_potential(apiurl, target_project, package,
                                           not exists)
        if origin is None:
            if not exists:
                # Package does not exist to be deleted.
                return False

            # Package is not found in any origin so request deletion.
            message = 'Package not available from any potential origin.'
            return request_create_delete(apiurl, target_project, package,
                                         message)

        if not exists:
            if origin_update_initial_blacklisted(apiurl, target_project,
                                                 package):
                return False

            message = 'Submitting new package from highest potential origin.'
            return request_create_submit(apiurl,
                                         origin,
                                         package,
                                         target_project,
                                         message=message,
                                         ignore_if_any_request=True)

        # No longer tracking previous origin (likely removed from origin) so
        # submit from the highest potential origin.
        message = 'Submitting package from highest potential origin.'
        return request_create_submit(apiurl,
                                     origin,
                                     package,
                                     target_project,
                                     message=message)

    if origin_workaround_check(origin_info.project):
        # Do not attempt to update workarounds as the expected flow is to
        # either explicitly switch back to non-workaround or have the source
        # match at some point and implicitly switch.
        return False

    if origin_info.pending:
        # Already accepted source ahead of origin so nothing to do.
        return False

    policy = policy_get(apiurl, target_project, package, origin_info.project)
    if not policy['automatic_updates']:
        return False

    mode = origin_update_mode(apiurl, target_project, package, policy,
                              origin_info.project)
    if mode['skip']:
        return False

    age = package_source_age(apiurl, origin_info.project,
                             package).total_seconds()
    if age < int(mode['delay']):
        return False

    supersede = str2bool(str(mode['supersede']))
    frequency = int(mode['frequency'])

    if policy['pending_submission_allow']:
        request_id = origin_update_pending(apiurl, origin_info.project,
                                           package, target_project, policy,
                                           supersede, frequency)
        if request_id:
            return request_id

    message = 'Newer source available from package origin.'
    return request_create_submit(apiurl,
                                 origin_info.project,
                                 package,
                                 target_project,
                                 message=message,
                                 supersede=supersede,
                                 frequency=frequency)
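
One subtlety near the end: mode['supersede'] may arrive as a real bool or as a string depending on where the policy value came from, hence the str2bool(str(...)) round-trip to normalize both forms. With the str2bool sketch from the top of this page:

>>> str2bool(str(True))   # policy value already parsed to a bool
True
>>> str2bool(str('true')) # policy value still a raw string
True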
Example #14
    def update_and_solve_target(self, api, target_project, target_config,
                                main_repo, project, scope, force, no_checkout,
                                only_release_packages, stop_after_solve):
        self.all_architectures = target_config.get('pkglistgen-archs').split(
            ' ')
        self.use_newest_version = str2bool(
            target_config.get('pkglistgen-use-newest-version', 'False'))
        self.repos = self.expand_repos(project, main_repo)
        logging.debug('[{}] {}/{}: update and solve'.format(
            scope, project, main_repo))

        group = target_config.get('pkglistgen-group', '000package-groups')
        product = target_config.get('pkglistgen-product', '000product')
        release = target_config.get('pkglistgen-release',
                                    '000release-packages')
        oldrepos = target_config.get('pkglistgen-repos', '000update-repos')

        url = api.makeurl(['source', project])
        packages = ET.parse(http_GET(url)).getroot()
        if packages.find('entry[@name="{}"]'.format(product)) is None:
            if not self.dry_run:
                undelete_package(api.apiurl, project, product, 'revive')
            # TODO disable build.
            logging.info(
                '{} undeleted, skip dvd until next cycle'.format(product))
            return
        elif not force:
            root = ET.fromstringlist(
                show_results_meta(api.apiurl,
                                  project,
                                  product,
                                  repository=[main_repo],
                                  multibuild=True))
            if len(root.xpath('result[@state="building"]')) or len(
                    root.xpath('result[@state="dirty"]')):
                logging.info('{}/{} build in progress'.format(
                    project, product))
                return

        drop_list = api.item_exists(project, oldrepos)
        checkout_list = [group, product, release]
        if drop_list and not only_release_packages:
            checkout_list.append(oldrepos)

        if packages.find('entry[@name="{}"]'.format(release)) is None:
            if not self.dry_run:
                undelete_package(api.apiurl, project, release, 'revive')
            logging.info(
                '{} undeleted, skip dvd until next cycle'.format(release))
            return

        # Cache dir specific to hostname and project.
        host = urlparse(api.apiurl).hostname
        cache_dir = CacheManager.directory('pkglistgen', host, project)

        if not no_checkout:
            if os.path.exists(cache_dir):
                shutil.rmtree(cache_dir)
            os.makedirs(cache_dir)

        group_dir = os.path.join(cache_dir, group)
        product_dir = os.path.join(cache_dir, product)
        release_dir = os.path.join(cache_dir, release)
        oldrepos_dir = os.path.join(cache_dir, oldrepos)

        self.input_dir = group_dir
        self.output_dir = product_dir

        for package in checkout_list:
            if no_checkout:
                logging.debug('Skipping checkout of {}/{}'.format(
                    project, package))
                continue
            checkout_package(api.apiurl,
                             project,
                             package,
                             expand_link=True,
                             prj_dir=cache_dir,
                             outdir=os.path.join(cache_dir, package))

        file_utils.unlink_all_except(release_dir, ['weakremovers.inc'])
        if not only_release_packages:
            file_utils.unlink_all_except(product_dir)
        ignore_list = [
            'supportstatus.txt', 'summary-staging.txt',
            'package-groups.changes'
        ]
        ignore_list += self.group_input_files()
        file_utils.copy_directory_contents(group_dir, product_dir, ignore_list)
        file_utils.change_extension(product_dir, '.spec.in', '.spec')
        file_utils.change_extension(product_dir, '.product.in', '.product')

        logging.debug('-> do_update')
        # make sure we only calculate existing architectures
        self.filter_architectures(target_archs(api.apiurl, project, main_repo))
        self.update_repos(self.filtered_architectures)

        if only_release_packages:
            self.load_all_groups()
            self.write_group_stubs()
        else:
            summary = self.solve_project(
                ignore_unresolvable=str2bool(
                    target_config.get('pkglistgen-ignore-unresolvable')),
                ignore_recommended=str2bool(
                    target_config.get('pkglistgen-ignore-recommended')),
                locale=target_config.get('pkglistgen-locale'),
                locales_from=target_config.get('pkglistgen-locales-from'))

        if stop_after_solve:
            return

        if drop_list and not only_release_packages:
            weakremovers_file = os.path.join(release_dir, 'weakremovers.inc')
            try:
                self.create_weakremovers(project,
                                         target_config,
                                         oldrepos_dir,
                                         output=open(weakremovers_file, 'w'))
            except MismatchedRepoException:
                logging.error(
                    "Failed to create weakremovers.inc due to mismatch in repos - project most likey started building again."
                )
                return

        delete_products = target_config.get('pkglistgen-delete-products',
                                            '').split(' ')
        file_utils.unlink_list(product_dir, delete_products)

        logging.debug('-> product service')
        product_version = attribute_value_load(api.apiurl, project,
                                               'ProductVersion')
        if not product_version:
            # for stagings the product version doesn't matter (I hope)
            product_version = '1'
        for product_file in glob.glob(os.path.join(product_dir, '*.product')):
            self.replace_product_version(product_file, product_version)
            logging.debug(
                subprocess.check_output(
                    [PRODUCT_SERVICE, product_file, product_dir, project],
                    encoding='utf-8'))

        for delete_kiwi in target_config.get(
                'pkglistgen-delete-kiwis-{}'.format(scope), '').split(' '):
            delete_kiwis = glob.glob(os.path.join(product_dir, delete_kiwi))
            file_utils.unlink_list(product_dir, delete_kiwis)
        if scope == 'staging':
            self.strip_medium_from_staging(product_dir)

        spec_files = glob.glob(os.path.join(product_dir, '*.spec'))
        file_utils.move_list(spec_files, release_dir)
        inc_files = glob.glob(os.path.join(group_dir, '*.inc'))
        # filter out the special weakremovers.inc; it is handled separately below
        inc_files = filter(lambda file: not file.endswith('weakremovers.inc'),
                           inc_files)
        file_utils.move_list(inc_files, release_dir)

        # do not overwrite weakremovers.inc if it exists
        # we will commit there afterwards if needed
        if os.path.exists(os.path.join(group_dir, 'weakremovers.inc')) and \
           not os.path.exists(os.path.join(release_dir, 'weakremovers.inc')):
            file_utils.move_list([os.path.join(group_dir, 'weakremovers.inc')],
                                 release_dir)

        file_utils.multibuild_from_glob(release_dir, '*.spec')
        self.build_stub(release_dir, 'spec')
        self.commit_package(release_dir)

        if only_release_packages:
            return

        file_utils.multibuild_from_glob(product_dir, '*.kiwi')
        self.build_stub(product_dir, 'kiwi')

        reference_summary = os.path.join(group_dir, f'summary-{scope}.txt')
        if os.path.isfile(reference_summary):
            summary_file = os.path.join(product_dir, f'summary-{scope}.txt')
            output = []
            for group in summary:
                for package in sorted(summary[group]):
                    output.append(f'{package}:{group}')

            with open(summary_file, 'w') as f:
                for line in sorted(output):
                    f.write(line + '\n')

        self.commit_package(product_dir)

        if os.path.isfile(reference_summary):
            return self.comment.handle_package_diff(project, reference_summary,
                                                    summary_file)
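
The summary-{scope}.txt writer above flattens the group mapping into package:group lines and sorts the whole list, so the committed file is order-independent and diffs cleanly between runs. The same flattening in isolation:

summary = {'base': ['glibc', 'bash'], 'x11': ['xterm']}
lines = sorted('{}:{}'.format(package, group)
               for group in summary
               for package in summary[group])
print('\n'.join(lines))  # bash:base, glibc:base, xterm:x11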
Example #15
    def update_and_solve_target(self, api, target_project, target_config, main_repo,
                                project, scope, force, no_checkout,
                                only_release_packages, stop_after_solve):
        self.all_architectures = target_config.get('pkglistgen-archs').split(' ')
        self.repos = self.expand_repos(project, main_repo)
        print('[{}] {}/{}: update and solve'.format(scope, project, main_repo))

        group = target_config.get('pkglistgen-group', '000package-groups')
        product = target_config.get('pkglistgen-product', '000product')
        release = target_config.get('pkglistgen-release', '000release-packages')
        oldrepos = target_config.get('pkglistgen-repos', '000update-repos')

        url = api.makeurl(['source', project])
        packages = ET.parse(http_GET(url)).getroot()
        if packages.find('entry[@name="{}"]'.format(product)) is None:
            if not self.dry_run:
                undelete_package(api.apiurl, project, product, 'revive')
            # TODO disable build.
            print('{} undeleted, skip dvd until next cycle'.format(product))
            return
        elif not force:
            root = ET.fromstringlist(show_results_meta(api.apiurl, project, product,
                                                       repository=[main_repo], multibuild=True))
            if len(root.xpath('result[@state="building"]')) or len(root.xpath('result[@state="dirty"]')):
                print('{}/{} build in progress'.format(project, product))
                return

        drop_list = api.item_exists(project, oldrepos)
        checkout_list = [group, product, release]
        if drop_list and not only_release_packages:
            checkout_list.append(oldrepos)

        if packages.find('entry[@name="{}"]'.format(release)) is None:
            if not self.dry_run:
                undelete_package(api.apiurl, project, release, 'revive')
            print('{} undeleted, skip dvd until next cycle'.format(release))
            return

        # Cache dir specific to hostname and project.
        host = urlparse(api.apiurl).hostname
        cache_dir = CacheManager.directory('pkglistgen', host, project)

        if not no_checkout:
            if os.path.exists(cache_dir):
                shutil.rmtree(cache_dir)
            os.makedirs(cache_dir)

        group_dir = os.path.join(cache_dir, group)
        product_dir = os.path.join(cache_dir, product)
        release_dir = os.path.join(cache_dir, release)
        oldrepos_dir = os.path.join(cache_dir, oldrepos)

        for package in checkout_list:
            if no_checkout:
                print('Skipping checkout of {}/{}'.format(project, package))
                continue
            checkout_package(api.apiurl, project, package, expand_link=True, prj_dir=cache_dir)

        file_utils.unlink_all_except(release_dir, ['weakremovers.inc'])
        if not only_release_packages:
            file_utils.unlink_all_except(product_dir)
        file_utils.copy_directory_contents(
            group_dir, product_dir,
            ['supportstatus.txt', 'groups.yml', 'reference-unsorted.yml',
             'reference-summary.yml', 'package-groups.changes'])
        file_utils.change_extension(product_dir, '.spec.in', '.spec')
        file_utils.change_extension(product_dir, '.product.in', '.product')

        self.input_dir = group_dir
        self.output_dir = product_dir

        print('-> do_update')
        # make sure we only calculate existing architectures
        self.filter_architectures(target_archs(api.apiurl, project, main_repo))
        self.update_repos(self.filtered_architectures)

        if only_release_packages:
            self.load_all_groups()
            self.write_group_stubs()
        else:
            summary = self.solve_project(ignore_unresolvable=str2bool(target_config.get('pkglistgen-ignore-unresolvable')),
                                         ignore_recommended=str2bool(target_config.get('pkglistgen-ignore-recommended')),
                                         locale=target_config.get('pkglistgen-locale'),
                                         locales_from=target_config.get('pkglistgen-locales-from'))

        if stop_after_solve:
            return

        if drop_list:
            weakremovers_file = os.path.join(release_dir, 'weakremovers.inc')
            self.create_weakremovers(project, target_config, oldrepos_dir, output=open(weakremovers_file, 'w'))

        delete_products = target_config.get('pkglistgen-delete-products', '').split(' ')
        file_utils.unlink_list(product_dir, delete_products)

        print('-> product service')
        product_version = attribute_value_load(api.apiurl, project, 'ProductVersion')
        if not product_version:
            # for stagings the product version doesn't matter (I hope)
            product_version = '1'
        for product_file in glob.glob(os.path.join(product_dir, '*.product')):
            self.replace_product_version(product_file, product_version)
            print(subprocess.check_output(
                [PRODUCT_SERVICE, product_file, product_dir, project], encoding='utf-8'))

        for delete_kiwi in target_config.get('pkglistgen-delete-kiwis-{}'.format(scope), '').split(' '):
            delete_kiwis = glob.glob(os.path.join(product_dir, delete_kiwi))
            file_utils.unlink_list(product_dir, delete_kiwis)
        if scope == 'staging':
            self.strip_medium_from_staging(product_dir)

        spec_files = glob.glob(os.path.join(product_dir, '*.spec'))
        file_utils.move_list(spec_files, release_dir)
        inc_files = glob.glob(os.path.join(group_dir, '*.inc'))
        # filter out the special weakremovers.inc; it is handled separately below
        inc_files = filter(lambda file: not file.endswith('weakremovers.inc'), inc_files)
        file_utils.move_list(inc_files, release_dir)

        # do not overwrite weakremovers.inc if it exists
        # we will commit there afterwards if needed
        if os.path.exists(os.path.join(group_dir, 'weakremovers.inc')) and \
           not os.path.exists(os.path.join(release_dir, 'weakremovers.inc')):
            file_utils.move_list([os.path.join(group_dir, 'weakremovers.inc')], release_dir)

        file_utils.multibuild_from_glob(release_dir, '*.spec')
        self.build_stub(release_dir, 'spec')
        self.commit_package(release_dir)

        if only_release_packages:
            return

        file_utils.multibuild_from_glob(product_dir, '*.kiwi')
        self.build_stub(product_dir, 'kiwi')
        self.commit_package(product_dir)

        error_output = b''
        reference_summary = os.path.join(group_dir, 'reference-summary.yml')
        if os.path.isfile(reference_summary):
            summary_file = os.path.join(product_dir, 'summary.yml')
            with open(summary_file, 'w') as f:
                f.write('# Summary of packages in groups')
                for group in sorted(summary):
                    # the unsorted group should appear filtered by
                    # unneeded.yml - so we need the content of unsorted.yml
                    # not unsorted.group (this grew a little unnaturally)
                    if group == 'unsorted':
                        continue
                    f.write('\n' + group + ':\n')
                    for package in sorted(summary[group]):
                        f.write('  - ' + package + '\n')

            try:
                error_output += subprocess.check_output(['diff', '-u', reference_summary, summary_file])
            except subprocess.CalledProcessError as e:
                error_output += e.output
            reference_unsorted = os.path.join(group_dir, 'reference-unsorted.yml')
            unsorted_file = os.path.join(product_dir, 'unsorted.yml')
            try:
                error_output += subprocess.check_output(['diff', '-u', reference_unsorted, unsorted_file])
            except subprocess.CalledProcessError as e:
                error_output += e.output

        if len(error_output) > 0:
            self.logger.error('Difference in yml:\n' + error_output.decode('utf-8'))
            return True
Example #16
    def repository_check(self, repository_pairs, state_hash, simulate_merge,
                         whitelist=None, arch_whitelist=None,
                         post_comments=False, cycle_packages=None):
        comment = []
        project, repository = repository_pairs[0]
        self.logger.info('checking {}/{}@{}[{}]'.format(
            project, repository, state_hash, len(repository_pairs)))

        archs = self.target_archs(project, repository, arch_whitelist)
        new_pairs = []
        for pair in repository_pairs:
            has_all = True
            for arch in archs:
                if not repository_arch_state(self.apiurl, pair[0], pair[1], arch):
                    has_all = False
                    break
            # ignore repositories only inherited for config
            if has_all:
                new_pairs.append(pair)
        repository_pairs = new_pairs

        published = repositories_published(self.apiurl, repository_pairs, archs)

        if not self.force:
            if state_hash == self.repository_state_last(project, repository, simulate_merge):
                self.logger.info('{} build unchanged'.format(project))
                # TODO keep track of skipped count for cycle summary
                return None

            # For submit style requests, we want to process once the top layer
            # is done, but not mark the review as final until all layers are
            # published.
            if published is not True and (not simulate_merge or published[0] == project):
                # Require all layers to be published except when the top layer
                # is published in a simulate merge (allows quicker feedback with
                # potentially incorrect results for staging).
                self.logger.info('{}/{} not published'.format(published[0], published[1]))
                return None

        # Drop non-published repository information and thus reduce to boolean.
        published = published is True

        if not simulate_merge:
            # Top of pseudometa file.
            comment.append(state_hash)

            if post_comments:
                # Stores parsed install_check() results grouped by package.
                self.package_results = {}

        if not len(archs):
            self.logger.debug('{} has no relevant architectures'.format(project))
            return None

        result = True
        for arch in archs:
            directories = []
            for pair_project, pair_repository in repository_pairs:
                directories.append(self.mirror(pair_project, pair_repository, arch))

            if simulate_merge:
                ignore = self.simulated_merge_ignore(repository_pairs[0], repository_pairs[1], arch)
                if not whitelist:
                    whitelist = self.binary_whitelist(repository_pairs[0], repository_pairs[1], arch)

                results = {
                    'cycle': self.cycle_check(repository_pairs[0][0], repository_pairs[0][1], arch, cycle_packages),
                    'install': self.install_check(
                        repository_pairs[1], arch, directories, ignore, whitelist),
                }
            else:
                # Only products themselves, or perhaps projects working on
                # cleaning up a product, will want no-filter.
                no_filter = str2bool(Config.get(self.apiurl, project).get('repo_checker-no-filter'))
                results = {
                    'cycle': CheckResult(True, None),
                    'install': self.install_check(repository_pairs[0], arch, directories,
                                                  parse=post_comments, no_filter=no_filter),
                }

            if not all(result.success for _, result in results.items()):
                # Not all checks passed, build comment.
                result = False
                self.result_comment(repository, arch, results, comment)

        if simulate_merge:
            info_extra = {'build': state_hash}
            if not result:
                # Some checks in group did not pass, post comment.
                # Avoid identical comments with different build hash during
                # target project build phase. Once published update regardless.
                self.comment_write(state='seen', result='failed', project=project,
                                   message='\n'.join(comment).strip(), identical=True,
                                   info_extra=info_extra, info_extra_identical=published,
                                   bot_name_suffix=repository)
            else:
                # Post passed comment only if previous failed comment.
                text = 'Previously reported problems have been resolved.'
                self.comment_write(state='done', result='passed', project=project,
                                   message=text, identical=True, only_replace=True,
                                   info_extra=info_extra, bot_name_suffix=repository)
        else:
            text = '\n'.join(comment).strip()
            if not self.dryrun:
                filename = self.project_pseudometa_file_name(project, repository)
                project_pseudometa_file_ensure(
                    self.apiurl, project, filename, text + '\n', 'repo_checker project_only run')
            else:
                print(text)

            if post_comments:
                self.package_comments(project, repository)

        if result and not published:
            # Wait for the complete stack to build before positive result.
            self.logger.debug('demoting result from accept to ignore due to non-published layer')
            result = None

        return result
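
Across all of these examples the bots share the same three-valued result protocol: True accepts the review, False declines it, and None leaves the request untouched until the next polling cycle. A hedged sketch of the dispatch a driver loop would perform (names are illustrative, not the actual ReviewBot API):

def apply_review(result, set_review_state, request_id):
    if result is True:
        set_review_state(request_id, 'accepted')
    elif result is False:
        set_review_state(request_id, 'declined')
    # result is None: leave the review open and re-check on the next run.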