Example #1
    def perform(self, save=False):
        duplicates = {}
        for arch in sorted(target_archs(self.api.apiurl, self.api.project), reverse=True):
            package_binaries, _ = package_binary_list(
                self.api.apiurl, self.api.project, 'standard', arch,
                strip_multibuild=False, exclude_src_debug=True)
            binaries = {}
            for pb in package_binaries:
                if pb.arch != 'noarch' and pb.arch != arch:
                    continue

                binaries.setdefault(arch, {})

                if pb.name in binaries[arch]:
                    duplicates.setdefault(arch, {})
                    duplicates[arch].setdefault(pb.name, set())
                    duplicates[arch][pb.name].add(pb.package)
                    duplicates[arch][pb.name].add(binaries[arch][pb.name])

                    continue

                binaries[arch][pb.name] = pb.package

        # convert sets to lists for readable yaml
        for arch in duplicates.keys():
            for name in duplicates[arch].keys():
                duplicates[arch][name] = list(duplicates[arch][name])

        current = yaml.dump(duplicates, default_flow_style=False)
        if save:
            self.api.pseudometa_file_ensure('duplicate_binaries', current)
        else:
            print(current)
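
Every example on this page calls a target_archs(apiurl, project, repository) helper that returns the architectures enabled for a given repository. For reference, here is a minimal sketch of what such a helper might look like (an illustration, not code from the examples; it assumes the project meta lists one <arch> element per repository):

import xml.etree.ElementTree as ET
from osc.core import show_project_meta

def target_archs(apiurl, project, repository='standard'):
    # Read the project meta and collect the <arch> entries of the
    # requested repository.
    meta = ET.fromstringlist(show_project_meta(apiurl, project))
    return [arch.text for arch in
            meta.findall('repository[@name="{}"]/arch'.format(repository))]
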
Example #2
 def has_whitelist_warnings(self, source_project, source_package,
                            target_project, target_package):
     # Check whether this is a submit to a product project that has warnings
     # for non-whitelisted permissions/files.
     found_entries = set()
     url = osc.core.makeurl(self.apiurl, ['build', target_project])
     xml = ET.parse(osc.core.http_GET(url)).getroot()
     for f in xml.findall('entry'):
         # check the source package's build log in every repo that exists
         # in the target project
         repo = f.attrib['name']
         query = {
             'last': 1,
         }
         for arch in target_archs(self.apiurl, source_project, repo):
             url = osc.core.makeurl(self.apiurl, [
                 'build', source_project, repo, arch, source_package, '_log'
             ],
                                    query=query)
             try:
                 result = osc.core.http_GET(url)
                 contents = result.read().decode('utf-8', errors='ignore')
                 for entry in self.bad_rpmlint_entries:
                     if ': W: ' + entry in contents and entry not in found_entries:
                         self.logger.info(
                             f'found missing whitelist for warning: {entry}')
                         found_entries.add(entry)
             except HTTPError as e:
                 self.logger.info('ERROR in URL %s [%s]' % (url, e))
     return found_entries
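
A sketch of how a review bot might consume the returned set (hypothetical caller; the variable names are illustrative):

warnings = bot.has_whitelist_warnings(src_project, src_package,
                                      tgt_project, tgt_package)
if warnings:
    # decline the request, naming the offending rpmlint warnings
    bot.review_messages['declined'] = 'whitelist warnings: ' + ', '.join(sorted(warnings))
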
Example #3
    def target_archs_from_prairs(self, repository_pairs, simulate_merge):
        if simulate_merge:
            # Restrict top layer archs to the whitelisted archs from merge layer.
            return set(target_archs(self.apiurl, repository_pairs[0][0], repository_pairs[0][1])).intersection(
                   set(self.target_archs(repository_pairs[1][0], repository_pairs[1][1])))

        return self.target_archs(repository_pairs[0][0], repository_pairs[0][1])
Example #4
    def perform(self, save=False):
        duplicates = {}
        for arch in sorted(target_archs(self.api.apiurl, self.api.project),
                           reverse=True):
            url = self.api.makeurl(
                ['build', self.api.project, 'standard', arch],
                {'view': 'binaryversions'})
            data = http_GET(url)
            root = ET.parse(data).getroot()

            binaries = {}
            for packagenode in root.findall('.//binaryversionlist'):
                package = packagenode.get('package')
                for binarynode in packagenode.findall('binary'):
                    binary = binarynode.get('name')
                    # StagingAPI.fileinfo_ext(), but requires lots of calls.
                    match = re.match(r'(.*)-([^-]+)-([^-]+)\.([^-\.]+)\.rpm',
                                     binary)
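                    # e.g. 'foo-1.0-1.1.x86_64.rpm' parses as name='foo',
                    # version='1.0', release='1.1', parch='x86_64'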
                    if not match:
                        continue
                    parch = match.group(4)
                    if parch in ('src', 'nosrc'):
                        continue

                    name = match.group(1)

                    if arch in self.ignore_extra_archs \
                        and package in self.ignore_extra_archs[arch] \
                        and parch in self.ignore_extra_archs[arch][package]:
                        continue

                    binaries.setdefault(arch, {})
                    if name in binaries[arch]:
                        duplicates.setdefault(arch, {})
                        duplicates[arch].setdefault(name, set()).add(package)
                        duplicates[arch][name].add(binaries[arch][name])

                        continue

                    binaries[arch][name] = package

        # convert sets to lists for readable yaml
        for arch in duplicates.keys():
            for name in duplicates[arch].keys():
                duplicates[arch][name] = list(duplicates[arch][name])

        current = yaml.dump(duplicates, default_flow_style=False)
        if save:
            args = [
                '{}:Staging'.format(self.api.project), 'dashboard',
                'duplicate_binaries'
            ]
            previous = self.api.load_file_content(*args)
            if current != previous:
                args.append(current)
                self.api.save_file_content(*args)
        else:
            print(current)
Example #5
    def target_archs(self, project, repository):
        archs = target_archs(self.api.apiurl, project, repository)

        # Check for arch whitelist and use intersection.
        if self.arch_whitelist:
            archs = list(self.arch_whitelist.intersection(set(archs)))

        # Trick to prioritize x86_64.
        return sorted(archs, reverse=True)
Example #6
    def target_archs(self, project, repository):
        archs = target_archs(self.api.apiurl, project, repository)

        # Check for arch whitelist and use intersection.
        if self.arch_whitelist:
            archs = list(self.arch_whitelist.intersection(set(archs)))

        # Trick to prioritize x86_64.
        return sorted(archs, reverse=True)
Example #7
    def target_archs(self, project, repository):
        archs = target_archs(self.apiurl, project, repository)

        # Check for arch whitelist and use intersection.
        whitelist = Config.get(self.apiurl, project).get('repo_checker-arch-whitelist')
        if whitelist:
            archs = list(set(whitelist.split(' ')).intersection(set(archs)))

        # Trick to prioritize x86_64.
        return sorted(archs, reverse=True)
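
The "trick" works because a plain reverse lexicographic sort happens to put x86_64 ahead of the other common architecture names:

archs = ['aarch64', 'i586', 's390x', 'x86_64']
print(sorted(archs, reverse=True))
# ['x86_64', 's390x', 'i586', 'aarch64']
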
Example #8
    def target_archs(self, project, repository):
        archs = target_archs(self.apiurl, project, repository)

        # Check for arch whitelist and use intersection.
        whitelist = Config.get(self.apiurl,
                               project).get('repo_checker-arch-whitelist')
        if whitelist:
            archs = list(set(whitelist.split(' ')).intersection(set(archs)))

        # Trick to prioritize x86_64.
        return sorted(archs, reverse=True)
Example #9
    def target_archs(self, project):
        archs = target_archs(self.apiurl, project)

        # Check for arch whitelist and use intersection.
        product = project.split(':Staging:', 1)[0]
        whitelist = self.staging_config[product].get(
            'repo_checker-arch-whitelist')
        if whitelist:
            archs = list(set(whitelist.split(' ')).intersection(set(archs)))

        # Trick to prioritize x86_64.
        return sorted(archs, reverse=True)
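
The product prefix is derived from the staging project name with a plain string split (the project name here is illustrative):

>>> 'openSUSE:Leap:15.2:Staging:A'.split(':Staging:', 1)[0]
'openSUSE:Leap:15.2'
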
Example #10
    def target_archs_from_prairs(self, repository_pairs, simulate_merge):
        if simulate_merge:
            # Restrict top layer archs to the whitelisted archs from merge layer.
            return set(
                target_archs(self.apiurl, repository_pairs[0][0],
                             repository_pairs[0][1])).intersection(
                                 set(
                                     self.target_archs(
                                         repository_pairs[1][0],
                                         repository_pairs[1][1])))

        return self.target_archs(repository_pairs[0][0],
                                 repository_pairs[0][1])
Example #11
    def still_alive(self):
        if not self.archs:
            self.archs = target_archs(self.apiurl, self.project,
                                      self.repository)
            # initial check
            if self.check_all_archs():
                self.stop()
                return

        # https://gitlab.com/gitlab-org/gitlab-runner/issues/3144
        # forces us to output something every couple of seconds :(
        print('Still alive: {}'.format(datetime.datetime.now().time()))
        self.restart_timer()
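
The example omits the timer plumbing; one plausible shape for the restart_timer() and stop() methods that sit alongside still_alive(), assuming a one-shot threading.Timer (an assumption, not shown in the source):

import threading

class RepoWatcher:
    # the still_alive() method from the example would live here as well
    def restart_timer(self):
        # re-arm a one-shot timer so still_alive() fires again shortly
        self.timer = threading.Timer(30.0, self.still_alive)
        self.timer.start()

    def stop(self):
        self.timer.cancel()
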
Example #12
 def check_all_archs(self, project, repository):
     ids = {}
     try:
         archs = target_archs(self.apiurl, project, repository)
     except HTTPError:
         return None
     for arch in archs:
         repoid = self.check_arch(project, repository, arch)
         if not repoid:
             self.logger.info('{}/{}/{} not yet done'.format(project, repository, arch))
             return None
         ids[arch] = repoid
     self.logger.info('All of {}/{} finished'.format(project, repository))
     return ids
Example #13
 def check_all_archs(self, project, repository):
     ids = {}
     try:
         archs = target_archs(self.apiurl, project, repository)
     except HTTPError:
         return None
     for arch in archs:
         repoid = self.check_arch(project, repository, arch)
         if not repoid:
             self.logger.info('{}/{}/{} not yet done'.format(project, repository, arch))
             return None
         ids[arch] = repoid
     self.logger.info('All of {}/{} finished'.format(project, repository))
     return ids
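
A possible caller (project and repository names are illustrative): poll check_all_archs() until it reports a repo id for every architecture.

ids = bot.check_all_archs('openSUSE:Factory', 'standard')
if ids is None:
    print('not all architectures are finished yet')
else:
    print('repo ids per arch:', ids)
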
Example #14
    def check(self, project, repository):
        if not repository:
            repository = self.project_repository(project)
        if not repository:
            self.logger.error('a repository must be specified via OSRT:Config main-repo for {}'.format(project))
            return

        config = Config.get(self.apiurl, project)

        archs = target_archs(self.apiurl, project, repository)
        if not len(archs):
            self.logger.debug('{} has no relevant architectures'.format(project))
            return None

        for arch in archs:
            self.check_pra(project, repository, arch)
Example #15
    def perform(self, save=False):
        duplicates = {}
        for arch in sorted(target_archs(self.api.apiurl, self.api.project),
                           reverse=True):
            package_binaries, _ = package_binary_list(self.api.apiurl,
                                                      self.api.project,
                                                      'standard',
                                                      arch,
                                                      strip_multibuild=False,
                                                      exclude_src_debug=True)
            binaries = {}
            for pb in package_binaries:
                if pb.arch != 'noarch' and pb.arch != arch:
                    continue

                binaries.setdefault(arch, {})

                if pb.name in binaries[arch]:
                    duplicates.setdefault(arch, {})
                    duplicates[arch].setdefault(pb.name, set())
                    duplicates[arch][pb.name].add(pb.package)
                    duplicates[arch][pb.name].add(binaries[arch][pb.name])

                    continue

                binaries[arch][pb.name] = pb.package

        # convert sets to lists for readable yaml
        for arch in duplicates.keys():
            for name in duplicates[arch].keys():
                duplicates[arch][name] = list(duplicates[arch][name])

        current = yaml.dump(duplicates, default_flow_style=False)
        if save:
            args = [
                '{}:Staging'.format(self.api.project), 'dashboard',
                'duplicate_binaries'
            ]
            previous = self.api.load_file_content(*args)
            if current != previous:
                args.append(current)
                self.api.save_file_content(*args)
        else:
            print(current)
Example #16
    def check_action_maintenance_release(self, request, action):
        # No reason to special case patchinfo since same source and target
        # projects which is all that repo_checker cares about.

        if action.tgt_project in self.checked_targets:
            return True

        target_config = Config.get(self.apiurl, action.tgt_project)
        if str2bool(target_config.get('repo_checker-project-skip', 'False')):
            # Do not change message as this should only occur in requests
            # targeting multiple projects such as in maintenance workflow in
            # which the message should be set by other actions.
            self.logger.debug('skipping review of action targeting {}'.format(
                action.tgt_project))
            return True

        repository = target_config.get('main-repo')
        if not repository:
            raise Exception('Missing main-repo in OSRT:Config')

        # Find a repository which links to target project "main" repository.
        repository = repository_path_search(self.apiurl, action.src_project,
                                            action.tgt_project, repository)
        if not repository:
            raise Exception('Missing repositories')

        repository_pairs = repository_path_expand(self.apiurl,
                                                  action.src_project,
                                                  repository)

        self.checked_targets.add(action.tgt_project)
        archs = set(target_archs(self.apiurl, action.src_project, repository))
        arch_whitelist = target_config.get('repo_checker-arch-whitelist', None)
        if arch_whitelist:
            archs = set(arch_whitelist.split(' ')).intersection(archs)

        if not self.repository_check(repository_pairs, archs):
            return None

        self.review_messages['accepted'] = 'install check passed'
        return True
Example #17
    def check(self, project, repository):
        self.project = project
        if not repository:
            repository = self.project_repository(project)
        if not repository:
            self.logger.error(
                'a repository must be specified via OSRT:Config main-repo for {}'
                .format(project))
            return
        self.repository = repository

        archs = target_archs(self.apiurl, project, repository)
        if not len(archs):
            self.logger.debug(
                '{} has no relevant architectures'.format(project))
            return None

        for arch in archs:
            self.arch = arch
            state = self.check_pra(project, repository, arch)

        if self.comment:
            self.create_comments(state)
Example #18
    def update_and_solve_target(self, api, target_project, target_config, main_repo,
                                project, scope, force, no_checkout,
                                only_release_packages, stop_after_solve):
        self.all_architectures = target_config.get('pkglistgen-archs').split(' ')
        ignore_repos = set(target_config.get('pkglistgen-ignore_repos', '').split(' '))
        self.repos = [r for r in self.expand_repos(project, main_repo) if r[0] != project or r[1] not in ignore_repos]
        print('[{}] {}/{}: update and solve'.format(scope, project, main_repo))

        group = target_config.get('pkglistgen-group', '000package-groups')
        product = target_config.get('pkglistgen-product', '000product')
        release = target_config.get('pkglistgen-release', '000release-packages')
        oldrepos = target_config.get('pkglistgen-repos', '000update-repos')

        url = api.makeurl(['source', project])
        packages = ET.parse(http_GET(url)).getroot()
        if packages.find('entry[@name="{}"]'.format(product)) is None:
            if not self.dry_run:
                undelete_package(api.apiurl, project, product, 'revive')
            # TODO disable build.
            print('{} undeleted, skip dvd until next cycle'.format(product))
            return
        elif not force:
            root = ET.fromstringlist(show_results_meta(api.apiurl, project, product,
                                                       repository=[main_repo], multibuild=True))
            if len(root.xpath('result[@state="building"]')) or len(root.xpath('result[@state="dirty"]')):
                print('{}/{} build in progress'.format(project, product))
                return

        drop_list = api.item_exists(project, oldrepos)
        checkout_list = [group, product, release]
        if drop_list and not only_release_packages:
            checkout_list.append(oldrepos)

        if packages.find('entry[@name="{}"]'.format(release)) is None:
            if not self.dry_run:
                undelete_package(api.apiurl, project, release, 'revive')
            print('{} undeleted, skip dvd until next cycle'.format(release))
            return

        # Cache dir specific to hostname and project.
        host = urlparse(api.apiurl).hostname
        cache_dir = CacheManager.directory('pkglistgen', host, project)

        if not no_checkout:
            if os.path.exists(cache_dir):
                shutil.rmtree(cache_dir)
            os.makedirs(cache_dir)

        group_dir = os.path.join(cache_dir, group)
        product_dir = os.path.join(cache_dir, product)
        release_dir = os.path.join(cache_dir, release)
        oldrepos_dir = os.path.join(cache_dir, oldrepos)

        for package in checkout_list:
            if no_checkout:
                print('Skipping checkout of {}/{}'.format(project, package))
                continue
            checkout_package(api.apiurl, project, package, expand_link=True, prj_dir=cache_dir)

        file_utils.unlink_all_except(release_dir, ['weakremovers.inc'])
        if not only_release_packages:
            file_utils.unlink_all_except(product_dir)
        file_utils.copy_directory_contents(group_dir, product_dir,
                                     ['supportstatus.txt', 'groups.yml',
                                      'reference-unsorted.yml', 'reference-summary.yml',
                                      'package-groups.changes'])
        file_utils.change_extension(product_dir, '.spec.in', '.spec')
        file_utils.change_extension(product_dir, '.product.in', '.product')

        self.input_dir = group_dir
        self.output_dir = product_dir

        print('-> do_update')
        # make sure we only calculate existing architectures
        self.filter_architectures(target_archs(api.apiurl, project, main_repo))
        self.update_repos(self.filtered_architectures)

        if only_release_packages:
            self.load_all_groups()
            self.write_group_stubs()
        else:
            summary = self.solve_project(ignore_unresolvable=str2bool(target_config.get('pkglistgen-ignore-unresolvable')),
                                         ignore_recommended=str2bool(target_config.get('pkglistgen-ignore-recommended')),
                                         locale=target_config.get('pkglistgen-locale'),
                                         locales_from=target_config.get('pkglistgen-locales-from'))

        if stop_after_solve:
            return

        if drop_list:
            weakremovers_file = os.path.join(release_dir, 'weakremovers.inc')
            self.create_weakremovers(project, target_config, oldrepos_dir, output=open(weakremovers_file, 'w'))

        delete_products = target_config.get('pkglistgen-delete-products', '').split(' ')
        file_utils.unlink_list(product_dir, delete_products)

        print('-> product service')
        product_version = attribute_value_load(api.apiurl, project, 'ProductVersion')
        if not product_version:
            # for stagings the product version doesn't matter (I hope)
            product_version = '1'
        for product_file in glob.glob(os.path.join(product_dir, '*.product')):
            self.replace_product_version(product_file, product_version)
            print(subprocess.check_output(
                [PRODUCT_SERVICE, product_file, product_dir, project], encoding='utf-8'))

        for delete_kiwi in target_config.get('pkglistgen-delete-kiwis-{}'.format(scope), '').split(' '):
            delete_kiwis = glob.glob(os.path.join(product_dir, delete_kiwi))
            file_utils.unlink_list(product_dir, delete_kiwis)
        if scope == 'staging':
            self.strip_medium_from_staging(product_dir)

        spec_files = glob.glob(os.path.join(product_dir, '*.spec'))
        file_utils.move_list(spec_files, release_dir)
        inc_files = glob.glob(os.path.join(group_dir, '*.inc'))
        # filter out the special inc file; weakremovers.inc is handled separately below
        inc_files = filter(lambda file: not file.endswith('weakremovers.inc'), inc_files)
        file_utils.move_list(inc_files, release_dir)

        # do not overwrite weakremovers.inc if it exists
        # we will commit there afterwards if needed
        if os.path.exists(os.path.join(group_dir, 'weakremovers.inc')) and \
           not os.path.exists(os.path.join(release_dir, 'weakremovers.inc')):
            file_utils.move_list([os.path.join(group_dir, 'weakremovers.inc')], release_dir)

        file_utils.multibuild_from_glob(release_dir, '*.spec')
        self.build_stub(release_dir, 'spec')
        self.commit_package(release_dir)

        if only_release_packages:
            return

        file_utils.multibuild_from_glob(product_dir, '*.kiwi')
        self.build_stub(product_dir, 'kiwi')
        self.commit_package(product_dir)

        error_output = b''
        reference_summary = os.path.join(group_dir, 'reference-summary.yml')
        if os.path.isfile(reference_summary):
            summary_file = os.path.join(product_dir, 'summary.yml')
            with open(summary_file, 'w') as f:
                f.write('# Summary of packages in groups')
                for group in sorted(summary):
                    # the unsorted group should appear filtered by
                    # unneeded.yml - so we need the content of unsorted.yml
                    # not unsorted.group (this grew a little unnaturally)
                    if group == 'unsorted':
                        continue
                    f.write('\n' + group + ':\n')
                    for package in sorted(summary[group]):
                        f.write('  - ' + package + '\n')

            try:
                error_output += subprocess.check_output(['diff', '-u', reference_summary, summary_file])
            except subprocess.CalledProcessError as e:
                error_output += e.output
            reference_unsorted = os.path.join(group_dir, 'reference-unsorted.yml')
            unsorted_file = os.path.join(product_dir, 'unsorted.yml')
            try:
                error_output += subprocess.check_output(['diff', '-u', reference_unsorted, unsorted_file])
            except subprocess.CalledProcessError as e:
                error_output += e.output

        if len(error_output) > 0:
            self.logger.error('Difference in yml:\n' + error_output.decode('utf-8'))
            return True
Example #19
    def repository_check(self,
                         repository_pairs,
                         state_hash,
                         simulate_merge,
                         post_comments=False):
        comment = []
        project, repository = repository_pairs[0]
        self.logger.info('checking {}/{}@{}[{}]'.format(
            project, repository, state_hash, len(repository_pairs)))

        published = repositories_published(self.apiurl, repository_pairs)

        if not self.force:
            if state_hash == self.repository_state_last(
                    project, repository, not simulate_merge):
                self.logger.info('{} build unchanged'.format(project))
                # TODO keep track of skipped count for cycle summary
                return None

            # For submit style requests, want to process if top layer is done,
            # but not mark review as final until all layers are published.
            if published is not True and (not simulate_merge
                                          or published[0] == project):
                # Require all layers to be published except when the top layer
                # is published in a simulate merge (allows quicker feedback with
                # potentially incorrect results for staging).
                self.logger.info('{}/{} not published'.format(
                    published[0], published[1]))
                return None

        # Drop non-published repository information and thus reduce to boolean.
        published = published is True

        if simulate_merge:
            # Restrict top layer archs to the whitelisted archs from merge layer.
            archs = set(target_archs(self.apiurl, project,
                                     repository)).intersection(
                                         set(
                                             self.target_archs(
                                                 repository_pairs[1][0],
                                                 repository_pairs[1][1])))
        else:
            # Top of pseudometa file.
            comment.append(state_hash)
            archs = self.target_archs(project, repository)

            if post_comments:
                # Stores parsed install_check() results grouped by package.
                self.package_results = {}

        if not len(archs):
            self.logger.debug(
                '{} has no relevant architectures'.format(project))
            return None

        result = True
        for arch in archs:
            directories = []
            for pair_project, pair_repository in repository_pairs:
                directories.append(
                    self.mirror(pair_project, pair_repository, arch))

            if simulate_merge:
                ignore = self.simulated_merge_ignore(repository_pairs[0],
                                                     repository_pairs[1], arch)
                whitelist = self.binary_whitelist(repository_pairs[0],
                                                  repository_pairs[1], arch)

                results = {
                    'cycle':
                    self.cycle_check(repository_pairs[0], repository_pairs[1],
                                     arch),
                    'install':
                    self.install_check(repository_pairs[1], arch, directories,
                                       ignore, whitelist),
                }
            else:
                # Only products themselves will want no-filter or perhaps
                # projects working on cleaning up a product.
                no_filter = str2bool(
                    Config.get(self.apiurl,
                               project).get('repo_checker-no-filter'))
                results = {
                    'cycle':
                    CheckResult(True, None),
                    'install':
                    self.install_check(repository_pairs[0],
                                       arch,
                                       directories,
                                       parse=post_comments,
                                       no_filter=no_filter),
                }

            if not all(check.success for check in results.values()):
                # Not all checks passed, build comment.
                result = False
                self.result_comment(repository, arch, results, comment)

        if simulate_merge:
            info_extra = {'build': state_hash}
            if not result:
                # Some checks in group did not pass, post comment.
                # Avoid identical comments with different build hash during
                # target project build phase. Once published update regardless.
                self.comment_write(state='seen',
                                   result='failed',
                                   project=project,
                                   message='\n'.join(comment).strip(),
                                   identical=True,
                                   info_extra=info_extra,
                                   info_extra_identical=published,
                                   bot_name_suffix=repository)
            else:
                # Post passed comment only if previous failed comment.
                text = 'Previously reported problems have been resolved.'
                self.comment_write(state='done',
                                   result='passed',
                                   project=project,
                                   message=text,
                                   identical=True,
                                   only_replace=True,
                                   info_extra=info_extra,
                                   bot_name_suffix=repository)
        else:
            text = '\n'.join(comment).strip()
            if not self.dryrun:
                filename = self.project_pseudometa_file_name(
                    project, repository)
                project_pseudometa_file_ensure(
                    self.apiurl, project, filename, text + '\n',
                    'repo_checker project_only run')
            else:
                print(text)

            if post_comments:
                self.package_comments(project, repository)

        if result and not published:
            # Wait for the complete stack to build before positive result.
            self.logger.debug(
                'demoting result from accept to ignore due to non-published layer'
            )
            result = None

        return result
Example #20
    def update_and_solve_target(self, api, target_project, target_config, main_repo,
                                project, scope, force, no_checkout,
                                only_release_packages, stop_after_solve):
        self.all_architectures = target_config.get('pkglistgen-archs').split(' ')
        self.repos = self.expand_repos(project, main_repo)
        print('[{}] {}/{}: update and solve'.format(scope, project, main_repo))

        group = target_config.get('pkglistgen-group', '000package-groups')
        product = target_config.get('pkglistgen-product', '000product')
        release = target_config.get('pkglistgen-release', '000release-packages')
        oldrepos = target_config.get('pkglistgen-repos', '000update-repos')

        url = api.makeurl(['source', project])
        packages = ET.parse(http_GET(url)).getroot()
        if packages.find('entry[@name="{}"]'.format(product)) is None:
            if not self.dry_run:
                undelete_package(api.apiurl, project, product, 'revive')
            # TODO disable build.
            print('{} undeleted, skip dvd until next cycle'.format(product))
            return
        elif not force:
            root = ET.fromstringlist(show_results_meta(api.apiurl, project, product,
                                                       repository=[main_repo], multibuild=True))
            if len(root.xpath('result[@state="building"]')) or len(root.xpath('result[@state="dirty"]')):
                print('{}/{} build in progress'.format(project, product))
                return

        drop_list = api.item_exists(project, oldrepos)
        checkout_list = [group, product, release]
        if drop_list and not only_release_packages:
            checkout_list.append(oldrepos)

        if packages.find('entry[@name="{}"]'.format(release)) is None:
            if not self.dry_run:
                undelete_package(api.apiurl, project, release, 'revive')
            print('{} undeleted, skip dvd until next cycle'.format(release))
            return

        # Cache dir specific to hostname and project.
        host = urlparse(api.apiurl).hostname
        cache_dir = CacheManager.directory('pkglistgen', host, project)

        if not no_checkout:
            if os.path.exists(cache_dir):
                shutil.rmtree(cache_dir)
            os.makedirs(cache_dir)

        group_dir = os.path.join(cache_dir, group)
        product_dir = os.path.join(cache_dir, product)
        release_dir = os.path.join(cache_dir, release)
        oldrepos_dir = os.path.join(cache_dir, oldrepos)

        for package in checkout_list:
            if no_checkout:
                print('Skipping checkout of {}/{}'.format(project, package))
                continue
            checkout_package(api.apiurl, project, package, expand_link=True, prj_dir=cache_dir)

        file_utils.unlink_all_except(release_dir, ['weakremovers.inc'])
        if not only_release_packages:
            file_utils.unlink_all_except(product_dir)
        file_utils.copy_directory_contents(group_dir, product_dir,
                                     ['supportstatus.txt', 'groups.yml',
                                      'reference-unsorted.yml', 'reference-summary.yml',
                                      'package-groups.changes'])
        file_utils.change_extension(product_dir, '.spec.in', '.spec')
        file_utils.change_extension(product_dir, '.product.in', '.product')

        self.input_dir = group_dir
        self.output_dir = product_dir

        print('-> do_update')
        # make sure we only calculate existing architectures
        self.filter_architectures(target_archs(api.apiurl, project, main_repo))
        self.update_repos(self.filtered_architectures)

        if only_release_packages:
            self.load_all_groups()
            self.write_group_stubs()
        else:
            summary = self.solve_project(ignore_unresolvable=str2bool(target_config.get('pkglistgen-ignore-unresolvable')),
                                         ignore_recommended=str2bool(target_config.get('pkglistgen-ignore-recommended')),
                                         locale=target_config.get('pkglistgen-locale'),
                                         locales_from=target_config.get('pkglistgen-locales-from'))

        if stop_after_solve:
            return

        if drop_list:
            weakremovers_file = os.path.join(release_dir, 'weakremovers.inc')
            self.create_weakremovers(project, target_config, oldrepos_dir, output=open(weakremovers_file, 'w'))

        delete_products = target_config.get('pkglistgen-delete-products', '').split(' ')
        file_utils.unlink_list(product_dir, delete_products)

        print('-> product service')
        product_version = attribute_value_load(api.apiurl, project, 'ProductVersion')
        if not product_version:
            # for stagings the product version doesn't matter (I hope)
            product_version = '1'
        for product_file in glob.glob(os.path.join(product_dir, '*.product')):
            self.replace_product_version(product_file, product_version)
            print(subprocess.check_output(
                [PRODUCT_SERVICE, product_file, product_dir, project], encoding='utf-8'))

        for delete_kiwi in target_config.get('pkglistgen-delete-kiwis-{}'.format(scope), '').split(' '):
            delete_kiwis = glob.glob(os.path.join(product_dir, delete_kiwi))
            file_utils.unlink_list(product_dir, delete_kiwis)
        if scope == 'staging':
            self.strip_medium_from_staging(product_dir)

        spec_files = glob.glob(os.path.join(product_dir, '*.spec'))
        file_utils.move_list(spec_files, release_dir)
        inc_files = glob.glob(os.path.join(group_dir, '*.inc'))
        # filter out the special inc file; weakremovers.inc is handled separately below
        inc_files = filter(lambda file: not file.endswith('weakremovers.inc'), inc_files)
        file_utils.move_list(inc_files, release_dir)

        # do not overwrite weakremovers.inc if it exists
        # we will commit there afterwards if needed
        if not os.path.exists(os.path.join(release_dir, 'weakremovers.inc')):
            file_utils.move_list([os.path.join(group_dir, 'weakremovers.inc')], release_dir)

        file_utils.multibuild_from_glob(release_dir, '*.spec')
        self.build_stub(release_dir, 'spec')
        self.commit_package(release_dir)

        if only_release_packages:
            return

        file_utils.multibuild_from_glob(product_dir, '*.kiwi')
        self.build_stub(product_dir, 'kiwi')
        self.commit_package(product_dir)

        error_output = b''
        reference_summary = os.path.join(group_dir, 'reference-summary.yml')
        if os.path.isfile(reference_summary):
            summary_file = os.path.join(product_dir, 'summary.yml')
            with open(summary_file, 'w') as f:
                f.write('# Summary of packages in groups')
                for group in sorted(summary):
                    # the unsorted group should appear filtered by
                    # unneeded.yml - so we need the content of unsorted.yml
                    # not unsorted.group (this grew a little unnaturally)
                    if group == 'unsorted':
                        continue
                    f.write('\n' + group + ':\n')
                    for package in sorted(summary[group]):
                        f.write('  - ' + package + '\n')

            try:
                error_output += subprocess.check_output(['diff', '-u', reference_summary, summary_file])
            except subprocess.CalledProcessError as e:
                error_output += e.output
            reference_unsorted = os.path.join(group_dir, 'reference-unsorted.yml')
            unsorted_file = os.path.join(product_dir, 'unsorted.yml')
            try:
                error_output += subprocess.check_output(['diff', '-u', reference_unsorted, unsorted_file])
            except subprocess.CalledProcessError as e:
                error_output += e.output

        if len(error_output) > 0:
            self.logger.error('Difference in yml:\n' + error_output.decode('utf-8'))
            return True
Example #21
    parser.add_argument('-A',
                        '--apiurl',
                        metavar='URL',
                        help='API URL',
                        required=True)
    parser.add_argument('-p',
                        '--project',
                        metavar='PROJECT',
                        help='Project',
                        required=True)
    parser.add_argument('-r',
                        '--repository',
                        metavar='REPOSITORY',
                        help='Repository',
                        required=True)
    parser.add_argument('-s',
                        '--state',
                        metavar='STATE',
                        help='Status to report',
                        required=True)

    args = parser.parse_args()
    # Configure OSC
    osc.conf.get_config(override_apiurl=args.apiurl)
    #osc.conf.config['debug'] = 1

    architectures = sorted(
        target_archs(args.apiurl, args.project, args.repository))
    for arch in architectures:
        report_pipeline(args, arch, arch == architectures[-1])
Example #22
    parser = argparse.ArgumentParser(description='Check if all packages built fine')
    parser.add_argument('--apiurl', '-A', type=str, help='API URL of OBS')
    parser.add_argument('-p', '--project', type=str, help='Project to check')
    parser.add_argument('-r', '--repository', type=str,
                        help='Repository to check')

    args = parser.parse_args()

    osc.conf.get_config(override_apiurl=args.apiurl)
    apiurl = osc.conf.config['apiurl']

    logging.basicConfig(level=logging.DEBUG)
    logger = logging.getLogger(__name__)

    # first check if repo is finished
    archs = target_archs(apiurl, args.project, args.repository)
    for arch in archs:
        url = makeurl(apiurl, ['build', args.project, args.repository, arch], {'view': 'status'})
        root = ET.parse(http_GET(url)).getroot()
        if root.get('code') == 'finished':
            continue
        logger.error('Repository {}/{}/{} is not yet finished'.format(args.project, args.repository, arch))
        logger.debug(ET.tostring(root).decode('utf-8'))
        sys.exit(1)

    # now check if all packages built fine
    url = makeurl(apiurl, ['build', args.project, '_result'],
                  {'view': 'summary', 'repository': args.repository})
    root = ET.parse(http_GET(url)).getroot()
    for count in root.findall('.//statuscount'):
        if int(count.get('count', 0)) == 0:
            continue
        # Assumed continuation (the original listing is truncated here): any
        # remaining non-zero count for a failed state means the build is broken.
        if count.get('code') not in ('succeeded', 'excluded'):
            logger.error('{} packages in state {}'.format(
                count.get('count'), count.get('code')))
            sys.exit(1)
Example #23
    def update_and_solve_target(self, api, target_project, target_config,
                                main_repo, project, scope, force, no_checkout,
                                only_release_packages, stop_after_solve):
        self.all_architectures = target_config.get('pkglistgen-archs').split(
            ' ')
        self.use_newest_version = str2bool(
            target_config.get('pkglistgen-use-newest-version', 'False'))
        self.repos = self.expand_repos(project, main_repo)
        logging.debug('[{}] {}/{}: update and solve'.format(
            scope, project, main_repo))

        group = target_config.get('pkglistgen-group', '000package-groups')
        product = target_config.get('pkglistgen-product', '000product')
        release = target_config.get('pkglistgen-release',
                                    '000release-packages')
        oldrepos = target_config.get('pkglistgen-repos', '000update-repos')

        url = api.makeurl(['source', project])
        packages = ET.parse(http_GET(url)).getroot()
        if packages.find('entry[@name="{}"]'.format(product)) is None:
            if not self.dry_run:
                undelete_package(api.apiurl, project, product, 'revive')
            # TODO disable build.
            logging.info(
                '{} undeleted, skip dvd until next cycle'.format(product))
            return
        elif not force:
            root = ET.fromstringlist(
                show_results_meta(api.apiurl,
                                  project,
                                  product,
                                  repository=[main_repo],
                                  multibuild=True))
            if len(root.xpath('result[@state="building"]')) or len(
                    root.xpath('result[@state="dirty"]')):
                logging.info('{}/{} build in progress'.format(
                    project, product))
                return

        drop_list = api.item_exists(project, oldrepos)
        checkout_list = [group, product, release]
        if drop_list and not only_release_packages:
            checkout_list.append(oldrepos)

        if packages.find('entry[@name="{}"]'.format(release)) is None:
            if not self.dry_run:
                undelete_package(api.apiurl, project, release, 'revive')
            logging.info(
                '{} undeleted, skip dvd until next cycle'.format(release))
            return

        # Cache dir specific to hostname and project.
        host = urlparse(api.apiurl).hostname
        cache_dir = CacheManager.directory('pkglistgen', host, project)

        if not no_checkout:
            if os.path.exists(cache_dir):
                shutil.rmtree(cache_dir)
            os.makedirs(cache_dir)

        group_dir = os.path.join(cache_dir, group)
        product_dir = os.path.join(cache_dir, product)
        release_dir = os.path.join(cache_dir, release)
        oldrepos_dir = os.path.join(cache_dir, oldrepos)

        self.input_dir = group_dir
        self.output_dir = product_dir

        for package in checkout_list:
            if no_checkout:
                logging.debug('Skipping checkout of {}/{}'.format(
                    project, package))
                continue
            checkout_package(api.apiurl,
                             project,
                             package,
                             expand_link=True,
                             prj_dir=cache_dir,
                             outdir=os.path.join(cache_dir, package))

        file_utils.unlink_all_except(release_dir, ['weakremovers.inc'])
        if not only_release_packages:
            file_utils.unlink_all_except(product_dir)
        ignore_list = [
            'supportstatus.txt', 'summary-staging.txt',
            'package-groups.changes'
        ]
        ignore_list += self.group_input_files()
        file_utils.copy_directory_contents(group_dir, product_dir, ignore_list)
        file_utils.change_extension(product_dir, '.spec.in', '.spec')
        file_utils.change_extension(product_dir, '.product.in', '.product')

        logging.debug('-> do_update')
        # make sure we only calculate existing architectures
        self.filter_architectures(target_archs(api.apiurl, project, main_repo))
        self.update_repos(self.filtered_architectures)

        if only_release_packages:
            self.load_all_groups()
            self.write_group_stubs()
        else:
            summary = self.solve_project(
                ignore_unresolvable=str2bool(
                    target_config.get('pkglistgen-ignore-unresolvable')),
                ignore_recommended=str2bool(
                    target_config.get('pkglistgen-ignore-recommended')),
                locale=target_config.get('pkglistgen-locale'),
                locales_from=target_config.get('pkglistgen-locales-from'))

        if stop_after_solve:
            return

        if drop_list and not only_release_packages:
            weakremovers_file = os.path.join(release_dir, 'weakremovers.inc')
            try:
                self.create_weakremovers(project,
                                         target_config,
                                         oldrepos_dir,
                                         output=open(weakremovers_file, 'w'))
            except MismatchedRepoException:
                logging.error(
                    "Failed to create weakremovers.inc due to mismatch in repos - project most likey started building again."
                )
                return

        delete_products = target_config.get('pkglistgen-delete-products',
                                            '').split(' ')
        file_utils.unlink_list(product_dir, delete_products)

        logging.debug('-> product service')
        product_version = attribute_value_load(api.apiurl, project,
                                               'ProductVersion')
        if not product_version:
            # for stagings the product version doesn't matter (I hope)
            product_version = '1'
        for product_file in glob.glob(os.path.join(product_dir, '*.product')):
            self.replace_product_version(product_file, product_version)
            logging.debug(
                subprocess.check_output(
                    [PRODUCT_SERVICE, product_file, product_dir, project],
                    encoding='utf-8'))

        for delete_kiwi in target_config.get(
                'pkglistgen-delete-kiwis-{}'.format(scope), '').split(' '):
            delete_kiwis = glob.glob(os.path.join(product_dir, delete_kiwi))
            file_utils.unlink_list(product_dir, delete_kiwis)
        if scope == 'staging':
            self.strip_medium_from_staging(product_dir)

        spec_files = glob.glob(os.path.join(product_dir, '*.spec'))
        file_utils.move_list(spec_files, release_dir)
        inc_files = glob.glob(os.path.join(group_dir, '*.inc'))
        # filter out the special inc file; weakremovers.inc is handled separately below
        inc_files = filter(lambda file: not file.endswith('weakremovers.inc'),
                           inc_files)
        file_utils.move_list(inc_files, release_dir)

        # do not overwrite weakremovers.inc if it exists
        # we will commit there afterwards if needed
        if os.path.exists(os.path.join(group_dir, 'weakremovers.inc')) and \
           not os.path.exists(os.path.join(release_dir, 'weakremovers.inc')):
            file_utils.move_list([os.path.join(group_dir, 'weakremovers.inc')],
                                 release_dir)

        file_utils.multibuild_from_glob(release_dir, '*.spec')
        self.build_stub(release_dir, 'spec')
        self.commit_package(release_dir)

        if only_release_packages:
            return

        file_utils.multibuild_from_glob(product_dir, '*.kiwi')
        self.build_stub(product_dir, 'kiwi')

        reference_summary = os.path.join(group_dir, f'summary-{scope}.txt')
        if os.path.isfile(reference_summary):
            summary_file = os.path.join(product_dir, f'summary-{scope}.txt')
            output = []
            for group in summary:
                for package in sorted(summary[group]):
                    output.append(f'{package}:{group}')

            with open(summary_file, 'w') as f:
                for line in sorted(output):
                    f.write(line + '\n')

        self.commit_package(product_dir)

        if os.path.isfile(reference_summary):
            return self.comment.handle_package_diff(project, reference_summary,
                                                    summary_file)
Example #24
def check_xml(url, state, name):
    check = ET.Element('check')
    if url:
        se = ET.SubElement(check, 'url')
        se.text = url
    se = ET.SubElement(check, 'state')
    se.text = state
    se = ET.SubElement(check, 'name')
    se.text = name
    return ET.tostring(check)
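
For illustration, check_xml('https://ci.example.org/job/42', 'success', 'installcheck') returns bytes along the lines of:

b'<check><url>https://ci.example.org/job/42</url><state>success</state><name>installcheck</name></check>'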

if __name__ == '__main__':
    description = 'Create SR from FactoryCandidates to '\
                  'openSUSE Leap project for new build succeeded packages.'
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('-A', '--apiurl', metavar='URL', help='API URL', required=True)
    parser.add_argument('-p', '--project', metavar='PROJECT', help='Project', required=True)
    parser.add_argument('-r', '--repository', metavar='REPOSITORY', help='Repository', required=True)
    parser.add_argument('-s', '--state', metavar='STATE', help='Status to report', required=True)
    parser.add_argument('-n', '--name', metavar='NAME', help='Name of status check', required=True)

    args = parser.parse_args()
    # Configure OSC
    osc.conf.get_config(override_apiurl=args.apiurl)
    #osc.conf.config['debug'] = 1

    architectures = sorted(target_archs(args.apiurl, args.project, args.repository))
    for arch in architectures:
        report_pipeline(args, arch, arch == architectures[-1])