Example #1
def repository_published(apiurl, project, repository, archs=[]):
    # In a perfect world this would check for the existence of imports from i586
    # into x86_64, but in an even more perfect world OBS would show archs that
    # depend on another arch for imports as not completed until the dependent
    # arch completes. This is a simplified check that ensures x86_64 repos are
    # not indicated as published when i586 has not finished which is primarily
    # useful for repo_checker when only checking x86_64. The API treats archs as
    # a filter on what to return and thus non-existent archs do not cause an
    # issue nor alter the result.
    if 'x86_64' in archs and 'i586' not in archs:
        # Create a copy to avoid altering caller's list.
        archs = list(archs)
        archs.append('i586')

    root = ETL.fromstringlist(show_results_meta(
        apiurl, project, multibuild=True, repository=[repository], arch=archs))
    return not len(root.xpath('result[@state!="published" and @state!="unpublished"]'))
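A minimal usage sketch follows. The snippet does not show its imports; it is assumed here that ETL is lxml.etree and that show_results_meta comes from osc.core, and the project and repository names are illustrative only.

from lxml import etree as ETL
from osc.core import show_results_meta

apiurl = 'https://api.opensuse.org'  # hypothetical OBS API endpoint
# i586 is appended internally when only x86_64 is requested, so this call
# also waits for the i586 results before reporting the repo as published.
if repository_published(apiurl, 'openSUSE:Factory', 'standard', archs=['x86_64']):
    print('standard repository fully published')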
Example #2
    def getPackageStatus(self, project, package):
        """
        getPackageStatus(project, package) -> dict

        Returns the status of a package as a dict with targets as the keys
        and status codes as the values.
        """
        status = {}
        tree = ElementTree.fromstring(''.join(core.show_results_meta(self.apiurl, project, package)))
        for result in tree.findall('result'):
            target = '/'.join((result.get('repository'), result.get('arch')))
            statusnode = result.find('status')
            code = statusnode.get('code')
            details = statusnode.find('details')
            if details is not None:
                code += ': ' + details.text
            status[target] = code
        return status
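A hedged usage sketch: assuming client is an instance of the (unshown) class that defines this method and that core is osc.core, the returned dict maps 'repository/arch' targets to status codes.

status = client.getPackageStatus('openSUSE:Factory', 'osc')  # hypothetical project/package
for target, code in sorted(status.items()):
    # e.g. 'standard/x86_64' -> 'succeeded', or 'failed: <details>' when present
    print('{:30} {}'.format(target, code))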
Example #3
    def update_and_solve_target(self, api, target_project, target_config, main_repo,
                                project, scope, force, no_checkout,
                                only_release_packages, stop_after_solve):
        self.all_architectures = target_config.get('pkglistgen-archs').split(' ')
        ignore_repos = set(target_config.get('pkglistgen-ignore_repos', '').split(' '))
        self.repos = [r for r in self.expand_repos(project, main_repo)
                      if r[0] != project or r[1] not in ignore_repos]
        print('[{}] {}/{}: update and solve'.format(scope, project, main_repo))

        group = target_config.get('pkglistgen-group', '000package-groups')
        product = target_config.get('pkglistgen-product', '000product')
        release = target_config.get('pkglistgen-release', '000release-packages')
        oldrepos = target_config.get('pkglistgen-repos', '000update-repos')

        url = api.makeurl(['source', project])
        packages = ET.parse(http_GET(url)).getroot()
        if packages.find('entry[@name="{}"]'.format(product)) is None:
            if not self.dry_run:
                undelete_package(api.apiurl, project, product, 'revive')
            # TODO disable build.
            print('{} undeleted, skip dvd until next cycle'.format(product))
            return
        elif not force:
            root = ET.fromstringlist(show_results_meta(api.apiurl, project, product,
                                                       repository=[main_repo], multibuild=True))
            if len(root.xpath('result[@state="building"]')) or len(root.xpath('result[@state="dirty"]')):
                print('{}/{} build in progress'.format(project, product))
                return

        drop_list = api.item_exists(project, oldrepos)
        checkout_list = [group, product, release]
        if drop_list and not only_release_packages:
            checkout_list.append(oldrepos)

        if packages.find('entry[@name="{}"]'.format(release)) is None:
            if not self.dry_run:
                undelete_package(api.apiurl, project, release, 'revive')
            print('{} undeleted, skip dvd until next cycle'.format(release))
            return

        # Cache dir specific to hostname and project.
        host = urlparse(api.apiurl).hostname
        cache_dir = CacheManager.directory('pkglistgen', host, project)

        if not no_checkout:
            if os.path.exists(cache_dir):
                shutil.rmtree(cache_dir)
            os.makedirs(cache_dir)

        group_dir = os.path.join(cache_dir, group)
        product_dir = os.path.join(cache_dir, product)
        release_dir = os.path.join(cache_dir, release)
        oldrepos_dir = os.path.join(cache_dir, oldrepos)

        for package in checkout_list:
            if no_checkout:
                print('Skipping checkout of {}/{}'.format(project, package))
                continue
            checkout_package(api.apiurl, project, package, expand_link=True, prj_dir=cache_dir)

        file_utils.unlink_all_except(release_dir, ['weakremovers.inc'])
        if not only_release_packages:
            file_utils.unlink_all_except(product_dir)
        file_utils.copy_directory_contents(group_dir, product_dir,
                                           ['supportstatus.txt', 'groups.yml',
                                            'reference-unsorted.yml', 'reference-summary.yml',
                                            'package-groups.changes'])
        file_utils.change_extension(product_dir, '.spec.in', '.spec')
        file_utils.change_extension(product_dir, '.product.in', '.product')

        self.input_dir = group_dir
        self.output_dir = product_dir

        print('-> do_update')
        # make sure we only calculate existing architectures
        self.filter_architectures(target_archs(api.apiurl, project, main_repo))
        self.update_repos(self.filtered_architectures)

        if only_release_packages:
            self.load_all_groups()
            self.write_group_stubs()
        else:
            summary = self.solve_project(ignore_unresolvable=str2bool(target_config.get('pkglistgen-ignore-unresolvable')),
                                         ignore_recommended=str2bool(target_config.get('pkglistgen-ignore-recommended')),
                                         locale=target_config.get('pkglistgen-locale'),
                                         locales_from=target_config.get('pkglistgen-locales-from'))

        if stop_after_solve:
            return

        if drop_list:
            weakremovers_file = os.path.join(release_dir, 'weakremovers.inc')
            self.create_weakremovers(project, target_config, oldrepos_dir, output=open(weakremovers_file, 'w'))

        delete_products = target_config.get('pkglistgen-delete-products', '').split(' ')
        file_utils.unlink_list(product_dir, delete_products)

        print('-> product service')
        product_version = attribute_value_load(api.apiurl, project, 'ProductVersion')
        if not product_version:
            # for stagings the product version doesn't matter (I hope)
            product_version = '1'
        for product_file in glob.glob(os.path.join(product_dir, '*.product')):
            self.replace_product_version(product_file, product_version)
            print(subprocess.check_output(
                [PRODUCT_SERVICE, product_file, product_dir, project], encoding='utf-8'))

        for delete_kiwi in target_config.get('pkglistgen-delete-kiwis-{}'.format(scope), '').split(' '):
            delete_kiwis = glob.glob(os.path.join(product_dir, delete_kiwi))
            file_utils.unlink_list(product_dir, delete_kiwis)
        if scope == 'staging':
            self.strip_medium_from_staging(product_dir)

        spec_files = glob.glob(os.path.join(product_dir, '*.spec'))
        file_utils.move_list(spec_files, release_dir)
        inc_files = glob.glob(os.path.join(group_dir, '*.inc'))
        # keep only the special weakremovers.inc file
        inc_files = filter(lambda file: file.endswith('weakremovers.inc'), inc_files)
        file_utils.move_list(inc_files, release_dir)

        # do not overwrite weakremovers.inc if it exists
        # we will commit there afterwards if needed
        if os.path.exists(os.path.join(group_dir, 'weakremovers.inc')) and \
           not os.path.exists(os.path.join(release_dir, 'weakremovers.inc')):
            file_utils.move_list([os.path.join(group_dir, 'weakremovers.inc')], release_dir)

        file_utils.multibuild_from_glob(release_dir, '*.spec')
        self.build_stub(release_dir, 'spec')
        self.commit_package(release_dir)

        if only_release_packages:
            return

        file_utils.multibuild_from_glob(product_dir, '*.kiwi')
        self.build_stub(product_dir, 'kiwi')
        self.commit_package(product_dir)

        error_output = b''
        reference_summary = os.path.join(group_dir, 'reference-summary.yml')
        if os.path.isfile(reference_summary):
            summary_file = os.path.join(product_dir, 'summary.yml')
            with open(summary_file, 'w') as f:
                f.write('# Summary of packages in groups')
                for group in sorted(summary):
                    # the unsorted group should appear filtered by
                    # unneeded.yml - so we need the content of unsorted.yml
                    # not unsorted.group (this grew a little unnaturally)
                    if group == 'unsorted':
                        continue
                    f.write('\n' + group + ':\n')
                    for package in sorted(summary[group]):
                        f.write('  - ' + package + '\n')

            try:
                error_output += subprocess.check_output(['diff', '-u', reference_summary, summary_file])
            except subprocess.CalledProcessError as e:
                error_output += e.output
            reference_unsorted = os.path.join(group_dir, 'reference-unsorted.yml')
            unsorted_file = os.path.join(product_dir, 'unsorted.yml')
            try:
                error_output += subprocess.check_output(['diff', '-u', reference_unsorted, unsorted_file])
            except subprocess.CalledProcessError as e:
                error_output += e.output

        if len(error_output) > 0:
            self.logger.error('Difference in yml:\n' + error_output.decode('utf-8'))
            return True
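The "build in progress" guard near the top of this method is a reusable pattern; a standalone sketch, assuming lxml's etree as ET and osc.core.show_results_meta as in the snippet:

from lxml import etree as ET
from osc.core import show_results_meta

def build_in_progress(apiurl, project, package, repository):
    # Any result still in state building or dirty means results are not final.
    root = ET.fromstringlist(show_results_meta(
        apiurl, project, package, repository=[repository], multibuild=True))
    return bool(root.xpath('result[@state="building" or @state="dirty"]'))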
Example #4
    def prepare_review(self):
        # Reset for request batch.
        self.requests_map = {}
        self.groups = {}
        self.groups_build = {}
        self.groups_skip_cycle = []

        # Manipulated in ensure_group().
        self.group = None
        self.mirrored = set()

        # Stores parsed install_check() results grouped by package.
        self.package_results = {}

        # Look for requests of interest and group by staging.
        skip_build = set()
        for request in self.requests:
            # Only interesting if request is staged.
            group = request_staged(request)
            if not group:
                self.logger.debug('{}: not staged'.format(request.reqid))
                continue

            if self.limit_group and group != self.limit_group:
                continue

            # Only interested if group has completed building.
            api = self.staging_api(request.actions[0].tgt_project)
            status = api.project_status(group, True)
            # Corrupted requests may reference non-existent projects and will
            # thus return a None status which should be considered not ready.
            if not status or str(
                    status['overall_state']) not in ('testing', 'review',
                                                     'acceptable'):
                # Not in a "ready" state.
                openQA_only = False  # Not relevant so set to False.
                if status and str(status['overall_state']) == 'failed':
                    # Exception to the rule is openQA only in failed state.
                    openQA_only = True
                    for project in api.project_status_walk(status):
                        if len(project['broken_packages']):
                            # Broken packages so not just openQA.
                            openQA_only = False
                            break

                if not self.force and not openQA_only:
                    self.logger.debug('{}: {} not ready'.format(
                        request.reqid, group))
                    continue

            # Only interested if request is in consistent state.
            selected = api.project_status_requests('selected')
            if request.reqid not in selected:
                self.logger.debug('{}: inconsistent state'.format(
                    request.reqid))

            if group not in self.groups_build:
                # Generate build hash based on hashes from relevant projects.
                builds = []
                for staging in api.staging_walk(group):
                    builds.append(ET.fromstringlist(show_results_meta(
                        self.apiurl, staging, multibuild=True,
                        repository=['standard'])).get('state'))
                builds.append(ET.fromstringlist(show_results_meta(
                    self.apiurl, api.project, multibuild=True,
                    repository=['standard'])).get('state'))

                # Include meta revision for config changes (like whitelist).
                builds.append(str(api.get_prj_meta_revision(group)))
                # sha1() requires bytes, so encode the joined state strings.
                self.groups_build[group] = hashlib.sha1(
                    ''.join(builds).encode('utf-8')).hexdigest()[:7]

                # Determine if build has changed since last comment.
                comments = self.comment_api.get_comments(project_name=group)
                _, info = self.comment_api.comment_find(
                    comments, self.bot_name)
                if info and self.groups_build[group] == info.get('build'):
                    skip_build.add(group)

                # Look for skip-cycle comment command.
                users = self.request_override_check_users(
                    request.actions[0].tgt_project)
                for _, who in self.comment_api.command_find(
                        comments, self.review_user, 'skip-cycle', users):
                    self.logger.debug(
                        'comment command: skip-cycle by {}'.format(who))
                    self.groups_skip_cycle.append(group)
                    break

            if not self.force and group in skip_build:
                self.logger.debug('{}: {} build unchanged'.format(
                    request.reqid, group))
                continue

            self.requests_map[int(request.reqid)] = group

            requests = self.groups.get(group, [])
            requests.append(request)
            self.groups[group] = requests

            self.logger.debug('{}: {} ready'.format(request.reqid, group))

        # Filter out undesirable requests and ensure requests are ordered
        # together with group for efficiency.
        count_before = len(self.requests)
        self.requests = []
        for group, requests in sorted(self.groups.items()):
            self.requests.extend(requests)

        self.logger.debug('requests: {} skipped, {} queued'.format(
            count_before - len(self.requests), len(self.requests)))
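The build-hash logic above boils down to fingerprinting a list of result states; a minimal sketch of just that step, with illustrative input:

import hashlib

def build_fingerprint(states):
    # states: result 'state' attributes plus the meta revision, as collected
    # above; an unchanged fingerprint means nothing rebuilt since the last
    # comment was posted. sha1() needs bytes, hence the encode.
    return hashlib.sha1(''.join(states).encode('utf-8')).hexdigest()[:7]

print(build_fingerprint(['published', 'published', '42']))  # 7-char hex digest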
Example #5
    def update_and_solve_target(self, api, target_project, target_config,
                                main_repo, project, scope, force, no_checkout,
                                only_release_packages, stop_after_solve):
        self.all_architectures = target_config.get('pkglistgen-archs').split(
            ' ')
        self.use_newest_version = str2bool(
            target_config.get('pkglistgen-use-newest-version', 'False'))
        self.repos = self.expand_repos(project, main_repo)
        logging.debug('[{}] {}/{}: update and solve'.format(
            scope, project, main_repo))

        group = target_config.get('pkglistgen-group', '000package-groups')
        product = target_config.get('pkglistgen-product', '000product')
        release = target_config.get('pkglistgen-release',
                                    '000release-packages')
        oldrepos = target_config.get('pkglistgen-repos', '000update-repos')

        url = api.makeurl(['source', project])
        packages = ET.parse(http_GET(url)).getroot()
        if packages.find('entry[@name="{}"]'.format(product)) is None:
            if not self.dry_run:
                undelete_package(api.apiurl, project, product, 'revive')
            # TODO disable build.
            logging.info(
                '{} undeleted, skip dvd until next cycle'.format(product))
            return
        elif not force:
            root = ET.fromstringlist(
                show_results_meta(api.apiurl,
                                  project,
                                  product,
                                  repository=[main_repo],
                                  multibuild=True))
            if len(root.xpath('result[@state="building"]')) or len(
                    root.xpath('result[@state="dirty"]')):
                logging.info('{}/{} build in progress'.format(
                    project, product))
                return

        drop_list = api.item_exists(project, oldrepos)
        checkout_list = [group, product, release]
        if drop_list and not only_release_packages:
            checkout_list.append(oldrepos)

        if packages.find('entry[@name="{}"]'.format(release)) is None:
            if not self.dry_run:
                undelete_package(api.apiurl, project, release, 'revive')
            logging.info(
                '{} undeleted, skip dvd until next cycle'.format(release))
            return

        # Cache dir specific to hostname and project.
        host = urlparse(api.apiurl).hostname
        cache_dir = CacheManager.directory('pkglistgen', host, project)

        if not no_checkout:
            if os.path.exists(cache_dir):
                shutil.rmtree(cache_dir)
            os.makedirs(cache_dir)

        group_dir = os.path.join(cache_dir, group)
        product_dir = os.path.join(cache_dir, product)
        release_dir = os.path.join(cache_dir, release)
        oldrepos_dir = os.path.join(cache_dir, oldrepos)

        self.input_dir = group_dir
        self.output_dir = product_dir

        for package in checkout_list:
            if no_checkout:
                logging.debug('Skipping checkout of {}/{}'.format(
                    project, package))
                continue
            checkout_package(api.apiurl,
                             project,
                             package,
                             expand_link=True,
                             prj_dir=cache_dir,
                             outdir=os.path.join(cache_dir, package))

        file_utils.unlink_all_except(release_dir, ['weakremovers.inc'])
        if not only_release_packages:
            file_utils.unlink_all_except(product_dir)
        ignore_list = [
            'supportstatus.txt', 'summary-staging.txt',
            'package-groups.changes'
        ]
        ignore_list += self.group_input_files()
        file_utils.copy_directory_contents(group_dir, product_dir, ignore_list)
        file_utils.change_extension(product_dir, '.spec.in', '.spec')
        file_utils.change_extension(product_dir, '.product.in', '.product')

        logging.debug('-> do_update')
        # make sure we only calculate existing architectures
        self.filter_architectures(target_archs(api.apiurl, project, main_repo))
        self.update_repos(self.filtered_architectures)

        if only_release_packages:
            self.load_all_groups()
            self.write_group_stubs()
        else:
            summary = self.solve_project(
                ignore_unresolvable=str2bool(
                    target_config.get('pkglistgen-ignore-unresolvable')),
                ignore_recommended=str2bool(
                    target_config.get('pkglistgen-ignore-recommended')),
                locale=target_config.get('pkglistgen-locale'),
                locales_from=target_config.get('pkglistgen-locales-from'))

        if stop_after_solve:
            return

        if drop_list and not only_release_packages:
            weakremovers_file = os.path.join(release_dir, 'weakremovers.inc')
            try:
                self.create_weakremovers(project,
                                         target_config,
                                         oldrepos_dir,
                                         output=open(weakremovers_file, 'w'))
            except MismatchedRepoException:
                logging.error(
                    "Failed to create weakremovers.inc due to mismatch in repos - project most likey started building again."
                )
                return

        delete_products = target_config.get('pkglistgen-delete-products',
                                            '').split(' ')
        file_utils.unlink_list(product_dir, delete_products)

        logging.debug('-> product service')
        product_version = attribute_value_load(api.apiurl, project,
                                               'ProductVersion')
        if not product_version:
            # for stagings the product version doesn't matter (I hope)
            product_version = '1'
        for product_file in glob.glob(os.path.join(product_dir, '*.product')):
            self.replace_product_version(product_file, product_version)
            logging.debug(
                subprocess.check_output(
                    [PRODUCT_SERVICE, product_file, product_dir, project],
                    encoding='utf-8'))

        for delete_kiwi in target_config.get(
                'pkglistgen-delete-kiwis-{}'.format(scope), '').split(' '):
            delete_kiwis = glob.glob(os.path.join(product_dir, delete_kiwi))
            file_utils.unlink_list(product_dir, delete_kiwis)
        if scope == 'staging':
            self.strip_medium_from_staging(product_dir)

        spec_files = glob.glob(os.path.join(product_dir, '*.spec'))
        file_utils.move_list(spec_files, release_dir)
        inc_files = glob.glob(os.path.join(group_dir, '*.inc'))
        # keep only the special weakremovers.inc file
        inc_files = filter(lambda file: file.endswith('weakremovers.inc'),
                           inc_files)
        file_utils.move_list(inc_files, release_dir)

        # do not overwrite weakremovers.inc if it exists
        # we will commit there afterwards if needed
        if os.path.exists(os.path.join(group_dir, 'weakremovers.inc')) and \
           not os.path.exists(os.path.join(release_dir, 'weakremovers.inc')):
            file_utils.move_list([os.path.join(group_dir, 'weakremovers.inc')],
                                 release_dir)

        file_utils.multibuild_from_glob(release_dir, '*.spec')
        self.build_stub(release_dir, 'spec')
        self.commit_package(release_dir)

        if only_release_packages:
            return

        file_utils.multibuild_from_glob(product_dir, '*.kiwi')
        self.build_stub(product_dir, 'kiwi')

        reference_summary = os.path.join(group_dir, f'summary-{scope}.txt')
        if os.path.isfile(reference_summary):
            summary_file = os.path.join(product_dir, f'summary-{scope}.txt')
            output = []
            for group in summary:
                for package in sorted(summary[group]):
                    output.append(f'{package}:{group}')

            with open(summary_file, 'w') as f:
                for line in sorted(output):
                    f.write(line + '\n')

        self.commit_package(product_dir)

        if os.path.isfile(reference_summary):
            return self.comment.handle_package_diff(project, reference_summary,
                                                    summary_file)
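This revision writes the summary as sorted 'package:group' lines, which keeps diffs against the reference file stable; a self-contained sketch of that format with made-up data:

summary = {'base': ['glibc', 'bash'], 'x11': ['xterm']}  # illustrative groups
lines = [f'{package}:{group}'
         for group, packages in summary.items()
         for package in sorted(packages)]
with open('summary-target.txt', 'w') as f:  # hypothetical scope 'target'
    for line in sorted(lines):
        f.write(line + '\n')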
Example #6
    def update_and_solve_target(self, api, target_project, target_config, main_repo,
                                project, scope, force, no_checkout,
                                only_release_packages, stop_after_solve):
        self.all_architectures = target_config.get('pkglistgen-archs').split(' ')
        self.repos = self.expand_repos(project, main_repo)
        print('[{}] {}/{}: update and solve'.format(scope, project, main_repo))

        group = target_config.get('pkglistgen-group', '000package-groups')
        product = target_config.get('pkglistgen-product', '000product')
        release = target_config.get('pkglistgen-release', '000release-packages')
        oldrepos = target_config.get('pkglistgen-repos', '000update-repos')

        url = api.makeurl(['source', project])
        packages = ET.parse(http_GET(url)).getroot()
        if packages.find('entry[@name="{}"]'.format(product)) is None:
            if not self.dry_run:
                undelete_package(api.apiurl, project, product, 'revive')
            # TODO disable build.
            print('{} undeleted, skip dvd until next cycle'.format(product))
            return
        elif not force:
            root = ET.fromstringlist(show_results_meta(api.apiurl, project, product,
                                                       repository=[main_repo], multibuild=True))
            if len(root.xpath('result[@state="building"]')) or len(root.xpath('result[@state="dirty"]')):
                print('{}/{} build in progress'.format(project, product))
                return

        drop_list = api.item_exists(project, oldrepos)
        checkout_list = [group, product, release]
        if drop_list and not only_release_packages:
            checkout_list.append(oldrepos)

        if packages.find('entry[@name="{}"]'.format(release)) is None:
            if not self.dry_run:
                undelete_package(api.apiurl, project, release, 'revive')
            print('{} undeleted, skip dvd until next cycle'.format(release))
            return

        # Cache dir specific to hostname and project.
        host = urlparse(api.apiurl).hostname
        cache_dir = CacheManager.directory('pkglistgen', host, project)

        if not no_checkout:
            if os.path.exists(cache_dir):
                shutil.rmtree(cache_dir)
            os.makedirs(cache_dir)

        group_dir = os.path.join(cache_dir, group)
        product_dir = os.path.join(cache_dir, product)
        release_dir = os.path.join(cache_dir, release)
        oldrepos_dir = os.path.join(cache_dir, oldrepos)

        for package in checkout_list:
            if no_checkout:
                print('Skipping checkout of {}/{}'.format(project, package))
                continue
            checkout_package(api.apiurl, project, package, expand_link=True, prj_dir=cache_dir)

        file_utils.unlink_all_except(release_dir, ['weakremovers.inc'])
        if not only_release_packages:
            file_utils.unlink_all_except(product_dir)
        file_utils.copy_directory_contents(group_dir, product_dir,
                                           ['supportstatus.txt', 'groups.yml',
                                            'reference-unsorted.yml', 'reference-summary.yml',
                                            'package-groups.changes'])
        file_utils.change_extension(product_dir, '.spec.in', '.spec')
        file_utils.change_extension(product_dir, '.product.in', '.product')

        self.input_dir = group_dir
        self.output_dir = product_dir

        print('-> do_update')
        # make sure we only calculate existing architectures
        self.filter_architectures(target_archs(api.apiurl, project, main_repo))
        self.update_repos(self.filtered_architectures)

        if only_release_packages:
            self.load_all_groups()
            self.write_group_stubs()
        else:
            summary = self.solve_project(ignore_unresolvable=str2bool(target_config.get('pkglistgen-ignore-unresolvable')),
                                         ignore_recommended=str2bool(target_config.get('pkglistgen-ignore-recommended')),
                                         locale=target_config.get('pkglistgen-locale'),
                                         locales_from=target_config.get('pkglistgen-locales-from'))

        if stop_after_solve:
            return

        if drop_list:
            weakremovers_file = os.path.join(release_dir, 'weakremovers.inc')
            self.create_weakremovers(project, target_config, oldrepos_dir, output=open(weakremovers_file, 'w'))

        delete_products = target_config.get('pkglistgen-delete-products', '').split(' ')
        file_utils.unlink_list(product_dir, delete_products)

        print('-> product service')
        product_version = attribute_value_load(api.apiurl, project, 'ProductVersion')
        if not product_version:
            # for stagings the product version doesn't matter (I hope)
            product_version = '1'
        for product_file in glob.glob(os.path.join(product_dir, '*.product')):
            self.replace_product_version(product_file, product_version)
            print(subprocess.check_output(
                [PRODUCT_SERVICE, product_file, product_dir, project], encoding='utf-8'))

        for delete_kiwi in target_config.get('pkglistgen-delete-kiwis-{}'.format(scope), '').split(' '):
            delete_kiwis = glob.glob(os.path.join(product_dir, delete_kiwi))
            file_utils.unlink_list(product_dir, delete_kiwis)
        if scope == 'staging':
            self.strip_medium_from_staging(product_dir)

        spec_files = glob.glob(os.path.join(product_dir, '*.spec'))
        file_utils.move_list(spec_files, release_dir)
        inc_files = glob.glob(os.path.join(group_dir, '*.inc'))
        # keep only the special weakremovers.inc file
        inc_files = filter(lambda file: file.endswith('weakremovers.inc'), inc_files)
        file_utils.move_list(inc_files, release_dir)

        # do not overwrite weakremovers.inc if it exists
        # we will commit there afterwards if needed
        if not os.path.exists(os.path.join(release_dir, 'weakremovers.inc')):
            file_utils.move_list([os.path.join(group_dir, 'weakremovers.inc')], release_dir)

        file_utils.multibuild_from_glob(release_dir, '*.spec')
        self.build_stub(release_dir, 'spec')
        self.commit_package(release_dir)

        if only_release_packages:
            return

        file_utils.multibuild_from_glob(product_dir, '*.kiwi')
        self.build_stub(product_dir, 'kiwi')
        self.commit_package(product_dir)

        error_output = ''
        reference_summary = os.path.join(group_dir, 'reference-summary.yml')
        if os.path.isfile(reference_summary):
            summary_file = os.path.join(product_dir, 'summary.yml')
            with open(summary_file, 'w') as f:
                f.write('# Summary of packages in groups')
                for group in sorted(summary):
                    # the unsorted group should appear filtered by
                    # unneeded.yml - so we need the content of unsorted.yml
                    # not unsorted.group (this grew a little unnaturally)
                    if group == 'unsorted':
                        continue
                    f.write('\n' + group + ':\n')
                    for package in sorted(summary[group]):
                        f.write('  - ' + package + '\n')

            try:
                error_output += subprocess.check_output(
                    ['diff', '-u', reference_summary, summary_file], encoding='utf-8')
            except subprocess.CalledProcessError as e:
                error_output += e.output
            reference_unsorted = os.path.join(group_dir, 'reference-unsorted.yml')
            unsorted_file = os.path.join(product_dir, 'unsorted.yml')
            try:
                error_output += subprocess.check_output(
                    ['diff', '-u', reference_unsorted, unsorted_file], encoding='utf-8')
            except subprocess.CalledProcessError as e:
                error_output += e.output

        if len(error_output) > 0:
            self.logger.error('Difference in yml:\n' + error_output)
            return True
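The reference comparison relies on diff(1) exiting non-zero when files differ, which check_output surfaces as CalledProcessError; a standalone sketch of that pattern:

import subprocess

def diff_files(reference, generated):
    # Returns the unified diff between two files, or '' when identical.
    try:
        return subprocess.check_output(
            ['diff', '-u', reference, generated], encoding='utf-8')
    except subprocess.CalledProcessError as e:
        return e.output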
Example #7
    def update_and_solve_target(self,
                                apiurl,
                                target_project,
                                target_config,
                                main_repo,
                                opts,
                                skip_release=False):
        print('[{}] {}/{}: update and solve'.format(opts.scope, opts.project,
                                                    main_repo))

        group = target_config.get('pkglistgen-group', '000package-groups')
        product = target_config.get('pkglistgen-product', '000product')
        release = target_config.get('pkglistgen-release',
                                    '000release-packages')

        url = makeurl(apiurl, ['source', opts.project])
        packages = ET.parse(http_GET(url)).getroot()
        if packages.find('entry[@name="{}"]'.format(product)) is None:
            if not self.options.dry:
                undelete_package(apiurl, opts.project, product, 'revive')
            # TODO disable build.
            print('{} undeleted, skip dvd until next cycle'.format(product))
            return
        elif not opts.force:
            root = ET.fromstringlist(
                show_results_meta(apiurl,
                                  opts.project,
                                  product,
                                  repository=[main_repo],
                                  multibuild=True))
            if len(root.xpath('result[@state="building"]')) or len(
                    root.xpath('result[@state="dirty"]')):
                print('{}/{} build in progress'.format(opts.project, product))
                return

        checkout_list = [group, product]
        if not skip_release:
            checkout_list.append(release)

            if packages.find('entry[@name="{}"]'.format(release)) is None:
                if not self.options.dry:
                    undelete_package(apiurl, opts.project, release, 'revive')
                print(
                    '{} undeleted, skip dvd until next cycle'.format(release))
                return

        # Cache dir specific to hostname and project.
        host = urlparse.urlparse(apiurl).hostname
        cache_dir = save_cache_path('opensuse-packagelists', host,
                                    opts.project)

        if os.path.exists(cache_dir):
            shutil.rmtree(cache_dir)
        os.makedirs(cache_dir)

        group_dir = os.path.join(cache_dir, group)
        product_dir = os.path.join(cache_dir, product)
        release_dir = os.path.join(cache_dir, release)

        for package in checkout_list:
            checkout_package(apiurl,
                             opts.project,
                             package,
                             expand_link=True,
                             prj_dir=cache_dir)

        if not skip_release:
            self.unlink_all_except(release_dir)
        self.unlink_all_except(product_dir)
        self.copy_directory_contents(
            group_dir, product_dir,
            ['supportstatus.txt', 'groups.yml', 'package-groups.changes'])
        self.change_extension(product_dir, '.spec.in', '.spec')

        self.options.input_dir = group_dir
        self.options.output_dir = product_dir
        self.postoptparse()

        print('-> do_update')
        self.do_update('update', opts)

        print('-> do_solve')
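        # NOTE: bool() of a non-empty string such as 'False' is truthy; the
        # newer revisions shown above parse these flags with str2bool instead.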
        opts.ignore_unresolvable = bool(
            target_config.get('pkglistgen-ignore-unresolvable'))
        opts.ignore_recommended = bool(
            target_config.get('pkglistgen-ignore-recommended'))
        opts.include_suggested = bool(
            target_config.get('pkglistgen-include-suggested'))
        opts.locale = target_config.get('pkglistgen-locale')
        opts.locales_from = target_config.get('pkglistgen-locales-from')
        self.do_solve('solve', opts)

        delete_products = target_config.get('pkglistgen-delete-products',
                                            '').split(' ')
        self.unlink_list(product_dir, delete_products)

        print('-> product service')
        for product_file in glob.glob(os.path.join(product_dir, '*.product')):
            print(
                subprocess.check_output(
                    [PRODUCT_SERVICE, product_file, product_dir,
                     opts.project]))

        delete_kiwis = target_config.get(
            'pkglistgen-delete-kiwis-{}'.format(opts.scope), '').split(' ')
        self.unlink_list(product_dir, delete_kiwis)

        spec_files = glob.glob(os.path.join(product_dir, '*.spec'))
        if skip_release:
            self.unlink_list(None, spec_files)
        else:
            self.move_list(spec_files, release_dir)

        self.multibuild_from_glob(product_dir, '*.kiwi')
        self.build_stub(product_dir, 'kiwi')
        self.commit_package(product_dir)

        if not skip_release:
            self.multibuild_from_glob(release_dir, '*.spec')
            self.build_stub(release_dir, 'spec')
            self.commit_package(release_dir)
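This older revision still targets Python 2 (urlparse.urlparse, xdg's save_cache_path); a rough modern equivalent of its cache-directory computation, with an illustrative project name:

from urllib.parse import urlparse
from xdg.BaseDirectory import save_cache_path  # pyxdg, as in the snippet

host = urlparse('https://api.opensuse.org').hostname
cache_dir = save_cache_path('opensuse-packagelists', host, 'openSUSE:Factory')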