def simulated_merge_ignore(self, override_pair, overridden_pair, arch):
    """Determine the list of binaries to simulate overrides in the overridden layer.

    Yields the names of binaries from the overridden (project, repository)
    pair whose package also appears in the override pair's binary map.
    """
    _, override_binary_map = package_binary_list(
        self.apiurl, override_pair[0], override_pair[1], arch)
    override_packages = set(override_binary_map.values())

    overridden_binaries, _ = package_binary_list(
        self.apiurl, overridden_pair[0], overridden_pair[1], arch)
    for overridden_binary in overridden_binaries:
        if overridden_binary.package in override_packages:
            yield overridden_binary.name
    def simulated_merge_ignore(self, override_pair, overridden_pair, arch):
        """Determine the list of binaries to simulate overrides in the overridden layer."""
        override_project, override_repository = override_pair[0], override_pair[1]
        _, binary_map = package_binary_list(
            self.apiurl, override_project, override_repository, arch)
        overriding_packages = set(binary_map.values())

        overridden_project, overridden_repository = overridden_pair[0], overridden_pair[1]
        binaries, _ = package_binary_list(
            self.apiurl, overridden_project, overridden_repository, arch)
        for entry in binaries:
            if entry.package not in overriding_packages:
                continue
            yield entry.name
    def ignore_from_staging(self, project, staging, arch):
        """Determine the target project binaries to ignore in favor of staging."""
        _, staging_binary_map = package_binary_list(
            self.apiurl, staging, 'standard', arch)
        staged_packages = set(staging_binary_map.values())

        project_binaries, _ = package_binary_list(
            self.apiurl, project, 'standard', arch)
        for project_binary in project_binaries:
            if project_binary.package in staged_packages:
                yield project_binary.name
# Example #4 (marker from original source listing)
    def perform(self, save=False):
        """Report binaries built by more than one package, per architecture.

        Builds a {arch: {binary_name: [packages]}} mapping, dumps it as
        YAML, and either stores it in the pseudometa 'duplicate_binaries'
        file (save=True) or prints it.

        :param save: persist the report instead of printing it.
        """
        duplicates = {}
        for arch in sorted(target_archs(self.api.apiurl, self.api.project), reverse=True):
            package_binaries, _ = package_binary_list(
                self.api.apiurl, self.api.project, 'standard', arch,
                strip_multibuild=False, exclude_src_debug=True)
            # First package seen for each binary name on this arch.
            # (The original nested this dict under `arch` even though it is
            # recreated every iteration; a flat dict is equivalent.)
            seen = {}
            for pb in package_binaries:
                # Skip binaries built for a different (non-noarch) arch.
                if pb.arch != 'noarch' and pb.arch != arch:
                    continue

                if pb.name in seen:
                    duplicates.setdefault(arch, {}).setdefault(pb.name, set())
                    duplicates[arch][pb.name].add(pb.package)
                    duplicates[arch][pb.name].add(seen[pb.name])
                    continue

                seen[pb.name] = pb.package

        # Convert sets to sorted lists for readable, deterministic yaml
        # (plain list(set) order is arbitrary and made diffs noisy).
        for names in duplicates.values():
            for name in names:
                names[name] = sorted(names[name])

        current = yaml.dump(duplicates, default_flow_style=False)
        if save:
            self.api.pseudometa_file_ensure('duplicate_binaries', current)
        else:
            print(current)
    def install_check_sections_group(self, project, repository, arch, sections):
        _, binary_map = package_binary_list(self.api.apiurl, project, repository, arch)

        for section in sections:
            # If switch to creating bugs likely makes sense to join packages to
            # form grouping key and create shared bugs for conflicts.
            # The `in binary_map` guard was added after encountering:
            # https://lists.opensuse.org/opensuse-buildservice/2017-08/msg00035.html
            # Under normal circumstances this should never occur.
            affected_packages = {binary_map[name]
                                 for name in section.binaries if name in binary_map}
            for affected in affected_packages:
                self.package_results.setdefault(affected, []).append(section)
    def install_check_sections_group(self, project, repository, arch, sections):
        _, binary_map = package_binary_list(self.api.apiurl, project, repository, arch)

        for section in sections:
            # If switch to creating bugs likely makes sense to join packages to
            # form grouping key and create shared bugs for conflicts.
            # Skip binaries missing from binary_map; observed once in:
            # https://lists.opensuse.org/opensuse-buildservice/2017-08/msg00035.html
            # Under normal circumstances this should never occur.
            packages = set()
            for binary in section.binaries:
                if binary in binary_map:
                    packages.add(binary_map[binary])
            for package in packages:
                results = self.package_results.setdefault(package, [])
                results.append(section)
def unmaintained(apiurl, project_target):
    """Print target-project binaries from SUSE:SLE sources not referenced in kiwi.

    Loads the package -> source-project lookup for *project_target*, keeps
    only packages sourced from SUSE:SLE, and prints each non-src binary
    that is not referenced by its source project's kiwi files, followed by
    summary counts.

    :param apiurl: OBS API URL.
    :param project_target: target project whose binaries are inspected.
    """
    lookup = yaml.safe_load(
        source_file_load(apiurl, project_target, '00Meta', 'lookup.yml'))
    lookup_total = len(lookup)
    # dict.iteritems() is Python 2 only and raises AttributeError on
    # Python 3; items() is the portable spelling.
    lookup = {k: v for k, v in lookup.items() if v.startswith('SUSE:SLE')}

    package_binaries, _ = package_binary_list(apiurl, project_target,
                                              'standard', 'x86_64')
    package_binaries_total = len(package_binaries)
    package_binaries = [
        pb for pb in package_binaries
        if pb.arch != 'src' and pb.package in lookup
    ]

    # Determine max length possible for each column.
    maxes = [
        len(max([b.name for b in package_binaries], key=len)),
        len(max(lookup.keys(), key=len)),
        len(max(lookup.values(), key=len)),
    ]
    line_format = ' '.join(['{:<' + str(m) + '}' for m in maxes])

    print(line_format.format('binary', 'package', 'source project'))

    project_sources = {}
    binaries_unmaintained = 0
    packages_unmaintained = set()
    for package_binary in sorted(package_binaries, key=lambda pb: pb.name):
        project_source = lookup[package_binary.package]
        if project_source not in project_sources:
            # Load binaries referenced in kiwi the first time source encountered.
            project_sources[project_source] = kiwi_binaries(
                apiurl, project_source)

        if package_binary.name not in project_sources[project_source]:
            print(
                line_format.format(package_binary.name, package_binary.package,
                                   project_source))

            binaries_unmaintained += 1
            packages_unmaintained.add(package_binary.package)

    print(
        '{:,} of {:,} binaries ({:,} packages) unmaintained from SLE of {:,} total binaries ({:,} packages) in project'
        .format(binaries_unmaintained, len(package_binaries),
                len(packages_unmaintained), package_binaries_total,
                lookup_total))
# Example #8 (marker from original source listing)
    def perform(self, save=False):
        """Report binaries built by more than one package, per architecture.

        Builds a {arch: {binary_name: [packages]}} mapping, dumps it as
        YAML, and either updates the staging dashboard 'duplicate_binaries'
        file when it changed (save=True) or prints the report.

        :param save: persist the report instead of printing it.
        """
        duplicates = {}
        for arch in sorted(target_archs(self.api.apiurl, self.api.project),
                           reverse=True):
            package_binaries, _ = package_binary_list(self.api.apiurl,
                                                      self.api.project,
                                                      'standard',
                                                      arch,
                                                      strip_multibuild=False,
                                                      exclude_src_debug=True)
            # First package seen for each binary name on this arch.
            # (The original nested this dict under `arch` even though it is
            # recreated every iteration; a flat dict is equivalent.)
            seen = {}
            for pb in package_binaries:
                # Skip binaries built for a different (non-noarch) arch.
                if pb.arch != 'noarch' and pb.arch != arch:
                    continue

                if pb.name in seen:
                    duplicates.setdefault(arch, {}).setdefault(pb.name, set())
                    duplicates[arch][pb.name].add(pb.package)
                    duplicates[arch][pb.name].add(seen[pb.name])
                    continue

                seen[pb.name] = pb.package

        # Convert sets to sorted lists for readable, deterministic yaml
        # (plain list(set) order is arbitrary and made diffs noisy).
        for names in duplicates.values():
            for name in names:
                names[name] = sorted(names[name])

        current = yaml.dump(duplicates, default_flow_style=False)
        if save:
            args = [
                '{}:Staging'.format(self.api.project), 'dashboard',
                'duplicate_binaries'
            ]
            previous = self.api.load_file_content(*args)
            # Avoid a no-op commit when the report is unchanged.
            if current != previous:
                args.append(current)
                self.api.save_file_content(*args)
        else:
            print(current)
def unmaintained(apiurl, project_target):
    """Print target-project binaries from SUSE:SLE sources not referenced in kiwi.

    Loads the package -> source-project lookup for *project_target*, keeps
    only packages sourced from SUSE:SLE, and prints each binary that is not
    referenced by its source project's kiwi files, followed by summary
    counts.

    :param apiurl: OBS API URL.
    :param project_target: target project whose binaries are inspected.
    """
    lookup = yaml.safe_load(source_file_load(
        apiurl, project_target, '00Meta', 'lookup.yml'))
    lookup_total = len(lookup)
    # dict.iteritems() is Python 2 only and raises AttributeError on
    # Python 3; items() is the portable spelling.
    lookup = {k: v for k, v in lookup.items() if v.startswith('SUSE:SLE')}

    package_binaries, _ = package_binary_list(
        apiurl, project_target, 'standard', 'x86_64', exclude_src_debug=True)
    package_binaries_total = len(package_binaries)
    package_binaries = [pb for pb in package_binaries if pb.package in lookup]

    # Determine max length possible for each column.
    maxes = [
        len(max([b.name for b in package_binaries], key=len)),
        len(max(lookup.keys(), key=len)),
        len(max(lookup.values(), key=len)),
    ]
    line_format = ' '.join(['{:<' + str(m) + '}' for m in maxes])

    print(line_format.format('binary', 'package', 'source project'))

    project_sources = {}
    binaries_unmaintained = 0
    packages_unmaintained = set()
    for package_binary in sorted(package_binaries, key=lambda pb: pb.name):
        project_source = lookup[package_binary.package]
        if project_source not in project_sources:
            # Load binaries referenced in kiwi the first time source encountered.
            project_sources[project_source] = kiwi_binaries(apiurl, project_source)

        if package_binary.name not in project_sources[project_source]:
            print(line_format.format(
                package_binary.name, package_binary.package, project_source))

            binaries_unmaintained += 1
            packages_unmaintained.add(package_binary.package)

    print('{:,} of {:,} binaries ({:,} packages) unmaintained from SLE of {:,} total binaries ({:,} packages) in project'.format(
        binaries_unmaintained, len(package_binaries), len(packages_unmaintained), package_binaries_total, lookup_total))