Example #1
    def test_select_multiple_spec(self):
        self.wf.setup_rings()
        staging = self.wf.create_staging('A', freeze=True)

        project = self.wf.create_project('devel:gcc')
        package = OBSLocal.Package(name='gcc8', project=project)
        package.create_commit(filename='gcc8.spec', text='Name: gcc8')
        package.create_commit(filename='gcc8-tests.spec')
        self.wf.submit_package(package)

        ret = SelectCommand(self.wf.api, staging.name).perform(['gcc8'])
        self.assertEqual(True, ret)

        self.assertEqual(package_list(self.wf.apiurl, staging.name),
                         ['gcc8', 'gcc8-tests'])
        file = source_file_load(self.wf.apiurl, staging.name, 'gcc8',
                                'gcc8.spec')
        self.assertEqual(file, 'Name: gcc8')
        # we should also see the spec file in the 2nd package
        file = source_file_load(self.wf.apiurl, staging.name, 'gcc8-tests',
                                'gcc8.spec')
        self.assertEqual(file, 'Name: gcc8')

        uc = UnselectCommand(self.wf.api)
        self.assertIsNone(uc.perform(['gcc8'], False, None))

        # no stale links
        self.assertEqual([], package_list(self.wf.apiurl, staging.name))
Example #2
    def test_select_multibuild_package(self):
        self.wf.setup_rings()
        staging = self.wf.create_staging('A', freeze=True)

        project = self.wf.create_project('devel:gcc')
        package = OBSLocal.Package(name='gcc9', project=project)
        package.create_commit(filename='gcc9.spec', text='Name: gcc9')
        package.create_commit(filename='gcc9-tests.spec')
        package.create_commit(
            '<multibuild><flavor>gcc9-tests.spec</flavor></multibuild>',
            filename='_multibuild')
        self.wf.submit_package(package)

        ret = SelectCommand(self.wf.api, staging.name).perform(['gcc9'])
        self.assertEqual(True, ret)

        self.assertEqual(package_list(self.wf.apiurl, staging.name), ['gcc9'])
        file = source_file_load(self.wf.apiurl, staging.name, 'gcc9',
                                'gcc9.spec')
        self.assertEqual(file, 'Name: gcc9')

        uc = UnselectCommand(self.wf.api)
        self.assertIsNone(uc.perform(['gcc9'], False, None))

        # no stale links
        self.assertEqual([], package_list(self.wf.apiurl, staging.name))
Example #3
    def update_factory_version(self):
        """Update project (Factory, 13.2, ...) version if is necessary."""

        # XXX TODO - This method has `factory` in the name, which can be
        # misleading.

        project = self.api.project
        curr_version = date.today().strftime('%Y%m%d')
        update_version_attr = False
        url = self.api.makeurl(['source', project], {'view': 'productlist'})

        products = ET.parse(http_GET(url)).getroot()
        for product in products.findall('product'):
            product_name = product.get('name') + '.product'
            product_pkg = product.get('originpackage')
            product_spec = source_file_load(self.api.apiurl, project, product_pkg, product_name)
            new_product = re.sub(r'<version>\d{8}</version>', '<version>%s</version>' % curr_version, product_spec)

            if product_spec != new_product:
                update_version_attr = True
                url = self.api.makeurl(['source', project, product_pkg, product_name])
                http_PUT(url + '?comment=Update+version', data=new_product)

        if update_version_attr:
            self.update_version_attribute(project, curr_version)

        ports_prjs = ['PowerPC', 'ARM', 'zSystems']

        for ports in ports_prjs:
            project = self.api.project + ':' + ports
            if self.api.item_exists(project) and update_version_attr:
                self.update_version_attribute(project, curr_version)
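A minimal standalone sketch of the version substitution above, on a made-up product spec fragment (the regex and strftime format are taken from the method itself):

import re
from datetime import date

product_spec = '<products><version>20230101</version></products>'  # made-up fragment
curr_version = date.today().strftime('%Y%m%d')
new_product = re.sub(r'<version>\d{8}</version>',
                     '<version>%s</version>' % curr_version, product_spec)
print(new_product != product_spec)  # True -> the attribute update would run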
Example #4
    def suppresses_whitelist_warnings(self, source_project, source_package):
        # checks whether there's an rpmlintrc that suppresses warnings we check for
        found_entries = set()
        contents = source_file_load(self.apiurl, source_project,
                                    source_package,
                                    source_package + '-rpmlintrc')
        if contents:
            contents = re.sub(r'(?m)^ *#.*\n?', '', contents)
            matches = re.findall(r'addFilter\(["\']([^"\']+)["\']\)', contents)
            # this is a bit tricky. Since users can specify arbitrary regular expressions,
            # it's not easy to match bad_rpmlint_entries against what we found
            for entry in self.bad_rpmlint_entries:
                for match in matches:
                    # First we try to see if our entries appear verbatim in the rpmlint entries
                    if entry in match:
                        self.logger.info(
                            f'found suppressed whitelist warning: {match}')
                        found_entries.add(match)
                    # if that's not the case then we check if one of the entries in the rpmlint file would match one
                    # of our entries (e.g. addFilter(".*"))
                    elif re.search(match,
                                   entry) and match not in found_entries:
                        self.logger.info(
                            f'found rpmlint entry that suppresses an important warning: {match}'
                        )
                        found_entries.add(match)

        return found_entries
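A minimal self-contained sketch of the two-way matching above; the rpmlintrc body and the bad_rpmlint_entries value are invented for illustration:

import re

bad_rpmlint_entries = ['suse-dbus-unauthorized-service']  # hypothetical entry
contents = 'addFilter("suse-dbus-unauthorized-service")\naddFilter(".*")\n'
contents = re.sub(r'(?m)^ *#.*\n?', '', contents)  # drop comment lines
matches = re.findall(r'addFilter\(["\']([^"\']+)["\']\)', contents)
found_entries = set()
for entry in bad_rpmlint_entries:
    for match in matches:
        if entry in match:               # our entry appears verbatim in the filter
            found_entries.add(match)
        elif re.search(match, entry):    # the filter regex covers our entry
            found_entries.add(match)
print(found_entries)  # both filters are flagged, including the catch-all '.*'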
Example #5
    def create_new_links(self, project, pkgname, oldspeclist):
        filelist = self.api.get_filelist_for_package(pkgname=pkgname,
                                                     project=project,
                                                     extension='spec')
        removedspecs = set(oldspeclist) - set(filelist)
        for spec in removedspecs:
            # Deleting all the packages that no longer have a .spec file
            url = self.api.makeurl(['source', project, spec[:-5]])
            print("Deleting package %s from project %s" % (spec[:-5], project))
            try:
                http_DELETE(url)
            except HTTPError as err:
                if err.code == 404:
                    # the package link was not yet created, which was likely a mistake from earlier
                    pass
                else:
                    # If the package was there but could not be deleted, raise the error
                    raise

            # Remove package from Rings in case 2nd specfile was removed
            if self.api.ring_packages.get(spec[:-5]):
                delete_package(self.api.apiurl,
                               self.api.ring_packages.get(spec[:-5]),
                               spec[:-5],
                               force=True,
                               msg="Cleanup package in Rings")

        if len(filelist) > 1:
            # There is more than one .spec file in the package; link package containers as needed
            origmeta = source_file_load(self.api.apiurl, project, pkgname,
                                        '_meta')
            for specfile in filelist:
                package = specfile[:-5]  # stripping .spec off the filename gives the package name
                if package == pkgname:
                    # This is the original package and does not need to be linked to itself
                    continue
                # Check if the target package already exists, if it does not, we get a HTTP error 404 to catch
                if not self.api.item_exists(project, package):
                    print("Creating new package %s linked to %s" %
                          (package, pkgname))
                    # new package does not exist. Let's link it with new metadata
                    newmeta = re.sub(r'(<package.*name=.){}'.format(pkgname),
                                     r'\1{}'.format(package), origmeta)
                    newmeta = re.sub(r'<devel.*>',
                                     r'<devel package="{}"/>'.format(pkgname),
                                     newmeta)
                    newmeta = re.sub(r'<bcntsynctag>.*</bcntsynctag>', r'',
                                     newmeta)
                    newmeta = re.sub(
                        r'</package>',
                        r'<bcntsynctag>{}</bcntsynctag></package>'.format(
                            pkgname), newmeta)
                    source_file_save(self.api.apiurl, project, package,
                                     '_meta', newmeta)
                    link = "<link package=\"{}\" cicount=\"copy\" />".format(
                        pkgname)
                    source_file_save(self.api.apiurl, project, package,
                                     '_link', link)
        return True
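A worked example of the four regex substitutions above, applied to a made-up `_meta` snippet (the package names are hypothetical):

import re

pkgname, package = 'gcc8', 'gcc8-tests'  # hypothetical names
origmeta = """<package name="gcc8" project="openSUSE:Factory">
  <devel project="devel:gcc" package="gcc8"/>
  <bcntsynctag>gcc8</bcntsynctag>
</package>"""
newmeta = re.sub(r'(<package.*name=.){}'.format(pkgname),
                 r'\1{}'.format(package), origmeta)
newmeta = re.sub(r'<devel.*>', r'<devel package="{}"/>'.format(pkgname), newmeta)
newmeta = re.sub(r'<bcntsynctag>.*</bcntsynctag>', r'', newmeta)
newmeta = re.sub(r'</package>',
                 r'<bcntsynctag>{}</bcntsynctag></package>'.format(pkgname), newmeta)
print(newmeta)  # name is now gcc8-tests; devel and bcntsynctag point back to gcc8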
Example #6
    def fix_linking_packages(self, package, dry=False):
        project = self.api.project
        file_list = self.api.get_filelist_for_package(package, project)
        # ignore linked packages
        if '_link' in file_list:
            return
        needed_links = set()
        # if there's a multibuild we assume all flavors are built
        # using multibuild, so any potential previous links have to
        # be removed, i.e. the set of needed_links is left empty.
        if '_multibuild' not in file_list:
            for file in file_list:
                if file.endswith('.spec') and file != f'{package}.spec':
                    needed_links.add(file[:-5])
        local_links = set()
        for link in self.api.linked_packages(package):
            if link['project'] == project:
                local_links.add(link['package'])

        # Deleting all the packages that no longer have a .spec file
        for link in local_links - needed_links:
            print(f"Deleting package {project}/{link}")
            if dry:
                continue
            try:
                delete_package(self.api.apiurl, project, link, msg=f"No longer linking to {package}")
            except HTTPError as err:
                if err.code == 404:
                    # the package link was not yet created, which was likely a mistake from earlier
                    pass
                else:
                    # If the package was there but could not be deleted, raise the error
                    raise

            # Remove package from Rings in case 2nd specfile was removed
            if self.api.ring_packages.get(link):
                delete_package(self.api.apiurl, self.api.ring_packages.get(link), link, force=True, msg="Cleanup package in Rings")

        for link in needed_links - local_links:
            # Create the link package container that is still missing
            meta = ET.fromstring(source_file_load(self.api.apiurl, project, package, '_meta'))
            print(f"Creating new link {link}->{package}")
            if dry:
                continue

            meta.attrib['name'] = link
            bcnt = meta.find('bcntsynctag')
            if bcnt is None:
                bcnt = ET.SubElement(meta, 'bcntsynctag')
            bcnt.text = package
            devel = meta.find('devel')
            if devel is None:
                devel = ET.SubElement(meta, 'devel')
            devel.attrib['project'] = project
            devel.attrib['package'] = package

            source_file_save(self.api.apiurl, project, link, '_meta', ET.tostring(meta))
            xml = f"<link package='{package}' cicount='copy' />"
            source_file_save(self.api.apiurl, project, link, '_link', xml)
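A minimal sketch of the set arithmetic driving fix_linking_packages(), with invented file and link lists:

package = 'gcc9'  # hypothetical
file_list = ['gcc9.spec', 'gcc9-tests.spec', 'gcc9.changes']
needed_links = {f[:-5] for f in file_list
                if f.endswith('.spec') and f != f'{package}.spec'}
local_links = {'gcc9-tests', 'gcc9-old'}
print(local_links - needed_links)  # {'gcc9-old'} -> stale links to delete
print(needed_links - local_links)  # set()        -> links still to create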
Example #7
    def update_product_version(self, project, product, arch, version):
        if not self.api.item_exists(project, product):
            return None

        kiwifile = source_file_load(self.api.apiurl, project, product, 'PRODUCT-'+arch+'.kiwi')

        tmpkiwifile = re.sub(r'<productinfo name="VERSION">.*</productinfo>', '<productinfo name="VERSION">%s</productinfo>' % version, kiwifile)
        newkiwifile = re.sub(r'<productvar name="VERSION">.*</productvar>', '<productvar name="VERSION">%s</productvar>' % version, tmpkiwifile)

        source_file_save(self.api.apiurl, project, product, 'PRODUCT-' + arch + '.kiwi', newkiwifile)
Example #8
    def update_product_version(self, project, product, arch, version):
        if not self.api.item_exists(project, product):
            return None

        kiwifile = source_file_load(self.api.apiurl, project, product, 'PRODUCT-'+arch+'.kiwi')

        tmpkiwifile = re.sub(r'<productinfo name="VERSION">.*</productinfo>', '<productinfo name="VERSION">%s</productinfo>' % version, kiwifile)
        newkiwifile = re.sub(r'<productvar name="VERSION">.*</productvar>', '<productvar name="VERSION">%s</productvar>' % version, tmpkiwifile)

        source_file_save(self.api.apiurl, project, product, 'PRODUCT-' + arch + '.kiwi', newkiwifile)
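A worked example of the two substitutions above, on a made-up kiwi fragment:

import re

version = '20240315'  # hypothetical
kiwifile = ('<productinfo name="VERSION">20230101</productinfo>\n'
            '<productvar name="VERSION">20230101</productvar>')
kiwifile = re.sub(r'<productinfo name="VERSION">.*</productinfo>',
                  '<productinfo name="VERSION">%s</productinfo>' % version, kiwifile)
kiwifile = re.sub(r'<productvar name="VERSION">.*</productvar>',
                  '<productvar name="VERSION">%s</productvar>' % version, kiwifile)
print(kiwifile)  # both VERSION entries now read 20240315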
Example #9
def kiwi_binaries(apiurl, project):
    binaries = set()
    for filename in meta_get_filelist(apiurl, project, '000product'):
        if not filename.endswith('.kiwi'):
            continue

        kiwi = ET.fromstring(source_file_load(
            apiurl, project, '000product', filename))

        binaries.update(kiwi.xpath('//instsource/repopackages/repopackage/@name'))

    return binaries
Example #10
def kiwi_binaries(apiurl, project):
    binaries = set()
    for filename in meta_get_filelist(apiurl, project, '000product'):
        if not filename.endswith('.kiwi'):
            continue

        kiwi = ET.fromstring(source_file_load(
            apiurl, project, '000product', filename))

        binaries.update(kiwi.xpath('//instsource/repopackages/repopackage/@name'))

    return binaries
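A standalone sketch of the xpath extraction on a sample kiwi document; note that the @name attribute xpath above implies lxml rather than xml.etree:

from lxml import etree as ET

kiwi = ET.fromstring(
    '<image><instsource><repopackages>'
    '<repopackage name="gcc9"/><repopackage name="bash"/>'
    '</repopackages></instsource></image>')
print(kiwi.xpath('//instsource/repopackages/repopackage/@name'))
# ['gcc9', 'bash']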
Example #11
    def create_new_links(self, project, pkgname, oldspeclist):
        filelist = self.api.get_filelist_for_package(pkgname=pkgname, project=project, extension='spec')
        removedspecs = set(oldspeclist) - set(filelist)
        for spec in removedspecs:
            # Deleting all the packages that no longer have a .spec file
            url = self.api.makeurl(['source', project, spec[:-5]])
            print("Deleting package %s from project %s" % (spec[:-5], project))
            try:
                http_DELETE(url)
            except HTTPError as err:
                if err.code == 404:
                    # the package link was not yet created, which was likely a mistake from earlier
                    pass
                else:
                    # If the package was there but could not be deleted, raise the error
                    raise

            # Remove package from Rings in case 2nd specfile was removed
            if self.api.ring_packages.get(spec[:-5]):
                delete_package(self.api.apiurl, self.api.ring_packages.get(spec[:-5]), spec[:-5], force=True, msg="Cleanup package in Rings")

        if len(filelist) > 1:
            # There is more than one .spec file in the package; link package containers as needed
            origmeta = source_file_load(self.api.apiurl, project, pkgname, '_meta')
            for specfile in filelist:
                package = specfile[:-5]  # stripping .spec off the filename gives the package name
                if package == pkgname:
                    # This is the original package and does not need to be linked to itself
                    continue
                # Check if the target package already exists, if it does not, we get a HTTP error 404 to catch
                if not self.api.item_exists(project, package):
                    print("Creating new package %s linked to %s" % (package, pkgname))
                    # new package does not exist. Let's link it with new metadata
                    newmeta = re.sub(r'(<package.*name=.){}'.format(pkgname),
                                     r'\1{}'.format(package),
                                     origmeta)
                    newmeta = re.sub(r'<devel.*>',
                                     r'<devel package="{}"/>'.format(pkgname),
                                     newmeta)
                    newmeta = re.sub(r'<bcntsynctag>.*</bcntsynctag>',
                                     r'',
                                     newmeta)
                    newmeta = re.sub(r'</package>',
                                     r'<bcntsynctag>{}</bcntsynctag></package>'.format(pkgname),
                                     newmeta)
                    source_file_save(self.api.apiurl, project, package, '_meta', newmeta)
                    link = "<link package=\"{}\" cicount=\"copy\" />".format(pkgname)
                    source_file_save(self.api.apiurl, project, package, '_link', link)
        return True
Example #12
    def cleanup(self, project):
        if not self.api.item_exists(project):
            return False

        pkglist = self.api.list_packages(project)
        clean_list = set(pkglist) - set(self.api.cnocleanup_packages)

        for package in clean_list:
            print("[cleanup] deleted %s/%s" % (project, package))
            delete_package(self.api.apiurl,
                           project,
                           package,
                           force=True,
                           msg="autocleanup")

        # wipe Test-DVD binaries and break the kiwi build
        if project.startswith('openSUSE:'):
            for package in pkglist:
                if package.startswith('Test-DVD-'):
                    # intentionally break the kiwi file
                    arch = package.split('-')[-1]
                    fakepkgname = 'I-am-breaks-kiwi-build'
                    oldkiwifile = source_file_load(self.api.apiurl, project,
                                                   package,
                                                   'PRODUCT-' + arch + '.kiwi')
                    if oldkiwifile is not None:
                        newkiwifile = re.sub(
                            r'<repopackage name="openSUSE-release"/>',
                            '<repopackage name="%s"/>' % fakepkgname,
                            oldkiwifile)
                        source_file_save(self.api.apiurl, project, package,
                                         'PRODUCT-' + arch + '.kiwi',
                                         newkiwifile)

                    # do wipe binary now
                    query = {'cmd': 'wipe'}
                    query['package'] = package
                    query['repository'] = 'images'

                    url = self.api.makeurl(['build', project], query)
                    try:
                        http_POST(url)
                    except HTTPError:
                        # failed to wipe the ISOs, but we can just continue
                        pass

        return True
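A sketch of the wipe request built above, with hypothetical values; this assumes makeurl simply joins the path components onto the API URL and urlencodes the query:

from urllib.parse import urlencode

apiurl = 'https://api.opensuse.org'  # hypothetical
project, package = 'openSUSE:Factory', 'Test-DVD-x86_64'
query = {'cmd': 'wipe', 'package': package, 'repository': 'images'}
url = '/'.join([apiurl, 'build', project]) + '?' + urlencode(query)
print(url)
# https://api.opensuse.org/build/openSUSE:Factory?cmd=wipe&package=Test-DVD-x86_64&repository=images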
Example #13
def unmaintained(apiurl, project_target):
    lookup = yaml.safe_load(
        source_file_load(apiurl, project_target, '00Meta', 'lookup.yml'))
    lookup_total = len(lookup)
    lookup = {k: v for k, v in lookup.items() if v.startswith('SUSE:SLE')}

    package_binaries, _ = package_binary_list(apiurl, project_target,
                                              'standard', 'x86_64')
    package_binaries_total = len(package_binaries)
    package_binaries = [
        pb for pb in package_binaries
        if pb.arch != 'src' and pb.package in lookup
    ]

    # Determine max length possible for each column.
    maxes = [
        len(max([b.name for b in package_binaries], key=len)),
        len(max(lookup.keys(), key=len)),
        len(max(lookup.values(), key=len)),
    ]
    line_format = ' '.join(['{:<' + str(m) + '}' for m in maxes])

    print(line_format.format('binary', 'package', 'source project'))

    project_sources = {}
    binaries_unmaintained = 0
    packages_unmaintained = set()
    for package_binary in sorted(package_binaries, key=lambda pb: pb.name):
        project_source = lookup[package_binary.package]
        if project_source not in project_sources:
            # Load binaries referenced in kiwi files the first time a source project is encountered.
            project_sources[project_source] = kiwi_binaries(
                apiurl, project_source)

        if package_binary.name not in project_sources[project_source]:
            print(
                line_format.format(package_binary.name, package_binary.package,
                                   project_source))

            binaries_unmaintained += 1
            packages_unmaintained.add(package_binary.package)

    print(
        '{:,} of {:,} binaries ({:,} packages) unmaintained from SLE of {:,} total binaries ({:,} packages) in project'
        .format(binaries_unmaintained, len(package_binaries),
                len(packages_unmaintained), package_binaries_total,
                lookup_total))
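A worked example of the column-width computation and line_format above, with made-up data:

lookup = {'pkg-one': 'SUSE:SLE-15:GA', 'p2': 'SUSE:SLE-12:Update'}  # made up
names = ['binary-long-name', 'b']
maxes = [
    len(max(names, key=len)),
    len(max(lookup.keys(), key=len)),
    len(max(lookup.values(), key=len)),
]
line_format = ' '.join(['{:<' + str(m) + '}' for m in maxes])
print(line_format)  # {:<16} {:<7} {:<18}
print(line_format.format('b', 'p2', 'SUSE:SLE-15:GA'))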
Example #14
def unmaintained(apiurl, project_target):
    lookup = yaml.safe_load(source_file_load(
        apiurl, project_target, '00Meta', 'lookup.yml'))
    lookup_total = len(lookup)
    lookup = {k: v for k, v in lookup.items() if v.startswith('SUSE:SLE')}

    package_binaries, _ = package_binary_list(
        apiurl, project_target, 'standard', 'x86_64', exclude_src_debug=True)
    package_binaries_total = len(package_binaries)
    package_binaries = [pb for pb in package_binaries if pb.package in lookup]

    # Determine max length possible for each column.
    maxes = [
        len(max([b.name for b in package_binaries], key=len)),
        len(max(lookup.keys(), key=len)),
        len(max(lookup.values(), key=len)),
    ]
    line_format = ' '.join(['{:<' + str(m) + '}' for m in maxes])

    print(line_format.format('binary', 'package', 'source project'))

    project_sources = {}
    binaries_unmaintained = 0
    packages_unmaintained = set()
    for package_binary in sorted(package_binaries, key=lambda pb: pb.name):
        project_source = lookup[package_binary.package]
        if project_source not in project_sources:
            # Load binaries referenced in kiwi files the first time a source project is encountered.
            project_sources[project_source] = kiwi_binaries(apiurl, project_source)

        if package_binary.name not in project_sources[project_source]:
            print(line_format.format(
                package_binary.name, package_binary.package, project_source))

            binaries_unmaintained += 1
            packages_unmaintained.add(package_binary.package)

    print('{:,} of {:,} binaries ({:,} packages) unmaintained from SLE of {:,} total binaries ({:,} packages) in project'.format(
        binaries_unmaintained, len(package_binaries), len(packages_unmaintained), package_binaries_total, lookup_total))
Example #15
    def update_factory_version(self):
        """Update project (Factory, 13.2, ...) version if is necessary."""

        # XXX TODO - This method has `factory` in the name, which can be
        # misleading.

        project = self.api.project
        curr_version = date.today().strftime('%Y%m%d')
        update_version_attr = False
        url = self.api.makeurl(['source', project], {'view': 'productlist'})

        products = ET.parse(http_GET(url)).getroot()
        for product in products.findall('product'):
            product_name = product.get('name') + '.product'
            product_pkg = product.get('originpackage')
            product_spec = source_file_load(self.api.apiurl, project,
                                            product_pkg, product_name)
            new_product = re.sub(r'<version>\d{8}</version>',
                                 '<version>%s</version>' % curr_version,
                                 product_spec)

            if product_spec != new_product:
                update_version_attr = True
                url = self.api.makeurl(
                    ['source', project, product_pkg, product_name])
                http_PUT(url + '?comment=Update+version', data=new_product)

        if update_version_attr:
            self.update_version_attribute(project, curr_version)

        ports_prjs = ['PowerPC', 'ARM', 'zSystems']

        for ports in ports_prjs:
            project = self.api.project + ':' + ports
            if self.api.item_exists(project) and update_version_attr:
                self.update_version_attribute(project, curr_version)
Example #16
    def cleanup(self, project):
        if not self.api.item_exists(project):
            return False

        pkglist = self.api.list_packages(project)
        clean_list = set(pkglist) - set(self.api.cnocleanup_packages)

        for package in clean_list:
            print("[cleanup] deleted %s/%s" % (project, package))
            delete_package(self.api.apiurl, project, package, force=True, msg="autocleanup")

        # wipe Test-DVD binaries and break the kiwi build
        if project.startswith('openSUSE:'):
            for package in pkglist:
                if package.startswith('Test-DVD-'):
                    # intentionally break the kiwi file
                    arch = package.split('-')[-1]
                    fakepkgname = 'I-am-breaks-kiwi-build'
                    oldkiwifile = source_file_load(self.api.apiurl, project, package, 'PRODUCT-'+arch+'.kiwi')
                    if oldkiwifile is not None:
                        newkiwifile = re.sub(r'<repopackage name="openSUSE-release"/>', '<repopackage name="%s"/>' % fakepkgname, oldkiwifile)
                        source_file_save(self.api.apiurl, project, package, 'PRODUCT-' + arch + '.kiwi', newkiwifile)

                    # do wipe binary now
                    query = {'cmd': 'wipe'}
                    query['package'] = package
                    query['repository'] = 'images'

                    url = self.api.makeurl(['build', project], query)
                    try:
                        http_POST(url)
                    except HTTPError:
                        # failed to wipe the ISOs, but we can just continue
                        pass

        return True
Example #17
    def check_pra(self, project, repository, arch):
        config = Config.get(self.apiurl, project)

        oldstate = None
        self.store_filename = 'rebuildpacs.{}-{}.yaml'.format(project, repository)
        if self.store_project and self.store_package:
            state_yaml = source_file_load(self.apiurl, self.store_project,
                                          self.store_package, self.store_filename)
            if state_yaml:
                oldstate = yaml.safe_load(state_yaml)

        oldstate = oldstate or {}
        oldstate.setdefault('check', {})
        if not isinstance(oldstate['check'], dict):
            oldstate['check'] = {}
        oldstate.setdefault('leafs', {})
        if not isinstance(oldstate['leafs'], dict):
            oldstate['leafs'] = {}

        repository_pairs = repository_path_expand(self.apiurl, project, repository)
        directories = []
        primaryxmls = []
        for pair_project, pair_repository in repository_pairs:
            mirrored = mirror(self.apiurl, pair_project, pair_repository, arch)
            if os.path.isdir(mirrored):
                directories.append(mirrored)
            else:
                primaryxmls.append(mirrored)

        parsed = dict()
        with tempfile.TemporaryDirectory(prefix='repochecker') as dir:
            pfile = os.path.join(dir, 'packages')

            SCRIPT_PATH = os.path.dirname(os.path.realpath(__file__))
            script = os.path.join(SCRIPT_PATH, 'write_repo_susetags_file.pl')
            parts = ['perl', script, dir] + directories

            p = subprocess.run(parts)
            if p.returncode:
                # technically only 126, but there is no other value at the moment;
                # so if some other perl error happens, we don't continue
                raise CorruptRepos

            target_packages = []
            with open(os.path.join(dir, 'catalog.yml')) as file:
                catalog = yaml.safe_load(file)
                if catalog is not None:
                    target_packages = catalog.get(directories[0], [])

            parsed = parsed_installcheck([pfile] + primaryxmls, arch, target_packages, [])
            for package in parsed:
                parsed[package]['output'] = "\n".join(parsed[package]['output'])

            # let's risk an N*N algorithm in the hope that we have a limited N
            for package1 in parsed:
                output = parsed[package1]['output']
                for package2 in parsed:
                    if package1 == package2:
                        continue
                    output = output.replace(parsed[package2]['output'], 'FOLLOWUP(' + package2 + ')')
                parsed[package1]['output'] = output

            for package in parsed:
                parsed[package]['output'] = self._split_and_filter(parsed[package]['output'])

        url = makeurl(self.apiurl, ['build', project, '_result'], {
                      'repository': repository, 'arch': arch, 'code': 'succeeded'})
        root = ET.parse(http_GET(url)).getroot()
        succeeding = list(map(lambda x: x.get('package'), root.findall('.//status')))

        per_source = dict()

        for package, entry in parsed.items():
            source = "{}/{}/{}/{}".format(project, repository, arch, entry['source'])
            per_source.setdefault(source, {'output': [], 'builds': entry['source'] in succeeding})
            per_source[source]['output'].extend(entry['output'])

        rebuilds = set()

        for source in sorted(per_source):
            if not len(per_source[source]['output']):
                continue
            self.logger.debug("{} builds: {}".format(source, per_source[source]['builds']))
            self.logger.debug("  " + "\n  ".join(per_source[source]['output']))
            if not per_source[source]['builds']:  # nothing we can do
                continue
            old_output = oldstate['check'].get(source, {}).get('problem', [])
            if sorted(old_output) == sorted(per_source[source]['output']):
                self.logger.debug("unchanged problem")
                continue
            self.logger.info("rebuild %s", source)
            rebuilds.add(os.path.basename(source))
            for line in difflib.unified_diff(old_output, per_source[source]['output'], 'before', 'now'):
                self.logger.debug(line.strip())
            oldstate['check'][source] = {'problem': per_source[source]['output'],
                                         'rebuild': str(datetime.datetime.now())}

        for source in list(oldstate['check']):
            if not source.startswith('{}/{}/{}/'.format(project, repository, arch)):
                continue
            if os.path.basename(source) not in succeeding:
                continue
            if source not in per_source:
                self.logger.info("No known problem, erasing %s", source)
                del oldstate['check'][source]

        packages = config.get('rebuildpacs-leafs', '').split()
        if not self.rebuild:  # ignore in this case
            packages = []

        # first round: collect all infos from obs
        infos = dict()
        for package in packages:
            subpacks, build_deps = self.check_leaf_package(project, repository, arch, package)
            infos[package] = {'subpacks': subpacks, 'deps': build_deps}

        # calculate rebuild triggers
        rebuild_triggers = dict()
        for package1 in packages:
            for package2 in packages:
                if package1 == package2:
                    continue
                for subpack in infos[package1]['subpacks']:
                    if subpack in infos[package2]['deps']:
                        rebuild_triggers.setdefault(package1, set())
                        rebuild_triggers[package1].add(package2)
                        # ignore this dependency; we already trigger both of them
                        del infos[package2]['deps'][subpack]

        # calculate build info hashes
        for package in packages:
            if package not in succeeding:
                self.logger.debug("Ignore %s for the moment, not succeeding", package)
                continue
            m = hashlib.sha256()
            for bdep in sorted(infos[package]['deps']):
                m.update(bytes(bdep + '-' + infos[package]['deps'][bdep], 'utf-8'))
            state_key = '{}/{}/{}/{}'.format(project, repository, arch, package)
            olddigest = oldstate['leafs'].get(state_key, {}).get('buildinfo')
            if olddigest == m.hexdigest():
                continue
            self.logger.info("rebuild leaf package %s (%s vs %s)", package, olddigest, m.hexdigest())
            rebuilds.add(package)
            oldstate['leafs'][state_key] = {'buildinfo': m.hexdigest(),
                                            'rebuild': str(datetime.datetime.now())}

        if self.dryrun:
            if self.rebuild:
                self.logger.info("To rebuild: %s", ' '.join(rebuilds))
            return

        if not self.rebuild or not len(rebuilds):
            self.logger.debug("Nothing to rebuild")
            # in case we do rebuild, wait for it to succeed before saving
            self.store_yaml(oldstate, project, repository, arch)
            return

        query = {'cmd': 'rebuild', 'repository': repository, 'arch': arch, 'package': rebuilds}
        url = makeurl(self.apiurl, ['build', project])
        headers = {'Content-Type': 'application/x-www-form-urlencoded'}
        http_request('POST', url, headers, data=urlencode(query, doseq=True))

        self.store_yaml(oldstate, project, repository, arch)
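A minimal sketch of the N*N FOLLOWUP folding in check_pra(), with made-up installcheck outputs; it collapses output that merely repeats another package's problem:

parsed = {  # hypothetical installcheck results
    'liba': {'output': 'nothing provides libfoo'},
    'appb': {'output': 'nothing provides liba\nnothing provides libfoo'},
}
for package1 in parsed:
    output = parsed[package1]['output']
    for package2 in parsed:
        if package1 == package2:
            continue
        output = output.replace(parsed[package2]['output'],
                                'FOLLOWUP(' + package2 + ')')
    parsed[package1]['output'] = output
print(parsed['appb']['output'])
# nothing provides liba
# FOLLOWUP(liba)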