Example #1
    def update_factory_version(self):
        """Update project (Factory, 13.2, ...) version if is necessary."""

        # XXX TODO - This method have `factory` in the name.  Can be
        # missleading.

        project = self.api.project
        curr_version = date.today().strftime('%Y%m%d')
        update_version_attr = False
        url = self.api.makeurl(['source', project], {'view': 'productlist'})

        products = ET.parse(http_GET(url)).getroot()
        for product in products.findall('product'):
            product_name = product.get('name') + '.product'
            product_pkg = product.get('originpackage')
            url = self.api.makeurl(['source', project, product_pkg, product_name])
            product_spec = http_GET(url).read()
            new_product = re.sub(r'<version>\d{8}</version>', '<version>%s</version>' % curr_version, product_spec)

            if product_spec != new_product:
                update_version_attr = True
                http_PUT(url + '?comment=Update+version', data=new_product)

        if update_version_attr:
            self.update_version_attribute(project, curr_version)

        ports_prjs = ['PowerPC', 'ARM', 'zSystems']

        for ports in ports_prjs:
            project = self.api.project + ':' + ports
            if self.api.item_exists(project) and update_version_attr:
                self.update_version_attribute(project, curr_version)
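
The version bump above boils down to a plain regex substitution on the
product spec. A minimal, self-contained sketch of that step (the inline XML
fragment is illustrative, not a real product file):

import re
from datetime import date

product_spec = '<productdefinition><version>20240101</version></productdefinition>'
curr_version = date.today().strftime('%Y%m%d')
new_product = re.sub(r'<version>\d{8}</version>',
                     '<version>%s</version>' % curr_version, product_spec)
print(new_product)  # the eight-digit version is replaced by today's stamp
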
    def check_depinfo_ring(self, prj, nextprj):
        url = makeurl(self.api.apiurl, ['build', prj, '_result'])
        root = ET.parse(http_GET(url)).getroot()
        for repo in root.findall('result'):
            repostate = repo.get('state', 'missing')
            if repostate not in ['unpublished', 'published'] or repo.get('dirty', 'false') == 'true':
                print('Repo {}/{} is in state {}'.format(repo.get('project'), repo.get('repository'), repostate))
                return False
            for package in repo.findall('status'):
                code = package.get('code')
                if code not in ['succeeded', 'excluded', 'disabled']:
                    print('Package {}/{}/{} is {}'.format(repo.get('project'), repo.get('repository'), package.get('package'), code))
                    return False

        self.find_inner_ring_links(prj)
        for arch in self.api.cstaging_dvd_archs:
            self.fill_pkgdeps(prj, 'standard', arch)

            if prj == '{}:1-MinimalX'.format(self.api.crings):
                url = makeurl(self.api.apiurl, ['build', prj, 'images', arch, 'Test-DVD-' + arch, '_buildinfo'])
                root = ET.parse(http_GET(url)).getroot()
                for bdep in root.findall('bdep'):
                    if 'name' not in bdep.attrib:
                        continue
                    b = bdep.attrib['name']
                    if b not in self.bin2src:
                        continue
                    b = self.bin2src[b]
                    self.pkgdeps[b] = 'MYdvd'

            if prj == '{}:2-TestDVD'.format(self.api.crings):
                url = makeurl(self.api.apiurl, ['build', prj, 'images', arch, 'Test-DVD-' + arch, '_buildinfo'])
                root = ET.parse(http_GET(url)).getroot()
                for bdep in root.findall('bdep'):
                    if 'name' not in bdep.attrib:
                        continue
                    b = bdep.attrib['name']
                    if b not in self.bin2src:
                        continue
                    b = self.bin2src[b]
                    self.pkgdeps[b] = 'MYdvd2'

        if prj == '{}:0-Bootstrap'.format(self.api.crings):
            url = makeurl(self.api.apiurl, ['build', prj, 'standard', '_buildconfig'])
            for line in http_GET(url).read().split('\n'):
                if line.startswith('Preinstall:') or line.startswith('Support:'):
                    for prein in line.split(':')[1].split():
                        if prein not in self.bin2src:
                            continue
                        b = self.bin2src[prein]
                        self.pkgdeps[b] = 'MYinstall'

        for source in self.sources:
            if source not in self.pkgdeps and source not in self.links:
                if source.startswith('texlive-specs-'): # XXX: texlive bullshit packaging
                    continue
                print('osc rdelete -m cleanup {} {}'.format(prj, source))
                if nextprj:
                    print('osc linkpac {} {} {}'.format(self.api.project, source, nextprj))
Example #3
def entity_exists(apiurl, project, package=None):
    try:
        http_GET(makeurl(apiurl, list(filter(None, ['source', project, package])) + ['_meta']))
    except HTTPError as e:
        if e.code == 404:
            return False

        raise e

    return True
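
A usage sketch for entity_exists; the API URL and both names are
placeholders:

apiurl = 'https://api.example.org'  # placeholder OBS instance
if entity_exists(apiurl, 'openSUSE:Factory'):         # project-level check
    print('project exists')
if entity_exists(apiurl, 'openSUSE:Factory', 'osc'):  # package-level check
    print('package exists')
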
    def project_exists(self, project):
        """
        Return true if the given project exists
        :param project: project name to check
        """
        url = self.makeurl(['source', project, '_meta'])
        try:
            http_GET(url)
        except urllib2.HTTPError:
            return False
        return True
Example #5
def do_viewrequest(self, subcmd, opts, *args):
    """${cmd_name}: view the raw content of a request

    Usage:
       ${cmd_name} SR ...
           View the raw content of a request
    ${cmd_option_list}
    """
    apiurl = self.get_api_url()
    for sr in args:
        url = makeurl(apiurl, ['request', str(sr)])
        print(http_GET(url).read())
    def item_exists(self, project, package=None):
        """
        Return true if the given project (and optionally package) exists
        :param project: project name to check
        :param package: optional package to check
        """
        if package:
            url = self.makeurl(['source', project, package, '_meta'])
        else:
            url = self.makeurl(['source', project, '_meta'])
        try:
            http_GET(url)
        except urllib2.HTTPError:
            return False
        return True
def _checker_check_dups(self, project, opts):
    url = makeurl(opts.apiurl, ['request'], "states=new,review&project=%s&view=collection" % project)
    f = http_GET(url)
    root = ET.parse(f).getroot()
    rqs = {}
    for rq in root.findall('request'):
        id = rq.attrib['id']
        for a in rq.findall('action'):
            source = a.find('source')
            target = a.find('target')
            type = a.attrib['type']
            assert target is not None
            if target.attrib['project'] != project:
                continue
            # print(id)
            # ET.dump(target)
            if 'package' not in target.attrib:
                continue
            package = target.attrib['package']
            if type + package in rqs:
                [oldid, oldsource] = rqs[type + package]
                if oldid > id:
                    s = oldid
                    oldid = id
                    id = s
                assert oldid < id
                if source is not None and oldsource is not None:
                    if (source.attrib['project'] == oldsource.attrib['project'] and
                       source.attrib['package'] == oldsource.attrib['package']):
                        change_request_state(opts.apiurl, str(oldid), 'superseded',
                                     'superseded by %s' % id, id)
                        continue
                print("DUPS found:", id, oldid)
            rqs[type + package] = [id, source]
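
One caveat in the snippet above: rq.attrib['id'] is a string, so the
oldid > id comparison is lexicographic ('9' > '10'). A numeric-safe variant
of the swap would convert before comparing:

oldid, id = sorted([oldid, id], key=int)  # numeric order, smallest first
assert int(oldid) < int(id)
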
    def getCommitLog(self, project, package, revision=None):
        """
        getCommitLog(project, package, revision=None) -> list

        Get commit log for package in project. If revision is set, get just the
        log for that revision.

        Each log is a tuple of the form (rev, srcmd5, version, time, user,
        comment)
        """
        u = core.makeurl(self.apiurl, ['source', project, package, '_history'])
        f = core.http_GET(u)
        root = ElementTree.parse(f).getroot()

        r = []
        revisions = root.findall('revision')
        revisions.reverse()
        for node in revisions:
            rev = int(node.get('rev'))
            if revision and rev != int(revision):
                continue
            srcmd5 = node.find('srcmd5').text
            version = node.find('version').text
            user = node.find('user').text
            try:
                comment = node.find('comment').text
            except AttributeError:
                # no <comment> element, so find() returned None
                comment = '<no message>'
            t = time.localtime(int(node.find('time').text))
            t = time.strftime('%Y-%m-%d %H:%M:%S', t)

            r.append((rev, srcmd5, version, t, user, comment))
        return r
Example #9
def package_source_hash(apiurl, project, package, revision=None):
    query = {}
    if revision:
        query['rev'] = revision

    # Will not catch packages that previously had a link, but no longer do.
    if package_source_link_copy(apiurl, project, package):
        query['expand'] = 1

    try:
        url = makeurl(apiurl, ['source', project, package], query)
        root = ETL.parse(http_GET(url)).getroot()
    except HTTPError as e:
        if e.code == 400 or e.code == 404:
            # 400: revision not found, 404: package not found.
            return None

        raise e

    if revision and root.find('error') is not None:
        # OBS returns XML error instead of HTTP 404 if revision not found.
        return None

    from osclib.util import sha1_short
    return sha1_short(root.xpath('entry[@name!="_link"]/@md5'))
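
sha1_short is imported from osclib.util. A minimal stand-in with the
behavior the call sites assume (accept a list or a string, hash, truncate);
the join separator and the truncation length are assumptions, not
necessarily the exact osclib implementation:

import hashlib

def sha1_short(data):
    # stand-in for osclib.util.sha1_short; '::' and [:7] are guesses
    if isinstance(data, (list, tuple)):
        data = '::'.join(data)
    if isinstance(data, str):
        data = data.encode('utf-8')
    return hashlib.sha1(data).hexdigest()[:7]
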
    def getWorkerStatus(self):
        """
        getWorkerStatus() -> list of dicts

        Get worker status as a list of dictionaries. Each dictionary contains the keys 'id',
        'hostarch', and 'status'. If the worker is building, the dict will additionally contain the
        keys 'project', 'package', 'target', 'starttime' and 'started'
        """
        url = core.makeurl(self.apiurl, ['build', '_workerstatus'])
        f = core.http_GET(url)
        tree = ElementTree.parse(f).getroot()
        workerstatus = []
        for worker in tree.findall('building'):
            d = {'id': worker.get('workerid'),
                 'status': 'building'}
            for attr in ('hostarch', 'project', 'package', 'starttime'):
                d[attr] = worker.get(attr)
            d['target'] = '/'.join((worker.get('repository'), worker.get('arch')))
            d['started'] = time.asctime(time.localtime(float(worker.get('starttime'))))
            workerstatus.append(d)
        for worker in tree.findall('idle'):
            d = {'id': worker.get('workerid'),
                 'hostarch': worker.get('hostarch'),
                 'status': 'idle'}
            workerstatus.append(d)
        return workerstatus
    def getSubmitRequests(self):
        """
        getSubmitRequests() -> list of dicts

        """
        url = core.makeurl(self.apiurl, ['search', 'request'], ['match=submit'])
        f = core.http_GET(url)
        tree = ElementTree.parse(f).getroot()
        submitrequests = []
        for sr in tree.findall('request'):
            if sr.get('type') != "submit":
                continue

            d = {'id': int(sr.get('id'))}
            sb = sr.findall('submit')[0]
            src = sb.findall('source')[0]
            d['srcproject'] = src.get('project')
            d['srcpackage'] = src.get('package')
            dst = sb.findall('target')[0]
            d['dstproject'] = dst.get('project')
            d['dstpackage'] = dst.get('package')
            d['state'] = sr.findall('state')[0].get('name')

            submitrequests.append(d)
        submitrequests.sort(key=lambda x: x['id'])
        return submitrequests
    def find_request_package(self, package):
        """
        Look up the package by its name and return the SR#
        :param package: name of the package
        """

        query = 'states=new,review&project={}&view=collection&package={}'
        query = query.format(self.api.project, urllib2.quote(package))
        url = makeurl(self.api.apiurl, ['request'], query)
        f = http_GET(url)

        root = ET.parse(f).getroot()

        requests = []
        for sr in root.findall('request'):
            # Check the target matches - the search query matches
            # case-insensitively, but OBS project/package names are case-sensitive
            rq_target = sr.find('action').find('target')
            if package != rq_target.get('package') or self.api.project != rq_target.get('project'):
                continue

            request = sr.get('id')
            state = sr.find('state').get('name')

            self.srs[int(request)] = {'project': self.api.project, 'state': state}
            requests.append(request)

        if len(requests) > 1:
            msg = 'There are multiple requests for package "{}": {}'
            msg = msg.format(package, ', '.join(requests))
            raise oscerr.WrongArgs(msg)

        request = int(requests[0]) if requests else None
        return request
    def fill_pkgdeps(self, prj, repo, arch):
        url = makeurl(self.api.apiurl, ['build', prj, repo, arch, '_builddepinfo'])
        f = http_GET(url)
        root = ET.parse(f).getroot()

        for package in root.findall('package'):
            source = package.find('source').text
            if package.attrib['name'].startswith('preinstall'):
                continue
            self.sources.add(source)

            for subpkg in package.findall('subpkg'):
                subpkg = subpkg.text
                if subpkg in self.bin2src:
                    if self.bin2src[subpkg] == source:
                        # different archs
                        continue
                    print('Binary {} is defined twice: {}/{}'.format(subpkg, prj, source))
                self.bin2src[subpkg] = source

        for package in root.findall('package'):
            source = package.find('source').text
            for pkg in package.findall('pkgdep'):
                if pkg.text not in self.bin2src:
                    print('Package {} not found in place'.format(pkg.text))
                    continue
                b = self.bin2src[pkg.text]
                self.pkgdeps[b] = source
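
For reference, _builddepinfo returns one <package> element per source with
its subpackages and build dependencies. An illustrative document (not real
OBS output) parsed the same way the method does:

import xml.etree.ElementTree as ET

sample = '''<builddepinfo>
  <package name="vim">
    <source>vim</source>
    <subpkg>vim</subpkg>
    <subpkg>vim-data</subpkg>
    <pkgdep>ncurses</pkgdep>
  </package>
</builddepinfo>'''

root = ET.fromstring(sample)
for package in root.findall('package'):
    print(package.find('source').text,
          [s.text for s in package.findall('subpkg')])
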
Example #14
def issue_trackers(apiurl):
    url = makeurl(apiurl, ['issue_trackers'])
    root = ET.parse(http_GET(url)).getroot()
    trackers = {}
    for tracker in root.findall('issue-tracker'):
        trackers[tracker.find('name').text] = tracker.find('label').text
    return trackers
Example #15
def package_kind(apiurl, project, package):
    if package.startswith('00'):
        return 'meta'

    if ':' in package:
        return 'multibuild_subpackage'

    if package.startswith('patchinfo.'):
        return 'patchinfo'

    try:
        url = makeurl(apiurl, ['source', project, package, '_meta'])
        root = ETL.parse(http_GET(url)).getroot()
    except HTTPError as e:
        if e.code == 404:
            return None

        raise e

    if root.find('releasename') is not None:
        return 'maintenance_update'

    if root.find('bcntsynctag') is not None:
        return 'multispec_subpackage'

    # Some multispec subpackages do not have bcntsynctag, so check link.
    link = entity_source_link(apiurl, project, package)
    if link is not None and link.get('cicount') == 'copy':
        kind_target = package_kind(apiurl, project, link.get('package'))
        if kind_target != 'maintenance_update':
            # If a multispec subpackage was updated via a maintenance update the
            # proper link information is lost and it will be considered source.
            return 'multispec_subpackage'

    return 'source'
    def fill_pkgdeps(self, prj, repo, arch):
        url = makeurl(self.api.apiurl, ['build', prj, repo, arch, '_builddepinfo'])
        f = http_GET(url)
        root = ET.parse(f).getroot()

        for package in root.findall('package'):
            source = package.find('source').text
            if package.attrib['name'].startswith('preinstall'):
                continue
            self.sources.append(source)

            for subpkg in package.findall('subpkg'):
                subpkg = subpkg.text
                if subpkg in self.bin2src:
                    print('Binary {} is defined twice: {}/{}'.format(subpkg, prj, source))
                self.bin2src[subpkg] = source

        for package in root.findall('package'):
            source = package.find('source').text
            for pkg in package.findall('pkgdep'):
                if pkg.text not in self.bin2src:
                    if pkg.text.startswith('texlive-'):
                        for letter in range(ord('a'), ord('z') + 1):
                            self.pkgdeps['texlive-specs-' + chr(letter)] = 'texlive-specs-' + chr(letter)
                    else:
                        print('Package {} not found in place'.format(pkg.text))
                    continue
                b = self.bin2src[pkg.text]
                self.pkgdeps[b] = source
Example #17
def _get_verifymd5(self, p, rev):
    try:
        url = makeurl(self.get_api_url(), ['source', p.sproject, p.spackage], ['view=info', 'rev=%s' % rev])
        root = ET.parse(http_GET(url)).getroot()
    except urllib2.HTTPError as e:
        print('ERROR in URL %s [%s]' % (url, e))
        return []
    def find_request_id(self, request_id):
        """
        Look up the request by ID to verify if it is correct
        :param request_id: ID of the added request
        """

        if not _is_int(request_id):
            return False

        url = makeurl(self.api.apiurl, ['request', str(request_id)])
        try:
            f = http_GET(url)
        except urllib2.HTTPError:
            return None

        root = ET.parse(f).getroot()

        if root.get('id', None) != str(request_id):
            return None

        project = root.find('action').find('target').get('project')
        if (project != self.api.project and not project.startswith(self.api.cstaging)):
            msg = 'Request {} is not for {}, but for {}'
            msg = msg.format(request_id, self.api.project, project)
            raise oscerr.WrongArgs(msg)
        self.srs[int(request_id)] = {'project': project}

        return True
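
The _is_int helper used above is not shown; a conventional sketch (the real
helper may differ):

def _is_int(value):
    try:
        int(value)
        return True
    except (ValueError, TypeError):
        return False
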
    def update_factory_version(self):
        """Update project (Factory, 13.2, ...) version if is necessary."""

        # XXX TODO - This method have `factory` in the name.  Can be
        # missleading.

        # If there is no product defined for this project, show a
        # warning and return.
        if not self.api.cproduct:
            warnings.warn('There is no product defined in the configuration file.')
            return

        project = self.api.project
        url = self.api.makeurl(['source', project, '_product', self.api.cproduct])

        product = http_GET(url).read()
        curr_version = date.today().strftime('%Y%m%d')
        new_product = re.sub(r'<version>\d{8}</version>', '<version>%s</version>' % curr_version, product)

        if product != new_product:
            http_PUT(url + '?comment=Update+version', data=new_product)

        service = {'cmd': 'runservice'}

        ports_prjs = ['PowerPC', 'ARM']

        for ports in ports_prjs:
            project = self.api.project + ':' + ports
            if self.api.item_exists(project):
                baseurl = ['source', project, '_product']
                url = self.api.makeurl(baseurl, query=service)
                self.api.retried_POST(url)
def entity_clone(apiurl_source, apiurl_target, path, sanitize=None, clone=None, after=None):
    if not hasattr(entity_clone, 'cloned'):
        entity_clone.cloned = []

    if path[0] == 'source' and not project_fence(path[1]):
        # Skip projects outside of fence by marking as cloned.
        if path not in entity_clone.cloned:
            entity_clone.cloned.append(path)

    if path in entity_clone.cloned:
        print('skip {}'.format('/'.join(path)))
        return

    print('clone {}'.format('/'.join(path)))
    entity_clone.cloned.append(path)

    url = makeurl(apiurl_source, path)
    entity = ET.parse(http_GET(url)).getroot()

    if sanitize:
        sanitize(entity)
    if clone:
        clone(apiurl_source, apiurl_target, entity)

    url = makeurl(apiurl_target, path)
    http_PUT(url, data=ET.tostring(entity))

    if after:
        after(apiurl_source, apiurl_target, entity)
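
A hypothetical invocation of entity_clone, assuming the surrounding module
also defines project_fence; both API URLs and the project name are
placeholders:

def sanitize_meta(meta):
    # drop maintainers so they are not copied to the target instance
    for person in meta.findall('person'):
        meta.remove(person)

entity_clone('https://api.src.example', 'https://api.dst.example',
             ['source', 'openSUSE:Factory', '_meta'], sanitize=sanitize_meta)
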
Example #21
def build(apiurl, project, repo, arch, package):
    root = None
    try:
        url = makeurl(apiurl, ['build', project, repo, arch, package])
        root = http_GET(url).read()
    except urllib2.HTTPError as e:
        print('ERROR in URL %s [%s]' % (url, e))
    def set_prj_pseudometa(self, project, meta):
        """
        Sets project description to the YAML of the provided object
        :param project: project to save into
        :param meta: data to save
        """

        # Get current metadata
        url = make_meta_url('prj', project, self.apiurl)
        root = ET.parse(http_GET(url)).getroot()
        # Find description
        description = root.find('description')
        # Order the requests and replace it with yaml
        meta['requests'] = sorted(meta['requests'], key=lambda x: x['id'])
        description.text = yaml.dump(meta)
        # Find title
        title = root.find('title')
        # Put something nice into title as well
        new_title = []
        for request in meta['requests']:
            new_title.append(request['package'])
        nt = ', '.join(sorted(new_title))
        title.text = nt[:240]
        # Write XML back
        url = make_meta_url('prj', project, self.apiurl, force=True)
        http_PUT(url, data=ET.tostring(root))
Example #23
def issue_tracker_by_url(apiurl, tracker_url):
    url = makeurl(apiurl, ['issue_trackers'])
    root = ETL.parse(http_GET(url)).getroot()
    if not tracker_url.endswith('/'):
        # All trackers are formatted with trailing slash.
        tracker_url += '/'
    return next(iter(root.xpath('issue-tracker[url[text()="{}"]]'.format(tracker_url)) or []), None)
    def get_package_list_from_repository(self, project, repository, arch, package):
        url = makeurl(self.apiurl, ('build', project, repository, arch, package))
        files = []
        try:
            binaries = ET.parse(http_GET(url)).getroot()
            for binary in binaries.findall('binary'):
                filename = binary.attrib['filename']
                mtime = int(binary.attrib['mtime'])

                result = re.match(r'(.*)-([^-]*)-([^-]*)\.([^-\.]+)\.rpm', filename)
                if not result:
                    if filename == 'rpmlint.log':
                        files.append((filename, '', '', mtime))
                    continue

                pname = result.group(1)
                if pname.endswith('-debuginfo') or pname.endswith('-debuginfo-32bit'):
                    continue
                if pname.endswith('-debugsource'):
                    continue
                if result.group(4) == 'src':
                    continue

                files.append((filename, pname, result.group(4), mtime))
        except urllib2.HTTPError:
            pass
            # print " - WARNING: Can't found list of packages (RPM) for %s in %s (%s, %s)" % (
            #     package, project, repository, arch)
        return files
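
The regex above splits an RPM file name into name, version, release and
architecture. A quick standalone check with a made-up file name:

import re

m = re.match(r'(.*)-([^-]*)-([^-]*)\.([^-\.]+)\.rpm',
             'vim-9.0.1-150500.1.x86_64.rpm')
print(m.groups())  # ('vim', '9.0.1', '150500.1', 'x86_64')
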
    def get_package_information(self, project, pkgname, rev=None):
        """
        Get the revision packagename and source project to copy from
        based on content provided
        :param project: the project we are having the package in
        :param pkgname: name of the package we want to identify
        :return dict ( project, package, revision, md5sum )
        """

        package_info = {}

        query = {
            'rev': rev
        }
        if rev:
            url = self.makeurl(['source', project, pkgname], query=query)
        else:
            url = self.makeurl(['source', project, pkgname])
        content = http_GET(url)
        root = ET.parse(content).getroot()
        package_info['dir_srcmd5'] = root.attrib['srcmd5']

        linkinfo = root.find('linkinfo')
        package_info['srcmd5'] = linkinfo.attrib['srcmd5']
        package_info['rev'] = linkinfo.attrib.get('rev', None)
        package_info['project'] = linkinfo.attrib['project']
        package_info['package'] = linkinfo.attrib['package']

        return package_info
    def get_open_requests(self):
        """
        Get all requests with open review for staging project
        that are not yet included in any staging project
        :return list of pending open review requests
        """

        requests = []

        # xpath query, using the -m, -r, -s options
        where = "@by_group='{}'+and+@state='new'".format(self.cstaging_group)
        projects = [format(self.project)]
        if self.cnonfree:
            projects.append(self.cnonfree)
        targets = ["target[@project='{}']".format(p) for p in projects]

        query = "match=state/@name='review'+and+review[{}]+and+({})".format(
            where, '+or+'.join(targets))
        url = self.makeurl(['search', 'request'], query)
        f = http_GET(url)
        root = ET.parse(f).getroot()

        for rq in root.findall('request'):
            requests.append(rq)
        return requests
    def old_md5(self, src_project, tgt_project, src_package, rev):
        """Recollect old MD5 for a package."""
        # XXX TODO - instead of fixing the limit, use endtime to make
        # sure that we have the correct time frame.
        limit = 20
        query = {
            'package': src_package,
            # 'code': 'succeeded',
            'limit': limit,
        }

        repositories = self.get_project_repos(src_project,
                                              tgt_project,
                                              src_package, rev)

        srcmd5_list = []
        for repository, archs in repositories:
            for arch, status in archs:
                if srcmd5_list:
                    break
                if status not in ('succeeded', 'outdated'):
                    continue

                url = makeurl(self.apiurl, ('build', src_project,
                                            repository, arch,
                                            '_jobhistory'),
                              query=query)
                try:
                    root = ET.parse(http_GET(url)).getroot()
                    srcmd5_list = [e.get('srcmd5') for e in root.findall('jobhist')]
                except urllib2.HTTPError, e:
                    print('ERROR in URL %s [%s]' % (url, e))
    def find_request_project(self, source_project, newcand):
        """
        Look up the source project by its name and return the SR#(s)
        :param source_project: name of the source project
        :param newcand: the review state of staging-group must be new
        """

        query = 'states=new,review&project={}&view=collection'.format(self.api.project)
        url = makeurl(self.api.apiurl, ['request'], query)
        f = http_GET(url)
        root = ET.parse(f).getroot()

        ret = None
        for sr in root.findall('request'):
            # ensure the staging tool doesn't pick an already-processed request again
            if newcand:
                staging_group_states = [review.get('state') for review in sr.findall('review') if review.get('by_group') == self.api.cstaging_group]
                if 'new' not in staging_group_states:
                    continue
            for act in sr.findall('action'):
                src = act.find('source')
                if src is not None and src.get('project') == source_project:
                    request = int(sr.attrib['id'])
                    state = sr.find('state').get('name')
                    self.srs[request] = {'project': self.api.project, 'state': state}
                    ret = True

        return ret
    def get_filelist_for_package(self, pkgname, project, extension=None):
        """
        Get a list of files inside a package container
        :param package: the base packagename to be linked to
        :param project: Project to verify
        :param extension: Limit the file list to files with this extension
        """

        filelist = []
        query = {
            'extension': extension
        }

        if extension:
            url = self.makeurl(['source', project, pkgname], query=query)
        else:
            url = self.makeurl(['source', project, pkgname])
        try:
            content = http_GET(url)
            for entry in ET.parse(content).getroot().findall('entry'):
                filelist.append(entry.attrib['name'])
        except urllib2.HTTPError as err:
            if err.code == 404:
                # The package we were supposed to query does not exist;
                # we can pass this up and return the empty filelist
                pass
        return filelist
Example #30
def package_binary_list(apiurl, project, repository, arch, package=None, strip_multibuild=True, exclude_src_debug=False):
    path = ['build', project, repository, arch]
    if package:
        path.append(package)
    url = makeurl(apiurl, path, {'view': 'binaryversions'})
    root = ET.parse(http_GET(url)).getroot()

    package_binaries = []
    binary_map = {} # last duplicate wins
    for binary_list in root:
        package = binary_list.get('package')
        if strip_multibuild:
            package = package.split(':', 1)[0]

        for binary in binary_list:
            filename = binary.get('name')
            result = re.match(RPM_REGEX, filename)
            if not result:
                continue

            binary = BinaryParsed(package, result.group('filename'),
                                  result.group('name'), result.group('arch'))
            if exclude_src_debug and binary_src_debug(binary):
                continue

            package_binaries.append(binary)
            binary_map[result.group('filename')] = package

    return package_binaries, binary_map
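
RPM_REGEX and BinaryParsed come from the surrounding module; plausible
definitions matching how they are used above (the exact pattern in osclib
may differ, but these named groups are the ones the code reads):

from collections import namedtuple
import re

BinaryParsed = namedtuple('BinaryParsed', ('package', 'filename', 'name', 'arch'))
RPM_REGEX = (r'(?P<filename>(?P<name>.*)-(?P<version>[^-]+)-(?P<release>[^-]+)'
             r'\.(?P<arch>[^-\.]+)\.rpm)')
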
Example #31
    def update_and_solve_target(self,
                                apiurl,
                                target_project,
                                target_config,
                                main_repo,
                                opts,
                                skip_release=False):
        print('[{}] {}/{}: update and solve'.format(opts.scope, opts.project,
                                                    main_repo))

        group = target_config.get('pkglistgen-group', '000package-groups')
        product = target_config.get('pkglistgen-product', '000product')
        release = target_config.get('pkglistgen-release',
                                    '000release-packages')

        url = makeurl(apiurl, ['source', opts.project])
        packages = ET.parse(http_GET(url)).getroot()
        if packages.find('entry[@name="{}"]'.format(product)) is None:
            if not self.options.dry:
                undelete_package(apiurl, opts.project, product, 'revive')
            # TODO disable build.
            print('{} undeleted, skip dvd until next cycle'.format(product))
            return
        elif not opts.force:
            root = ET.fromstringlist(
                show_results_meta(apiurl,
                                  opts.project,
                                  product,
                                  repository=[main_repo],
                                  multibuild=True))
            if len(root.xpath('result[@state="building"]')) or len(
                    root.xpath('result[@state="dirty"]')):
                print('{}/{} build in progress'.format(opts.project, product))
                return

        checkout_list = [group, product]
        if not skip_release:
            checkout_list.append(release)

            if packages.find('entry[@name="{}"]'.format(release)) is None:
                if not self.options.dry:
                    undelete_package(apiurl, opts.project, product, 'revive')
                print(
                    '{} undeleted, skip dvd until next cycle'.format(release))
                return

        # Cache dir specific to hostname and project.
        host = urlparse.urlparse(apiurl).hostname
        cache_dir = save_cache_path('opensuse-packagelists', host,
                                    opts.project)

        if os.path.exists(cache_dir):
            shutil.rmtree(cache_dir)
        os.makedirs(cache_dir)

        group_dir = os.path.join(cache_dir, group)
        product_dir = os.path.join(cache_dir, product)
        release_dir = os.path.join(cache_dir, release)

        for package in checkout_list:
            checkout_package(apiurl,
                             opts.project,
                             package,
                             expand_link=True,
                             prj_dir=cache_dir)

        if not skip_release:
            self.unlink_all_except(release_dir)
        self.unlink_all_except(product_dir)
        self.copy_directory_contents(
            group_dir, product_dir,
            ['supportstatus.txt', 'groups.yml', 'package-groups.changes'])
        self.change_extension(product_dir, '.spec.in', '.spec')

        self.options.input_dir = group_dir
        self.options.output_dir = product_dir
        self.postoptparse()

        print('-> do_update')
        self.do_update('update', opts)

        print('-> do_solve')
        opts.ignore_unresolvable = bool(
            target_config.get('pkglistgen-ignore-unresolvable'))
        opts.ignore_recommended = bool(
            target_config.get('pkglistgen-ignore-recommended'))
        opts.include_suggested = bool(
            target_config.get('pkglistgen-include-suggested'))
        opts.locale = target_config.get('pkglistgen-locale')
        opts.locales_from = target_config.get('pkglistgen-locales-from')
        self.do_solve('solve', opts)

        delete_products = target_config.get('pkglistgen-delete-products',
                                            '').split(' ')
        self.unlink_list(product_dir, delete_products)

        print('-> product service')
        for product_file in glob.glob(os.path.join(product_dir, '*.product')):
            print(
                subprocess.check_output(
                    [PRODUCT_SERVICE, product_file, product_dir,
                     opts.project]))

        delete_kiwis = target_config.get(
            'pkglistgen-delete-kiwis-{}'.format(opts.scope), '').split(' ')
        self.unlink_list(product_dir, delete_kiwis)

        spec_files = glob.glob(os.path.join(product_dir, '*.spec'))
        if skip_release:
            self.unlink_list(None, spec_files)
        else:
            self.move_list(spec_files, release_dir)

        self.multibuild_from_glob(product_dir, '*.kiwi')
        self.build_stub(product_dir, 'kiwi')
        self.commit_package(product_dir)

        if not skip_release:
            self.multibuild_from_glob(release_dir, '*.spec')
            self.build_stub(release_dir, 'spec')
            self.commit_package(release_dir)
Example #32
def project_list_prefix(apiurl, prefix):
    """Get a list of project with the same prefix."""
    query = {'match': 'starts-with(@name, "{}")'.format(prefix)}
    url = makeurl(apiurl, ['search', 'project', 'id'], query)
    root = ETL.parse(http_GET(url)).getroot()
    return root.xpath('project/@name')
def search(apiurl, path, xpath, query=None):
    # avoid a shared mutable default; copy so the caller's dict is untouched
    query = dict(query or {})
    query['match'] = xpath
    url = makeurl(apiurl, ['search', path], query)
    return ETL.parse(http_GET(url)).getroot()
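
A usage sketch for search(); the xpath and the project name are
illustrative:

root = search(apiurl, 'request',
              "state/@name='new' and action/target/@project='openSUSE:Factory'")
for request in root.findall('request'):
    print(request.get('id'))
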
    def check_depinfo_ring(self, prj, nextprj):
        url = makeurl(self.api.apiurl, ['build', prj, '_result'])
        root = ET.parse(http_GET(url)).getroot()
        for repo in root.findall('result'):
            repostate = repo.get('state', 'missing')
            if repostate not in ['unpublished', 'published'] or repo.get(
                    'dirty', 'false') == 'true':
                print('Repo {}/{} is in state {}'.format(
                    repo.get('project'), repo.get('repository'), repostate))
                return False
            for package in repo.findall('status'):
                code = package.get('code')
                if code not in ['succeeded', 'excluded', 'disabled']:
                    print('Package {}/{}/{} is {}'.format(
                        repo.get('project'), repo.get('repository'),
                        package.get('package'), code))
                    return False

        self.find_inner_ring_links(prj)
        for arch in ['x86_64', 'ppc64le']:
            self.fill_pkgdeps(prj, 'standard', arch)

            if prj == '{}:1-MinimalX'.format(self.api.crings):
                url = makeurl(self.api.apiurl, [
                    'build', prj, 'images', arch, 'Test-DVD-' + arch,
                    '_buildinfo'
                ])
                root = ET.parse(http_GET(url)).getroot()
                for bdep in root.findall('bdep'):
                    if 'name' not in bdep.attrib:
                        continue
                    b = bdep.attrib['name']
                    if b not in self.bin2src:
                        continue
                    b = self.bin2src[b]
                    self.pkgdeps[b] = 'MYdvd'

            if prj == '{}:2-TestDVD'.format(self.api.crings):
                url = makeurl(self.api.apiurl, [
                    'build', prj, 'images', arch, 'Test-DVD-' + arch,
                    '_buildinfo'
                ])
                root = ET.parse(http_GET(url)).getroot()
                for bdep in root.findall('bdep'):
                    if 'name' not in bdep.attrib:
                        continue
                    b = bdep.attrib['name']
                    if b not in self.bin2src:
                        continue
                    b = self.bin2src[b]
                    self.pkgdeps[b] = 'MYdvd2'

        if prj == '{}:0-Bootstrap'.format(self.api.crings):
            url = makeurl(self.api.apiurl,
                          ['build', prj, 'standard', '_buildconfig'])
            for line in http_GET(url).read().split('\n'):
                if line.startswith('Preinstall:') or line.startswith(
                        'Support:'):
                    for prein in line.split(':')[1].split():
                        if prein not in self.bin2src:
                            continue
                        b = self.bin2src[prein]
                        self.pkgdeps[b] = 'MYinstall'

        for source in self.sources:
            if source not in self.pkgdeps and source not in self.links:
                print('osc rdelete -m cleanup {} {}'.format(prj, source))
                if nextprj:
                    print('osc linkpac {} {} {}'.format(
                        self.api.project, source, nextprj))
def package_source_changed(apiurl, project, package):
    url = makeurl(apiurl, ['source', project, package, '_history'], {'limit': 1})
    root = ETL.parse(http_GET(url)).getroot()
    return datetime.fromtimestamp(int(root.find('revision/time').text), timezone.utc).replace(tzinfo=None)
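
The returned timestamp is converted to UTC and then stripped of its tzinfo,
so it compares directly against naive UTC datetimes. A hypothetical caller:

from datetime import datetime

changed = package_source_changed(apiurl, 'openSUSE:Factory', 'osc')
age = datetime.utcnow() - changed  # both naive UTC, so subtraction is safe
print('last source change {} ago'.format(age))
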
Example #36
def repository_arch_state(apiurl, project, repository, arch):
    # just checking the mtimes of the repository's binaries
    url = makeurl(apiurl, ['build', project, repository, arch, '_repository'])
    from osclib.util import sha1_short
    return sha1_short(http_GET(url).read())
Example #37
    def update_and_solve_target(self, api, target_project, target_config, main_repo,
                                project, scope, force, no_checkout,
                                only_release_packages, stop_after_solve):
        self.all_architectures = target_config.get('pkglistgen-archs').split(' ')
        self.repos = self.expand_repos(project, main_repo)
        print('[{}] {}/{}: update and solve'.format(scope, project, main_repo))

        group = target_config.get('pkglistgen-group', '000package-groups')
        product = target_config.get('pkglistgen-product', '000product')
        release = target_config.get('pkglistgen-release', '000release-packages')
        oldrepos = target_config.get('pkglistgen-repos', '000update-repos')

        url = api.makeurl(['source', project])
        packages = ET.parse(http_GET(url)).getroot()
        if packages.find('entry[@name="{}"]'.format(product)) is None:
            if not self.dry_run:
                undelete_package(api.apiurl, project, product, 'revive')
            # TODO disable build.
            print('{} undeleted, skip dvd until next cycle'.format(product))
            return
        elif not force:
            root = ET.fromstringlist(show_results_meta(api.apiurl, project, product,
                                                       repository=[main_repo], multibuild=True))
            if len(root.xpath('result[@state="building"]')) or len(root.xpath('result[@state="dirty"]')):
                print('{}/{} build in progress'.format(project, product))
                return

        drop_list = api.item_exists(project, oldrepos)
        checkout_list = [group, product, release]
        if drop_list and not only_release_packages:
            checkout_list.append(oldrepos)

        if packages.find('entry[@name="{}"]'.format(release)) is None:
            if not self.dry_run:
                undelete_package(api.apiurl, project, release, 'revive')
            print('{} undeleted, skip dvd until next cycle'.format(release))
            return

        # Cache dir specific to hostname and project.
        host = urlparse(api.apiurl).hostname
        cache_dir = CacheManager.directory('pkglistgen', host, project)

        if not no_checkout:
            if os.path.exists(cache_dir):
                shutil.rmtree(cache_dir)
            os.makedirs(cache_dir)

        group_dir = os.path.join(cache_dir, group)
        product_dir = os.path.join(cache_dir, product)
        release_dir = os.path.join(cache_dir, release)
        oldrepos_dir = os.path.join(cache_dir, oldrepos)

        for package in checkout_list:
            if no_checkout:
                print('Skipping checkout of {}/{}'.format(project, package))
                continue
            checkout_package(api.apiurl, project, package, expand_link=True, prj_dir=cache_dir)

        file_utils.unlink_all_except(release_dir, ['weakremovers.inc'])
        if not only_release_packages:
            file_utils.unlink_all_except(product_dir)
        file_utils.copy_directory_contents(group_dir, product_dir,
                                     ['supportstatus.txt', 'groups.yml',
                                      'reference-unsorted.yml', 'reference-summary.yml',
                                      'package-groups.changes'])
        file_utils.change_extension(product_dir, '.spec.in', '.spec')
        file_utils.change_extension(product_dir, '.product.in', '.product')

        self.input_dir = group_dir
        self.output_dir = product_dir

        print('-> do_update')
        # make sure we only calculate existing architectures
        self.filter_architectures(target_archs(api.apiurl, project, main_repo))
        self.update_repos(self.filtered_architectures)

        if only_release_packages:
            self.load_all_groups()
            self.write_group_stubs()
        else:
            summary = self.solve_project(ignore_unresolvable=str2bool(target_config.get('pkglistgen-ignore-unresolvable')),
                                         ignore_recommended=str2bool(target_config.get('pkglistgen-ignore-recommended')),
                                         locale=target_config.get('pkglistgen-locale'),
                                         locales_from=target_config.get('pkglistgen-locales-from'))

        if stop_after_solve:
            return

        if drop_list:
            weakremovers_file = os.path.join(release_dir, 'weakremovers.inc')
            self.create_weakremovers(project, target_config, oldrepos_dir, output=open(weakremovers_file, 'w'))

        delete_products = target_config.get('pkglistgen-delete-products', '').split(' ')
        file_utils.unlink_list(product_dir, delete_products)

        print('-> product service')
        product_version = attribute_value_load(api.apiurl, project, 'ProductVersion')
        if not product_version:
            # for stagings the product version doesn't matter (I hope)
            product_version = '1'
        for product_file in glob.glob(os.path.join(product_dir, '*.product')):
            self.replace_product_version(product_file, product_version)
            print(subprocess.check_output(
                [PRODUCT_SERVICE, product_file, product_dir, project], encoding='utf-8'))

        for delete_kiwi in target_config.get('pkglistgen-delete-kiwis-{}'.format(scope), '').split(' '):
            delete_kiwis = glob.glob(os.path.join(product_dir, delete_kiwi))
            file_utils.unlink_list(product_dir, delete_kiwis)
        if scope == 'staging':
            self.strip_medium_from_staging(product_dir)

        spec_files = glob.glob(os.path.join(product_dir, '*.spec'))
        file_utils.move_list(spec_files, release_dir)
        inc_files = glob.glob(os.path.join(group_dir, '*.inc'))
        # filter special inc file
        inc_files = filter(lambda file: file.endswith('weakremovers.inc'), inc_files)
        file_utils.move_list(inc_files, release_dir)

        # do not overwrite weakremovers.inc if it exists
        # we will commit there afterwards if needed
        if os.path.exists(os.path.join(group_dir, 'weakremovers.inc')) and \
           not os.path.exists(os.path.join(release_dir, 'weakremovers.inc')):
            file_utils.move_list([os.path.join(group_dir, 'weakremovers.inc')], release_dir)

        file_utils.multibuild_from_glob(release_dir, '*.spec')
        self.build_stub(release_dir, 'spec')
        self.commit_package(release_dir)

        if only_release_packages:
            return

        file_utils.multibuild_from_glob(product_dir, '*.kiwi')
        self.build_stub(product_dir, 'kiwi')
        self.commit_package(product_dir)

        error_output = b''
        reference_summary = os.path.join(group_dir, 'reference-summary.yml')
        if os.path.isfile(reference_summary):
            summary_file = os.path.join(product_dir, 'summary.yml')
            with open(summary_file, 'w') as f:
                f.write('# Summary of packages in groups')
                for group in sorted(summary):
                    # the unsorted group should appear filtered by
                    # unneeded.yml - so we need the content of unsorted.yml
                    # not unsorted.group (this grew a little unnaturally)
                    if group == 'unsorted':
                        continue
                    f.write('\n' + group + ':\n')
                    for package in sorted(summary[group]):
                        f.write('  - ' + package + '\n')

            try:
                error_output += subprocess.check_output(['diff', '-u', reference_summary, summary_file])
            except subprocess.CalledProcessError as e:
                error_output += e.output
            reference_unsorted = os.path.join(group_dir, 'reference-unsorted.yml')
            unsorted_file = os.path.join(product_dir, 'unsorted.yml')
            try:
                error_output += subprocess.check_output(['diff', '-u', reference_unsorted, unsorted_file])
            except subprocess.CalledProcessError as e:
                error_output += e.output

        if len(error_output) > 0:
            self.logger.error('Difference in yml:\n' + error_output.decode('utf-8'))
            return True
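
str2bool above comes from the project's helper utilities; a conventional
sketch (the actual version may accept a slightly different set of strings):

def str2bool(value):
    # treat None and unrecognized strings as False
    return value is not None and value.lower() in ('1', 't', 'true', 'yes')
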
    def assertOBS(self):
        url = makeurl(self.apiurl, ['about'])
        root = ET.parse(http_GET(url)).getroot()
        self.assertEqual(root.tag, 'about')

    def _project_maintainer(self, request):
        """Get the list of maintainers of the target project."""
        url = makeurl(self.apiurl, ('source', request.tgt_project, '_meta'))
        root = ET.parse(http_GET(url)).getroot()
        persons = [e.get('userid') for e in root.findall('.//person') if e.get('role') == 'maintainer']
        return persons
    parser.add_argument('-d', '--debug', action='store_true', default=False,
                        help='enable debug information')

    args = parser.parse_args()

    osc.conf.get_config(override_apiurl=args.apiurl)
    osc.conf.config['debug'] = args.debug

    apiurl = osc.conf.config['apiurl']

    if apiurl.endswith('suse.de'):
        amqp_prefix = 'suse'
        openqa_url = 'https://openqa.suse.de'
    else:
        amqp_prefix = 'opensuse'
        openqa_url = 'https://openqa.opensuse.org'

    logging.basicConfig(level=logging.INFO)

    listener = Listener(amqp_prefix, openqa_url)
    url = makeurl(apiurl, ['search', 'project', 'id'], {'match': 'attribute/@name="OSRT:OpenQAMapping"'})
    f = http_GET(url)
    root = ET.parse(f).getroot()
    for entry in root.findall('project'):
        listener.add(Project(entry.get('name')))

    try:
        listener.run(runtime=10800)
    except KeyboardInterrupt:
        listener.stop()
Example #41
    def check_arch(self, architecture):
        url = makeurl(self.apiurl,
                      ['build', self.project, self.repository, architecture],
                      {'view': 'status'})
        root = ET.parse(http_GET(url)).getroot()
        return root.get('code') == 'finished'

                url = makeurl(self.apiurl, ('build', src_project,
                                            repository, arch,
                                            '_jobhistory'),
                              query=query)
                try:
                    root = ET.parse(http_GET(url)).getroot()
                    srcmd5_list = [e.get('srcmd5') for e in root.findall('jobhist')]
                except urllib2.HTTPError as e:
                    print('ERROR in URL %s [%s]' % (url, e))

        md5_set = set()
        for srcmd5 in srcmd5_list:
            query = {
                'expand': 1,
                'rev': srcmd5,
            }
            url = makeurl(self.apiurl, ('source', src_project, src_package), query=query)
            root = ET.parse(http_GET(url)).getroot()
            md5_set.add(root.find('linkinfo').get('srcmd5'))

        return md5_set

    def check_specs(self, request_id=None, request=None):
        """Check a single request and load the different SPECs files.

        This method have side effects, it can ACCEPT or DECLINE
        requests after some checks.

        """

        requests = []

        if request_id:
Example #43
def fileinfo_ext(apiurl, project, repo, arch, package, filename):
    url = makeurl(apiurl, ['build', project, repo, arch, package, filename],
                  {'view': 'fileinfo_ext'})
    return ET.parse(http_GET(url)).getroot()
Example #44
    def get_packagelist(self, project, sle_pkglist=None, by_project=True):
        """
        Return the package info of a project.
        If the latest package is from an incident then return the incident
        package.
        """

        pkglist = {}
        packageinfo = {}
        query = {'expand': 1}
        root = ET.parse(
            http_GET(makeurl(self.apiurl, ['source', project],
                             query=query))).getroot()
        for i in root.findall('entry'):
            pkgname = i.get('name')
            orig_project = i.get('originproject')
            is_incidentpkg = False
            # Metapackage should not be selected
            if pkgname.startswith('00') or\
                    pkgname.startswith('_') or\
                    pkgname.startswith('patchinfo.') or\
                    pkgname.startswith('skelcd-') or\
                    pkgname.startswith('installation-images') or\
                    pkgname.startswith('Leap-release') or\
                    pkgname.endswith('-mini') or\
                    '-mini.' in pkgname:
                continue
            # Ugly hack for packages that have a dot in the source package
            # name, e.g. go1.x incidents, where the name would be go1.x.xxx
            if '.' in pkgname and re.match(r'[0-9]+$', pkgname.split('.')[-1]) and \
                    orig_project.startswith('SUSE:') and orig_project.endswith(':Update'):
                is_incidentpkg = True
                if pkgname.startswith('go1') or\
                        pkgname.startswith('bazel0') or\
                        pkgname.startswith('dotnet') or\
                        pkgname.startswith('rust1') or\
                        pkgname.startswith('ruby2'):
                    if not (pkgname.count('.') > 1):
                        is_incidentpkg = False

            # If an incident found then update the package origin info
            if is_incidentpkg:
                orig_name = re.sub(r'\.[0-9]+$', '', pkgname)
                incident_number = int(pkgname.split('.')[-1])
                if orig_name in pkglist and pkglist[orig_name][
                        'Project'] == orig_project:
                    if re.match(r'[0-9]+$',
                                pkglist[orig_name]['Package'].split('.')[-1]):
                        old_incident_number = int(
                            pkglist[orig_name]['Package'].split('.')[-1])
                        if incident_number > old_incident_number:
                            pkglist[orig_name]['Package'] = pkgname
                    else:
                        pkglist[orig_name]['Package'] = pkgname
            else:
                pkglist[pkgname] = {
                    'Project': orig_project,
                    'Package': pkgname
                }

            if sle_pkglist and pkgname in sle_pkglist and not orig_project.startswith(
                    'openSUSE'):
                pkglist[pkgname] = {
                    'Project': sle_pkglist[pkgname]['Project'],
                    'Package': sle_pkglist[pkgname]['Package']
                }

        if by_project:
            for pkg in pkglist.keys():
                if pkglist[pkg]['Project'].startswith(
                        'SUSE:') and self.is_sle_specific(pkg):
                    continue
                if pkglist[pkg]['Project'] not in packageinfo:
                    packageinfo[pkglist[pkg]['Project']] = []
                if pkglist[pkg]['Package'] not in packageinfo[pkglist[pkg]
                                                              ['Project']]:
                    packageinfo[pkglist[pkg]['Project']].append(
                        pkglist[pkg]['Package'])
            return packageinfo

        return pkglist
def builddepinfo(apiurl, project, repo, arch, order=False):
    query = {}
    if order:
        query['view'] = 'order'
    url = makeurl(apiurl, ['build', project, repo, arch, '_builddepinfo'], query)
    return ETL.parse(http_GET(url)).getroot()
Example #46
    def update_and_solve_target(self, api, target_project, target_config, main_repo,
                                project, scope, force, no_checkout,
                                only_release_packages, stop_after_solve):
        self.all_architectures = target_config.get('pkglistgen-archs').split(' ')
        self.use_newest_version = str2bool(target_config.get('pkglistgen-use-newest-version', 'False'))
        self.repos = self.expand_repos(project, main_repo)
        logging.debug('[{}] {}/{}: update and solve'.format(scope, project, main_repo))

        group = target_config.get('pkglistgen-group', '000package-groups')
        product = target_config.get('pkglistgen-product', '000product')
        release = target_config.get('pkglistgen-release', '000release-packages')
        oldrepos = target_config.get('pkglistgen-repos', '000update-repos')

        url = api.makeurl(['source', project])
        packages = ET.parse(http_GET(url)).getroot()
        if packages.find('entry[@name="{}"]'.format(product)) is None:
            if not self.dry_run:
                undelete_package(api.apiurl, project, product, 'revive')
            # TODO disable build.
            logging.info('{} undeleted, skip dvd until next cycle'.format(product))
            return
        elif not force:
            root = ET.fromstringlist(show_results_meta(api.apiurl, project, product,
                                                       repository=[main_repo], multibuild=True))
            if len(root.xpath('result[@state="building"]')) or len(root.xpath('result[@state="dirty"]')):
                logging.info('{}/{} build in progress'.format(project, product))
                return

        drop_list = api.item_exists(project, oldrepos)
        checkout_list = [group, product, release]
        if drop_list and not only_release_packages:
            checkout_list.append(oldrepos)

        if packages.find('entry[@name="{}"]'.format(release)) is None:
            if not self.dry_run:
                undelete_package(api.apiurl, project, release, 'revive')
            logging.info('{} undeleted, skip dvd until next cycle'.format(release))
            return

        # Cache dir specific to hostname and project.
        host = urlparse(api.apiurl).hostname
        cache_dir = CacheManager.directory('pkglistgen', host, project)

        if not no_checkout:
            if os.path.exists(cache_dir):
                shutil.rmtree(cache_dir)
            os.makedirs(cache_dir)

        group_dir = os.path.join(cache_dir, group)
        product_dir = os.path.join(cache_dir, product)
        release_dir = os.path.join(cache_dir, release)
        oldrepos_dir = os.path.join(cache_dir, oldrepos)

        self.input_dir = group_dir
        self.output_dir = product_dir

        for package in checkout_list:
            if no_checkout:
                logging.debug('Skipping checkout of {}/{}'.format(project, package))
                continue
            checkout_package(api.apiurl, project, package, expand_link=True,
                             prj_dir=cache_dir, outdir=os.path.join(cache_dir, package))

        # print('RET', self.handle_package_diff(project, f"{group_dir}/summary-staging.txt", f"{product_dir}/summary-staging.txt"))

        file_utils.unlink_all_except(release_dir, ['weakremovers.inc'])
        if not only_release_packages:
            file_utils.unlink_all_except(product_dir)
        ignore_list = ['supportstatus.txt', 'summary-staging.txt', 'package-groups.changes']
        ignore_list += self.group_input_files()
        file_utils.copy_directory_contents(group_dir, product_dir, ignore_list)
        file_utils.change_extension(product_dir, '.spec.in', '.spec')
        file_utils.change_extension(product_dir, '.product.in', '.product')

        logging.debug('-> do_update')
        # make sure we only calculate existing architectures
        self.filter_architectures(target_archs(api.apiurl, project, main_repo))
        self.update_repos(self.filtered_architectures)

        if only_release_packages:
            self.load_all_groups()
            self.write_group_stubs()
        else:
            summary = self.solve_project(ignore_unresolvable=str2bool(target_config.get('pkglistgen-ignore-unresolvable')),
                                         ignore_recommended=str2bool(
                                             target_config.get('pkglistgen-ignore-recommended')),
                                         locale=target_config.get('pkglistgen-locale'),
                                         locales_from=target_config.get('pkglistgen-locales-from'))

        if stop_after_solve:
            return

        if drop_list and not only_release_packages:
            weakremovers_file = os.path.join(release_dir, 'weakremovers.inc')
            try:
                self.create_weakremovers(project, target_config, oldrepos_dir, output=open(weakremovers_file, 'w'))
            except MismatchedRepoException:
                logging.error("Failed to create weakremovers.inc due to mismatch in repos - project most likey started building again.")
                return

        delete_products = target_config.get('pkglistgen-delete-products', '').split(' ')
        file_utils.unlink_list(product_dir, delete_products)

        logging.debug('-> product service')
        product_version = attribute_value_load(api.apiurl, project, 'ProductVersion')
        if not product_version:
            # for stagings the product version doesn't matter (I hope)
            product_version = '1'
        for product_file in glob.glob(os.path.join(product_dir, '*.product')):
            self.replace_product_version(product_file, product_version)
            logging.debug(subprocess.check_output(
                [PRODUCT_SERVICE, product_file, product_dir, project], encoding='utf-8'))

        for delete_kiwi in target_config.get('pkglistgen-delete-kiwis-{}'.format(scope), '').split(' '):
            delete_kiwis = glob.glob(os.path.join(product_dir, delete_kiwi))
            file_utils.unlink_list(product_dir, delete_kiwis)
        if scope == 'staging':
            self.strip_medium_from_staging(product_dir)

        spec_files = glob.glob(os.path.join(product_dir, '*.spec'))
        file_utils.move_list(spec_files, release_dir)
        inc_files = glob.glob(os.path.join(group_dir, '*.inc'))
        # filter special inc file
        inc_files = filter(lambda file: file.endswith('weakremovers.inc'), inc_files)
        file_utils.move_list(inc_files, release_dir)

        # do not overwrite weakremovers.inc if it exists
        # we will commit there afterwards if needed
        if os.path.exists(os.path.join(group_dir, 'weakremovers.inc')) and \
           not os.path.exists(os.path.join(release_dir, 'weakremovers.inc')):
            file_utils.move_list([os.path.join(group_dir, 'weakremovers.inc')], release_dir)

        file_utils.multibuild_from_glob(release_dir, '*.spec')
        self.build_stub(release_dir, 'spec')
        self.commit_package(release_dir)

        if only_release_packages:
            return

        file_utils.multibuild_from_glob(product_dir, '*.kiwi')
        self.build_stub(product_dir, 'kiwi')

        reference_summary = os.path.join(group_dir, f'summary-{scope}.txt')
        if os.path.isfile(reference_summary):
            summary_file = os.path.join(product_dir, f'summary-{scope}.txt')
            output = []
            for group in summary:
                for package in sorted(summary[group]):
                    output.append(f'{package}:{group}')

            with open(summary_file, 'w') as f:
                for line in sorted(output):
                    f.write(line + '\n')

        self.commit_package(product_dir)

        if os.path.isfile(reference_summary):
            return self.handle_package_diff(project, reference_summary, summary_file)