Exemplo n.º 1
0
    def commit_files(self, prj, pkg, files, message):
        """Commit a set of files to OBS.

        Args:
            prj: target project name.
            pkg: target package name.
            files: iterable of (filepath, commit_flag) tuples; every file is
                listed in the commit, but only those with a true flag are
                uploaded.
            message: commit message.

        Raises:
            ObsError: if any of the underlying OBS requests fail.
        """
        query = {'cmd'    : 'commitfilelist',
                 'user'   : conf.get_apiurl_usr(self.apiurl),
                 'comment': message,
                 'keeplink': 1}
        url = core.makeurl(self.apiurl, ['source', prj, pkg], query=query)

        # Announce the file list with md5 sums so OBS knows what the commit
        # will contain.
        xml = "<directory>"
        for fpath, _ in files:
            # Hash over raw bytes: text mode would break md5 computation on
            # Python 3 and mangle line endings on some platforms.
            with open(fpath, 'rb') as fhandle:
                xml += '<entry name="%s" md5="%s"/>' % \
                       (os.path.basename(fpath), hexdigest(fhandle))
        xml += "</directory>"

        try:
            # First POST announces the file list, the PUTs upload payloads,
            # and the final POST completes the commit.
            self.core_http(core.http_POST, url, data=xml)
            for fpath, commit_flag in files:
                if commit_flag:
                    put_url = core.makeurl(
                        self.apiurl, ['source', prj, pkg,
                                      pathname2url(os.path.basename(fpath))],
                        query="rev=repository")
                    self.core_http(core.http_PUT, put_url, filep=fpath)
            self.core_http(core.http_POST, url, data=xml)
        except OSCError as err:  # was py2-only "except OSCError, err"
            raise ObsError("can't commit files to %s/%s: %s" % (prj, pkg, err))
Exemplo n.º 2
0
def entity_clone(apiurl_source, apiurl_target, path, sanitize=None, clone=None, after=None):
    """Clone one OBS entity from the source API to the target API, at most once.

    The set of already-handled paths is memoized on the function object so
    recursive invocations (via the `clone` callback) become no-ops.
    """
    if not hasattr(entity_clone, 'cloned'):
        entity_clone.cloned = []
    seen = entity_clone.cloned

    # Projects outside the fence are never cloned: mark them as done up-front.
    if path[0] == 'source' and not project_fence(path[1]):
        if path not in seen:
            seen.append(path)

    if path in seen:
        print('skip {}'.format('/'.join(path)))
        return

    print('clone {}'.format('/'.join(path)))
    seen.append(path)

    entity = ET.parse(http_GET(makeurl(apiurl_source, path))).getroot()

    if sanitize:
        sanitize(entity)
    if clone:
        clone(apiurl_source, apiurl_target, entity)

    http_PUT(makeurl(apiurl_target, path), data=ET.tostring(entity))

    if after:
        after(apiurl_source, apiurl_target, entity)
Exemplo n.º 3
0
    def check_depinfo_ring(self, prj, nextprj):
        """Check the build state of a ring project and report stale sources.

        Returns False if any repository or package is not in a good state;
        otherwise prints `osc rdelete`/`osc linkpac` commands for sources
        that nothing depends on.
        """
        url = makeurl(self.api.apiurl, ['build', prj, '_result'])
        root = ET.parse(http_GET(url)).getroot()
        for repo in root.findall('result'):
            repostate = repo.get('state', 'missing')
            if repostate not in ['unpublished', 'published'] or repo.get('dirty', 'false') == 'true':
                print('Repo {}/{} is in state {}'.format(repo.get('project'), repo.get('repository'), repostate))
                return False
            for package in repo.findall('status'):
                code = package.get('code')
                if code not in ['succeeded', 'excluded', 'disabled']:
                    print('Package {}/{}/{} is {}'.format(repo.get('project'), repo.get('repository'), package.get('package'), code))
                    return False

        self.find_inner_ring_links(prj)
        for arch in self.api.cstaging_dvd_archs:
            self.fill_pkgdeps(prj, 'standard', arch)

            # Mark Test-DVD build dependencies so they are not reported as
            # removable below.
            if prj == '{}:1-MinimalX'.format(self.api.crings):
                self._mark_dvd_deps(prj, arch, 'MYdvd')
            if prj == '{}:2-TestDVD'.format(self.api.crings):
                self._mark_dvd_deps(prj, arch, 'MYdvd2')

        if prj == '{}:0-Bootstrap'.format(self.api.crings):
            url = makeurl(self.api.apiurl, ['build', prj, 'standard', '_buildconfig'])
            for line in http_GET(url).read().split('\n'):
                if line.startswith('Preinstall:') or line.startswith('Support:'):
                    for prein in line.split(':')[1].split():
                        if prein not in self.bin2src:
                            continue
                        b = self.bin2src[prein]
                        self.pkgdeps[b] = 'MYinstall'

        for source in self.sources:
            if source not in self.pkgdeps and source not in self.links:
                if source.startswith('texlive-specs-'): # XXX: texlive bullshit packaging
                    continue
                print('osc rdelete -m cleanup {} {}'.format(prj, source))
                if nextprj:
                    # BUG FIX: .format() used to be applied to the result of
                    # print() (None), which raised AttributeError at runtime.
                    print('osc linkpac {} {} {}'.format(self.api.project, source, nextprj))

    def _mark_dvd_deps(self, prj, arch, tag):
        """Tag the sources of Test-DVD image build dependencies in self.pkgdeps."""
        url = makeurl(self.api.apiurl, ['build', prj, 'images', arch, 'Test-DVD-' + arch, '_buildinfo'])
        root = ET.parse(http_GET(url)).getroot()
        for bdep in root.findall('bdep'):
            if 'name' not in bdep.attrib:
                continue
            b = bdep.attrib['name']
            if b not in self.bin2src:
                continue
            self.pkgdeps[self.bin2src[b]] = tag
Exemplo n.º 4
0
    def getCommitLog(self, project, package, revision=None):
        """
        getCommitLog(project, package, revision=None) -> list

        Get commit log for package in project. If revision is set, get just the
        log for that revision.

        Each log is a tuple of the form (rev, srcmd5, version, time, user,
        comment)
        """
        u = core.makeurl(self.apiurl, ['source', project, package, '_history'])
        f = core.http_GET(u)
        root = ElementTree.parse(f).getroot()

        r = []
        revisions = root.findall('revision')
        revisions.reverse()  # newest entries first
        for node in revisions:
            rev = int(node.get('rev'))
            if revision and rev != int(revision):
                continue
            srcmd5 = node.find('srcmd5').text
            version = node.find('version').text
            user = node.find('user').text
            # The <comment> element may be absent or empty. The old code used
            # a bare `except:` which also hid unrelated errors.
            comment_node = node.find('comment')
            if comment_node is None or comment_node.text is None:
                comment = '<no message>'
            else:
                comment = comment_node.text
            t = time.localtime(int(node.find('time').text))
            t = time.strftime('%Y-%m-%d %H:%M:%S', t)

            r.append((rev, srcmd5, version, t, user, comment))
        return r
Exemplo n.º 5
0
def project_clone(apiurl_source, apiurl_target, project):
    """Clone a project's meta from the source API to the target API, dropping
    references to projects outside the fence and recursively cloning the
    projects that are referenced."""
    users_clone(apiurl_source, apiurl_target, project)
    project_workaround(project)

    # Write stripped version that does not include repos with path references.
    url = makeurl(apiurl_target, ['source', project.get('name'), '_meta'])
    stripped = deepcopy(project)
    project_references_remove(stripped)
    http_PUT(url, data=ET.tostring(stripped))

    for link in project.xpath('link[@project]'):
        if not project_fence(link.get('project')):
            project.remove(link)
            # NOTE(review): this `break` exits after removing the FIRST
            # out-of-fence link, leaving later links unprocessed -- looks
            # like it may have been meant as `continue`; confirm intent.
            break

        # Valid reference to project and thus should be cloned.
        path = ['source', link.get('project'), '_meta']
        entity_clone(apiurl_source, apiurl_target, path, clone=project_clone)

    # Clone projects referenced in repository paths.
    for repository in project.findall('repository'):
        for target in repository.xpath('./path') + repository.xpath('./releasetarget'):
            if not project_fence(target.get('project')):
                # Remove the whole repository; the break only exits the inner
                # target loop, so the (mutated) repository iteration continues.
                project.remove(repository)
                break

            # Valid reference to project and thus should be cloned.
            path = ['source', target.get('project'), '_meta']
            entity_clone(apiurl_source, apiurl_target, path, clone=project_clone)
Exemplo n.º 6
0
    def getWorkerStatus(self):
        """
        getWorkerStatus() -> list of dicts

        Return the status of every build worker. Each dict carries the keys
        'id', 'hostarch' and 'status'; workers that are building additionally
        carry 'project', 'package', 'target', 'starttime' and a human-readable
        'started' timestamp.
        """
        url = core.makeurl(self.apiurl, ['build', '_workerstatus'])
        tree = ElementTree.parse(core.http_GET(url)).getroot()

        workerstatus = []
        for worker in tree.findall('building'):
            entry = {'id': worker.get('workerid'), 'status': 'building'}
            for attr in ('hostarch', 'project', 'package', 'starttime'):
                entry[attr] = worker.get(attr)
            entry['target'] = '/'.join((worker.get('repository'), worker.get('arch')))
            entry['started'] = time.asctime(time.localtime(float(worker.get('starttime'))))
            workerstatus.append(entry)

        for worker in tree.findall('idle'):
            workerstatus.append({
                'id': worker.get('workerid'),
                'hostarch': worker.get('hostarch'),
                'status': 'idle',
            })
        return workerstatus
Exemplo n.º 7
0
    def getSubmitRequests(self):
        """
        getSubmitRequests() -> list of dicts

        Return all submit requests, sorted by request id. Each dict has the
        keys 'id', 'srcproject', 'srcpackage', 'dstproject', 'dstpackage'
        and 'state'.
        """
        url = core.makeurl(self.apiurl, ['search', 'request', '?match=submit'])
        tree = ElementTree.parse(core.http_GET(url)).getroot()

        submitrequests = []
        for sr in tree.findall('request'):
            if sr.get('type') != "submit":
                continue

            submit = sr.findall('submit')[0]
            source = submit.findall('source')[0]
            target = submit.findall('target')[0]
            submitrequests.append({
                'id': int(sr.get('id')),
                'srcproject': source.get('project'),
                'srcpackage': source.get('package'),
                'dstproject': target.get('project'),
                'dstpackage': target.get('package'),
                'state': sr.findall('state')[0].get('name'),
            })

        return sorted(submitrequests, key=lambda entry: entry['id'])
Exemplo n.º 8
0
    def find_request_project(self, source_project, newcand):
        """
        Look up the source project by its name and record matching SR#(s).

        :param source_project: name of the source project
        :param newcand: when True, only consider requests whose staging-group
                        review is still in state 'new'
        :return: True if at least one request was recorded, otherwise None
        """
        query = 'states=new,review&project={}&view=collection'.format(self.api.project)
        url = makeurl(self.api.apiurl, ['request'], query)
        root = ET.parse(http_GET(url)).getroot()

        ret = None
        for sr in root.findall('request'):
            if newcand:
                # Skip requests the staging group has already processed.
                states = [rev.get('state') for rev in sr.findall('review')
                          if rev.get('by_group') == self.api.cstaging_group]
                if 'new' not in states:
                    continue
            for action in sr.findall('action'):
                source = action.find('source')
                if source is None or source.get('project') != source_project:
                    continue
                request = int(sr.attrib['id'])
                self.srs[request] = {
                    'project': self.api.project,
                    'state': sr.find('state').get('name'),
                }
                ret = True

        return ret
Exemplo n.º 9
0
    def fill_pkgdeps(self, prj, repo, arch):
        """Populate self.sources, self.bin2src and self.pkgdeps from the
        _builddepinfo of the given project/repository/architecture."""
        url = makeurl(self.api.apiurl, ['build', prj, repo, arch, '_builddepinfo'])
        root = ET.parse(http_GET(url)).getroot()

        # First pass: map every produced binary (subpkg) to its source package.
        for package in root.findall('package'):
            source = package.find('source').text
            if package.attrib['name'].startswith('preinstall'):
                continue
            self.sources.add(source)

            for subpkg in package.findall('subpkg'):
                binname = subpkg.text
                if binname in self.bin2src:
                    if self.bin2src[binname] == source:
                        # same binary seen on different archs
                        continue
                    print('Binary {} is defined twice: {}/{}'.format(binname, prj, source))
                self.bin2src[binname] = source

        # Second pass: record which source pulls in each binary dependency.
        for package in root.findall('package'):
            source = package.find('source').text
            for dep in package.findall('pkgdep'):
                if dep.text not in self.bin2src:
                    print('Package {} not found in place'.format(dep.text))
                    continue
                self.pkgdeps[self.bin2src[dep.text]] = source
Exemplo n.º 10
0
def source_file_save(apiurl, project, package, filename, content, comment=None):
    """Write a package source file via the OBS API, tagging the commit
    comment with the local hostname."""
    comment = comment or 'update by OSRT tools'
    comment += ' (host {})'.format(socket.gethostname())

    query = {'comment': comment}
    http_PUT(makeurl(apiurl, ['source', project, package, filename], query), data=content)
Exemplo n.º 11
0
def issue_trackers(apiurl):
    """Return a mapping of issue-tracker name -> label."""
    url = makeurl(apiurl, ['issue_trackers'])
    root = ET.parse(http_GET(url)).getroot()
    return {tracker.find('name').text: tracker.find('label').text
            for tracker in root.findall('issue-tracker')}
Exemplo n.º 12
0
def package_source_hash(apiurl, project, package, revision=None):
    """Return a short sha1 over the package's source file md5s, or None when
    the package or revision does not exist.

    Links that are effectively copies are expanded so the hash reflects the
    real content.
    """
    query = {}
    if revision:
        query['rev'] = revision

    # Will not catch packages that previously had a link, but no longer do.
    if package_source_link_copy(apiurl, project, package):
        query['expand'] = 1

    try:
        url = makeurl(apiurl, ['source', project, package], query)
        root = ETL.parse(http_GET(url)).getroot()
    except HTTPError as e:
        if e.code in (400, 404):
            # 400: revision not found, 404: package not found.
            return None
        # Bare raise preserves the original traceback (was `raise e`).
        raise

    if revision and root.find('error') is not None:
        # OBS returns XML error instead of HTTP 404 if revision not found.
        return None

    from osclib.util import sha1_short
    return sha1_short(root.xpath('entry[@name!="_link"]/@md5'))
def _checker_check_dups(self, project, opts):
    """Detect duplicate requests of the same type against the same target
    package; supersede the older one when both come from the same source,
    otherwise just report the duplicate pair.
    """
    url = makeurl(opts.apiurl, ['request'], "states=new,review&project=%s&view=collection" % project)
    root = ET.parse(http_GET(url)).getroot()
    seen = {}  # (action type + target package) -> [request id, source element]
    for rq in root.findall('request'):
        rqid = rq.attrib['id']
        for action in rq.findall('action'):
            source = action.find('source')
            target = action.find('target')
            action_type = action.attrib['type']
            assert target is not None
            if target.attrib['project'] != project:
                continue
            if 'package' not in target.attrib:
                continue
            package = target.attrib['package']
            key = action_type + package
            if key in seen:
                [oldid, oldsource] = seen[key]
                # BUG FIX: ids are numeric strings and were compared
                # lexicographically ('9' > '10'), so the "older" request
                # could be picked wrongly. Compare numerically instead.
                if int(oldid) > int(rqid):
                    oldid, rqid = rqid, oldid
                assert int(oldid) < int(rqid)
                if source is not None and oldsource is not None:
                    if (source.attrib['project'] == oldsource.attrib['project'] and
                       source.attrib['package'] == oldsource.attrib['package']):
                        change_request_state(opts.apiurl, str(oldid), 'superseded',
                                     'superseded by %s' % rqid, rqid)
                        continue
                print("DUPS found:", rqid, oldid)
            seen[key] = [rqid, source]
Exemplo n.º 14
0
def issue_tracker_by_url(apiurl, tracker_url):
    """Return the issue-tracker element whose url matches tracker_url, or None."""
    root = ETL.parse(http_GET(makeurl(apiurl, ['issue_trackers']))).getroot()
    # All trackers are formatted with trailing slash.
    if not tracker_url.endswith('/'):
        tracker_url += '/'
    matches = root.xpath('issue-tracker[url[text()="{}"]]'.format(tracker_url))
    return matches[0] if matches else None
Exemplo n.º 15
0
    def old_md5(self, src_project, tgt_project, src_package, rev):
        """Recollect old MD5 for a package.

        Scans the job history of the first successful repository/arch
        combination and collects its srcmd5 values.
        """
        # XXX TODO - instead of fixing the limit, use endtime to make
        # sure that we have the correct time frame.
        limit = 20
        query = {
            'package': src_package,
            # 'code': 'succeeded',
            'limit': limit,
        }

        repositories = self.get_project_repos(src_project,
                                              tgt_project,
                                              src_package, rev)

        srcmd5_list = []
        for repository, archs in repositories:
            for arch, status in archs:
                # Stop as soon as one repo/arch has yielded a job history.
                if srcmd5_list:
                    break
                if status not in ('succeeded', 'outdated'):
                    continue

                url = makeurl(self.apiurl, ('build', src_project,
                                            repository, arch,
                                            '_jobhistory'),
                              query=query)
                try:
                    root = ET.parse(http_GET(url)).getroot()
                    srcmd5_list = [e.get('srcmd5') for e in root.findall('jobhist')]
                except urllib2.HTTPError as e:  # was py2-only "except E, e"
                    print('ERROR in URL %s [%s]' % (url, e))
Exemplo n.º 16
0
def package_binary_list(apiurl, project, repository, arch, package=None, strip_multibuild=True, exclude_src_debug=False):
    """List parsed binaries for a build repository.

    Returns (package_binaries, binary_map): a list of BinaryParsed tuples and
    a dict mapping binary filename to the owning package (last duplicate wins).
    """
    path = ['build', project, repository, arch]
    if package:
        path.append(package)
    root = ET.parse(http_GET(makeurl(apiurl, path, {'view': 'binaryversions'}))).getroot()

    package_binaries = []
    binary_map = {}  # last duplicate wins
    for binary_list in root:
        package = binary_list.get('package')
        if strip_multibuild:
            # Fold multibuild flavors (pkg:flavor) onto the main package name.
            package = package.split(':', 1)[0]

        for binary in binary_list:
            match = re.match(RPM_REGEX, binary.get('name'))
            if not match:
                continue

            parsed = BinaryParsed(package, match.group('filename'),
                                  match.group('name'), match.group('arch'))
            if exclude_src_debug and binary_src_debug(parsed):
                continue

            package_binaries.append(parsed)
            binary_map[match.group('filename')] = package

    return package_binaries, binary_map
Exemplo n.º 17
0
 def makeurl(self, l, query=None):
     """
     Wrapper around osc's makeurl passing our apiurl
     :return url made for l and query
     """
     return makeurl(self.apiurl, l, query or [])
Exemplo n.º 18
0
    def get_package_list_from_repository(self, project, repository, arch, package):
        """Return (filename, pkgname, arch, mtime) tuples for the binaries a
        package produced in the given repository. Debuginfo, debugsource and
        src rpms are skipped; rpmlint.log is reported with empty name/arch."""
        url = makeurl(self.apiurl, ('build', project, repository, arch, package))
        files = []
        try:
            binaries = ET.parse(http_GET(url)).getroot()
            for binary in binaries.findall('binary'):
                filename = binary.attrib['filename']
                mtime = int(binary.attrib['mtime'])

                match = re.match(r'(.*)-([^-]*)-([^-]*)\.([^-\.]+)\.rpm', filename)
                if not match:
                    # Not an rpm: keep only the rpmlint log.
                    if filename == 'rpmlint.log':
                        files.append((filename, '', '', mtime))
                    continue

                pname = match.group(1)
                binarch = match.group(4)
                # Skip debug packages and source rpms.
                if pname.endswith(('-debuginfo', '-debuginfo-32bit', '-debugsource')):
                    continue
                if binarch == 'src':
                    continue

                files.append((filename, pname, binarch, mtime))
        except urllib2.HTTPError:
            # Best effort: an inaccessible repository simply yields no files.
            pass
        return files
Exemplo n.º 19
0
def build(apiurl, project, repo, arch, package):
    """Fetch the build result listing for a package.

    NOTE(review): the fetched data is bound to `root` but never returned, so
    this function currently always returns None -- confirm whether a trailing
    `return root` is missing.
    """
    root = None
    try:
        url = makeurl(apiurl, ['build', project, repo, arch, package])
        root = http_GET(url).read()
    except urllib2.HTTPError as e:  # py2 print statement / except comma fixed
        print('ERROR in URL %s [%s]' % (url, e))
Exemplo n.º 20
0
def _get_verifymd5(self, p, rev):
    """Fetch the source info (view=info) for a request's source at `rev`.

    Returns [] on HTTP errors. NOTE(review): the query string is passed as a
    path component, which makeurl may percent-quote -- verify against the osc
    version in use.
    """
    try:
        url = makeurl(self.get_api_url(), ['source', p.sproject, p.spackage, '?view=info&rev=%s' % rev])
        root = ET.parse(http_GET(url)).getroot()
    except urllib2.HTTPError as e:  # py2 print statement / except comma fixed
        print('ERROR in URL %s [%s]' % (url, e))
        return []
Exemplo n.º 21
0
    def fill_pkgdeps(self, prj, repo, arch):
        """Populate self.sources, self.bin2src and self.pkgdeps from the
        _builddepinfo of prj/repo/arch, folding multibuild flavors onto
        their main package."""
        url = makeurl(self.api.apiurl, ['build', prj, repo, arch, '_builddepinfo'])
        root = ET.parse(http_GET(url)).getroot()

        for package in root.findall('package'):
            # use main package name for multibuild. We can't just ignore
            # multibuild as eg installation-images has no results for the main
            # package itself
            # https://github.com/openSUSE/open-build-service/issues/4198
            name = package.attrib['name'].split(':')[0]
            if name.startswith('preinstall'):
                continue
            self.sources.add(name)

            for subpkg in package.findall('subpkg'):
                binname = subpkg.text
                if binname in self.bin2src and self.bin2src[binname] != name:
                    print('# Binary {} is defined twice: {}/{}'.format(binname, prj, name))
                self.bin2src[binname] = name

        for package in root.findall('package'):
            name = package.attrib['name'].split(':')[0]
            for dep in package.findall('pkgdep'):
                if dep.text not in self.bin2src:
                    if not dep.text.startswith('texlive-'): # XXX: texlive bullshit packaging
                        print('Package {} not found in place'.format(dep.text))
                    continue
                self.pkgdeps[self.bin2src[dep.text]] = name
Exemplo n.º 22
0
    def fill_pkgdeps(self, prj, repo, arch):
        """Populate self.sources, self.bin2src and self.pkgdeps from the
        _builddepinfo of prj/repo/arch."""
        url = makeurl(self.api.apiurl, ['build', prj, repo, arch, '_builddepinfo'])
        root = ET.parse(http_GET(url)).getroot()

        # First pass: map every produced binary (subpkg) to its source.
        for package in root.findall('package'):
            source = package.find('source').text
            if package.attrib['name'].startswith('preinstall'):
                continue
            self.sources.append(source)

            for subpkg in package.findall('subpkg'):
                binname = subpkg.text
                if binname in self.bin2src:
                    print('Binary {} is defined twice: {}/{}'.format(binname, prj, source))
                self.bin2src[binname] = source

        # Second pass: record which source pulls in each binary dependency.
        for package in root.findall('package'):
            source = package.find('source').text
            for dep in package.findall('pkgdep'):
                if dep.text not in self.bin2src:
                    if dep.text.startswith('texlive-'):
                        # texlive packaging workaround: make every specs
                        # package depend on itself so it is never reported.
                        for letter in range(ord('a'), ord('z') + 1):
                            key = 'texlive-specs-' + chr(letter)
                            self.pkgdeps[key] = key
                    else:
                        print('Package {} not found in place'.format(dep.text))
                    continue
                self.pkgdeps[self.bin2src[dep.text]] = source
Exemplo n.º 23
0
    def change_review_state(self, request_id, newstate, message=''):
        """Based on osc/osc/core.py. Fixed 'by_user'.

        POSTs a changereviewstate command for the request; returns 200 in
        read-only (dry-run) mode, otherwise the code from the response.
        """
        query = {
            'cmd': 'changereviewstate',
            'newstate': newstate,
            # XXX TODO - We force the user here, check if the user
            # expressed in .oscrc (with the password stored) have
            # rights to become this user.
            'by_user': '******',
        }

        review_state = self.get_review_state(request_id)
        if review_state == 'accepted' and newstate != 'accepted':
            # NOTE(review): this only warns -- the POST below still runs;
            # confirm whether an early `return` was intended here.
            print(' - Avoid change state %s -> %s (%s)' % (review_state, newstate, message))

        code = 404
        url = makeurl(self.apiurl, ('request', str(request_id)), query=query)
        if self.readonly:
            print('DRY RUN: POST %s' % url)
            return 200
        try:
            root = ET.parse(http_POST(url, data=message)).getroot()
            code = root.attrib['code']
        except urllib2.HTTPError as e:  # py2 print statement / except comma fixed
            print('ERROR in URL %s [%s]' % (url, e))
Exemplo n.º 24
0
def package_kind(apiurl, project, package):
    """Classify a package by its name and _meta content.

    Returns one of 'meta', 'multibuild_subpackage', 'patchinfo',
    'maintenance_update', 'multispec_subpackage', 'source', or None when the
    package does not exist (HTTP 404).
    """
    # Cheap name-based classification first -- no API round-trip needed.
    if package.startswith('00'):
        return 'meta'

    if ':' in package:
        return 'multibuild_subpackage'

    if package.startswith('patchinfo.'):
        return 'patchinfo'

    try:
        url = makeurl(apiurl, ['source', project, package, '_meta'])
        root = ETL.parse(http_GET(url)).getroot()
    except HTTPError as e:
        if e.code == 404:
            return None
        # Bare raise preserves the original traceback (was `raise e`).
        raise

    if root.find('releasename') is not None:
        return 'maintenance_update'

    if root.find('bcntsynctag') is not None:
        return 'multispec_subpackage'

    # Some multispec subpackages do not have bcntsynctag, so check link.
    link = entity_source_link(apiurl, project, package)
    if link is not None and link.get('cicount') == 'copy':
        kind_target = package_kind(apiurl, project, link.get('package'))
        if kind_target != 'maintenance_update':
            # If a multispec subpackage was updated via a maintenance update the
            # proper link information is lost and it will be considered source.
            return 'multispec_subpackage'

    return 'source'
Exemplo n.º 25
0
 def _fill_package_meta(self, project):
     """Cache the <package> meta element of every package in `project`."""
     metas = self._package_metas.setdefault(project, {})
     url = makeurl(self.apiurl, ['search', 'package'], "match=[@project='%s']" % project)
     root = ET.parse(self.retried_GET(url))
     for pkg in root.findall('package'):
         metas[pkg.attrib['name']] = pkg
Exemplo n.º 26
0
    def find_request_package(self, package):
        """
        Look up the package by its name and return the SR#
        :param package: name of the package
        :raises oscerr.WrongArgs: if more than one request matches
        """
        query = 'states=new,review&project={}&view=collection&package={}'.format(
            self.api.project, urllib2.quote(package))
        url = makeurl(self.api.apiurl, ['request'], query)
        root = ET.parse(http_GET(url)).getroot()

        requests = []
        for sr in root.findall('request'):
            # Check the target matches - OBS query is case insensitive, but OBS is not
            target = sr.find('action').find('target')
            if package != target.get('package') or self.api.project != target.get('project'):
                continue

            rqid = sr.get('id')
            self.srs[int(rqid)] = {'project': self.api.project,
                                   'state': sr.find('state').get('name')}
            requests.append(rqid)

        if len(requests) > 1:
            raise oscerr.WrongArgs(
                'There are multiple requests for package "{}": {}'.format(
                    package, ', '.join(requests)))

        return int(requests[0]) if requests else None
Exemplo n.º 27
0
    def find_request_id(self, request_id):
        """
        Look up the request by ID to verify if it is correct
        :param request_id: ID of the added request
        :return: True when valid, False for non-integer input, None when the
                 request cannot be fetched or does not match the ID
        """
        if not _is_int(request_id):
            return False

        url = makeurl(self.api.apiurl, ['request', str(request_id)])
        try:
            response = http_GET(url)
        except urllib2.HTTPError:
            return None

        root = ET.parse(response).getroot()
        if root.get('id', None) != str(request_id):
            return None

        project = root.find('action').find('target').get('project')
        if project != self.api.project and not project.startswith(self.api.cstaging):
            raise oscerr.WrongArgs(
                'Request {} is not for {}, but for {}'.format(
                    request_id, self.api.project, project))

        self.srs[int(request_id)] = {'project': project}
        return True
Exemplo n.º 28
0
 def check_arch(self, project, repository, architecture):
     """Return the buildid when the given repository/arch has finished
     building, otherwise None."""
     url = makeurl(self.apiurl,
                   ['build', project, repository, architecture],
                   {'view': 'status'})
     status = ET.parse(http_GET(url)).getroot()
     if status.get('code') != 'finished':
         return None
     buildid = status.find('buildid')
     return buildid.text if buildid is not None else None
Exemplo n.º 29
0
def _get_base_build_bin(self, opts):
    """Return per-arch sets of binary file names (without the '.rpm' suffix)
    available in the openSUSE:Factory:Build standard repository."""
    binaries = {}
    for arch in ('x86_64', 'i586'):
        url = makeurl(opts.apiurl, ['/build/openSUSE:Factory:Build/standard/%s/_repository' % arch,])
        root = ET.parse(http_GET(url)).getroot()
        binaries[arch] = {e.attrib['filename'][:-4] for e in root.findall('binary')}
    return binaries
Exemplo n.º 30
0
def get_latest_packages(limit=None):
    """Return Package objects for the `limit` most recently updated packages."""
    url = makeurl(APIURL, ['statistics', 'latest_updated'], query={'limit': limit})
    tree = etree.parse(http_GET(url))
    return [Package(node) for node in tree.findall('package')]
Exemplo n.º 31
0
    parser.add_argument('-d', '--debug', action='store_true', default=False,
                        help='enable debug information')

    args = parser.parse_args()

    # Point osc at the requested API and propagate the debug flag.
    osc.conf.get_config(override_apiurl = args.apiurl)
    osc.conf.config['debug'] = args.debug

    apiurl = osc.conf.config['apiurl']

    # Internal SUSE instance vs public openSUSE: pick the matching AMQP
    # prefix and openQA server.
    if apiurl.endswith('suse.de'):
        amqp_prefix = 'suse'
        openqa_url = 'https://openqa.suse.de'
    else:
        amqp_prefix = 'opensuse'
        openqa_url = 'https://openqa.opensuse.org'

    logging.basicConfig(level=logging.INFO)

    # Listen on every project carrying the OSRT:OpenQAMapping attribute.
    l = Listener(amqp_prefix, openqa_url)
    url = makeurl(apiurl, ['search', 'project', 'id'], {'match': 'attribute/@name="OSRT:OpenQAMapping"'})
    f = http_GET(url)
    root = ET.parse(f).getroot()
    for entry in root.findall('project'):
        l.add(Project(entry.get('name')))

    # Run for one hour, then exit; Ctrl-C stops the listener cleanly.
    try:
        l.run(runtime=3600)
    except KeyboardInterrupt:
        l.stop()
Exemplo n.º 32
0
def project_list_prefix(apiurl, prefix):
    """Get a list of project with the same prefix."""
    url = makeurl(apiurl, ['search', 'project', 'id'],
                  {'match': 'starts-with(@name, "{}")'.format(prefix)})
    return ETL.parse(http_GET(url)).getroot().xpath('project/@name')
Exemplo n.º 33
0
    parser.add_argument('-p', '--project', type=str, help='Project to check')
    parser.add_argument('-r', '--repository', type=str,
                        help='Repository to check')

    args = parser.parse_args()

    osc.conf.get_config(override_apiurl=args.apiurl)
    apiurl = osc.conf.config['apiurl']

    logging.basicConfig(level=logging.DEBUG)
    logger = logging.getLogger(__name__)

    # first check if repo is finished
    archs = target_archs(apiurl, args.project, args.repository)
    for arch in archs:
        url = makeurl(apiurl, ['build', args.project, args.repository, arch], {'view': 'status'})
        root = ET.parse(http_GET(url)).getroot()
        if root.get('code') == 'finished':
            continue
        # Abort hard: a still-building repo makes the summary meaningless.
        logger.error('Repository {}/{}/{} is not yet finished'.format(args.project, args.repository, arch))
        logger.debug(ET.tostring(root).decode('utf-8'))
        sys.exit(1)

    # now check if all packages built fine
    url = makeurl(apiurl, ['build', args.project, '_result'],
                  {'view': 'summary', 'repository': args.repository})
    root = ET.parse(http_GET(url)).getroot()
    # Only these result states are acceptable; anything else counts as failure.
    counts = {'succeeded': 0, 'disabled': 0, 'excluded': 0}
    for count in root.findall('.//statuscount'):
        if int(count.get('count', 0)) == 0:
            continue
Exemplo n.º 34
0
                                            repository, arch,
                                            '_jobhistory'),
                              query=query)
                try:
                    root = ET.parse(http_GET(url)).getroot()
                    srcmd5_list = [e.get('srcmd5') for e in root.findall('jobhist')]
                except urllib2.HTTPError, e:
                    print('ERROR in URL %s [%s]' % (url, e))

        md5_set = set()
        for srcmd5 in srcmd5_list:
            query = {
                'expand': 1,
                'rev': srcmd5,
            }
            url = makeurl(self.apiurl, ('source', src_project, src_package), query=query)
            root = ET.parse(http_GET(url)).getroot()
            md5_set.add(root.find('linkinfo').get('srcmd5'))

        return md5_set

    def check_specs(self, request_id=None, request=None):
        """Check a single request and load the different SPECs files.

        This method have side effects, it can ACCEPT or DECLINE
        requests after some checks.

        """
        # NOTE(review): this snippet appears truncated -- `requests` is
        # initialized but the processing logic is not visible in this chunk.
        requests = []
Exemplo n.º 35
0
def fileinfo_ext(apiurl, project, repo, arch, package, filename):
    """Fetch the extended file info (view=fileinfo_ext) for one built binary.

    Returns the parsed XML root element of the OBS response.
    """
    path = ['build', project, repo, arch, package, filename]
    response = http_GET(makeurl(apiurl, path, {'view': 'fileinfo_ext'}))
    return ET.parse(response).getroot()
    def overall_result(self, snapshot):
        """Analyze the openQA jobs of a given snapshot Returns a QAResult

        Classifies every failed job as either "ignored" (it carries known
        bug references or an explicit '@ttm ignore' comment) or "relevant";
        any relevant failure yields QA_FAILED, unfinished jobs yield
        QA_INPROGRESS, otherwise QA_PASSED.
        """

        # No snapshot to judge means nothing can pass.
        if snapshot is None:
            return QA_FAILED

        jobs = self.find_openqa_results(snapshot)

        # Reset per-run bookkeeping; filled in the loop below.
        self.failed_relevant_jobs = []
        self.failed_ignored_jobs = []

        if len(jobs) < self.jobs_num():  # not yet scheduled
            logger.warning('we have only %s jobs' % len(jobs))
            return QA_INPROGRESS

        in_progress = False
        for job in jobs:
            # print json.dumps(job, sort_keys=True, indent=4)
            if job['result'] in ('failed', 'incomplete', 'skipped',
                                 'user_cancelled', 'obsoleted',
                                 'parallel_failed'):
                # print json.dumps(job, sort_keys=True, indent=4), jobname
                # Inspect the job's comments for bug references, an
                # existing 'label:unknown_failure' marker and ignore hints.
                url = makeurl(
                    self.openqa_server,
                    ['api', 'v1', 'jobs',
                     str(job['id']), 'comments'])
                f = self.api.retried_GET(url)
                comments = json.load(f)
                refs = set()
                labeled = 0  # comment id of the 'label:unknown_failure' marker, if any
                to_ignore = False
                for comment in comments:
                    for ref in comment['bugrefs']:
                        refs.add(str(ref))
                    if comment['userName'] == 'ttm' and comment[
                            'text'] == 'label:unknown_failure':
                        labeled = comment['id']
                    if re.search(r'@ttm:? ignore', comment['text']):
                        to_ignore = True
                # to_ignore can happen with or without refs
                ignored = True if to_ignore else len(refs) > 0
                build_nr = str(job['settings']['BUILD'])
                for ref in refs:
                    if ref not in self.issues_to_ignore:
                        if to_ignore:
                            # New issue explicitly ignored: remember it and
                            # schedule an update of the pinned description.
                            self.issues_to_ignore[ref] = build_nr
                            self.update_pinned_descr = True
                        else:
                            # Unknown bug reference without an ignore hint:
                            # the failure stays relevant.
                            ignored = False
                    else:
                        # update reference
                        self.issues_to_ignore[ref] = build_nr

                if ignored:
                    self.failed_ignored_jobs.append(job['id'])
                    if labeled:
                        text = 'Ignored issue' if len(
                            refs) > 0 else 'Ignored failure'
                        # remove flag - unfortunately can't delete comment unless admin
                        data = {'text': text}
                        self.openqa.openqa_request('PUT',
                                                   'jobs/%s/comments/%d' %
                                                   (job['id'], labeled),
                                                   data=data)

                    logger.info("job %s failed, but was ignored", job['name'])
                else:
                    self.failed_relevant_jobs.append(job['id'])
                    if not labeled and len(refs) > 0:
                        # Mark the job so a human reviews the unknown failure.
                        data = {'text': 'label:unknown_failure'}
                        if self.dryrun:
                            logger.info("Would label {} as unknown".format(
                                job['id']))
                        else:
                            self.openqa.openqa_request('POST',
                                                       'jobs/%s/comments' %
                                                       job['id'],
                                                       data=data)

                    joburl = '%s/tests/%s' % (self.openqa_server, job['id'])
                    logger.info("job %s failed, see %s", job['name'], joburl)

            elif job['result'] == 'passed' or job['result'] == 'softfailed':
                continue
            elif job['result'] == 'none':
                # 'none' means not finished; cancelled jobs are not counted
                # as in progress.
                if job['state'] != 'cancelled':
                    in_progress = True
            else:
                raise Exception(job['result'])

        self.save_issues_to_ignore()

        if len(self.failed_relevant_jobs) > 0:
            return QA_FAILED

        if in_progress:
            return QA_INPROGRESS

        return QA_PASSED
Exemplo n.º 37
0
 def _project_maintainer(self, request):
     """Get the list of maintainer of the target project."""
     meta_url = makeurl(self.apiurl, ('source', request.tgt_project, '_meta'))
     meta = ET.parse(http_GET(meta_url)).getroot()
     maintainers = []
     for person in meta.findall('.//person'):
         if person.get('role') == 'maintainer':
             maintainers.append(person.get('userid'))
     return maintainers
Exemplo n.º 38
0
    def get_packagelist(self, project, sle_pkglist=None, by_project=True):
        """
        Return the list of package's info of a project.
        If the latest package is from an incident then returns incident
        package.

        :param project: project whose expanded source listing is scanned.
        :param sle_pkglist: optional mapping of package name to its origin
            info ({'Project': ..., 'Package': ...}); used to rewrite
            non-openSUSE origins.  Defaults to empty.  (The previous
            mutable default ``[]`` was a bug: defaults are shared between
            calls, and the value is used as a mapping.)
        :param by_project: if True, return ``{project: [package, ...]}``;
            otherwise return the raw ``{package: info}`` mapping.
        """
        if sle_pkglist is None:
            sle_pkglist = {}

        pkglist = {}
        packageinfo = {}
        query = {'expand': 1}
        root = ET.parse(
            http_GET(makeurl(self.apiurl, ['source', project],
                             query=query))).getroot()
        for i in root.findall('entry'):
            pkgname = i.get('name')
            orig_project = i.get('originproject')
            is_incidentpkg = False
            # Metapackage should not be selected
            if pkgname.startswith('00') or\
                    pkgname.startswith('_') or\
                    pkgname.startswith('patchinfo.') or\
                    pkgname.startswith('skelcd-') or\
                    pkgname.startswith('installation-images') or\
                    pkgname.startswith('Leap-release') or\
                    pkgname.endswith('-mini') or\
                    '-mini.' in pkgname:
                continue
            # Ugly hack for package has dot in source package name
            # eg. go1.x incidents as the name would be go1.x.xxx
            if '.' in pkgname and re.match(r'[0-9]+$', pkgname.split('.')[-1]) and \
                    orig_project.startswith('SUSE:') and orig_project.endswith(':Update'):
                is_incidentpkg = True
                # Versioned package families (go1.14, rust1.43, ...) keep a
                # dot in their real name; only treat as incident when there
                # is more than one dot.
                if pkgname.startswith('go1') or\
                        pkgname.startswith('bazel0') or\
                        pkgname.startswith('dotnet') or\
                        pkgname.startswith('rust1') or\
                        pkgname.startswith('ruby2'):
                    if not (pkgname.count('.') > 1):
                        is_incidentpkg = False

            # If an incident found then update the package origin info
            if is_incidentpkg:
                orig_name = re.sub(r'\.[0-9]+$', '', pkgname)
                incident_number = int(pkgname.split('.')[-1])
                if orig_name in pkglist and pkglist[orig_name][
                        'Project'] == orig_project:
                    if re.match(r'[0-9]+$',
                                pkglist[orig_name]['Package'].split('.')[-1]):
                        # Keep whichever incident number is newest.
                        old_incident_number = int(
                            pkglist[orig_name]['Package'].split('.')[-1])
                        if incident_number > old_incident_number:
                            pkglist[orig_name]['Package'] = pkgname
                    else:
                        pkglist[orig_name]['Package'] = pkgname
            else:
                pkglist[pkgname] = {
                    'Project': orig_project,
                    'Package': pkgname
                }

            # Prefer the SLE origin for packages known to come from SLE,
            # unless they already originate from an openSUSE project.
            if sle_pkglist and pkgname in sle_pkglist and not orig_project.startswith(
                    'openSUSE'):
                pkglist[pkgname] = {
                    'Project': sle_pkglist[pkgname]['Project'],
                    'Package': sle_pkglist[pkgname]['Package']
                }

        if by_project:
            for pkg in pkglist.keys():
                if pkglist[pkg]['Project'].startswith(
                        'SUSE:') and self.is_sle_specific(pkg):
                    continue
                if pkglist[pkg]['Project'] not in packageinfo:
                    packageinfo[pkglist[pkg]['Project']] = []
                if pkglist[pkg]['Package'] not in packageinfo[pkglist[pkg]
                                                              ['Project']]:
                    packageinfo[pkglist[pkg]['Project']].append(
                        pkglist[pkg]['Package'])
            return packageinfo

        return pkglist
Exemplo n.º 39
0
def attribute_value_delete(apiurl, project, name, namespace='OSRT', package=None):
    """Delete an OBS attribute from a project (or a package, when given)."""
    parts = ['source', project, package, '_attribute', namespace + ':' + name]
    # A package of None is simply dropped from the URL path.
    http_DELETE(makeurl(apiurl, [part for part in parts if part]))
Exemplo n.º 40
0
 def delete(self, comment_id):
     """Remove a comment object.
     :param comment_id: Id of the comment object.
     """
     comment_url = makeurl(self.apiurl, ['comment', comment_id])
     return http_DELETE(comment_url)
Exemplo n.º 41
0
 def assertOBS(self):
     """Check that the configured API endpoint answers the /about call."""
     about = ET.parse(http_GET(makeurl(self.apiurl, ['about']))).getroot()
     self.assertEqual(about.tag, 'about')
Exemplo n.º 42
0
def person_clone_after(apiurl_source, apiurl_target, person):
    """Post-clone hook: set a known password for the copied person entry."""
    login = person.find('login').text
    change_url = makeurl(apiurl_target,
                         ['person', login],
                         {'cmd': 'change_password'})
    http_POST(change_url, data='opensuse')
Exemplo n.º 43
0
def builddepinfo(apiurl, project, repo, arch, order = False):
    """Fetch the _builddepinfo document for a repository/arch.

    With order=True the 'order' view is requested instead of the default.
    """
    query = {'view': 'order'} if order else {}
    url = makeurl(apiurl, ['build', project, repo, arch, '_builddepinfo'], query)
    return ETL.parse(http_GET(url)).getroot()
Exemplo n.º 44
0
def repository_arch_state(apiurl, project, repository, arch):
    # just checking the mtimes of the repository's binaries
    from osclib.util import sha1_short
    binaries_url = makeurl(apiurl,
                           ['build', project, repository, arch, '_repository'])
    listing = http_GET(binaries_url).read()
    return sha1_short(listing)
Exemplo n.º 45
0
def search(apiurl, path, xpath, query=None):
    """Run an OBS search query and return the parsed XML root.

    :param apiurl: API endpoint.
    :param path: search sub-path (e.g. 'request', 'package').
    :param xpath: XPath match expression, sent as the 'match' parameter.
    :param query: optional extra query parameters; mutated to add 'match'.

    Bug fix: the previous mutable default ``query={}`` was mutated on every
    call, so the 'match' value leaked between calls sharing the default.
    """
    if query is None:
        query = {}
    query['match'] = xpath
    url = makeurl(apiurl, ['search', path], query)
    return ETL.parse(http_GET(url)).getroot()
Exemplo n.º 46
0
    def check_depinfo_ring(self, prj, nextprj):
        """Check a ring project's build state and compute needed sources.

        Returns False if any repository is dirty/unpublished or any package
        failed.  Otherwise walks the DVD _buildinfo / bootstrap
        _buildconfig dependencies, records required packages in
        self.pkgdeps, and prints osc cleanup/linkpac commands for sources
        that are no longer needed.
        """
        url = makeurl(self.api.apiurl, ['build', prj, '_result'])
        root = ET.parse(http_GET(url)).getroot()
        for repo in root.findall('result'):
            repostate = repo.get('state', 'missing')
            if repostate not in ['unpublished', 'published'] or repo.get(
                    'dirty', 'false') == 'true':
                print('Repo {}/{} is in state {}'.format(
                    repo.get('project'), repo.get('repository'), repostate))
                return False
            for package in repo.findall('status'):
                code = package.get('code')
                if code not in ['succeeded', 'excluded', 'disabled']:
                    print('Package {}/{}/{} is {}'.format(
                        repo.get('project'), repo.get('repository'),
                        package.get('package'), code))
                    return False

        self.find_inner_ring_links(prj)
        for arch in ['x86_64', 'ppc64le']:
            self.fill_pkgdeps(prj, 'standard', arch)

            # Both DVD rings walk the same Test-DVD _buildinfo; only the
            # ring suffix and the marker stored in pkgdeps differ.
            for ring, marker in (('1-MinimalX', 'MYdvd'),
                                 ('2-TestDVD', 'MYdvd2')):
                if prj != '{}:{}'.format(self.api.crings, ring):
                    continue
                url = makeurl(self.api.apiurl, [
                    'build', prj, 'images', arch, 'Test-DVD-' + arch,
                    '_buildinfo'
                ])
                root = ET.parse(http_GET(url)).getroot()
                for bdep in root.findall('bdep'):
                    if 'name' not in bdep.attrib:
                        continue
                    b = bdep.attrib['name']
                    if b not in self.bin2src:
                        continue
                    self.pkgdeps[self.bin2src[b]] = marker

        if prj == '{}:0-Bootstrap'.format(self.api.crings):
            url = makeurl(self.api.apiurl,
                          ['build', prj, 'standard', '_buildconfig'])
            for line in http_GET(url).read().split('\n'):
                if line.startswith('Preinstall:') or line.startswith(
                        'Support:'):
                    for prein in line.split(':')[1].split():
                        if prein not in self.bin2src:
                            continue
                        self.pkgdeps[self.bin2src[prein]] = 'MYinstall'

        for source in self.sources:
            if source not in self.pkgdeps and source not in self.links:
                print('osc rdelete -m cleanup {} {}'.format(prj, source))
                if nextprj:
                    # Bug fix: previously written as
                    # print('...').format(...), which calls .format() on
                    # print's return value (None) and raises
                    # AttributeError at runtime.
                    print('osc linkpac {} {} {}'.format(
                        self.api.project, source, nextprj))
Exemplo n.º 47
0
def source_file_save(apiurl, project, package, filename, content, comment=None):
    """Upload a source file, attaching a commit comment.

    The comment gets the standard 'updated' suffix via message_suffix().
    """
    commit_comment = message_suffix('updated', comment)
    target = makeurl(apiurl, ['source', project, package, filename],
                     {'comment': commit_comment})
    http_PUT(target, data=content)
Exemplo n.º 48
0
    def update_and_solve_target(self,
                                apiurl,
                                target_project,
                                target_config,
                                main_repo,
                                opts,
                                skip_release=False):
        """Refresh the package-list packages of a project and re-solve them.

        Checks out the group/product (and optionally release) packages,
        regenerates the package lists, runs the product converter service
        and commits the results.  Bails out early when a needed package was
        deleted (it is revived for the next cycle) or when the product is
        still building.

        Bug fix: when the *release* package was missing, the code undeleted
        the *product* package (copy-paste from the branch above) while
        reporting the release as undeleted; it now undeletes the release.
        """
        print('[{}] {}/{}: update and solve'.format(opts.scope, opts.project,
                                                    main_repo))

        group = target_config.get('pkglistgen-group', '000package-groups')
        product = target_config.get('pkglistgen-product', '000product')
        release = target_config.get('pkglistgen-release',
                                    '000release-packages')

        url = makeurl(apiurl, ['source', opts.project])
        packages = ET.parse(http_GET(url)).getroot()
        if packages.find('entry[@name="{}"]'.format(product)) is None:
            if not self.options.dry:
                undelete_package(apiurl, opts.project, product, 'revive')
            # TODO disable build.
            print('{} undeleted, skip dvd until next cycle'.format(product))
            return
        elif not opts.force:
            # Skip the run while the product is still building or dirty.
            root = ET.fromstringlist(
                show_results_meta(apiurl,
                                  opts.project,
                                  product,
                                  repository=[main_repo],
                                  multibuild=True))
            if len(root.xpath('result[@state="building"]')) or len(
                    root.xpath('result[@state="dirty"]')):
                print('{}/{} build in progress'.format(opts.project, product))
                return

        checkout_list = [group, product]
        if not skip_release:
            checkout_list.append(release)

            if packages.find('entry[@name="{}"]'.format(release)) is None:
                if not self.options.dry:
                    undelete_package(apiurl, opts.project, release, 'revive')
                print(
                    '{} undeleted, skip dvd until next cycle'.format(release))
                return

        # Cache dir specific to hostname and project.
        host = urlparse.urlparse(apiurl).hostname
        cache_dir = save_cache_path('opensuse-packagelists', host,
                                    opts.project)

        # Always start from a clean checkout.
        if os.path.exists(cache_dir):
            shutil.rmtree(cache_dir)
        os.makedirs(cache_dir)

        group_dir = os.path.join(cache_dir, group)
        product_dir = os.path.join(cache_dir, product)
        release_dir = os.path.join(cache_dir, release)

        for package in checkout_list:
            checkout_package(apiurl,
                             opts.project,
                             package,
                             expand_link=True,
                             prj_dir=cache_dir)

        # Drop stale generated files before regenerating.
        if not skip_release:
            self.unlink_all_except(release_dir)
        self.unlink_all_except(product_dir)
        self.copy_directory_contents(
            group_dir, product_dir,
            ['supportstatus.txt', 'groups.yml', 'package-groups.changes'])
        self.change_extension(product_dir, '.spec.in', '.spec')

        self.options.input_dir = group_dir
        self.options.output_dir = product_dir
        self.postoptparse()

        print('-> do_update')
        self.do_update('update', opts)

        print('-> do_solve')
        opts.ignore_unresolvable = bool(
            target_config.get('pkglistgen-ignore-unresolvable'))
        opts.ignore_recommended = bool(
            target_config.get('pkglistgen-ignore-recommended'))
        opts.include_suggested = bool(
            target_config.get('pkglistgen-include-suggested'))
        opts.locale = target_config.get('pkglistgen-local')
        opts.locales_from = target_config.get('pkglistgen-locales-from')
        self.do_solve('solve', opts)

        delete_products = target_config.get('pkglistgen-delete-products',
                                            '').split(' ')
        self.unlink_list(product_dir, delete_products)

        print('-> product service')
        for product_file in glob.glob(os.path.join(product_dir, '*.product')):
            print(
                subprocess.check_output(
                    [PRODUCT_SERVICE, product_file, product_dir,
                     opts.project]))

        delete_kiwis = target_config.get(
            'pkglistgen-delete-kiwis-{}'.format(opts.scope), '').split(' ')
        self.unlink_list(product_dir, delete_kiwis)

        spec_files = glob.glob(os.path.join(product_dir, '*.spec'))
        if skip_release:
            self.unlink_list(None, spec_files)
        else:
            self.move_list(spec_files, release_dir)

        self.multibuild_from_glob(product_dir, '*.kiwi')
        self.build_stub(product_dir, 'kiwi')
        self.commit_package(product_dir)

        if not skip_release:
            self.multibuild_from_glob(release_dir, '*.spec')
            self.build_stub(release_dir, 'spec')
            self.commit_package(release_dir)
Exemplo n.º 49
0
def request_state_change(apiurl, request_id, state):
    """Change the state of a request; return the status code OBS reports."""
    url = makeurl(apiurl, ['request', request_id],
                  {'newstate': state, 'cmd': 'changestate'})
    response = ETL.parse(http_POST(url)).getroot()
    return response.get('code')
Exemplo n.º 50
0
def package_source_changed(apiurl, project, package):
    """Return the time of the last source commit as a naive UTC datetime."""
    url = makeurl(apiurl, ['source', project, package, '_history'], {'limit': 1})
    history = ETL.parse(http_GET(url)).getroot()
    epoch = int(history.find('revision/time').text)
    # Convert via an aware UTC datetime, then strip tzinfo to stay naive.
    return datetime.fromtimestamp(epoch, timezone.utc).replace(tzinfo=None)