# Imports assumed by the snippets below.
import gzip
import hashlib
import io
import os
import tempfile
import xml.etree.ElementTree as ET
from urllib.parse import urljoin

import requests
import solv


def parse_repomd(repo, baseurl):
    url = urljoin(baseurl, 'repodata/repomd.xml')
    repomd = requests.get(url)
    if repomd.status_code != requests.codes.ok:
        return False

    # Locate the "primary" metadata entry and its expected checksum.
    ns = {'r': 'http://linux.duke.edu/metadata/repo'}
    root = ET.fromstring(repomd.content)
    primary_element = root.find('.//r:data[@type="primary"]', ns)
    location = primary_element.find('r:location', ns).get('href')
    sha256_expected = primary_element.find('r:checksum[@type="sha256"]', ns).text

    # Feed repomd.xml to libsolv via a temporary file descriptor.
    f = tempfile.TemporaryFile()
    f.write(repomd.content)
    f.flush()
    os.lseek(f.fileno(), 0, os.SEEK_SET)
    repo.add_repomdxml(solv.xfopen_fd(None, f.fileno()), 0)

    url = urljoin(baseurl, location)
    with requests.get(url) as primary:
        if primary.status_code != requests.codes.ok:
            raise Exception(url + ' does not exist')
        sha256 = hashlib.sha256(primary.content).hexdigest()
        if sha256 != sha256_expected:
            raise Exception('checksums do not match {} != {}'.format(sha256, sha256_expected))

        # Reuse the temporary file for the decompressed primary metadata.
        content = gzip.GzipFile(fileobj=io.BytesIO(primary.content))
        os.lseek(f.fileno(), 0, os.SEEK_SET)
        f.write(content.read())
        f.flush()
        f.truncate()  # drop any leftover bytes of repomd.xml
        os.lseek(f.fileno(), 0, os.SEEK_SET)
        repo.add_rpmmd(solv.xfopen_fd(None, f.fileno()), None, 0)
        return True
Example 3
    def __init__(self, repos, packages, arch=None):
        """
        :param repos: An iterable of :py:class:`rpmdeplint.repodata.Repo` instances
        :param packages: An iterable of RPM package paths to be tested
        """
        # delayed import to avoid circular dependency
        from rpmdeplint.repodata import RepoDownloadError

        self.pool = solv.Pool()
        self.pool.setarch(arch)

        #: List of :py:class:`solv.Solvable` to be tested (corresponding to *packages* parameter)
        self.solvables = []
        self.commandline_repo = self.pool.add_repo('@commandline')
        for rpmpath in packages:
            solvable = self.commandline_repo.add_rpm(rpmpath)
            if solvable is None:
                # pool.errstr is already prefixed with the filename
                raise UnreadablePackageError('Failed to read package: %s' %
                                             self.pool.errstr)
            self.solvables.append(solvable)

        self.repos_by_name = {}  #: Mapping of {repo name: :py:class:`rpmdeplint.repodata.Repo`}
        for repo in repos:
            try:
                repo.download_repodata()
            except RepoDownloadError as e:
                if repo.skip_if_unavailable:
                    logger.warning('Skipping repo %s: %s', repo.name, e)
                    continue
                else:
                    raise
            solv_repo = self.pool.add_repo(repo.name)
            # solv.xfopen does not accept unicode filenames on Python 2
            solv_repo.add_rpmmd(
                solv.xfopen_fd(str(repo.primary_url), repo.primary.fileno()),
                None)
            solv_repo.add_rpmmd(
                solv.xfopen_fd(str(repo.filelists_url),
                               repo.filelists.fileno()), None,
                solv.Repo.REPO_EXTEND_SOLVABLES)
            self.repos_by_name[repo.name] = repo

        self.pool.addfileprovides()
        self.pool.createwhatprovides()

        # Special handling for "installonly" packages: we create jobs to mark
        # installonly package names as "multiversion" and then set those as
        # pool jobs, which means the jobs are automatically applied whenever we
        # run the solver on this pool.
        multiversion_jobs = []
        # installonlypkgs is a module-level list of "install only" package
        # names (kernel packages and the like)
        for name in installonlypkgs:
            selection = self.pool.select(name,
                                         solv.Selection.SELECTION_PROVIDES)
            multiversion_jobs.extend(
                selection.jobs(solv.Job.SOLVER_MULTIVERSION))
        self.pool.setpooljobs(multiversion_jobs)
Example 4
    def download(self, file, uncompress, chksums=None, markincomplete=False):
        chksums = chksums or []  # avoid a mutable default argument
        url = None
        if 'baseurl' not in self:
            if 'metalink' in self:
                if file != self['metalink']:
                    metalinkchksums = self.set_from_metalink(self['metalink'])
                    if file == 'repodata/repomd.xml' and len(chksums) == 0:
                        chksums = metalinkchksums
                else:
                    url = file
            elif 'mirrorlist' in self:
                if file != self['mirrorlist']:
                    self.set_from_mirror_list(self['mirrorlist'])
                else:
                    url = file
        if not url:
            if 'baseurl' not in self:
                logging.error("Error: {0}: no baseurl".format(self.name))
                return None
            url = re.sub(r'/$', '', self['baseurl']) + '/' + file
        logging.info("  - downloading {0}".format(url))
        f = tempfile.TemporaryFile()
        try:
            # urllib.request.urlopen replaces the Python 2 urllib2.urlopen
            urlfile = urllib.request.urlopen(url, timeout=30)
            while True:
                data = urlfile.read(8 * 32168)
                if len(data) == 0:
                    break
                f.write(data)
            urlfile.close()
        except urllib.error.URLError as e:
            logging.error("Error: {0}: download error: {1}".format(url, e))
            if markincomplete:
                self['incomplete'] = True
            return None
        f.flush()
        os.lseek(f.fileno(), 0, os.SEEK_SET)
        verified = (len(chksums) == 0)
        for chksum in chksums:
            fchksum = solv.Chksum(chksum.type)
            if fchksum is None:
                if markincomplete:
                    self['incomplete'] = True
                continue
            fchksum.add_fd(f.fileno())
            if fchksum.raw() != chksum.raw():
                if markincomplete:
                    self['incomplete'] = True
            else:
                verified = True
        if not verified:
            logging.error("Error {0}: checksum mismatch or unknown "
                          "checksum type".format(file))
            return None
        if uncompress:
            return solv.xfopen_fd(file, os.dup(f.fileno()))
        return solv.xfopen_fd(None, os.dup(f.fileno()))
def parse_susetags(repo, baseurl):
    url = urljoin(baseurl, 'content')
    content = requests.get(url)
    if content.status_code != requests.codes.ok:
        return False

    f = tempfile.TemporaryFile()
    f.write(content.content)
    f.flush()
    os.lseek(f.fileno(), 0, os.SEEK_SET)
    repo.add_content(solv.xfopen_fd(None, f.fileno()), 0)

    defvendorid = repo.meta.lookup_id(solv.SUSETAGS_DEFAULTVENDOR)
    descrdir = repo.meta.lookup_str(solv.SUSETAGS_DESCRDIR)
    if not descrdir:
        descrdir = 'suse/setup/descr'

    url = urljoin(baseurl, descrdir + '/packages.gz')
    with requests.get(url) as packages:
        if packages.status_code != requests.codes.ok:
            raise Exception(url + ' does not exist')

        # Reuse the temporary file for the decompressed packages data.
        decompressed = gzip.GzipFile(fileobj=io.BytesIO(packages.content))
        os.lseek(f.fileno(), 0, os.SEEK_SET)
        f.write(decompressed.read())
        f.flush()
        f.truncate()  # drop any leftover bytes of the 'content' file
        os.lseek(f.fileno(), 0, os.SEEK_SET)
        repo.add_susetags(solv.xfopen_fd(None, f.fileno()), defvendorid, None,
                          solv.Repo.REPO_NO_INTERNALIZE | solv.Repo.SUSETAGS_RECORD_SHARES)
        return True
Example 8
    def download(self, file, uncompress, chksum, markincomplete=False):
        url = None
        if 'baseurl' not in self:
            if 'metalink' in self:
                if file != self['metalink']:
                    metalinkchksum = self.setfrommetalink(self['metalink'])
                    if file == 'repodata/repomd.xml' and metalinkchksum and not chksum:
                        chksum = metalinkchksum
                else:
                    url = file
            elif 'mirrorlist' in self:
                if file != self['mirrorlist']:
                    self.setfrommirrorlist(self['mirrorlist'])
                else:
                    url = file
        if not url:
            if 'baseurl' not in self:
                print("%s: no baseurl" % self.name)
                return None
            url = re.sub(r'/$', '', self['baseurl']) + '/' + file
        f = tempfile.TemporaryFile(mode='wb')
        real_url = self.sub_url(url)
        mem_f = request.urlopen(real_url).read()
        f.write(mem_f)
        f.seek(0)
        if chksum:
            fchksum = solv.Chksum(chksum.type)
            if not fchksum:
                print("%s: unknown checksum type" % file)
                if markincomplete:
                    self['incomplete'] = True
                return None
            fchksum.add_fd(f.fileno())
            # force the .hex() method to finalize the hash (avoids "<type>:unfinished")
            fchksum.hex()

            if fchksum != chksum:
                print(file, url, chksum, fchksum)
                print("%s: checksum mismatch" % file)
                if markincomplete:
                    self['incomplete'] = True
                return None
        if uncompress:
            return solv.xfopen_fd(file, f.fileno())
        return solv.xfopen_fd(None, f.fileno())
Example 9
    def writecachedrepo(self, ext, repodata=None):
        if 'incomplete' in self:
            return
        tmpname = None
        try:
            if not os.path.isdir("/var/cache/solv"):
                os.mkdir("/var/cache/solv", 0o755)
            (fd, tmpname) = tempfile.mkstemp(prefix='.newsolv-',
                                             dir='/var/cache/solv')
            os.fchmod(fd, 0o444)
            f = os.fdopen(fd, 'wb+')
            f = solv.xfopen_fd(None, f.fileno())
            if not repodata:
                self.handle.write(f)
            elif ext:
                repodata.write(f)
            else:  # rewrite_repos case, do not write stubs
                self.handle.write_first_repodata(f)
            f.flush()
            if self.type != 'system' and not ext:
                if 'extcookie' not in self:
                    self['extcookie'] = self.calc_cookie_ext(f, self['cookie'])
                f.write(self['extcookie'])
            if not ext:
                f.write(self['cookie'])
            else:
                f.write(self['extcookie'])
            f.close()
            if self.handle.iscontiguous():
                # switch to the saved repo to activate paging and save memory
                nf = solv.xfopen(tmpname)
                if not ext:
                    # main repo
                    self.handle.empty()
                    flags = solv.Repo.SOLV_ADD_NO_STUBS
                    if repodata:
                        flags = 0  # rewrite_repos case, recreate stubs
                    if not self.handle.add_solv(nf, flags):
                        sys.exit("internal error, cannot reload solv file")
                else:
                    # extension repodata
                    # need to extend to repo boundaries, as this is how
                    # repodata.write() has written the data
                    repodata.extend_to_repo()
                    flags = solv.Repo.REPO_EXTEND_SOLVABLES
                    if ext != 'DL':
                        flags |= solv.Repo.REPO_LOCALPOOL
                    repodata.add_solv(nf, flags)
            os.rename(tmpname, self.cachepath(ext))
        except (OSError, IOError):
            if tmpname:
                os.unlink(tmpname)
Example 10
def add_susetags(pool, file):
    oldsysrepo = pool.add_repo(file)
    defvendorid = oldsysrepo.meta.lookup_id(solv.SUSETAGS_DEFAULTVENDOR)
    f = tempfile.TemporaryFile()
    if file.endswith('.xz'):
        subprocess.call(['xz', '-cd', file], stdout=f.fileno())
    elif file.endswith('.zst'):
        subprocess.call(['zstd', '-cd', file], stdout=f.fileno())
    else:
        raise Exception("unsupported " + file)
    os.lseek(f.fileno(), 0, os.SEEK_SET)
    oldsysrepo.add_susetags(
        solv.xfopen_fd(None, f.fileno()), defvendorid, None,
        solv.Repo.REPO_NO_INTERNALIZE | solv.Repo.SUSETAGS_RECORD_SHARES)
    return oldsysrepo
def merge_susetags(output, files):
    pool = solv.Pool()
    pool.setarch()

    for file in files:
        add_susetags(pool, file)  # reuse the helper defined above

    packages = dict()
    for s in pool.solvables_iter():
        evr = s.evr.split('-')
        release = evr.pop()
        version = '-'.join(evr)
        key = s.name + "-" + version + "." + s.arch
        if re.search('-release', s.name):  # just take one version of it
            key = s.name + "." + s.arch
        packages[key] = {
            'name': s.name,
            'version': version,
            'arch': s.arch,
            'release': release,
            'provides': set()
        }
        for dep in s.lookup_deparray(solv.SOLVABLE_PROVIDES):
            packages[key]['provides'].add(str(dep))

    with open(output, 'w') as output_file:
        print("=Ver: 2.0", file=output_file)
        for package in sorted(packages):
            infos = packages[package]
            print('=Pkg:',
                  infos['name'],
                  infos['version'],
                  infos['release'],
                  infos['arch'],
                  file=output_file)
            print('+Prv:', file=output_file)
            for dep in sorted(infos['provides']):
                print(dep, file=output_file)
            print('-Prv:', file=output_file)
Example 12
    def usecachedrepo(self, ext, mark=False):
        try:
            repopath = self.cachepath(ext)
            f = open(repopath, 'rb')
            f.seek(-32, os.SEEK_END)
            fcookie = f.read(32)
            if len(fcookie) != 32:
                return False
            if not ext:
                cookie = self['cookie']
            else:
                cookie = self['extcookie']
            if cookie and fcookie != cookie:
                return False
            if self.type != 'system' and not ext:
                f.seek(-32 * 2, os.SEEK_END)
                fextcookie = f.read(32)
                if len(fextcookie) != 32:
                    return False
            f.seek(0)
            f = solv.xfopen_fd('', f.fileno())
            flags = 0
            if ext:
                flags = solv.Repo.REPO_USE_LOADING | solv.Repo.REPO_EXTEND_SOLVABLES
                if ext != 'DL':
                    flags |= solv.Repo.REPO_LOCALPOOL
            if not self.handle.add_solv(f, flags):
                return False
            if self.type != 'system' and not ext:
                self['cookie'] = fcookie
                self['extcookie'] = fextcookie
            if mark:
                # no futimes in python?
                try:
                    os.utime(repopath, None)
                except Exception:
                    pass
        except IOError:
            return False
        return True
def update_project(apiurl, project):
    # Cache dir specific to hostname and project.
    host = urlparse(apiurl).hostname
    cache_dir = CacheManager.directory('update_repo_handler', host, project)
    repo_dir = os.path.join(cache_dir, '000update-repos')

    # development aid
    checkout = True
    if checkout:
        if os.path.exists(cache_dir):
            shutil.rmtree(cache_dir)
        os.makedirs(cache_dir)

        osc.core.checkout_package(apiurl, project, '000update-repos', expand_link=True, prj_dir=cache_dir)

    with open(os.path.join(repo_dir, 'config.yml')) as config:
        root = yaml.safe_load(config)
    for item in root:
        key = list(item)[0]
        opts = item[key]
        # cast 15.1 to string :)
        key = str(key)
        if not opts['url'].endswith('/'):
            opts['url'] += '/'

        if opts.get('refresh', False):
            opts['build'] = dump_solv_build(opts['url'])
            path = '{}_{}.packages'.format(key, opts['build'])
        else:
            path = key + '.packages'
        packages_file = os.path.join(repo_dir, path)

        if os.path.exists(packages_file + '.xz'):
            print(path, 'already exists')
            continue

        solv_file = packages_file + '.solv'
        dump_solv(solv_file, opts['url'])

        pool = solv.Pool()
        pool.setarch()

        if opts.get('refresh', False):
            for file in glob.glob(os.path.join(repo_dir, '{}_*.packages.xz'.format(key))):
                repo = pool.add_repo(file)
                defvendorid = repo.meta.lookup_id(solv.SUSETAGS_DEFAULTVENDOR)
                f = tempfile.TemporaryFile()
                # FIXME: port to lzma module with python3
                subprocess.call(['xz', '-cd', file], stdout=f.fileno())
                os.lseek(f.fileno(), 0, os.SEEK_SET)
                repo.add_susetags(solv.xfopen_fd(None, f.fileno()), defvendorid, None, solv.Repo.REPO_NO_INTERNALIZE | solv.Repo.SUSETAGS_RECORD_SHARES)

        repo1 = pool.add_repo(''.join(random.choice(string.ascii_letters) for _ in range(5)))
        repo1.add_solv(solv_file)

        with open(packages_file, 'w') as fh:
            print_repo_delta(pool, repo1, fh)
        subprocess.call(['xz', '-9', packages_file])
        os.unlink(solv_file)

        url = osc.core.makeurl(apiurl, ['source', project, '000update-repos', path + '.xz'])
        with open(packages_file + '.xz', 'rb') as fh:
            osc.core.http_PUT(url, data=fh.read())

        del pool
Example 14
    def create_weakremovers(self, target, target_config, directory, output):
        drops = dict()
        dropped_repos = dict()

        with open(os.path.join(directory, 'config.yml')) as config:
            root = yaml.safe_load(config)
        for item in root:
            key = list(item)[0]
            opts = item[key]
            # cast 15.1 to string :)
            key = str(key)

            oldrepos = set(glob.glob(os.path.join(directory, '{}_*.packages.xz'.format(key))))
            oldrepos |= set(glob.glob(os.path.join(directory, '{}.packages.xz'.format(key))))
            for oldrepo in sorted(oldrepos):
                pool = solv.Pool()
                pool.setarch()

                # we need some progress in the debug output - or gocd gets nervous
                self.logger.debug('checking {}'.format(oldrepo))
                oldsysrepo = pool.add_repo(oldrepo)
                defvendorid = oldsysrepo.meta.lookup_id(solv.SUSETAGS_DEFAULTVENDOR)
                f = tempfile.TemporaryFile()
                # FIXME: port to lzma module with python3
                subprocess.call(['xz', '-cd', oldrepo], stdout=f.fileno())
                os.lseek(f.fileno(), 0, os.SEEK_SET)
                oldsysrepo.add_susetags(solv.xfopen_fd(None, f.fileno()), defvendorid, None,
                                        solv.Repo.REPO_NO_INTERNALIZE | solv.Repo.SUSETAGS_RECORD_SHARES)

                for arch in self.all_architectures:
                    for project, repo in self.repos:
                        fn = os.path.join(CACHEDIR, 'repo-{}-{}-{}.solv'.format(project, repo, arch))
                        r = pool.add_repo('/'.join([project, repo]))
                        r.add_solv(fn)

                pool.createwhatprovides()

                for s in oldsysrepo.solvables_iter():
                    if s.arch == 'src':
                        continue

                    oldarch = s.arch
                    if oldarch == 'i686':
                        oldarch = 'i586'

                    #print('check', s.name, oldarch)
                    haveit = False
                    for s2 in pool.whatprovides(s.nameid):
                        if s2.repo == oldsysrepo or s.nameid != s2.nameid:
                            continue
                        newarch = s2.arch
                        if newarch == 'i686':
                            newarch = 'i586'
                        if oldarch != newarch and newarch != 'noarch' and oldarch != 'noarch':
                            continue
                        haveit = True
                        break
                    if haveit:
                        continue

                    # check for already obsoleted packages
                    nevr = pool.rel2id(s.nameid, s.evrid, solv.REL_EQ)
                    for s2 in pool.whatmatchesdep(solv.SOLVABLE_OBSOLETES, nevr):
                        if s2.repo == oldsysrepo:
                            continue
                        haveit = True
                        break
                    if haveit:
                        continue
                    drops.setdefault(s.name, {'repo': key, 'archs': set()})
                    if oldarch == 'noarch':
                        drops[s.name]['archs'] |= set(self.all_architectures)
                    else:
                        drops[s.name]['archs'].add(oldarch)
                    dropped_repos[key] = 1

                del pool

        for repo in sorted(dropped_repos):
            repo_output = False
            exclusives = dict()
            for name in sorted(drops):
                if drops[name]['repo'] != repo:
                    continue
                if len(drops[name]['archs']) == len(self.all_architectures):
                    if not repo_output:
                        print('#', repo, file=output)
                        repo_output = True
                    print('Provides: weakremover({})'.format(name), file=output)
                else:
                    jarch = ' '.join(sorted(drops[name]['archs']))
                    exclusives.setdefault(jarch, []).append(name)

            for arch in sorted(exclusives):
                if not repo_output:
                    print('#', repo, file=output)
                    repo_output = True
                print('%ifarch {}'.format(arch), file=output)
                for name in sorted(exclusives[arch]):
                    print('Provides: weakremover({})'.format(name), file=output)
                print('%endif', file=output)
        output.flush()
Example 15
def find(handle, what):
    di = handle.Dataiterator_meta(solv.REPOSITORY_REPOMD_TYPE, what, solv.Dataiterator.SEARCH_STRING)
    di.prepend_keyname(solv.REPOSITORY_REPOMD)
    for d in di:
        dp = d.parentpos()
        return dp.lookup_str(solv.REPOSITORY_REPOMD_LOCATION)
    return None


pool = solv.Pool()
pool.setarch()

# repo_name and base_url are assumed to be defined earlier in the script
handle = pool.add_repo(repo_name)
f = download_repomd(base_url + "repodata/repomd.xml")
handle.add_repomdxml(f, 0)

primary_file_name = find(handle, "primary")
primary_url = base_url + primary_file_name
primary_f = download_archive(primary_file_name, primary_url)
handle.add_rpmmd(primary_f, None, 0)

updateinfo_file_name = find(handle, "updateinfo")
updateinfo_url = base_url + updateinfo_file_name
updateinfo_f = download_archive(updateinfo_file_name, updateinfo_url)
handle.add_updateinfoxml(updateinfo_f, 0)

f_solv = open("/tmp/solv", "wb")
f_solv = solv.xfopen_fd(None, f_solv.fileno())
handle.write(f_solv)
f_solv.close()
Example 16
def download_archive(file, url):
    f = tempfile.TemporaryFile()
    subprocess.call(["curl", "-f", "-s", "-L", url], stdout=f.fileno())
    os.lseek(f.fileno(), 0, os.SEEK_SET)

    # the file name lets xfopen_fd pick the matching decompressor (.gz, .xz, ...)
    return solv.xfopen_fd(file, f.fileno())
Example 21
def download_repomd(url):
    f = tempfile.TemporaryFile()
    subprocess.call(['curl', '-f', '-s', '-L', url], stdout=f.fileno())
    os.lseek(f.fileno(), 0, os.SEEK_SET)

    # a None path means the data is read as-is (repomd.xml is uncompressed)
    return solv.xfopen_fd(None, f.fileno())