Example No. 1
 def load_ext(self, repodata):
     repomdtype = repodata.lookup_str(solv.SOLVID_META,
                                      solv.REPOSITORY_REPOMD_TYPE)
     if repomdtype == 'filelists':
         ext = 'FL'
     elif repomdtype == 'deltainfo':
         ext = 'DL'
     else:
         return False
     logging.info("Loading extended metadata {1} for {0}.".format(
             self.name, repomdtype))
     if self.use_cached_repo(ext):
         logging.info("  - found recent copy in cache")
         return True
     logging.info("  - fetching")
     filename = repodata.lookup_str(solv.SOLVID_META,
                                    solv.REPOSITORY_REPOMD_LOCATION)
     filechksum = repodata.lookup_checksum(solv.SOLVID_META,
                                           solv.REPOSITORY_REPOMD_CHECKSUM)
     f = self.download(filename, True, [filechksum])
     if not f:
         return False
     if ext == 'FL':
         self.handle.add_rpmmd(f, 'FL', solv.Repo.REPO_USE_LOADING |
                               solv.Repo.REPO_EXTEND_SOLVABLES)
     elif ext == 'DL':
         self.handle.add_deltainfoxml(f, solv.Repo.REPO_USE_LOADING)
     solv.xfclose(f)
     self.write_cached_repo(ext, repodata)
     return True
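The method keys off the repomd.xml record type, tries the on-disk cache first, and only downloads and parses when the cache misses. A minimal sketch of that cache-first flow, with hypothetical use_cached, fetch_verified, parse, and write_cache helpers standing in for the solv-backed calls above:

    import logging

    EXT_TAGS = {'filelists': 'FL', 'deltainfo': 'DL'}  # repomd type -> cache tag

    def load_ext_sketch(repo, repomdtype):
        ext = EXT_TAGS.get(repomdtype)
        if ext is None:
            return False            # unknown metadata type: skip, as above
        if repo.use_cached(ext):    # hypothetical cache probe
            return True
        f = repo.fetch_verified(repomdtype)  # hypothetical download + checksum
        if not f:
            return False
        repo.parse(f, ext)          # hypothetical parse into the pool
        repo.write_cache(ext)
        return True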
Example No. 2
 def set_from_mirror_list(self, mirrorlist):
     nf = self.download(mirrorlist, False)
     if not nf:
         return
     f = os.fdopen(os.dup(solv.xfileno(nf)), 'r')
     solv.xfclose(nf)
     urls = []
     for l in f.readlines():
         l = l.strip()
         # Keep only lines that look like mirror base URLs.
         if l.startswith(('http://', 'https://')):
             urls.append(l)
     self.set_from_urls(urls)
     f.close()
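Note the os.dup(solv.xfileno(nf)) step: the download arrives as a libsolv file handle, so the descriptor is duplicated into an ordinary Python file object and the solv handle is closed right away. The URL filtering itself is plain text processing; a standalone sketch (parse_mirrorlist is an illustrative name, not part of the class):

    def parse_mirrorlist(text):
        # One mirror base URL per line; blanks and comments fall through.
        urls = []
        for line in text.splitlines():
            line = line.strip()
            if line.startswith(('http://', 'https://')):
                urls.append(line)
        return urls

    # parse_mirrorlist("https://mirror.example/fedora/\n# a comment\n")
    # -> ['https://mirror.example/fedora/']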
Example No. 3
 def write_cached_repo(self, ext, info=None):
     tmpname = None
     try:
         if not os.path.isdir(self.md_cache_dir):
             os.mkdir(self.md_cache_dir, 0o755)
         (fd, tmpname) = tempfile.mkstemp(prefix='.newsolv-',
                                          dir=self.md_cache_dir)
         os.fchmod(fd, 0o444)
         f = os.fdopen(fd, 'w+')
         if not info:
             self.handle.write(f)
         elif ext:
             info.write(f)
         else:       # rewrite_repos case
             self.handle.write_first_repodata(f)
         if self.type != 'system' and not ext:
             if 'extcookie' not in self:
                 self.get_ext_cookie(f)
             f.write(self['extcookie'])
         if not ext:
             f.write(self['cookie'])
         else:
             f.write(self['extcookie'])
         f.close()
         if self.handle.iscontiguous():
             # Switch to saved repo to activate paging and save memory.
             nf = solv.xfopen(tmpname)
             if not ext:
                 # Main repository.
                 self.handle.empty()
                 if not self.handle.add_solv(nf, solv.Repo.SOLV_ADD_NO_STUBS):
                     sys.exit("Internal error, cannot reload solv file.")
             else:
                 # Extension repodata.
                 # Need to extend to repo boundaries, as this is how
                 # info.write() has written the data.
                 info.extend_to_repo()
                 # LOCALPOOL does not help as pool already contains all ids
                 info.add_solv(nf, solv.Repo.REPO_EXTEND_SOLVABLES)
             solv.xfclose(nf)
         os.rename(tmpname, self.cache_path(ext))
     except IOError:
         if tmpname:
             os.unlink(tmpname)
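The write goes to a temporary file inside the cache directory and is renamed into place only at the end, so readers never see a half-written .solv file; contiguous repos are then reloaded from the cache to activate paging. The temp-file-plus-rename pattern on its own (a generic sketch, not the solv API):

    import os
    import tempfile

    def write_atomically(cache_dir, final_name, data):
        fd, tmpname = tempfile.mkstemp(prefix='.newsolv-', dir=cache_dir)
        try:
            with os.fdopen(fd, 'wb') as f:
                os.fchmod(f.fileno(), 0o444)  # cache files are read-only
                f.write(data)
            # rename() is atomic within a filesystem: readers see either
            # the old complete file or the new complete file, never a mix.
            os.rename(tmpname, os.path.join(cache_dir, final_name))
        except OSError:
            os.unlink(tmpname)
            raise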
Example No. 4
 def set_from_metalink(self, metalink):
     nf = self.download(metalink, False)
     if not nf:
         return None
     f = os.fdopen(os.dup(solv.xfileno(nf)), 'r')
     solv.xfclose(nf)
     urls = []
     chksums = []
     for l in f.readlines():
         l = l.strip()
         m = re.match(r'^https?://.+/', l)
         if m:
             urls.append(m.group(0))
         m = re.match(r'^<hash type="sha256">([0-9a-fA-F]{64})</hash>', l)
         if m:
             chksums.append(solv.Chksum(solv.REPOKEY_TYPE_SHA256, m.group(1)))
         m = re.match(r'^<url.*>(https?://.+)repodata/repomd\.xml</url>', l)
         if m:
             urls.append(m.group(1))
     if len(urls) == 0:
         chksums = [] # in case the metalink is about a different file
     f.close()
     self.set_from_urls(urls)
     return chksums
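Rather than using an XML parser, the method scans the metalink line by line with regular expressions, collecting candidate mirror URLs and the expected SHA-256 of repomd.xml; the checksums are discarded when no URL matched, since the metalink may describe a different file. The URL regex can be tried in isolation (the mirror host here is made up):

    import re

    line = '<url type="https">https://mirror.example/fedora/repodata/repomd.xml</url>'
    m = re.match(r'^<url.*>(https?://.+)repodata/repomd\.xml</url>', line)
    print(m.group(1) if m else None)  # -> https://mirror.example/fedora/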
Example No. 5
 def load_if_changed(self):
     logging.info("Checking rpmmd repo '{0}'.".format(self.name))
     sys.stdout.flush()
     f = self.download("repodata/repomd.xml", False)
     if not f:
         logging.info("  - no repomd.xml file, skipping")
         self.handle.free(True)
         del self.handle
         return False

     # Calculate a cookie from repomd contents.
     chksum = solv.Chksum(solv.REPOKEY_TYPE_SHA256)
     chksum.add_fp(f)
     self['cookie'] = chksum.raw()

     if self.use_cached_repo(None, True):
         logging.info("  - using cached metadata")
         solv.xfclose(f)
         return True
     os.lseek(solv.xfileno(f), 0, os.SEEK_SET)
     self.handle.add_repomdxml(f, 0)
     solv.xfclose(f)
     logging.info("  - fetching metadata")
     (filename, filechksum) = self.find('primary')
     if filename:
         f = self.download(filename, True, [filechksum], True)
         if f:
             self.handle.add_rpmmd(f, None, 0)
             solv.xfclose(f)
         if 'incomplete' in self:
             return False # Hopeless, need good primary.
     (filename, filechksum) = self.find('updateinfo')
     if filename:
         f = self.download(filename, True, [filechksum], True)
         if f:
             self.handle.add_updateinfoxml(f, 0)
             solv.xfclose(f)
     self.add_exts()
     if 'incomplete' not in self:
         self.write_cached_repo(None)
     # Must be called after writing the repo.
     self.handle.create_stubs()
     return True
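The 'cookie' is the raw SHA-256 digest of repomd.xml: if a cached .solv file carries the same cookie, the metadata has not changed and the cache is used as-is. The digest solv.Chksum computes here can be reproduced with the standard library alone (a sketch, not part of the solv API):

    import hashlib

    def repomd_cookie(path):
        # Raw SHA-256 of repomd.xml identifies the repository snapshot.
        h = hashlib.sha256()
        with open(path, 'rb') as f:
            for chunk in iter(lambda: f.read(65536), b''):
                h.update(chunk)
        return h.digest()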