Esempio n. 1
0
    def getDbFile(self, dbtype):
        """Try to load an existing cached sqlite database for *dbtype*
        ("primary", "filelists", ...) of this repository.

        Creates the per-repo cache directories if needed.  Returns 1 when
        a cached database whose checksum matches the repomd entry was
        loaded and bound to self._<dbtype>db / self._<dbtype>db_cursor.
        """
        if dbtype != "primary":
            log.info2("Loading %s for %s...", dbtype, self.reponame)

        cachebase = os.path.join(self.config.cachedir, self.reponame)
        cachepath = os.path.join(self.config.cachedir, self.reponame, "sqlite")

        # Ensure the cache directory hierarchy exists.
        if not os.path.isdir(cachebase):
            os.makedirs(cachebase)
        if not os.path.isdir(cachepath):
            os.makedirs(cachepath)

        # check existing sqlite db
        dbfilename = os.path.join(cachepath, "%s.xml.gz.sqlite" % dbtype)
        if os.path.exists(dbfilename):
            try:
                csum, db = self.loadCache(dbfilename)
            # "except E, e" was removed in Python 3; "as" works in 2.6+.
            except sqlite3.Error as e:
                log.error(e)
                csum = None
            # dict.has_key() is deprecated; "in" is equivalent and portable.
            if dbtype in self.repomd and \
                   "checksum" in self.repomd[dbtype] and \
                   csum == self.repomd[dbtype]["checksum"]:
                setattr(self, "_%sdb" % dbtype, db)
                setattr(self, "_%sdb_cursor" % dbtype, db.cursor())
                return 1
Esempio n. 2
0
    def getDbFile(self, dbtype):
        """Try to load an existing cached sqlite database for *dbtype*
        ("primary", "filelists", ...) of this repository.

        Creates the per-repo cache directories if needed.  Returns 1 when
        a cached database whose checksum matches the repomd entry was
        loaded and bound to self._<dbtype>db / self._<dbtype>db_cursor.
        """
        if dbtype != "primary":
            log.info2("Loading %s for %s...", dbtype, self.reponame)

        cachebase = os.path.join(self.config.cachedir, self.reponame)
        cachepath = os.path.join(self.config.cachedir, self.reponame, "sqlite")

        # Ensure the cache directory hierarchy exists.
        if not os.path.isdir(cachebase):
            os.makedirs(cachebase)
        if not os.path.isdir(cachepath):
            os.makedirs(cachepath)

        # check existing sqlite db
        dbfilename = os.path.join(cachepath, "%s.xml.gz.sqlite" % dbtype)
        if os.path.exists(dbfilename):
            try:
                csum, db = self.loadCache(dbfilename)
            # "except E, e" was removed in Python 3; "as" works in 2.6+.
            except sqlite3.Error as e:
                log.error(e)
                csum = None
            # dict.has_key() is deprecated; "in" is equivalent and portable.
            if dbtype in self.repomd and \
                   "checksum" in self.repomd[dbtype] and \
                   csum == self.repomd[dbtype]["checksum"]:
                setattr(self, "_%sdb" % dbtype, db)
                setattr(self, "_%sdb_cursor" % dbtype, db.cursor())
                return 1
Esempio n. 3
0
    def loadCache(self, filename):
        """Load a yum sqlite cache database from *filename*.

        Validates that the db_info table holds a row (an incomplete cache
        file can be left behind when cache generation is interrupted) and
        that its dbversion is one of supported_dbversions.

        Returns a (checksum, db) tuple on success.
        Raises sqlite3.DatabaseError if the cache file is incomplete or
        its dbversion is unsupported.
        """
        db = sqlite3.connect(filename)
        db.row_factory = sqlite3.Row
        db.text_factory = str
        cur = db.cursor()
        cur.execute("SELECT * FROM db_info")
        info = cur.fetchone()
        # If info is not in there this is an incomplete cache file
        # (this could happen when the user hits ctrl-c or kills yum
        # when the cache is being generated or updated)
        if not info:
            # "raise E, msg" is py2-only; the call form works in 2 and 3.
            raise sqlite3.DatabaseError("Incomplete database cache file")

        # Now check the database version
        if info['dbversion'] not in supported_dbversions:
            log.info2(
                "Cache file is version %s, we need %s, will "
                "regenerate.\n", info['dbversion'], dbversion)
            raise sqlite3.DatabaseError(
                "Older version of yum sqlite: %s" % info['dbversion'])

        # This appears to be a valid database, return checksum value and
        # database object
        return (info['checksum'], db)
Esempio n. 4
0
    def __readDir(self, dir, location):
        """Collect non-excluded *.rpm files found under dir into
        self.pkglist.

        dir is a local file system path; location is the remote prefix
        corresponding to it.  Each package gets pkg["yumlocation"] set to
        its remote relative path before being added via self.addPkg()."""

        found = []
        wanted_tags = ("name", "epoch", "version", "release", "arch",
                       "sourcerpm", "requirename", "requireflags",
                       "requireversion")
        functions.readDir(dir, found, wanted_tags)
        for pkg in found:
            # FIXME: this is done in createRepo too
            # Source rpms get arch "src".  Only valid for createRepo,
            # never do this anywhere else. ;)
            if pkg.isSourceRPM():
                pkg["arch"] = "src"
            log.info2("Adding %s to repo and checking file requires.",
                      pkg.getNEVRA())
            pkg["yumlocation"] = location + pkg.source[len(dir):]
            self.addPkg(pkg)
Esempio n. 5
0
def run_main(main):
    """Run main, handling --hotshot.

    The return value from main, if not None, is a return code."""

    dohotshot = 0
    if len(sys.argv) >= 2 and sys.argv[1] == "--hotshot":
        dohotshot = 1
        sys.argv.pop(1)
    if dohotshot:
        import hotshot, hotshot.stats
        htfilename = mkstemp_file("/tmp", tmpprefix)[1]
        prof = hotshot.Profile(htfilename)
        # runcall passes main's result through; keep it so the profiled
        # run still reports a return code (previously it was dropped).
        ret = prof.runcall(main)
        prof.close()
        del prof
        log.info2("Starting profile statistics. This takes some time...")
        s = hotshot.stats.load(htfilename)
        s.strip_dirs().sort_stats("time").print_stats(100)
        s.strip_dirs().sort_stats("cumulative").print_stats(100)
        os.unlink(htfilename)
        return ret
    else:
        return main()
Esempio n. 6
0
    def __readDir(self, dir, location):
        """Collect non-excluded *.rpm files found under dir into
        self.pkglist.

        dir is a local file system path; location is the remote prefix
        corresponding to it.  Each package gets pkg["yumlocation"] set to
        its remote relative path before being added via self.addPkg()."""

        pkgs = []
        tags = ("name", "epoch", "version", "release", "arch",
                "sourcerpm", "requirename", "requireflags",
                "requireversion")
        functions.readDir(dir, pkgs, tags)
        for pkg in pkgs:
            # FIXME: this is done in createRepo too
            # Source rpms get arch "src".  Only valid for createRepo,
            # never do this anywhere else. ;)
            if pkg.isSourceRPM():
                pkg["arch"] = "src"
            log.info2("Adding %s to repo and checking file requires.",
                      pkg.getNEVRA())
            pkg["yumlocation"] = location + pkg.source[len(dir):]
            self.addPkg(pkg)
Esempio n. 7
0
def run_main(main):
    """Run main, handling --hotshot.

    The return value from main, if not None, is a return code."""

    dohotshot = 0
    if len(sys.argv) >= 2 and sys.argv[1] == "--hotshot":
        dohotshot = 1
        sys.argv.pop(1)
    if dohotshot:
        import hotshot, hotshot.stats
        htfilename = mkstemp_file("/tmp", tmpprefix)[1]
        prof = hotshot.Profile(htfilename)
        # runcall passes main's result through; keep it so the profiled
        # run still reports a return code (previously it was dropped).
        ret = prof.runcall(main)
        prof.close()
        del prof
        log.info2("Starting profile statistics. This takes some time...")
        s = hotshot.stats.load(htfilename)
        s.strip_dirs().sort_stats("time").print_stats(100)
        s.strip_dirs().sort_stats("cumulative").print_stats(100)
        os.unlink(htfilename)
        return ret
    else:
        return main()
Esempio n. 8
0
    def loadCache(self, filename):
        """Load a yum sqlite cache database from *filename*.

        Validates that the db_info table holds a row (an incomplete cache
        file can be left behind when cache generation is interrupted) and
        that its dbversion is one of supported_dbversions.

        Returns a (checksum, db) tuple on success.
        Raises sqlite3.DatabaseError if the cache file is incomplete or
        its dbversion is unsupported.
        """
        db = sqlite3.connect(filename)
        db.row_factory = sqlite3.Row
        db.text_factory = str
        cur = db.cursor()
        cur.execute("SELECT * FROM db_info")
        info = cur.fetchone()
        # If info is not in there this is an incomplete cache file
        # (this could happen when the user hits ctrl-c or kills yum
        # when the cache is being generated or updated)
        if not info:
            # "raise E, msg" is py2-only; the call form works in 2 and 3.
            raise sqlite3.DatabaseError("Incomplete database cache file")

        # Now check the database version
        if info['dbversion'] not in supported_dbversions:
            log.info2("Cache file is version %s, we need %s, will "
                      "regenerate.\n", info['dbversion'], dbversion)
            raise sqlite3.DatabaseError(
                "Older version of yum sqlite: %s" % info['dbversion'])

        # This appears to be a valid database, return checksum value and
        # database object
        return (info['checksum'], db)
Esempio n. 9
0
 #    ofd = gzip.GzipFile(os.path.join(datapath, "other.xml.gz"), "wb")
 #except IOError:
 #    return 0
 # Build in-memory XML trees for the primary ("metadata") and filelists
 # documents; the "other" document is disabled (commented out) here.
 pdoc = libxml2.newDoc("1.0")
 proot = pdoc.newChild(None, "metadata", None)
 fdoc = libxml2.newDoc("1.0")
 froot = fdoc.newChild(None, "filelists", None)
 #odoc = libxml2.newDoc("1.0")
 #oroot = odoc.newChild(None, "filelists", None)
 log.info1("Pass 2: Writing repodata information.")
 # Write the XML headers and opening root tags directly to the primary
 # (pfd) and filelists (ffd) output streams.
 pfd.write('<?xml version="1.0" encoding="UTF-8"?>\n')
 pfd.write('<metadata xmlns="http://linux.duke.edu/metadata/common" xmlns:rpm="http://linux.duke.edu/metadata/rpm" packages="%d">\n' % len(self.getPkgs()))
 ffd.write('<?xml version="1.0" encoding="UTF-8"?>\n')
 ffd.write('<filelists xmlns:rpm="http://linux.duke.edu/filelists" packages="%d">\n' % len(self.getPkgs()))
 for pkg in self.getPkgs():
     log.info2("Processing complete data of package %s.",
               pkg.getNEVRA())
     # Force a full re-read of the package header from disk.
     pkg.header_read = 0
     try:
         pkg.open()
         pkg.read()
     except (IOError, ValueError), e:
         # Unreadable/corrupt package: log and skip it.
         log.warning("%s: %s", pkg.getNEVRA(), e)
         continue
     # If it is a source rpm change the arch to "src". Only valid
     # for createRepo, never do this anywhere else. ;)
     if pkg.isSourceRPM():
         pkg["arch"] = "src"
     try:
         checksum = self.__getChecksum(pkg)
     except (IOError, NotImplementedError), e:
         log.warning("%s: %s", pkg.getNEVRA(), e)
Esempio n. 10
0
           self.repomd[dbtype].has_key("checksum") and \
           csum == self.repomd[dbtype]["checksum"] and \
           self.nc.isCached("repodata/%s.xml.gz" % dbtype):
            # Cached copy matches the repomd checksum: reuse it as-is.
            filename = self.nc.getCachedFilename("repodata/%s.xml.gz" % dbtype)
        else:
            # Fetch (or copy) the metadata file, then verify its "sha"
            # checksum against the repomd entry before trusting it.
            filename = self.nc.cache("repodata/%s.xml.gz" % dbtype, 1,
                                     copy_local=True)
            (csum, destfile) = self.nc.checksum("repodata/%s.xml.gz" % dbtype,
                                                "sha")
            if not (self.repomd.has_key(dbtype) and \
                    self.repomd[dbtype].has_key("checksum") and \
                    csum == self.repomd[dbtype]["checksum"]):
                return 0

        if filename:
            log.info2("Creating %s", dbfilename)
            if USEYUM:
                # Build the sqlite cache with yum's own mdparser/storage.
                parser = yum.mdparser.MDParser(filename)
                storage = yum.storagefactory.GetStorage()
                cache = storage.GetCacheHandler(dbfilename, 'tmp', None)
                if dbtype == 'primary':
                    db = cache.getPrimary(filename, csum)
                elif dbtype == 'filelists':
                    db = cache.getFilelists(filename, csum)
                elif dbtype == 'other':
                    db = cache.getOtherdata(filename, csum)
                # XXX error handling
                shutil.move(filename + '.sqlite', dbfilename)
                setattr(self, "_%sdb" % dbtype, db)
                setattr(self, "_%sdb_cursor" % dbtype, db.cursor())
                # TODO: add all other indices
Esempio n. 11
0
           csum == self.repomd[dbtype]["checksum"] and \
           self.nc.isCached("repodata/%s.xml.gz" % dbtype):
            # Cached copy matches the repomd checksum: reuse it as-is.
            filename = self.nc.getCachedFilename("repodata/%s.xml.gz" % dbtype)
        else:
            # Fetch (or copy) the metadata file, then verify its "sha"
            # checksum against the repomd entry before trusting it.
            filename = self.nc.cache("repodata/%s.xml.gz" % dbtype,
                                     1,
                                     copy_local=True)
            (csum, destfile) = self.nc.checksum("repodata/%s.xml.gz" % dbtype,
                                                "sha")
            if not (self.repomd.has_key(dbtype) and \
                    self.repomd[dbtype].has_key("checksum") and \
                    csum == self.repomd[dbtype]["checksum"]):
                return 0

        if filename:
            log.info2("Creating %s", dbfilename)
            if USEYUM:
                # Build the sqlite cache with yum's own mdparser/storage.
                parser = yum.mdparser.MDParser(filename)
                storage = yum.storagefactory.GetStorage()
                cache = storage.GetCacheHandler(dbfilename, 'tmp', None)
                if dbtype == 'primary':
                    db = cache.getPrimary(filename, csum)
                elif dbtype == 'filelists':
                    db = cache.getFilelists(filename, csum)
                elif dbtype == 'other':
                    db = cache.getOtherdata(filename, csum)
                # XXX error handling
                shutil.move(filename + '.sqlite', dbfilename)
                setattr(self, "_%sdb" % dbtype, db)
                setattr(self, "_%sdb_cursor" % dbtype, db.cursor())
                # TODO: add all other indices
Esempio n. 12
0
 def read(self):
     """Log which RHN channel repository is being read, then delegate
     to the SqliteRepoDB implementation."""
     log.info2("Reading RHN channel repository '%s'", self.reponame)
     result = SqliteRepoDB.read(self)
     return result
Esempio n. 13
0
 proot = pdoc.newChild(None, "metadata", None)
 fdoc = libxml2.newDoc("1.0")
 froot = fdoc.newChild(None, "filelists", None)
 #odoc = libxml2.newDoc("1.0")
 #oroot = odoc.newChild(None, "filelists", None)
 log.info1("Pass 2: Writing repodata information.")
 # Emit the XML headers and opening root tags to the primary (pfd) and
 # filelists (ffd) output streams.
 pfd.write('<?xml version="1.0" encoding="UTF-8"?>\n')
 pfd.write(
     '<metadata xmlns="http://linux.duke.edu/metadata/common" xmlns:rpm="http://linux.duke.edu/metadata/rpm" packages="%d">\n'
     % len(self.getPkgs()))
 ffd.write('<?xml version="1.0" encoding="UTF-8"?>\n')
 ffd.write(
     '<filelists xmlns:rpm="http://linux.duke.edu/filelists" packages="%d">\n'
     % len(self.getPkgs()))
 for pkg in self.getPkgs():
     log.info2("Processing complete data of package %s.",
               pkg.getNEVRA())
     # Force a full re-read of the package header from disk.
     pkg.header_read = 0
     try:
         pkg.open()
         pkg.read()
     except (IOError, ValueError), e:
         # Unreadable/corrupt package: log and skip it.
         log.warning("%s: %s", pkg.getNEVRA(), e)
         continue
     # If it is a source rpm change the arch to "src". Only valid
     # for createRepo, never do this anywhere else. ;)
     if pkg.isSourceRPM():
         pkg["arch"] = "src"
     try:
         checksum = self.__getChecksum(pkg)
     except (IOError, NotImplementedError), e:
         log.warning("%s: %s", pkg.getNEVRA(), e)