Example #1
    def _parse(self, ip):
        """Parse <package> tags."""

        for event, elem in ip:
            tag = elem.tag
            if event != "start":
                continue
            if not tag.endswith("}package") and \
               not tag.endswith("}repomd"):
                continue
            if tag.endswith("}repomd"):
                return self.__parseRepomd(ip)
            props = elem.attrib
            if   props.get("type") == "rpm":
                try:
                    pkg = self.__parsePackage(ip)
                except ValueError, e:
                    # pkg is unset when __parsePackage() raises, so report
                    # the current element's name attribute instead.
                    log.warning("%s: %s", props.get("name", "<package>"), e)
                    continue
                pkg.yumrepo = self
                if self.comps != None:
                    if   self.comps.hasType(pkg["name"], "mandatory"):
                        pkg.compstype = "mandatory"
                    elif self.comps.hasType(pkg["name"], "default"):
                        pkg.compstype = "default"
                    elif self.comps.hasType(pkg["name"], "optional"):
                        pkg.compstype = "optional"
                self.addPkg(pkg)
            elif props.has_key("name"):
                arch = props.get("arch")
                if arch == None:
                    # pkg would refer to a previously parsed package here
                    # (or be unset); name the current filelist entry instead.
                    log.warning("%s: missing arch= in <package>",
                                props["name"])
                    continue
                self.__parseFilelist(ip, props["name"], arch)
Example #2
    def _parse(self, ip):
        """Parse <package> tags."""

        for event, elem in ip:
            tag = elem.tag
            if event != "start":
                continue
            if not tag.endswith("}package") and \
               not tag.endswith("}repomd"):
                continue
            if tag.endswith("}repomd"):
                return self.__parseRepomd(ip)
            props = elem.attrib
            if props.get("type") == "rpm":
                try:
                    pkg = self.__parsePackage(ip)
                except ValueError, e:
                    # Logging the parser object itself is not informative;
                    # report the element's name attribute instead.
                    log.warning("%s: %s", props.get("name", "<package>"), e)
                    continue
                pkg.yumrepo = self
                if self.comps != None:
                    if self.comps.hasType(pkg["name"], "mandatory"):
                        pkg.compstype = "mandatory"
                    elif self.comps.hasType(pkg["name"], "default"):
                        pkg.compstype = "default"
                    elif self.comps.hasType(pkg["name"], "optional"):
                        pkg.compstype = "optional"
                self.addPkg(pkg)
            elif props.has_key("name"):
                arch = props.get("arch")
                if arch == None:
                    # pkg would refer to a previously parsed package here
                    # (or be unset); name the current filelist entry instead.
                    log.warning("%s: missing arch= in <package>",
                                props["name"])
                    continue
                self.__parseFilelist(ip, props["name"], arch)
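
Both variants above walk an ElementTree iterparse stream and match tags by namespace suffix, because the yum metadata elements carry a namespace URI. A minimal standalone sketch of that pattern, assuming nothing beyond the standard library (the XML literal is illustrative, not the full primary.xml schema):

import xml.etree.ElementTree as ET
from io import BytesIO

xml = b'''<metadata xmlns="http://linux.duke.edu/metadata/common">
  <package type="rpm"><name>foo</name></package>
  <package type="rpm"><name>bar</name></package>
</metadata>'''

# iterparse yields (event, element) pairs; on "start" the opening tag has
# been read, so the attributes are available before the subtree is parsed.
for event, elem in ET.iterparse(BytesIO(xml), events=("start", "end")):
    if event == "start" and elem.tag.endswith("}package"):
        # Fully qualified tag: {http://linux.duke.edu/metadata/common}package
        print(elem.attrib.get("type"))    # prints "rpm" twice
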
Example #3
def filterArchCompat(list, arch):
    """Modify RpmPackage list list to contain only packages that can be
    installed on arch arch.

    Warn using config.rpmconfig about dropped packages."""

    i = 0
    while i < len(list):
        if archCompat(list[i]["arch"], arch):
            i += 1
        else:
            log.warning("%s: Architecture not compatible with %s",
                        list[i].source, arch)
            list.pop(i)
Example #4
def filterArchCompat(list, arch):
    """Modify RpmPackage list list to contain only packages that can be
    installed on arch arch.

    Warn using config.rpmconfig about dropped packages."""

    i = 0
    while i < len(list):
        if archCompat(list[i]["arch"], arch):
            i += 1
        else:
            log.warning("%s: Architecture not compatible with %s",
                        list[i].source, arch)
            list.pop(i)
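
filterArchCompat prunes the list in place with an index/pop loop instead of building a new list, so any caller holding a reference to the same list object sees the filtered result. A minimal sketch of the same in-place idiom on stand-in data (archCompat and RpmPackage are not involved here):

def filter_in_place(items, keep):
    """Drop items for which keep(item) is false, mutating the list."""
    i = 0
    while i < len(items):
        if keep(items[i]):
            i += 1
        else:
            # Do not advance i: the next element has slid into slot i.
            items.pop(i)

nums = [1, 2, 3, 4, 5, 6]
filter_in_place(nums, lambda n: n % 2 == 0)
print(nums)    # [2, 4, 6]
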
Example #5
 def __parseRepomd(self, ip):
     """Parse repomd.xml for SHA1 checks of the files.
     Returns a hash of the form:
       name -> {location, checksum, timestamp, open-checksum}"""
     rethash = {}
     # Keep the heavily used per-file dict in locals to speed up this loop
     tmphash = {}
     fname = None
     for event, elem in ip:
         isend = (event == "end")
         props = elem.attrib
         tag = elem.tag
         if not isend and tag.endswith("}data"):
             fname = props.get("type")
             if not fname:
                 break
             tmphash = {}
             rethash[fname] = tmphash
         if not isend:
             continue
         if tag.endswith("}repomd"):
             break
         elif tag.endswith("}location"):
             loc = props.get("href")
             if loc:
                 tmphash["location"] = loc
         elif tag.endswith("}checksum"):
             type = props.get("type")
             if type != "sha" and type != "sha256":
                 log.warning(
                     "Unsupported checksum type %s in repomd.xml "
                     "for file %s", type, fname)
                 continue
             tmphash["checksum"] = elem.text
             tmphash["checksum.type"] = type
         elif tag.endswith("}timestamp"):
             tmphash["timestamp"] = elem.text
         elif tag.endswith("}open-checksum"):
             type = props.get("type")
             if type != "sha" and type != "sha256":
                 log.warning(
                     "Unsupported open-checksum type %s in "
                     "repomd.xml for file %s", type, fname)
                 continue
             tmphash["open-checksum"] = elem.text
             tmphash["open-checksum.type"] = type
     return rethash
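
The returned dictionary maps each <data type="..."> name ("primary", "filelists", ...) to its location, checksum and timestamp fields. A hypothetical caller working with the documented shape (all literal values below are made up):

# Shape as documented in the docstring above; placeholder values.
repomd = {
    "primary": {"location": "repodata/primary.xml.gz",
                "checksum": "0123abcd",
                "checksum.type": "sha256",
                "timestamp": "1199145600"},
}

entry = repomd.get("primary")
if entry is not None:
    print("%s (%s)" % (entry["location"], entry["checksum.type"]))
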
Example #6
    def _isExcluded(self, pkg):
        """Return True if RpmPackage pkg is excluded by configuration."""

        if pkg["arch"] == "src":
            return 1
        if not self.config.ignorearch and \
           (not functions.archCompat(pkg["arch"], self.config.machine) or \
            (self.config.archlist != None and not pkg["arch"] in self.config.archlist)) and \
           not pkg.isSourceRPM():
                log.warning("%s: Package excluded because of arch "
                            "incompatibility", pkg.getNEVRA())
                return 1

        index = lists.NevraList()
        index.addPkg(pkg)
        result = index.search(self.excludes)
        return bool(result)
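
A condensed, hypothetical restatement of the arch test above, with the configuration fields passed in explicitly; it mirrors the condition only and is not part of the pyrpm API:

def arch_excluded(pkg_arch, machine, archlist, ignorearch, is_source,
                  arch_compat):
    """True when a binary package should be dropped for its architecture."""
    if ignorearch or is_source:
        return False
    if not arch_compat(pkg_arch, machine):
        return True
    # An explicit archlist, when configured, further restricts the set.
    return archlist is not None and pkg_arch not in archlist
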
Example #7
File: io.py Project: kholia/pyrpm
 def updateDigestFromRegion(self, digest, region, header_pos):
     if region is None or len(region) != 16:
         # What was the digest computed from?
         raise ValueError, "No region"
     (tag, type_, offset, count) = unpack("!2IiI", region)
     # FIXME: other regions than "immutable"?
     if (tag != 63 or type_ != RPM_BIN or -offset <= 0 or -offset % 16 != 0
         or count != 16):
         raise ValueError, "Invalid region"
     regionIndexEntries = -offset / 16
     if header_pos[0] is None:
         raise NotImplementedError
     fd = self.__getFdForRange(*header_pos)
     data = fd.read(16)
     if len(data) != 16:
         raise ValueError, "Unexpected EOF in header"
     (totalIndexEntries, totalDataSize) = unpack("!8x2I", data)
     data = fd.read(16 * totalIndexEntries)
     if len(data) != 16 * totalIndexEntries:
         raise ValueError, "Unexpected EOF in header"
     unsignedTags = []
     for i in xrange(totalIndexEntries):
         (tag, type_, offset, count) = \
               unpack("!4I", data[i * 16 : (i + 1) * 16])
         # FIXME: other regions than "immutable"?
         if tag == 63:
             break
         unsignedTags.append(tag)
     else:
         raise ValueError, "%s: immutable tag disappeared" % self.source
     if (type_ != RPM_BIN or count != 16 or
         i + regionIndexEntries > totalIndexEntries):
         raise ValueError, "Invalid region tag"
     digest.update(pack("!2I", regionIndexEntries, offset + 16))
     digest.update(data[i * 16 : (i + regionIndexEntries) * 16])
     for i in xrange(i + regionIndexEntries, totalIndexEntries):
         (tag,) = unpack("!I", data[i * 16 : i * 16 + 4])
         unsignedTags.append(tag)
     if unsignedTags:
         # FIXME: only once per package
         log.warning("%s: Unsigned tags %s",
                     self.source, [rpmtagname[i] for i in unsignedTags])
     # In practice region data starts at offset 0, but the original design
     # was proposing concatenated regions etc; where would the data region
     # start in that case? Lowest offset in region perhaps?
     functions.updateDigestFromFile(digest, fd, offset + 16)
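
The region trailer and every header index entry are fixed 16-byte records: tag, type, offset and count as big-endian 32-bit values, with the region offset signed. A small sketch of packing and unpacking such a record with the same struct formats used above (RPM_BIN is 7, the RPM binary data type; the values are invented):

from struct import pack, unpack

RPM_BIN = 7
# Fake "immutable" region trailer: tag 63, type RPM_BIN, an offset that
# points back over three 16-byte index entries, count 16.
region = pack("!2IiI", 63, RPM_BIN, -3 * 16, 16)

tag, type_, offset, count = unpack("!2IiI", region)
print("tag=%d type=%d offset=%d count=%d" % (tag, type_, offset, count))
# tag=63 type=7 offset=-48 count=16
print("index entries covered: %d" % (-offset // 16))    # 3
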
Example #8
 def updateDigestFromRegion(self, digest, region, header_pos):
     if region is None or len(region) != 16:
         # What was the digest computed from?
         raise ValueError, "No region"
     (tag, type_, offset, count) = unpack("!2IiI", region)
     # FIXME: other regions than "immutable"?
     if (tag != 63 or type_ != RPM_BIN or -offset <= 0 or -offset % 16 != 0
             or count != 16):
         raise ValueError, "Invalid region"
     regionIndexEntries = -offset / 16
     if header_pos[0] is None:
         raise NotImplementedError
     fd = self.__getFdForRange(*header_pos)
     data = fd.read(16)
     if len(data) != 16:
         raise ValueError, "Unexpected EOF in header"
     (totalIndexEntries, totalDataSize) = unpack("!8x2I", data)
     data = fd.read(16 * totalIndexEntries)
     if len(data) != 16 * totalIndexEntries:
         raise ValueError, "Unexpected EOF in header"
     unsignedTags = []
     for i in xrange(totalIndexEntries):
         (tag, type_, offset, count) = \
               unpack("!4I", data[i * 16 : (i + 1) * 16])
         # FIXME: other regions than "immutable"?
         if tag == 63:
             break
         unsignedTags.append(tag)
     else:
         raise ValueError, "%s: immutable tag disappeared" % self.source
     if (type_ != RPM_BIN or count != 16
             or i + regionIndexEntries > totalIndexEntries):
         raise ValueError, "Invalid region tag"
     digest.update(pack("!2I", regionIndexEntries, offset + 16))
     digest.update(data[i * 16:(i + regionIndexEntries) * 16])
     for i in xrange(i + regionIndexEntries, totalIndexEntries):
         (tag, ) = unpack("!I", data[i * 16:i * 16 + 4])
         unsignedTags.append(tag)
     if unsignedTags:
         # FIXME: only once per package
         log.warning("%s: Unsigned tags %s", self.source,
                     [rpmtagname[i] for i in unsignedTags])
     # In practice region data starts at offset 0, but the original design
     # was proposing concatenated regions etc; where would the data region
     # start in that case? Lowest offset in region perhaps?
     functions.updateDigestFromFile(digest, fd, offset + 16)
Example #9
 def __parseRepomd(self, ip):
     """Parse repomd.xml for SHA1 checks of the files.
     Returns a hash of the form:
       name -> {location, checksum, timestamp, open-checksum}"""
     rethash = {}
     # Keep the heavily used per-file dict in locals to speed up this loop
     tmphash = {}
     fname = None
     for event, elem in ip:
         isend = (event == "end")
         props = elem.attrib
         tag = elem.tag
         if not isend and tag.endswith("}data"):
             fname = props.get("type")
             if not fname:
                 break
             tmphash = {}
             rethash[fname] = tmphash
         if not isend:
             continue
         if   tag.endswith("}repomd"):
             break
         elif tag.endswith("}location"):
             loc = props.get("href")
             if loc:
                 tmphash["location"] = loc
         elif tag.endswith("}checksum"):
             type = props.get("type")
             if type != "sha" and type != "sha256":
                 log.warning("Unsupported checksum type %s in repomd.xml "
                             "for file %s", type, fname)
                 continue
             tmphash["checksum"] = elem.text
             tmphash["checksum.type"] = type
         elif tag.endswith("}timestamp"):
             tmphash["timestamp"] = elem.text
         elif tag.endswith("}open-checksum"):
             type = props.get("type")
             if type != "sha" and type != "sha256":
                 log.warning("Unsupported open-checksum type %s in "
                             "repomd.xml for file %s", type, fname)
                 continue
             tmphash["open-checksum"] = elem.text
             tmphash["open-checksum.type"] = type
     return rethash
Example #10
    def __parse(self, ip):
        """Parse node and its siblings under the root element.

        Return 1 on success, 0 on failure.  Handle <group>, <grouphierarchy>,
        warn about other tags."""

        for event, elem in ip:
            tag = elem.tag
            if  tag == "comps":
                continue
            elif tag == "group" or tag == "category":
                self.__parseGroup(ip)
            elif tag == "grouphierarchy":
                ret = self.__parseGroupHierarchy(ip)
            else:
                log.warning("Unknown entry in comps.xml: %s", tag)
                return 0
        return 1
Example #11
    def _isExcluded(self, pkg):
        """Return True if RpmPackage pkg is excluded by configuration."""

        if pkg["arch"] == "src":
            return 1
        if not self.config.ignorearch and \
           (not functions.archCompat(pkg["arch"], self.config.machine) or \
            (self.config.archlist != None and not pkg["arch"] in self.config.archlist)) and \
           not pkg.isSourceRPM():
            log.warning(
                "%s: Package excluded because of arch "
                "incompatibility", pkg.getNEVRA())
            return 1

        index = lists.NevraList()
        index.addPkg(pkg)
        result = index.search(self.excludes)
        return bool(result)
Example #12
    def create(self, filename):
        """Create an initial database"""

        # If it exists, remove it as we were asked to create a new one
        if os.path.exists(filename):
            try:
                os.unlink(filename)
            except OSError:
                pass

        # Try to create the database in filename, or fall back to an
        # in-memory database when this fails
        try:
            # Touch the file first so an unwritable path raises IOError
            # before sqlite3.connect() is attempted.
            open(filename, 'w').close()
            db = sqlite3.connect(filename)
        except IOError:
            log.warning("Could not create sqlite cache file, using in memory "
                        "cache instead")
            db = sqlite3.connect(":memory:")
        db.row_factory = sqlite3.Row
        db.text_factory = str
        return db
Example #13
    def create(self, filename):
        """Create an initial database"""

        # If it exists, remove it as we were asked to create a new one
        if os.path.exists(filename):
            try:
                os.unlink(filename)
            except OSError:
                pass

        # Try to create the database in filename, or fall back to an
        # in-memory database when this fails
        try:
            # Touch the file first so an unwritable path raises IOError
            # before sqlite3.connect() is attempted.
            open(filename, 'w').close()
            db = sqlite3.connect(filename)
        except IOError:
            log.warning("Could not create sqlite cache file, using in memory "
                        "cache instead")
            db = sqlite3.connect(":memory:")
        db.row_factory = sqlite3.Row
        db.text_factory = str
        return db
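
create() tries the on-disk file first and falls back to an in-memory database when the file cannot be written. A standalone sketch of the same fallback, assuming only the standard sqlite3 module (the path and table below are arbitrary examples):

import sqlite3

def open_cache(path):
    """Open a writable sqlite cache at path, else fall back to :memory:."""
    try:
        # Touch the file first so an unwritable location fails early.
        open(path, "w").close()
        db = sqlite3.connect(path)
    except (IOError, OSError):
        db = sqlite3.connect(":memory:")
    db.row_factory = sqlite3.Row
    return db

db = open_cache("/tmp/pkgcache.sqlite")
db.execute("CREATE TABLE IF NOT EXISTS packages (name TEXT)")
db.close()
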
Example #14
    def __parse(self, ip):
        """Parse node and its siblings under the root element.

        Return 1 on success, 0 on failure.  Handle <group>, <grouphierarchy>,
        warn about other tags."""

        for event, elem in ip:
            tag = elem.tag
            if  tag == "comps":
                continue
            elif tag == "group" or tag == "category":
                self.__parseGroup(ip)
            elif tag == "grouphierarchy":
                ret = self.__parseGroupHierarchy(ip)
            elif tag == "blacklist" or tag == "whiteout":
                for ev2, el2 in ip:
                    if ev2 == "end" and (el2.tag == "blacklist" or el2.tag == "whiteout"):
                        break
            else:
                log.warning("Unknown entry in comps.xml: %s", tag)
                return 0
        return 1
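
The blacklist/whiteout branch simply drains events from the shared iterator until the matching end tag, which skips the whole subtree. The same skip pattern in isolation (the XML literal is made up):

import xml.etree.ElementTree as ET
from io import BytesIO

xml = b"<comps><blacklist><pkg/><pkg/></blacklist><group/></comps>"
ip = ET.iterparse(BytesIO(xml), events=("start", "end"))
for event, elem in ip:
    if event == "start" and elem.tag == "blacklist":
        # Consume nested events until </blacklist>; the outer loop never
        # sees the children of the skipped element.
        for ev2, el2 in ip:
            if ev2 == "end" and el2.tag == "blacklist":
                break
    elif event == "start":
        print(elem.tag)    # prints "comps", then "group"
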
Example #15
        #odoc = libxml2.newDoc("1.0")
        #oroot = odoc.newChild(None, "filelists", None)
        log.info1("Pass 2: Writing repodata information.")
        pfd.write('<?xml version="1.0" encoding="UTF-8"?>\n')
        pfd.write('<metadata xmlns="http://linux.duke.edu/metadata/common" xmlns:rpm="http://linux.duke.edu/metadata/rpm" packages="%d">\n' % len(self.getPkgs()))
        ffd.write('<?xml version="1.0" encoding="UTF-8"?>\n')
        ffd.write('<filelists xmlns:rpm="http://linux.duke.edu/filelists" packages="%d">\n' % len(self.getPkgs()))
        for pkg in self.getPkgs():
            log.info2("Processing complete data of package %s.",
                      pkg.getNEVRA())
            pkg.header_read = 0
            try:
                pkg.open()
                pkg.read()
            except (IOError, ValueError), e:
                log.warning("%s: %s", pkg.getNEVRA(), e)
                continue
            # If it is a source rpm change the arch to "src". Only valid
            # for createRepo, never do this anywhere else. ;)
            if pkg.isSourceRPM():
                pkg["arch"] = "src"
            try:
                checksum = self.__getChecksum(pkg)
            except (IOError, NotImplementedError), e:
                log.warning("%s: %s", pkg.getNEVRA(), e)
                continue
            pkg["yumchecksum"] = checksum
            self.__writePrimary(pfd, proot, pkg)
            self.__writeFilelists(ffd, froot, pkg)
#            self.__writeOther(ofd, oroot, pkg)
            try:
Example #16
 pfd.write(
     '<metadata xmlns="http://linux.duke.edu/metadata/common" xmlns:rpm="http://linux.duke.edu/metadata/rpm" packages="%d">\n'
     % len(self.getPkgs()))
 ffd.write('<?xml version="1.0" encoding="UTF-8"?>\n')
 ffd.write(
     '<filelists xmlns:rpm="http://linux.duke.edu/filelists" packages="%d">\n'
     % len(self.getPkgs()))
 for pkg in self.getPkgs():
     log.info2("Processing complete data of package %s.",
               pkg.getNEVRA())
     pkg.header_read = 0
     try:
         pkg.open()
         pkg.read()
     except (IOError, ValueError), e:
         log.warning("%s: %s", pkg.getNEVRA(), e)
         continue
     # If it is a source rpm change the arch to "src". Only valid
     # for createRepo, never do this anywhere else. ;)
     if pkg.isSourceRPM():
         pkg["arch"] = "src"
     try:
         checksum = self.__getChecksum(pkg)
     except (IOError, NotImplementedError), e:
         log.warning("%s: %s", pkg.getNEVRA(), e)
         continue
     pkg["yumchecksum"] = checksum
     self.__writePrimary(pfd, proot, pkg)
     self.__writeFilelists(ffd, froot, pkg)
     #            self.__writeOther(ofd, oroot, pkg)
     try:
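
Both writers above emit XML by hand with % formatting, so any text that comes from package headers has to be escaped before it is written. A tiny illustration with the standard library escaper (the element layout is simplified, not the real yum schema):

from xml.sax.saxutils import escape

name, summary = "foo", 'Tools & <helpers>'
print('<package><name>%s</name><summary>%s</summary></package>'
      % (escape(name), escape(summary)))
# <package><name>foo</name><summary>Tools &amp; &lt;helpers&gt;</summary></package>
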
Example #17

import sys, time
from pyrpm.database.jointdb import JointDB
from pyrpm.database.sqliterepodb import SqliteRepoDB
from pyrpm.logger import log
sys.path.append("/usr/share/rhn/")
try:
    import up2date_client.up2dateAuth as up2dateAuth
    import up2date_client.config as rhnconfig
    from up2date_client import rhnChannel
    from up2date_client import rhnPackageInfo
    from up2date_client import up2dateErrors
    use_rhn = True
except:
    log.warning("Couldn't import up2date_client modules. Disabling RHN support.")
    use_rhn = False


class RhnRepoDB(JointDB):

    def __init__(self, config, source, buildroot='', nc=None):
        JointDB.__init__(self, config, source, buildroot)
        self.comps = None
        self.reponame = "rhnrepo"
        if not use_rhn:
            return
        up2date_cfg = rhnconfig.initUp2dateConfig()
        try:
            login_info = up2dateAuth.getLoginInfo()
        except up2dateErrors.RhnServerException, e: