Exemplo n.º 1
0
    def _do_checksum(self):
        """return a checksum for a package:
           - check if the checksum cache is enabled
              if not - return the checksum
              if so - check to see if it has a cache file
                if so, open it and return the first line's contents
                if not, grab the checksum and write it to a file for this pkg
         """
        # already computed and memoized
        if self._checksum:
            return self._checksum

        # not using the cachedir - compute directly
        if not self._cachedir:
            self._checksum = misc.checksum(self.checksum_type, self.localpath)
            self._checksums = [(self.checksum_type, self._checksum, 1)]
            return self._checksum

        # Build a cache key from whichever signature/header-id tags the rpm
        # header carries; any of them may be absent (None).
        t = []
        if self.hdr[rpm.RPMTAG_SIGGPG] is not None:
            t.append("".join(self.hdr[rpm.RPMTAG_SIGGPG]))
        if self.hdr[rpm.RPMTAG_SIGPGP] is not None:
            t.append("".join(self.hdr[rpm.RPMTAG_SIGPGP]))
        if self.hdr[rpm.RPMTAG_HDRID] is not None:
            t.append("".join(self.hdr[rpm.RPMTAG_HDRID]))

        kcsum = misc.Checksums(checksums=[self.checksum_type])
        kcsum.update("".join(t))
        key = kcsum.hexdigest()

        csumtag = '%s-%s-%s-%s' % (os.path.basename(self.localpath),
                                   key, self.size, self.filetime)
        csumfile = '%s/%s' % (self._cachedir, csumtag)

        # Cache hit only when the cache file is at least as new as the pkg.
        if os.path.exists(csumfile) and float(self.filetime) <= float(os.stat(csumfile)[-2]):
            csumo = open(csumfile, 'r')
            checksum = csumo.readline()
            csumo.close()

        else:
            checksum = misc.checksum(self.checksum_type, self.localpath)

            #  This is atomic cache creation via. rename, so we can have two
            # tasks using the same cachedir ... mash does this.
            try:
                (csumo, tmpfilename) = tempfile.mkstemp(dir=self._cachedir)
                csumo = os.fdopen(csumo, 'w', -1)
                csumo.write(checksum)
                csumo.close()
                os.rename(tmpfilename, csumfile)
            except Exception:
                # Best-effort cache write: never let cache maintenance break
                # the checksum computation.  (Was a bare "except:", which
                # also swallowed KeyboardInterrupt/SystemExit.)
                pass

        self._checksum = checksum
        self._checksums = [(self.checksum_type, checksum, 1)]

        return self._checksum
Exemplo n.º 2
0
    def _do_checksum(self):
        """return a checksum for a package:
           - check if the checksum cache is enabled
              if not - return the checksum
              if so - check to see if it has a cache file
                if so, open it and return the first line's contents
                if not, grab the checksum and write it to a file for this pkg
         """
        # already got it
        if self._checksum:
            return self._checksum

        # not using the cachedir - compute directly
        if not hasattr(self, '_cachedir') or not self._cachedir:
            self._checksum = misc.checksum(self.checksum_type, self.localpath)
            self._checksums = [(self.checksum_type, self._checksum, 1)]
            return self._checksum

        # Cache key: whichever signature/header-id tags are present in the
        # rpm header (each may be None).
        t = []
        if self.hdr[rpm.RPMTAG_SIGGPG] is not None:
            t.append("".join(self.hdr[rpm.RPMTAG_SIGGPG]))
        if self.hdr[rpm.RPMTAG_SIGPGP] is not None:
            t.append("".join(self.hdr[rpm.RPMTAG_SIGPGP]))
        if self.hdr[rpm.RPMTAG_HDRID] is not None:
            t.append("".join(self.hdr[rpm.RPMTAG_HDRID]))

        kcsum = misc.Checksums(checksums=[self.checksum_type])
        kcsum.update("".join(t))
        key = kcsum.hexdigest()

        csumtag = '%s-%s-%s-%s' % (os.path.basename(
            self.localpath), key, self.size, self.filetime)
        csumfile = '%s/%s' % (self._cachedir, csumtag)

        # Cache hit only if the cache file is as new as the package file.
        if os.path.exists(csumfile) and float(self.filetime) <= float(
                os.stat(csumfile)[-2]):
            csumo = open(csumfile, 'r')
            checksum = csumo.readline()
            csumo.close()

        else:
            checksum = misc.checksum(self.checksum_type, self.localpath)

            #  This is atomic cache creation via. rename, so we can have two
            # tasks using the same cachedir ... mash does this.
            try:
                (csumo, tmpfilename) = tempfile.mkstemp(dir=self._cachedir)
                csumo = os.fdopen(csumo, 'w', -1)
                csumo.write(checksum)
                csumo.close()
                os.rename(tmpfilename, csumfile)
            except Exception:
                # Best-effort cache write; narrowed from a bare "except:"
                # so KeyboardInterrupt/SystemExit are no longer swallowed.
                pass

        self._checksum = checksum
        self._checksums = [(self.checksum_type, checksum, 1)]

        return self._checksum
Exemplo n.º 3
0
class DeltaRPMPackage:
    """each drpm is one object, you pass it a drpm file
       it opens the file, and pulls the information out in bite-sized chunks :)
    """

    mode_cache = {}

    def __init__(self, po, basedir, filename):
        try:
            stats = os.stat(os.path.join(basedir, filename))
            self.size = stats[6]
            self.mtime = stats[8]
            del stats
        except OSError, e:
            raise MDError, "Error Stat'ing file %s%s" % (basedir, filename)
        self.csum_type = 'sha256'
        self.relativepath = filename
        self.po  = po

        fd = os.open(self.po.localpath, os.O_RDONLY)
        os.lseek(fd, 0, 0)
        fo = os.fdopen(fd, 'rb')
        self.csum = misc.checksum(self.csum_type, fo)
        del fo
        del fd
        self._getDRPMInfo(os.path.join(basedir, filename))
Exemplo n.º 4
0
def checksum_and_rename(fn_path, sumtype='sha256'):
    """checksum the file rename the file to contain the checksum as a prefix
       return the new filename"""
    csum = misc.checksum(sumtype, fn_path)
    fn = os.path.basename(fn_path)
    fndir = os.path.dirname(fn_path)
    # Strip any existing checksum prefix so repeated calls don't stack
    # prefixes (hex digests are 32-128 chars for md5 through sha512).
    fn_match = re.match(r'[0-9A-Fa-f]{32,128}-(.+)', fn)
    if fn_match:
        fn = fn_match.groups()[0]
    csum_fn = csum + '-' + fn
    csum_path = os.path.join(fndir, csum_fn)
    os.rename(fn_path, csum_path)
    return (csum, csum_path)
Exemplo n.º 5
0
def checksum_and_rename(fn_path, sumtype='sha256'):
    """checksum the file rename the file to contain the checksum as a prefix
       return the new filename"""
    csum = misc.checksum(sumtype, fn_path)
    fn = os.path.basename(fn_path)
    fndir = os.path.dirname(fn_path)
    # Drop a pre-existing "<hexdigest>-" prefix before adding the new one,
    # otherwise re-running this on an already-renamed file stacks prefixes.
    fn_match = re.match(r'[0-9A-Fa-f]{32,128}-(.+)', fn)
    if fn_match:
        fn = fn_match.groups()[0]
    csum_fn = csum + '-' + fn
    csum_path = os.path.join(fndir, csum_fn)
    os.rename(fn_path, csum_path)
    return (csum, csum_path)
Exemplo n.º 6
0
def checksum_and_rename(fn_path, sumtype='sha256'):
    """Prefix the file's basename with its checksum via a rename.

    Any existing hex-digest prefix on the name is stripped first.
    Returns a (checksum, new_path) tuple.
    """
    digest = misc.checksum(sumtype, fn_path)
    dirname, basename = os.path.split(fn_path)
    # Remove a previous "<hexdigest>-" prefix, if present.
    match = re.match(r'[0-9A-Fa-f]{32,128}-(.+)', basename)
    if match:
        basename = match.group(1)
    new_path = os.path.join(dirname, digest + '-' + basename)
    os.rename(fn_path, new_path)
    return (digest, new_path)
Exemplo n.º 7
0
def checksum_and_rename(fn_path, sumtype='sha256'):
    """Rename *fn_path* so its basename carries its checksum as a prefix.

    A pre-existing hex-digest prefix is removed before renaming; returns
    (checksum, renamed_path).
    """
    csum = misc.checksum(sumtype, fn_path)
    directory = os.path.dirname(fn_path)
    name = os.path.basename(fn_path)
    prefixed = re.match(r'[0-9A-Fa-f]{32,128}-(.+)', name)
    if prefixed is not None:
        # keep only the part after the old digest prefix
        name = prefixed.groups()[0]
    target = os.path.join(directory, csum + '-' + name)
    os.rename(fn_path, target)
    return (csum, target)
Exemplo n.º 8
0
def checkfileurl(pkg):
    """Verify a file:// package's on-disk checksum.

    Returns True when the local file's checksum matches the package's
    recorded id-sum, False on mismatch or when the file can't be read.
    """
    pkg_path = pkg.remote_url
    # Strip only a *leading* 'file://' scheme; the old blanket replace()
    # would also mangle the string anywhere it appeared inside the path.
    if pkg_path.startswith('file://'):
        pkg_path = pkg_path[len('file://'):]
    (csum_type, csum) = pkg.returnIdSum()

    try:
        filesum = checksum(csum_type, pkg_path)
    except Errors.MiscError:
        return False

    return filesum == csum
Exemplo n.º 9
0
def checkfileurl(pkg):
    """Check a file:// package's local file against its repo checksum.

    Returns False on checksum mismatch or read failure, True otherwise.
    """
    pkg_path = pkg.remote_url
    # Remove the scheme prefix only when it actually leads the url; a
    # global replace('file://', '') could corrupt unusual paths.
    if pkg_path.startswith('file://'):
        pkg_path = pkg_path[len('file://'):]
    (csum_type, csum) = pkg.returnIdSum()

    try:
        filesum = checksum(csum_type, pkg_path)
    except Errors.MiscError:
        return False

    return filesum == csum
Exemplo n.º 10
0
Arquivo: drpm.py Projeto: pnasrat/yum
    def verifyLocalPkg(self):
        """Return True iff the local file matches the expected size and checksum."""
        # Size is cheap - test it before hashing the whole file.
        try:
            local_size = os.path.getsize(self.localpath)
        except OSError:
            return False
        if local_size != self.size:
            return False

        # Now the (expensive) checksum comparison.
        ctype, expected = self.csum
        try:
            actual = checksum(ctype, self.localpath)
        except MiscError:
            return False
        if actual != expected:
            return False

        # both size and checksum agree
        return True
Exemplo n.º 11
0
    def verifyLocalPkg(self):
        """Validate the downloaded file: correct size first, then checksum.

        Returns False for an unreadable file, a size mismatch, or a
        checksum mismatch; True otherwise.
        """
        # quick rejection on size before paying for a full hash
        try:
            on_disk = os.path.getsize(self.localpath)
        except OSError:
            return False
        if on_disk != self.size:
            return False

        ctype, want = self.csum
        try:
            got = checksum(ctype, self.localpath)
        except MiscError:
            return False
        if got != want:
            return False

        # hooray
        return True
Exemplo n.º 12
0
Arquivo: util.py Projeto: tyll/bodhi
def sanity_check_repodata(myurl):
    """
    Sanity check the repodata for a given repository.
    Initial implementation by Seth Vidal.

    Fetches repomd.xml from *myurl*, then downloads each metadata file it
    references, verifying its checksum and (for xml files) that xmllint can
    parse it.  Problems are accumulated as strings in ``errorstrings``.

    NOTE(review): within this excerpt ``errorstrings`` is built but never
    returned, and ``tempdir`` is never removed - confirm against the full
    function.
    """
    myurl = str(myurl)
    tempdir = tempfile.mkdtemp()
    errorstrings = []
    # Normalize: ensure a trailing '/', and derive both the repository base
    # url and the repodata/ url from the input.
    if myurl[-1] != '/':
        myurl += '/'
    baseurl = myurl
    if not myurl.endswith('repodata/'):
        myurl += 'repodata/'
    else:
        baseurl = baseurl.replace('repodata/', '/')

    rf = myurl + 'repomd.xml'
    try:
        rm = urlgrabber.urlopen(rf)
        repomd = repoMDObject.RepoMD('foo', rm)
        for t in repomd.fileTypes():
            data = repomd.getData(t)
            # location is a (base, href) pair; base may be empty/None, in
            # which case href is relative to the repository base url
            base, href = data.location
            if base:
                loc = base + '/' + href
            else:
                loc = baseurl + href

            destfn = tempdir + '/' + os.path.basename(href)
            dest = urlgrabber.urlgrab(loc, destfn)
            ctype, known_csum = data.checksum
            csum = checksum(ctype, dest)
            if csum != known_csum:
                errorstrings.append("checksum: %s" % t)

            # any xml metadata must also be well-formed xml
            if href.find('xml') != -1:
                decompressed = decompress(dest)
                retcode = subprocess.call(
                    ['/usr/bin/xmllint', '--noout', decompressed])
                if retcode != 0:
                    errorstrings.append("failed xml read: %s" % t)

    except urlgrabber.grabber.URLGrabError, e:
        errorstrings.append('Error accessing repository %s' % e)
Exemplo n.º 13
0
def treeinfo_checksum(treeinfo):
    # read treeinfo file into cp
    # take checksums section
    result = 0
    cp = LocalConfigParser()
    try:
        cp.read(treeinfo)
    except ConfigParser.MissingSectionHeaderError:
        # Generally this means we failed to access the file
        print "  could not find sections in treeinfo file %s" % treeinfo
        return BAD_IMAGES
    except ConfigParser.Error:
        print "  could not parse treeinfo file %s" % treeinfo
        return BAD_IMAGES

    if not cp.has_section('checksums'):
        print "  no checksums section in treeinfo file %s" % treeinfo
        return BAD_IMAGES

    dir_path = os.path.dirname(treeinfo)
    for opt in cp.options('checksums'):

        fnpath = dir_path + '/%s' % opt
        fnpath = os.path.normpath(fnpath)
        csuminfo = cp.get('checksums', opt).split(':')
        if len(csuminfo) < 2:
            print "  checksum information doesn't make any sense for %s." % opt
            result = BAD_IMAGES
            continue

        if not os.path.exists(fnpath):
            print "  cannot find file %s listed in treeinfo" % fnpath
            result = BAD_IMAGES
            continue

        csum = checksum(csuminfo[0], fnpath)
        if csum != csuminfo[1]:
            print "  file %s %s does not match:\n   ondisk %s vs treeinfo: %s" % (
                opt, csuminfo[0], csum, csuminfo[1])
            result = BAD_IMAGES
            continue

    return result
Exemplo n.º 14
0
Arquivo: util.py Projeto: kalev/bodhi
def sanity_check_repodata(myurl):
    """
    Sanity check the repodata for a given repository.
    Initial implementation by Seth Vidal.

    Downloads repomd.xml and every metadata file it references, checking
    each file's checksum and, for xml files, whether xmllint accepts it.
    Failures are collected in ``errorstrings``.

    NOTE(review): in this excerpt ``errorstrings`` is never returned and
    ``tempdir`` is never cleaned up - verify against the full source.
    """
    myurl = str(myurl)
    tempdir = tempfile.mkdtemp()
    errorstrings = []
    # normalize the url: trailing '/', plus separate base and repodata urls
    if myurl[-1] != '/':
        myurl += '/'
    baseurl = myurl
    if not myurl.endswith('repodata/'):
        myurl += 'repodata/'
    else:
        baseurl = baseurl.replace('repodata/', '/')

    rf = myurl + 'repomd.xml'
    try:
        rm = urlgrabber.urlopen(rf)
        repomd = repoMDObject.RepoMD('foo', rm)
        for t in repomd.fileTypes():
            data = repomd.getData(t)
            # (base, href): empty base means href is repo-relative
            base, href = data.location
            if base:
                loc = base + '/' + href
            else:
                loc = baseurl + href

            destfn = tempdir + '/' + os.path.basename(href)
            dest = urlgrabber.urlgrab(loc, destfn)
            ctype, known_csum = data.checksum
            csum = checksum(ctype, dest)
            if csum != known_csum:
                errorstrings.append("checksum: %s" % t)

            # xml metadata must additionally be well-formed
            if href.find('xml') != -1:
                decompressed = decompress(dest)
                retcode = subprocess.call(['/usr/bin/xmllint', '--noout', decompressed])
                if retcode != 0:
                    errorstrings.append("failed xml read: %s" % t)

    except urlgrabber.grabber.URLGrabError, e:
        errorstrings.append('Error accessing repository %s' % e)
Exemplo n.º 15
0
def treeinfo_checksum(treeinfo):
    """Verify every file listed in the [checksums] section of *treeinfo*.

    Returns 0 when all checksums match, BAD_IMAGES on any parse error,
    missing file, or checksum mismatch.
    """
    # read treeinfo file into cp
    # take checksums section
    result = 0
    cp = LocalConfigParser()
    try:
        cp.read(treeinfo)
    except ConfigParser.MissingSectionHeaderError:
        # Generally this means we failed to access the file
        print "  could not find sections in treeinfo file %s" % treeinfo
        return BAD_IMAGES
    except ConfigParser.Error:
        print "  could not parse treeinfo file %s" % treeinfo
        return BAD_IMAGES

    if not cp.has_section('checksums'):
        print "  no checksums section in treeinfo file %s" % treeinfo
        return BAD_IMAGES

    dir_path = os.path.dirname(treeinfo)
    for opt in cp.options('checksums'):

        # each option maps a file (relative to the treeinfo directory) to a
        # "<checksum-type>:<digest>" value
        fnpath = dir_path + '/%s' % opt
        fnpath = os.path.normpath(fnpath)
        csuminfo = cp.get('checksums', opt).split(':')
        if len(csuminfo) < 2:
            print "  checksum information doesn't make any sense for %s." % opt
            result = BAD_IMAGES
            continue

        if not os.path.exists(fnpath):
            print "  cannot find file %s listed in treeinfo" % fnpath
            result = BAD_IMAGES
            continue

        # compare the on-disk digest against the recorded one
        csum = checksum(csuminfo[0], fnpath)
        if csum != csuminfo[1]:
            print "  file %s %s does not match:\n   ondisk %s vs treeinfo: %s" % (opt, csuminfo[0], csum, csuminfo[1])
            result = BAD_IMAGES
            continue

    return result
Exemplo n.º 16
0
    def add(self, metadata, mdtype=None):
        """ Insert arbitrary metadata into this repository.
            metadata can be either an xml.dom.minidom.Document object, or
            a filename.
        """
        md = None
        if not metadata:
            raise MDError, 'metadata cannot be None'
        if isinstance(metadata, minidom.Document):
            md = metadata.toxml()
            mdname = 'updateinfo.xml'
            # track checksums/length of the uncompressed content so the
            # open-checksum fields can be filled in below
            oldmd = AutoFileChecksums(StringIO(md), [self.checksum_type])
            oldmd.read()
        elif isinstance(metadata, str):
            if os.path.exists(metadata):
                mdname = os.path.basename(metadata)
                # transparently decompress gz/bz2/xz input and drop the
                # compression suffix from the stored name
                if mdname.split('.')[-1] in ('gz', 'bz2', 'xz'):
                    mdname = mdname.rsplit('.', 1)[0]
                    oldmd = compressOpen(metadata, mode='rb')
                else:
                    oldmd = file(metadata, 'r')
                oldmd = AutoFileChecksums(oldmd, [self.checksum_type])
                md = oldmd.read()
                oldmd.close()
            else:
                raise MDError, '%s not found' % metadata
        else:
            raise MDError, 'invalid metadata type'

        # xz cannot represent a zero-length stream; fail early
        if not md and self.compress_type == 'xz':
            raise MDError, 'LZMA does not support compressing empty files'

        ## Compress the metadata and move it into the repodata
        mdtype = self._get_mdtype(mdname, mdtype)
        destmd = os.path.join(self.repodir, mdname)
        if self.compress:
            destmd += '.' + self.compress_type
            newmd = compressOpen(destmd,
                                 mode='wb',
                                 compress_type=self.compress_type)
        else:
            newmd = open(destmd, 'wb')

        newmd.write(md)
        newmd.close()
        print "Wrote:", destmd

        # optionally embed the checksum into the on-disk filename
        if self.unique_md_filenames:
            csum, destmd = checksum_and_rename(destmd, self.checksum_type)
        else:
            csum = checksum(self.checksum_type, destmd)
        base_destmd = os.path.basename(destmd)

        # Remove any stale metadata
        old_rd = self.repoobj.repoData.pop(mdtype, None)

        # register the new metadata entry in repomd
        new_rd = RepoData()
        new_rd.type = mdtype
        new_rd.location = (None, 'repodata/' + base_destmd)
        new_rd.checksum = (self.checksum_type, csum)
        new_rd.size = str(os.stat(destmd).st_size)
        if self.compress:
            # open-checksum/open-size describe the *uncompressed* content
            new_rd.openchecksum = (self.checksum_type,
                                   oldmd.checksums.hexdigests().popitem()[1])
            new_rd.opensize = str(oldmd.checksums.length)
        new_rd.timestamp = str(int(os.stat(destmd).st_mtime))
        self.repoobj.repoData[new_rd.type] = new_rd
        self._print_repodata(new_rd)
        self._write_repomd()

        if old_rd is not None and old_rd.location[1] != new_rd.location[1]:
            # remove the old file when overwriting metadata
            # with the same mdtype but different location
            self._remove_repodata_file(old_rd)
Exemplo n.º 17
0
    def add(self, metadata, mdtype=None):
        """ Insert arbitrary metadata into this repository.
            metadata can be either an xml.dom.minidom.Document object, or
            a filename.
        """
        md = None
        if not metadata:
            raise MDError, 'metadata cannot be None'
        if isinstance(metadata, minidom.Document):
            md = metadata.toxml()
            mdname = 'updateinfo.xml'
        elif isinstance(metadata, str):
            if os.path.exists(metadata):
                mdname = os.path.basename(metadata)
                if mdname.split('.')[-1] in ('gz', 'bz2', 'xz'):
                    mdname = mdname.rsplit('.', 1)[0]
                    oldmd = compressOpen(metadata, mode='rb')
                else:
                    oldmd = file(metadata, 'r')
                oldmd = AutoFileChecksums(oldmd, [self.checksum_type])
                md = oldmd.read()
                oldmd.close()
            else:
                raise MDError, '%s not found' % metadata
        else:
            raise MDError, 'invalid metadata type'

        ## Compress the metadata and move it into the repodata
        mdtype = self._get_mdtype(mdname, mdtype)
        destmd = os.path.join(self.repodir, mdname)
        if self.compress:
            destmd += '.' + self.compress_type
            newmd = compressOpen(destmd, mode='wb', compress_type=self.compress_type)
        else:
            newmd = open(destmd, 'wb')
            
        newmd.write(md)
        newmd.close()
        print "Wrote:", destmd

        if self.unique_md_filenames:
            csum, destmd = checksum_and_rename(destmd, self.checksum_type)
        else:
            csum = checksum(self.checksum_type, destmd)
        base_destmd = os.path.basename(destmd)

        # Remove any stale metadata
        old_rd = self.repoobj.repoData.pop(mdtype, None)

        new_rd = RepoData()
        new_rd.type = mdtype
        new_rd.location = (None, 'repodata/' + base_destmd)
        new_rd.checksum = (self.checksum_type, csum)
        new_rd.size = str(os.stat(destmd).st_size)
        if self.compress:
            new_rd.openchecksum = oldmd.checksums.hexdigests().popitem()
            new_rd.opensize = str(oldmd.checksums.length)
        new_rd.timestamp = str(int(os.stat(destmd).st_mtime))
        self.repoobj.repoData[new_rd.type] = new_rd
        self._print_repodata(new_rd)
        self._write_repomd()

        if old_rd is not None and old_rd.location[1] != new_rd.location[1]:
            # remove the old file when overwriting metadata
            # with the same mdtype but different location
            self._remove_repodata_file(old_rd)
Exemplo n.º 18
0
    def add(self, metadata, mdtype=None):
        """ Insert arbitrary metadata into this repository.
            metadata can be either an xml.dom.minidom.Document object, or
            a filename.
        """
        md = None
        if not metadata:
            raise MDError, 'metadata cannot be None'
        if isinstance(metadata, minidom.Document):
            md = metadata.toxml()
            mdname = 'updateinfo.xml'
        elif isinstance(metadata, str):
            if os.path.exists(metadata):
                # transparently read compressed input
                if metadata.split('.')[-1] in ('gz', 'bz2', 'xz'):
                    oldmd = compressOpen(metadata, mode='rb')
                else:
                    oldmd = file(metadata, 'r')
                md = oldmd.read()
                oldmd.close()
                mdname = os.path.basename(metadata)
            else:
                raise MDError, '%s not found' % metadata
        else:
            raise MDError, 'invalid metadata type'

        do_compress = False
        ## Compress the metadata and move it into the repodata
        # compress when configured to, or when the input wasn't already
        # carrying a compression suffix
        if self.compress or not mdname.split('.')[-1] in ('gz', 'bz2', 'xz'):
            do_compress = True
            mdname += '.' + self.compress_type
        mdtype = self._get_mdtype(mdname, mdtype)

        destmd = os.path.join(self.repodir, mdname)
        if do_compress:
            newmd = compressOpen(destmd,
                                 mode='wb',
                                 compress_type=self.compress_type)
        else:
            newmd = open(destmd, 'wb')

        newmd.write(md)
        newmd.close()
        print "Wrote:", destmd

        # NOTE(review): if 'metadata' is a minidom.Document this passes the
        # Document object straight to checksum(), and for a compressed file
        # the "open" checksum is computed over the *compressed* bytes -
        # compare with the AutoFileChecksums approach used in the other
        # add() variants; confirm before relying on openchecksum here.
        open_csum = checksum(self.checksum_type, metadata)
        csum, destmd = checksum_and_rename(destmd, self.checksum_type)
        base_destmd = os.path.basename(destmd)

        # Remove any stale metadata
        old_rd = self.repoobj.repoData.pop(mdtype, None)

        # register the new metadata entry in repomd
        new_rd = RepoData()
        new_rd.type = mdtype
        new_rd.location = (None, 'repodata/' + base_destmd)
        new_rd.checksum = (self.checksum_type, csum)
        new_rd.openchecksum = (self.checksum_type, open_csum)
        new_rd.size = str(os.stat(destmd).st_size)
        # NOTE(review): other variants use int() here; this one keeps the
        # fractional mtime
        new_rd.timestamp = str(os.stat(destmd).st_mtime)
        self.repoobj.repoData[new_rd.type] = new_rd
        self._print_repodata(new_rd)
        self._write_repomd()

        if old_rd is not None and old_rd.location[1] != new_rd.location[1]:
            # remove the old file when overwriting metadata
            # with the same mdtype but different location
            self._remove_repodata_file(old_rd)
Exemplo n.º 19
0
    def add(self, metadata, mdtype=None):
        """ Insert arbitrary metadata into this repository.
            metadata can be either an xml.dom.minidom.Document object, or
            a filename.
        """
        md = None
        if not metadata:
            raise MDError, 'metadata cannot be None'
        if isinstance(metadata, minidom.Document):
            md = metadata.toxml()
            mdname = 'updateinfo.xml'
        elif isinstance(metadata, str):
            if os.path.exists(metadata):
                # transparently read gzip-compressed input
                if metadata.endswith('.gz'):
                    oldmd = GzipFile(filename=metadata, mode='rb')
                else:
                    oldmd = file(metadata, 'r')
                md = oldmd.read()
                oldmd.close()
                mdname = os.path.basename(metadata)
            else:
                raise MDError, '%s not found' % metadata
        else:
            raise MDError, 'invalid metadata type'

        ## Compress the metadata and move it into the repodata
        if not mdname.endswith('.gz'):
            mdname += '.gz'
        if not mdtype:
            # default the metadata type from the filename stem
            mdtype = mdname.split('.')[0]
            
        destmd = os.path.join(self.repodir, mdname)
        newmd = GzipFile(filename=destmd, mode='wb')
        newmd.write(md)
        newmd.close()
        print "Wrote:", destmd

        # NOTE(review): when 'metadata' is a minidom.Document this hands
        # the Document object to checksum(); when it is a .gz file the open
        # checksum covers the compressed bytes - confirm against the
        # AutoFileChecksums handling in the newer add() variants.
        open_csum = checksum(self.checksum_type, metadata)
        csum, destmd = checksum_and_rename(destmd, self.checksum_type)
        base_destmd = os.path.basename(destmd)


        ## Remove any stale metadata
        if mdtype in self.repoobj.repoData:
            del self.repoobj.repoData[mdtype]
            

        # register the new metadata entry in repomd
        new_rd = RepoData()
        new_rd.type = mdtype
        new_rd.location = (None, 'repodata/' + base_destmd)
        new_rd.checksum = (self.checksum_type, csum)
        new_rd.openchecksum = (self.checksum_type, open_csum)
        new_rd.size = str(os.stat(destmd).st_size)
        new_rd.timestamp = str(os.stat(destmd).st_mtime)
        self.repoobj.repoData[new_rd.type] = new_rd
        
        print "           type =", new_rd.type
        print "       location =", new_rd.location[1]
        print "       checksum =", new_rd.checksum[1]
        print "      timestamp =", new_rd.timestamp
        print "  open-checksum =", new_rd.openchecksum[1]

        ## Write the updated repomd.xml
        outmd = file(self.repomdxml, 'w')
        outmd.write(self.repoobj.dump_xml())
        outmd.close()
        print "Wrote:", self.repomdxml