Exemple #1
0
def _createrepo(path):
    """Build YUM repodata for the repository rooted at *path*.

    Runs the standard createrepo three-step sequence: package metadata,
    repository metadata, then the final move into place.
    """
    config = createrepo.MetaDataConfig()
    config.directory = path
    generator = createrepo.MetaDataGenerator(config)
    generator.doPkgMetadata()
    generator.doRepoMetadata()
    generator.doFinalMove()
Exemple #2
0
def create_metadata(repo, packages=None, comps=None):
    """ Generate YUM metadata for a repository.

    This method accepts a repository object and, based on its configuration,
    generates YUM metadata for it using the createrepo sister library.

    :param repo: repository object; ``repo.pkgdir`` locates the package dir.
    :param packages: optional explicit list of package files to index.
    :param comps: optional comps (package group) XML content as a string.
    """
    util.validate_repo(repo)
    conf = createrepo.MetaDataConfig()
    conf.directory = os.path.dirname(repo.pkgdir)
    conf.outputdir = os.path.dirname(repo.pkgdir)
    if packages:
        conf.pkglist = packages
    conf.quiet = True

    groupdir = None
    if comps:
        # Write the comps XML to a scratch dir so createrepo can pick it up.
        groupdir = tempfile.mkdtemp()
        conf.groupfile = os.path.join(groupdir, 'groups.xml')
        with open(conf.groupfile, 'w') as f:
            f.write(comps)

    try:
        generator = createrepo.SplitMetaDataGenerator(conf)
        generator.doPkgMetadata()
        generator.doRepoMetadata()
        generator.doFinalMove()
    finally:
        # Remove the temporary group directory even when metadata generation
        # raises; previously it was only cleaned up on the success path.
        if groupdir and os.path.exists(groupdir):
            shutil.rmtree(groupdir)
Exemple #3
0
 def _createrepo(self):
     """Generate repodata in ``self.tmpdir``, reporting via MDCallback."""
     config = createrepo.MetaDataConfig()
     config.directory = self.tmpdir
     generator = createrepo.MetaDataGenerator(config, MDCallback())
     generator.doPkgMetadata()
     generator.doRepoMetadata()
     generator.doFinalMove()
Exemple #4
0
 def __init__(self,
              repolist=None,
              yumbase=None,
              mdconf=None,
              mdbase_class=None):
     """Set up a repository-merge helper.

     :param repolist: repositories to merge; defaults to an empty list.
     :param yumbase: ``yum.YumBase`` instance to use; a fresh one is
         created when omitted.
     :param mdconf: ``createrepo.MetaDataConfig`` to use; a default
         config is created when omitted.
     :param mdbase_class: metadata generator class; defaults to
         ``createrepo.MetaDataGenerator``.
     """
     # The default used to be a mutable `repolist=[]`, which is shared
     # between all calls; use None as the sentinel instead.
     if repolist is None:
         repolist = []
     self.repolist = repolist
     self.outputdir = '%s/merged_repo' % os.getcwd()
     self.exclude_tuples = []
     self.sort_func = self._sort_func  # callback function to magically sort pkgs
     if not mdconf:
         self.mdconf = createrepo.MetaDataConfig()
     else:
         self.mdconf = mdconf
     if not mdbase_class:
         self.mdbase_class = createrepo.MetaDataGenerator
     else:
         self.mdbase_class = mdbase_class
     if not yumbase:
         self.yumbase = yum.YumBase()
     else:
         self.yumbase = yumbase
     self.yumbase.conf.cachedir = getCacheDir()
     self.yumbase.conf.cache = 0
     # default to all arches
     self.archlist = unique(rpmUtils.arch.arches.keys() +
                            rpmUtils.arch.arches.values())
     self.groups = True
     self.updateinfo = True
Exemple #5
0
 def __init__(self, directory):
     """Prepare a createrepo configuration rooted at *directory*.

     The directory is normalized to carry a trailing slash,
     database-only generation is enabled, and the final ('repodata')
     and temporary ('.repodata') output paths are derived from it.
     """
     self.config = createrepo.MetaDataConfig()
     suffix = '' if directory.endswith('/') else '/'
     self.config.directory = directory + suffix
     self.config.database_only = True
     self.output_dir = os.path.join(self.config.directory, 'repodata')
     self.temp_dir = os.path.join(self.config.directory, '.repodata')
def update_repodata(repopath, rpmfiles, options):
    """Merge *rpmfiles* into the S3-hosted YUM repo under *repopath*.

    Sets up a temporary yum repo that reads the existing repodata from S3,
    merges the new rpm files into the package sack (pruning versions beyond
    ``options.keep`` and optionally deleting them from S3), regenerates the
    metadata locally, then syncs the new repodata back to S3.

    :param repopath: path of the repository inside the S3 bucket.
    :param rpmfiles: iterable of rpm file names (relative to the repo).
    :param options: parsed options; uses ``.bucket``, ``.keep``,
        ``.delete_old``.
    """
    tmpdir = tempfile.mkdtemp()
    try:
        s3base = urlparse.urlunsplit(('s3', options.bucket, repopath, '', ''))
        s3grabber = S3Grabber(s3base)

        # Set up temporary repo that will fetch repodata from s3
        yumbase = yum.YumBase()
        yumbase.preconf.disabled_plugins = '*'
        yumbase.conf.cachedir = os.path.join(tmpdir, 'cache')
        yumbase.repos.disableRepo('*')
        repo = yumbase.add_enable_repo('s3')
        repo._grab = s3grabber
        repo._urls = [os.path.join(s3base, '')]
        # Ensure that missing base path doesn't cause trouble
        repo._sack = yum.sqlitesack.YumSqlitePackageSack(
            createrepo.readMetadata.CreaterepoPkgOld)

        # Create metadata generator
        mdconf = createrepo.MetaDataConfig()
        mdconf.directory = tmpdir
        mdconf.pkglist = yum.packageSack.MetaSack()
        mdgen = createrepo.MetaDataGenerator(mdconf, LoggerCallback())
        mdgen.tempdir = tmpdir
        mdgen._grabber = s3grabber

        # Combine existing package sack with new rpm file list
        new_packages = yum.packageSack.PackageSack()
        for rpmfile in rpmfiles:
            newpkg = mdgen.read_in_package(os.path.join(s3base, rpmfile))
            newpkg._baseurl = ''   # don't leave s3 base urls in primary metadata
            older_pkgs = yumbase.pkgSack.searchNevra(name=newpkg.name)
            # Remove older versions of this package (or if it's the same version)
            for i, older in enumerate(reversed(older_pkgs), 1):
                if i > options.keep or older.pkgtup == newpkg.pkgtup:
                    yumbase.pkgSack.delPackage(older)
                    if options.delete_old and i > options.keep:
                        s3grabber.urldelete(older.remote_url)
                    else:
                        logging.info('ignoring: %s', older.ui_nevra)
            new_packages.addPackage(newpkg)

        mdconf.pkglist.addSack('existing', yumbase.pkgSack)
        mdconf.pkglist.addSack('new', new_packages)

        # Write out new metadata to tmpdir
        mdgen.doPkgMetadata()
        mdgen.doRepoMetadata()
        mdgen.doFinalMove()

        # Replace metadata on s3
        s3grabber.syncdir(os.path.join(tmpdir, 'repodata'), 'repodata')
    finally:
        # Remove the scratch directory even when generation or the S3 sync
        # fails; previously it leaked on any exception.
        shutil.rmtree(tmpdir)
Exemple #7
0
def make_cr_conf():
    """Build a MetaDataConfig with the site defaults used here.

    Excludes debug packages, keeps 15 old metadata generations, uses the
    'compat' compression type and split-tree generation, and picks the
    worker count from the process niceness (niced processes get 1 worker,
    normal-priority ones get 0).
    """
    workers = 1 if os.nice(0) > 0 else 0

    conf = createrepo.MetaDataConfig()
    conf.excludes = ['debug/*']
    conf.quiet = True
    conf.checksum = yum.misc._default_checksums[0]
    conf.database = True
    conf.update = True
    conf.retain_old_md = 15
    conf.compress_type = 'compat'
    conf.workers = workers
    conf.split = True
    return conf
Exemple #8
0
 def _create_repo_metadata(self):
     """Generate createrepo metadata for this tree into self.output_dir.

     Uses self.rpm_filenames as the package list; for RHEL 3/4/5 trees
     the legacy 'sha' checksum type is selected.
     """
     print 'Creating repodata for %r' % self
     ensure_dir(self.output_dir)
     conf = createrepo.MetaDataConfig()
     conf.database = False
     conf.outputdir = self.output_dir
     conf.directory = self.output_dir
     conf.directories = [self.output_dir]
     # RHEL 3/4/5 get the legacy 'sha' (sha1) sum type — presumably their
     # yum predates sha256 support; NOTE(review): confirm.
     if re.match(r'RedHatEnterpriseLinux[345]$', self.distro):
         conf.sumtype = 'sha'
     conf.pkglist = self.rpm_filenames
     # not sure what this is, but it defaults to True in newer createrepo,
     # but it makes yum explode
     conf.collapse_glibc_requires = False
     mdgen = createrepo.MetaDataGenerator(config_obj=conf)
     mdgen.doPkgMetadata()
     mdgen.doRepoMetadata()
     mdgen.doFinalMove()
Exemple #9
0
    def _rebuild_repository(self, conn, repo, packages, groupstree=None):
        """Rebuild the YUM metadata for *repo* on local disk.

        Runs createrepo over the repository path, doing an incremental
        update when *packages* is given and repomd.xml already exists,
        and preserving group information across updates.

        :param conn: connection used to load stored group data.
        :param repo: repository object; ``repo.url`` locates it on disk.
        :param packages: packages to index; falsy means metadata only.
        :param groupstree: optional pre-built groups XML ElementTree.
        :raises RuntimeError: if createrepo fails (MDError).
        """
        basepath = utils.get_path_from_url(repo.url)
        self.logger.info("rebuild repository in %s", basepath)
        md_config = createrepo.MetaDataConfig()
        mdfile_path = os.path.join(basepath, md_config.finaldir,
                                   md_config.repomdfile)
        # Incremental update only makes sense when repomd.xml already exists.
        update = packages is not None and os.path.exists(mdfile_path)
        groupsfile = None
        if groupstree is None and update:
            # createrepo loses the group information on an update; to
            # prevent this, set the group info manually.
            groupstree = self._load_groups(conn, repo)

        if groupstree is not None:
            groupsfile = os.path.join(tempfile.gettempdir(), "groups.xml")
            with open(groupsfile, "w") as fd:
                groupstree.write(fd)
        try:
            md_config.workers = 1
            md_config.directory = str(basepath)
            md_config.groupfile = groupsfile
            md_config.update = update
            if not packages:
                # only generate meta-files, without packages info
                md_config.excludes = ["*"]

            mdgen = createrepo.MetaDataGenerator(config_obj=md_config,
                                                 callback=CreaterepoCallBack(
                                                     self.logger))
            mdgen.doPkgMetadata()
            mdgen.doRepoMetadata()
            mdgen.doFinalMove()
        except createrepo.MDError as e:
            err_msg = six.text_type(e)
            self.logger.exception("failed to create yum repository in %s: %s",
                                  basepath, err_msg)
            # Clean up createrepo's partial temp output before re-raising.
            shutil.rmtree(os.path.join(md_config.outputdir, md_config.tempdir),
                          ignore_errors=True)
            raise RuntimeError(
                "Failed to create yum repository in {0}.".format(err_msg))
        finally:
            # The groups file lives in the shared temp dir; always remove it.
            if groupsfile is not None:
                os.unlink(groupsfile)
Exemple #10
0
def main(args):
    """createrepo from cli main flow

    Parses CLI args into a MetaDataConfig, chooses the split or regular
    generator, short-circuits when the repo is already up to date, and
    runs the three metadata phases, printing per-phase timings when
    profiling is enabled. Exits non-zero on MDError.

    NOTE: this is Python 2 code (print statements, old except syntax).
    """
    start_st = time.time()
    conf = createrepo.MetaDataConfig()
    conf = parse_args(args, conf)
    if conf.profile:
        print ('start time: %0.3f' % (time.time() - start_st))

    mid_st = time.time()
    try:
        if conf.split:
            mdgen = createrepo.SplitMetaDataGenerator(config_obj=conf,
                                                      callback=MDCallBack())
        else:
            mdgen = createrepo.MetaDataGenerator(config_obj=conf,
                                                 callback=MDCallBack())
            # Timestamp check is only done for the non-split generator:
            # skip regeneration entirely when nothing changed.
            if mdgen.checkTimeStamps():
                if mdgen.conf.verbose:
                    print _('repo is up to date')
                mdgen._cleanup_tmp_repodata_dir()
                sys.exit(0)

        if conf.profile:
            print ('mid time: %0.3f' % (time.time() - mid_st))

        pm_st = time.time()
        mdgen.doPkgMetadata()
        if conf.profile:
            print ('pm time: %0.3f' % (time.time() - pm_st))
        rm_st = time.time()
        mdgen.doRepoMetadata()
        if conf.profile:
            print ('rm time: %0.3f' % (time.time() - rm_st))
        fm_st = time.time()
        mdgen.doFinalMove()
        if conf.profile:
            print ('fm time: %0.3f' % (time.time() - fm_st))


    except MDError, errormsg:
        errorprint(_('%s') % errormsg)
        sys.exit(1)
Exemple #11
0
    def build_metadata(self):
        """Generate repository metadata into a fresh staging directory.

        Builds a package list relative to the repo root, resolves the
        checksum type, runs the split metadata generator, and returns the
        staging directory containing the generated repodata. The caller is
        responsible for removing the staging directory.
        """
        staging = tempfile.mkdtemp(prefix='yumsync-', suffix='-metadata')

        if self._packages is None:
            packages = []
        else:
            # createrepo expects paths relative to the directory above
            # the package dir.
            packages = [
                os.path.join(os.path.basename(self.package_dir), pkg)
                for pkg in self._packages
            ]

        # Only two sum types are supported: legacy 'sha' (sha1) or sha256.
        if self.checksum == 'sha' or self.checksum == 'sha1':
            sumtype = 'sha'
        else:
            sumtype = 'sha256'

        conf = createrepo.MetaDataConfig()
        conf.directory = os.path.dirname(self.package_dir)
        conf.outputdir = staging
        conf.sumtype = sumtype
        conf.pkglist = packages
        conf.quiet = True

        groupdir = None
        if self._comps:
            groupdir = tempfile.mkdtemp(prefix='yumsync-', suffix='-groupdata')
            conf.groupfile = os.path.join(groupdir, 'groups.xml')
            with open(conf.groupfile, 'w') as f:
                f.write(self._comps)

        try:
            generator = createrepo.SplitMetaDataGenerator(conf)
            generator.doPkgMetadata()
            generator.doRepoMetadata()
            generator.doFinalMove()
        finally:
            # Remove the temporary group directory even when generation
            # raises; previously it was only cleaned up on success.
            if groupdir and os.path.exists(groupdir):
                shutil.rmtree(groupdir)

        return staging
Exemple #12
0
def mkmetadatadir(dir):
    """
    Generate package metadata for a given directory; if it doesn't exist, then
    create it.
    """
    log.debug("mkmetadatadir(%s)" % dir)
    if not isdir(dir):
        os.makedirs(dir)
    cache = config.get('createrepo_cache_dir')
    try:
        import createrepo
        md_conf = createrepo.MetaDataConfig()
        md_conf.cachedir = cache
        md_conf.outputdir = dir
        md_conf.directory = dir
        md_conf.quiet = True
        generator = createrepo.MetaDataGenerator(md_conf)
        generator.doPkgMetadata()
        generator.doRepoMetadata()
        generator.doFinalMove()
    except ImportError:
        # The createrepo library isn't importable; fall back to the
        # genpkgmetadata CLI entry point shipped with createrepo.
        sys.path.append('/usr/share/createrepo')
        import genpkgmetadata
        genpkgmetadata.main(['--cachedir', str(cache), '-q', str(dir)])
def update_repodata(bucketName, key, operation):
    """Apply a single rpm add/remove to the YUM repodata stored on S3.

    Rebuilds the metadata of the repository containing *key* inside
    *bucketName* and syncs the regenerated repodata back to S3.

    :param bucketName: S3 bucket holding the repository.
    :param key: S3 object key of the rpm being added or removed.
    :param operation: "add" indexes the rpm; any other value removes the
        matching name/version/release from the metadata.
    """
    logger.debug("key={0}".format(key))
    # Split the key into the repo root, the package path inside the repo,
    # and the bare file name.
    if key.rfind("/") > -1:
        fileName = key[key.rfind("/") + 1:]
        (repoPath, relativeFileName) = extract_repo_file(key)
        packagePath = relativeFileName[:relativeFileName.rfind("/")]
    else:
        fileName = key
        relativeFileName = fileName
        repoPath = ""
        packagePath = ''

    (name, version, release, epoch, arch) = splitFilename(fileName)

    logger.debug("fileName={0}".format(fileName))
    logger.debug("relativeFileName={0}".format(relativeFileName))
    logger.debug("packagePath={0}".format(packagePath))
    logger.debug("repoPath={0}".format(repoPath))

    tmpdir = tempfile.mkdtemp()
    os.makedirs(os.path.join(tmpdir, packagePath))

    s3base = urlparse.urlunsplit(("s3", bucketName, repoPath, "", ""))
    # Map the key's flat temp path to its repo-relative temp path so the
    # grabber resolves the package locally.
    overridekey = os.path.join(tmpdir, fileName)
    overrideval = os.path.join(tmpdir, relativeFileName)
    s3grabber = S3Grabber(s3base, {overridekey: overrideval})

    # Set up temporary repo that will fetch repodata from s3
    yumbase = yum.YumBase()
    yumbase.preconf.disabled_plugins = '*'
    yumbase.conf.cachedir = os.path.join(tmpdir, 'cache')
    yumbase.repos.disableRepo('*')
    repo = yumbase.add_enable_repo('s3')
    repo._grab = s3grabber
    repo._urls = [os.path.join(s3base, '')]
    # Ensure that missing base path doesn't cause trouble
    repo._sack = yum.sqlitesack.YumSqlitePackageSack(
        createrepo.readMetadata.CreaterepoPkgOld)

    # Create metadata generator
    mdconf = createrepo.MetaDataConfig()
    mdconf.directory = tmpdir
    mdconf.pkglist = yum.packageSack.MetaSack()
    mdgen = createrepo.MetaDataGenerator(mdconf, LoggerCallback())
    mdgen.tempdir = tmpdir
    mdgen._grabber = s3grabber

    new_packages = yum.packageSack.PackageSack()
    if operation == "add":
        # Combine existing package sack with new rpm file list
        newpkg = mdgen.read_in_package(os.path.join(s3base, relativeFileName))
        newpkg._baseurl = ''  # don't leave s3 base urls in primary metadata
        new_packages.addPackage(newpkg)
    else:
        # Remove deleted package
        logger.debug("Delete package {0}".format(key))
        older_pkgs = yumbase.pkgSack.searchNevra(name=name)
        for i, older in enumerate(older_pkgs, 1):
            if older.version == version and older.release == release:
                yumbase.pkgSack.delPackage(older)

    mdconf.pkglist.addSack('existing', yumbase.pkgSack)
    mdconf.pkglist.addSack('new', new_packages)

    # Write out new metadata to tmpdir
    mdgen.doPkgMetadata()
    mdgen.doRepoMetadata()
    mdgen.doFinalMove()

    # Replace metadata on s3
    s3grabber.syncdir(os.path.join(tmpdir, 'repodata'), 'repodata')

    # NOTE(review): tmpdir is only removed on the success path; an
    # exception above leaks it.
    shutil.rmtree(tmpdir)
Exemple #14
0
        beg = "%*d/%d - " % (len(str(total)), current, total)
        left = 80 - len(beg)
        sys.stdout.write("\r%s%-*.*s" % (beg, left, left, item))
        sys.stdout.flush()


def main(args):
    """createrepo from cli main flow"""
    try:
        os.getcwd()
    except OSError, e:
        if e.errno != errno.ENOENT: raise
        print('No getcwd() access in current directory.')
        sys.exit(1)
    start_st = time.time()
    conf = createrepo.MetaDataConfig()
    conf = parse_args(args, conf)
    if conf.profile:
        print('start time: %0.3f' % (time.time() - start_st))

    mid_st = time.time()
    try:
        if conf.split:
            mdgen = createrepo.SplitMetaDataGenerator(config_obj=conf,
                                                      callback=MDCallBack())
        else:
            mdgen = createrepo.MetaDataGenerator(config_obj=conf,
                                                 callback=MDCallBack())
            if mdgen.checkTimeStamps():
                if mdgen.conf.verbose:
                    print _('repo is up to date')