Example #1
0
 def setUp(self):
     """Build a Base from an explicitly constructed Conf and bind a ModuleBase to it."""
     conf = dnf.conf.Conf()
     self.base = dnf.Base(conf)
     self.moduleBase = dnf.module.module_base.ModuleBase(self.base)
Example #2
0
  def run_pull(self):
    """Fetch a template from the canvas service and converge the local
    system onto it: add the template's repos, queue installs for included
    packages and erases for excluded ones, then run (or dry-run) the
    resulting dnf transaction.

    Returns:
      1 if the template cannot be fetched, 0 for a dry run, otherwise
      the result of db.do_transaction().
    """
    t = Template(self.args.template, user=self.args.username)

    try:
      t = self.cs.template_get(t)

    except ServiceException as e:
      print(e)
      return 1

    # prepare dnf
    print('info: analysing system ...')
    db = dnf.Base()

    # install repos from template
    for r in t.repos_all:
      dr = r.to_repo()
      dr.load()
      db.repos.add(dr)

    db.read_comps()

    try:
      db.fill_sack()

    # best-effort: a failed sack fill is deliberately ignored and we
    # continue with whatever metadata did load
    except OSError:
      pass

    multilib_policy = db.conf.multilib_policy
    clean_deps = db.conf.clean_requirements_on_remove

    # process all packages in template
    for p in t.packages_all:
      if p.included():
        #
        # stripped from dnf.base install() in full and optimised
        # for canvas usage

        subj = dnf.subject.Subject(p.to_pkg_spec())
        if multilib_policy == "all" or subj.is_arch_specified(db.sack):
          q = subj.get_best_query(db.sack)

          if not q:
            continue

          already_inst, available = db._query_matches_installed(q)

          for a in available:
            db._goal.install(a, optional=False)

        elif multilib_policy == "best":
          sltrs = subj.get_best_selectors(db.sack)
          # keep the last selector's non-empty matches() (or [] if none)
          # NOTE(review): `reduce` is presumably imported from functools at
          # module level -- confirm; it is not a builtin in Python 3
          match = reduce(lambda x, y: y.matches() or x, sltrs, [])

          if match:
            for sltr in sltrs:
              if sltr.matches():
                db._goal.install(select=sltr, optional=False)

      else:
        #
        # stripped from dnf.base remove() in full and optimised
        # for canvas usage
        matches = dnf.subject.Subject(p.to_pkg_spec()).get_best_query(db.sack)

        for pkg in matches.installed():
          db._goal.erase(pkg, clean_deps=clean_deps)

    db.resolve()

    # describe process for dry runs
    if self.args.dry_run:
      packages_install = list(db.transaction.install_set)
      packages_install.sort(key=lambda x: x.name)

      packages_remove = list(db.transaction.remove_set)
      packages_remove.sort(key=lambda x: x.name)

      if len(packages_install) or len(packages_remove):
        print('The following would be installed to (+) and removed from (-) the system:')

        for p in packages_install:
          print('  + ' + str(p))

        for p in packages_remove:
          print('  - ' + str(p))

        print()
        print('Summary:')
        print('  - Package(s): %d' % (len(packages_install)+len(packages_remove)))
        print()

      else:
        print('No system changes required.')

      # fixed user-facing typo: 'peformed' -> 'performed'
      print('No action performed during this dry-run.')
      return 0

    # TODO: progress for download, install and removal
    db.download_packages(list(db.transaction.install_set))
    return db.do_transaction()
Example #3
0
 def test_default_config_user(self):
     """A default-constructed Base exposes a conf whose cachedir is a per-user /var/tmp path."""
     base = dnf.Base()
     conf = base.conf
     self.assertIsNotNone(conf)
     self.assertIsNotNone(conf.cachedir)
     pattern = re.compile('/var/tmp/dnf-[a-zA-Z0-9_-]+')
     self.assertIsNotNone(pattern.match(conf.cachedir))
Example #4
0
 def test_init(self):
     """dnf.Base accepts an explicitly constructed Conf."""
     conf = dnf.conf.Conf()
     base = dnf.Base(conf)
Example #5
0
from typing import Dict, List, Optional, Tuple  # pylint: disable=unused-import

from copr.v3 import Client  # type: ignore
import dnf  # type: ignore
import rpm  # type: ignore
import semantic_version  # type: ignore

# copr project coordinates and the SRPM-generator tool name
COPR_OWNER = 'dshea'
COPR_PROJECT = 'npmlib-packaging'
NPM2SRPM = 'npm2srpm'

# initialize the dnf sack from the system repos
# expire the metadata to ensure the copr stuff gets reloaded
# NOTE(review): `subprocess` and `threading` are presumably imported earlier
# in the file -- they are not visible in this chunk; confirm
subprocess.check_call(["dnf", "clean", "metadata"])
dnfBase = dnf.Base()
dnfBase.read_all_repos()
print("loading repo data...")
# only available repo metadata is loaded; installed (system) packages are
# deliberately kept out of the sack
dnfBase.fill_sack(load_system_repo=False, load_available_repos=True)
print("done")

# initialize a copr client object
coprClient = Client.create_from_config_file()

# create a global dict for in-progress builds, so we don't have to keep hitting the repos
# keys are npm module names, values are a dictionary describing the versions in progress.
# the version dictionary is keyed by version number, and the values are an Event object,
# set to True when the build is complete
inProgress = {}  # type: Dict[str, Dict[str, threading.Event]]
inProgressLock = threading.Lock()
Example #6
0
 def setUp(self):
     """Create a default-configured dnf.Base for each test."""
     self.base = dnf.Base()
Example #7
0
# NOTE(review): `argparser`, PKG_FORMAT and the earlier arguments
# (e.g. kickstart_file, releasever) are defined above this chunk
argparser.add_argument('--format',
                       metavar='FORMAT',
                       help='package format (default is ' + PKG_FORMAT + ')',
                       required=False)
argparser.add_argument('--verbose',
                       help='print additional info to stderr',
                       action='store_true')
args = argparser.parse_args()

# Kickstart parsing
ksparser = pykickstart.parser.KickstartParser(
    pykickstart.version.makeVersion())
ksparser.readKickstart(args.kickstart_file)

# Base object for dnf operations: https://dnf.readthedocs.io/en/latest/api.html
dnf_base = dnf.Base()

# Set release version if any
if args.releasever:
    if args.verbose:
        print(f'# Setting release version to {args.releasever}',
              file=sys.stderr)
    dnf_base.conf.releasever = args.releasever

# honour the kickstart %packages --excludeWeakdeps option
if args.verbose:
    print(
        f'# Exclude weak dependencies: {ksparser.handler.packages.excludeWeakdeps}',
        file=sys.stderr)
dnf_base.conf.install_weak_deps = not ksparser.handler.packages.excludeWeakdeps

# Create parser for kickstart 'repo' command
Example #8
0
 def test_translate_comps_pkg_types(self):
     """The ('mandatory', 'optional') comps pair translates to bitmask 12."""
     base = dnf.Base()
     pkg_types = ('mandatory', 'optional')
     self.assertEqual(base._translate_comps_pkg_types(pkg_types), 12)
Example #9
0
import dnf
import fnmatch
import functools
import hawkey
import json
import logging
import os
import rpm
import subprocess

from Pharlap import kerneldetection
from Pharlap.dnfcache import DNFCache
from Pharlap.hwdata import PCI, USB

# module-level dnf handle and host base architecture shared by the helpers below
db = dnf.Base()
system_architecture = dnf.rpm.basearch(hawkey.detect_arch())

# PCI/USB hardware-name lookup tables (from Pharlap.hwdata)
device_pci = PCI()
device_usb = USB()


def load_modalias_map():
    maps = [
        './pharlap-modalias.map', '/usr/share/pharlap/pharlap-modalias.map'
    ]

    modalias_map = {}

    for m in maps:
        try:
Example #10
0
 def setUp(self):
     """Bind a ModuleBase to a default-configured Base for each test."""
     base = dnf.Base()
     self.base = base
     self.moduleBase = dnf.module.module_base.ModuleBase(base)
Example #11
0
    def __init__(self):
        """Parse CLI arguments, load the YAML build spec, and prepare the
        working directories and state needed to build a portable linux
        folder-application.
        """
        self.curdir = os.getcwd()
        self.root_dir = None

        ap = argparse.ArgumentParser(
            description='Create a portable linux folder-application')
        # ap.add_argument('--output', required=True, help='Destination directory')
        ap.add_argument('--debug',
                        default=False,
                        action='store_true',
                        help='Debug version of release')
        ap.add_argument('--docs',
                        default=False,
                        action='store_true',
                        help='Output documentation version')

        # pipeline stages; each becomes a --stage-<name> boolean flag below
        self.stages = {
            'download-rpms': 'download RPMs',
            'download-sources-for-rpms':
            'download SRPMs — sources packages for RPMS',
            'checkout': 'checkout sources',
            'install-rpms': 'install downloaded RPMS',
            'download-wheels': 'download needed WHL-python packages',
            'build-wheels': 'compile wheels for our python sources',
            'install-wheels': 'Install our and external Python wheels',
            'build-nuitka': 'Compile Python packages to executable',
            'make-isoexe':
            'Also make self-executable install archive and ISO disk',
            'pack-me': 'Pack current dir to time prefixed tar.bz2'
        }

        for stage, desc in self.stages.items():
            ap.add_argument('--stage-%s' % stage,
                            default=False,
                            action='store_true',
                            help='Stage for %s ' % desc)

        # ap.add_argument('--stage-download', default=False, action='store_true', help='Stage for download binary artifacts')
        # ap.add_argument('--stage-build-wheels', default=False, action='store_true', help='Build Wheels for source packages')
        # ap.add_argument('--stage-setupsystem', default=False, action='store_true', help='Stage for setup local OS')
        # ap.add_argument('--stage-build-nuitka', default=False, action='store_true', help='Compile Nuitka packages')
        ap.add_argument('--stage-build-and-pack',
                        default='',
                        type=str,
                        help='Install, build and pack')
        ap.add_argument('--stage-download-all',
                        default=False,
                        action='store_true',
                        help='Download all — sources, packages')
        ap.add_argument('--stage-my-source-changed',
                        default='',
                        type=str,
                        help='Fast rebuild/repack if only pythonsourcechanged')
        ap.add_argument('--stage-all',
                        default='',
                        type=str,
                        help='Install, build and pack')
        ap.add_argument('--stage-pack',
                        default='',
                        type=str,
                        help='Stage pack to given destination directory')
        ap.add_argument('specfile', type=str, help='Specification File')

        self.args = args = ap.parse_args()
        # umbrella flags expand into the individual stage flags they imply
        if self.args.stage_all:
            self.args.stage_build_and_pack = self.args.stage_all
            self.args.stage_download_all = True

        if self.args.stage_build_and_pack:
            self.args.stage_install_rpms = True
            self.args.stage_build_wheels = True
            self.args.stage_install_wheels = True
            self.args.stage_build_nuitka = True
            self.args.stage_pack = self.args.stage_build_and_pack

        if self.args.stage_my_source_changed:
            self.args.stage_checkout = True
            self.args.stage_download_wheels = True
            self.args.stage_build_wheels = True
            self.args.stage_install_wheels = True
            self.args.stage_build_nuitka = True
            self.args.stage_pack = self.args.stage_my_source_changed

        if self.args.stage_download_all:
            self.args.stage_download_rpms = True
            self.args.stage_checkout = True
            self.args.stage_download_wheels = True

        # the spec's directory is exported so relative paths inside the spec resolve
        specfile_ = expandpath(args.specfile)
        os.environ['TERRA_SPECDIR'] = os.path.split(specfile_)[0]
        self.spec = spec = yaml_load(specfile_)

        self.start_dir = os.getcwd()

        # template variables consumed later when rendering specs
        self.tvars = edict()
        self.tvars.python_version_1, self.tvars.python_version_2 = sys.version_info[:
                                                                                    2]
        self.tvars.py_ext = ".pyc"
        if self.args.debug:
            self.tvars.py_ext = ".py"
        self.tvars.release = not self.args.debug

        need_patch = just_copy = None
        if 'bin_regexps' in spec:
            br_ = spec.bin_regexps
            if "need_patch" in br_:
                need_patch = br_.need_patch
            if "just_copy" in br_:
                just_copy = br_.just_copy

        self.br = BinRegexps(need_patch=need_patch, just_copy=just_copy)

        # host packages required by the build pipeline
        self.need_packages = [
            'patchelf', 'ccache', 'gcc', 'gcc-c++', 'gcc-gfortran', 'chrpath',
            'python3-wheel', 'python3-pip', 'python3-devel', 'python3-yaml',
            'genisoimage', 'makeself', 'dnf-utils'
        ]

        nflags_ = {}
        if 'nuitka' in spec:
            nflags_ = spec.nuitka

        self.nuitkas = NuitkaFlags(**nflags_)
        self.ps = PackagesSpec(**spec.packages)
        self.pp = PythonPackages(**spec.python_packages)
        fs_ = []
        if 'folders' in spec:
            fs_ = spec.folders
        self.fs = FoldersSpec(folders=fs_)

        self.in_bin = os.path.abspath('in/bin')
        self.src_dir = 'in/src'
        if 'src_dir' in spec:
            # NOTE(review): this expands the hard-coded default 'in/src',
            # not spec.src_dir -- looks like it should be
            # expandpath(spec.src_dir); confirm before relying on it
            self.src_dir = expandpath(self.src_dir)
        self.out_dir = 'out'
        self.out_dir = expandpath(self.out_dir)
        mkdir_p(self.src_dir)
        mkdir_p(self.out_dir)
        mkdir_p(self.in_bin)

        # prefer ccache-wrapped compilers for everything spawned from here
        os.environ['PATH'] = "/usr/lib64/ccache:" + os.environ['PATH']

        # snapshot the currently installed RPM set via dnf
        base = dnf.Base()
        base.fill_sack()
        q_ = base.sack.query()
        self.installed_packages = q_.installed()

        # NOTE(review): dead statement -- the method ends here regardless
        pass
Example #12
0
    def setUpClass(self):
        """Copy the bundled .repo fixtures into a fresh temp dir and create
        the shared dnf.Base used by the tests.

        NOTE(review): unittest invokes setUpClass as a classmethod, so the
        `self` parameter here actually receives the class object -- confirm
        a @classmethod decorator exists in the full file.
        """
        self.tmp_dir = tempfile.mkdtemp(prefix="lorax.test.repo.")
        for f in glob("./tests/pylorax/repos/*.repo"):
            shutil.copy2(f, self.tmp_dir)

        self.dbo = dnf.Base()
Example #13
0
    def prepare_dnfcache(self):
        """Build a throwaway DNF cache/installroot populated from the Fedora
        base+updates repos (binary and source) plus any extra repo files,
        then index every available package by its string form into
        self.tempdnfcache.
        """
        self.tempdnfdir = tempfile.mkdtemp(
            prefix="rpmreproduce-dnf-", dir=self.tmpdir)

        cachedir = os.path.join(self.tempdnfdir, 'cache')
        installroot = os.path.join(self.tempdnfdir, 'installroot')
        reposdir = os.path.join(self.tempdnfdir, 'repos')
        os.makedirs(cachedir)
        os.makedirs(installroot)
        os.makedirs(reposdir)

        logger.debug("Preparing DNF cache...")
        base = dnf.Base()
        base.conf.cachedir = cachedir
        base.conf.installroot = installroot
        # releasever comes from the buildinfo's Fedora release
        base.conf.substitutions['releasever'] = \
            self.buildinfo.get_fedora_release()
        # start with no repo dirs; the generated one is assigned further down
        base.conf.reposdir = []
        base.read_comps(arch_filter=True)

        # Add Fedora repositories (base + updates)
        fedora_repo = """
[fedora]
name=Fedora $releasever - $basearch
metalink=https://mirrors.fedoraproject.org/metalink?repo=fedora-$releasever&arch=$basearch
enabled=1
countme=1
metadata_expire=7d
repo_gpgcheck=0
type=rpm
gpgcheck=1
gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-fedora-$releasever-$basearch
skip_if_unavailable=False

[fedora-source]
name=Fedora $releasever - Source
metalink=https://mirrors.fedoraproject.org/metalink?repo=fedora-source-$releasever&arch=$basearch
enabled=1
metadata_expire=7d
repo_gpgcheck=0
type=rpm
gpgcheck=1
gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-fedora-$releasever-$basearch
skip_if_unavailable=False

[updates]
name=Fedora $releasever - $basearch - Updates
metalink=https://mirrors.fedoraproject.org/metalink?repo=updates-released-f$releasever&arch=$basearch
enabled=1
countme=1
repo_gpgcheck=0
type=rpm
gpgcheck=1
metadata_expire=6h
gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-fedora-$releasever-$basearch
skip_if_unavailable=False

[updates-source]
name=Fedora $releasever - Updates Source
metalink=https://mirrors.fedoraproject.org/metalink?repo=updates-released-source-f$releasever&arch=$basearch
enabled=1
repo_gpgcheck=0
type=rpm
gpgcheck=1
metadata_expire=6h
gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-fedora-$releasever-$basearch
skip_if_unavailable=False
"""
        with open(os.path.join(reposdir, 'fedora.repo'), 'w') as fd:
            fd.write(fedora_repo)

        # symlink any caller-supplied extra .repo files alongside ours
        if self.extra_repository_files:
            for repo_src in self.extra_repository_files:
                repo_dst = os.path.join(reposdir, os.path.basename(repo_src))
                os.symlink(repo_src, repo_dst)

        base.conf.reposdir = os.path.join(self.tempdnfdir, 'repos')
        base.read_all_repos()
        base.fill_sack()

        # index every package visible in the sack, keyed by str(Package)
        query = base.sack.query
        self.tempdnfcache = {}
        for rpm in set(query()):
            pkg = Package(rpm.name, rpm.version, rpm.release, rpm.arch,
                          rpm.epoch, chksum=rpm.chksum,
                          source_name=rpm.source_name,
                          url=rpm.remote_location())
            self.tempdnfcache[str(pkg)] = pkg
Example #14
0
 def setUp(self):
     """Resolve a transaction against a throwaway persistdir so tests can
     inspect self.rpmTrans."""
     self.base = dnf.Base(dnf.conf.Conf())
     self.base.conf.persistdir = "/tmp/tests"
     # positional args presumably map to fill_sack(load_system_repo=False,
     # load_available_repos=False) -- confirm against the dnf API
     self.base.fill_sack(False, False)
     self.base.resolve()
     self.rpmTrans = self.base.transaction
            suggested_by_set.update(target_relations[pkg_id]["suggested_by"])
            suggested_by_set.update(pkg["suggested_by"])
            target_relations[pkg_id]["suggested_by"] = list(suggested_by_set)

        else:
            target_relations[pkg_id] = pkg


def dump_data(path, data):
    """Serialize *data* as JSON into the file at *path*."""
    with open(path, 'w') as out:
        json.dump(data, out)


print(arch + ": Setup")
# NOTE(review): arch, repoConfDir, repoName, releasever, installroot and
# coreBuildRoot are defined earlier in the full script
baseCore = dnf.Base()
baseCore.conf.read(repoConfDir + repoName + "." + arch + ".repo")
baseCore.conf.substitutions['releasever'] = releasever
baseCore.conf.installroot = installroot
baseCore.conf.arch = arch
# weak deps excluded so the buildroot stays minimal
baseCore.conf.install_weak_deps = False
baseCore.repos.enable_source_repos()

#print(" Start read repos")
baseCore.read_all_repos()

#print(" Start fill sack")
# system repo excluded: resolution happens purely against the configured repos
baseCore.fill_sack(load_system_repo=False)

print(arch + ": Populating Core Buildroot")
for this_binary in coreBuildRoot:
Example #16
0
    def system_prepare(self, clean=False, db=None):
        """
        Prepares the system for applying template configuration.

        Args:
          clean: specify whether system packages not defined in the template
            are removed.
            NOTE(review): `clean` is not referenced in this method body.
          db: dnf.Base object to use for preparation. When omitted a fresh
            Base is created per call. (The previous default of
            ``db=dnf.Base()`` was evaluated once at import time and shared
            across every call -- the classic mutable-default bug.)

        Returns:
          Nothing.
        """

        if db is None:
            db = dnf.Base()

        if not isinstance(self._db, dnf.Base):
            self._db = db

        else:
            self._db.reset(goal=True, repos=True)

        # prepare dnf
        logging.info('Analysing system ...')

        # install repos from template
        if len(self.repos_all):
            for r in self.repos_all:
                dr = r.to_repo(conf=self._db.conf)
                dr.set_progress_bar(dnf.cli.progress.MultiFileProgressMeter())
                db.repos.add(dr)

        # indicate we're using system repos if we're mangling packages
        elif len(self.packages_all):
            logging.info(
                'No template repos specified, using available system repos.')
            db.read_all_repos()

        db.read_comps()

        try:
            db.fill_sack()

        # best-effort: continue with whatever metadata could be loaded
        except OSError:
            pass

        q_installed = db.sack.query().installed()

        # check we have packages to assess
        if len(self.packages_all) == 0:
            return

        # kept for parity with the template-apply code path
        multilib_policy = db.conf.multilib_policy
        clean_deps = db.conf.clean_requirements_on_remove

        logging.info('Preparing package transaction ...')
        # process all packages in template
        for p in self.packages_all:
            # handle package groups
            if p.is_group:
                if p.included:
                    try:
                        db.group_install(p.name, 'default')

                    # narrowed from a bare except: so SystemExit and
                    # KeyboardInterrupt are no longer swallowed
                    except Exception:
                        logging.error(
                            'Package group does not exist {0}'.format(str(p)))

                elif p.excluded:
                    try:
                        db.group_remove(p.name)

                    except Exception:
                        logging.debug('Package not installed: {0}'.format(
                            str(p)))

            # handle packages
            else:
                p_spec = p.to_pkg_spec()

                # TODO: improve matching on all p_spec params (not just name)
                p_installed = list(q_installed.filter(name__glob=p_spec))

                if p.included and len(p_installed) == 0:
                    try:
                        db.install(p_spec)

                    except Exception:
                        logging.error('Package does not exist {0}'.format(
                            str(p)))

                elif p.excluded and len(p_installed) > 0:
                    try:
                        for pi in p_installed:
                            db.remove(pi)

                    except Exception:
                        logging.debug('Package not installed: {0}'.format(
                            str(p)))

                else:
                    pass

        logging.info('Resolving package actions ...')
        db.resolve(allow_erasing=True)
Example #17
0
    def __init__(self, url, name, yumsrc_conf=YUMSRC_CONF, org="1", channel_label="",
                 no_mirrors=False, ca_cert_file=None, client_cert_file=None,
                 client_key_file=None):
        """Set up a dnf-backed content source: read the yum config, resolve
        proxy settings (rhn.conf takes priority over yum.conf), authenticate
        against the URL, pick or build the dnf Repo object, and load repo
        metadata paths.
        """
        # sanitise the repo name to characters dnf accepts as a repo id
        name = re.sub('[^a-zA-Z0-9_.:-]+', '_', name)
        self.url = url
        self.name = name
        self.dnfbase = dnf.Base()
        self.dnfbase.conf.read(yumsrc_conf)
        # fall back to an empty config when the given file does not exist
        if not os.path.exists(yumsrc_conf):
            self.dnfbase.conf.read('/dev/null')
        self.configparser = ConfigParser()      # Reading config file directly as dnf only ready MAIN section.
        self.configparser.setSubstitutions( dnf.Base().conf.substitutions)
        self.configparser.read(yumsrc_conf)
        if org:
            self.org = org
        else:
            self.org = "NULL"
        # per-org cache directory
        self.dnfbase.conf.cachedir = os.path.join(CACHE_DIR, self.org)

        self.proxy_addr = None
        self.proxy_user = None
        self.proxy_pass = None
        self.authtoken = None

        # read the proxy configuration
        # /etc/rhn/rhn.conf has more priority than yum.conf
        initCFG('server.satellite')

        # keep authtokens for mirroring
        (_scheme, _netloc, _path, query, _fragid) = urlparse.urlsplit(url)
        if query:
            self.authtoken = query

        if CFG.http_proxy:
            self.proxy_addr = CFG.http_proxy
            self.proxy_user = CFG.http_proxy_username
            self.proxy_pass = CFG.http_proxy_password
        else:
            # no rhn.conf proxy: fall back to yum.conf sections, preferring
            # the repo's own section, then the channel's, then [main]
            db_cfg = self.configparser
            section_name = None

            if db_cfg.has_section(self.name):
                section_name = self.name
            elif db_cfg.has_section(channel_label):
                section_name = channel_label
            elif db_cfg.has_section('main'):
                section_name = 'main'

            if section_name:
                if db_cfg.has_option(section_name, option='proxy'):
                    self.proxy_addr = db_cfg.get(section_name, option='proxy')

                if db_cfg.has_option(section_name, 'proxy_username'):
                    self.proxy_user = db_cfg.get(section_name, 'proxy_username')

                if db_cfg.has_option(section_name, 'proxy_password'):
                    self.proxy_pass = db_cfg.get(section_name, 'proxy_password')

        self._authenticate(url)

        # Check for settings in yum configuration files (for custom repos/channels only)
        if org:
            repos = self.dnfbase.repos
        else:
            repos = None
        if repos and name in repos:
            repo = repos[name]
        elif repos and channel_label in repos:
            repo = repos[channel_label]
            # In case we are using Repo object based on channel config, override it's id to name of the repo
            # To not create channel directories in cache directory
            repo.id = name
        else:
            # Not using values from config files
            repo = dnf.repo.Repo(name,self.dnfbase.conf)
            repo.repofile = yumsrc_conf
            # pylint: disable=W0212
            repo._populate(self.configparser, name, yumsrc_conf)
        self.repo = repo

        self.yumbase = self.dnfbase # for compatibility

        self.setup_repo(repo, no_mirrors, ca_cert_file, client_cert_file, client_key_file)
        self.num_packages = 0
        self.num_excluded = 0
        self.groupsfile = None
        # NOTE(review): self.repoid is not assigned in this method --
        # presumably setup_repo() or a class attribute provides it; confirm
        self.repo = self.dnfbase.repos[self.repoid]
        self.get_metadata_paths()
Example #18
0
 def test_instance(self):
     """dnf.Base can be constructed with no arguments."""
     base = dnf.Base()
Example #19
0
def get_dnf_base_object(installroot,
                        sources,
                        mirrorlists=None,
                        repos=None,
                        enablerepos=None,
                        disablerepos=None,
                        tempdir="/var/tmp",
                        proxy=None,
                        releasever=DEFAULT_RELEASEVER,
                        cachedir=None,
                        logdir=None,
                        sslverify=True,
                        dnfplugins=None):
    """ Create a dnf Base object and setup the repositories and installroot

        :param string installroot: Full path to the installroot
        :param list sources: List of source repo urls to use for the installation
        :param list mirrorlists: List of mirrorlist urls to use
        :param list repos: List of .repo files to copy into a temporary reposdir
        :param list enablerepos: List of repo names to enable
        :param list disablerepos: List of repo names to disable
        :param string tempdir: Path of temporary directory
        :param string proxy: http proxy to use when fetching packages
        :param string releasever: Release version to pass to dnf
        :param string cachedir: Directory to use for caching packages
        :param string logdir: Directory to use for dnf logs
        :param bool sslverify: Set to False to ignore the CA of ssl certs. eg. use self-signed ssl for https repos.
        :param list dnfplugins: Plugin names to enable, or ["*"] for all

        If tempdir is not set /var/tmp is used.
        If cachedir is None a dnf.cache directory is created inside tmpdir.
        Returns the configured dnf.Base, or None if metadata cannot be
        fetched for any configured repo.
    """
    def sanitize_repo(repo):
        """Convert bare paths to file:/// URIs, and silently reject protocols unhandled by yum"""
        if repo.startswith("/"):
            return "file://{0}".format(repo)
        elif any(
                repo.startswith(p)
                for p in ('http://', 'https://', 'ftp://', 'file://')):
            return repo
        else:
            return None

    mirrorlists = mirrorlists or []
    enablerepos = enablerepos or []
    disablerepos = disablerepos or []

    # sanitize the repositories
    sources = list(sanitize_repo(r) for r in sources)
    mirrorlists = list(sanitize_repo(r) for r in mirrorlists)

    # remove invalid repositories
    sources = list(r for r in sources if r)
    mirrorlists = list(r for r in mirrorlists if r)

    if not cachedir:
        cachedir = os.path.join(tempdir, "dnf.cache")
    if not os.path.isdir(cachedir):
        os.mkdir(cachedir)

    if not logdir:
        logdir = os.path.join(tempdir, "dnf.logs")
        if not os.path.isdir(logdir):
            os.mkdir(logdir)

    dnfbase = dnf.Base()
    # Enable DNF plugins (typo fix: was "pluings")
    # NOTE: These come from the HOST system's environment
    if dnfplugins:
        if dnfplugins[0] == "*":
            # Enable them all
            dnfbase.init_plugins()
        else:
            # Only enable the listed plugins
            dnfbase.init_plugins(disabled_glob=["*"],
                                 enable_plugins=dnfplugins)
    conf = dnfbase.conf
    conf.logdir = logdir
    conf.cachedir = cachedir

    conf.install_weak_deps = False
    conf.releasever = releasever
    conf.installroot = installroot
    conf.prepend_installroot('persistdir')
    # this is a weird 'AppendOption' thing that, when you set it,
    # actually appends. Doing this adds 'nodocs' to the existing list
    # of values, over in libdnf, it does not replace the existing values.
    conf.tsflags = ['nodocs']
    # Log details about the solver
    conf.debug_solver = True

    if proxy:
        conf.proxy = proxy

    # idiom fix: was `if sslverify == False:`
    if not sslverify:
        conf.sslverify = False

    # DNF 3.2 needs to have module_platform_id set, otherwise depsolve won't work correctly
    if not os.path.exists("/etc/os-release"):
        log.warning(
            "/etc/os-release is missing, cannot determine platform id, falling back to %s",
            DEFAULT_PLATFORM_ID)
        platform_id = DEFAULT_PLATFORM_ID
    else:
        os_release = flatconfig("/etc/os-release")
        platform_id = os_release.get("PLATFORM_ID", DEFAULT_PLATFORM_ID)
    log.info("Using %s for module_platform_id", platform_id)
    conf.module_platform_id = platform_id

    # Add .repo files
    if repos:
        reposdir = os.path.join(tempdir, "dnf.repos")
        if not os.path.isdir(reposdir):
            os.mkdir(reposdir)
        for r in repos:
            shutil.copy2(r, reposdir)
        conf.reposdir = [reposdir]
        dnfbase.read_all_repos()

    def _add_repo(repo_name, source, is_mirrorlist):
        """Create, enable and load one repo; return False if its metadata fetch fails."""
        repo = dnf.repo.Repo(repo_name, conf)
        if is_mirrorlist:
            repo.mirrorlist = source
        else:
            repo.baseurl = [source]
        if proxy:
            repo.proxy = proxy
        repo.enable()
        dnfbase.repos.add(repo)
        log.info("Added '%s': %s", repo_name, source)
        log.info("Fetching metadata...")
        try:
            repo.load()
        except dnf.exceptions.RepoError as e:
            log.error("Error fetching metadata for %s: %s", repo_name, e)
            return False
        return True

    # add the sources (deduplicated with the mirrorlist loop via _add_repo)
    for i, r in enumerate(sources):
        if "SRPM" in r or "srpm" in r:
            log.info("Skipping source repo: %s", r)
            continue
        if not _add_repo("lorax-repo-%d" % i, r, is_mirrorlist=False):
            return None

    # add the mirrorlists
    for i, r in enumerate(mirrorlists):
        if "SRPM" in r or "srpm" in r:
            log.info("Skipping source repo: %s", r)
            continue
        if not _add_repo("lorax-mirrorlist-%d" % i, r, is_mirrorlist=True):
            return None

    # Enable repos listed on the cmdline
    for r in enablerepos:
        repolist = dnfbase.repos.get_matching(r)
        if not repolist:
            log.warning("%s is an unknown repo, not enabling it", r)
        else:
            repolist.enable()
            log.info("Enabled repo %s", r)

    # Disable repos listed on the cmdline
    for r in disablerepos:
        repolist = dnfbase.repos.get_matching(r)
        if not repolist:
            log.warning("%s is an unknown repo, not disabling it", r)
        else:
            repolist.disable()
            log.info("Disabled repo %s", r)

    dnfbase.fill_sack(load_system_repo=False)
    dnfbase.read_comps()

    return dnfbase
def _get_rpms_in_external_repo(repo_url, arches, cache_dir_name):
    """
    Get the available RPMs in the external repo for the provided arches.

    :param str repo_url: the URL of the external repo with the "$arch" variable included
    :param list arches: the list of arches to query the external repo for
    :param str cache_dir_name: the cache directory name under f"{conf.cache_dir}/dnf"
    :return: a set of the RPM NEVRAs
    :rtype: set
    :raises RuntimeError: if the cache is not writeable or the external repo couldn't be loaded
    :raises ValueError: if there is no "$arch" variable in repo URL
    """
    if "$arch" not in repo_url:
        raise ValueError(
            "The external repo {} does not contain the $arch variable".format(
                repo_url))

    base = dnf.Base()
    try:
        dnf_conf = base.conf
        # Expire the metadata right away so that when a repo is loaded, it will always check to
        # see if the external repo has been updated
        dnf_conf.metadata_expire = 0

        cache_location = os.path.join(conf.cache_dir, "dnf", cache_dir_name)
        try:
            # exist_ok=True can't be used in Python 2
            os.makedirs(cache_location, mode=0o0770)
        except OSError as e:
            # Don't fail if the directories already exist
            if e.errno != errno.EEXIST:
                log.exception("Failed to create the cache directory %s",
                              cache_location)
                raise RuntimeError("The MBS cache is not writeable.")

        # Tell DNF to use the cache directory
        dnf_conf.cachedir = cache_location
        # Don't skip repos that can't be synchronized
        dnf_conf.skip_if_unavailable = False
        dnf_conf.timeout = conf.dnf_timeout
        # Get rid of everything to be sure it's a blank slate. This doesn't delete the cached repo
        # data.
        base.reset(repos=True, goal=True, sack=True)

        # Add a separate repo for each architecture
        for arch in arches:
            # Convert arch to canon_arch. This handles cases where Koji "i686" arch is mapped to
            # "i386" when generating RPM repository.
            canon_arch = koji.canonArch(arch)
            repo_name = "repo_{}".format(canon_arch)
            repo_arch_url = repo_url.replace("$arch", canon_arch)
            base.repos.add_new_repo(
                repo_name,
                dnf_conf,
                baseurl=[repo_arch_url],
                minrate=conf.dnf_minrate,
            )

        try:
            # Load the repos in parallel
            base.update_cache()
        except dnf.exceptions.RepoError:
            msg = "Failed to load the external repos"
            log.exception(msg)
            raise RuntimeError(msg)

        # dnf will not always raise an error on repo failures, so we check explicitly
        for repo_name in base.repos:
            if not base.repos[repo_name].metadata:
                msg = "Failed to load metadata for repo %s" % repo_name
                # Use log.error here: there is no active exception in this branch, so
                # log.exception would append a misleading "NoneType: None" traceback.
                log.error(msg)
                raise RuntimeError(msg)

        base.fill_sack(load_system_repo=False)

        # Return all the available RPMs
        nevras = set()
        for rpm in base.sack.query().available():
            rpm_dict = {
                "arch": rpm.arch,
                "epoch": rpm.epoch,
                "name": rpm.name,
                "release": rpm.release,
                "version": rpm.version,
            }
            nevra = kobo.rpmlib.make_nvra(rpm_dict, force_epoch=True)
            nevras.add(nevra)
    finally:
        base.close()

    return nevras
Example #21
0
 def setUp(self):
     """Wire a fresh Cli to a brand-new dnf Base for every test."""
     self.cli = dnf.cli.cli.Cli(base=dnf.Base())
Example #22
0
 def test_default_config_root(self):
     """A default-constructed Base carries a conf whose cachedir lives under /var/cache/dnf."""
     base = dnf.Base()
     self.assertIsNotNone(base.conf)
     cachedir = base.conf.cachedir
     self.assertIsNotNone(cachedir)
     self.assertIsNotNone(re.match('/var/cache/dnf', cachedir))
Example #23
0
 def test_init(self):
     """Constructing a Cli around a fresh Base must not raise."""
     _ = dnf.cli.cli.Cli(base=dnf.Base())
Example #24
0
 def setUp(self):
     """Build a Base from an explicit Conf and point persist data at /tmp/tests."""
     config = dnf.conf.Conf()
     self.base = dnf.Base(config)
     self.base.conf.persistdir = "/tmp/tests"
Example #25
0
def get_sack():
    """Return a filled dnf sack with every configured repository loaded."""
    dnf_base = dnf.Base()
    dnf_base.read_all_repos()
    dnf_base.fill_sack()
    return dnf_base.sack
Example #26
0
#!/usr/bin/python3
# Disable the 'nodejs' module via the dnf Python API.
import dnf

# Load all configured repositories and their metadata into the sack.
base = dnf.Base()
base.read_all_repos()
base.fill_sack()

# Queue the 'nodejs' module for disabling.
module_base = dnf.module.module_base.ModuleBase(base)
module_base.disable(['nodejs'])

# Commit the queued module state change.
base.do_transaction()
Example #27
0
    def __init__(self, url, name, insecure=False, interactive=False, yumsrc_conf=YUMSRC_CONF, org="1", channel_label="",
                 no_mirrors=True, ca_cert_file=None, client_cert_file=None,
                 client_key_file=None):
        # insecure and interactive are not implemented for this module.
        """
        Plugin constructor.

        Builds a dnf Base configured from *yumsrc_conf*, resolves (or creates)
        the dnf Repo object for (*name*, *url*), loads proxy settings and
        performs authentication. /etc/rhn/rhn.conf settings take priority
        over yum.conf for the proxy configuration.
        """

        # Sanitize the name so it is usable as a dnf repo id.
        name = re.sub('[^a-zA-Z0-9_.:-]+', '_', name)
        # A bare filesystem path is treated as a file:// URL.
        if urlsplit(url).scheme:
          self.url = url
        else:
          self.url = "file://%s" % url
        self.name = name
        self.insecure = insecure
        self.interactive = interactive
        self.org = org if org else "NULL"
        self.proxy_hostname = None
        self.proxy_url = None
        self.proxy_user = None
        self.proxy_pass = None
        self.authtoken = None
        self.sslcacert = ca_cert_file
        self.sslclientcert = client_cert_file
        self.sslclientkey = client_key_file
        self.http_headers = {}

        self.dnfbase = dnf.Base()
        self.dnfbase.conf.read(yumsrc_conf)
        # Fall back to an empty config when yumsrc_conf does not exist.
        if not os.path.exists(yumsrc_conf):
            self.dnfbase.conf.read('/dev/null')
        self.configparser = ConfigParser()      # Reading config file directly as dnf only reads MAIN section.
        self.configparser.setSubstitutions( dnf.Base().conf.substitutions)
        self.configparser.read(yumsrc_conf)
        # Keep cache directories separated per organization.
        self.dnfbase.conf.cachedir = os.path.join(CACHE_DIR, self.org)


        # store the configuration and restore it at the end.
        comp = CFG.getComponent()
        # read the proxy configuration
        # /etc/rhn/rhn.conf has more priority than yum.conf
        initCFG('server.satellite')

        # ensure the config namespace will be switched back in any case
        try:
            # keep authtokens for mirroring
            (_scheme, _netloc, _path, query, _fragid) = urlsplit(url)
            if query:
                self.authtoken = query

            # load proxy configuration based on the url
            self._load_proxy_settings(self.url)

            # perform authentication if implemented
            self._authenticate(url)

            # Check for settings in yum configuration files (for custom repos/channels only)
            if org:
                repos = self.dnfbase.repos
            else:
                repos = None
            if repos and name in repos:
                repo = repos[name]
            elif repos and channel_label in repos:
                repo = repos[channel_label]
                # In case we are using Repo object based on channel config, override its id to name of the repo
                # To not create channel directories in cache directory
                repo.id = name
            else:
                # Not using values from config files
                repo = dnf.repo.Repo(name,self.dnfbase.conf)
                repo.repofile = yumsrc_conf
                # pylint: disable=W0212
                repo._populate(self.configparser, name, yumsrc_conf)
            self.repo = repo

            self.yumbase = self.dnfbase # for compatibility

            self.setup_repo(repo, no_mirrors, ca_cert_file, client_cert_file, client_key_file)
            self.num_packages = 0
            self.num_excluded = 0
            self.groupsfile = None

            # configure network connection
            try:
                # bytes per second
                self.minrate = int(CFG.REPOSYNC_MINRATE)
            except ValueError:
                self.minrate = 1000
            try:
                # seconds
                self.timeout = int(CFG.REPOSYNC_TIMEOUT)
            except ValueError:
                self.timeout = 300


            # NOTE(review): self.repoid is not assigned anywhere in this __init__ —
            # presumably setup_repo() sets it; confirm before relying on it here.
            self.repo = self.dnfbase.repos[self.repoid]
            self.get_metadata_paths()
        finally:
            # set config component back to original
            initCFG(comp)
Example #28
0
    def run_push(self):
        """Push the local template state (packages, repos, objects) to the service.

        The template is built either from a kickstart file or by analysing the
        locally installed packages and enabled repos, then the delta is uploaded
        unless this is a dry run.

        :return: 0 on success or when there is nothing to do, 1 on service error.
        """
        t = Template(self.args.template, user=self.args.username)

        # grab the template we're pushing to
        try:
            t = self.cs.template_get(t)

        except ServiceException as e:
            logging.exception(e)
            return 1

        if self.args.push_clean:
            t.clear()

        if self.args.kickstart is not None:
            logging.info('Parsing kickstart ...')
            t.from_kickstart(self.args.kickstart)

        else:
            # prepare dnf
            logging.info('Analysing system ...')
            db = dnf.Base()
            db.read_all_repos()
            db.read_comps()

            try:
                db.fill_sack()

            except OSError:
                # best effort: continue with whatever sack data is available
                pass

            db_list = db.iter_userinstalled()

            if self.args.push_all:
                db_list = db.sack.query().installed()

            # add our user installed packages
            for p in db_list:
                # no need to store versions
                t.add_package(Package(p, evr=False))

            # add only enabled repos
            for r in db.repos.enabled():
                t.add_repo(Repository(r))

        # sort the deltas by name for stable, readable output
        objects = sorted(t.objects_delta, key=lambda x: x.name)
        packages = sorted(t.packages_delta, key=lambda x: x.name)
        repos = sorted(t.repos_delta, key=lambda x: x.name)

        # describe process for dry runs
        if self.args.dry_run:
            if packages or repos:
                print('The following would be added to the template: {0}'.format(t.name))

                for r in repos:
                    print('  - ' + str(r))

                for p in packages:
                    print('  - ' + str(p))

                for o in objects:
                    print('  - ' + str(o))

                print()
                print('Summary:')
                print('  - Repo(s): %d' % (len(repos)))
                print('  - Package(s): %d' % (len(packages)))
                print('  - Object(s): %d' % (len(objects)))
                print()

            else:
                print('No template changes required.')

            # fixed typo: "peformed" -> "performed"
            logging.info('No action performed during this dry-run.')
            return 0

        if self.args.kickstart is None and not packages and not repos:
            logging.info('No changes detected, template up to date.')
            return 0

        # push our updated template
        try:
            self.cs.template_update(t)

        except ServiceException as e:
            logging.exception(e)
            return 1

        logging.info('Template pushed.')
        return 0
Example #29
0
 def setUp(self):
     self.base = dnf.Base(dnf.conf.Conf())