def _create(self):
    self._cleanup()
    for directory in itertools.chain(self.additional_directories,
                                     (self.target,)):
        try:
            _makedirs(directory, exists_ok=True)
        except OSError as e:
            raise MountError(
                'Failed to create mount target directory {}'.format(
                    directory), str(e))
    try:
        run(['mount'] + self._mount_options() + [self.target], split=False)
    except (OSError, CalledProcessError) as e:
        api.current_logger().warn('Mounting %s failed with: %s',
                                  self.target,
                                  str(e),
                                  exc_info=True)
        raise MountError(
            message='Mount operation with mode {} from {} to {} failed: {}'
            .format(self._mode, self.source, self.target, str(e)),
            details=None)
    return self
Example #2
def resolve_conflicting_requests(tasks):
    """
    Do not remove what is supposed to be kept or installed.

    PES events may give us conflicting requests - to both install/keep and remove a pkg.
    Example of two real-world PES events resulting in a conflict:
      PES event 1: sip-devel  SPLIT INTO   python3-sip-devel, sip
      PES event 2: sip        SPLIT INTO   python3-pyqt5-sip, python3-sip
        -> without this function, sip would reside in both [Task.KEEP] and [Task.REMOVE], causing a dnf conflict
    """
    pkgs_in_conflict = set()
    for pkg in list(tasks[Task.INSTALL].keys()) + list(
            tasks[Task.KEEP].keys()):
        if pkg in tasks[Task.REMOVE]:
            pkgs_in_conflict.add(pkg)
            del tasks[Task.REMOVE][pkg]

    if pkgs_in_conflict:
        api.current_logger().debug(
            'The following packages were marked to be kept/installed and removed at the same'
            ' time. Leapp will upgrade them.\n{}'.format('\n'.join(
                sorted(pkgs_in_conflict))))
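A minimal, self-contained sketch of the data shape this function expects and the effect of the resolution. The Task enum and the package/repository values below are stand-ins for illustration only; the real Task constants and tasks dict come from leapp's PES-events handling.

import enum

class Task(enum.Enum):  # stand-in for leapp's Task constants (assumption)
    INSTALL = 'install'
    KEEP = 'keep'
    REMOVE = 'remove'

# Each bucket maps package name -> associated repositories (illustrative values).
tasks = {
    Task.INSTALL: {'python3-sip-devel': ['rhel8-AppStream']},
    Task.KEEP: {'sip': ['rhel8-AppStream']},
    Task.REMOVE: {'sip': ['rhel7-optional']},  # conflicts with Task.KEEP
}

# The same resolution rule as above: drop the conflicting package from REMOVE.
pkgs_in_conflict = set()
for pkg in list(tasks[Task.INSTALL]) + list(tasks[Task.KEEP]):
    if pkg in tasks[Task.REMOVE]:
        pkgs_in_conflict.add(pkg)
        del tasks[Task.REMOVE][pkg]

print(sorted(pkgs_in_conflict))  # ['sip']
print(tasks[Task.REMOVE])        # {}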
Example #3
def filter_out_transaction_conf_pkgs(tasks, transaction_configuration):
    """
    Filter out those PES events conflicting with the higher priority transaction configuration files.

    :param tasks: A dict with three dicts holding pkgs to keep, to install and to remove
    :param transaction_configuration: RpmTransactionTasks model instance with pkgs to install, keep and remove based
                                      on the user configuration files
    """
    do_not_keep = [
        p for p in tasks[Task.KEEP] if p in transaction_configuration.to_remove
    ]
    do_not_install = [
        p for p in tasks[Task.INSTALL]
        if p in transaction_configuration.to_remove
    ]
    do_not_remove = [
        p for p in tasks[Task.REMOVE]
        if p in transaction_configuration.to_install
        or p in transaction_configuration.to_keep
    ]

    for pkg in do_not_keep:
        # Removing a package from the to_keep dict may mean that some repositories won't get enabled
        tasks[Task.KEEP].pop(pkg)

    if do_not_install:
        for pkg in do_not_install:
            tasks[Task.INSTALL].pop(pkg)
        api.current_logger().debug(
            'The following packages will not be installed because of the'
            ' /etc/leapp/transaction/to_remove transaction configuration file:'
            '\n- ' + '\n- '.join(sorted(do_not_install)))
    if do_not_remove:
        for pkg in do_not_remove:
            tasks[Task.REMOVE].pop(pkg)
        api.current_logger().debug(
            'The following packages will not be removed because of the to_keep and to_install'
            ' transaction configuration files in /etc/leapp/transaction/:'
            '\n- ' + '\n- '.join(sorted(do_not_remove)))
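A small sketch of the precedence rule this enforces, again with stand-ins for leapp's Task constants and the RpmTransactionTasks model: packages named in the user's transaction configuration override whatever the PES events put into the KEEP/INSTALL/REMOVE buckets.

import enum
from collections import namedtuple

class Task(enum.Enum):  # stand-in (assumption)
    INSTALL = 'install'
    KEEP = 'keep'
    REMOVE = 'remove'

# Stand-in for the RpmTransactionTasks model instance (assumption).
TransactionConfiguration = namedtuple('TransactionConfiguration',
                                      ['to_install', 'to_keep', 'to_remove'])

tasks = {
    Task.INSTALL: {'foo': ['repo-a']},
    Task.KEEP: {'bar': ['repo-b']},
    Task.REMOVE: {'baz': ['repo-c']},
}
config = TransactionConfiguration(to_install=['baz'], to_keep=[], to_remove=['foo', 'bar'])

# The user configuration wins over the PES events:
for pkg in [p for p in tasks[Task.KEEP] if p in config.to_remove]:
    tasks[Task.KEEP].pop(pkg)
for pkg in [p for p in tasks[Task.INSTALL] if p in config.to_remove]:
    tasks[Task.INSTALL].pop(pkg)
for pkg in [p for p in tasks[Task.REMOVE]
            if p in config.to_install + config.to_keep]:
    tasks[Task.REMOVE].pop(pkg)

print(tasks)  # all three buckets end up empty in this example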
def set_container_mode(context):
    """
    Put RHSM into the container mode.

    Inside the container, we have to ensure that RHSM is not used AND that the
    host is not affected. If RHSM is not set into the container mode, the host
    could be affected and the repo file generated in the container could be
    affected as well (e.g. when the release is set, using rhsm, on the host).

    :param context: An instance of a mounting.IsolatedActions class
    :type context: mounting.IsolatedActions class
    """
    if not context.is_isolated():
        api.current_logger().error('Trying to set RHSM into the container mode'
                                   ' on the host. Skipping the action.')
        return
    try:
        context.call(['ln', '-s', '/etc/rhsm', '/etc/rhsm-host'])
    except CalledProcessError:
        raise StopActorExecutionError(
            message=
            'Cannot set the container mode for the subscription-manager.')
def perform():
    packages = {'dnf'}
    for message in api.consume(RequiredTargetUserspacePackages):
        packages.update(message.packages)

    rhsm_info = next(api.consume(SourceRHSMInfo), None)
    if not rhsm_info and not rhsm.skip_rhsm():
        api.current_logger().warn(
            'Could not receive RHSM information - Is this system registered?')
        return

    xfs_info = next(api.consume(XFSPresence), XFSPresence())
    storage_info = next(api.consume(StorageInfo), None)
    if not storage_info:
        api.current_logger().error('No storage info available, cannot proceed.')
        return

    prod_cert_path = _get_product_certificate_path()
    with overlaygen.create_source_overlay(mounts_dir=constants.MOUNTS_DIR,
                                          scratch_dir=constants.SCRATCH_DIR,
                                          storage_info=storage_info,
                                          xfs_info=xfs_info) as overlay:
        with overlay.nspawn() as context:
            target_version = api.current_actor().configuration.version.target
            with rhsm.switched_certificate(context, rhsm_info, prod_cert_path,
                                           target_version) as target_rhsm_info:
                api.current_logger().debug(
                    'Target RHSM Info: SKUs: {skus} Repositories: {repos}'.
                    format(repos=target_rhsm_info.enabled_repos,
                           skus=rhsm_info.attached_skus if rhsm_info else []))
                target_repoids = gather_target_repositories(target_rhsm_info)
                api.current_logger().debug(
                    "Gathered target repositories: {}".format(
                        ', '.join(target_repoids)))
                if not target_repoids:
                    raise StopActorExecutionError(
                        message=
                        'There are no enabled target repositories for the upgrade process to proceed.',
                        details={
                            'hint':
                            ('Ensure your system is correctly registered with the subscription manager and that'
                             ' your current subscription is entitled to install the requested target version {version}'
                             ).format(version=api.current_actor().
                                      configuration.version.target)
                        })
                prepare_target_userspace(context, constants.TARGET_USERSPACE,
                                         target_repoids, list(packages))
                _prep_repository_access(context, constants.TARGET_USERSPACE)
                dnfplugin.install(constants.TARGET_USERSPACE)
                api.produce(
                    UsedTargetRepositories(repos=[
                        UsedTargetRepository(repoid=repo)
                        for repo in target_repoids
                    ]))
                api.produce(target_rhsm_info)
                api.produce(
                    TargetUserSpaceInfo(path=constants.TARGET_USERSPACE,
                                        scratch=constants.SCRATCH_DIR,
                                        mounts=constants.MOUNTS_DIR))
Example #6
def filter_out_transaction_conf_pkgs(tasks, transaction_configuration):
    """
    Filter out those PES events conflicting with the higher priority transaction configuration files.

    :param tasks: A dict with three dicts holding pkgs to keep, to install and to remove
    :param transaction_configuration: RpmTransactionTasks model instance with pkgs to install, keep and remove based
                                      on the user configuration files
    """
    pkgs_not_to_be_kept = []
    pkgs_not_to_be_installed = []
    pkgs_not_to_be_removed = []

    for pkg_to_keep in tasks['to_keep']:
        if pkg_to_keep in transaction_configuration.to_remove:
            pkgs_not_to_be_kept.append(pkg_to_keep)
    for pkg_to_install in tasks['to_install']:
        if pkg_to_install in transaction_configuration.to_remove:
            pkgs_not_to_be_installed.append(pkg_to_install)
    for pkg_to_remove in tasks['to_remove']:
        if pkg_to_remove in transaction_configuration.to_install + transaction_configuration.to_keep:
            pkgs_not_to_be_removed.append(pkg_to_remove)

    for pkg in pkgs_not_to_be_kept:
        # Removing a package from the to_keep dict may mean that some repositories won't get enabled
        tasks['to_keep'].pop(pkg)

    if pkgs_not_to_be_installed:
        for pkg in pkgs_not_to_be_installed:
            tasks['to_install'].pop(pkg)
        api.current_logger().debug('The following packages will not be installed because of the'
                                   ' /etc/leapp/transaction/to_remove transaction configuration file:'
                                   '\n- ' + '\n- '.join(sorted(pkgs_not_to_be_installed)))
    if pkgs_not_to_be_removed:
        for pkg in pkgs_not_to_be_removed:
            tasks['to_remove'].pop(pkg)
        api.current_logger().debug('The following packages will not be removed because of the to_keep and to_install'
                                   ' transaction configuration files in /etc/leapp/transaction/:'
                                   '\n- ' + '\n- '.join(sorted(pkgs_not_to_be_removed)))
Example #7
    def process(self):
        # Consume a single TCP Wrappers message
        tcp_wrappers_messages = self.consume(TcpWrappersFacts)
        tcp_wrappers_facts = next(tcp_wrappers_messages, None)
        if list(tcp_wrappers_messages):
            api.current_logger().warning(
                'Unexpectedly received more than one TcpWrappersFacts message.'
            )
        if not tcp_wrappers_facts:
            raise StopActorExecutionError(
                'Could not check tcp wrappers configuration',
                details={'details': 'No TcpWrappersFacts found.'})

        # Convert installed packages message to list
        packages = create_lookup(InstalledRedHatSignedRPM,
                                 field='items',
                                 key='name')

        found_packages = config_affects_daemons(tcp_wrappers_facts, packages,
                                                DAEMONS)

        if found_packages:
            report_with_links(
                title=
                'TCP Wrappers configuration affects some installed packages',
                summary=
                ('tcp_wrappers support has been removed in RHEL-8. '
                 'There is some configuration affecting installed packages (namely {}) '
                 'in /etc/hosts.deny or /etc/hosts.allow, which '
                 'is no longer going to be effective after update. '
                 'Please migrate it manually.'.format(
                     ', '.join(found_packages))),
                links=[{
                    'title': 'Replacing TCP Wrappers in RHEL 8',
                    'href': 'https://access.redhat.com/solutions/3906701'
                }],
                severity='high',
                flags=['inhibitor'])
def get_available_repo_ids(context, releasever=None):
    """
    Retrieve repo ids of all the repositories available through the subscription-manager.

    :param context: An instance of a mounting.IsolatedActions class
    :type context: mounting.IsolatedActions class
    :param releasever: Release version to pass to the `yum repoinfo` command
    :type releasever: string
    :return: Repositories that are available to the current system through the subscription-manager
    :rtype: List(string)
    """
    cmd = ['yum', 'repoinfo']
    if releasever:
        cmd.extend(['--releasever', releasever])
    try:
        result = context.call(cmd)
    except CalledProcessError as exc:
        raise StopActorExecutionError(
            'Unable to get list of available yum repositories.',
            details={
                'details': str(exc),
                'stderr': exc.stderr
            })
    _inhibit_on_duplicate_repos(result['stderr'])
    available_repos = list(_get_repos(result['stdout']))
    available_rhsm_repos = [
        repo.repoid for repo in available_repos
        if repo.file == _DEFAULT_RHSM_REPOFILE
    ]
    list_separator_fmt = '\n    - '
    if available_rhsm_repos:
        api.current_logger().info(
            'The following repoids are available through RHSM:{0}{1}'.format(
                list_separator_fmt,
                list_separator_fmt.join(available_rhsm_repos)))
    else:
        api.current_logger().info('There are no repos available through RHSM.')
    return available_rhsm_repos
Example #9
def check_memcached(memcached_installed):
    """Report potential issues in memcached configuration."""
    if not memcached_installed:
        api.current_logger().info('memcached package is not installed')
        return

    default_memcached_conf = is_sysconfig_default()
    disabled_udp_port = is_udp_disabled()

    if default_memcached_conf:
        reporting.create_report([
            reporting.Title(
                'memcached service is using default configuration'),
            reporting.Summary(
                'memcached in RHEL8 listens on loopback only and has UDP port disabled by default'
            ),
            reporting.Severity(reporting.Severity.MEDIUM),
            reporting.Tags(COMMON_REPORT_TAGS),
        ] + related)

    elif not disabled_udp_port:
        reporting.create_report([
            reporting.Title('memcached has enabled UDP port'),
            reporting.Summary(
                'memcached in RHEL7 has UDP port enabled by default, but it is disabled by default in RHEL8'
            ),
            reporting.Severity(reporting.Severity.MEDIUM),
            reporting.Tags(COMMON_REPORT_TAGS),
        ] + related)

    else:
        reporting.create_report([
            reporting.Title('memcached has already disabled UDP port'),
            reporting.Summary(
                'memcached in RHEL8 has UDP port disabled by default'),
            reporting.Severity(reporting.Severity.LOW),
            reporting.Tags(COMMON_REPORT_TAGS),
        ] + related)
Example #10
def switch_certificate(context, rhsm_info, cert_path):
    """
    Perform all actions needed to switch the passed RHSM product certificate.

    This function will copy the certificate to /etc/pki/product, and /etc/pki/product-default if necessary, and
    remove other product certificates from there.

    :param context: An instance of a mounting.IsolatedActions class
    :type context: mounting.IsolatedActions class
    :param rhsm_info: An instance of the RHSMInfo model
    :type rhsm_info: RHSMInfo model
    :param cert_path: Path to the product certificate to switch to
    :type cert_path: string
    """
    for existing in rhsm_info.existing_product_certificates:
        try:
            context.remove(existing)
        except OSError:
            api.current_logger().warning('Failed to remove existing certificate: %s', existing, exc_info=True)

    for path in ('/etc/pki/product', '/etc/pki/product-default'):
        if os.path.isdir(context.full_path(path)):
            context.copy_to(cert_path, os.path.join(path, os.path.basename(cert_path)))
def report_skipped_packages(title, message, package_repo_pairs, remediation=None):
    """Generate report message about skipped packages"""
    package_repo_pairs = sorted(package_repo_pairs)
    summary = '{} {}\n{}'.format(
        len(package_repo_pairs), message, '\n'.join(
            [
                '- {pkg} (repoid: {repo})'.format(pkg=pkg, repo=repo)
                for pkg, repo in package_repo_pairs
            ]
        )
    )
    report_content = [
        reporting.Title(title),
        reporting.Summary(summary),
        reporting.Severity(reporting.Severity.HIGH),
        reporting.Tags([reporting.Tags.REPOSITORY]),
    ]
    if remediation:
        report_content += [reporting.Remediation(hint=remediation)]
    report_content += [reporting.RelatedResource('package', p) for p, _ in package_repo_pairs]
    reporting.create_report(report_content)
    if is_verbose():
        api.current_logger().info(summary)
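A hypothetical call site for the helper above; it has to run inside a leapp actor so that reporting and api.current_logger() are available, and the title, message, package/repo pairs and remediation text are purely illustrative.

report_skipped_packages(
    title='Packages available only in excluded repositories will not be installed',
    message='packages will be skipped because they are available only in excluded repositories:',
    package_repo_pairs=[('foo', 'rhel-8-for-x86_64-extras-rpms'),
                        ('bar', 'codeready-builder-for-rhel-8-x86_64-rpms')],
    remediation='If you need these packages, enable their repositories with the --enablerepo option.',
)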
def _migrate_config(config, fileops=FileOperations()):
    if not config.tcp_wrappers and config.strict_ssl_read_eof is not None:
        return
    try:
        content = fileops.read(config.path)
    except IOError as e:
        api.current_logger().warning(
            'Failed to read vsftpd configuration file %s: %s' %
            (config.path, e))
        return
    lines = content.split('\n')
    if config.tcp_wrappers:
        lines = _replace_in_config(lines, TCP_WRAPPERS, 'NO')
    if config.strict_ssl_read_eof is None:
        lines = _replace_in_config(lines, STRICT_SSL_READ_EOF, 'NO')
    content = '\n'.join(lines)
    content += '\n'
    try:
        fileops.write(config.path, content)
    except IOError as e:
        api.current_logger().warning(
            'Failed to write vsftpd configuration file %s: %s' %
            (config.path, e))
Example #13
def read_nm_config(file_path=None):
    if file_path:
        try:
            with open(file_path, 'r') as f:
                r = f.read()
                return r
        except IOError as e:
            api.current_logger().warning(
                'Error reading NetworkManager configuration from {}: {}'.
                format(file_path, e))
            return None
    else:
        try:
            # Use 'NetworkManager --print-config' to read the configuration so
            # that the main configuration file and other files in
            # various directories get merged in the right way.
            r = run(['NetworkManager', '--print-config'],
                    split=False)['stdout']
            return r
        except (OSError, CalledProcessError) as e:
            api.current_logger().warning(
                'Error reading NetworkManager configuration: {}'.format(e))
            return None
Example #14
def interfaces():
    """
    Generator which produces Interface objects containing assorted interface properties relevant for network naming.
    """
    for dev in physical_interfaces():
        attrs = {}

        try:
            attrs['name'] = dev.sys_name
            attrs['devpath'] = dev.device_path
            attrs['driver'] = dev['ID_NET_DRIVER']
            attrs['vendor'] = dev['ID_VENDOR_ID']
            attrs['pci_info'] = PCIAddress(**pci_info(dev['ID_PATH']))
            attrs['mac'] = dev.attributes['address']
        except Exception as e:  # pylint: disable=broad-except
            # FIXME(msekleta): We should probably handle errors more granularly
            # Maybe we should inhibit upgrade process at this point
            api.current_logger().warn(
                'Failed to gather information about network interface: ' +
                str(e))
            continue

        yield Interface(**attrs)
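A minimal sketch of consuming the generator inside an actor, using only the Interface attributes gathered above (name, driver, mac); it assumes the surrounding actor already has the leapp api imported.

for iface in interfaces():
    api.current_logger().debug('Detected interface %s (driver %s, MAC %s)',
                               iface.name, iface.driver, iface.mac)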
Example #15
    def process(self):
        if os.path.isfile('/usr/bin/python3'):
            api.current_logger().info('The python3 file already exists; the actor can probably be removed.')
            return

        cmd = [
            'alternatives', '--install', '/usr/bin/python3', 'python3', '/usr/bin/python3.9', '1000000',
            '--slave', '/usr/share/man/man1/python3.1.gz', 'python3-man', '/usr/share/man/man1/python3.9.1.gz',
            '--slave', '/usr/bin/pip3', 'pip3', '/usr/bin/pip3.9',
            '--slave', '/usr/bin/pip-3', 'pip-3', '/usr/bin/pip-3.9',
            '--slave', '/usr/bin/easy_install-3', 'easy_install-3', '/usr/bin/easy_install-3.9',
            '--slave', '/usr/bin/pydoc3', 'pydoc3', '/usr/bin/pydoc3.9',
            '--slave', '/usr/bin/pydoc-3', 'pydoc-3', '/usr/bin/pydoc3.9',
            '--slave', '/usr/bin/pyvenv-3', 'pyvenv-3', '/usr/bin/pyvenv-3.9',
        ]

        try:
            run(cmd)
        except CalledProcessError as exc:
            raise StopActorExecutionError(
                message='Cannot create python3 alternatives; upgrade cannot be finished',
                details={'details': str(exc), 'stderr': exc.stderr},
            )
Example #16
def _report_excluded_repos(repos):
    api.current_logger().info(
        "The optional repository is not enabled. Excluding %r "
        "from the upgrade",
        repos,
    )

    report = [
        reporting.Title("Excluded RHEL 8 repositories"),
        reporting.Summary(
            "The following repositories are not supported by "
            "Red Hat and are excluded from the list of repositories "
            "used during the upgrade.\n- {}".format("\n- ".join(repos))),
        reporting.Severity(reporting.Severity.INFO),
        reporting.Tags([reporting.Tags.REPOSITORY]),
        reporting.Flags([reporting.Flags.FAILURE]),
        reporting.Remediation(hint=(
            "If some of the excluded repositories are still required during"
            " the upgrade, execute leapp with the --enablerepo option, passing"
            " the repoid of each repository that needs to be enabled"
            " (the option can be used multiple times).")),
    ]
    reporting.create_report(report)
Example #17
def _handle_rhsm_exceptions(hint=None):
    """
    Context manager that handles exceptions raised by `run` for subscription-manager calls.
    """
    try:
        yield
    except OSError as e:
        api.current_logger().error('Failed to execute subscription-manager executable')
        raise StopActorExecutionError(
            message='Unable to execute subscription-manager executable: {}'.format(str(e)),
            details={
                'hint': 'Please ensure subscription-manager is installed and executable.'
            }
        )
    except CalledProcessError as e:
        raise StopActorExecutionError(
            message='A subscription-manager command failed to execute',
            details={
                'details': str(e),
                'stderr': e.stderr,
                'hint': hint or 'Please ensure you have a valid RHEL subscription and your network is up.'
            }
        )
Example #18
def _get_repositories_mapping():
    """
    Get all repositories mapped from the repomap file and map repository ids to their respective names.

    :return: Dictionary with all repositories mapped.
    """
    repositories_mapping = {}

    repositories_map_msgs = api.consume(RepositoriesMap)
    repositories_map_msg = next(repositories_map_msgs, None)
    if list(repositories_map_msgs):
        api.current_logger().warning('Unexpectedly received more than one RepositoriesMap message.')
    if not repositories_map_msg:
        raise StopActorExecutionError(
            'Cannot parse RepositoriesMap data properly',
            details={'Problem': 'Did not receive a message with mapped repositories'}
        )

    for repository in repositories_map_msg.repositories:
        if repository.arch == api.current_actor().configuration.architecture:
            repositories_mapping[repository.to_pes_repo] = repository.to_repoid

    return repositories_mapping
Example #19
def add_output_pkgs_to_transaction_conf(transaction_configuration, events):
    """
    Add more packages for removal to transaction configuration if they can be derived as outputs of PES events.

    Output packages from an event are added to packages for removal only if all input packages are already there.

    :param transaction_configuration: RpmTransactionTasks model instance with pkgs to install, keep and remove based
                                      on the user configuration files
    :param events: List of Event tuples, where each event contains event type and input/output pkgs
    """
    message = 'Marking packages for removal:\n'

    for event in events:
        if event.action in ('Split', 'Merged', 'Replaced', 'Renamed'):
            if all([pkg in transaction_configuration.to_remove for pkg in event.in_pkgs]):
                transaction_configuration.to_remove.extend(event.out_pkgs)
                message += '- [{action}] {ins} -> {outs}\n'.format(
                    action=event.action,
                    ins=', '.join(sorted(event.in_pkgs.keys())),
                    outs=', '.join(sorted(event.out_pkgs.keys()))
                )

    api.current_logger().debug(message)
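A self-contained sketch of the rule described in the docstring, with simple namedtuple stand-ins for the Event and RpmTransactionTasks types (the real ones are leapp models); in_pkgs/out_pkgs are dicts keyed by package name, matching the .keys() calls above.

from collections import namedtuple

Event = namedtuple('Event', ['action', 'in_pkgs', 'out_pkgs'])          # stand-in
Config = namedtuple('Config', ['to_install', 'to_keep', 'to_remove'])   # stand-in

events = [Event(action='Split',
                in_pkgs={'sip-devel': None},
                out_pkgs={'python3-sip-devel': None, 'sip': None})]
config = Config(to_install=[], to_keep=[], to_remove=['sip-devel'])

for event in events:
    if event.action in ('Split', 'Merged', 'Replaced', 'Renamed'):
        # out_pkgs are added only when every input package is already marked for removal
        if all(pkg in config.to_remove for pkg in event.in_pkgs):
            config.to_remove.extend(event.out_pkgs)

print(config.to_remove)  # ['sip-devel', 'python3-sip-devel', 'sip']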
def get_kde_apps_info():
    installed = list()
    base_kde_apps = ("kde-baseapps", "okular", "ark", "kdepim", "konsole",
                     "gwenview", "kdenetwork", "kate", "kwrite")

    api.current_logger().info("  Detecting installed KDE apps  ")
    api.current_logger().info("================================")
    for app in [
            application for application in base_kde_apps
            if has_package(InstalledRPM, application)
    ]:
        api.current_logger().info("Application {0} is installed.".format(app))
        installed.append(app)
    api.current_logger().info("----------------------------------")

    return installed
Example #21
def process():
    pkgs = get_kernel_rpms()
    if not pkgs:
        # Hypothetical case; the user is not allowed to install any kernel that is not signed by RH.
        # In case we would like to be cautious, we could check whether there are no other
        # kernels installed as well.
        api.current_logger().error(
            'Cannot find any installed kernel signed by Red Hat.')
        raise StopActorExecutionError(
            'Cannot find any installed kernel signed by Red Hat.')

    if len(pkgs) > 1 and architecture.matches_architecture(
            architecture.ARCH_S390X):
        # It's a temporary solution, so no need to try to automate everything.
        title = 'Multiple kernels installed'
        summary = (
            'The upgrade process does not handle well the case when multiple kernels'
            ' are installed on s390x. There is a severe risk of the bootloader configuration'
            ' getting corrupted during the upgrade.')
        remediation = (
            'Boot into the most up-to-date kernel and remove all older'
            ' kernels installed on the machine before running Leapp again.')
        reporting.create_report([
            reporting.Title(title),
            reporting.Summary(summary),
            reporting.Severity(reporting.Severity.HIGH),
            reporting.Tags([reporting.Tags.KERNEL, reporting.Tags.BOOT]),
            reporting.Flags([reporting.Flags.INHIBITOR]),
            reporting.Remediation(hint=remediation),
            reporting.RelatedResource('package', 'kernel')
        ])

    newest = pkgs[-1]
    newest_release = get_kernel_rpm_release(newest)
    newest_version = get_kernel_rpm_version(newest)
    current_release = get_current_kernel_release()
    current_version = get_current_kernel_version()
    api.current_logger().debug('Current kernel: V {}, R {}'.format(
        current_version, current_release))
    api.current_logger().debug('Newest kernel: V {}, R {}'.format(
        newest_version, newest_release))

    if newest_release != current_release or newest_version != current_version:
        title = 'Newest installed kernel not in use'
        summary = ('To ensure a stable upgrade, the machine needs to be'
                   ' booted into the latest installed kernel.')
        remediation = ('Boot into the most up-to-date kernel installed'
                       ' on the machine before running Leapp again.')
        reporting.create_report([
            reporting.Title(title),
            reporting.Summary(summary),
            reporting.Severity(reporting.Severity.HIGH),
            reporting.Tags([reporting.Tags.KERNEL, reporting.Tags.BOOT]),
            reporting.Flags([reporting.Flags.INHIBITOR]),
            reporting.Remediation(hint=remediation),
            reporting.RelatedResource('package', 'kernel')
        ])
Example #22
def check_os_version(supported_version):
    """ Check OS version and inhibit upgrade if not the same as supported ones """
    if not isinstance(supported_version, dict):
        api.current_logger().warning('The supported version value is invalid.')
        raise StopActorExecution()

    release_id, version_id = version.current_version()

    if not version.matches_release(supported_version.keys(), release_id):
        reporting.create_report([
            reporting.Title('Unsupported OS'),
            reporting.Summary('Only RHEL is supported by the upgrade process'),
            reporting.Severity(reporting.Severity.HIGH),
            reporting.Tags(COMMON_REPORT_TAGS),
            reporting.Flags([reporting.Flags.INHIBITOR])
        ] + related)

        return

    if not isinstance(supported_version[release_id], list):
        raise StopActorExecutionError(
            'Invalid versions',
            details={'details': 'OS versions are invalid, please provide a valid list.'},
        )

    if not version.matches_version(supported_version[release_id], version_id):
        reporting.create_report([
            reporting.Title('Unsupported OS version'),
            reporting.Summary(
                'The supported OS versions for the upgrade process: {}'.format(
                    ', '.join(supported_version[release_id])
                )
            ),
            reporting.Severity(reporting.Severity.HIGH),
            reporting.Tags(COMMON_REPORT_TAGS),
            reporting.Flags([reporting.Flags.INHIBITOR])
        ] + related)
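A hypothetical invocation showing the mapping shape the code checks for: a release id mapped to a list of supported version strings (the concrete values are illustrative only).

check_os_version({'rhel': ['7.6', '7.7', '7.8', '7.9']})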
Example #23
    def process(self):
        # Consume a single TCP Wrappers message
        tcp_wrappers_messages = self.consume(TcpWrappersFacts)
        tcp_wrappers_facts = next(tcp_wrappers_messages, None)
        if list(tcp_wrappers_messages):
            api.current_logger().warning('Unexpectedly received more than one TcpWrappersFacts message.')
        if not tcp_wrappers_facts:
            raise StopActorExecutionError(
                'Could not check tcp wrappers configuration', details={'details': 'No TcpWrappersFacts found.'}
            )

        # Convert installed packages message to list
        packages = create_lookup(InstalledRedHatSignedRPM, field='items', key='name')

        found_packages = config_affects_daemons(tcp_wrappers_facts, packages, DAEMONS)

        if found_packages:
            create_report([
                reporting.Title('TCP Wrappers configuration affects some installed packages'),
                reporting.Summary(
                    'tcp_wrappers support has been removed in RHEL-8. '
                    'There is some configuration affecting installed packages (namely {}) '
                    'in /etc/hosts.deny or /etc/hosts.allow, which '
                    'is no longer going to be effective after update. '
                    'Please migrate it manually.'.format(', '.join(found_packages))
                ),
                reporting.Severity(reporting.Severity.HIGH),
                reporting.ExternalLink(
                    title='Replacing TCP Wrappers in RHEL 8',
                    url='https://access.redhat.com/solutions/3906701'
                ),
                reporting.Tags([reporting.Tags.SECURITY, reporting.Tags.NETWORK]),
                reporting.Flags([reporting.Flags.INHIBITOR]),
                reporting.RelatedResource('file', '/etc/hosts.allow'),
                reporting.RelatedResource('file', '/etc/hosts.deny'),
                reporting.RelatedResource('package', 'tcp_wrappers')
            ] + [reporting.RelatedResource('package', fp) for fp in found_packages])
Example #24
def copy_dracut_modules(context, modules):
    """
    Copy dracut modules into the target userspace.

    If duplicate requests to copy a dracut module are detected, log a debug
    message and skip copying any module that already exists inside DRACUT_DIR
    in the target userspace.
    """
    try:
        context.remove_tree(DRACUT_DIR)
    except EnvironmentError:
        pass
    for module in modules:
        if not module.module_path:
            continue
        dst_path = os.path.join(DRACUT_DIR,
                                os.path.basename(module.module_path))
        if os.path.exists(context.full_path(dst_path)):
            # we are safe to skip it as we know the module comes from the same path,
            # thanks to the actor that checks all initramfs tasks
            api.current_logger().debug(
                'The {name} dracut module has been already installed. Skipping.'
                .format(name=module.name))
            continue
        try:
            context.copytree_to(module.module_path, dst_path)
        except shutil.Error as e:
            api.current_logger().error(
                'Failed to copy dracut module "{name}" from "{source}" to "{target}"'
                .format(name=module.name,
                        source=module.module_path,
                        target=context.full_path(DRACUT_DIR)),
                exc_info=True)
            raise StopActorExecutionError(
                message=
                'Failed to install dracut modules required in the initram. Error: {}'
                .format(str(e)))
Example #25
def process():
    if not architecture.matches_architecture(architecture.ARCH_S390X):
        return
    cpuinfo = next(api.consume(CPUInfo), None)
    if cpuinfo is None:
        raise StopActorExecutionError(
            message=("Missing information about CPU."))

    if not cpuinfo.machine_type:
        # this is not expected to happen, but in case...
        api.current_logger().warning("The machine (CPU) type is empty.")

    if cpuinfo.machine_type not in SUPPORTED_MACHINE_TYPES:
        summary = (
            "The system cannot be upgraded because of an unsupported processor"
            " type. Based on the official documentation, z14 and z15 processors"
            " are supported on Red Hat Enterprise Linux 9 for the IBM Z"
            " architecture. The supported processors have machine types {}."
            " The detected machine type of the CPU is '{}'."
            .format(", ".join([str(i) for i in SUPPORTED_MACHINE_TYPES]),
                    cpuinfo.machine_type))
        report = [
            reporting.Title(
                "The processor is not supported by the target system."),
            reporting.Summary(summary),
            reporting.Severity(reporting.Severity.HIGH),
            reporting.Tags([reporting.Tags.SANITY]),
            reporting.Flags([reporting.Flags.INHIBITOR]),
            reporting.ExternalLink(
                title="Considerations in adopting RHEL 8",
                url=
                ("https://access.redhat.com/ecosystem/hardware/#/search?p=1&"
                 "c_version=Red%20Hat%20Enterprise%20Linux%208&ch_architecture=s390x"
                 ))
        ]
        # FIXME(dhorak): update the URL to the document once it exists
        reporting.create_report(report)
Example #26
def read_or_fetch(filename, directory="/etc/leapp/files", service=None, allow_empty=False):
    """
    Return contents of a file or fetch them from an online service if the file does not exist.
    """
    logger = api.current_logger()
    local_path = os.path.join(directory, filename)

    # try to get the data locally
    if not os.path.exists(local_path):
        logger.warning("File {lp} does not exist, falling back to online service".format(lp=local_path))
    else:
        try:
            with open(local_path) as f:
                data = f.read()
                if not allow_empty and not data:
                    _raise_error(local_path, "File {lp} exists but is empty".format(lp=local_path))
                logger.warning("File {lp} successfully read ({l} bytes)".format(lp=local_path, l=len(data)))
                return data
        except EnvironmentError:
            _raise_error(local_path, "File {lp} exists but couldn't be read".format(lp=local_path))
        except Exception as e:
            raise e

    # if the data is not present locally, fetch it from the online service
    service = service or get_env("LEAPP_SERVICE_HOST", default=SERVICE_HOST_DEFAULT)
    service_path = "{s}/api/pes/{f}".format(s=service, f=filename)
    proxy = get_env("LEAPP_PROXY_HOST")
    proxies = {"https": proxy} if proxy else None
    cert = ("/etc/pki/consumer/cert.pem", "/etc/pki/consumer/key.pem")
    response = None
    try:
        response = _request_data(service_path, cert=cert, proxies=proxies)
    except requests.exceptions.RequestException as e:
        logger.error(e)
        _raise_error(local_path, "Could not fetch {f} from {sp} (unreachable address).".format(
            f=filename, sp=service_path))
    # almost certainly missing certs
    except (OSError, IOError) as e:
        logger.error(e)
        _raise_error(local_path, ("Could not fetch {f} from {sp} (missing certificates). Is the machine"
                                  " registered?".format(f=filename, sp=service_path)))
    if response.status_code != 200:
        _raise_error(local_path, "Could not fetch {f} from {sp} (error code: {e}).".format(
            f=filename, sp=service_path, e=response.status_code))
    if not allow_empty and not response.text:
        _raise_error(local_path, "File {lp} successfully retrieved but it's empty".format(lp=local_path))
    logger.warning("File {sp} successfully retrieved and read ({l} bytes)".format(
        sp=service_path, l=len(response.text)))
    return response.text
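A hypothetical call site inside an actor: the filename is an illustrative assumption, and the LEAPP_SERVICE_HOST / LEAPP_PROXY_HOST environment variables used in the code control where the online fallback fetch goes.

import json

# Reads /etc/leapp/files/pes-events.json if present and non-empty, otherwise
# fetches it from the online service (the filename here is an assumption).
data = read_or_fetch('pes-events.json')
events = json.loads(data)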
Example #27
def scan_xfs():
    storage_info_msgs = api.consume(StorageInfo)
    storage_info = next(storage_info_msgs, None)

    if list(storage_info_msgs):
        api.current_logger().warning('Unexpectedly received more than one StorageInfo message.')

    fstab_data = set()
    mount_data = set()
    systemdmount_data = set()
    if storage_info:
        fstab_data = scan_xfs_fstab(storage_info.fstab)
        mount_data = scan_xfs_mount(storage_info.mount)
        systemdmount_data = scan_xfs_systemdmount(storage_info.systemdmount)

    mountpoints = fstab_data | mount_data | systemdmount_data
    mountpoints_ftype0 = list(filter(is_xfs_without_ftype, mountpoints))

    # At this point we only have XFS mountpoints; check whether any of them have ftype = 0
    api.produce(XFSPresence(
        present=len(mountpoints) > 0,
        without_ftype=len(mountpoints_ftype0) > 0,
        mountpoints_without_ftype=mountpoints_ftype0,
    ))
Example #28
def update_sane(debug_log=api.current_logger().debug,
                error_log=api.current_logger().error,
                is_installed=_check_package,
                append_function=_append_string,
                check_function=_macro_exists):
    """
    Iterate over the NEW_QUIRKS dictionary and update each configuration file.

    :param func debug_log: function for debug logging
    :param func error_log: function for error logging
    :param func is_installed: checks if the package is installed
    :param func append_function: appends a string into file
    :param func check_function: checks if a string exists in file
    """

    error_list = []

    if not is_installed('sane-backends'):
        return

    for path, lines in NEW_QUIRKS.items():

        debug_log('Updating SANE configuration file {}.'.format(path))

        try:
            update_config(path, lines, check_function, append_function)
        except (OSError, IOError) as error:
            error_list.append((path, error))

    if error_list:
        error_log('The files below have not been modified '
                  '(error message included):' + ''.join([
                      '\n    - {}: {}'.format(err[0], err[1])
                      for err in error_list
                  ]))
        return
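A sketch of exercising the injection points with test doubles, assuming the hook signatures implied by the default helpers (a path plus a string); it still relies on the module's NEW_QUIRKS and update_config, so it is meant to run in that module's context, e.g. from a unit test.

recorded = []

update_sane(
    debug_log=print,
    error_log=print,
    is_installed=lambda pkg: True,                      # pretend sane-backends is installed
    append_function=lambda path, text: recorded.append((path, text)),
    check_function=lambda path, text: False,            # pretend no quirk is present yet
)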
Example #29
def copy_dracut_modules(context, modules):
    """
    Copies our dracut modules into the target userspace.
    """
    try:
        shutil.rmtree(context.full_path('/dracut'))
    except EnvironmentError:
        pass
    for module in modules:
        try:
            context.copytree_to(
                module.module_path,
                os.path.join('/dracut', os.path.basename(module.module_path)))
        except shutil.Error as e:
            api.current_logger().error(
                'Failed to copy dracut module "{name}" from "{source}" to "{target}"'
                .format(name=module.name,
                        source=module.module_path,
                        target=context.full_path('/dracut')),
                exc_info=True)
            raise StopActorExecutionError(
                message=
                'Failed to install dracut modules required in the initram. Error: {}'
                .format(str(e)))
def _get_cmd_output(cmd, delim, expected_len):
    """ Verify if command exists and return output """
    if not any(
            os.access(os.path.join(path, cmd[0]), os.X_OK)
            for path in os.environ['PATH'].split(os.pathsep)):
        api.current_logger().warning("'%s': command not found" % cmd[0])
        return

    try:
        # FIXME: Will keep call to subprocess until our stdlib supports "env" parameter
        # when there is any fd except 0,1,2 open, lvm closes the fd and prints a warning.
        # In our case /dev/urandom has other fd opened, probably for caching purposes.
        output = subprocess.check_output(cmd,
                                         env={
                                             'LVM_SUPPRESS_FD_WARNINGS': '1',
                                             'PATH': os.environ['PATH']
                                         })

    except subprocess.CalledProcessError as e:
        api.current_logger().debug(
            "Command '%s' return non-zero exit status: %s" %
            (" ".join(cmd), e.returncode))
        return

    if bytes is not str:
        output = output.decode('utf-8')

    for entry in output.split('\n'):
        entry = entry.strip()
        if not entry:
            continue

        data = entry.split(delim)
        data.extend([''] * (expected_len - len(data)))

        yield data
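A hypothetical use of the generator above with standard LVM reporting options (--noheadings, --separator, -o); every yielded row is split on the delimiter and padded with empty strings up to expected_len, so it can be unpacked directly.

for pv_name, vg_name, pv_size in _get_cmd_output(
        ['pvs', '--noheadings', '--separator', ':', '-o', 'pv_name,vg_name,pv_size'],
        ':', 3):
    api.current_logger().debug('PV %s in VG %s (size %s)', pv_name, vg_name, pv_size)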