Example 1
def enable_rhel_subscriptions(module_target_sat, module_org, manifest, version):
    """Enable and sync RHEL rpms repos"""
    major = version.split('.')[0]
    minor = ""
    if major == '8':
        repo_names = ['rhel8_bos', 'rhel8_aps']
        minor = version[1:]
    else:
        repo_names = ['rhel7']

    rh_repos = []
    tasks = []
    for name in repo_names:
        rh_repo_id = enable_rhrepo_and_fetchid(
            basearch=DEFAULT_ARCHITECTURE,
            org_id=module_org.id,
            product=REPOS[name]['product'],
            repo=REPOS[name]['name'] + minor,
            reposet=REPOS[name]['reposet'],
            releasever=REPOS[name]['releasever'] + minor,
        )
        # Sync step because repo is not synced by default
        rh_repo = module_target_sat.api.Repository(id=rh_repo_id).read()
        task = rh_repo.sync(synchronous=False)
        tasks.append(task)
        rh_repos.append(rh_repo)
    for task in tasks:
        wait_for_tasks(
            search_query=f'id = {task["id"]}',
            poll_timeout=2500,
        )
        task_status = module_target_sat.api.ForemanTask(id=task['id']).poll()
        assert task_status['result'] == 'success'
    return rh_repos
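Every example in this listing funnels through `wait_for_tasks`. For orientation, here is a minimal sketch of such a poller, assuming a `search_tasks` callable that returns task dicts with a `state` field; robottelo's real helper lives in its own utilities and differs in detail:

import time

def wait_for_tasks_sketch(search_tasks, search_query, search_rate=5, max_tries=10):
    # Poll until every task matching the query has reached a terminal state,
    # or give up. `search_tasks` is a stand-in for the Foreman tasks API search.
    for _ in range(max_tries):
        tasks = search_tasks(search_query)
        if tasks and all(task['state'] == 'stopped' for task in tasks):
            return tasks
        time.sleep(search_rate)
    raise TimeoutError(f'tasks matching {search_query!r} did not finish')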
Example 2
def validate_task_status(repo_id, max_tries=10):
    wait_for_tasks(
        search_query='resource_type = Katello::Repository'
                     ' and owner.login = foreman_admin'
                     ' and resource_id = {}'.format(repo_id),
        max_tries=max_tries
    )
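The scoped-search string this wrapper builds recurs throughout the listing. Factored as a standalone sketch (the default owner login copied from the example above):

def repo_sync_query(repo_id, owner_login='foreman_admin'):
    # Same scoped-search string as above; the owner filter keeps the match
    # away from syncs started by other users on a busy Satellite.
    return (
        'resource_type = Katello::Repository'
        f' and owner.login = {owner_login}'
        f' and resource_id = {repo_id}'
    )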
Example 4
    def test_positive_post_hypervisors(self):
        """ Post large json file to /rhsm/hypervisors"

        :id: e344c9d2-3538-4432-9a74-b025e9ef852d

        :expectedresults:
            hypervisor/guest json can be posted and the task is success status

        :CaseLevel: Integration

        :CaseImportance: Medium

        :BZ: 1637042, 1769680
        """
        org = Org.info({'name': DEFAULT_ORG})
        data = hypervisor_json_create(hypervisors=100, guests=10)
        url = f"https://{settings.server.hostname}/rhsm/hypervisors/{org['label']}"
        auth = (settings.server.admin_username, settings.server.admin_password)
        result = requests.post(url, auth=auth, verify=False, json=data)
        if result.status_code != 200:
            if "foreman_tasks_sync_task_timeout" in result.text:
                task_id = re.findall('waiting for task (.*?) to finish',
                                     result.text)[-1]
                wait_for_tasks(search_query=f'id = {task_id}', max_tries=10)
            else:
                assert result.status_code == 200
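The recovery branch above hinges on parsing the blocked-task id out of the error body. A self-contained illustration of that parsing step, with the message text invented for the demo:

import re

# Invented sample of the error body the branch above parses:
body = (
    'Required lock is already taken by other running tasks '
    '(foreman_tasks_sync_task_timeout): '
    'waiting for task 123e4567-e89b-12d3-a456-426614174000 to finish'
)
task_id = re.findall('waiting for task (.*?) to finish', body)[-1]
assert task_id == '123e4567-e89b-12d3-a456-426614174000'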
Example 5
def test_positive_install_in_hc(module_org, activation_key, custom_repo,
                                target_sat):
    """Install errata in a host-collection

    :id: 6f0242df-6511-4c0f-95fc-3fa32c63a064

    :Setup: Errata synced on satellite server.

    :Steps: PUT /api/v2/hosts/bulk/update_content

    :expectedresults: errata is installed in the host-collection.

    :CaseLevel: System

    :BZ: 1983043
    """
    with VMBroker(nick=constants.DISTRO_RHEL7,
                  host_classes={'host': ContentHost},
                  _count=2) as clients:
        for client in clients:
            client.install_katello_ca(target_sat)
            client.register_contenthost(module_org.label, activation_key.name)
            assert client.subscribed
            client.add_rex_key(satellite=target_sat)
        host_ids = [client.nailgun_host.id for client in clients]
        _install_package(
            module_org,
            clients=clients,
            host_ids=host_ids,
            package_name=constants.FAKE_1_CUSTOM_PACKAGE,
        )
        host_collection = target_sat.api.HostCollection(
            organization=module_org).create()
        host_collection.host_ids = host_ids
        host_collection = host_collection.update(['host_ids'])
        task_id = target_sat.api.JobInvocation().run(data={
            'feature': 'katello_errata_install',
            'inputs': {'errata': str(CUSTOM_REPO_ERRATA_ID)},
            'targeting_type': 'static_query',
            'search_query': f'host_collection_id = {host_collection.id}',
            'organization_id': module_org.id,
        })['id']
        wait_for_tasks(
            search_query=f'label = Actions::RemoteExecution::RunHostsJob and id = {task_id}',
            search_rate=15,
            max_tries=10,
        )
        for client in clients:
            result = client.run(f'rpm -q {constants.FAKE_2_CUSTOM_PACKAGE}')
            assert result.status == 0
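Examples 5, 10 and 11 all hand-build the same job payload. A small factored sketch, with field names copied from those examples (whether the API accepts further fields is not shown here):

def errata_job_payload(errata_id, search_query, organization_id):
    # katello_errata_install payload exactly as hand-built in the
    # surrounding examples.
    return {
        'feature': 'katello_errata_install',
        'inputs': {'errata': str(errata_id)},
        'targeting_type': 'static_query',
        'search_query': search_query,
        'organization_id': organization_id,
    }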
Example 6
def test_positive_end_to_end_bulk_update(session, vm):
    """Create VM, set up VM as host, register it as a content host,
    read content host details, install a package ( e.g. walrus-0.71) and
    use bulk action (Update All Packages) to update the package by name
    to a later version.

    :id: d460ba30-82c7-11e9-9af5-54ee754f2151

    :expectedresults: package installation and update to a later version
        are successful.

    :BZ: 1712069

    :CaseLevel: System
    """
    hc_name = gen_string('alpha')
    description = gen_string('alpha')
    result = vm.run('yum -y install {0}'.format(FAKE_1_CUSTOM_PACKAGE))
    assert result.return_code == 0
    with session:
        # Ensure content host is searchable
        assert session.contenthost.search(
            vm.hostname)[0]['Name'] == vm.hostname
        # Update package using bulk action
        # use the Host Collection view to access Update Packages dialogue
        session.hostcollection.create({
            'name': hc_name,
            'unlimited_hosts': False,
            'max_hosts': 2,
            'description': description
        })
        session.hostcollection.associate_host(hc_name, vm.hostname)
        # make a note of time for later CLI wait_for_tasks, and include
        # 5 mins margin of safety.
        timestamp = (datetime.utcnow() -
                     timedelta(minutes=5)).strftime("%Y-%m-%d %H:%M")
        # Update the package by name
        session.hostcollection.manage_packages(
            hc_name,
            content_type='Package',
            packages=FAKE_1_CUSTOM_PACKAGE_NAME,
            action='update_all')
        # Wait for upload profile event (in case Satellite system slow)
        host = entities.Host().search(
            query={'search': 'name={}'.format(vm.hostname)})
        wait_for_tasks(
            search_query='label = Actions::Katello::Host::UploadProfiles'
            ' and resource_id = {}'
            ' and started_at >= "{}"'.format(host[0].id, timestamp),
            search_rate=15,
            max_tries=10,
        )
        # Ensure package updated to a later version
        packages = session.contenthost.search_package(
            vm.hostname, FAKE_2_CUSTOM_PACKAGE_NAME)
        assert packages[0]['Installed Package'] == FAKE_2_CUSTOM_PACKAGE
        # Delete content host
        session.contenthost.delete(vm.hostname)
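Several UI tests in this listing repeat the "note the time minus a safety margin" trick before triggering an action. A tiny helper capturing it, with the format string copied from the examples:

from datetime import datetime, timedelta

def task_window_start(margin_minutes=5):
    # UTC timestamp in the "YYYY-MM-DD HH:MM" form the task search expects,
    # shifted back to absorb clock skew and slow UI round trips.
    return (datetime.utcnow() - timedelta(minutes=margin_minutes)).strftime('%Y-%m-%d %H:%M')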
Example 7
def test_rhcloud_inventory_e2e(
    inventory_settings, organization_ak_setup, registered_hosts, session
):
    """Generate report and verify its basic properties

    :id: 833bd61d-d6e7-4575-887a-9e0729d0fa76

    :customerscenario: true

    :expectedresults:

        1. Report can be generated
        2. Report can be downloaded
        3. Report has non-zero size
        4. Report can be extracted
        5. JSON files inside report can be parsed
        6. metadata.json lists all and only slice JSON files in tar
        7. Host counts in metadata matches host counts in slices
        8. Assert Hostnames, IP addresses, and installed packages are present in report.

    :BZ: 1807829, 1926100
    """
    org, ak = organization_ak_setup
    virtual_host, baremetal_host = registered_hosts
    with session:
        session.organization.select(org_name=org.name)
        timestamp = (datetime.utcnow() - timedelta(minutes=2)).strftime('%Y-%m-%d %H:%M')
        session.cloudinventory.generate_report(org.name)
        wait_for_tasks(
            search_query='label = ForemanInventoryUpload::Async::GenerateReportJob'
            f' and started_at >= "{timestamp}"',
            search_rate=15,
            max_tries=10,
        )
        report_path = session.cloudinventory.download_report(org.name)
        inventory_data = session.cloudinventory.read(org.name)

    common_assertion(report_path, inventory_data, org)
    json_data = get_report_data(report_path)
    hostnames = [host['fqdn'] for host in json_data['hosts']]
    assert virtual_host.hostname in hostnames
    assert baremetal_host.hostname in hostnames
    ip_addresses = [
        host['system_profile']['network_interfaces'][0]['ipv4_addresses'][0]
        for host in json_data['hosts']
    ]
    ipv4_addresses = [host['ip_addresses'][0] for host in json_data['hosts']]
    assert virtual_host.ip_addr in ip_addresses
    assert baremetal_host.ip_addr in ip_addresses
    assert virtual_host.ip_addr in ipv4_addresses
    assert baremetal_host.ip_addr in ipv4_addresses
    all_host_profiles = [host['system_profile'] for host in json_data['hosts']]
    for host_profiles in all_host_profiles:
        assert 'installed_packages' in host_profiles
        assert len(host_profiles['installed_packages']) > 1
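The `get_report_data` helper hides the tar handling; a rough equivalent for pulling host FQDNs straight out of the downloaded archive, with the slice-file layout treated as an assumption:

import json
import tarfile

def report_hostnames(report_path):
    # Walk the slice JSON files inside the report tarball and collect FQDNs.
    # Assumes slices carry a top-level 'hosts' list, as the assertions above do.
    hostnames = []
    with tarfile.open(report_path) as tar:
        for member in tar.getmembers():
            if member.name.endswith('.json') and 'metadata' not in member.name:
                data = json.load(tar.extractfile(member))
                hostnames.extend(host['fqdn'] for host in data.get('hosts', []))
    return hostnames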
Example 8
def validate_task_status(repo_id, max_tries=6):
    """Wait for foreman_tasks to complete or timeout

    :param repo_id: Repository Id to identify the correct task
    :param max_tries: Max tries to poll for the task creation
    """
    wait_for_tasks(
        search_query='Actions::Katello::Repository::Sync'
        f' and resource_id = {repo_id}',
        max_tries=max_tries,
    )
Example 9
def test_convert2rhel_centos(target_sat, centos, activation_key_rhel, version):
    """Convert Centos linux to RHEL

    :id: 6f698440-7d85-4deb-8dd9-363ea9003b92

    :Steps:
        0. Have host registered to Satellite
        1. Check for operating system
        2. Convert host to RHEL

    :expectedresults: Host is converted to RHEL with correct os facts
        and subscription status

    :parametrized: yes

    :CaseImportance: Medium
    """
    host_content = target_sat.api.Host(id=centos.hostname).read_json()
    major = version.split('.')[0]
    assert host_content['operatingsystem_name'] == f"CentOS {major}"

    # execute job 'Convert 2 RHEL' on host
    template_id = (
        target_sat.api.JobTemplate().search(query={'search': 'name="Convert to RHEL"'})[0].id
    )
    job = target_sat.api.JobInvocation().run(
        synchronous=False,
        data={
            'job_template_id': template_id,
            'inputs': {
                'Activation Key': activation_key_rhel.id,
                'Restart': 'yes',
            },
            'targeting_type': 'static_query',
            'search_query': f'name = {centos.hostname}',
        },
    )
    # wait for job to complete
    wait_for_tasks(
        f'resource_type = JobInvocation and resource_id = {job["id"]}', poll_timeout=1000
    )
    result = target_sat.api.JobInvocation(id=job['id']).read()
    assert result.succeeded == 1

    # check facts: correct os and valid subscription status
    host_content = target_sat.api.Host(id=centos.hostname).read_json()
    # workaround for BZ 2080347
    assert (
        host_content['operatingsystem_name'].startswith(f"RHEL Server {version}")
        or host_content['operatingsystem_name'].startswith(f"RedHat {version}")
        or host_content['operatingsystem_name'].startswith(f"RHEL {version}")
    )
    assert host_content['subscription_status'] == 0
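The triple `startswith` assertion (the BZ 2080347 workaround) compresses nicely, since `str.startswith` accepts a tuple of prefixes; a sketch:

def looks_like_rhel(operatingsystem_name, version):
    # Accept the several spellings Satellite may report for a converted host
    # (see the BZ 2080347 workaround above).
    return operatingsystem_name.startswith(
        (f'RHEL Server {version}', f'RedHat {version}', f'RHEL {version}')
    )

assert looks_like_rhel('RHEL 7.9', '7.9')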
Example 10
def test_positive_install_in_host(module_org, activation_key, custom_repo,
                                  rhel7_contenthost, target_sat):
    """Install errata in a host

    :id: 1e6fc159-b0d6-436f-b945-2a5731c46df5

    :Setup: Errata synced on satellite server.

    :Steps: POST /api/v2/job_invocations/{hash}

    :expectedresults: errata is installed in the host.

    :parametrized: yes

    :CaseLevel: System

    :BZ: 1983043
    """
    rhel7_contenthost.install_katello_ca(target_sat)
    rhel7_contenthost.register_contenthost(module_org.label,
                                           activation_key.name)
    assert rhel7_contenthost.subscribed
    host_id = rhel7_contenthost.nailgun_host.id
    _install_package(
        module_org,
        clients=[rhel7_contenthost],
        host_ids=[host_id],
        package_name=constants.FAKE_1_CUSTOM_PACKAGE,
    )
    rhel7_contenthost.add_rex_key(satellite=target_sat)
    task_id = target_sat.api.JobInvocation().run(data={
        'feature': 'katello_errata_install',
        'inputs': {'errata': str(CUSTOM_REPO_ERRATA_ID)},
        'targeting_type': 'static_query',
        'search_query': f'name = {rhel7_contenthost.hostname}',
        'organization_id': module_org.id,
    })['id']
    wait_for_tasks(
        search_query=(
            f'label = Actions::RemoteExecution::RunHostsJob and id = {task_id}'
        ),
        search_rate=15,
        max_tries=10,
    )
    _validate_package_installed([rhel7_contenthost],
                                constants.FAKE_2_CUSTOM_PACKAGE)
Example 11
def test_positive_install_multiple_in_host(
    module_org, activation_key, custom_repo, rhel7_contenthost, default_sat
):
    """For a host with multiple applicable errata install one and ensure
    the rest of errata is still available

    :id: 67b7e95b-9809-455a-a74e-f1815cc537fc

    :customerscenario: true

    :BZ: 1469800, 1528275, 1983043, 1905560

    :expectedresults: errata installation task succeeded, available errata
        counter decreased by one; it's possible to schedule another errata
        installation

    :CaseImportance: Medium

    :parametrized: yes

    :CaseLevel: System
    """
    rhel7_contenthost.install_katello_ca(default_sat)
    rhel7_contenthost.register_contenthost(module_org.label, activation_key.name)
    assert rhel7_contenthost.subscribed
    host = rhel7_contenthost.nailgun_host
    for package in constants.FAKE_9_YUM_OUTDATED_PACKAGES:
        _install_package(
            module_org, clients=[rhel7_contenthost], host_ids=[host.id], package_name=package
        )
    host = host.read()
    applicable_errata_count = host.content_facet_attributes['errata_counts']['total']
    assert applicable_errata_count > 1
    rhel7_contenthost.add_rex_key(satellite=default_sat)
    for errata in settings.repos.yum_9.errata[1:4]:
        task_id = default_sat.api.JobInvocation().run(
            data={
                'feature': 'katello_errata_install',
                'inputs': {'errata': str(errata)},
                'targeting_type': 'static_query',
                'search_query': f'name = {rhel7_contenthost.hostname}',
                'organization_id': module_org.id,
            },
        )['id']
        wait_for_tasks(
            search_query=(f'label = Actions::RemoteExecution::RunHostsJob and id = {task_id}'),
            search_rate=20,
            max_tries=15,
        )
        host = host.read()
        applicable_errata_count -= 1
        assert host.content_facet_attributes['errata_counts']['total'] == applicable_errata_count
Example 12
def validate_task_status(repo_id, org_id, max_tries=10):
    """Wait for foreman_tasks to complete or timeout

    :param repo_id: Repository Id to identify the correct task
    :param max_tries: Max tries to poll for the task creation
    :param org_id: Org ID to ensure valid check on busy Satellite
    """
    wait_for_tasks(
        search_query='Actions::Katello::Repository::Sync'
        f' and organization_id = {org_id}'
        f' and resource_id = {repo_id}',
        max_tries=max_tries,
    )
Example 13
def validate_task_status(repo_id, max_tries=10, repo_backend_id=None):
    """Wait for Pulp and foreman_tasks to complete or timeout

    :param repo_id: Repository Id to identify the correct task
    :param max_tries: Max tries to poll for the task creation
    :param repo_backend_id: Backend identifier of repository to filter the
        pulp tasks
    """
    if repo_backend_id:
        wait_for_syncplan_tasks(repo_backend_id)
    wait_for_tasks(search_query='resource_type = Katello::Repository'
                   ' and owner.login = foreman_admin'
                   ' and resource_id = {}'.format(repo_id),
                   max_tries=max_tries)
Example 14
    def validate_task_status(repo_id, max_tries=10, repo_backend_id=None):
        """Wait for Pulp and foreman_tasks to complete or timeout

        :param repo_id: Repository Id to identify the correct task
        :param max_tries: Max tries to poll for the task creation
        :param repo_backend_id: Backend identifier of repository to filter the
            pulp tasks
        """
        if repo_backend_id:
            wait_for_syncplan_tasks(repo_backend_id)
        wait_for_tasks(
            search_query='resource_type = Katello::Repository'
                         ' and owner.login = foreman_admin'
                         ' and resource_id = {}'.format(repo_id),
            max_tries=max_tries
        )
Example 15
def validate_task_status(repo_id, max_tries=6, repo_name=None):
    """Wait for Pulp and foreman_tasks to complete or timeout

    :param repo_id: Repository Id to identify the correct task
    :param max_tries: Max tries to poll for the task creation
    :param repo_name: Repository name of repository to filter the
        pulp tasks
    """
    if repo_name:
        wait_for_syncplan_tasks(repo_name=repo_name)
    wait_for_tasks(
        search_query='resource_type = Katello::Repository'
        ' and owner.login = foreman_admin'
        f' and resource_id = {repo_id}',
        max_tries=max_tries,
    )
Example 16
def test_positive_run_capsule_upgrade_playbook():
    """Run Capsule Upgrade playbook against an External Capsule

    :id: 9ec6903d-2bb7-46a5-8002-afc74f06d83b

    :steps:
        1. Create a Capsule VM, add REX key.
        2. Run the Capsule Upgrade Playbook.

    :expectedresults: Capsule is upgraded successfully

    :CaseImportance: Medium
    """
    with CapsuleVirtualMachine() as capsule_vm:
        template_id = (entities.JobTemplate().search(
            query={'search': 'name="Capsule Upgrade Playbook"'})[0].id)

        add_remote_execution_ssh_key(capsule_vm.ip_addr)
        job = entities.JobInvocation().run(
            synchronous=False,
            data={
                'job_template_id': template_id,
                'inputs': {
                    'target_version': CAPSULE_TARGET_VERSION,
                    'whitelist_options': 'repositories-validate,repositories-setup',
                },
                'targeting_type': "static_query",
                'search_query': f"name = {capsule_vm.hostname}",
            },
        )
        wait_for_tasks(
            f"resource_type = JobInvocation and resource_id = {job['id']}")
        result = entities.JobInvocation(id=job['id']).read()
        assert result.succeeded == 1

        result = capsule_vm.run('foreman-maintain health check')
        assert result.return_code == 0
        for line in result.stdout:
            assert 'FAIL' not in line

        result = entities.SmartProxy(id=entities.SmartProxy(
            name=capsule_vm.hostname).search()[0].id).refresh()
        feature_list = [feat['name'] for feat in result['features']]
        assert {'Discovery', 'Dynflow', 'Ansible', 'SSH', 'Logs',
                'Pulp'}.issubset(feature_list)
Example 17
def test_positive_run_capsule_upgrade_playbook(capsule_configured,
                                               default_sat):
    """Run Capsule Upgrade playbook against an External Capsule

    :id: 9ec6903d-2bb7-46a5-8002-afc74f06d83b

    :steps:
        1. Create a Capsule VM, add REX key.
        2. Run the Capsule Upgrade Playbook.

    :expectedresults: Capsule is upgraded successfully

    :CaseImportance: Medium
    """
    template_id = (default_sat.api.JobTemplate().search(
        query={'search': 'name="Capsule Upgrade Playbook"'})[0].id)

    capsule_configured.add_rex_key(satellite=default_sat)
    job = default_sat.api.JobInvocation().run(
        synchronous=False,
        data={
            'job_template_id': template_id,
            'inputs': {
                'target_version': CAPSULE_TARGET_VERSION,
                'whitelist_options': 'repositories-validate,repositories-setup',
            },
            'targeting_type': 'static_query',
            'search_query': f'name = {capsule_configured.hostname}',
        },
    )
    wait_for_tasks(
        f'resource_type = JobInvocation and resource_id = {job["id"]}')
    result = default_sat.api.JobInvocation(id=job['id']).read()
    assert result.succeeded == 1

    result = capsule_configured.execute('foreman-maintain health check')
    assert result.status == 0
    for line in result.stdout:
        assert 'FAIL' not in line

    result = default_sat.api.SmartProxy(
        id=default_sat.api.SmartProxy(name=capsule_configured.hostname).search()[0].id
    ).refresh()
    feature_list = [feat['name'] for feat in result['features']]
    assert {'Discovery', 'Dynflow', 'Ansible', 'SSH', 'Logs', 'Pulp'}.issubset(feature_list)
Example 18
def host(
    rhel7_contenthost_module,
    module_manifest_org,
    dev_lce,
    qe_lce,
    custom_repo,
    module_ak,
    module_cv,
    default_sat,
):
    # Create the client machine and register it to Satellite with the module activation key
    rhel7_contenthost_module.install_katello_ca(default_sat)
    # Register, enable tools repo and install katello-host-tools.
    rhel7_contenthost_module.register_contenthost(module_manifest_org.label,
                                                  module_ak.name)
    rhel7_contenthost_module.enable_repo(REPOS['rhst7']['id'])
    rhel7_contenthost_module.install_katello_host_tools()
    # make a note of time for later wait_for_tasks, and include 4 mins margin of safety.
    timestamp = (datetime.utcnow() -
                 timedelta(minutes=4)).strftime('%Y-%m-%d %H:%M')
    # AK added custom repo for errata package, just install it.
    rhel7_contenthost_module.execute(f'yum install -y {FAKE_4_CUSTOM_PACKAGE}')
    rhel7_contenthost_module.execute('katello-package-upload')
    # Wait for applicability update event (in case Satellite system slow)
    wait_for_tasks(
        search_query='label = Actions::Katello::Applicability::Hosts::BulkGenerate'
        f' and started_at >= "{timestamp}"'
        ' and state = stopped'
        ' and result = success',
        search_rate=15,
        max_tries=10,
    )
    # Add filter of type include but do not include anything.
    # this will hide all RPMs from selected erratum before publishing.
    entities.RPMContentViewFilter(content_view=module_cv,
                                  inclusion=True,
                                  name='Include Nothing').create()
    module_cv.publish()
    module_cv = module_cv.read()
    return rhel7_contenthost_module
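This fixture's task query is the most constrained in the listing: it pins label, start-time window, terminal state, and result. Factored as a reusable sketch:

def finished_applicability_query(since):
    # Match only bulk applicability-generation tasks that started after
    # `since` and have already stopped successfully.
    return (
        'label = Actions::Katello::Applicability::Hosts::BulkGenerate'
        f' and started_at >= "{since}"'
        ' and state = stopped'
        ' and result = success'
    )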
Example 19
def test_host_errata_search_commands(request, module_org, module_cv,
                                     module_lce, host_collection,
                                     errata_hosts):
    """View a list of affected hosts for security (RHSA) and bugfix (RHBA) errata,
    filtered with errata status and applicable flags. Applicability is calculated using the
    Library, but Installability is calculated using the attached CV, and is subject to the
    CV's own filtering.

    :id: 07757a77-7ab4-4020-99af-2beceb023266

    :Setup: Errata synced on satellite server, custom package installed on errata hosts.

    :Steps:
        1.  host list --search 'errata_status = errata_needed'
        2.  host list --search 'errata_status = security_needed'
        3.  host list --search 'applicable_errata = <bugfix_advisory>'
        4.  host list --search 'applicable_errata = <security_advisory>'
        5.  host list --search 'applicable_rpms = <bugfix_package>'
        6.  host list --search 'applicable_rpms = <security_package>'
        7.  Create filter & rule to hide RPM (applicable vs. installable test)
        8.  Repeat steps 3 and 5, but 5 expects host name not found.


    :expectedresults: The hosts are correctly listed for security and bugfix advisories.
    """
    # Note the time for later wait_for_tasks; include a 2-minute margin of safety.
    timestamp = (datetime.utcnow() -
                 timedelta(minutes=2)).strftime('%Y-%m-%d %H:%M')

    errata = REPO_WITH_ERRATA['errata']

    # Update package on first host so that the security advisory doesn't apply.
    result = errata_hosts[0].execute(
        f'yum update -y {errata[0]["new_package"]}')
    assert result.status == 0, 'Failed to install rpm'

    # Update package on second host so that the bugfix advisory doesn't apply.
    result = errata_hosts[1].execute(
        f'yum update -y {errata[1]["new_package"]}')
    assert result.status == 0, 'Failed to install rpm'

    # Wait for upload profile event (in case Satellite system slow)
    host = entities.Host().search(
        query={'search': f'name={errata_hosts[0].hostname}'})
    wait_for_tasks(
        search_query=('label = Actions::Katello::Host::UploadProfiles'
                      f' and resource_id = {host[0].id}'
                      f' and started_at >= "{timestamp}"'),
        search_rate=15,
        max_tries=10,
    )

    # Step 1: Search for hosts that require bugfix advisories
    result = Host.list({
        'search': 'errata_status = errata_needed',
        'organization-id': module_org.id,
        'per-page': PER_PAGE_LARGE,
    })
    result = [item['name'] for item in result]
    assert errata_hosts[0].hostname in result
    assert errata_hosts[1].hostname not in result

    # Step 2: Search for hosts that require security advisories
    result = Host.list({
        'search': 'errata_status = security_needed',
        'organization-id': module_org.id,
        'per-page': PER_PAGE_LARGE,
    })
    result = [item['name'] for item in result]
    assert errata_hosts[0].hostname not in result
    assert errata_hosts[1].hostname in result

    # Step 3: Search for hosts that require the specified bugfix advisory
    result = Host.list({
        'search': f'applicable_errata = {errata[1]["id"]}',
        'organization-id': module_org.id,
        'per-page': PER_PAGE_LARGE,
    })
    result = [item['name'] for item in result]
    assert errata_hosts[0].hostname in result
    assert errata_hosts[1].hostname not in result

    # Step 4: Search for hosts that require the specified security advisory
    result = Host.list({
        'search': f'applicable_errata = {errata[0]["id"]}',
        'organization-id': module_org.id,
        'per-page': PER_PAGE_LARGE,
    })
    result = [item['name'] for item in result]
    assert errata_hosts[0].hostname not in result
    assert errata_hosts[1].hostname in result

    # Step 5: Search for hosts that require the specified bugfix package
    result = Host.list({
        'search': f'applicable_rpms = {errata[1]["new_package"]}',
        'organization-id': module_org.id,
        'per-page': PER_PAGE_LARGE,
    })
    result = [item['name'] for item in result]
    assert errata_hosts[0].hostname in result
    assert errata_hosts[1].hostname not in result

    # Step 6: Search for hosts that require the specified security package
    result = Host.list({
        'search': f'applicable_rpms = {errata[0]["new_package"]}',
        'organization-id': module_org.id,
        'per-page': PER_PAGE_LARGE,
    })
    result = [item['name'] for item in result]
    assert errata_hosts[0].hostname not in result
    assert errata_hosts[1].hostname in result

    # Step 7: Apply filter and rule to CV to hide RPM, thus making erratum not installable
    # Make RPM exclude filter
    cv_filter = make_content_view_filter({
        'content-view-id': module_cv.id,
        'name': 'erratum_search_test',
        'description': 'Hide the installable errata',
        'organization-id': module_org.id,
        'type': 'rpm',
        'inclusion': 'false',
    })

    @request.addfinalizer
    def cleanup():
        cv_filter_cleanup(cv_filter['filter-id'], module_cv, module_org,
                          module_lce)

    # Make rule to exclude the specified bugfix package
    make_content_view_filter_rule({
        'content-view-id': module_cv.id,
        'content-view-filter-id': cv_filter['filter-id'],
        'name': errata[1]['package_name'],
    })

    # Publish and promote a new version with the filter
    cv_publish_promote(module_cv, module_org, module_lce)

    # Step 8: Run tests again. Applicable should still be true, installable should now be false.
    # Search for hosts that require the bugfix package.
    result = Host.list({
        'search': f'applicable_rpms = {errata[1]["new_package"]}',
        'organization-id': module_org.id,
        'per-page': PER_PAGE_LARGE,
    })
    result = [item['name'] for item in result]
    assert errata_hosts[0].hostname in result
    assert errata_hosts[1].hostname not in result

    # Search for hosts that require the specified bugfix advisory.
    result = Host.list({
        'search': f'installable_errata = {errata[1]["id"]}',
        'organization-id': module_org.id,
        'per-page': PER_PAGE_LARGE,
    })
    result = [item['name'] for item in result]
    assert errata_hosts[0].hostname not in result
    assert errata_hosts[1].hostname not in result
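Steps 1 through 8 above repeat the same list-and-project idiom. A factored sketch, with the CLI wrapper passed in rather than imported; `per_page` mirrors the PER_PAGE_LARGE constant used above, whose real value is an assumption here:

def hostnames_matching(host_list, search, org_id, per_page=1000):
    # `host_list` stands in for the Host.list wrapper used in the test above.
    result = host_list({
        'search': search,
        'organization-id': org_id,
        'per-page': per_page,
    })
    return [item['name'] for item in result]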
Example 20
def test_positive_oscap_run_via_ansible_bz_1814988(module_org, default_proxy,
                                                   content_view,
                                                   lifecycle_env):
    """End-to-End Oscap run via ansible

    :id: 375f8f08-9299-4d16-91f9-9426eeecb9c5

    :parametrized: yes

    :customerscenario: true

    :setup: scap content, scap policy, host group

    :steps:

        1. Create a valid scap content
        2. Import Ansible role theforeman.foreman_scap_client
        3. Import Ansible Variables needed for the role
        4. Create a scap policy with ansible as the deploy option
        5. Associate the policy with a hostgroup
        6. Provision a host using the hostgroup
        7. Harden the host by remediating it with DISA STIG security policy
        8. Configure REX and associate the Ansible role to created host
        9. Play roles for the host

    :expectedresults: REX job should succeed and the ARF report should be sent to Satellite

    :BZ: 1814988

    :CaseImportance: Critical
    """
    hgrp_name = gen_string('alpha')
    policy_name = gen_string('alpha')
    # Creates host_group for rhel7
    make_hostgroup({
        'content-source-id': default_proxy,
        'name': hgrp_name,
        'organizations': module_org.name,
    })
    # Creates oscap_policy.
    scap_id, scap_profile_id = fetch_scap_and_profile_id(
        OSCAP_DEFAULT_CONTENT['rhel7_content'], OSCAP_PROFILE['dsrhel7'])
    Ansible.roles_import({'proxy-id': default_proxy})
    Ansible.variables_import({'proxy-id': default_proxy})
    role_id = Ansible.roles_list({'search': 'foreman_scap_client'})[0].get('id')
    make_scap_policy({
        'scap-content-id': scap_id,
        'hostgroups': hgrp_name,
        'deploy-by': 'ansible',
        'name': policy_name,
        'period': OSCAP_PERIOD['weekly'].lower(),
        'scap-content-profile-id': scap_profile_id,
        'weekday': OSCAP_WEEKDAY['friday'].lower(),
        'organizations': module_org.name,
    })
    with VMBroker(nick=DISTRO_RHEL7, host_classes={'host': ContentHost}) as vm:
        host_name, _, host_domain = vm.hostname.partition('.')
        vm.install_katello_ca()
        vm.register_contenthost(module_org.name, ak_name[DISTRO_RHEL7])
        assert vm.subscribed
        Host.set_parameter({
            'host': vm.hostname.lower(),
            'name': 'remote_execution_connect_by_ip',
            'value': 'True',
        })
        vm.configure_rhel_repo(settings.repos.rhel7_repo)
        # Harden the rhel7 client with DISA STIG security policy
        vm.run('yum install -y scap-security-guide')
        vm.run(
            'oscap xccdf eval --remediate --profile xccdf_org.ssgproject.content_profile_stig '
            '--fetch-remote-resources --results-arf results.xml '
            '/usr/share/xml/scap/ssg/content/ssg-rhel7-ds.xml', )
        add_remote_execution_ssh_key(vm.ip_addr)
        Host.update({
            'name': vm.hostname.lower(),
            'lifecycle-environment': lifecycle_env.name,
            'content-view': content_view.name,
            'hostgroup': hgrp_name,
            'openscap-proxy-id': default_proxy,
            'organization': module_org.name,
            'ansible-role-ids': role_id,
        })
        job_id = Host.ansible_roles_play({'name': vm.hostname.lower()})[0].get('id')
        wait_for_tasks(
            f'resource_type = JobInvocation and resource_id = {job_id} and action ~ "hosts job"'
        )
        try:
            result = JobInvocation.info({'id': job_id})['success']
            assert result == '1'
        except AssertionError:
            output = ' '.join(
                JobInvocation.get_output({
                    'id': job_id,
                    'host': vm.hostname
                }))
            result = f'host output: {output}'
            raise AssertionError(result)
        result = vm.run(
            'cat /etc/foreman_scap_client/config.yaml | grep profile')
        assert result.status == 0
        # Runs the actual oscap scan on the vm/clients and
        # uploads report to Internal Capsule.
        vm.execute_foreman_scap_client()
        # Assert whether oscap reports are uploaded to
        # Satellite6.
        result = Arfreport.list({'search': f'host={vm.hostname.lower()}'})
        assert result is not None
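The try/except dance around the job-result assertion recurs in Examples 20, 21, and 26. A factored sketch that fails with the job's own output rather than a bare comparison; `job_info` and `job_output` stand in for the JobInvocation.info and JobInvocation.get_output wrappers used above:

def assert_job_succeeded(job_info, job_output, job_id, hostname):
    # Surface the job's output in the assertion message on failure.
    if job_info({'id': job_id})['success'] != '1':
        output = ' '.join(job_output({'id': job_id, 'host': hostname}))
        raise AssertionError(f'host output: {output}')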
Example 21
def test_positive_oscap_run_via_ansible(module_org, default_proxy,
                                        content_view, lifecycle_env, distro):
    """End-to-End Oscap run via ansible

    :id: c7ea56eb-6cf1-4e79-8d6a-fb872d1bb804

    :parametrized: yes

    :setup: scap content, scap policy, host group

    :steps:

        1. Create a valid scap content
        2. Import Ansible role theforeman.foreman_scap_client
        3. Import Ansible Variables needed for the role
        4. Create a scap policy with ansible as the deploy option
        5. Associate the policy with a hostgroup
        6. Provision a host using the hostgroup
        7. Configure REX and associate the Ansible role to created host
        8. Play roles for the host

    :expectedresults: REX job should succeed and the ARF report should be sent to Satellite

    :BZ: 1716307

    :CaseImportance: Critical
    """
    if distro == 'rhel7':
        rhel_repo = settings.repos.rhel7_repo
        profile = OSCAP_PROFILE['security7']
    else:
        rhel_repo = settings.repos.rhel8_repo
        profile = OSCAP_PROFILE['ospp8']
    content = OSCAP_DEFAULT_CONTENT[f'{distro}_content']
    hgrp_name = gen_string('alpha')
    policy_name = gen_string('alpha')
    # Create a host group for the parametrized distro
    make_hostgroup({
        'content-source-id': default_proxy,
        'name': hgrp_name,
        'organizations': module_org.name,
    })
    # Creates oscap_policy.
    scap_id, scap_profile_id = fetch_scap_and_profile_id(content, profile)
    Ansible.roles_import({'proxy-id': default_proxy})
    Ansible.variables_import({'proxy-id': default_proxy})
    role_id = Ansible.roles_list({'search': 'foreman_scap_client'})[0].get('id')
    make_scap_policy({
        'scap-content-id': scap_id,
        'hostgroups': hgrp_name,
        'deploy-by': 'ansible',
        'name': policy_name,
        'period': OSCAP_PERIOD['weekly'].lower(),
        'scap-content-profile-id': scap_profile_id,
        'weekday': OSCAP_WEEKDAY['friday'].lower(),
        'organizations': module_org.name,
    })
    with VMBroker(nick=distro, host_classes={'host': ContentHost}) as vm:
        host_name, _, host_domain = vm.hostname.partition('.')
        vm.install_katello_ca()
        vm.register_contenthost(module_org.name, ak_name[distro])
        assert vm.subscribed
        Host.set_parameter({
            'host': vm.hostname.lower(),
            'name': 'remote_execution_connect_by_ip',
            'value': 'True',
        })
        vm.configure_rhel_repo(rhel_repo)
        add_remote_execution_ssh_key(vm.ip_addr)
        Host.update({
            'name': vm.hostname.lower(),
            'lifecycle-environment': lifecycle_env.name,
            'content-view': content_view.name,
            'hostgroup': hgrp_name,
            'openscap-proxy-id': default_proxy,
            'organization': module_org.name,
            'ansible-role-ids': role_id,
        })
        job_id = Host.ansible_roles_play({'name': vm.hostname.lower()})[0].get('id')
        wait_for_tasks(
            f'resource_type = JobInvocation and resource_id = {job_id} and action ~ "hosts job"'
        )
        try:
            result = JobInvocation.info({'id': job_id})['success']
            assert result == '1'
        except AssertionError:
            output = ' '.join(
                JobInvocation.get_output({
                    'id': job_id,
                    'host': vm.hostname
                }))
            result = f'host output: {output}'
            raise AssertionError(result)
        result = vm.run(
            'cat /etc/foreman_scap_client/config.yaml | grep profile')
        assert result.status == 0
        # Runs the actual oscap scan on the vm/clients and
        # uploads report to Internal Capsule.
        vm.execute_foreman_scap_client()
        # Assert whether oscap reports are uploaded to
        # Satellite6.
        result = Arfreport.list({'search': f'host={vm.hostname.lower()}'})
        assert result is not None
Example 22
def test_rhcloud_insights_e2e(
    session,
    rhel8_insights_vm,
    fixable_rhel8_vm,
    organization_ak_setup,
    unset_rh_cloud_token,
):
    """Synchronize hits data from cloud, verify it is displayed in Satellite and run remediation.

    :id: d952e83c-3faf-4299-a048-2eb6ccb8c9c2

    :Steps:
        1. Prepare misconfigured machine and upload its data to Insights.
        2. Add Cloud API key in Satellite.
        3. In Satellite UI, Configure -> Insights -> Add RH Cloud token and sync recommendations.
        4. Run remediation for dnf.conf recommendation against rhel8 host.
        5. Assert that job completed successfully.
        6. Sync Insights recommendations.
        7. Search for previously remediated issue.

    :expectedresults:
        1. Insights recommendation related to dnf.conf issue is listed for misconfigured machine.
        2. Remediation job finished successfully.
        3. Insights recommendation related to dnf.conf issue is not listed.

    :CaseAutomation: Automated
    """
    org, ak = organization_ak_setup
    query = 'dnf.conf'
    job_query = (
        f'Remote action: Insights remediations for selected issues on {rhel8_insights_vm.hostname}'
    )
    with session:
        session.organization.select(org_name=org.name)
        session.location.select(loc_name=DEFAULT_LOC)
        session.cloudinsights.save_token_sync_hits(settings.rh_cloud.token)
        timestamp = datetime.utcnow().strftime('%Y-%m-%d %H:%M')
        wait_for_tasks(
            search_query=f'Insights full sync and started_at >= "{timestamp}"',
            search_rate=15,
            max_tries=10,
        )
        # Workaround for alert message causing search to fail. See airgun issue 584.
        session.browser.refresh()
        result = session.cloudinsights.search(query)[0]
        assert result['Hostname'] == rhel8_insights_vm.hostname
        assert result['Recommendation'] == (
            'The dnf installs lower versions of packages when the '
            '"best" option is not present in the /etc/dnf/dnf.conf'
        )
        timestamp = datetime.utcnow().strftime('%Y-%m-%d %H:%M')
        session.cloudinsights.remediate(query)
        result = wait_for_tasks(
            search_query=f'{job_query} and started_at >= "{timestamp}"',
            search_rate=15,
            max_tries=10,
        )
        task_output = entities.ForemanTask().search(query={'search': result[0].id})
        assert (
            task_output[0].result == 'success'
        ), f'result: {result}\n task_output: {task_output}'
        timestamp = datetime.utcnow().strftime('%Y-%m-%d %H:%M')
        session.cloudinsights.sync_hits()
        wait_for_tasks(
            search_query=f'Insights full sync and started_at >= "{timestamp}"',
            search_rate=15,
            max_tries=10,
        )
        # Workaround for alert message causing search to fail. See airgun issue 584.
        session.browser.refresh()
        assert not session.cloudinsights.search(query)
Example 23
def test_module_status_update_without_force_upload_package_profile(
        session, vm, vm_module_streams):
    """ Verify you do not have to run dnf upload-profile or restart rhsmcertd
    to update the module stream status to Satellite and that the web UI will also be updated.

    :id: 16675b57-71c2-4aee-950b-844aa32002d1

    :expectedresults: module stream status should get updated in Satellite

    :CaseLevel: System

    :CaseImportance: Medium
    """
    with session:
        # Ensure content host is searchable
        assert session.contenthost.search(
            vm.hostname)[0]['Name'] == vm.hostname
        module_name = "walrus"
        stream_version = "0.71"
        profile = "flipper"
        # reset walrus module streams
        run_remote_command_on_content_host(
            'dnf module reset {} -y'.format(module_name), vm_module_streams)
        # make a note of time for later CLI wait_for_tasks, and include
        # 5 mins margin of safety.
        timestamp = (datetime.utcnow() -
                     timedelta(minutes=5)).strftime("%Y-%m-%d %H:%M")
        # install walrus module stream with flipper profile
        run_remote_command_on_content_host(
            'dnf module install {}:{}/{} -y'.format(module_name,
                                                    stream_version, profile),
            vm_module_streams)
        # Wait for upload profile event (in case Satellite system slow)
        host = entities.Host().search(
            query={'search': 'name={}'.format(vm.hostname)})
        wait_for_tasks(
            search_query='label = Actions::Katello::Host::UploadProfiles'
            ' and resource_id = {}'
            ' and started_at >= "{}"'.format(host[0].id, timestamp),
            search_rate=15,
            max_tries=10,
        )
        # Check web UI for the new module stream version
        module_stream = session.contenthost.search_module_stream(
            vm_module_streams.hostname,
            FAKE_2_CUSTOM_PACKAGE_NAME,
            status='Installed',
            stream_version=stream_version)
        assert module_stream[0]['Name'] == FAKE_2_CUSTOM_PACKAGE_NAME
        assert module_stream[0]['Stream'] == stream_version
        assert module_stream[0]['Installed Profile'] == profile

        # remove walrus module stream with flipper profile
        run_remote_command_on_content_host(
            'dnf module remove {}:{}/{} -y'.format(module_name, stream_version,
                                                   profile), vm_module_streams)
        assert not session.contenthost.search_module_stream(
            vm_module_streams.hostname,
            FAKE_2_CUSTOM_PACKAGE_NAME,
            status='Installed',
            stream_version=stream_version)
Example 24
    def setUp(self):
        """Creates the pre-requisites for the Incremental updates that used per
        each test"""
        super(IncrementalUpdateTestCase, self).setUp()
        # Create a content view that will be used with filtered errata
        self.rhel_6_partial_cv = entities.ContentView(
            organization=self.org,
            name=gen_alpha(),
            repository=[self.rhva_6_repo, self.rhel6_sat6tools_repo]
        ).create()

        # Create a content view filter to filter out errata
        rhel_6_partial_cvf = entities.ErratumContentViewFilter(
            content_view=self.rhel_6_partial_cv,
            type='erratum',
            name='rhel_6_partial_cv_filter',
            repository=[self.rhva_6_repo]
        ).create()

        # Create a content view filter rule - filtering out errata in the last
        # 365 days
        start_date = (date.today() - timedelta(days=365)).strftime('%Y-%m-%d')
        entities.ContentViewFilterRule(
            content_view_filter=rhel_6_partial_cvf,
            types=['security', 'enhancement', 'bugfix'],
            start_date=start_date,
            end_date=date.today().strftime('%Y-%m-%d')
        ).create()

        # Publish content view and re-read it

        self.rhel_6_partial_cv.publish()
        self.rhel_6_partial_cv = self.rhel_6_partial_cv.read()

        # Promote content view to 'DEV' and 'QE'
        assert len(self.rhel_6_partial_cv.version) == 1
        for env in (self.dev_lce, self.qe_lce):
            promote(self.rhel_6_partial_cv.version[0], env.id)

        # Create host collection
        self.rhel_6_partial_hc = entities.HostCollection(
            organization=self.org, name=gen_alpha(), max_hosts=5).create()

        # Create activation key for content view
        kwargs = {'organization': self.org, 'environment': self.qe_lce.id}
        rhel_6_partial_ak = entities.ActivationKey(
            name=gen_alpha(),
            content_view=self.rhel_6_partial_cv,
            host_collection=[self.rhel_6_partial_hc],
            **kwargs
        ).create()

        # Assign subscription to activation key. Fetch available subscriptions
        subs = entities.Subscription(organization=self.org).search()
        assert len(subs) > 0

        # Add subscription to activation key
        sub_found = False
        for sub in subs:
            if sub.read_json()['product_name'] == DEFAULT_SUBSCRIPTION_NAME:
                rhel_6_partial_ak.add_subscriptions(data={
                    u'subscription_id': sub.id
                })
                sub_found = True
        assert sub_found

        # Enable product content in activation key
        rhel_6_partial_ak.content_override(data={'content_override': {
            u'content_label': REPOS['rhst6']['id'],
            u'value': u'1'
        }})

        # Create client machine and register it to satellite with
        # rhel_6_partial_ak
        self.vm = VirtualMachine(distro=DISTRO_RHEL6, tag='incupdate')
        self.addCleanup(vm_cleanup, self.vm)
        self.setup_vm(self.vm, rhel_6_partial_ak.name, self.org.label)
        self.vm.enable_repo(REPOS['rhva6']['id'])
        timestamp = datetime.utcnow().strftime('%Y-%m-%d %H:%M')
        self.vm.run('yum install -y {0}'.format(REAL_0_RH_PACKAGE))

        # Find the content host and ensure that the tasks started by the
        # package installation have finished
        host = entities.Host().search(
            query={'search': 'name={}'.format(self.vm.hostname)})
        wait_for_tasks(
            search_query='label = Actions::Katello::Host::UploadPackageProfile'
                         ' and resource_id = {}'
                         ' and started_at >= "{}"'.format(host[0].id, timestamp)
        )
Example 25
def test_obfuscate_host_ipv4_addresses(
    inventory_settings, organization_ak_setup, registered_hosts, session
):
    """Test whether `Obfuscate host ipv4 addresses` setting works as expected.

    :id: c0fc4ee9-a6a1-42c0-83f0-0f131ca9ab41

    :customerscenario: true

    :Steps:

        1. Prepare machine and upload its data to Insights
        2. Add Cloud API key in Satellite
        3. Go to Configure > Inventory upload > enable “Obfuscate host ipv4 addresses” setting.
        4. Generate report after enabling the setting.
        5. Check if hosts ipv4 addresses are obfuscated in generated reports.
        6. Disable previous setting.
        7. Go to Administer > Settings > RH Cloud and enable "Obfuscate IPs" setting.
        8. Generate report after enabling the setting.
        9. Check if hosts ipv4 addresses are obfuscated in generated reports.

    :expectedresults:
        1. Obfuscated host ipv4 addresses in generated reports.

    :BZ: 1852594, 1889690

    :CaseAutomation: Automated
    """
    org, ak = organization_ak_setup
    virtual_host, baremetal_host = registered_hosts
    with session:
        session.organization.select(org_name=org.name)
        # Enable obfuscate_ips setting on inventory page.
        session.cloudinventory.update({'obfuscate_ips': True})
        timestamp = (datetime.utcnow() - timedelta(minutes=2)).strftime('%Y-%m-%d %H:%M')
        session.cloudinventory.generate_report(org.name)
        # Wait for the report generation task to finish.
        wait_for_tasks(
            search_query='label = ForemanInventoryUpload::Async::GenerateReportJob'
            f' and started_at >= "{timestamp}"',
            search_rate=15,
            max_tries=10,
        )
        report_path = session.cloudinventory.download_report(org.name)
        inventory_data = session.cloudinventory.read(org.name)
        # Assert that obfuscate_ips is enabled.
        assert inventory_data['obfuscate_ips'] is True
        # Assert that generated archive is valid.
        common_assertion(report_path, inventory_data, org)
        # Get report data for assertion
        json_data = get_report_data(report_path)
        hostnames = [host['fqdn'] for host in json_data['hosts']]
        assert virtual_host.hostname in hostnames
        assert baremetal_host.hostname in hostnames
        # Assert that ip_addresses are obfuscated from report.
        ip_addresses = [
            host['system_profile']['network_interfaces'][0]['ipv4_addresses'][0]
            for host in json_data['hosts']
        ]
        ipv4_addresses = [host['ip_addresses'][0] for host in json_data['hosts']]
        assert virtual_host.ip_addr not in ip_addresses
        assert baremetal_host.ip_addr not in ip_addresses
        assert virtual_host.ip_addr not in ipv4_addresses
        assert baremetal_host.ip_addr not in ipv4_addresses
        # Disable obfuscate_ips setting on inventory page.
        session.cloudinventory.update({'obfuscate_ips': False})

        # Enable obfuscate_inventory_ips setting.
        setting_update('obfuscate_inventory_ips', True)
        timestamp = (datetime.utcnow() - timedelta(minutes=2)).strftime('%Y-%m-%d %H:%M')
        session.cloudinventory.generate_report(org.name)
        # Wait for the report generation task to finish.
        wait_for_tasks(
            search_query='label = ForemanInventoryUpload::Async::GenerateReportJob'
            f' and started_at >= "{timestamp}"',
            search_rate=15,
            max_tries=10,
        )
        report_path = session.cloudinventory.download_report(org.name)
        inventory_data = session.cloudinventory.read(org.name)

        assert inventory_data['obfuscate_ips'] is True
        # Get report data for assertion
        json_data = get_report_data(report_path)
        hostnames = [host['fqdn'] for host in json_data['hosts']]
        assert virtual_host.hostname in hostnames
        assert baremetal_host.hostname in hostnames
        ip_addresses = [
            host['system_profile']['network_interfaces'][0]['ipv4_addresses'][0]
            for host in json_data['hosts']
        ]
        ipv4_addresses = [host['ip_addresses'][0] for host in json_data['hosts']]
        assert virtual_host.ip_addr not in ip_addresses
        assert baremetal_host.ip_addr not in ip_addresses
        assert virtual_host.ip_addr not in ipv4_addresses
        assert baremetal_host.ip_addr not in ipv4_addresses
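The obfuscation check above runs twice with identical bodies. A factored sketch covering both places the report can leak an address, assuming host objects exposing `ip_addr` as in the test:

def assert_ips_obfuscated(json_data, *hosts):
    # The first NIC in system_profile and the top-level ip_addresses list,
    # as asserted above.
    nic_ips = [
        host['system_profile']['network_interfaces'][0]['ipv4_addresses'][0]
        for host in json_data['hosts']
    ]
    top_ips = [host['ip_addresses'][0] for host in json_data['hosts']]
    for host in hosts:
        assert host.ip_addr not in nic_ips
        assert host.ip_addr not in top_ips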
Example 26
    def test_positive_oscap_run_with_tailoring_file_with_ansible(self):
        """End-to-End Oscap run with tailoring files via ansible

        :id: c7ea56eb-6cf1-4e79-8d6a-fb872d1bb804

        :setup: scap content, scap policy, tailoring file, host group

        :steps:

            1. Create a valid scap content
            2. Upload a valid tailoring file
            3. Import Ansible role theforeman.foreman_scap_client
            4. Import Ansible Variables needed for the role
            5. Create a scap policy with ansible as the deploy option
            6. Associate scap content with its tailoring file
            7. Associate the policy with a hostgroup
            8. Provision a host using the hostgroup
            9. Configure REX and associate the Ansible role to created host
            10. Play roles for the host

        :expectedresults: REX job should succeed and the ARF report sent to
            Satellite should reflect the changes done via the tailoring file

        :BZ: 1716307

        :CaseImportance: Critical
        """
        if settings.rhel7_repo is None:
            self.skipTest('Missing configuration for rhel7_repo')
        rhel7_repo = settings.rhel7_repo
        hgrp7_name = gen_string('alpha')
        policy_values = {
            'content': self.rhel7_content,
            'hgrp': hgrp7_name,
            'policy': gen_string('alpha'),
            'profile': OSCAP_PROFILE['security7'],
        }
        vm_values = {
            'distro': DISTRO_RHEL7,
            'hgrp': hgrp7_name,
            'rhel_repo': rhel7_repo
        }
        tailoring_file_name = gen_string('alpha')
        tailor_path = file_downloader(file_url=settings.oscap.tailoring_path,
                                      hostname=settings.server.hostname)[0]
        # Creates host_group for rhel7
        make_hostgroup({
            'content-source-id': self.proxy_id,
            'name': hgrp7_name,
            'organizations': self.config_env['org_name'],
        })

        tailor_result = make_tailoringfile({
            'name': tailoring_file_name,
            'scap-file': tailor_path,
            'organization': self.config_env['org_name'],
        })
        result = TailoringFiles.info({'name': tailoring_file_name})
        assert result['name'] == tailoring_file_name
        # Creates oscap_policy for rhel7.
        scap_id, scap_profile_id = self.fetch_scap_and_profile_id(
            policy_values.get('content'), policy_values.get('profile'))
        Ansible.roles_import({'proxy-id': self.proxy_id})
        Ansible.variables_import({'proxy-id': self.proxy_id})
        role_id = Ansible.roles_list({'search': 'foreman_scap_client'})[0].get('id')
        make_scap_policy({
            'scap-content-id': scap_id,
            'hostgroups': policy_values.get('hgrp'),
            'deploy-by': 'ansible',
            'name': policy_values.get('policy'),
            'period': OSCAP_PERIOD['weekly'].lower(),
            'scap-content-profile-id': scap_profile_id,
            'weekday': OSCAP_WEEKDAY['friday'].lower(),
            'tailoring-file-id': tailor_result['id'],
            'tailoring-file-profile-id': tailor_result['tailoring-file-profiles'][0]['id'],
            'organizations': self.config_env['org_name'],
        })
        distro_os = vm_values.get('distro')
        with VirtualMachine(distro=distro_os) as vm:
            host_name, _, host_domain = vm.hostname.partition('.')
            vm.install_katello_ca()
            vm.register_contenthost(self.config_env['org_name'],
                                    self.config_env['ak_name'].get(distro_os))
            assert vm.subscribed
            Host.set_parameter({
                'host': vm.hostname.lower(),
                'name': 'remote_execution_connect_by_ip',
                'value': 'True',
            })
            vm.configure_rhel_repo(settings.rhel7_repo)
            add_remote_execution_ssh_key(vm.ip_addr)
            Host.update({
                'name': vm.hostname.lower(),
                'lifecycle-environment': self.config_env['env_name'],
                'content-view': self.config_env['cv_name'],
                'hostgroup': vm_values.get('hgrp'),
                'openscap-proxy-id': self.proxy_id,
                'organization': self.config_env['org_name'],
                'ansible-role-ids': role_id,
            })
            job_id = Host.ansible_roles_play({'name': vm.hostname.lower()})[0].get('id')
            wait_for_tasks(
                f'resource_type = JobInvocation and resource_id = {job_id} '
                'and action ~ "hosts job"'
            )
            try:
                result = JobInvocation.info({'id': job_id})['success']
                assert result == '1'
            except AssertionError:
                output = ' '.join(
                    JobInvocation.get_output({'id': job_id, 'host': vm.hostname})
                )
                result = f'host output: {output}'
                raise AssertionError(result)
            result = vm.run('grep profile /etc/foreman_scap_client/config.yaml')
            assert result.return_code == 0
            # Runs the actual oscap scan on the vm/clients and
            # uploads report to Internal Capsule.
            vm.execute_foreman_scap_client()
            # Assert that the OpenSCAP report was uploaded to Satellite.
            result = Arfreport.list({'search': f'host={vm.hostname.lower()}'})
            assert result is not None
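
Several of these examples block on `wait_for_tasks` until a Foreman task finishes. As a rough illustration of what such a poll loop does, here is a minimal sketch; it is not the robottelo implementation, and the helper name `poll_foreman_tasks` plus the `api_client.foreman_task_search` call are assumptions.

import time

def poll_foreman_tasks(api_client, search_query, poll_rate=5, poll_timeout=300):
    """Minimal sketch of a task poll loop (hypothetical helper).

    Repeatedly searches for tasks matching `search_query` and returns them
    once every match has stopped, or raises once `poll_timeout` is exceeded.
    """
    deadline = time.time() + poll_timeout
    while time.time() < deadline:
        tasks = api_client.foreman_task_search(search_query)  # assumed client call
        if tasks and all(task['state'] == 'stopped' for task in tasks):
            return tasks
        time.sleep(poll_rate)
    raise TimeoutError(f'tasks matching {search_query!r} did not finish in {poll_timeout}s')
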
Esempio n. 27
0
def test_exclude_packages_setting(
    inventory_settings, organization_ak_setup, registered_hosts, session
):
    """Test whether `Exclude Packages` setting works as expected.

    :id: 646093fa-fdd6-4f70-82aa-725e31fa3f12

    :customerscenario: true

    :Steps:

        1. Prepare machine and upload its data to Insights
        2. Add Cloud API key in Satellite
        3. Go to Configure > Inventory upload > enable "Exclude Packages" setting.
        4. Generate report after enabling the setting.
        5. Check if packages are excluded from generated reports.
        6. Disable previous setting.
        7. Go to Administer > Settings > RH Cloud and enable
            "Don't upload installed packages" setting.
        8. Generate report after enabling the setting.
        9. Check if packages are excluded from generated reports.

    :expectedresults:
        1. Packages are excluded from reports generated.

    :BZ: 1852594

    :CaseAutomation: Automated
    """
    org, ak = organization_ak_setup
    virtual_host, baremetal_host = registered_hosts
    with session:
        session.organization.select(org_name=org.name)
        # Enable exclude_packages setting on inventory page.
        session.cloudinventory.update({'exclude_packages': True})
        timestamp = (datetime.utcnow() - timedelta(minutes=2)).strftime('%Y-%m-%d %H:%M')
        session.cloudinventory.generate_report(org.name)
        wait_for_tasks(
            search_query='label = ForemanInventoryUpload::Async::GenerateReportJob'
            f' and started_at >= "{timestamp}"',
            search_rate=15,
            max_tries=10,
        )
        report_path = session.cloudinventory.download_report(org.name)
        inventory_data = session.cloudinventory.read(org.name)
        assert inventory_data['exclude_packages'] is True
        # Disable exclude_packages setting on inventory page.
        session.cloudinventory.update({'exclude_packages': False})
        # Assert that generated archive is valid.
        common_assertion(report_path, inventory_data, org)
        # Get report data for assertion
        json_data = get_report_data(report_path)
        # Assert that right hosts are present in report.
        hostnames = [host['fqdn'] for host in json_data['hosts']]
        assert virtual_host.hostname in hostnames
        assert baremetal_host.hostname in hostnames
        # Assert that packages are excluded from report
        all_host_profiles = [host['system_profile'] for host in json_data['hosts']]
        for host_profiles in all_host_profiles:
            assert 'installed_packages' not in host_profiles

        # Enable exclude_installed_packages setting.
        setting_update('exclude_installed_packages', True)
        timestamp = (datetime.utcnow() - timedelta(minutes=2)).strftime('%Y-%m-%d %H:%M')
        session.cloudinventory.generate_report(org.name)
        wait_for_tasks(
            search_query='label = ForemanInventoryUpload::Async::GenerateReportJob'
            f' and started_at >= "{timestamp}"',
            search_rate=15,
            max_tries=10,
        )
        report_path = session.cloudinventory.download_report(org.name)
        inventory_data = session.cloudinventory.read(org.name)
        assert inventory_data['exclude_packages'] is True
        json_data = get_report_data(report_path)
        hostnames = [host['fqdn'] for host in json_data['hosts']]
        assert virtual_host.hostname in hostnames
        assert baremetal_host.hostname in hostnames
        all_host_profiles = [host['system_profile'] for host in json_data['hosts']]
        for host_profiles in all_host_profiles:
            assert 'installed_packages' not in host_profiles
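
The timestamp-scoped `wait_for_tasks` call appears twice in this test; filtering on `started_at` keeps the search from matching report-generation jobs left over from earlier runs. That pattern could be factored into a small helper along these lines — a sketch only, with the name `wait_for_report_task` being hypothetical:

from datetime import datetime, timedelta

def wait_for_report_task(started_after, search_rate=15, max_tries=10):
    """Hypothetical helper: wait only for GenerateReportJob tasks started
    after the `started_after` timestamp string."""
    wait_for_tasks(
        search_query='label = ForemanInventoryUpload::Async::GenerateReportJob'
        f' and started_at >= "{started_after}"',
        search_rate=search_rate,
        max_tries=max_tries,
    )

# Usage mirrors the test above: capture the timestamp, trigger the report, then wait.
timestamp = (datetime.utcnow() - timedelta(minutes=2)).strftime('%Y-%m-%d %H:%M')
session.cloudinventory.generate_report(org.name)  # session/org as in the test
wait_for_report_task(timestamp)
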
Esempio n. 28
0
def test_positive_CRUD(module_org):
    """Check if product can be created, updated, synchronized and deleted

    :id: 9d7b5ec8-59d0-4371-b5d2-d43145e4e2db

    :expectedresults: Product is created, updated, synchronized and deleted

    :BZ: 1422552

    :CaseImportance: Critical
    """
    desc = list(valid_data_list().values())[0]
    gpg_key = make_content_credential({'organization-id': module_org.id})
    name = list(valid_data_list().values())[0]
    label = valid_labels_list()[0]
    sync_plan = make_sync_plan({'organization-id': module_org.id})
    product = make_product(
        {
            'description': desc,
            'gpg-key-id': gpg_key['id'],
            'label': label,
            'name': name,
            'organization-id': module_org.id,
            'sync-plan-id': sync_plan['id'],
        },
    )
    assert product['name'] == name
    assert len(product['label']) > 0
    assert product['label'] == label
    assert product['description'] == desc
    assert product['gpg']['gpg-key-id'] == gpg_key['id']
    assert product['sync-plan-id'] == sync_plan['id']

    # update
    desc = list(valid_data_list().values())[0]
    new_gpg_key = make_content_credential({'organization-id': module_org.id})
    new_sync_plan = make_sync_plan({'organization-id': module_org.id})
    new_prod_name = gen_string('alpha', 8)
    Product.update(
        {
            'description': desc,
            'id': product['id'],
            'gpg-key-id': new_gpg_key['id'],
            'sync-plan-id': new_sync_plan['id'],
            'name': new_prod_name,
        }
    )
    product = Product.info({'id': product['id'], 'organization-id': module_org.id})
    assert product['name'] == new_prod_name
    assert product['description'] == desc
    assert product['gpg']['gpg-key-id'] == new_gpg_key['id']
    assert product['gpg']['gpg-key-id'] != gpg_key['id']
    assert product['sync-plan-id'] == new_sync_plan['id']
    assert product['sync-plan-id'] != sync_plan['id']

    # synchronize
    repo = make_repository(
        {
            'organization-id': module_org.id,
            'product-id': product['id'],
            'url': settings.repos.yum_0.url,
        },
    )
    Product.synchronize({'id': product['id'], 'organization-id': module_org.id})
    packages = Package.list({'product-id': product['id']})
    repo = Repository.info({'id': repo['id']})
    assert int(repo['content-counts']['packages']) == len(packages)
    assert len(packages) == FAKE_0_YUM_REPO_PACKAGES_COUNT

    # delete
    Product.remove_sync_plan({'id': product['id']})
    product = Product.info({'id': product['id'], 'organization-id': module_org.id})
    assert len(product['sync-plan-id']) == 0
    Product.delete({'id': product['id']})
    wait_for_tasks(
        search_query="label = Actions::Katello::Product::Destroy"
        f" and resource_id = {product['id']}",
        max_tries=10,
    )
    with pytest.raises(CLIReturnCodeError):
        Product.info({'id': product['id'], 'organization-id': module_org.id})
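
The delete step only asserts after waiting for the `Product::Destroy` task; without that wait, `Product.info` can still return the record while deletion is in flight. Factored out, the pattern looks roughly like this (the helper name `assert_resource_destroyed` is hypothetical; `wait_for_tasks`, `CLIReturnCodeError`, and `pytest` are the same names used in the tests above):

import pytest

def assert_resource_destroyed(task_label, resource_id, info_callable, max_tries=10):
    """Hypothetical helper: wait for a Destroy task, then assert the lookup fails."""
    wait_for_tasks(
        search_query=f'label = {task_label} and resource_id = {resource_id}',
        max_tries=max_tries,
    )
    with pytest.raises(CLIReturnCodeError):
        info_callable({'id': resource_id})

# e.g. assert_resource_destroyed('Actions::Katello::Product::Destroy', product['id'], Product.info)
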
Esempio n. 29
0
    def setUp(self):
        """Creates the pre-requisites for the Incremental updates that used per
        each test"""
        super(IncrementalUpdateTestCase, self).setUp()
        # Create a content view that will be used to filter errata
        self.rhel_6_partial_cv = entities.ContentView(
            organization=self.org,
            name=gen_alpha(),
            repository=[self.rhva_6_repo, self.rhel6_sat6tools_repo]).create()

        # Create a content view filter to filter out errata
        rhel_6_partial_cvf = entities.ErratumContentViewFilter(
            content_view=self.rhel_6_partial_cv,
            type='erratum',
            name='rhel_6_partial_cv_filter',
            repository=[self.rhva_6_repo]).create()

        # Create a content view filter rule - filtering out errata in the last
        # 365 days
        start_date = (date.today() - timedelta(days=365)).strftime('%Y-%m-%d')
        entities.ContentViewFilterRule(
            content_view_filter=rhel_6_partial_cvf,
            types=['security', 'enhancement', 'bugfix'],
            start_date=start_date,
            end_date=date.today().strftime('%Y-%m-%d')).create()

        # Publish content view and re-read it
        self.rhel_6_partial_cv.publish()
        self.rhel_6_partial_cv = self.rhel_6_partial_cv.read()

        # Promote content view to 'DEV' and 'QE'
        assert len(self.rhel_6_partial_cv.version) == 1
        for env in (self.dev_lce, self.qe_lce):
            promote(self.rhel_6_partial_cv.version[0], env.id)

        # Create host collection
        self.rhel_6_partial_hc = entities.HostCollection(organization=self.org,
                                                         name=gen_alpha(),
                                                         max_hosts=5).create()

        # Create activation key for content view
        kwargs = {'organization': self.org, 'environment': self.qe_lce.id}
        rhel_6_partial_ak = entities.ActivationKey(
            name=gen_alpha(),
            content_view=self.rhel_6_partial_cv,
            host_collection=[self.rhel_6_partial_hc],
            **kwargs).create()

        # Fetch the available subscriptions for the organization
        subs = entities.Subscription(organization=self.org).search()
        assert len(subs) > 0

        # Add subscription to activation key
        sub_found = False
        for sub in subs:
            if sub.read_json()['product_name'] == DEFAULT_SUBSCRIPTION_NAME:
                rhel_6_partial_ak.add_subscriptions(
                    data={u'subscription_id': sub.id})
                sub_found = True
        assert sub_found

        # Enable product content in activation key
        rhel_6_partial_ak.content_override(
            data={
                'content_override': {
                    u'content_label': REPOS['rhst6']['id'],
                    u'value': u'1'
                }
            })

        # Create client machine and register it to satellite with
        # rhel_6_partial_ak
        self.vm = VirtualMachine(distro=DISTRO_RHEL6, tag='incupdate')
        self.addCleanup(vm_cleanup, self.vm)
        self.setup_vm(self.vm, rhel_6_partial_ak.name, self.org.label)
        self.vm.enable_repo(REPOS['rhva6']['id'])
        timestamp = datetime.utcnow()
        self.vm.run('yum install -y {0}'.format(REAL_0_RH_PACKAGE))

        # Find the content host and ensure that tasks started by the package
        # installation have finished
        host = entities.Host().search(
            query={'search': 'name={}'.format(self.vm.hostname)})
        wait_for_tasks(
            search_query='label = Actions::Katello::Host::UploadPackageProfile'
            ' and resource_id = {}'
            ' and started_at >= "{}"'.format(host[0].id, timestamp))
        # Force host to generate or refresh errata applicability
        call_entity_method_with_timeout(host[0].errata_applicability,
                                        timeout=600)
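
`call_entity_method_with_timeout` caps how long the applicability call may block. A minimal sketch of such a wrapper, assuming nailgun's module-level `entity_mixins.TASK_TIMEOUT` setting (the actual robottelo helper may differ in detail):

from nailgun import entity_mixins

def call_entity_method_with_timeout(entity_callable, timeout=300, **kwargs):
    """Temporarily raise nailgun's task timeout while calling an entity method."""
    original_timeout = entity_mixins.TASK_TIMEOUT
    entity_mixins.TASK_TIMEOUT = timeout
    try:
        return entity_callable(**kwargs)
    finally:
        # Restore the global even if the call raises.
        entity_mixins.TASK_TIMEOUT = original_timeout
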
Esempio n. 30
0
    def test_positive_CRUD(self):
        """Check if product can be created, updated, synchronized and deleted

        :id: 9d7b5ec8-59d0-4371-b5d2-d43145e4e2db

        :expectedresults: Product is created, updated, synchronized and deleted

        :BZ: 1422552

        :CaseImportance: Critical
        """
        desc = list(valid_data_list().values())[0]
        gpg_key = make_gpg_key({'organization-id': self.org['id']})
        name = list(valid_data_list().values())[0]
        label = valid_labels_list()[0]
        sync_plan = make_sync_plan({'organization-id': self.org['id']})
        product = make_product({
            'description': desc,
            'gpg-key-id': gpg_key['id'],
            'label': label,
            'name': name,
            'organization-id': self.org['id'],
            'sync-plan-id': sync_plan['id'],
        })
        self.assertEqual(product['name'], name)
        self.assertGreater(len(product['label']), 0)
        self.assertEqual(product['label'], label)
        self.assertEqual(product['description'], desc)
        self.assertEqual(product['gpg']['gpg-key-id'], gpg_key['id'])
        self.assertEqual(product['sync-plan-id'], sync_plan['id'])

        # update
        desc = list(valid_data_list().values())[0]
        new_gpg_key = make_gpg_key({'organization-id': self.org['id']})
        new_sync_plan = make_sync_plan({'organization-id': self.org['id']})
        new_prod_name = gen_string('alpha', 8)
        Product.update({
            'description': desc,
            'id': product['id'],
            'gpg-key-id': new_gpg_key['id'],
            'sync-plan-id': new_sync_plan['id'],
            'name': new_prod_name,
        })
        product = Product.info({
            'id': product['id'],
            'organization-id': self.org['id']
        })
        self.assertEqual(product['name'], new_prod_name)
        self.assertEqual(product['description'], desc)
        self.assertEqual(product['gpg']['gpg-key-id'], new_gpg_key['id'])
        self.assertNotEqual(product['gpg']['gpg-key-id'], gpg_key['id'])
        self.assertEqual(product['sync-plan-id'], new_sync_plan['id'])
        self.assertNotEqual(product['sync-plan-id'], sync_plan['id'])

        # synchronize
        repo = make_repository({
            'product-id': product['id'],
            'url': FAKE_0_YUM_REPO
        })
        Product.synchronize({
            'id': product['id'],
            'organization-id': self.org['id']
        })
        packages = Package.list({'product-id': product['id']})
        repo = Repository.info({'id': repo['id']})
        self.assertEqual(int(repo['content-counts']['packages']),
                         len(packages))
        self.assertEqual(len(packages), FAKE_0_YUM_REPO_PACKAGES_COUNT)

        # delete
        Product.remove_sync_plan({'id': product['id']})
        product = Product.info({
            'id': product['id'],
            'organization-id': self.org['id']
        })
        self.assertEqual(len(product['sync-plan-id']), 0)
        Product.delete({'id': product['id']})
        wait_for_tasks(
            search_query='label = Actions::Katello::Product::Destroy'
            ' and resource_id = {}'.format(product['id']),
            max_tries=10,
        )
        with self.assertRaises(CLIReturnCodeError):
            Product.info({
                'id': product['id'],
                'organization-id': self.org['id']
            })
Esempio n. 31
0
def wait_for_virtwho_report_task(config_id, poll_timeout=600, poll_rate=30):
    search = (
        'label=Actions::Katello::Host::Hypervisors '
        f'and user=virt_who_reporter_{config_id}'
    )
    return wait_for_tasks(search, poll_timeout=poll_timeout, poll_rate=poll_rate)
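
A call site might look like this; the config id and timeout are illustrative:

# Wait up to 15 minutes for the virt-who report for config 42 to be processed.
wait_for_virtwho_report_task(config_id=42, poll_timeout=900)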