Example #1
    def test_positive_create_with_properties(self, module_org,
                                             module_location):
        """Create a hostgroup with properties

        :id: 528afd01-356a-4082-9e88-a5b2a715a792

        :expectedresults: A hostgroup is created with expected properties,
            updated and deleted

        :CaseLevel: Integration

        :CaseImportance: High
        """
        env = entities.Environment(location=[module_location],
                                   organization=[module_org]).create()
        parent_hostgroup = entities.HostGroup(
            location=[module_location], organization=[module_org]
        ).create()
        arch = entities.Architecture().create()
        ptable = entities.PartitionTable().create()
        os = entities.OperatingSystem(architecture=[arch],
                                      ptable=[ptable]).create()
        media = entities.Media(operatingsystem=[os],
                               location=[module_location],
                               organization=[module_org]).create()
        proxy = entities.SmartProxy().search(
            query={'search': f'url = https://{settings.server.hostname}:9090'}
        )[0]
        subnet = entities.Subnet(location=[module_location],
                                 organization=[module_org]).create()
        domain = entities.Domain(location=[module_location],
                                 organization=[module_org]).create()
        content_view = entities.ContentView(organization=module_org).create()
        content_view.publish()
        content_view = content_view.read()
        lce = entities.LifecycleEnvironment(organization=module_org).create()
        promote(content_view.version[0], lce.id)
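        # Create a hostgroup tied to all of the entities created above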
        hostgroup = entities.HostGroup(
            architecture=arch,
            content_source=proxy,
            content_view=content_view,
            domain=domain,
            environment=env,
            lifecycle_environment=lce,
            location=[module_location],
            medium=media,
            operatingsystem=os,
            organization=[module_org],
            parent=parent_hostgroup,
            ptable=ptable,
            puppet_ca_proxy=proxy,
            puppet_proxy=proxy,
            subnet=subnet,
        ).create()
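        # Verify that all associations were set as expected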
        assert hostgroup.environment.read().name == env.name
        assert hostgroup.parent.read().name == parent_hostgroup.name
        assert hostgroup.architecture.read().name == arch.name
        assert hostgroup.operatingsystem.read().name == os.name
        assert hostgroup.medium.read().name == media.name
        assert hostgroup.ptable.read().name == ptable.name
        assert hostgroup.puppet_ca_proxy.read().name == proxy.name
        assert hostgroup.subnet.read().name == subnet.name
        assert hostgroup.domain.read().name == domain.name
        assert hostgroup.puppet_proxy.read().name == proxy.name
        assert hostgroup.content_source.read().name == proxy.name
        assert hostgroup.content_view.read().name == content_view.name
        assert hostgroup.lifecycle_environment.read().name == lce.name

        # create new properties for update
        new_org = entities.Organization().create()
        new_loc = entities.Location(organization=[new_org]).create()
        new_arch = entities.Architecture().create()
        new_ptable = entities.PartitionTable().create()
        new_parent = entities.HostGroup(location=[new_loc],
                                        organization=[new_org]).create()
        new_env = entities.Environment(location=[new_loc],
                                       organization=[new_org]).create()
        new_os = entities.OperatingSystem(architecture=[new_arch],
                                          ptable=[new_ptable]).create()
        new_subnet = entities.Subnet(location=[new_loc],
                                     organization=[new_org]).create()
        new_domain = entities.Domain(location=[new_loc],
                                     organization=[new_org]).create()
        new_cv = entities.ContentView(organization=new_org).create()
        new_cv.publish()
        new_cv = new_cv.read()
        new_lce = entities.LifecycleEnvironment(organization=new_org).create()
        promote(new_cv.version[0], new_lce.id)
        new_media = entities.Media(operatingsystem=[os],
                                   location=[new_loc],
                                   organization=[new_org]).create()
        # update itself
        hostgroup.organization = [new_org]
        hostgroup.location = [new_loc]
        hostgroup.lifecycle_environment = new_lce
        hostgroup.content_view = new_cv
        hostgroup.domain = new_domain
        hostgroup.architecture = new_arch
        hostgroup.operatingsystem = new_os
        hostgroup.environment = new_env
        hostgroup.parent = new_parent
        hostgroup.ptable = new_ptable
        hostgroup.subnet = new_subnet
        hostgroup.medium = new_media
        hostgroup = hostgroup.update([
            'parent',
            'environment',
            'operatingsystem',
            'architecture',
            'ptable',
            'subnet',
            'domain',
            'content_view',
            'lifecycle_environment',
            'location',
            'organization',
            'medium',
        ])
        assert hostgroup.parent.read().name == new_parent.name
        assert hostgroup.environment.read().name == new_env.name
        assert hostgroup.operatingsystem.read().name == new_os.name
        assert hostgroup.architecture.read().name == new_arch.name
        assert hostgroup.ptable.read().name == new_ptable.name
        assert hostgroup.subnet.read().name == new_subnet.name
        assert hostgroup.domain.read().name == new_domain.name
        assert hostgroup.content_view.read().name == new_cv.name
        assert hostgroup.lifecycle_environment.read().name == new_lce.name
        assert hostgroup.location[0].read().name == new_loc.name
        assert hostgroup.organization[0].read().name == new_org.name
        assert hostgroup.medium.read().name == new_media.name

        # delete
        hostgroup.delete()
        with pytest.raises(HTTPError):
            hostgroup.read()
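Example #2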
    def test_post_scenario_postclient_package_installation(self):
        """Post-upgrade scenario that creates and installs the package on
        post-upgrade client remotely and then verifies if the package installed

        :id: postupgrade-1a881c07-595f-425f-aca9-df2337824a8e

        :steps:

            1. Create a content host with the existing client activation key
            2. Create and sync the repo from which the package will be
                installed on the content host
            3. Add the repo to the CV and then to the activation key
            4. Install the package on the post-upgrade client

        :expectedresults:

            1. The content host is created
            2. The new repo and its product have been added to the activation
                key used to create the content host
            3. The package is installed on the post-upgrade client
        """
        org = entities.Organization(name=self.org_name).create()
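        # Look up Library and create a lifecycle environment and activation key after it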
        prior_env = entities.LifecycleEnvironment(organization=org).search(
            query={'search': 'name=Library'}
        )[0]
        environment = entities.LifecycleEnvironment(
            organization=org,
            prior=prior_env.id,
            label=self.le_name,
            name=self.le_name
        ).create()
        ak = create_activation_key_for_client_registration(
            ak_name=self.ak_name,
            client_os='rhel7',
            org=org,
            environment=environment,
            sat_state='post'
        )
        rhel7_client = dockerize(
            ak_name=ak.name, distro='rhel7', org_label=org.label)
        client_container_id = list(rhel7_client.values())[0]
        client_name = list(rhel7_client.keys())[0].lower()
        product, yum_repo = create_yum_test_repo(
            product_name=self.prod_name, repo_url=FAKE_REPO_ZOO3, org=org)
        update_product_subscription_in_ak(
            product=product, yum_repo=yum_repo, ak=ak, org=org)
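        # Short pause, presumably to let the activation key changes propagate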
        time.sleep(10)
        execute(
            attach_subscription_to_host_from_satellite,
            org.id,
            product.name,
            client_name,
            host=get_satellite_host()
        )
        # Refresh subscriptions on client
        execute(
            docker_execute_command,
            client_container_id,
            'subscription-manager refresh',
            host=self.docker_vm
        )
        # Run goferd on client as its docker container
        execute(
            docker_execute_command,
            client_container_id,
            'goferd -f',
            async=True,
            host=self.docker_vm
        )
        # Holding on for 30 seconds while goferd starts
        time.sleep(30)
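        # Find the registered client host and install the package remotely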
        client_id = entities.Host().search(
            query={'search': 'name={}'.format(client_name)}
        )[0].id
        entities.Host().install_content(data={
            'organization_id': org.id,
            'included': {'ids': [client_id]},
            'content_type': 'package',
            'content': [self.package_name],
        })
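        # Give the remote install task some time to finish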
        time.sleep(20)
        # Validate that the package is really installed
        installed_package = execute(
            docker_execute_command,
            client_container_id,
            'rpm -q {}'.format(self.package_name),
            host=self.docker_vm
        )[self.docker_vm]
        self.assertIn(self.package_name, installed_package)
Example #3
    def configure_puppet_test(cls):
        """Sets up the whole provisioning environment needed for Puppet based
        end-to-end tests like OSCAP etc

        :returns: A dict of entities to help with provisioning
        """
        cls.rhel6_content = OSCAP_DEFAULT_CONTENT['rhel6_content']
        cls.rhel7_content = OSCAP_DEFAULT_CONTENT['rhel7_content']
        cls.rhel8_content = OSCAP_DEFAULT_CONTENT['rhel8_content']
        sat6_hostname = settings.server.hostname
        proxy = Proxy.list({'search': sat6_hostname})[0]
        p_features = set(proxy.get('features').split(', '))
        if {'Puppet', 'Ansible', 'Openscap'}.issubset(p_features):
            cls.proxy_id = proxy.get('id')
        else:
            raise ProxyError(
                'Some of the required features (Puppet, Ansible, Openscap) are not present'
            )
        ak_name_8 = gen_string('alpha')
        ak_name_7 = gen_string('alpha')
        ak_name_6 = gen_string('alpha')
        repo_values = [
            {
                'repo': settings.sattools_repo['rhel8'],
                'akname': ak_name_8
            },
            {
                'repo': settings.sattools_repo['rhel7'],
                'akname': ak_name_7
            },
            {
                'repo': settings.sattools_repo['rhel6'],
                'akname': ak_name_6
            },
        ]
        # Create new organization and environment.
        org = entities.Organization(name=gen_string('alpha')).create()
        cls.puppet_env = (entities.Environment().search(
            query={'search': 'name=production'})[0].read())
        cls.puppet_env.organization.append(org)
        cls.puppet_env = cls.puppet_env.update(['organization'])
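        # Import puppet classes from the Satellite smart proxy into the production environment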
        smart_proxy = (entities.SmartProxy().search(
            query={'search': f'name={sat6_hostname}'})[0].read())
        smart_proxy.import_puppetclasses(environment=cls.puppet_env.name)
        env = entities.LifecycleEnvironment(organization=org,
                                            name=gen_string('alpha')).create()
        # Create content view
        content_view = entities.ContentView(organization=org,
                                            name=gen_string('alpha')).create()
        # Create activation keys for rhel6, rhel7 and rhel8.
        for repo in repo_values:
            activation_key = entities.ActivationKey(name=repo.get('akname'),
                                                    environment=env,
                                                    organization=org).create()
            # Setup org for a custom repo for RHEL6, RHEL7 and RHEL8.
            setup_org_for_a_custom_repo({
                'url': repo.get('repo'),
                'organization-id': org.id,
                'content-view-id': content_view.id,
                'lifecycle-environment-id': env.id,
                'activationkey-id': activation_key.id,
            })

        for content in cls.rhel8_content, cls.rhel7_content, cls.rhel6_content:
            content = Scapcontent.info({'title': content},
                                       output_format='json')
            organization_ids = [
                content_org['id']
                for content_org in content.get('organizations', [])
            ]
            organization_ids.append(org.id)
            Scapcontent.update({
                'title': content['title'],
                'organization-ids': organization_ids
            })

        return {
            'org_name': org.name,
            'cv_name': content_view.name,
            'sat6_hostname': settings.server.hostname,
            'ak_name': {
                'rhel8': ak_name_8,
                'rhel7': ak_name_7,
                'rhel6': ak_name_6
            },
            'env_name': env.name,
        }
Example #4
def test_positive_create_by_type():
    """Create entities of different types and check audit logs for these
    events using entity type as search criteria

    :id: 6c7ea7fc-6728-447f-9655-26fe0a2881bc

    :customerscenario: true

    :expectedresults: Audit logs contain corresponding entries per each
        create event

    :BZ: 1426742, 1492668, 1492696

    :CaseImportance: Medium
    """
    for entity_item in [
        {
            'entity': entities.Architecture()
        },
        {
            'entity': entities.AuthSourceLDAP(),
            'entity_type': 'auth_source',
            'value_template': 'LDAP-{entity.name}',
        },
        {
            'entity': entities.ComputeProfile(),
            'entity_type': 'compute_profile'
        },
        {
            'entity': entities.LibvirtComputeResource(),
            'entity_type': 'compute_resource',
            'value_template': '{entity.name} (Libvirt)',
        },
        {
            'entity': entities.ConfigGroup(),
            'entity_type': 'config_group'
        },
        {
            'entity': entities.Domain()
        },
        {
            'entity': entities.Host()
        },
        {
            'entity': entities.HostGroup()
        },
        {
            'entity':
            entities.Image(
                compute_resource=entities.LibvirtComputeResource().create())
        },
        {
            'entity': entities.Location()
        },
        {
            'entity': entities.Media(),
            'entity_type': 'medium'
        },
        {
            'entity': entities.Organization()
        },
        {
            'entity': entities.OperatingSystem(),
            'entity_type': 'os',
            'value_template': '{entity.name} {entity.major}',
        },
        {
            'entity': entities.PartitionTable(),
            'entity_type': 'ptable'
        },
        {
            'entity': entities.PuppetClass()
        },
        {
            'entity': entities.Role()
        },
        {
            'entity': entities.Subnet(),
            'value_template': '{entity.name} ({entity.network}/{entity.cidr})',
        },
        {
            'entity': entities.ProvisioningTemplate(),
            'entity_type': 'provisioning_template'
        },
        {
            'entity': entities.User(),
            'value_template': '{entity.login}'
        },
        {
            'entity': entities.UserGroup()
        },
        {
            'entity': entities.ContentView(),
            'entity_type': 'katello/content_view'
        },
        {
            'entity': entities.LifecycleEnvironment(),
            'entity_type': 'katello/kt_environment'
        },
        {
            'entity': entities.ActivationKey(),
            'entity_type': 'katello/activation_key'
        },
        {
            'entity': entities.HostCollection(),
            'entity_type': 'katello/host_collection'
        },
        {
            'entity': entities.Product(),
            'entity_type': 'katello/product'
        },
        {
            'entity': entities.GPGKey(),
            'entity_type': 'katello/gpg_key',
            'value_template': 'content credential (gpg_key - {entity.name})',
        },
        {
            'entity':
            entities.SyncPlan(organization=entities.Organization(id=1)),
            'entity_type': 'katello/sync_plan',
        },
    ]:
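        # Create the entity and verify that a matching audit record was logged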
        created_entity = entity_item['entity'].create()
        entity_type = entity_item.get(
            'entity_type', created_entity.__class__.__name__.lower())
        value_template = entity_item.get('value_template', '{entity.name}')
        entity_value = value_template.format(entity=created_entity)
        audits = entities.Audit().search(
            query={'search': f'type={entity_type}'})
        entity_audits = [
            entry for entry in audits if entry.auditable_name == entity_value
        ]
        assert entity_audits, (
            f'audit not found by name "{entity_value}" for entity: '
            f'{created_entity.__class__.__name__.lower()}')
        audit = entity_audits[0]
        assert audit.auditable_id == created_entity.id
        assert audit.action == 'create'
        assert audit.version == 1
Example #5
def test_positive_get_applicable_for_host(module_org, rhel6_contenthost):
    """Get applicable errata ids for a host

    :id: 51d44d51-eb3f-4ee4-a1df-869629d427ac

    :Setup:
        1. Errata synced on satellite server.
        2. Some Content hosts present.

    :Steps: GET /api/v2/hosts/:id/errata

    :expectedresults: The available errata is retrieved.

    :CaseLevel: System
    """
    org = entities.Organization().create()
    env = entities.LifecycleEnvironment(organization=org).create()
    content_view = entities.ContentView(organization=org).create()
    activation_key = entities.ActivationKey(environment=env, organization=org).create()
    setup_org_for_a_rh_repo(
        {
            'product': constants.PRDS['rhel'],
            'repository-set': constants.REPOSET['rhst6'],
            'repository': constants.REPOS['rhst6']['name'],
            'organization-id': org.id,
            'content-view-id': content_view.id,
            'lifecycle-environment-id': env.id,
            'activationkey-id': activation_key.id,
        },
        force_manifest_upload=True,
    )
    setup_org_for_a_custom_repo(
        {
            'url': CUSTOM_REPO_URL,
            'organization-id': org.id,
            'content-view-id': content_view.id,
            'lifecycle-environment-id': env.id,
            'activationkey-id': activation_key.id,
        }
    )
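    # Enable and sync the RHVA6 repo, then add it to the content view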
    repo_id = enable_rhrepo_and_fetchid(
        basearch=constants.DEFAULT_ARCHITECTURE,
        org_id=org.id,
        product=constants.PRDS['rhel'],
        repo=constants.REPOS['rhva6']['name'],
        reposet=constants.REPOSET['rhva6'],
        releasever=constants.DEFAULT_RELEASE_VERSION,
    )
    repo = entities.Repository(id=repo_id)
    assert repo.sync()['result'] == 'success'
    content_view = content_view.read()
    content_view.repository.append(repo)
    content_view = content_view.update(['repository'])
    content_view.publish()
    versions = sorted(content_view.read().version, key=lambda ver: ver.id)
    cvv = versions[-1].read()
    promote(cvv, env.id)
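    # Register the content host with the activation key and enable repositories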
    rhel6_contenthost.install_katello_ca()
    rhel6_contenthost.register_contenthost(org.label, activation_key.name)
    assert rhel6_contenthost.subscribed
    rhel6_contenthost.enable_repo(constants.REPOS['rhst6']['id'])
    rhel6_contenthost.enable_repo(constants.REPOS['rhva6']['id'])
    rhel6_contenthost.install_katello_agent()
    host = rhel6_contenthost.nailgun_host
    erratum = _fetch_available_errata(module_org, host, expected_amount=0)
    assert len(erratum) == 0
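    # Install packages so that the corresponding errata become applicable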
    rhel6_contenthost.run(f'yum install -y {constants.FAKE_1_CUSTOM_PACKAGE}')
    erratum = _fetch_available_errata(module_org, host, 1)
    assert len(erratum) == 1
    assert CUSTOM_REPO_ERRATA_ID in [errata['errata_id'] for errata in erratum]
    rhel6_contenthost.run(f'yum install -y {constants.REAL_0_RH_PACKAGE}')
    erratum = _fetch_available_errata(module_org, host, 3)
    assert len(erratum) == 3
    assert {constants.REAL_1_ERRATA_ID, constants.REAL_2_ERRATA_ID}.issubset(
        {errata['errata_id'] for errata in erratum}
    )
Example #6
    def test_positive_iso_library_sync(self):
        """Ensure RH repo with ISOs after publishing to Library is synchronized
        to capsule automatically

        :id: 221a2d41-0fef-46dd-a804-fdedd7187163

        :customerscenario: true

        :BZ: 1303102, 1480358, 1303103

        :expectedresults: ISOs are present on external capsule

        :CaseLevel: System
        """
        # Create organization, product, enable & sync RH repository with ISOs
        org = entities.Organization(smart_proxy=[self.capsule_id]).create()
        with manifests.clone() as manifest:
            upload_manifest(org.id, manifest.content)
        rh_repo_id = enable_rhrepo_and_fetchid(
            basearch='x86_64',
            org_id=org.id,
            product=PRDS['rhsc'],
            repo=REPOS['rhsc7_iso']['name'],
            reposet=REPOSET['rhsc7_iso'],
            releasever=None,
        )
        rh_repo = entities.Repository(id=rh_repo_id).read()
        call_entity_method_with_timeout(rh_repo.sync, timeout=2500)
        capsule = entities.Capsule(id=self.capsule_id).read()
        # Find "Library" lifecycle env for specific organization
        lce = entities.LifecycleEnvironment(organization=org).search(query={
            'search': 'name={}'.format(ENVIRONMENT)
        })[0]
        # Associate the lifecycle environment with the capsule
        capsule.content_add_lifecycle_environment(data={
            'environment_id': lce.id,
        })
        result = capsule.content_lifecycle_environments()
        self.assertGreaterEqual(len(result['results']), 1)
        self.assertIn(
            lce.id, [capsule_lce['id'] for capsule_lce in result['results']])
        # Create a content view with the repository
        cv = entities.ContentView(
            organization=org,
            repository=[rh_repo],
        ).create()
        # Publish new version of the content view
        cv.publish()
        cv = cv.read()
        self.assertEqual(len(cv.version), 1)
        # Verify ISOs are present on satellite
        repo_path = os.path.join(
            PULP_PUBLISHED_ISO_REPOS_PATH, rh_repo.backend_identifier)
        sat_isos = get_repo_files(repo_path, extension='iso')
        self.assertGreater(len(sat_isos), 0)
        # Assert that a task to sync lifecycle environment to the capsule
        # is started (or finished already)
        sync_status = capsule.content_get_sync()
        self.assertTrue(
            len(sync_status['active_sync_tasks']) >= 1
            or sync_status['last_sync_time']
        )
        # Wait till capsule sync finishes
        for task in sync_status['active_sync_tasks']:
            entities.ForemanTask(id=task['id']).poll(timeout=600)
        # Verify all the ISOs are present on capsule
        capsule_isos = get_repo_files(
            repo_path, extension='iso', hostname=self.capsule_ip)
        self.assertGreater(len(capsule_isos), 0)
        self.assertEqual(set(sat_isos), set(capsule_isos))
Example #7
    def test_positive_mirror_on_sync(self):
        """Create 2 repositories with 'on_demand' download policy and mirror on
        sync option, associate them with capsule, sync first repo, move package
        from first repo to second one, sync it, attempt to install package on
        some host.

        :id: 39149642-1e7e-4ef8-8762-bec295913014

        :BZ: 1426408

        :expectedresults: host, subscribed to second repo only, can
            successfully install package

        :CaseLevel: System
        """
        repo1_name = gen_string('alphanumeric')
        repo2_name = gen_string('alphanumeric')
        # Create and publish first custom repository with 2 packages in it
        repo1_url = create_repo(
            repo1_name,
            FAKE_1_YUM_REPO,
            FAKE_1_YUM_REPO_RPMS[1:3],
        )
        # Create and publish second repo with no packages in it
        repo2_url = create_repo(repo2_name)
        # Create organization, product, repository in satellite, and lifecycle
        # environment
        org = entities.Organization().create()
        prod1 = entities.Product(organization=org).create()
        repo1 = entities.Repository(
            download_policy='on_demand',
            mirror_on_sync=True,
            product=prod1,
            url=repo1_url,
        ).create()
        prod2 = entities.Product(organization=org).create()
        repo2 = entities.Repository(
            download_policy='on_demand',
            mirror_on_sync=True,
            product=prod2,
            url=repo2_url,
        ).create()
        lce1 = entities.LifecycleEnvironment(organization=org).create()
        lce2 = entities.LifecycleEnvironment(organization=org).create()
        # Associate the lifecycle environments with the capsule
        capsule = entities.Capsule(id=self.capsule_id).read()
        for lce_id in (lce1.id, lce2.id):
            capsule.content_add_lifecycle_environment(data={
                'environment_id': lce_id,
            })
        result = capsule.content_lifecycle_environments()
        self.assertGreaterEqual(len(result['results']), 2)
        self.assertTrue(
            {lce1.id, lce2.id}.issubset(
                [capsule_lce['id'] for capsule_lce in result['results']]),
        )
        # Create content views with the repositories
        cv1 = entities.ContentView(
            organization=org,
            repository=[repo1],
        ).create()
        cv2 = entities.ContentView(
            organization=org,
            repository=[repo2],
        ).create()
        # Sync first repository
        repo1.sync()
        repo1 = repo1.read()
        # Publish new version of the content view
        cv1.publish()
        cv1 = cv1.read()
        self.assertEqual(len(cv1.version), 1)
        cvv1 = cv1.version[-1].read()
        # Promote content view to lifecycle environment
        promote(cvv1, lce1.id)
        cvv1 = cvv1.read()
        self.assertEqual(len(cvv1.environment), 2)
        # Assert that a task to sync lifecycle environment to the capsule
        # is started (or finished already)
        sync_status = capsule.content_get_sync()
        self.assertTrue(
            len(sync_status['active_sync_tasks']) >= 1
            or sync_status['last_sync_time']
        )
        # Wait till capsule sync finishes
        for task in sync_status['active_sync_tasks']:
            entities.ForemanTask(id=task['id']).poll()
        # Move one package from the first repo to second one
        ssh.command(
            'mv {} {}'.format(
                os.path.join(
                    PULP_PUBLISHED_YUM_REPOS_PATH,
                    repo1_name,
                    FAKE_1_YUM_REPO_RPMS[2],
                ),
                os.path.join(
                    PULP_PUBLISHED_YUM_REPOS_PATH,
                    repo2_name,
                    FAKE_1_YUM_REPO_RPMS[2],
                ),
            )
        )
        # Update repositories (re-trigger 'createrepo' command)
        create_repo(repo1_name)
        create_repo(repo2_name)
        # Synchronize first repository
        repo1.sync()
        cv1.publish()
        cv1 = cv1.read()
        self.assertEqual(len(cv1.version), 2)
        cv1.version.sort(key=lambda version: version.id)
        cvv1 = cv1.version[-1].read()
        # Promote content view to lifecycle environment
        promote(cvv1, lce1.id)
        cvv1 = cvv1.read()
        self.assertEqual(len(cvv1.environment), 2)
        # Synchronize second repository
        repo2.sync()
        repo2 = repo2.read()
        self.assertEqual(repo2.content_counts['package'], 1)
        cv2.publish()
        cv2 = cv2.read()
        self.assertEqual(len(cv2.version), 1)
        cvv2 = cv2.version[-1].read()
        # Promote content view to lifecycle environment
        promote(cvv2, lce2.id)
        cvv2 = cvv2.read()
        self.assertEqual(len(cvv2.environment), 2)
        # Create activation key, add subscription to second repo only
        activation_key = entities.ActivationKey(
            content_view=cv2,
            environment=lce2,
            organization=org,
        ).create()
        subscription = entities.Subscription(organization=org).search(query={
            'search': 'name={}'.format(prod2.name)
        })[0]
        activation_key.add_subscriptions(data={
            'subscription_id': subscription.id})
        # Subscribe a host with activation key
        with VirtualMachine(distro=DISTRO_RHEL7) as client:
            client.install_katello_ca()
            client.register_contenthost(
                org.label,
                activation_key.name,
            )
            # Install the package
            package_name = FAKE_1_YUM_REPO_RPMS[2][:-len('.rpm')]
            result = client.run('yum install -y {}'.format(package_name))
            self.assertEqual(result.return_code, 0)
            # Ensure package installed
            result = client.run('rpm -qa | grep {}'.format(package_name))
            self.assertEqual(result.return_code, 0)
            self.assertIn(package_name, result.stdout[0])
Example #8
    def test_positive_checksum_sync(self, capsule_vm):
        """Synchronize repository to capsule, update repository's checksum
        type, trigger capsule sync and make sure checksum type was updated on
        capsule

        :id: eb07bdf3-6cd8-4a2f-919b-8dfc84e16115

        :customerscenario: true

        :BZ: 1288656, 1664288, 1732066

        :expectedresults: checksum type is updated in repodata of corresponding
            repository on capsule

        :CaseLevel: System

        :CaseImportance: Critical
        """
        REPOMD_PATH = 'repodata/repomd.xml'
        # Create organization, product, lce and repository with sha256 checksum
        # type
        org = entities.Organization(smart_proxy=[capsule_vm._capsule.id]).create()
        product = entities.Product(organization=org).create()
        repo = entities.Repository(
            product=product, checksum_type='sha256', download_policy='immediate'
        ).create()
        lce = entities.LifecycleEnvironment(organization=org).create()
        # Associate the lifecycle environment with the capsule
        capsule = entities.Capsule(id=capsule_vm._capsule.id).read()
        capsule.content_add_lifecycle_environment(data={'environment_id': lce.id})
        result = capsule.content_lifecycle_environments()

        assert len(result['results']) >= 1
        assert lce.id in [capsule_lce['id'] for capsule_lce in result['results']]

        # Sync, publish and promote a repo
        cv = entities.ContentView(organization=org, repository=[repo]).create()
        repo.sync()
        repo = repo.read()
        cv.publish()
        cv = cv.read()

        assert len(cv.version) == 1

        cvv = cv.version[-1].read()
        promote(cvv, lce.id)
        cvv = cvv.read()

        assert len(cvv.environment) == 2

        # Wait till capsule sync finishes
        sync_status = capsule.content_get_sync()

        assert len(sync_status['active_sync_tasks']) >= 1 or sync_status['last_sync_time']

        for task in sync_status['active_sync_tasks']:
            entities.ForemanTask(id=task['id']).poll()
        sync_status = capsule.content_get_sync()
        last_sync_time = sync_status['last_sync_time']
        # Verify repodata's checksum type is sha256, not sha1 on capsule
        lce_repo_path = form_repo_path(
            org=org.label, lce=lce.label, cv=cv.label, prod=product.label, repo=repo.label
        )
        result = ssh.command(
            f'grep -o \'checksum type="sha1"\' {lce_repo_path}/{REPOMD_PATH}',
            hostname=capsule_vm.ip_addr,
        )

        assert result.return_code != 0
        assert len(result.stdout) == 0

        result = ssh.command(
            f'grep -o \'checksum type="sha256"\' {lce_repo_path}/{REPOMD_PATH}',
            hostname=capsule_vm.ip_addr,
        )

        assert result.return_code == 0
        assert len(result.stdout) > 0

        # Update repo's checksum type to sha1
        repo.checksum_type = 'sha1'
        repo = repo.update(['checksum_type'])

        # Sync, publish, and promote repo
        repo.sync()
        cv.publish()
        cv = cv.read()

        assert len(cv.version) == 2

        cv.version.sort(key=lambda version: version.id)
        cvv = cv.version[-1].read()
        promote(cvv, lce.id)
        cvv = cvv.read()

        assert len(cvv.environment) == 2

        # Wait till capsule sync finishes
        sync_status = capsule.content_get_sync()

        assert (
            len(sync_status['active_sync_tasks']) >= 1
            or sync_status['last_sync_time'] != last_sync_time
        )

        for task in sync_status['active_sync_tasks']:
            entities.ForemanTask(id=task['id']).poll()
        # Verify repodata's checksum type has updated to sha1 on capsule
        result = ssh.command(
            f'grep -o \'checksum type="sha256"\' {lce_repo_path}/{REPOMD_PATH}',
            hostname=capsule_vm.ip_addr,
        )

        assert result.return_code != 0
        assert len(result.stdout) == 0

        result = ssh.command(
            f'grep -o \'checksum type="sha1"\' {lce_repo_path}/{REPOMD_PATH}',
            hostname=capsule_vm.ip_addr,
        )

        assert result.return_code == 0
        assert len(result.stdout) > 0
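Example #9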
    def test_positive_remove_prod_promoted_cv_version_from_default_env(self):
        """Remove PROD promoted content view version from Library environment

        :id: 24911876-7c2a-4a12-a3aa-98051dfda29d

        :Steps:

            1. Create a content view
            2. Add yum repositories, puppet modules, docker repositories to CV
            3. Publish content view
            4. Promote the content view version to multiple environments
                Library -> DEV -> QE -> PROD
            5. Remove the content view version from Library environment

        :expectedresults: Content view version exists only in DEV, QE, PROD and
            not in Library

        :CaseLevel: Integration
        """
        org = entities.Organization().create()
        lce_dev = entities.LifecycleEnvironment(organization=org).create()
        lce_qe = entities.LifecycleEnvironment(organization=org, prior=lce_dev).create()
        lce_prod = entities.LifecycleEnvironment(organization=org, prior=lce_qe).create()
        product = entities.Product(organization=org).create()
        yum_repo = entities.Repository(url=FAKE_1_YUM_REPO, product=product).create()
        yum_repo.sync()
        docker_repo = entities.Repository(
            content_type='docker',
            docker_upstream_name='busybox',
            product=product,
            url=DOCKER_REGISTRY_HUB,
        ).create()
        docker_repo.sync()
        puppet_repo = entities.Repository(
            url=FAKE_0_PUPPET_REPO, content_type='puppet', product=product
        ).create()
        puppet_repo.sync()
        # create a content view and add to it the yum and docker repos
        content_view = entities.ContentView(organization=org).create()
        content_view.repository = [yum_repo, docker_repo]
        content_view = content_view.update(['repository'])
        # get a random puppet module and add it to content view
        puppet_module = random.choice(content_view.available_puppet_modules()['results'])
        entities.ContentViewPuppetModule(
            author=puppet_module['author'], name=puppet_module['name'], content_view=content_view
        ).create()
        # publish the content view
        content_view.publish()
        content_view = content_view.read()
        self.assertEqual(len(content_view.version), 1)
        content_view_version = content_view.version[0].read()
        self.assertEqual(len(content_view_version.environment), 1)
        lce_library = entities.LifecycleEnvironment(
            id=content_view_version.environment[0].id
        ).read()
        self.assertEqual(lce_library.name, ENVIRONMENT)
        # promote content view version to DEV QE PROD lifecycle environments
        for lce in [lce_dev, lce_qe, lce_prod]:
            promote(content_view_version, lce.id)
        self.assertEqual(
            {lce_library.id, lce_dev.id, lce_qe.id, lce_prod.id},
            {lce.id for lce in content_view_version.read().environment},
        )
        # remove the content view version from Library environment
        content_view.delete_from_environment(lce_library.id)
        # assert that the content view version exists only in DEV QE PROD and
        # not in Library environment
        self.assertEqual(
            {lce_dev.id, lce_qe.id, lce_prod.id},
            {lce.id for lce in content_view_version.read().environment},
        )
Example #10
def module_lce(module_gt_manifest_org):
    return entities.LifecycleEnvironment(
        organization=module_gt_manifest_org).create()
Example #11
    def test_positive_sync_puppet_module_with_versions(self, capsule_vm):
        """Ensure it's possible to sync multiple versions of the same puppet
        module to the capsule

        :id: 83a0ddd6-8a6a-43a0-b169-094a2556dd28

        :customerscenario: true

        :BZ: 1365952, 1655243

        :Steps:

            1. Register a capsule
            2. Associate LCE with the capsule
            3. Sync a puppet module with multiple versions
            4. Publish a CV with one version of puppet module and promote it to
               capsule's LCE
            5. Wait for capsule synchronization to finish
            6. Publish another CV with different version of puppet module and
               promote it to capsule's LCE
            7. Wait for capsule synchronization to finish once more

        :expectedresults: Capsule was successfully synchronized, new version of
            puppet module is present on capsule

        :CaseLevel: System

        :CaseImportance: Medium
        """
        module_name = 'versioned'
        module_versions = ['2.2.2', '3.3.3']
        org = entities.Organization().create()
        lce = entities.LifecycleEnvironment(organization=org).create()
        content_view = entities.ContentView(organization=org).create()
        prod = entities.Product(organization=org).create()
        puppet_repository = entities.Repository(
            content_type=REPO_TYPE['puppet'], product=prod, url=CUSTOM_PUPPET_REPO
        ).create()
        capsule = entities.Capsule(id=capsule_vm._capsule.id).read()
        capsule.content_add_lifecycle_environment(data={'environment_id': lce.id})
        result = capsule.content_lifecycle_environments()

        assert len(result['results']) >= 1
        assert lce.id in [capsule_lce['id'] for capsule_lce in result['results']]

        puppet_repository.sync()
        puppet_module_old = entities.PuppetModule().search(
            query={'search': f'name={module_name} and version={module_versions[0]}'}
        )[0]
        # Add puppet module to the CV
        entities.ContentViewPuppetModule(
            content_view=content_view, id=puppet_module_old.id
        ).create()
        content_view = content_view.read()

        assert len(content_view.puppet_module) > 0

        # Publish and promote CVV
        content_view.publish()
        content_view = content_view.read()

        assert len(content_view.version) == 1

        cvv = content_view.version[-1].read()
        promote(cvv, lce.id)
        cvv = cvv.read()

        assert len(cvv.environment) == 2

        # Wait till capsule sync finishes
        sync_status = capsule.content_get_sync()

        assert len(sync_status['active_sync_tasks']) >= 1 or sync_status['last_sync_time']

        for task in sync_status['active_sync_tasks']:
            entities.ForemanTask(id=task['id']).poll()
        sync_status = capsule.content_get_sync()
        last_sync_time = sync_status['last_sync_time']
        # Unassign old puppet module version from CV
        entities.ContentViewPuppetModule(
            content_view=content_view, id=content_view.puppet_module[0].id
        ).delete()
        # Assign new puppet module version
        puppet_module_new = entities.PuppetModule().search(
            query={'search': f'name={module_name} and version={module_versions[1]}'}
        )[0]
        entities.ContentViewPuppetModule(
            content_view=content_view, id=puppet_module_new.id
        ).create()
        content_view = content_view.read()

        assert len(content_view.puppet_module) > 0

        # Publish and promote CVV
        content_view.publish()
        content_view = content_view.read()

        assert len(content_view.version) == 2

        cvv = content_view.version[-1].read()
        promote(cvv, lce.id)
        cvv = cvv.read()

        assert len(cvv.environment) == 2

        # Wait till capsule sync finishes
        sync_status = capsule.content_get_sync()
        if sync_status['active_sync_tasks']:
            for task in sync_status['active_sync_tasks']:
                entities.ForemanTask(id=task['id']).poll()
        else:
            assert sync_status['last_sync_time'] != last_sync_time

        stored_modules = get_repo_files(PULP_PUBLISHED_PUPPET_REPOS_PATH, 'gz', capsule_vm.ip_addr)
        matching_filenames = filter(
            lambda filename: f'{module_name}-{module_versions[1]}' in filename, stored_modules
        )
        assert next(matching_filenames, None)
Example #12
    def test_positive_get_diff_for_cv_envs(self):
        """Generate a difference in errata between a set of environments
        for a content view

        :id: 96732506-4a89-408c-8d7e-f30c8d469769

        :Setup:

            1. Errata synced on satellite server.
            2. Multiple environments present.

        :Steps: GET /katello/api/compare

        :expectedresults: Difference in errata between a set of environments
            for a content view is retrieved.

        :CaseLevel: System
        """
        org = entities.Organization().create()
        env = entities.LifecycleEnvironment(organization=org).create()
        content_view = entities.ContentView(organization=org).create()
        activation_key = entities.ActivationKey(
            environment=env,
            organization=org,
        ).create()
        setup_org_for_a_rh_repo(
            {
                'product': PRDS['rhel'],
                'repository-set': REPOSET['rhst7'],
                'repository': REPOS['rhst7']['name'],
                'organization-id': org.id,
                'content-view-id': content_view.id,
                'lifecycle-environment-id': env.id,
                'activationkey-id': activation_key.id,
            },
            force_use_cdn=True)
        setup_org_for_a_custom_repo({
            'url': CUSTOM_REPO_URL,
            'organization-id': org.id,
            'content-view-id': content_view.id,
            'lifecycle-environment-id': env.id,
            'activationkey-id': activation_key.id,
        })
        new_env = entities.LifecycleEnvironment(
            organization=org,
            prior=env,
        ).create()
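        # Promote the newest version to the new environment, then compare errata between the two latest versions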
        cvvs = content_view.read().version[-2:]
        promote(cvvs[-1], new_env.id)
        result = entities.Errata().compare(
            data={
                'content_view_version_ids': [cvv.id for cvv in cvvs],
                'per_page': 9999,
            })
        cvv2_only_errata = next(
            errata for errata in result['results']
            if errata['errata_id'] == CUSTOM_REPO_ERRATA_ID)
        self.assertEqual([cvvs[-1].id], cvv2_only_errata['comparison'])
        both_cvvs_errata = next(errata for errata in result['results']
                                if errata['errata_id'] == REAL_0_ERRATA_ID)
        self.assertEqual(set(cvv.id for cvv in cvvs),
                         set(both_cvvs_errata['comparison']))
Example #13
    def test_positive_get_applicable_for_host(self):
        """Get applicable errata ids for a host

        :id: 51d44d51-eb3f-4ee4-a1df-869629d427ac

        :Setup:
            1. Errata synced on satellite server.
            2. Some Content hosts present.

        :Steps: GET /api/v2/hosts/:id/errata

        :expectedresults: The available errata is retrieved.

        :CaseLevel: System
        """
        org = entities.Organization().create()
        env = entities.LifecycleEnvironment(organization=org).create()
        content_view = entities.ContentView(organization=org).create()
        activation_key = entities.ActivationKey(
            environment=env,
            organization=org,
        ).create()
        setup_org_for_a_rh_repo(
            {
                'product': PRDS['rhel'],
                'repository-set': REPOSET['rhst6'],
                'repository': REPOS['rhst6']['name'],
                'organization-id': org.id,
                'content-view-id': content_view.id,
                'lifecycle-environment-id': env.id,
                'activationkey-id': activation_key.id,
            },
            force_manifest_upload=True)
        setup_org_for_a_custom_repo({
            'url': CUSTOM_REPO_URL,
            'organization-id': org.id,
            'content-view-id': content_view.id,
            'lifecycle-environment-id': env.id,
            'activationkey-id': activation_key.id,
        })
        repo_id = enable_rhrepo_and_fetchid(
            basearch=DEFAULT_ARCHITECTURE,
            org_id=org.id,
            product=PRDS['rhel'],
            repo=REPOS['rhva6']['name'],
            reposet=REPOSET['rhva6'],
            releasever=DEFAULT_RELEASE_VERSION,
        )
        repo = entities.Repository(id=repo_id)
        self.assertEqual(repo.sync()['result'], 'success')
        content_view = content_view.read()
        content_view.repository.append(repo)
        content_view = content_view.update(['repository'])
        content_view.publish()
        versions = sorted(content_view.read().version, key=lambda ver: ver.id)
        cvv = versions[-1].read()
        promote(cvv, env.id)
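        # Register a RHEL6 client and check applicable errata as packages are installed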
        with VirtualMachine(distro=DISTRO_RHEL6) as client:
            client.install_katello_ca()
            client.register_contenthost(org.label, activation_key.name)
            self.assertTrue(client.subscribed)
            client.enable_repo(REPOS['rhst6']['id'])
            client.enable_repo(REPOS['rhva6']['id'])
            client.install_katello_agent()
            host = entities.Host().search(
                query={'search': 'name={0}'.format(client.hostname)}
            )[0].read()
            erratum = self._fetch_available_errata(host, 0)
            self.assertEqual(len(erratum), 0)
            client.run('yum install -y {0}'.format(FAKE_1_CUSTOM_PACKAGE))
            erratum = self._fetch_available_errata(host, 1)
            self.assertEqual(len(erratum), 1)
            self.assertIn(
                CUSTOM_REPO_ERRATA_ID,
                [errata['errata_id'] for errata in erratum],
            )
            client.run('yum install -y {0}'.format(REAL_0_RH_PACKAGE))
            erratum = self._fetch_available_errata(host, 3)
            self.assertEqual(len(erratum), 3)
            self.assertTrue({REAL_1_ERRATA_ID, REAL_2_ERRATA_ID}.issubset(
                {errata['errata_id']
                 for errata in erratum}))
Example #14
    def test_inherit_puppetclass(self):
        """Host that created from HostGroup entity with PuppetClass
        assigned to it should inherit such puppet class information under
        'all_puppetclasses' field

        :id: 7b840f3d-413c-40bb-9a7d-cd9dad3c0737

        :expectedresults: Host inherits 'all_puppetclasses' details from the
            HostGroup that was used to create it

        :BZ: 1107708, 1222118, 1487586

        :CaseLevel: System
        """
        # Creating entities like organization, content view and lifecycle_env
        # with plain ASCII names for easier interaction with the puppet
        # environment later in the test
        org = entities.Organization(name=gen_string('alpha')).create()
        location = entities.Location(organization=[org]).create()
        # Creating puppet repository with puppet module assigned to it
        product = entities.Product(organization=org).create()
        puppet_repo = entities.Repository(content_type='puppet',
                                          product=product).create()
        # Working with 'ntp' module as we know for sure that it contains at
        # least few puppet classes
        with open(get_data_file(PUPPET_MODULE_NTP_PUPPETLABS), 'rb') as handle:
            puppet_repo.upload_content(files={'content': handle})

        content_view = entities.ContentView(name=gen_string('alpha'),
                                            organization=org).create()

        result = content_view.available_puppet_modules()['results']
        assert len(result) == 1
        entities.ContentViewPuppetModule(author=result[0]['author'],
                                         name=result[0]['name'],
                                         content_view=content_view).create()
        content_view.publish()
        content_view = content_view.read()
        lc_env = entities.LifecycleEnvironment(name=gen_string('alpha'),
                                               organization=org).create()
        promote(content_view.version[0], lc_env.id)
        content_view = content_view.read()
        assert len(content_view.version) == 1
        assert len(content_view.puppet_module) == 1

        # Form environment name variable for our test
        env_name = f'KT_{org.name}_{lc_env.name}_{content_view.name}_{content_view.id}'

        # Get all environments for current organization.
        # We have two environments (one created after publishing and one more
        # created after promotion), so we need to select the promoted one
        environments = entities.Environment().search(
            query={'organization_id': org.id})
        assert len(environments) == 2
        environments = [
            environment for environment in environments
            if environment.name == env_name
        ]
        assert len(environments) == 1
        environment = environments[0].read()
        environment.location = [location]
        environment.update()

        # Create a host group and its dependencies.
        mac = entity_fields.MACAddressField().gen_value()
        root_pass = entity_fields.StringField(length=(8, 30)).gen_value()
        domain = entities.Domain().create()
        architecture = entities.Architecture().create()
        ptable = entities.PartitionTable().create()
        operatingsystem = entities.OperatingSystem(architecture=[architecture],
                                                   ptable=[ptable]).create()
        medium = entities.Media(operatingsystem=[operatingsystem]).create()
        hostgroup = entities.HostGroup(
            architecture=architecture,
            domain=domain,
            environment=environment,
            location=[location.id],
            medium=medium,
            name=gen_string('alpha'),
            operatingsystem=operatingsystem,
            organization=[org.id],
            ptable=ptable,
        ).create()
        assert len(hostgroup.read_json()['all_puppetclasses']) == 0

        # Get puppet class id for ntp module
        response = client.get(
            environment.path('self') + '/puppetclasses',
            auth=get_credentials(),
            verify=False,
        )
        response.raise_for_status()
        results = response.json()['results']
        puppet_class_id = results['ntp'][0]['id']

        # Assign puppet class
        client.post(
            hostgroup.path('self') + '/puppetclass_ids',
            data={
                'puppetclass_id': puppet_class_id
            },
            auth=get_credentials(),
            verify=False,
        ).raise_for_status()
        hostgroup_attrs = hostgroup.read_json()
        assert len(hostgroup_attrs['all_puppetclasses']) == 1
        assert hostgroup_attrs['all_puppetclasses'][0]['name'] == 'ntp'

        # Create Host entity using HostGroup
        host = entities.Host(
            hostgroup=hostgroup,
            mac=mac,
            root_pass=root_pass,
            environment=environment,
            location=location,
            organization=org,
            content_facet_attributes={
                'content_view_id': content_view.id,
                'lifecycle_environment_id': lc_env.id,
            },
            name=gen_string('alpha'),
        ).create(False)
        host_attrs = host.read_json()
        assert len(host_attrs['all_puppetclasses']) == 1
        assert host_attrs['all_puppetclasses'][0]['name'] == 'ntp'
Example #15
    def test_positive_checksum_sync(self):
        """Synchronize repository to capsule, update repository's checksum
        type, trigger capsule sync and make sure checksum type was updated on
        capsule

        :id: eb07bdf3-6cd8-4a2f-919b-8dfc84e16115

        :customerscenario: true

        :BZ: 1288656, 1664288

        :expectedresults: checksum type is updated in repodata of corresponding
            repository on capsule

        :CaseLevel: System
        """
        repomd_path = 'repodata/repomd.xml'
        # Create organization, product, lce and repository with sha256 checksum
        # type
        org = entities.Organization(smart_proxy=[self.capsule_id]).create()
        product = entities.Product(organization=org).create()
        repo = entities.Repository(
            product=product,
            checksum_type='sha256',
            download_policy='immediate'
        ).create()
        lce = entities.LifecycleEnvironment(organization=org).create()
        # Associate the lifecycle environment with the capsule
        capsule = entities.Capsule(id=self.capsule_id).read()
        capsule.content_add_lifecycle_environment(data={
            'environment_id': lce.id,
        })
        result = capsule.content_lifecycle_environments()
        self.assertGreaterEqual(len(result['results']), 1)
        self.assertIn(
            lce.id, [capsule_lce['id'] for capsule_lce in result['results']])
        # Sync, publish and promote a repo
        cv = entities.ContentView(
            organization=org,
            repository=[repo],
        ).create()
        repo.sync()
        repo = repo.read()
        cv.publish()
        cv = cv.read()
        self.assertEqual(len(cv.version), 1)
        cvv = cv.version[-1].read()
        promote(cvv, lce.id)
        cvv = cvv.read()
        self.assertEqual(len(cvv.environment), 2)
        # Wait till capsule sync finishes
        sync_status = capsule.content_get_sync()
        self.assertTrue(
            len(sync_status['active_sync_tasks']) >= 1
            or sync_status['last_sync_time']
        )
        for task in sync_status['active_sync_tasks']:
            entities.ForemanTask(id=task['id']).poll()
        sync_status = capsule.content_get_sync()
        last_sync_time = sync_status['last_sync_time']
        # Verify repodata's checksum type is sha256, not sha1 on capsule
        lce_repo_path = form_repo_path(
            org=org.label,
            lce=lce.label,
            cv=cv.label,
            prod=product.label,
            repo=repo.label,
        )
        result = ssh.command(
            'grep -o \'checksum type="sha1"\' {}/{}'
            .format(lce_repo_path, repomd_path),
            hostname=self.capsule_ip
        )
        self.assertNotEqual(result.return_code, 0)
        self.assertEqual(len(result.stdout), 0)
        result = ssh.command(
            'grep -o \'checksum type="sha256"\' {}/{}'
            .format(lce_repo_path, repomd_path),
            hostname=self.capsule_ip
        )
        self.assertEqual(result.return_code, 0)
        self.assertGreater(len(result.stdout), 0)
        # Update repo's checksum type to sha1
        repo.checksum_type = 'sha1'
        repo = repo.update(['checksum_type'])
        # Sync, publish and promote repo
        repo.sync()
        cv.publish()
        cv = cv.read()
        self.assertEqual(len(cv.version), 2)
        cv.version.sort(key=lambda version: version.id)
        cvv = cv.version[-1].read()
        promote(cvv, lce.id)
        cvv = cvv.read()
        self.assertEqual(len(cvv.environment), 2)
        # Wait till capsule sync finishes
        sync_status = capsule.content_get_sync()
        self.assertTrue(
            len(sync_status['active_sync_tasks']) >= 1
            or sync_status['last_sync_time'] != last_sync_time
        )
        for task in sync_status['active_sync_tasks']:
            entities.ForemanTask(id=task['id']).poll()
        # Verify repodata's checksum type has updated to sha1 on capsule
        result = ssh.command(
            'grep -o \'checksum type="sha256"\' {}/{}'
            .format(lce_repo_path, repomd_path),
            hostname=self.capsule_ip
        )
        self.assertNotEqual(result.return_code, 0)
        self.assertEqual(len(result.stdout), 0)
        result = ssh.command(
            'grep -o \'checksum type="sha1"\' {}/{}'
            .format(lce_repo_path, repomd_path),
            hostname=self.capsule_ip
        )
        self.assertEqual(result.return_code, 0)
        self.assertGreater(len(result.stdout), 0)
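# A small helper sketch (not part of the original test) that factors out the
# repomd checksum-type check performed above with grep over ssh. It reuses the
# ssh.command helper and the repository path layout already used in this
# example; the helper name itself is hypothetical.
def repomd_has_checksum_type(repo_path, checksum_type, hostname=None):
    """Return True if repodata/repomd.xml under repo_path advertises the given
    checksum type (e.g. 'sha1' or 'sha256')."""
    result = ssh.command(
        'grep -o \'checksum type="{}"\' {}/repodata/repomd.xml'.format(
            checksum_type, repo_path),
        hostname=hostname,
    )
    return result.return_code == 0 and len(result.stdout) > 0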
    def test_positive_remove_cv_version_from_multi_env(self):
        """Remove promoted content view version from multiple environment

        :id: 18b86a68-8e6a-43ea-b95e-188fba125a26

        :Steps:

            1. Create a content view
            2. Add a yum repo and a puppet module to the content view
            3. Publish the content view
            4. Promote the content view version to multiple environments
               Library -> DEV -> QE -> STAGE -> PROD
            5. Remove content view version from QE, STAGE and PROD

        :expectedresults: Content view version exists only in Library and DEV

        :CaseLevel: Integration

        :CaseImportance: Low
        """
        org = entities.Organization().create()
        lce_dev = entities.LifecycleEnvironment(organization=org).create()
        lce_qe = entities.LifecycleEnvironment(organization=org, prior=lce_dev).create()
        lce_stage = entities.LifecycleEnvironment(organization=org, prior=lce_qe).create()
        lce_prod = entities.LifecycleEnvironment(organization=org, prior=lce_stage).create()
        product = entities.Product(organization=org).create()
        yum_repo = entities.Repository(url=FAKE_1_YUM_REPO, product=product).create()
        yum_repo.sync()
        puppet_repo = entities.Repository(
            url=FAKE_0_PUPPET_REPO, content_type='puppet', product=product
        ).create()
        puppet_repo.sync()
        # create a content view and add to it the yum repo
        content_view = entities.ContentView(organization=org).create()
        content_view.repository = [yum_repo]
        content_view = content_view.update(['repository'])
        # get a random puppet module and add it to content view
        puppet_module = random.choice(content_view.available_puppet_modules()['results'])
        entities.ContentViewPuppetModule(
            author=puppet_module['author'], name=puppet_module['name'], content_view=content_view
        ).create()
        # publish the content view
        content_view.publish()
        content_view = content_view.read()
        self.assertEqual(len(content_view.version), 1)
        content_view_version = content_view.version[0].read()
        self.assertEqual(len(content_view_version.environment), 1)
        lce_library = entities.LifecycleEnvironment(
            id=content_view_version.environment[0].id
        ).read()
        self.assertEqual(lce_library.name, ENVIRONMENT)
        # promote content view version to DEV QE STAGE PROD lifecycle
        # environments
        for lce in [lce_dev, lce_qe, lce_stage, lce_prod]:
            promote(content_view_version, lce.id)
        self.assertEqual(
            {lce_library.id, lce_dev.id, lce_qe.id, lce_stage.id, lce_prod.id},
            {lce.id for lce in content_view_version.read().environment},
        )
        # remove the content view version from QE STAGE and PROD environments
        for lce in [lce_qe, lce_stage, lce_prod]:
            content_view.delete_from_environment(lce.id)
        # assert that the content view version exists only in Library and DEV
        # environments
        self.assertEqual(
            {lce_library.id, lce_dev.id},
            {lce.id for lce in content_view_version.read().environment},
        )
Example #17
    def test_positive_capsule_sync(self):
        """Create repository, add it to lifecycle environment, assign lifecycle
        environment with a capsule, sync repository, sync it once again, update
        repository (add 1 new package), sync repository once again.

        :id: 35513099-c918-4a8e-90d0-fd4c87ad2f82

        :customerscenario: true

        :BZ: 1394354

        :expectedresults:

            1. Repository sync triggers capsule sync
            2. After syncing capsule contains same repo content as satellite
            3. Syncing repository which has no changes for a second time does
               not trigger any new publish task
            4. Repository revision on capsule remains exactly the same after
               second repo sync with no changes
            5. Syncing repository which was updated will update the content on
               capsule

        :CaseLevel: System
        """
        repo_name = gen_string('alphanumeric')
        # Create and publish custom repository with 2 packages in it
        repo_url = create_repo(
            repo_name,
            FAKE_1_YUM_REPO,
            FAKE_1_YUM_REPO_RPMS[0:2],
        )
        # Create organization, product, repository in satellite, and lifecycle
        # environment
        org = entities.Organization(smart_proxy=[self.capsule_id]).create()
        product = entities.Product(organization=org).create()
        repo = entities.Repository(
            product=product,
            url=repo_url,
        ).create()
        lce = entities.LifecycleEnvironment(organization=org).create()
        # Associate the lifecycle environment with the capsule
        capsule = entities.Capsule(id=self.capsule_id).read()
        capsule.content_add_lifecycle_environment(data={
            'environment_id': lce.id,
        })
        result = capsule.content_lifecycle_environments()
        self.assertGreaterEqual(len(result['results']), 1)
        self.assertIn(
            lce.id, [capsule_lce['id'] for capsule_lce in result['results']])
        # Create a content view with the repository
        cv = entities.ContentView(
            organization=org,
            repository=[repo],
        ).create()
        # Sync repository
        repo.sync()
        repo = repo.read()
        # Publish new version of the content view
        cv.publish()
        cv = cv.read()
        self.assertEqual(len(cv.version), 1)
        cvv = cv.version[-1].read()
        # Promote content view to lifecycle environment
        promote(cvv, lce.id)
        cvv = cvv.read()
        self.assertEqual(len(cvv.environment), 2)
        # Assert that a task to sync lifecycle environment to the capsule
        # is started (or finished already)
        sync_status = capsule.content_get_sync()
        self.assertTrue(
            len(sync_status['active_sync_tasks']) >= 1
            or sync_status['last_sync_time']
        )
        # Assert that the content of the published content view in
        # lifecycle environment is exactly the same as content of
        # repository
        lce_repo_path = form_repo_path(
            org=org.label,
            lce=lce.label,
            cv=cv.label,
            prod=product.label,
            repo=repo.label,
        )
        cvv_repo_path = form_repo_path(
            org=org.label,
            cv=cv.label,
            cvv=cvv.version,
            prod=product.label,
            repo=repo.label,
        )
        # Wait till capsule sync finishes
        for task in sync_status['active_sync_tasks']:
            entities.ForemanTask(id=task['id']).poll()
        sync_status = capsule.content_get_sync()
        last_sync_time = sync_status['last_sync_time']

        # If BZ1439691 is open, need to sync repo once more, as repodata
        # will change on second attempt even with no changes in repo
        if bz_bug_is_open(1439691):
            repo.sync()
            repo = repo.read()
            cv.publish()
            cv = cv.read()
            self.assertEqual(len(cv.version), 2)
            cv.version.sort(key=lambda version: version.id)
            cvv = cv.version[-1].read()
            promote(cvv, lce.id)
            cvv = cvv.read()
            self.assertEqual(len(cvv.environment), 2)
            sync_status = capsule.content_get_sync()
            self.assertTrue(
                len(sync_status['active_sync_tasks']) >= 1
                or sync_status['last_sync_time'] != last_sync_time
            )
            for task in sync_status['active_sync_tasks']:
                entities.ForemanTask(id=task['id']).poll()
            sync_status = capsule.content_get_sync()
            last_sync_time = sync_status['last_sync_time']

        # Assert that the content published on the capsule is exactly the
        # same as in repository on satellite
        lce_revision_capsule = get_repomd_revision(
            lce_repo_path, hostname=self.capsule_ip)
        self.assertEqual(
            get_repo_files(lce_repo_path, hostname=self.capsule_ip),
            get_repo_files(cvv_repo_path)
        )
        # Sync repository for a second time
        result = repo.sync()
        # Assert that the task summary contains a message that says the
        # publish was skipped because content had not changed
        self.assertEqual(result['result'], 'success')
        self.assertTrue(result['output']['post_sync_skipped'])
        self.assertEqual(
            result['humanized']['output'],
            'No new packages.'
        )
        # Publish a new version of content view
        cv.publish()
        cv = cv.read()
        cv.version.sort(key=lambda version: version.id)
        cvv = cv.version[-1].read()
        # Promote new content view version to lifecycle environment
        promote(cvv, lce.id)
        cvv = cvv.read()
        self.assertEqual(len(cvv.environment), 2)
        # Wait till capsule sync finishes
        sync_status = capsule.content_get_sync()
        tasks = []
        if not sync_status['active_sync_tasks']:
            self.assertNotEqual(
                sync_status['last_sync_time'], last_sync_time)
        else:
            for task in sync_status['active_sync_tasks']:
                tasks.append(entities.ForemanTask(id=task['id']))
                tasks[-1].poll()
        # Assert that the value of repomd revision of repository in
        # lifecycle environment on the capsule has not changed
        new_lce_revision_capsule = get_repomd_revision(
            lce_repo_path, hostname=self.capsule_ip)
        self.assertEqual(lce_revision_capsule, new_lce_revision_capsule)
        # Update a repository with 1 new rpm
        create_repo(
            repo_name,
            FAKE_1_YUM_REPO,
            FAKE_1_YUM_REPO_RPMS[-1:],
        )
        # Sync, publish and promote the repository
        repo.sync()
        repo = repo.read()
        cv.publish()
        cv = cv.read()
        cv.version.sort(key=lambda version: version.id)
        cvv = cv.version[-1].read()
        promote(cvv, lce.id)
        cvv = cvv.read()
        self.assertEqual(len(cvv.environment), 2)
        # Assert that a task to sync lifecycle environment to the capsule
        # is started (or finished already)
        sync_status = capsule.content_get_sync()
        self.assertTrue(
            len(sync_status['active_sync_tasks']) >= 1
            or sync_status['last_sync_time'] != last_sync_time
        )
        # Assert that packages count in the repository is updated
        self.assertEqual(repo.content_counts['package'], 3)
        # Assert that the content of the published content view in
        # lifecycle environment is exactly the same as content of the
        # repository
        cvv_repo_path = form_repo_path(
            org=org.label,
            cv=cv.label,
            cvv=cvv.version,
            prod=product.label,
            repo=repo.label,
        )
        self.assertEqual(
            repo.content_counts['package'],
            cvv.package_count,
        )
        self.assertEqual(
            get_repo_files(lce_repo_path),
            get_repo_files(cvv_repo_path)
        )
        # Wait till capsule sync finishes
        for task in sync_status['active_sync_tasks']:
            entities.ForemanTask(id=task['id']).poll()
        # Assert that the content published on the capsule is exactly the
        # same as in the repository
        self.assertEqual(
            get_repo_files(lce_repo_path, hostname=self.capsule_ip),
            get_repo_files(cvv_repo_path)
        )
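# A consolidation sketch (not from the original tests): the "wait till capsule
# sync finishes" pattern repeated in these examples could be wrapped in a small
# helper. It only uses the entities.Capsule and entities.ForemanTask calls
# shown above; the helper name is hypothetical.
def wait_for_capsule_sync(capsule):
    """Poll every active capsule sync task until it finishes and return the
    refreshed sync status."""
    sync_status = capsule.content_get_sync()
    for task in sync_status['active_sync_tasks']:
        entities.ForemanTask(id=task['id']).poll()
    return capsule.content_get_sync()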
    def test_positive_delete_cv_promoted_to_multi_env(self):
        """Delete published content view with version promoted to multiple
         environments

        :id: c164bd97-e710-4a5a-9c9f-657e6bed804b

        :Steps:

            1. Create a content view
            2. Add a yum repo and a puppet module to the content view
            3. Publish the content view
            4. Promote the content view to multiple environments
               Library -> DEV -> QE -> STAGE -> PROD
            5. Delete the content view; this should delete the content view
               with all its published/promoted versions from all environments

        :expectedresults: The content view doesn't exist

        :CaseLevel: Integration

        :CaseImportance: Critical
        """
        org = entities.Organization().create()
        lce_dev = entities.LifecycleEnvironment(organization=org).create()
        lce_qe = entities.LifecycleEnvironment(organization=org, prior=lce_dev).create()
        lce_stage = entities.LifecycleEnvironment(organization=org, prior=lce_qe).create()
        lce_prod = entities.LifecycleEnvironment(organization=org, prior=lce_stage).create()
        product = entities.Product(organization=org).create()
        yum_repo = entities.Repository(url=FAKE_1_YUM_REPO, product=product).create()
        yum_repo.sync()
        puppet_repo = entities.Repository(
            url=FAKE_0_PUPPET_REPO, content_type='puppet', product=product
        ).create()
        puppet_repo.sync()
        # create a content view and add to it the yum repo
        content_view = entities.ContentView(organization=org).create()
        content_view.repository = [yum_repo]
        content_view = content_view.update(['repository'])
        # get a random puppet module and add it to content view
        puppet_module = random.choice(content_view.available_puppet_modules()['results'])
        entities.ContentViewPuppetModule(
            author=puppet_module['author'], name=puppet_module['name'], content_view=content_view
        ).create()
        # publish the content view
        content_view.publish()
        content_view = content_view.read()
        self.assertEqual(len(content_view.version), 1)
        content_view_version = content_view.version[0].read()
        self.assertEqual(len(content_view_version.environment), 1)
        lce_library = entities.LifecycleEnvironment(
            id=content_view_version.environment[0].id
        ).read()
        self.assertEqual(lce_library.name, ENVIRONMENT)
        # promote content view version to DEV QE STAGE PROD lifecycle
        # environments
        for lce in [lce_dev, lce_qe, lce_stage, lce_prod]:
            promote(content_view_version, lce.id)
        content_view_version = content_view_version.read()
        self.assertEqual(
            {lce_library.id, lce_dev.id, lce_qe.id, lce_stage.id, lce_prod.id},
            {lce.id for lce in content_view_version.environment},
        )
        # remove content view version from all lifecycle environments
        for lce in content_view_version.environment:
            content_view.delete_from_environment(lce.id)
        # delete the content view
        content_view.delete()
        with self.assertRaises(HTTPError):
            content_view.read()
Example #19
    def test_positive_on_demand_sync(self):
        """Create a repository with 'on_demand' sync, add it to lifecycle
        environment with a capsule, sync repository, examine existing packages
        on capsule, download any package, examine packages once more

        :id: ba470269-a7ad-4181-bc7c-8e17a177ca20

        :expectedresults:

            1. After the initial sync only symlinks are present on both
               satellite and capsule; no real packages are fetched.
            2. All the symlinks point to non-existent files.
            3. Attempt to download package is successful
            4. Downloaded package checksum matches checksum of the source
               package

        :CaseLevel: System
        """
        repo_url = FAKE_3_YUM_REPO
        packages_count = FAKE_3_YUM_REPOS_COUNT
        package = FAKE_1_YUM_REPO_RPMS[0]
        # Create organization, product, repository in satellite, and lifecycle
        # environment
        org = entities.Organization().create()
        prod = entities.Product(organization=org).create()
        repo = entities.Repository(
            download_policy='on_demand',
            mirror_on_sync=True,
            product=prod,
            url=repo_url,
        ).create()
        lce = entities.LifecycleEnvironment(organization=org).create()
        # Associate the lifecycle environment with the capsule
        capsule = entities.Capsule(id=self.capsule_id).read()
        capsule.content_add_lifecycle_environment(data={
            'environment_id': lce.id,
        })
        result = capsule.content_lifecycle_environments()
        self.assertGreaterEqual(len(result['results']), 1)
        self.assertIn(
            lce.id,
            [capsule_lce['id'] for capsule_lce in result['results']]
        )
        # Create a content view with the repository
        cv = entities.ContentView(
            organization=org,
            repository=[repo],
        ).create()
        # Sync repository
        repo.sync()
        repo = repo.read()
        # Publish new version of the content view
        cv.publish()
        cv = cv.read()
        self.assertEqual(len(cv.version), 1)
        cvv = cv.version[-1].read()
        # Promote content view to lifecycle environment
        promote(cvv, lce.id)
        cvv = cvv.read()
        self.assertEqual(len(cvv.environment), 2)
        # Assert that a task to sync lifecycle environment to the capsule
        # is started (or finished already)
        sync_status = capsule.content_get_sync()
        self.assertTrue(
            len(sync_status['active_sync_tasks']) >= 1
            or sync_status['last_sync_time']
        )
        # Check whether the symlinks for all the packages were created on
        # satellite
        cvv_repo_path = form_repo_path(
            org=org.label,
            cv=cv.label,
            cvv=cvv.version,
            prod=prod.label,
            repo=repo.label,
        )
        result = ssh.command('find {}/ -type l'.format(cvv_repo_path))
        self.assertEqual(result.return_code, 0)
        links = set(link for link in result.stdout if link)
        self.assertEqual(len(links), packages_count)
        # Ensure all the symlinks on satellite are broken (pointing to
        # nonexistent files)
        result = ssh.command(
            'find {}/ -type l ! -exec test -e {{}} \\; -print'
            .format(cvv_repo_path)
        )
        self.assertEqual(result.return_code, 0)
        broken_links = set(link for link in result.stdout if link)
        self.assertEqual(len(broken_links), packages_count)
        self.assertEqual(broken_links, links)
        # Wait till capsule sync finishes
        for task in sync_status['active_sync_tasks']:
            entities.ForemanTask(id=task['id']).poll()
        lce_repo_path = form_repo_path(
            org=org.label,
            lce=lce.label,
            cv=cv.label,
            prod=prod.label,
            repo=repo.label,
        )
        # Check whether the symlinks for all the packages were created on
        # capsule
        result = ssh.command(
            'find {}/ -type l'.format(lce_repo_path),
            hostname=self.capsule_ip,
        )
        self.assertEqual(result.return_code, 0)
        links = set(link for link in result.stdout if link)
        self.assertEqual(len(links), packages_count)
        # Ensure all the symlinks on capsule are broken (pointing to
        # nonexistent files)
        result = ssh.command(
            'find {}/ -type l ! -exec test -e {{}} \\; -print'
            .format(lce_repo_path),
            hostname=self.capsule_ip,
        )
        self.assertEqual(result.return_code, 0)
        broken_links = set(link for link in result.stdout if link)
        self.assertEqual(len(broken_links), packages_count)
        self.assertEqual(broken_links, links)
        # Construct the URL of the repository as published on satellite
        published_repo_url = 'http://{}{}/pulp/{}/'.format(
            settings.server.hostname,
            ':{}'.format(settings.server.port) if settings.server.port else '',
            lce_repo_path.split('http/')[1]
        )
        # Get md5 checksum of the source package
        package_md5 = md5_by_url('{}{}'.format(repo_url, package))
        # Get md5 checksum of the package as published on satellite
        published_package_md5 = md5_by_url(
            '{}{}'.format(published_repo_url, package))
        # Assert checksums are matching
        self.assertEqual(package_md5, published_package_md5)
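# A rough stand-in sketch for the md5_by_url helper used above (hypothetical;
# the real helper may differ). It only illustrates the checksum comparison
# idea: download a file over HTTP and hash its bytes.
import hashlib
import urllib.request

def md5_of_url(url):
    """Download the file at url and return its hex MD5 digest."""
    with urllib.request.urlopen(url) as response:
        return hashlib.md5(response.read()).hexdigest()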
Example #20
def test_positive_VM_import(session, module_ca_cert, module_org,
                            rhev_data, version):
    """Import an existing VM as a Host

    :id: 47aea4b7-9258-4863-8966-9a0bc9e94116

    :expectedresults: VM is shown as Host in Foreman

    :CaseLevel: Integration
    """
    # create entities for hostgroup
    location = entities.Location().create()
    default_loc_id = entities.Location().search(
        query={'search': 'name="{}"'.format(DEFAULT_LOC)})[0].id
    entities.SmartProxy(id=1, location=[default_loc_id, location.id]).update()
    domain = entities.Domain(organization=[module_org.id],
                             location=[location]).create()
    subnet = entities.Subnet(organization=[module_org.id],
                             location=[location], domain=[domain]).create()
    architecture = entities.Architecture().create()
    ptable = entities.PartitionTable(organization=[module_org.id],
                                     location=[location]).create()
    operatingsystem = entities.OperatingSystem(architecture=[architecture],
                                               ptable=[ptable]).create()
    medium = entities.Media(organization=[module_org.id], location=[location],
                            operatingsystem=[operatingsystem]).create()
    le = entities.LifecycleEnvironment(name="Library",
                                       organization=module_org.id).search()[0].read().id
    cv = entities.ContentView(organization=[module_org.id]).create()
    cv.publish()

    # create hostgroup
    hostgroup_name = gen_string('alpha')
    entities.HostGroup(
        name=hostgroup_name,
        architecture=architecture,
        domain=domain,
        subnet=subnet,
        location=[location.id],
        medium=medium,
        operatingsystem=operatingsystem,
        organization=[module_org],
        ptable=ptable,
        lifecycle_environment=le,
        content_view=cv,
        content_source=1,
    ).create()

    name = gen_string('alpha')
    with session:

        session.computeresource.create({
            'name': name,
            'provider': FOREMAN_PROVIDERS['rhev'],
            'provider_content.url': rhev_data['rhev_url'],
            'provider_content.user': rhev_data['username'],
            'provider_content.password': rhev_data['password'],
            'provider_content.api4': version,
            'provider_content.datacenter.value': rhev_data['datacenter'],
            'provider_content.certification_authorities': module_ca_cert,
            'locations.resources.assigned': [location.name],
        })
        session.hostgroup.update(hostgroup_name, {'deploy_on': name+" (RHV)"})
        session.computeresource.vm_import(name, rhev_data['vm_name'],
                                          hostgroup_name, location.name)
        assert session.host.search(rhev_data['vm_name']) is not None
    entities.Host(name=rhev_data['vm_name']).search()[0].delete()
Example #21
    def test_positive_update_with_immediate_sync(self):
        """Create a repository with on_demand download policy, associate it
        with capsule, sync repo, update download policy to immediate, sync once
        more.

        :id: 511b531d-1fbe-4d64-ae31-0f9eb6625e7f

        :customerscenario: true

        :BZ: 1315752

        :expectedresults: content was successfully synchronized - capsule
            filesystem contains valid links to packages

        :CaseLevel: System
        """
        repo_url = FAKE_1_YUM_REPO
        packages_count = FAKE_1_YUM_REPOS_COUNT
        # Create organization, product, repository in satellite, and lifecycle
        # environment
        org = entities.Organization().create()
        prod = entities.Product(organization=org).create()
        repo = entities.Repository(
            download_policy='on_demand',
            mirror_on_sync=True,
            product=prod,
            url=repo_url,
        ).create()
        lce = entities.LifecycleEnvironment(organization=org).create()
        # Update capsule's download policy to on_demand to match repository's
        # policy
        self.update_capsule_download_policy(self.capsule_id, 'on_demand')
        # Associate the lifecycle environment with the capsule
        capsule = entities.Capsule(id=self.capsule_id).read()
        capsule.content_add_lifecycle_environment(data={
            'environment_id': lce.id,
        })
        result = capsule.content_lifecycle_environments()
        self.assertGreaterEqual(len(result['results']), 1)
        self.assertIn(
            lce.id,
            [capsule_lce['id'] for capsule_lce in result['results']]
        )
        # Create a content view with the repository
        cv = entities.ContentView(
            organization=org,
            repository=[repo],
        ).create()
        # Sync repository
        repo.sync()
        repo = repo.read()
        # Publish new version of the content view
        cv.publish()
        cv = cv.read()
        self.assertEqual(len(cv.version), 1)
        cvv = cv.version[-1].read()
        # Promote content view to lifecycle environment
        promote(cvv, lce.id)
        cvv = cvv.read()
        self.assertEqual(len(cvv.environment), 2)
        # Assert that a task to sync lifecycle environment to the capsule
        # is started (or finished already)
        sync_status = capsule.content_get_sync()
        self.assertTrue(
            len(sync_status['active_sync_tasks']) >= 1
            or sync_status['last_sync_time']
        )
        # Wait till capsule sync finishes
        for task in sync_status['active_sync_tasks']:
            entities.ForemanTask(id=task['id']).poll()
        # Update download policy to 'immediate'
        repo.download_policy = 'immediate'
        repo = repo.update(['download_policy'])
        self.assertEqual(repo.download_policy, 'immediate')
        # Update capsule's download policy as well
        self.update_capsule_download_policy(self.capsule_id, 'immediate')
        # Make sure to revert capsule's download policy after the test as the
        # capsule is shared among other tests
        self.addCleanup(
            self.update_capsule_download_policy, self.capsule_id, 'on_demand')
        # Sync repository once again
        repo.sync()
        repo = repo.read()
        # Publish new version of the content view
        cv.publish()
        cv = cv.read()
        self.assertEqual(len(cv.version), 2)
        cv.version.sort(key=lambda version: version.id)
        cvv = cv.version[-1].read()
        # Promote content view to lifecycle environment
        promote(cvv, lce.id)
        cvv = cvv.read()
        self.assertEqual(len(cvv.environment), 2)
        # Assert that a task to sync lifecycle environment to the capsule
        # is started (or finished already)
        sync_status = capsule.content_get_sync()
        self.assertTrue(
            len(sync_status['active_sync_tasks']) >= 1
            or sync_status['last_sync_time']
        )
        # Check whether the symlinks for all the packages were created on
        # satellite
        cvv_repo_path = form_repo_path(
            org=org.label,
            cv=cv.label,
            cvv=cvv.version,
            prod=prod.label,
            repo=repo.label,
        )
        result = ssh.command('find {}/ -type l'.format(cvv_repo_path))
        self.assertEqual(result.return_code, 0)
        links = set(link for link in result.stdout if link)
        self.assertEqual(len(links), packages_count)
        # Ensure there're no broken symlinks (pointing to nonexistent files) on
        # satellite
        result = ssh.command(
            'find {}/ -type l ! -exec test -e {{}} \\; -print'.format(
                cvv_repo_path))
        self.assertEqual(result.return_code, 0)
        broken_links = set(link for link in result.stdout if link)
        self.assertEqual(len(broken_links), 0)
        # Wait till capsule sync finishes
        for task in sync_status['active_sync_tasks']:
            entities.ForemanTask(id=task['id']).poll()
        lce_repo_path = form_repo_path(
            org=org.label,
            lce=lce.label,
            cv=cv.label,
            prod=prod.label,
            repo=repo.label,
        )
        # Check whether the symlinks for all the packages were created on
        # capsule
        result = ssh.command('find {}/ -type l'.format(lce_repo_path),
                             hostname=self.capsule_ip)
        self.assertEqual(result.return_code, 0)
        links = set(link for link in result.stdout if link)
        self.assertEqual(len(links), packages_count)
        # Ensure there're no broken symlinks (pointing to nonexistent files) on
        # capsule
        result = ssh.command(
            'find {}/ -type l ! -exec test -e {{}} \\; -print'.format(
                lce_repo_path), hostname=self.capsule_ip)
        self.assertEqual(result.return_code, 0)
        broken_links = set(link for link in result.stdout if link)
        self.assertEqual(len(broken_links), 0)
Example #22
    def test_positive_end_to_end(self, fake_manifest_is_set, default_sat,
                                 rhel6_contenthost):
        """Perform end to end smoke tests using RH and custom repos.

        1. Create a new user with admin permissions
        2. Using the new user from above
            1. Create a new organization
            2. Clone and upload manifest
            3. Create a new lifecycle environment
            4. Create a custom product
            5. Create a custom YUM repository
            6. Enable a Red Hat repository
            7. Synchronize the three repositories
            8. Create a new content view
            9. Associate the YUM and Red Hat repositories to new content view
            10. Publish content view
            11. Promote content view to the lifecycle environment
            12. Create a new activation key
            13. Add the products to the activation key
            14. Create a new libvirt compute resource
            15. Create a new subnet
            16. Create a new domain
            17. Create a new hostgroup and associate previous entities to it
            18. Provision a client  **  NOT CURRENTLY PROVISIONING

        :id: b2f73740-d3ce-4e6e-abc7-b23e5562bac1

        :expectedresults: All tests should succeed and Content should be
            successfully fetched by client.
        """
        # step 1: Create a new user with admin permissions
        login = gen_string('alphanumeric')
        password = gen_string('alphanumeric')
        entities.User(admin=True, login=login, password=password).create()

        # step 2.1: Create a new organization
        server_config = get_nailgun_config()
        server_config.auth = (login, password)
        org = entities.Organization(server_config).create()

        # step 2.2: Clone and upload manifest
        if fake_manifest_is_set:
            with manifests.clone() as manifest:
                upload_manifest(org.id, manifest.content)

        # step 2.3: Create a new lifecycle environment
        le1 = entities.LifecycleEnvironment(server_config,
                                            organization=org).create()

        # step 2.4: Create a custom product
        prod = entities.Product(server_config, organization=org).create()
        repositories = []

        # step 2.5: Create custom YUM repository
        repo1 = entities.Repository(server_config,
                                    product=prod,
                                    content_type='yum',
                                    url=CUSTOM_RPM_REPO).create()
        repositories.append(repo1)

        # step 2.6: Enable a Red Hat repository
        if fake_manifest_is_set:
            repo3 = entities.Repository(id=enable_rhrepo_and_fetchid(
                basearch='x86_64',
                org_id=org.id,
                product=PRDS['rhel'],
                repo=REPOS['rhva6']['name'],
                reposet=REPOSET['rhva6'],
                releasever='6Server',
            ))
            repositories.append(repo3)

        # step 2.7: Synchronize the three repositories
        for repo in repositories:
            repo.sync()

        # step 2.8: Create content view
        content_view = entities.ContentView(server_config,
                                            organization=org).create()

        # step 2.9: Associate the YUM and Red Hat repositories to new content
        # view
        content_view.repository = repositories
        content_view = content_view.update(['repository'])

        # step 2.10: Publish content view
        content_view.publish()

        # step 2.11: Promote content view to the lifecycle environment
        content_view = content_view.read()
        assert len(content_view.version) == 1
        cv_version = content_view.version[0].read()
        assert len(cv_version.environment) == 1
        promote(cv_version, le1.id)
        # check that content view exists in lifecycle
        content_view = content_view.read()
        assert len(content_view.version) == 1
        cv_version = cv_version.read()

        # step 2.12: Create a new activation key
        activation_key_name = gen_string('alpha')
        activation_key = entities.ActivationKey(
            name=activation_key_name,
            environment=le1,
            organization=org,
            content_view=content_view).create()

        # step 2.13: Add the products to the activation key
        for sub in entities.Subscription(organization=org).search():
            if sub.name == DEFAULT_SUBSCRIPTION_NAME:
                activation_key.add_subscriptions(data={
                    'quantity': 1,
                    'subscription_id': sub.id
                })
                break
        # step 2.13.1: Enable product content
        if fake_manifest_is_set:
            activation_key.content_override(
                data={
                    'content_overrides': [{
                        'content_label': AK_CONTENT_LABEL,
                        'value': '1'
                    }]
                })

        # BONUS: Create a content host and associate it with promoted
        # content view and last lifecycle where it exists
        content_host = entities.Host(
            content_facet_attributes={
                'content_view_id': content_view.id,
                'lifecycle_environment_id': le1.id,
            },
            organization=org,
        ).create()
        # check that content view matches what we passed
        assert content_host.content_facet_attributes[
            'content_view_id'] == content_view.id
        # check that lifecycle environment matches
        assert content_host.content_facet_attributes[
            'lifecycle_environment_id'] == le1.id

        # step 2.14: Create a new libvirt compute resource
        entities.LibvirtComputeResource(
            server_config,
            url=f'qemu+ssh://root@{settings.libvirt.libvirt_hostname}/system',
        ).create()

        # step 2.15: Create a new subnet
        subnet = entities.Subnet(server_config).create()

        # step 2.16: Create a new domain
        domain = entities.Domain(server_config).create()

        # step 2.17: Create a new hostgroup and associate previous entities to
        # it
        entities.HostGroup(server_config, domain=domain,
                           subnet=subnet).create()

        # step 2.18: Provision a client
        # TODO this isn't provisioning through satellite as intended
        # Note it wasn't well before the change that added this todo
        rhel6_contenthost.install_katello_ca(default_sat)
        # Register client with foreman server using act keys
        rhel6_contenthost.register_contenthost(org.label, activation_key_name)
        assert rhel6_contenthost.subscribed
        # Install rpm on client
        package_name = 'python-kitchen'
        result = rhel6_contenthost.execute(f'yum install -y {package_name}')
        assert result.status == 0
        # Verify that the package is installed by querying it
        result = rhel6_contenthost.run(f'rpm -q {package_name}')
        assert result.status == 0
Example #23
def test_positive_get_count_for_host(module_org, rhel6_contenthost):
    """Available errata count when retrieving Host

    :id: 2f35933f-8026-414e-8f75-7f4ec048faae

    :Setup:

        1. Errata synced on satellite server.
        2. Some Content hosts present.

    :Steps: GET /api/v2/hosts

    :expectedresults: The available errata count is retrieved.

    :CaseLevel: System
    """
    org = entities.Organization().create()
    env = entities.LifecycleEnvironment(organization=org).create()
    content_view = entities.ContentView(organization=org).create()
    activation_key = entities.ActivationKey(environment=env, organization=org).create()
    setup_org_for_a_rh_repo(
        {
            'product': constants.PRDS['rhel'],
            'repository-set': constants.REPOSET['rhst6'],
            'repository': constants.REPOS['rhst6']['name'],
            'organization-id': org.id,
            'content-view-id': content_view.id,
            'lifecycle-environment-id': env.id,
            'activationkey-id': activation_key.id,
        },
        force_manifest_upload=True,
    )
    setup_org_for_a_custom_repo(
        {
            'url': CUSTOM_REPO_URL,
            'organization-id': org.id,
            'content-view-id': content_view.id,
            'lifecycle-environment-id': env.id,
            'activationkey-id': activation_key.id,
        }
    )
    repo_id = enable_rhrepo_and_fetchid(
        basearch=constants.DEFAULT_ARCHITECTURE,
        org_id=org.id,
        product=constants.PRDS['rhel'],
        repo=constants.REPOS['rhva6']['name'],
        reposet=constants.REPOSET['rhva6'],
        releasever=constants.DEFAULT_RELEASE_VERSION,
    )
    repo = entities.Repository(id=repo_id)
    assert repo.sync()['result'] == 'success'
    content_view = content_view.read()
    content_view.repository.append(repo)
    content_view = content_view.update(['repository'])
    content_view.publish()
    versions = sorted(content_view.read().version, key=lambda ver: ver.id)
    cvv = versions[-1].read()
    promote(cvv, env.id)
    rhel6_contenthost.install_katello_ca()
    rhel6_contenthost.register_contenthost(org.label, activation_key.name)
    assert rhel6_contenthost.subscribed
    rhel6_contenthost.enable_repo(constants.REPOS['rhst6']['id'])
    rhel6_contenthost.enable_repo(constants.REPOS['rhva6']['id'])
    rhel6_contenthost.install_katello_agent()
    host = rhel6_contenthost.nailgun_host
    for errata in ('security', 'bugfix', 'enhancement'):
        _validate_errata_counts(module_org, host, errata_type=errata, expected_value=0)
    rhel6_contenthost.run(f'yum install -y {constants.FAKE_1_CUSTOM_PACKAGE}')
    _validate_errata_counts(module_org, host, errata_type='security', expected_value=1)
    rhel6_contenthost.run(f'yum install -y {constants.REAL_0_RH_PACKAGE}')
    for errata in ('bugfix', 'enhancement'):
        _validate_errata_counts(module_org, host, errata_type=errata, expected_value=1)
Example #24
def configure_provisioning(org=None, loc=None, compute=False, os=None):
    """Create and configure org, loc, product, repo, cv, env. Update proxy,
    domain, subnet, compute resource, provision templates and medium with
    previously created entities and create a hostgroup using all mentioned
    entities.

    :param str org: Default Organization that should be used in both host
        discovering and host provisioning procedures
    :param str loc: Default Location that should be used in both host
        discovering and host provisioning procedures
    :param bool compute: If False creates a default Libvirt compute resource
    :param str os: Specify the os to be used while provisioning and to
        associate related entities to the specified os.
    :return: Dict of names of the created entities that can be re-used further
        in provisioning or validation procedures (e.g. hostgroup or domain)
    """
    """
    # Create new organization and location in case they were not passed
    if org is None:
        org = entities.Organization().create()
    if loc is None:
        loc = entities.Location(organization=[org]).create()
    if settings.repos.rhel7_os is None:
        raise ImproperlyConfigured(
            'settings file is not configured for rhel os')
    # Create a new Life-Cycle environment
    lc_env = entities.LifecycleEnvironment(organization=org).create()
    # Create a Product, Repository for custom RHEL7 contents
    product = entities.Product(organization=org).create()
    repo = entities.Repository(product=product,
                               url=settings.repos.rhel7_os,
                               download_policy='immediate').create()

    # Increased timeout value for repo sync and CV publishing and promotion
    try:
        old_task_timeout = entity_mixins.TASK_TIMEOUT
        entity_mixins.TASK_TIMEOUT = 3600
        repo.sync()
        # Create, Publish and promote CV
        content_view = entities.ContentView(organization=org).create()
        content_view.repository = [repo]
        content_view = content_view.update(['repository'])
        content_view.publish()
        content_view = content_view.read()
        promote(content_view.version[0], lc_env.id)
    finally:
        entity_mixins.TASK_TIMEOUT = old_task_timeout
    # Search for an existing puppet environment in the organization; otherwise
    # create a new one and associate the organization and location with it.
    environments = entities.Environment().search(query=dict(
        search=f'organization_id={org.id}'))
    if len(environments) > 0:
        environment = environments[0].read()
        environment.location.append(loc)
        environment = environment.update(['location'])
    else:
        environment = entities.Environment(organization=[org],
                                           location=[loc]).create()

    # Search for SmartProxy, and associate location
    proxy = entities.SmartProxy().search(
        query={'search': f'name={settings.server.hostname}'})
    proxy = proxy[0].read()
    if loc.id not in [location.id for location in proxy.location]:
        proxy.location.append(loc)
    if org.id not in [organization.id for organization in proxy.organization]:
        proxy.organization.append(org)
    proxy = proxy.update(['location', 'organization'])

    # Search for an existing domain or create a new one otherwise. Associate
    # org, location and DNS proxy with it.
    _, _, domain = settings.server.hostname.partition('.')
    domain = entities.Domain().search(query={'search': f'name="{domain}"'})
    if len(domain) == 1:
        domain = domain[0].read()
        domain.location.append(loc)
        domain.organization.append(org)
        domain.dns = proxy
        domain = domain.update(['dns', 'location', 'organization'])
    else:
        domain = entities.Domain(dns=proxy, location=[loc],
                                 organization=[org]).create()

    # Search for a subnet defined with the given network. If found, update its
    # relevant fields; otherwise create a new subnet.
    network = settings.vlan_networking.subnet
    subnet = entities.Subnet().search(query={'search': f'network={network}'})
    if len(subnet) == 1:
        subnet = subnet[0].read()
        subnet.domain = [domain]
        subnet.location.append(loc)
        subnet.organization.append(org)
        subnet.dns = proxy
        subnet.dhcp = proxy
        subnet.tftp = proxy
        subnet.discovery = proxy
        subnet.ipam = 'DHCP'
        subnet = subnet.update([
            'domain', 'discovery', 'dhcp', 'dns', 'location', 'organization',
            'tftp', 'ipam'
        ])
    else:
        # Create new subnet
        subnet = entities.Subnet(
            network=network,
            mask=settings.vlan_networking.netmask,
            domain=[domain],
            location=[loc],
            organization=[org],
            dns=proxy,
            dhcp=proxy,
            tftp=proxy,
            discovery=proxy,
            ipam='DHCP',
        ).create()

    # Search for an existing Libvirt compute resource. If found, update its
    # relevant fields; otherwise create a new compute resource with the
    # 'libvirt' provider. The compute flag exists so that tests which depend on
    # a Libvirt resource are not blocked while other compute resources can
    # reuse this same functionality.
    if compute is False:
        resource_url = f'qemu+ssh://root@{settings.libvirt.libvirt_hostname}/system'
        comp_res = [
            res for res in entities.LibvirtComputeResource().search()
            if res.provider == 'Libvirt' and res.url == resource_url
        ]
        if len(comp_res) > 0:
            computeresource = entities.LibvirtComputeResource(
                id=comp_res[0].id).read()
            computeresource.location.append(loc)
            computeresource.organization.append(org)
            computeresource.update(['location', 'organization'])
        else:
            # Create Libvirt compute-resource
            entities.LibvirtComputeResource(
                provider='libvirt',
                url=resource_url,
                set_console_password=False,
                display_type='VNC',
                location=[loc.id],
                organization=[org.id],
            ).create()

    # Get the Partition table ID
    ptable = (entities.PartitionTable().search(
        query={'search': f'name="{DEFAULT_PTABLE}"'})[0].read())
    if loc.id not in [location.id for location in ptable.location]:
        ptable.location.append(loc)
    if org.id not in [organization.id for organization in ptable.organization]:
        ptable.organization.append(org)
    ptable = ptable.update(['location', 'organization'])

    # Get the OS ID
    if os is None:
        os = (entities.OperatingSystem().search(
            query={
                'search':
                'name="RedHat" AND (major="{}" OR major="{}")'.format(
                    RHEL_6_MAJOR_VERSION, RHEL_7_MAJOR_VERSION)
            })[0].read())
    else:
        os_ver = os.split(' ')[1].split('.')
        os = (entities.OperatingSystem().search(
            query={
                'search':
                f'family="Redhat" AND major="{os_ver[0]}" AND minor="{os_ver[1]}")'
            })[0].read())

    # Get the Provisioning template_ID and update with OS, Org, Location
    provisioning_template = entities.ProvisioningTemplate().search(
        query={'search': f'name="{DEFAULT_TEMPLATE}"'})
    provisioning_template = provisioning_template[0].read()
    provisioning_template.operatingsystem.append(os)
    if org.id not in [
            organization.id
            for organization in provisioning_template.organization
    ]:
        provisioning_template.organization.append(org)
    if loc.id not in [
            location.id for location in provisioning_template.location
    ]:
        provisioning_template.location.append(loc)
    provisioning_template = provisioning_template.update(
        ['location', 'operatingsystem', 'organization'])

    # Get the PXE template ID and update with OS, Org, location
    pxe_template = entities.ProvisioningTemplate().search(
        query={'search': f'name="{DEFAULT_PXE_TEMPLATE}"'})
    pxe_template = pxe_template[0].read()
    pxe_template.operatingsystem.append(os)
    if org.id not in [
            organization.id for organization in pxe_template.organization
    ]:
        pxe_template.organization.append(org)
    if loc.id not in [location.id for location in pxe_template.location]:
        pxe_template.location.append(loc)
    pxe_template = pxe_template.update(
        ['location', 'operatingsystem', 'organization'])

    # Get the arch ID
    arch = (entities.Architecture().search(
        query={'search': f'name="{DEFAULT_ARCHITECTURE}"'})[0].read())

    # Update the OS to associate arch, ptable, templates
    os.architecture.append(arch)
    os.ptable.append(ptable)
    os.provisioning_template.append(provisioning_template)
    os.provisioning_template.append(pxe_template)
    os = os.update(['architecture', 'provisioning_template', 'ptable'])
    # kickstart_repository is the content view and lce bind repo
    kickstart_repository = entities.Repository().search(
        query=dict(content_view_id=content_view.id,
                   environment_id=lc_env.id,
                   name=repo.name))[0]
    # Create Hostgroup
    host_group = entities.HostGroup(
        architecture=arch,
        domain=domain.id,
        subnet=subnet.id,
        lifecycle_environment=lc_env.id,
        content_view=content_view.id,
        location=[loc.id],
        environment=environment.id,
        puppet_proxy=proxy,
        puppet_ca_proxy=proxy,
        content_source=proxy,
        kickstart_repository=kickstart_repository,
        root_pass=gen_string('alphanumeric'),
        operatingsystem=os.id,
        organization=[org.id],
        ptable=ptable.id,
    ).create()

    return {
        'host_group': host_group.name,
        'domain': domain.name,
        'environment': environment.name,
        'ptable': ptable.name,
        'subnet': subnet.name,
        'os': os.title,
    }
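# A minimal usage sketch (hypothetical, not part of the original module): the
# dict returned by configure_provisioning() maps names of created entities that
# a caller can look up again, e.g. the hostgroup created for provisioning.
provisioning_info = configure_provisioning()
host_group = entities.HostGroup().search(
    query={'search': 'name="{}"'.format(provisioning_info['host_group'])}
)[0].read()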
    def test_pre_scenario_preclient_package_installation(self):
        """Create product and repo from which the package will be installed
        post upgrade

        :id: preupgrade-eedab638-fdc9-41fa-bc81-75dd2790f7be

        :steps:

            1. Create a content host with existing client ak
            2. Create and sync repo from which the package will be
                installed on content host
            3. Add the repo to the CV and then to the Activation key

        :expectedresults:

            1. The content host is created
            2. The new repo and its product have been added to the AK used to
                create the content host

        """
        prior_env = entities.LifecycleEnvironment(
            organization=self.org
        ).search(query={'search': 'name=Library'})[0]
        environment = entities.LifecycleEnvironment(
            organization=self.org,
            prior=prior_env.id,
            label=self.le_lable,
            name=self.le_name
        ).create()
        ak = create_activation_key_for_client_registration(
            ak_name=self.ak_name,
            client_os='rhel7',
            org=self.org,
            environment=environment,
            sat_state='pre'
        )
        rhel7_client = dockerize(
            ak_name=ak.name, distro='rhel7', org_label=self.org.label)
        # dict views are not indexable on Python 3, so convert to a list first
        client_container_id = list(rhel7_client.values())[0]
        client_name = list(rhel7_client.keys())[0]
        product, yum_repo = create_yum_test_repo(
            product_name=self.prod_name, repo_url=FAKE_REPO_ZOO3, org=self.org)
        update_product_subscription_in_ak(
            product=product, yum_repo=yum_repo, ak=ak, org=self.org)
        time.sleep(30)
        execute(
            attach_subscription_to_host_from_satellite,
            self.org.id,
            product.name,
            client_name,
            host=get_satellite_host()
        )
        # Refresh subscriptions on client
        execute(
            docker_execute_command,
            client_container_id,
            'subscription-manager refresh',
            host=self.docker_vm
        )
        # Run goferd on client as its docker container
        execute(
            docker_execute_command,
            client_container_id,
            'goferd -f',
            async=True,
            host=self.docker_vm
        )
        create_dict(
            {self.__class__.__name__: rhel7_client}
        )
Example #26
    def test_positive_sync_puppet_module_with_versions(self):
        """Ensure it's possible to sync multiple versions of the same puppet
        module to the capsule

        :id: 83a0ddd6-8a6a-43a0-b169-094a2556dd28

        :customerscenario: true

        :BZ: 1365952

        :Steps:

            1. Register a capsule
            2. Associate LCE with the capsule
            3. Sync a puppet module with multiple versions
            4. Publish a CV with one version of puppet module and promote it to
               capsule's LCE
            5. Wait for capsule synchronization to finish
            6. Publish another CV with different version of puppet module and
               promote it to capsule's LCE
            7. Wait for capsule synchronization to finish once more

        :expectedresults: Capsule was successfully synchronized, new version of
            puppet module is present on capsule

        :CaseLevel: System
        """
        module_name = 'versioned'
        module_versions = ['2.2.2', '3.3.3']
        org = entities.Organization().create()
        lce = entities.LifecycleEnvironment(organization=org).create()
        content_view = entities.ContentView(organization=org).create()
        prod = entities.Product(organization=org).create()
        puppet_repository = entities.Repository(
            content_type=REPO_TYPE['puppet'],
            product=prod,
            url=CUSTOM_PUPPET_REPO,
        ).create()
        capsule = entities.Capsule(id=self.capsule_id).read()
        capsule.content_add_lifecycle_environment(data={
            'environment_id': lce.id,
        })
        result = capsule.content_lifecycle_environments()
        self.assertGreaterEqual(len(result['results']), 1)
        self.assertIn(
            lce.id, [capsule_lce['id'] for capsule_lce in result['results']])
        puppet_repository.sync()
        puppet_module_old = entities.PuppetModule().search(query={
            'search': 'name={} and version={}'
                      .format(module_name, module_versions[0])
        })[0]
        # Add puppet module to the CV
        entities.ContentViewPuppetModule(
            content_view=content_view,
            id=puppet_module_old.id,
        ).create()
        content_view = content_view.read()
        self.assertGreater(len(content_view.puppet_module), 0)
        # Publish and promote CVV
        content_view.publish()
        content_view = content_view.read()
        self.assertEqual(len(content_view.version), 1)
        cvv = content_view.version[-1].read()
        promote(cvv, lce.id)
        cvv = cvv.read()
        self.assertEqual(len(cvv.environment), 2)
        # Wait till capsule sync finishes
        sync_status = capsule.content_get_sync()
        self.assertTrue(
            len(sync_status['active_sync_tasks']) >= 1
            or sync_status['last_sync_time']
        )
        for task in sync_status['active_sync_tasks']:
            entities.ForemanTask(id=task['id']).poll()
        sync_status = capsule.content_get_sync()
        last_sync_time = sync_status['last_sync_time']
        # Unassign old puppet module version from CV
        entities.ContentViewPuppetModule(
            content_view=content_view,
            id=content_view.puppet_module[0].id,
        ).delete()
        # Assign new puppet module version
        puppet_module_new = entities.PuppetModule().search(query={
            'search': 'name={} and version={}'
                      .format(module_name, module_versions[1])
        })[0]
        entities.ContentViewPuppetModule(
            content_view=content_view,
            id=puppet_module_new.id,
        ).create()
        content_view = content_view.read()
        self.assertGreater(len(content_view.puppet_module), 0)
        # Publish and promote CVV
        content_view.publish()
        content_view = content_view.read()
        self.assertEqual(len(content_view.version), 2)
        cvv = content_view.version[-1].read()
        promote(cvv, lce.id)
        cvv = cvv.read()
        self.assertEqual(len(cvv.environment), 2)
        # Wait till capsule sync finishes
        sync_status = capsule.content_get_sync()
        if sync_status['active_sync_tasks']:
            for task in sync_status['active_sync_tasks']:
                entities.ForemanTask(id=task['id']).poll()
        else:
            self.assertNotEqual(
                sync_status['last_sync_time'], last_sync_time)
        stored_modules = get_repo_files(
            PULP_PUBLISHED_PUPPET_REPOS_PATH, 'gz', self.capsule_ip)
        with self.assertNotRaises(StopIteration):
            next(
                filename for filename in stored_modules
                if '{}-{}'.format(module_name, module_versions[1]) in filename
            )
Example #27
def test_positive_view_vdc_guest_subscription_products(session):
    """Ensure that Virtual Data Centers guest subscription Provided
    Products and Content Products are not empty.

    :id: 4a6f6933-8e26-4c47-b544-a300e11a8454

    :customerscenario: true

    :steps:
        1. Upload a manifest with a Virtual Datacenters subscription
        2. Configure a virt-who service on a virtual machine for a hypervisor
        3. Ensure the virt-who hypervisor host exists
        4. Attach the Virtual Datacenters subscription to the virt-who
           hypervisor
        5. Go to Content -> Red Hat Subscription
        6. Select the Virtual Datacenters subscription with type Guests of
           virt-who hypervisor

    :expectedresults:
        1. The Virtual Data Centers guest subscription's Provided Products
           list is not empty and the expected provided product exists
        2. The Virtual Data Centers guest subscription's Product Content list
           is not empty and the expected content product exists

    :BZ: 1395788, 1506636, 1487317

    :CaseLevel: System
    """
    org = entities.Organization().create()
    lce = entities.LifecycleEnvironment(organization=org).create()
    provisioning_server = settings.compute_resources.libvirt_hostname
    rh_product_repository = RHELAnsibleEngineRepository(cdn=True)
    product_name = rh_product_repository.data['product']
    # Create a new virt-who config
    virt_who_config = make_virt_who_config(
        {
            'organization-id': org.id,
            'hypervisor-type': VIRT_WHO_HYPERVISOR_TYPES['libvirt'],
            'hypervisor-server': f'qemu+ssh://{provisioning_server}/system',
            'hypervisor-username': '******',
        }
    )
    # create a virtual machine to host virt-who service
    with VirtualMachine() as virt_who_vm:
        # configure virtual machine and setup virt-who service
        virt_who_data = virt_who_hypervisor_config(
            virt_who_config['general-information']['id'],
            virt_who_vm,
            org_id=org.id,
            lce_id=lce.id,
            hypervisor_hostname=provisioning_server,
            configure_ssh=True,
            subscription_name=VDC_SUBSCRIPTION_NAME,
            extra_repos=[rh_product_repository.data],
        )
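        # The helper returns a dict describing the configured setup,
        # including the created virt-who hypervisor host record.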
        virt_who_hypervisor_host = virt_who_data['virt_who_hypervisor_host']
        with session:
            session.organization.select(org.name)
            # ensure that VDS subscription is assigned to virt-who hypervisor
            content_hosts = session.contenthost.search(
                'subscription_name = "{}" and name = "{}"'.format(
                    VDC_SUBSCRIPTION_NAME, virt_who_hypervisor_host['name']
                )
            )
            assert content_hosts and content_hosts[0]['Name'] == virt_who_hypervisor_host['name']
            # ensure that hypervisor guests subscription provided products list is not empty and
            # that the product is in provided products.
            provided_products = session.subscription.provided_products(
                VDC_SUBSCRIPTION_NAME, virt_who=True
            )
            assert provided_products and product_name in provided_products
            # ensure that hypervisor guests subscription content products list is not empty and
            # that product is in content products.
            content_products = session.subscription.content_products(
                VDC_SUBSCRIPTION_NAME, virt_who=True
            )
            assert content_products and product_name in content_products
Example #28
    def test_positive_uploaded_content_library_sync(self):
        """Ensure custom repo with no upstream url and manually uploaded
        content after publishing to Library is synchronized to capsule
        automatically

        :id: f5406312-dd31-4551-9f03-84eb9c3415f5

        :customerscenario: true

        :BZ: 1340686

        :expectedresults: custom content is present on the external capsule

        :CaseLevel: System
        """
        # Create organization, product, repository with no upstream url
        org = entities.Organization(smart_proxy=[self.capsule_id]).create()
        product = entities.Product(organization=org).create()
        repo = entities.Repository(
            product=product,
            url=None,
        ).create()
        capsule = entities.Capsule(id=self.capsule_id).search(
            query={'search': 'name={0}'.format(self.capsule_hostname)}
        )[0]
        # Find "Library" lifecycle env for specific organization
        lce = entities.LifecycleEnvironment(organization=org).search(query={
            'search': 'name={}'.format(ENVIRONMENT)
        })[0]
        # Associate the lifecycle environment with the capsule
        capsule.content_add_lifecycle_environment(data={
            'environment_id': lce.id,
        })
        result = capsule.content_lifecycle_environments()
        self.assertGreaterEqual(len(result['results']), 1)
        self.assertIn(
            lce.id, [capsule_lce['id'] for capsule_lce in result['results']])

        # Create a content view with the repository
        cv = entities.ContentView(
            organization=org,
            repository=[repo],
        ).create()

        # Upload custom content into the repo
        with open(get_data_file(RPM_TO_UPLOAD), 'rb') as handle:
            repo.upload_content(files={'content': handle})
        self.assertEqual(repo.read().content_counts['package'], 1)
        # Publish new version of the content view

        cv.publish()
        cv = cv.read()
        self.assertEqual(len(cv.version), 1)

        # Assert that a task to sync lifecycle environment to the capsule
        # is started (or finished already)
        sync_status = capsule.content_get_sync()
        self.assertTrue(
            len(sync_status['active_sync_tasks']) >= 1
            or sync_status['last_sync_time']
        )
        # Wait till capsule sync finishes
        for task in sync_status['active_sync_tasks']:
            entities.ForemanTask(id=task['id']).poll()
        # Verify previously uploaded content is present on capsule
        lce_repo_path = form_repo_path(
            org=org.label,
            lce=lce.label,
            cv=cv.label,
            prod=product.label,
            repo=repo.label,
        )
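        # Poll a few times; the published content may appear on the capsule
        # filesystem slightly after the sync tasks complete.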
        for _ in range(5):
            capsule_rpms = get_repo_files(
                lce_repo_path, hostname=self.capsule_ip)
            if capsule_rpms:
                break
            sleep(5)
        self.assertEqual(len(capsule_rpms), 1)
        self.assertEqual(capsule_rpms[0], RPM_TO_UPLOAD)
Example #29
    def test_positive_get_count_for_host(self):
        """Available errata count when retrieving Host

        :id: 2f35933f-8026-414e-8f75-7f4ec048faae

        :Setup:

            1. Errata synced on satellite server.
            2. Some Content hosts present.

        :Steps: GET /api/v2/hosts

        :expectedresults: The available errata count is retrieved.

        :CaseLevel: System
        """
        org = entities.Organization().create()
        env = entities.LifecycleEnvironment(organization=org).create()
        content_view = entities.ContentView(organization=org).create()
        activation_key = entities.ActivationKey(
            environment=env,
            organization=org,
        ).create()
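        # Upload a manifest and set up the RH Satellite Tools (RHEL 6) repo,
        # wiring it to the content view, lifecycle environment and
        # activation key.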
        setup_org_for_a_rh_repo(
            {
                'product': PRDS['rhel'],
                'repository-set': REPOSET['rhst6'],
                'repository': REPOS['rhst6']['name'],
                'organization-id': org.id,
                'content-view-id': content_view.id,
                'lifecycle-environment-id': env.id,
                'activationkey-id': activation_key.id,
            },
            force_manifest_upload=True)
        setup_org_for_a_custom_repo({
            'url': CUSTOM_REPO_URL,
            'organization-id': org.id,
            'content-view-id': content_view.id,
            'lifecycle-environment-id': env.id,
            'activationkey-id': activation_key.id,
        })
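        # Also enable the RHEL 6 Virtualization Agents (RHVA) repository,
        # which provides the real RH package installed below.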
        repo_id = enable_rhrepo_and_fetchid(
            basearch=DEFAULT_ARCHITECTURE,
            org_id=org.id,
            product=PRDS['rhel'],
            repo=REPOS['rhva6']['name'],
            reposet=REPOSET['rhva6'],
            releasever=DEFAULT_RELEASE_VERSION,
        )
        repo = entities.Repository(id=repo_id)
        self.assertEqual(repo.sync()['result'], 'success')
        content_view = content_view.read()
        content_view.repository.append(repo)
        content_view = content_view.update(['repository'])
        content_view.publish()
        versions = sorted(content_view.read().version, key=lambda ver: ver.id)
        cvv = versions[-1].read()
        promote(cvv, env.id)
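        # Register a RHEL 6 client and verify that per-type errata counts go
        # from 0 to 1 as the corresponding packages are installed.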
        with VirtualMachine(distro=DISTRO_RHEL6) as client:
            client.install_katello_ca()
            client.register_contenthost(org.label, activation_key.name)
            self.assertTrue(client.subscribed)
            client.enable_repo(REPOS['rhst6']['id'])
            client.enable_repo(REPOS['rhva6']['id'])
            client.install_katello_agent()
            host = entities.Host().search(
                query={'search': 'name={0}'.format(client.hostname)
                       })[0].read()
            for errata in ('security', 'bugfix', 'enhancement'):
                self._validate_errata_counts(host, errata, 0)
            client.run('yum install -y {0}'.format(FAKE_1_CUSTOM_PACKAGE))
            self._validate_errata_counts(host, 'security', 1)
            client.run('yum install -y {0}'.format(REAL_0_RH_PACKAGE))
            for errata in ('bugfix', 'enhancement'):
                self._validate_errata_counts(host, errata, 1)
Example #30
def lifecycle_env(module_org, puppet_env):
    """Create lifecycle environment"""
    lce_env = entities.LifecycleEnvironment(organization=module_org,
                                            name=gen_string('alpha')).create()
    return lce_env
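
A minimal usage sketch for this fixture (assuming it is registered as a pytest
fixture and that the module_org and puppet_env fixtures are defined elsewhere
in the suite; the test name and assertions below are illustrative only):

def test_positive_promote_cv_to_lifecycle_env(module_org, lifecycle_env):
    """Illustrative test: publish a content view and promote it to the
    fixture-provided lifecycle environment."""
    content_view = entities.ContentView(organization=module_org).create()
    content_view.publish()
    cvv = content_view.read().version[0].read()
    promote(cvv, lifecycle_env.id)
    # After promotion the CVV should be in Library plus the new environment
    assert lifecycle_env.id in [env.id for env in cvv.read().environment]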