def test_positive_update_with_immediate_sync(self):
    """Create a repository with on_demand download policy, associate it
    with capsule, sync repo, update download policy to immediate, sync
    once more.

    :id: 511b531d-1fbe-4d64-ae31-0f9eb6625e7f

    :customerscenario: true

    :BZ: 1315752

    :expectedresults: content was successfully synchronized - capsule
        filesystem contains valid links to packages

    :CaseLevel: System
    """
    repo_url = FAKE_1_YUM_REPO
    packages_count = FAKE_1_YUM_REPOS_COUNT
    # Create organization, product, repository in satellite, and lifecycle
    # environment
    org = entities.Organization().create()
    prod = entities.Product(organization=org).create()
    repo = entities.Repository(
        download_policy='on_demand',
        mirror_on_sync=True,
        product=prod,
        url=repo_url,
    ).create()
    lce = entities.LifecycleEnvironment(organization=org).create()
    # Update capsule's download policy to on_demand to match repository's
    # policy
    self.update_capsule_download_policy(self.capsule_id, 'on_demand')
    # Associate the lifecycle environment with the capsule
    capsule = entities.Capsule(id=self.capsule_id).read()
    capsule.content_add_lifecycle_environment(
        data={'environment_id': lce.id})
    result = capsule.content_lifecycle_environments()
    self.assertGreaterEqual(len(result['results']), 1)
    self.assertIn(
        lce.id, [capsule_lce['id'] for capsule_lce in result['results']])
    # Create a content view with the repository
    cv = entities.ContentView(organization=org, repository=[repo]).create()
    # Sync repository
    repo.sync()
    repo = repo.read()
    # Publish new version of the content view
    cv.publish()
    cv = cv.read()
    self.assertEqual(len(cv.version), 1)
    cvv = cv.version[-1].read()
    # Promote content view to lifecycle environment
    promote(cvv, lce.id)
    cvv = cvv.read()
    self.assertEqual(len(cvv.environment), 2)
    # Assert that a task to sync lifecycle environment to the capsule
    # is started (or finished already)
    sync_status = capsule.content_get_sync()
    self.assertTrue(
        len(sync_status['active_sync_tasks']) >= 1
        or sync_status['last_sync_time'])
    # Wait till capsule sync finishes
    for task in sync_status['active_sync_tasks']:
        entities.ForemanTask(id=task['id']).poll()
    # Update download policy to 'immediate'
    repo.download_policy = 'immediate'
    repo = repo.update(['download_policy'])
    self.assertEqual(repo.download_policy, 'immediate')
    # Update capsule's download policy as well
    self.update_capsule_download_policy(self.capsule_id, 'immediate')
    # Make sure to revert capsule's download policy after the test as the
    # capsule is shared among other tests
    self.addCleanup(
        self.update_capsule_download_policy, self.capsule_id, 'on_demand')
    # Sync repository once again
    repo.sync()
    repo = repo.read()
    # Publish new version of the content view
    cv.publish()
    cv = cv.read()
    self.assertEqual(len(cv.version), 2)
    cv.version.sort(key=lambda version: version.id)
    cvv = cv.version[-1].read()
    # Promote content view to lifecycle environment
    promote(cvv, lce.id)
    cvv = cvv.read()
    self.assertEqual(len(cvv.environment), 2)
    # Assert that a task to sync lifecycle environment to the capsule
    # is started (or finished already)
    sync_status = capsule.content_get_sync()
    self.assertTrue(
        len(sync_status['active_sync_tasks']) >= 1
        or sync_status['last_sync_time'])
    # Check whether the symlinks for all the packages were created on
    # satellite
    cvv_repo_path = form_repo_path(
        org=org.label,
        cv=cv.label,
        cvv=cvv.version,
        prod=prod.label,
        repo=repo.label,
    )
    result = ssh.command('find {}/ -type l'.format(cvv_repo_path))
    self.assertEqual(result.return_code, 0)
    # Blank lines in the command output are dropped before counting
    links = {link for link in result.stdout if link}
    self.assertEqual(len(links), packages_count)
    # Ensure there're no broken symlinks (pointing to nonexistent files) on
    # satellite
    result = ssh.command(
        'find {}/ -type l ! -exec test -e {{}} \\; -print'.format(
            cvv_repo_path))
    self.assertEqual(result.return_code, 0)
    broken_links = {link for link in result.stdout if link}
    self.assertEqual(len(broken_links), 0)
    # Wait till capsule sync finishes
    for task in sync_status['active_sync_tasks']:
        entities.ForemanTask(id=task['id']).poll()
    lce_repo_path = form_repo_path(
        org=org.label,
        lce=lce.label,
        cv=cv.label,
        prod=prod.label,
        repo=repo.label,
    )
    # Check whether the symlinks for all the packages were created on
    # capsule
    result = ssh.command(
        'find {}/ -type l'.format(lce_repo_path),
        hostname=self.capsule_ip)
    self.assertEqual(result.return_code, 0)
    links = {link for link in result.stdout if link}
    self.assertEqual(len(links), packages_count)
    # Ensure there're no broken symlinks (pointing to nonexistent files) on
    # capsule
    result = ssh.command(
        'find {}/ -type l ! -exec test -e {{}} \\; -print'.format(
            lce_repo_path),
        hostname=self.capsule_ip,
    )
    self.assertEqual(result.return_code, 0)
    broken_links = {link for link in result.stdout if link}
    self.assertEqual(len(broken_links), 0)
def module_org():
    """Create and return a fresh organization for the module's tests."""
    organization = entities.Organization()
    return organization.create()
def test_positive_custom_user_view_lce(self):
    """As a custom user attempt to view a lifecycle environment created
    by admin user

    :id: 768b647b-c530-4eca-9caa-38cf8622f36d

    :BZ: 1420511

    :Steps:

        As an admin user:

        1. Create an additional lifecycle environments other than Library
        2. Create a user without administrator privileges
        3. Create a role with the following permissions:

            * (Miscellaneous): access_dashboard
            * Lifecycle Environment:

                * edit_lifecycle_environments
                * promote_or_remove_content_views_to_environment
                * view_lifecycle_environments

            * Location: view_locations
            * Organization: view_organizations

        4. Assign the created role to the custom user

        As a custom user:

        1. Log in
        2. Navigate to Content -> Lifecycle Environments

    :expectedresults: The additional lifecycle environment is viewable and
        accessible by the custom user.

    :CaseLevel: Integration
    """
    role_name = gen_string('alpha')
    env_name = gen_string('alpha')
    user_login = gen_string('alpha')
    user_password = gen_string('alpha')
    org = entities.Organization().create()
    role = entities.Role(name=role_name).create()
    # Minimal permission set: dashboard access, taxonomy visibility and
    # lifecycle environment management only — deliberately no host access
    permissions_types_names = {
        None: ['access_dashboard'],
        'Organization': ['view_organizations'],
        'Location': ['view_locations'],
        'Katello::KTEnvironment': [
            'view_lifecycle_environments',
            'edit_lifecycle_environments',
            'promote_or_remove_content_views_to_environments'
        ]
    }
    create_role_permissions(role, permissions_types_names)
    entities.User(
        default_organization=org,
        organization=[org],
        role=[role],
        login=user_login,
        password=user_password).create()
    # create a life cycle environment as admin user and ensure it's visible
    with Session(self) as session:
        make_lifecycle_environment(session, org=org.name, name=env_name)
        self.assertIsNotNone(self.lifecycleenvironment.search(env_name))
    # ensure the created user also can find the created life cycle
    # environment link
    with Session(self, user=user_login, password=user_password) as session:
        # to ensure that the created user has only the assigned
        # permissions, check that hosts menu tab does not exist
        self.assertIsNone(
            self.content_views.wait_until_element(
                menu_locators['menu.hosts'], timeout=1))
        # assert that the created user is not a global admin user
        # check administer->users page
        with self.assertRaises(UINoSuchElementError):
            session.nav.go_to_users()
        # assert that the user can view the lvce created by admin user
        self.assertIsNotNone(self.lifecycleenvironment.search(env_name))
def default_org():
    """Look up and return the pre-seeded 'Default Organization' entity."""
    matches = entities.Organization().search(
        query={'search': 'name="Default Organization"'})
    return matches[0]
def setUpClass(cls):
    """Create an organization that every test in this case can re-use."""
    super(DockerRepositoryTestCase, cls).setUpClass()
    organization = entities.Organization()
    cls.org = organization.create()
def test_positive_service_level_subscription_with_custom_product(self):
    """Subscribe a host to activation key with Premium service level and
    with custom product

    :id: 195a8049-860e-494d-b7f0-0794384194f7

    :customerscenario: true

    :steps:
        1. Create a product with custom repository synchronized
        2. Create and Publish a content view with the created repository
        3. Create an activation key and assign the created content view
        4. Add a RedHat subscription to activation key (The product
           subscription should be added automatically)
        5. Set the activation service_level to Premium
        6. Register a host to activation key
        7. List consumed subscriptions on host
        8. List the subscription in Content Host UI

    :expectedresults:
        1. The product subscription is listed in consumed subscriptions on
           host
        2. The product subscription is listed in the contenthost
           subscriptions UI

    :BZ: 1394357

    :CaseLevel: System
    """
    org = entities.Organization().create()
    self.upload_manifest(org.id, manifests.clone())
    subscription = entities.Subscription(organization=org)
    # Helper creates product, repo, content view and activation key in one go
    entities_ids = setup_org_for_a_custom_repo({
        'url': FAKE_1_YUM_REPO,
        'organization-id': org.id,
    })
    product = entities.Product(id=entities_ids['product-id']).read()
    activation_key = entities.ActivationKey(
        id=entities_ids['activationkey-id']).read()
    # add the default RH subscription
    for sub in subscription.search():
        if sub.read_json()['product_name'] == DEFAULT_SUBSCRIPTION_NAME:
            activation_key.add_subscriptions(data={
                'quantity': 1,
                'subscription_id': sub.id,
            })
            break
    # ensure all the needed subscriptions are attached to activation key
    results = activation_key.subscriptions()['results']
    self.assertEqual(
        {product.name, DEFAULT_SUBSCRIPTION_NAME},
        {ak_subscription['name'] for ak_subscription in results})
    # Set the activation key service level to Premium
    activation_key.service_level = 'Premium'
    activation_key = activation_key.update(['service_level'])
    with VirtualMachine() as vm:
        vm.install_katello_ca()
        vm.register_contenthost(
            org.label, activation_key=activation_key.name)
        self.assertTrue(vm.subscribed)
        # The product subscription must show up among consumed subscriptions
        result = vm.run('subscription-manager list --consumed')
        self.assertEqual(result.return_code, 0)
        self.assertIn(
            'Subscription Name: {0}'.format(product.name),
            '\n'.join(result.stdout))
        # Verify the same subscription is visible in the Content Host UI
        with Session(self) as session:
            set_context(session, org=org.name)
            self.contenthost.search_and_click(vm.hostname)
            self.contenthost.click(
                tab_locators['contenthost.tab_subscriptions'])
            self.contenthost.click(
                tab_locators['contenthost.tab_subscriptions_subscriptions'])
            self.assertIsNotNone(
                self.contenthost.wait_until_element(
                    locators['contenthost.subscription_select']
                    % product.name))
def test_pre_scenario_generate_errata_for_client(self):
    """Create product and repo from which the errata will be generated for the
    Satellite client or content host.

    :id: 88fd28e6-b4df-46c0-91d6-784859fd1c21

    :steps:

        1. Create Life Cycle Environment, Product and Custom Yum Repo
        2. Create custom tools, rhel repos and sync them
        3. Create content view and publish it
        4. Create activation key and add subscription.
        5. Registering Docker Content Host RHEL7
        6. Check katello agent and goferd service running on host
        7. Generate Errata by Installing Outdated/Older Packages
        8. Collect the Erratum list

    :expectedresults:

        1. The content host is created
        2. errata count, erratum list will be generated to satellite
           client/content host
    """
    org = entities.Organization().create()
    loc = entities.Location(organization=[org]).create()
    environment = entities.LifecycleEnvironment(organization=org).search(
        query={'search': 'name=Library'})[0]
    product = entities.Product(organization=org).create()
    custom_yum_repo = entities.Repository(
        product=product, content_type='yum', url=FAKE_9_YUM_REPO).create()
    product.sync()
    tools_repo, rhel_repo = self._create_custom_rhel_tools_repos(product)
    repolist = [custom_yum_repo, tools_repo, rhel_repo]
    content_view = publish_content_view(org=org, repolist=repolist)
    ak = entities.ActivationKey(
        content_view=content_view,
        organization=org.id,
        environment=environment).create()
    subscription = entities.Subscription(organization=org).search(
        query={'search': f'name={product.name}'})[0]
    ak.add_subscriptions(data={'subscription_id': subscription.id})
    rhel7_client = dockerize(
        ak_name=ak.name, distro='rhel7', org_label=org.label)
    # dockerize returns a single {container_name: container_id} mapping;
    # take the one entry directly instead of building throwaway lists
    client_container_id = next(iter(rhel7_client.values()))
    client_container_name = next(iter(rhel7_client))
    host_location_update(
        client_container_name=client_container_name,
        logger_obj=self.logger,
        loc=loc)
    # Wait until the container's subscription identity reports the
    # expected organization (i.e. registration completed)
    wait_for(
        lambda: org.name in execute(
            docker_execute_command,
            client_container_id,
            'subscription-manager identity',
            host=self.docker_vm,
        )[self.docker_vm],
        timeout=800,
        delay=2,
        logger=self.logger,
    )
    install_or_update_package(
        client_hostname=client_container_id, package="katello-agent")
    run_goferd(client_hostname=client_container_id)
    # Installing outdated packages makes their errata applicable on host
    for package in FAKE_9_YUM_OUTDATED_PACKAGES:
        install_or_update_package(
            client_hostname=client_container_id, package=package)
    host = entities.Host().search(
        query={'search': f'activation_key={ak.name}'})[0]
    installable_errata_count = host.content_facet_attributes[
        'errata_counts']['total']
    self.assertGreater(installable_errata_count, 1)
    erratum_list = entities.Errata(repository=custom_yum_repo).search(
        query={'order': 'updated ASC', 'per_page': 1000})
    errata_ids = [errata.errata_id for errata in erratum_list]
    self.assertEqual(sorted(errata_ids), sorted(FAKE_9_YUM_ERRATUM))
    # Persist data for the matching post-upgrade scenario.
    # NOTE(review): 'conten_view_id' is misspelled but kept as-is because
    # the post-upgrade scenario presumably reads this exact key — confirm
    # before renaming.
    scenario_dict = {
        self.__class__.__name__: {
            'rhel_client': rhel7_client,
            'activation_key': ak.name,
            'custom_repo_id': custom_yum_repo.id,
            'product_id': product.id,
            'conten_view_id': content_view.id,
        }
    }
    create_dict(scenario_dict)
def test_positive_add_rh_and_custom_products(session):
    """Test that RH/Custom product can be associated to Activation keys

    :id: 3d8876fa-1412-47ca-a7a4-bce2e8baf3bc

    :Steps:
        1. Create Activation key
        2. Associate RH product(s) to Activation Key
        3. Associate custom product(s) to Activation Key

    :expectedresults: RH/Custom product is successfully associated to
        Activation key

    :CaseLevel: Integration
    """
    ak_name = gen_string('alpha')
    rh_repo = {
        'name': REPOS['rhva6']['name'],
        'product': PRDS['rhel'],
        'reposet': REPOSET['rhva6'],
        'basearch': DEFAULT_ARCHITECTURE,
        'releasever': DEFAULT_RELEASE_VERSION,
    }
    custom_product_name = gen_string('alpha')
    custom_repo_name = gen_string('alpha')
    org = entities.Organization().create()
    custom_product = entities.Product(
        name=custom_product_name,
        organization=org,
    ).create()
    custom_repo = entities.Repository(
        name=custom_repo_name,
        product=custom_product,
    ).create()
    with manifests.clone() as manifest:
        upload_manifest(org.id, manifest.content)
    rhel_repo_id = enable_rhrepo_and_fetchid(
        basearch=rh_repo['basearch'],
        org_id=org.id,
        product=rh_repo['product'],
        repo=rh_repo['name'],
        reposet=rh_repo['reposet'],
        releasever=rh_repo['releasever'],
    )
    # Sync both the Red Hat repository and the custom one
    for repo_id in (rhel_repo_id, custom_repo.id):
        entities.Repository(id=repo_id).sync()
    with session:
        session.organization.select(org.name)
        session.activationkey.create({
            'name': ak_name,
            'lce': {ENVIRONMENT: True},
            'content_view': DEFAULT_CV,
        })
        assert session.activationkey.search(ak_name)[0]['Name'] == ak_name
        # Attach the RH subscription and the custom product subscription
        for subscription_name in (DEFAULT_SUBSCRIPTION_NAME,
                                  custom_product_name):
            session.activationkey.add_subscription(
                ak_name, subscription_name)
        ak_values = session.activationkey.read(ak_name)
        assigned_names = {
            entry['Repository Name']
            for entry in ak_values['subscriptions']['resources']['assigned']
        }
        assert assigned_names == {
            DEFAULT_SUBSCRIPTION_NAME, custom_product_name}
def test_positive_access_non_admin_user(session, test_name):
    """Access activation key that has specific name and assigned
    environment by user that has filter configured for that specific
    activation key

    :id: 358a22d1-d576-475a-b90c-98e90a2ed1a9

    :customerscenario: true

    :expectedresults: Only expected activation key can be accessed by new
        non admin user

    :BZ: 1463813

    :CaseLevel: Integration
    """
    ak_name = gen_string('alpha')
    non_searchable_ak_name = gen_string('alpha')
    org = entities.Organization().create()
    envs_list = ['STAGING', 'DEV', 'IT', 'UAT', 'PROD']
    for name in envs_list:
        entities.LifecycleEnvironment(name=name, organization=org).create()
    env_name = random.choice(envs_list)
    cv = entities.ContentView(organization=org).create()
    cv.publish()
    # NOTE(review): this lookup is not scoped to ``org`` — it assumes the
    # chosen environment name is unique across the deployment; confirm.
    promote(
        cv.read().version[0],
        entities.LifecycleEnvironment(name=env_name).search()[0].id)
    # Create new role
    role = entities.Role().create()
    # Create filter with predefined activation keys search criteria
    envs_condition = ' or '.join(['environment = ' + s for s in envs_list])
    entities.Filter(
        organization=[org],
        permission=entities.Permission(name='view_activation_keys').search(),
        role=role,
        search='name ~ {} and ({})'.format(ak_name, envs_condition)).create()
    # Add permissions for Organization and Location
    entities.Filter(
        permission=entities.Permission(resource_type='Organization').search(),
        role=role,
    ).create()
    entities.Filter(
        permission=entities.Permission(resource_type='Location').search(),
        role=role,
    ).create()
    # Create new user with a configured role
    default_loc = entities.Location().search(
        query={'search': 'name="{0}"'.format(DEFAULT_LOC)})[0]
    user_login = gen_string('alpha')
    user_password = gen_string('alpha')
    entities.User(
        role=[role],
        admin=False,
        login=user_login,
        password=user_password,
        organization=[org],
        location=[default_loc],
        default_organization=org,
    ).create()
    # As admin: create both activation keys in the chosen environment
    with session:
        session.organization.select(org_name=org.name)
        session.location.select(DEFAULT_LOC)
        for name in [ak_name, non_searchable_ak_name]:
            session.activationkey.create({
                'name': name,
                'lce': {env_name: True},
                'content_view': cv.name
            })
            assert session.activationkey.read(
                name)['details']['lce'][env_name][env_name]
    # As the filtered user: only ``ak_name`` should match the filter
    with Session(
            test_name, user=user_login, password=user_password) as session:
        session.organization.select(org.name)
        session.location.select(DEFAULT_LOC)
        assert session.activationkey.search(ak_name)[0]['Name'] == ak_name
        assert session.activationkey.search(
            non_searchable_ak_name)[0]['Name'] != non_searchable_ak_name
def test_positive_service_level_subscription_with_custom_product(session):
    """Subscribe a host to activation key with Premium service level and
    with custom product

    :id: 195a8049-860e-494d-b7f0-0794384194f7

    :customerscenario: true

    :steps:
        1. Create a product with custom repository synchronized
        2. Create and Publish a content view with the created repository
        3. Create an activation key and assign the created content view
        4. Add a RedHat subscription to activation key (The product
           subscription should be added automatically)
        5. Set the activation service_level to Premium
        6. Register a host to activation key
        7. List consumed subscriptions on host
        8. List the subscription in Content Host UI

    :expectedresults:
        1. The product subscription is listed in consumed subscriptions on
           host
        2. The product subscription is listed in the contenthost
           subscriptions UI

    :BZ: 1394357

    :CaseLevel: System
    """
    org = entities.Organization().create()
    manifests.upload_manifest_locked(org.id)
    # Helper creates product, repo, content view and activation key in one go
    entities_ids = setup_org_for_a_custom_repo({
        'url': FAKE_1_YUM_REPO,
        'organization-id': org.id,
    })
    product = entities.Product(id=entities_ids['product-id']).read()
    activation_key = entities.ActivationKey(
        id=entities_ids['activationkey-id']).read()
    # add the default RH subscription
    subscription = entities.Subscription(organization=org).search(
        query={'search': 'name="{}"'.format(DEFAULT_SUBSCRIPTION_NAME)})[0]
    activation_key.add_subscriptions(data={
        'quantity': 1,
        'subscription_id': subscription.id,
    })
    # ensure all the needed subscriptions are attached to activation key
    results = activation_key.subscriptions()['results']
    assert ({product.name, DEFAULT_SUBSCRIPTION_NAME}
            == {ak_subscription['name'] for ak_subscription in results})
    # Set the activation service_level to Premium
    activation_key.service_level = 'Premium'
    activation_key = activation_key.update(['service_level'])
    with VirtualMachine() as vm:
        vm.install_katello_ca()
        vm.register_contenthost(
            org.label, activation_key=activation_key.name)
        assert vm.subscribed
        # The product subscription must show up among consumed subscriptions
        result = vm.run('subscription-manager list --consumed')
        assert result.return_code == 0
        assert 'Subscription Name: {0}'.format(product.name) in '\n'.join(
            result.stdout)
        # Verify the same subscription is visible in the Content Host UI
        with session:
            session.organization.select(org.name)
            chost = session.contenthost.read(vm.hostname)
            subscriptions = {
                subs['Repository Name']
                for subs in chost['subscriptions']['resources']['assigned']
            }
            assert product.name in subscriptions
def test_positive_update_rh_product(session):
    """Update Content View in an Activation key

    :id: 9b0ac209-45de-4cc4-97e8-e191f3f37239

    :Steps:
        1. Create an activation key
        2. Update the content view with another content view which has RH
           products

    :expectedresults: Activation key is updated

    :CaseLevel: Integration
    """
    name = gen_string('alpha')
    env1_name = gen_string('alpha')
    env2_name = gen_string('alpha')
    cv1_name = gen_string('alpha')
    cv2_name = gen_string('alpha')
    # Two different RH repositories so each content view has distinct content
    rh_repo1 = {
        'name': REPOS['rhva6']['name'],
        'product': PRDS['rhel'],
        'reposet': REPOSET['rhva6'],
        'basearch': DEFAULT_ARCHITECTURE,
        'releasever': DEFAULT_RELEASE_VERSION,
    }
    rh_repo2 = {
        'name': ('Red Hat Enterprise Virtualization Agents for RHEL 6 '
                 'Server RPMs i386 6Server'),
        'product': PRDS['rhel'],
        'reposet': REPOSET['rhva6'],
        'basearch': 'i386',
        'releasever': DEFAULT_RELEASE_VERSION,
    }
    org = entities.Organization().create()
    with manifests.clone() as manifest:
        upload_manifest(org.id, manifest.content)
    repo1_id = enable_sync_redhat_repo(rh_repo1, org.id)
    cv_publish_promote(cv1_name, env1_name, repo1_id, org.id)
    repo2_id = enable_sync_redhat_repo(rh_repo2, org.id)
    cv_publish_promote(cv2_name, env2_name, repo2_id, org.id)
    with session:
        session.organization.select(org.name)
        session.activationkey.create({
            'name': name,
            'lce': {env1_name: True},
            'content_view': cv1_name,
        })
        assert session.activationkey.search(name)[0]['Name'] == name
        ak = session.activationkey.read(name)
        assert ak['details']['content_view'] == cv1_name
        # Workaround while BZ 1597639 is open — re-run the search
        if bz_bug_is_open(1597639):
            assert session.activationkey.search(name)[0]['Name'] == name
        # Switch the key to the second environment/content view pair
        session.activationkey.update(name, {
            'details': {
                'lce': {env2_name: True},
                'content_view': cv2_name,
            }
        })
        ak = session.activationkey.read(name)
        assert ak['details']['content_view'] == cv2_name
def test_positive_end_to_end(session, module_org, module_loc, module_vmware_settings):
    """Perform end to end testing for compute resource VMware component.

    :id: 47fc9e77-5b22-46b4-a76c-3217434fde2f

    :expectedresults: All expected CRUD actions finished successfully.

    :CaseLevel: Integration
    """
    cr_name = gen_string('alpha')
    new_cr_name = gen_string('alpha')
    description = gen_string('alpha')
    # Randomize the optional console settings to widen coverage across runs
    display_type = choice(('VNC', 'VMRC'))
    vnc_console_passwords = choice((False, True))
    enable_caching = choice((False, True))
    new_org = entities.Organization().create()
    new_loc = entities.Location().create()
    with session:
        # Create the compute resource with the fixture-provided credentials
        session.computeresource.create(
            {
                'name': cr_name,
                'description': description,
                'provider': FOREMAN_PROVIDERS['vmware'],
                'provider_content.vcenter': module_vmware_settings['vcenter'],
                'provider_content.user': module_vmware_settings['user'],
                'provider_content.password': module_vmware_settings['password'],
                'provider_content.datacenter.value': module_vmware_settings['datacenter'],
                'provider_content.display_type': display_type,
                'provider_content.vnc_console_passwords': vnc_console_passwords,
                'provider_content.enable_caching': enable_caching,
                'organizations.resources.assigned': [module_org.name],
                'locations.resources.assigned': [module_loc.name],
            }
        )
        cr_values = session.computeresource.read(cr_name)
        assert cr_values['name'] == cr_name
        assert cr_values['description'] == description
        assert cr_values['provider'] == FOREMAN_PROVIDERS['vmware']
        assert cr_values['provider_content']['user'] == module_vmware_settings['user']
        assert (
            cr_values['provider_content']['datacenter']['value']
            == module_vmware_settings['datacenter']
        )
        assert cr_values['provider_content']['display_type'] == display_type
        assert cr_values['provider_content']['vnc_console_passwords'] == vnc_console_passwords
        assert cr_values['provider_content']['enable_caching'] == enable_caching
        assert cr_values['organizations']['resources']['assigned'] == [module_org.name]
        assert cr_values['locations']['resources']['assigned'] == [module_loc.name]
        # Rename the resource and assign the additional taxonomies
        session.computeresource.edit(
            cr_name,
            {
                'name': new_cr_name,
                'organizations.resources.assigned': [new_org.name],
                'locations.resources.assigned': [new_loc.name],
            },
        )
        assert not session.computeresource.search(cr_name)
        cr_values = session.computeresource.read(new_cr_name)
        assert cr_values['name'] == new_cr_name
        assert set(cr_values['organizations']['resources']['assigned']) == {
            module_org.name,
            new_org.name,
        }
        assert set(cr_values['locations']['resources']['assigned']) == {
            module_loc.name,
            new_loc.name,
        }
        # check that the compute resource is listed in one of the default compute profiles
        profile_cr_values = session.computeprofile.list_resources(COMPUTE_PROFILE_LARGE)
        profile_cr_names = [cr['Compute Resource'] for cr in profile_cr_values]
        assert '{} ({})'.format(new_cr_name, FOREMAN_PROVIDERS['vmware']) in profile_cr_names
        session.computeresource.delete(new_cr_name)
        assert not session.computeresource.search(new_cr_name)
def setUp(self):  # noqa
    """Provision a dedicated organization for each test."""
    super(SyncTestCase, self).setUp()
    organization = entities.Organization()
    self.organization = organization.create()
def test_positive_end_to_end(session, module_org, module_location):
    """Perform end to end testing for discovery rule component.

    :id: dd35e566-dc3a-43d3-939c-a33ae528740f

    :expectedresults: All expected CRUD actions finished successfully

    :CaseImportance: Critical
    """
    # Initial rule attributes
    rule_name = gen_string('alpha')
    search = f'cpu_count = {gen_integer(1, 5)}'
    hg_name = gen_string('alpha')
    hostname = gen_string('alpha')
    hosts_limit = str(gen_integer(0, 100))
    priority = str(gen_integer(1, 100))
    # Attributes used by the update step (disjoint value ranges)
    new_rule_name = gen_string('alpha')
    new_search = f'cpu_count = {gen_integer(6, 10)}'
    new_hg_name = gen_string('alpha')
    new_hostname = gen_string('alpha')
    new_hosts_limit = str(gen_integer(101, 200))
    new_priority = str(gen_integer(101, 200))
    entities.HostGroup(
        name=hg_name, organization=[module_org], location=[module_location]
    ).create()
    entities.HostGroup(
        name=new_hg_name, organization=[module_org], location=[module_location]
    ).create()
    new_org = entities.Organization().create()
    new_loc = entities.Location().create()
    with session:
        session.discoveryrule.create(
            {
                'primary.name': rule_name,
                'primary.search': search,
                'primary.host_group': hg_name,
                'primary.hostname': hostname,
                'primary.hosts_limit': hosts_limit,
                'primary.priority': priority,
                'primary.enabled': False,
                'organizations.resources.assigned': [module_org.name],
                'locations.resources.assigned': [module_location.name],
            }
        )
        values = session.discoveryrule.read(
            rule_name, widget_names=['primary', 'organizations', 'locations']
        )
        assert values['primary']['name'] == rule_name
        assert values['primary']['search'] == search
        assert values['primary']['host_group'] == hg_name
        assert values['primary']['hostname'] == hostname
        assert values['primary']['hosts_limit'] == hosts_limit
        assert values['primary']['priority'] == priority
        assert values['primary']['enabled'] is False
        assert values['organizations']['resources']['assigned'] == [module_org.name]
        assert values['locations']['resources']['assigned'] == [module_location.name]
        # Update every attribute, rename the rule and extend its taxonomies
        session.discoveryrule.update(
            rule_name,
            {
                'primary.name': new_rule_name,
                'primary.search': new_search,
                'primary.host_group': new_hg_name,
                'primary.hostname': new_hostname,
                'primary.hosts_limit': new_hosts_limit,
                'primary.priority': new_priority,
                'primary.enabled': True,
                'organizations.resources.assigned': [new_org.name],
                'locations.resources.assigned': [new_loc.name],
            },
        )
        rules = session.discoveryrule.read_all()
        assert rule_name not in [rule['Name'] for rule in rules]
        values = session.discoveryrule.read(
            new_rule_name, widget_names=['primary', 'organizations', 'locations']
        )
        assert values['primary']['name'] == new_rule_name
        assert values['primary']['search'] == new_search
        assert values['primary']['host_group'] == new_hg_name
        assert values['primary']['hostname'] == new_hostname
        assert values['primary']['hosts_limit'] == new_hosts_limit
        assert values['primary']['priority'] == new_priority
        assert values['primary']['enabled'] is True
        assert {new_org.name, module_org.name} == set(
            values['organizations']['resources']['assigned']
        )
        assert {new_loc.name, module_location.name} == set(
            values['locations']['resources']['assigned']
        )
        session.discoveryrule.delete(new_rule_name)
        rules = session.discoveryrule.read_all()
        assert new_rule_name not in [rule['Name'] for rule in rules]
def test_positive_custom_user_view_lce(session, test_name):
    """As a custom user attempt to view a lifecycle environment created
    by admin user

    :id: 768b647b-c530-4eca-9caa-38cf8622f36d

    :BZ: 1420511

    :Steps:

        As an admin user:

        1. Create an additional lifecycle environments other than Library
        2. Create a user without administrator privileges
        3. Create a role with the following permissions:

            * (Miscellaneous): access_dashboard
            * Lifecycle Environment:

                * edit_lifecycle_environments
                * promote_or_remove_content_views_to_environment
                * view_lifecycle_environments

            * Location: view_locations
            * Organization: view_organizations

        4. Assign the created role to the custom user

        As a custom user:

        1. Log in
        2. Navigate to Content -> Lifecycle Environments

    :expectedresults: The additional lifecycle environment is viewable and
        accessible by the custom user.

    :CaseLevel: Integration
    """
    role_name = gen_string('alpha')
    lce_name = gen_string('alpha')
    user_login = gen_string('alpha')
    user_password = gen_string('alpha')
    org = entities.Organization().create()
    role = entities.Role(name=role_name).create()
    # Minimal permission set: dashboard access, taxonomy visibility and
    # lifecycle environment management only — deliberately no host access
    permissions_types_names = {
        None: ['access_dashboard'],
        'Organization': ['view_organizations'],
        'Location': ['view_locations'],
        'Katello::KTEnvironment': [
            'view_lifecycle_environments',
            'edit_lifecycle_environments',
            'promote_or_remove_content_views_to_environments'
        ]
    }
    create_role_permissions(role, permissions_types_names)
    entities.User(
        default_organization=org,
        organization=[org],
        role=[role],
        login=user_login,
        password=user_password).create()
    # create a life cycle environment as admin user and ensure it's visible
    with session:
        session.organization.select(org.name)
        session.lifecycleenvironment.create(values={'name': lce_name})
        lce_values = session.lifecycleenvironment.read_all()
        assert lce_name in lce_values['lce']
    # ensure the created user also can find the created lifecycle
    # environment link
    with Session(test_name, user_login, user_password) as non_admin_session:
        # to ensure that the created user has only the assigned
        # permissions, check that hosts menu tab does not exist
        with raises(NavigationTriesExceeded):
            assert not non_admin_session.host.read_all()
        # assert that the user can view the lvce created by admin user
        lce_values = non_admin_session.lifecycleenvironment.read_all()
        assert lce_name in lce_values['lce']
def test_positive_default_end_to_end_with_custom_profile(
        session, module_org, module_loc, module_gce_settings, download_cert):
    """Create GCE compute resource with default properties and exercise its
    basic functionality.

    :id: 59ffd83e-a984-4c22-b91b-cad055b4fbd7

    :Steps:

        1. Create an GCE compute resource with default properties.
        2. Update the compute resource name and add new taxonomies.
        3. Associate compute profile with custom properties to GCE compute
           resource
        4. Delete the compute resource.

    :expectedresults: The GCE compute resource is created, updated, compute
        profile associated and deleted.

    :CaseLevel: Integration

    :CaseImportance: Critical
    """
    cr_name = gen_string('alpha')
    new_cr_name = gen_string('alpha')
    cr_description = gen_string('alpha')
    # Extra taxonomies used to verify the update path below.
    new_org = entities.Organization().create()
    new_loc = entities.Location().create()
    with session:
        # Compute Resource Create and Assertions
        session.computeresource.create({
            'name': cr_name,
            'description': cr_description,
            'provider': FOREMAN_PROVIDERS['google'],
            'provider_content.google_project_id':
                module_gce_settings['project_id'],
            'provider_content.client_email':
                module_gce_settings['client_email'],
            'provider_content.certificate_path':
                module_gce_settings['cert_path'],
            'organizations.resources.assigned': [module_org.name],
            'locations.resources.assigned': [module_loc.name],
        })
        cr_values = session.computeresource.read(cr_name)
        assert cr_values['name'] == cr_name
        # Zone should be auto-populated once valid credentials are set.
        assert cr_values['provider_content']['zone']['value']
        assert (cr_values['organizations']['resources']['assigned'] == [
            module_org.name
        ])
        assert (cr_values['locations']['resources']['assigned'] == [
            module_loc.name
        ])
        assert cr_values['provider_content'][
            'google_project_id'] == module_gce_settings['project_id']
        assert cr_values['provider_content'][
            'client_email'] == module_gce_settings['client_email']
        # Compute Resource Edit/Updates and Assertions
        session.computeresource.edit(
            cr_name, {
                'name': new_cr_name,
                'organizations.resources.assigned': [new_org.name],
                'locations.resources.assigned': [new_loc.name],
            })
        # Old name must be gone; taxonomies are additive (both old and new
        # orgs/locations remain assigned).
        assert not session.computeresource.search(cr_name)
        cr_values = session.computeresource.read(new_cr_name)
        assert cr_values['name'] == new_cr_name
        assert (set(cr_values['organizations']['resources']['assigned']) == {
            module_org.name, new_org.name
        })
        assert (set(cr_values['locations']['resources']['assigned']) == {
            module_loc.name, new_loc.name
        })
        # Compute Profile edit/updates and Assertions
        session.computeresource.update_computeprofile(
            new_cr_name, COMPUTE_PROFILE_SMALL, {
                'provider_content.machine_type': GCE_MACHINE_TYPE_DEFAULT,
                'provider_content.network': GCE_NETWORK_DEFAULT,
                'provider_content.external_ip': GCE_EXTERNAL_IP_DEFAULT,
                'provider_content.default_disk_size': '15'
            })
        cr_profile_values = session.computeresource.read_computeprofile(
            new_cr_name, COMPUTE_PROFILE_SMALL)
        assert cr_profile_values['breadcrumb'] == 'Edit {0}'.format(
            COMPUTE_PROFILE_SMALL)
        assert cr_profile_values['compute_profile'] == COMPUTE_PROFILE_SMALL
        assert cr_profile_values['compute_resource'] == '{0} ({1}-{2})'.format(
            new_cr_name, module_gce_settings['zone'],
            FOREMAN_PROVIDERS['google'])
        assert (cr_profile_values['provider_content']['machine_type'] ==
                GCE_MACHINE_TYPE_DEFAULT)
        assert cr_profile_values['provider_content'][
            'network'] == GCE_NETWORK_DEFAULT
        # BZ 1721871: external IP checkbox may not round-trip; skip the
        # assertion while the bug is open.
        if not bz_bug_is_open(1721871):
            assert cr_profile_values['provider_content'][
                'external_ip'] == GCE_EXTERNAL_IP_DEFAULT
        assert cr_profile_values['provider_content'][
            'default_disk_size'] == '15'
        # Compute Resource Delete and Assertion
        session.computeresource.delete(new_cr_name)
        assert not session.computeresource.search(new_cr_name)
def setUpClass(cls): # noqa super(ActivationKeyTestCase, cls).setUpClass() cls.organization = entities.Organization().create() cls.base_key_name = entities.ActivationKey( organization=cls.organization).create().name cls.vm_distro = DISTRO_RHEL6
def setUpClass(cls): """Set up an organization for tests.""" super(LocationTestCase, cls).setUpClass() cls.org_ = entities.Organization().search( query={'search': 'name="{0}"'.format(DEFAULT_ORG)})[0]
def setUpClass(cls): super(UserGroupTestCase, cls).setUpClass() cls.organization = entities.Organization().create()
def test_positive_task_status(session):
    """Check if the Task Status is working in the Dashboard UI and filter
    from Tasks index page is working correctly

    :id: fb667d6a-7255-4341-9f79-2f03d19e8e0f

    :Steps:

        1. Navigate to Monitor -> Dashboard
        2. Review the Latest Warning/Error Tasks widget
        3. Review the Running Chart widget
        4. Review the Task Status widget
        5. Review the Stopped Chart widget
        6. Click few links from the widget

    :expectedresults: Each link shows the right info and filter can be set
        from Tasks dashboard

    :BZ: 1718889

    :CaseLevel: Integration
    """
    # Repository pointing at a host that cannot resolve, so the sync task
    # finishes in stopped/warning state.
    url = 'www.non_existent_repo_url.org'
    org = entities.Organization().create()
    product = entities.Product(organization=org).create()
    repo = entities.Repository(url=f'http://{url}',
                               product=product,
                               content_type='yum').create()
    with pytest.raises(TaskFailedError):
        repo.sync()
    with session:
        session.organization.select(org_name=org.name)
        # Clicking the widget cell must pre-populate the task search.
        session.dashboard.action(
            {'TaskStatus': {'state': 'stopped', 'result': 'warning'}})
        search_values = session.task.read_all('searchbox')
        assert search_values['searchbox'] == 'state=stopped&result=warning'
        # Scheduled chart: item count must match the chart total.
        session.task.set_chart_filter('ScheduledChart')
        task_page = session.task.read_all(['pagination', 'ScheduledChart'])
        assert (task_page['pagination']['total_items'] ==
                task_page['ScheduledChart']['total'].split()[0])
        # Stopped chart: drill into the 'Total' column of the second row.
        session.task.set_chart_filter('StoppedChart',
                                      {'row': 1, 'focus': 'Total'})
        task_page = session.task.read_all()
        assert (task_page['pagination']['total_items'] ==
                task_page['StoppedChart']['table'][1]['Total'])
        expected_action = (
            "Synchronize repository '{}'; product '{}'; organization '{}'"
            .format(repo.name, product.name, org.name))
        first_row = task_page['table'][0]
        assert first_row['Action'] == expected_action
        assert first_row['State'] == 'stopped'
        assert first_row['Result'] == 'warning'
        # Latest failed tasks widget must link to the failed sync task.
        session.dashboard.action(
            {'LatestFailedTasks': {'name': 'Synchronize'}})
        task_details = session.task.read(expected_action)
        assert task_details['task']['result'] == 'warning'
        assert (task_details['task']['errors'] ==
                f'Cannot connect to host {url}:80 ssl:default '
                f'[Name or service not known]')
def test_positive_end_to_end(self):
    """Perform end to end smoke tests using RH and custom repos.

    1. Create a new user with admin permissions
    2. Using the new user from above

        1. Create a new organization
        2. Clone and upload manifest
        3. Create a new lifecycle environment
        4. Create a custom product
        5. Create a custom YUM repository
        6. Create a custom PUPPET repository
        7. Enable a Red Hat repository
        8. Synchronize the three repositories
        9. Create a new content view
        10. Associate the YUM and Red Hat repositories to new content view
        11. Add a PUPPET module to new content view
        12. Publish content view
        13. Promote content view to the lifecycle environment
        14. Create a new activation key
        15. Add the products to the activation key
        16. Create a new libvirt compute resource
        17. Create a new subnet
        18. Create a new domain
        19. Create a new hostgroup and associate previous entities to it
        20. Provision a client

    :id: b2f73740-d3ce-4e6e-abc7-b23e5562bac1

    :expectedresults: All tests should succeed and Content should be
        successfully fetched by client.
    """
    # step 1: Create a new user with admin permissions
    login = gen_string('alphanumeric')
    password = gen_string('alphanumeric')
    entities.User(admin=True, login=login, password=password).create()
    # step 2.1: Create a new organization
    # All subsequent API calls pass server_config so they are performed
    # as the freshly created admin user, not the default credentials.
    server_config = get_nailgun_config()
    server_config.auth = (login, password)
    org = entities.Organization(server_config).create()
    # step 2.2: Clone and upload manifest
    if self.fake_manifest_is_set:
        with manifests.clone() as manifest:
            upload_manifest(org.id, manifest.content)
    # step 2.3: Create a new lifecycle environment
    le1 = entities.LifecycleEnvironment(server_config,
                                        organization=org).create()
    # step 2.4: Create a custom product
    prod = entities.Product(server_config, organization=org).create()
    repositories = []
    # step 2.5: Create custom YUM repository
    repo1 = entities.Repository(server_config,
                                product=prod,
                                content_type='yum',
                                url=CUSTOM_RPM_REPO).create()
    repositories.append(repo1)
    # step 2.6: Create custom PUPPET repository
    repo2 = entities.Repository(server_config,
                                product=prod,
                                content_type='puppet',
                                url=FAKE_0_PUPPET_REPO).create()
    repositories.append(repo2)
    # step 2.7: Enable a Red Hat repository
    # Only possible when a manifest has been uploaded above.
    if self.fake_manifest_is_set:
        repo3 = entities.Repository(id=enable_rhrepo_and_fetchid(
            basearch='x86_64',
            org_id=org.id,
            product=PRDS['rhel'],
            repo=REPOS['rhva6']['name'],
            reposet=REPOSET['rhva6'],
            releasever='6Server',
        ))
        repositories.append(repo3)
    # step 2.8: Synchronize the three repositories
    for repo in repositories:
        repo.sync()
    # step 2.9: Create content view
    content_view = entities.ContentView(server_config,
                                        organization=org).create()
    # step 2.10: Associate the YUM and Red Hat repositories to new content
    # view
    # The puppet repo is excluded here; its content is added in step 2.11
    # as a puppet module instead of as a plain repository.
    repositories.remove(repo2)
    content_view.repository = repositories
    content_view = content_view.update(['repository'])
    # step 2.11: Add a PUPPET module to new content view
    puppet_mods = content_view.available_puppet_modules()
    self.assertGreater(len(puppet_mods['results']), 0)
    puppet_module = random.choice(puppet_mods['results'])
    puppet = entities.ContentViewPuppetModule(
        author=puppet_module['author'],
        content_view=content_view,
        name=puppet_module['name']).create()
    self.assertEqual(puppet.name, puppet_module['name'])
    # step 2.12: Publish content view
    content_view.publish()
    # step 2.13: Promote content view to the lifecycle environment
    content_view = content_view.read()
    self.assertEqual(len(content_view.version), 1)
    cv_version = content_view.version[0].read()
    # Before promotion the version lives only in Library.
    self.assertEqual(len(cv_version.environment), 1)
    promote(cv_version, le1.id)
    # check that content view exists in lifecycle
    content_view = content_view.read()
    self.assertEqual(len(content_view.version), 1)
    cv_version = cv_version.read()
    # step 2.14: Create a new activation key
    activation_key_name = gen_string('alpha')
    activation_key = entities.ActivationKey(
        name=activation_key_name,
        environment=le1,
        organization=org,
        content_view=content_view).create()
    # step 2.15: Add the products to the activation key
    for sub in entities.Subscription(organization=org).search():
        if sub.name == DEFAULT_SUBSCRIPTION_NAME:
            activation_key.add_subscriptions(data={
                'quantity': 1,
                'subscription_id': sub.id
            })
            break
    # step 2.15.1: Enable product content
    if self.fake_manifest_is_set:
        activation_key.content_override(
            data={
                'content_overrides': [{
                    'content_label': AK_CONTENT_LABEL,
                    'value': '1'
                }]
            })
    # BONUS: Create a content host and associate it with promoted
    # content view and last lifecycle where it exists
    content_host = entities.Host(
        content_facet_attributes={
            'content_view_id': content_view.id,
            'lifecycle_environment_id': le1.id,
        },
        organization=org,
    ).create()
    # check that content view matches what we passed
    self.assertEqual(
        content_host.content_facet_attributes['content_view_id'],
        content_view.id)
    # check that lifecycle environment matches
    self.assertEqual(
        content_host.content_facet_attributes['lifecycle_environment_id'],
        le1.id)
    # step 2.16: Create a new libvirt compute resource
    entities.LibvirtComputeResource(
        server_config,
        url='qemu+ssh://root@{0}/system'.format(
            settings.compute_resources.libvirt_hostname),
    ).create()
    # step 2.17: Create a new subnet
    subnet = entities.Subnet(server_config).create()
    # step 2.18: Create a new domain
    domain = entities.Domain(server_config).create()
    # step 2.19: Create a new hostgroup and associate previous entities to
    # it
    entities.HostGroup(server_config, domain=domain,
                       subnet=subnet).create()
    # step 2.20: Provision a client
    self.client_provisioning(activation_key_name, org.label)
def test_positive_user_access_with_host_filter(test_name, module_location,
                                               rhel7_contenthost, target_sat):
    """Check if user with necessary host permissions can access dashboard
    and required widgets are rendered with proper values

    :id: 24b4b371-cba0-4bc8-bc6a-294c62e0586d

    :Steps:

        1. Specify proper filter with permission for your role
        2. Create new user and assign role to it
        3. Login into application using this new user
        4. Check dashboard and widgets on it
        5. Register new content host to populate some values into dashboard
           widgets

    :expectedresults: Dashboard and Errata Widget rendered without errors and
        contain proper values

    :BZ: 1417114

    :parametrized: yes

    :CaseLevel: System
    """
    user_login = gen_string('alpha')
    user_password = gen_string('alphanumeric')
    org = entities.Organization().create()
    lce = entities.LifecycleEnvironment(organization=org).create()
    # Role limited to dashboard access plus read-only host visibility.
    role = entities.Role().create()
    permissions_map = {
        'Organization': ['view_organizations'],
        'Location': ['view_locations'],
        None: ['access_dashboard'],
        'Host': ['view_hosts'],
    }
    create_role_permissions(role, permissions_map)
    # Non-admin user carrying only the role above.
    entities.User(
        default_organization=org,
        organization=[org],
        default_location=module_location,
        location=[module_location],
        role=[role],
        login=user_login,
        password=user_password,
    ).create()
    with Session(test_name, user=user_login,
                 password=user_password) as session:
        # Widgets start out empty before any host is registered.
        status_widget = session.dashboard.read('HostConfigurationStatus')
        assert status_widget['total_count'] == 0
        assert len(session.dashboard.read('LatestErrata')['erratas']) == 0
        # Register a content host with an errata-carrying repo to populate
        # the widgets.
        repos_collection = RepositoryCollection(
            distro=DISTRO_RHEL7,
            repositories=[
                SatelliteToolsRepository(),
                YumRepository(url=settings.repos.yum_6.url)
            ],
        )
        repos_collection.setup_content(org.id, lce.id, upload_manifest=True)
        repos_collection.setup_virtual_machine(
            rhel7_contenthost, target_sat,
            location_title=module_location.name)
        result = rhel7_contenthost.run(
            f'yum install -y {FAKE_1_CUSTOM_PACKAGE}')
        assert result.status == 0
        hostname = rhel7_contenthost.hostname
        # Check UI for values
        assert session.host.search(hostname)[0]['Name'] == hostname
        status_widget = session.dashboard.read('HostConfigurationStatus')
        assert status_widget['total_count'] == 1
        errata_widget = session.dashboard.read('LatestErrata')['erratas']
        assert len(errata_widget) == 1
        assert errata_widget[0]['Type'] == 'security'
        assert settings.repos.yum_6.errata[2] in errata_widget[0]['Errata']
def setUpClass(cls): """Create an organization which can be re-used in tests.""" super(DockerContentViewTestCase, cls).setUpClass() cls.org = entities.Organization().create()
def test_positive_host_configuration_status(session):
    """Check if the Host Configuration Status Widget links are working

    :id: ffb0a6a1-2b65-4578-83c7-61492122d865

    :customerscenario: true

    :Steps:

        1. Navigate to Monitor -> Dashboard
        2. Review the Host Configuration Status
        3. Navigate to each of the links which has search string associated
           with it.

    :expectedresults: Each link shows the right info

    :BZ: 1631219

    :CaseLevel: Integration
    """
    org = entities.Organization().create()
    loc = entities.Location().create()
    # Single fresh host: it has no reports yet, so only the
    # 'Hosts with no reports' row should count it.
    host = entities.Host(organization=org, location=loc).create()
    criteria = [
        'Hosts that had performed modifications without error',
        'Hosts in error state',
        'Good host reports in the last 30 minutes',
        'Hosts that had pending changes',
        'Out of sync hosts',
        'Hosts with alerts disabled',
        'Hosts with no reports',
    ]
    queries = [
        'last_report > \"30 minutes ago\" and (status.applied > 0 or'
        ' status.restarted > 0) and (status.failed = 0)',
        'last_report > \"30 minutes ago\" and (status.failed > 0 or'
        ' status.failed_restarts > 0) and status.enabled = true',
        'last_report > \"30 minutes ago\" and status.enabled = true and'
        ' status.applied = 0 and status.failed = 0 and status.pending = 0',
        'last_report > \"30 minutes ago\" and status.pending > 0 and status.enabled = true',
        'last_report < \"30 minutes ago\" and status.enabled = true',
        'status.enabled = false',
        'not has last_report and status.enabled = true',
    ]
    # While BZ 1631219 is open the last row misbehaves, so skip it.
    if is_open('BZ:1631219'):
        del criteria[-1]
        del queries[-1]
    with session:
        session.organization.select(org_name=org.name)
        session.location.select(loc_name=loc.name)
        status_widget = session.dashboard.read('HostConfigurationStatus')
        for criterion in criteria:
            expected = 1 if criterion == 'Hosts with no reports' else 0
            assert status_widget['status_list'][criterion] == expected
        # Each row click must land on the host list with the matching
        # search query pre-filled.
        for criterion, query in zip(criteria, queries):
            session.dashboard.action(
                {'HostConfigurationStatus': {'status_list': criterion}})
            results = session.host.read_all()
            assert results['searchbox'] == query
            if criterion == 'Hosts with no reports':
                assert len(results['table']) == 1
                assert results['table'][0]['Name'] == host.name
            else:
                assert len(results['table']) == 0
def configure_puppet_test(cls):
    """Set up the whole provisioning environment needed for Puppet based
    end-to-end tests like OSCAP etc.

    Creates an org, reuses the default location and the 'production' puppet
    environment, imports puppet classes from the satellite smart proxy,
    prepares a lifecycle environment / content view / activation keys wired
    to the sat-tools repos for RHEL6 and RHEL7, and exposes the default
    SCAP contents in the new org.

    :raises ProxyError: if the satellite's capsule lacks any of the
        required features (Puppet, Ansible, Openscap).
    :returns: A dict of entities to help with provisioning
    """
    cls.rhel6_content = OSCAP_DEFAULT_CONTENT['rhel6_content']
    cls.rhel7_content = OSCAP_DEFAULT_CONTENT['rhel7_content']
    sat6_hostname = settings.server.hostname
    # The capsule on the satellite itself must provide every feature the
    # OSCAP tests rely on; bail out early otherwise.
    proxy = Proxy.list({'search': sat6_hostname})[0]
    p_features = set(proxy.get('features').split(', '))
    if {'Puppet', 'Ansible', 'Openscap'}.issubset(p_features):
        cls.proxy_id = proxy.get('id')
    else:
        # FIX: the message now lists exactly the features checked above
        # (the old text mentioned DHCP, which was never verified).
        raise ProxyError(
            'Some features like Puppet, Ansible, Openscap are not present')
    ak_name_7 = gen_string('alpha')
    ak_name_6 = gen_string('alpha')
    repo_values = [
        {
            'repo': settings.sattools_repo['rhel6'],
            'akname': ak_name_6
        },
        {
            'repo': settings.sattools_repo['rhel7'],
            'akname': ak_name_7
        },
    ]
    # Create new organization and re-use the default location.
    org = entities.Organization(name=gen_string('alpha')).create()
    loc = entities.Location().search(
        query={'search': "{0}".format(DEFAULT_LOC)})[0].read()
    # Attach the 'production' puppet environment to the new taxonomies.
    cls.puppet_env = entities.Environment().search(
        query={'search': 'name=production'})[0].read()
    cls.puppet_env.location.append(loc)
    cls.puppet_env.organization.append(org)
    cls.puppet_env = cls.puppet_env.update(['location', 'organization'])
    # Make the satellite's smart proxy aware of the new taxonomies and
    # import the puppet classes from the environment.
    smart_proxy = entities.SmartProxy().search(
        query={'search': 'name={0}'.format(sat6_hostname)})[0].read()
    smart_proxy.organization.append(entities.Organization(id=org.id))
    smart_proxy.location.append(entities.Location(id=loc.id))
    smart_proxy.update(['location', 'organization'])
    smart_proxy.import_puppetclasses(environment=cls.puppet_env.name)
    env = entities.LifecycleEnvironment(organization=org,
                                        name=gen_string('alpha')).create()
    # Create content view
    content_view = entities.ContentView(organization=org,
                                        name=gen_string('alpha')).create()
    # Create two activation keys for rhel7 and rhel6, each wired to its
    # own sat-tools repository through the shared content view / LCE.
    for repo in repo_values:
        activation_key = entities.ActivationKey(
            name=repo.get('akname'),
            environment=env,
            organization=org,
        ).create()
        # Setup org for a custom repo for RHEL6 and RHEL7
        setup_org_for_a_custom_repo({
            'url': repo.get('repo'),
            'organization-id': org.id,
            'content-view-id': content_view.id,
            'lifecycle-environment-id': env.id,
            'activationkey-id': activation_key.id
        })
    # Expose the default SCAP contents in the new org / default location.
    for content in cls.rhel6_content, cls.rhel7_content:
        Scapcontent.update({
            'title': content,
            'organizations': org.name,
            'locations': DEFAULT_LOC
        })
    return {
        'org_name': org.name,
        'cv_name': content_view.name,
        # Re-use the hostname bound above instead of re-reading settings.
        'sat6_hostname': sat6_hostname,
        'ak_name': {
            'rhel7': ak_name_7,
            'rhel6': ak_name_6
        },
        'env_name': env.name,
    }
def setUpClass(cls): # noqa super(RepositoryTestCase, cls).setUpClass() cls.organization = entities.Organization().create() cls.loc = entities.Location().create()
def setUpClass(cls): """Create an organization which can be re-used in tests.""" super(HostContentHostUnificationTestCase, cls).setUpClass() cls.org_ = entities.Organization().create()
def setUpClass(cls): """Create common entities""" super(UserRoleTestCase, cls).setUpClass() cls.org = entities.Organization().create() cls.loc = entities.Location().create()
def setUpClass(cls): super(LifeCycleEnvironmentTestCase, cls).setUpClass() cls.organization = entities.Organization().create()
def test_positive_mirror_on_sync(self):
    """Create 2 repositories with 'on_demand' download policy and mirror
    on sync option, associate them with capsule, sync first repo, move
    package from first repo to second one, sync it, attempt to install
    package on some host.

    :id: 39149642-1e7e-4ef8-8762-bec295913014

    :BZ: 1426408

    :expectedresults: host, subscribed to second repo only, can
        successfully install package

    :CaseLevel: System
    """
    repo1_name = gen_string('alphanumeric')
    repo2_name = gen_string('alphanumeric')
    # Create and publish first custom repository with 2 packages in it
    repo1_url = create_repo(repo1_name, FAKE_1_YUM_REPO,
                            FAKE_1_YUM_REPO_RPMS[1:3])
    # Create and publish second repo with no packages in it
    repo2_url = create_repo(repo2_name)
    # Create organization, product, repository in satellite, and lifecycle
    # environment
    org = entities.Organization().create()
    prod1 = entities.Product(organization=org).create()
    repo1 = entities.Repository(download_policy='on_demand',
                                mirror_on_sync=True,
                                product=prod1,
                                url=repo1_url).create()
    prod2 = entities.Product(organization=org).create()
    repo2 = entities.Repository(download_policy='on_demand',
                                mirror_on_sync=True,
                                product=prod2,
                                url=repo2_url).create()
    lce1 = entities.LifecycleEnvironment(organization=org).create()
    lce2 = entities.LifecycleEnvironment(organization=org).create()
    # Associate the lifecycle environments with the capsule
    capsule = entities.Capsule(id=self.capsule_id).read()
    for lce_id in (lce1.id, lce2.id):
        capsule.content_add_lifecycle_environment(
            data={'environment_id': lce_id})
    result = capsule.content_lifecycle_environments()
    self.assertGreaterEqual(len(result['results']), 2)
    self.assertTrue({lce1.id, lce2.id}.issubset(
        [capsule_lce['id'] for capsule_lce in result['results']]))
    # Create content views with the repositories
    cv1 = entities.ContentView(organization=org,
                               repository=[repo1]).create()
    cv2 = entities.ContentView(organization=org,
                               repository=[repo2]).create()
    # Sync first repository
    repo1.sync()
    repo1 = repo1.read()
    # Publish new version of the content view
    cv1.publish()
    cv1 = cv1.read()
    self.assertEqual(len(cv1.version), 1)
    cvv1 = cv1.version[-1].read()
    # Promote content view to lifecycle environment
    promote(cvv1, lce1.id)
    cvv1 = cvv1.read()
    # Library + lce1
    self.assertEqual(len(cvv1.environment), 2)
    # Assert that a task to sync lifecycle environment to the capsule
    # is started (or finished already)
    sync_status = capsule.content_get_sync()
    self.assertTrue(
        len(sync_status['active_sync_tasks']) >= 1
        or sync_status['last_sync_time'])
    # Wait till capsule sync finishes
    for task in sync_status['active_sync_tasks']:
        entities.ForemanTask(id=task['id']).poll()
    # Move one package from the first repo to second one
    ssh.command('mv {} {}'.format(
        os.path.join(PULP_PUBLISHED_YUM_REPOS_PATH, repo1_name,
                     FAKE_1_YUM_REPO_RPMS[2]),
        os.path.join(PULP_PUBLISHED_YUM_REPOS_PATH, repo2_name,
                     FAKE_1_YUM_REPO_RPMS[2]),
    ))
    # Update repositories (re-trigger 'createrepo' command)
    create_repo(repo1_name)
    create_repo(repo2_name)
    # Synchronize first repository
    repo1.sync()
    cv1.publish()
    cv1 = cv1.read()
    self.assertEqual(len(cv1.version), 2)
    cv1.version.sort(key=lambda version: version.id)
    cvv1 = cv1.version[-1].read()
    # Promote content view to lifecycle environment
    promote(cvv1, lce1.id)
    cvv1 = cvv1.read()
    self.assertEqual(len(cvv1.environment), 2)
    # Synchronize second repository
    repo2.sync()
    repo2 = repo2.read()
    # Only the moved package should now be in the second repo
    self.assertEqual(repo2.content_counts['package'], 1)
    cv2.publish()
    cv2 = cv2.read()
    self.assertEqual(len(cv2.version), 1)
    cvv2 = cv2.version[-1].read()
    # Promote content view to lifecycle environment
    promote(cvv2, lce2.id)
    cvv2 = cvv2.read()
    self.assertEqual(len(cvv2.environment), 2)
    # Create activation key, add subscription to second repo only
    activation_key = entities.ActivationKey(content_view=cv2,
                                            environment=lce2,
                                            organization=org).create()
    subscription = entities.Subscription(organization=org).search(
        query={'search': 'name={}'.format(prod2.name)})[0]
    activation_key.add_subscriptions(
        data={'subscription_id': subscription.id})
    # Subscribe a host with activation key
    with VirtualMachine(distro=DISTRO_RHEL7) as client:
        client.install_katello_ca()
        client.register_contenthost(org.label, activation_key.name)
        # FIX: str.rstrip('.rpm') strips any trailing run of the
        # characters {'.', 'r', 'p', 'm'} rather than the literal '.rpm'
        # suffix, which can mangle package names ending in those letters.
        # os.path.splitext removes exactly the extension.
        package_name = os.path.splitext(FAKE_1_YUM_REPO_RPMS[2])[0]
        # Install the package
        result = client.run('yum install -y {}'.format(package_name))
        self.assertEqual(result.return_code, 0)
        # Ensure package installed
        result = client.run('rpm -qa | grep {}'.format(package_name))
        self.assertEqual(result.return_code, 0)
        self.assertIn(package_name, result.stdout[0])